/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs-query.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-ssa-operands.h"
#include "tree-into-ssa.h"
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
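
/* A sketch of how the encoding composes (these identities follow
   directly from the values above): bit 0 is "less", bit 1 is "equal",
   bit 2 is "greater" and bit 3 is "unordered", so compound codes are
   bitwise ORs of the primitive ones.  For example:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_GE   == (COMPCODE_GT | COMPCODE_EQ)
     COMPCODE_LTGT == (COMPCODE_LT | COMPCODE_GT)
     COMPCODE_NE   == (COMPCODE_UNORD | COMPCODE_LTGT)

   ANDing or ORing two comparisons of the same operands therefore
   reduces to ANDing or ORing their compcodes.  */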
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
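
/* For example, given INTEGER_CSTs for 12 and 4 this returns an
   INTEGER_CST for 3, while for 13 and 4 it returns NULL_TREE because
   the remainder is nonzero.  */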
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
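
/* A typical (hypothetical) caller wraps a fold that may rely on
   undefined signed overflow like so:

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so that the deferred warning is only issued when the folded result
   is actually used.  */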
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
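
/* For example, sin is odd (-sin (x) == sin (-x)), so a NEGATE_EXPR
   around such a call can be pushed into the argument.  The rint
   family is only treated as odd when -frounding-math is off, since
   directed rounding modes are not symmetric about zero.  */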
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
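
/* The only signed value this rejects is the type's minimum: for
   32-bit int, only INT_MIN (0x80000000) has just the sign bit set,
   and -INT_MIN is not representable.  */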
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one of the
	 operands does if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
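
/* For example, with floats and no signed zeros, -(A + (-2.0)) is
   cheaply negatable: the REAL_CST -2.0 is negatable, so the whole sum
   can become 2.0 - A.  A positive constant such as 2.0 is deliberately
   reported as not negatable, to keep constants canonically positive.  */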
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
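
/* For example, negate_expr on the INTEGER_CST 5 yields -5 directly,
   while on an arbitrary variable V it falls back to wrapping V in a
   NEGATE_EXPR, so callers always get a usable tree back.  */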
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
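
/* For example, splitting A - 5 with CODE == PLUS_EXPR yields the
   variable part A, *LITP == 0 and *MINUS_LITP == 5, which
   associate_trees below can recombine with other split operands.  */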
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
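
/* For example, associating T1 == -B with T2 == A + C under PLUS_EXPR
   produces (A + C) - B instead of naively building (-B) + (A + C),
   without recursing into fold and risking infinite recursion.  */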
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
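
/* For example, int_const_binop (PLUS_EXPR, 2, 3) yields the
   INTEGER_CST 5, while any of the division or modulus codes with a
   zero second operand yield NULL_TREE instead of folding.  */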
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
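
/* As an illustration of the straightforward complex division above:
   (3 + 4i) / (0 + 2i) has magsquared == 4, t1 == 8 and t2 == -6, so
   with RDIV_EXPR the folded result is 2 - 1.5i.  */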
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
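
/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds directly to size_int (12), and adding size_zero_node simply
   returns the other operand.  */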
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
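
/* For example, with sizetype constants 2 and 6 this computes
   0 - (ssizetype) (6 - 2) == -4 in ssizetype; subtracting the smaller
   value first keeps the unsigned intermediate subtraction from
   wrapping.  */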
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
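
/* For example, converting the REAL_CST 1e30 to a 32-bit signed int
   saturates to INT_MAX (2147483647) with TREE_OVERFLOW set, and a NaN
   converts to zero, also flagged as overflow.  */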
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
2005 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2006 to a floating point type. */
2008 static tree
2009 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2011 REAL_VALUE_TYPE value;
2012 tree t;
2014 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2015 t = build_real (type, value);
2017 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2018 return t;
2021 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2022 to another fixed-point type. */
2024 static tree
2025 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2027 FIXED_VALUE_TYPE value;
2028 tree t;
2029 bool overflow_p;
2031 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2032 TYPE_SATURATING (type));
2033 t = build_fixed (type, value);
2035 /* Propagate overflow flags. */
2036 if (overflow_p | TREE_OVERFLOW (arg1))
2037 TREE_OVERFLOW (t) = 1;
2038 return t;
2041 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2042 to a fixed-point type. */
2044 static tree
2045 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2047 FIXED_VALUE_TYPE value;
2048 tree t;
2049 bool overflow_p;
2050 double_int di;
2052 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2054 di.low = TREE_INT_CST_ELT (arg1, 0);
2055 if (TREE_INT_CST_NUNITS (arg1) == 1)
2056 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2057 else
2058 di.high = TREE_INT_CST_ELT (arg1, 1);
2060 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2061 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2062 TYPE_SATURATING (type));
2063 t = build_fixed (type, value);
2065 /* Propagate overflow flags. */
2066 if (overflow_p | TREE_OVERFLOW (arg1))
2067 TREE_OVERFLOW (t) = 1;
2068 return t;
2071 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2072 to a fixed-point type. */
2074 static tree
2075 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2077 FIXED_VALUE_TYPE value;
2078 tree t;
2079 bool overflow_p;
2081 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2082 &TREE_REAL_CST (arg1),
2083 TYPE_SATURATING (type));
2084 t = build_fixed (type, value);
2086 /* Propagate overflow flags. */
2087 if (overflow_p | TREE_OVERFLOW (arg1))
2088 TREE_OVERFLOW (t) = 1;
2089 return t;
2092 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2093 type TYPE. If no simplification can be done return NULL_TREE. */
2095 static tree
2096 fold_convert_const (enum tree_code code, tree type, tree arg1)
2098 if (TREE_TYPE (arg1) == type)
2099 return arg1;
2101 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2102 || TREE_CODE (type) == OFFSET_TYPE)
2104 if (TREE_CODE (arg1) == INTEGER_CST)
2105 return fold_convert_const_int_from_int (type, arg1);
2106 else if (TREE_CODE (arg1) == REAL_CST)
2107 return fold_convert_const_int_from_real (code, type, arg1);
2108 else if (TREE_CODE (arg1) == FIXED_CST)
2109 return fold_convert_const_int_from_fixed (type, arg1);
2111 else if (TREE_CODE (type) == REAL_TYPE)
2113 if (TREE_CODE (arg1) == INTEGER_CST)
2114 return build_real_from_int_cst (type, arg1);
2115 else if (TREE_CODE (arg1) == REAL_CST)
2116 return fold_convert_const_real_from_real (type, arg1);
2117 else if (TREE_CODE (arg1) == FIXED_CST)
2118 return fold_convert_const_real_from_fixed (type, arg1);
2120 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2122 if (TREE_CODE (arg1) == FIXED_CST)
2123 return fold_convert_const_fixed_from_fixed (type, arg1);
2124 else if (TREE_CODE (arg1) == INTEGER_CST)
2125 return fold_convert_const_fixed_from_int (type, arg1);
2126 else if (TREE_CODE (arg1) == REAL_CST)
2127 return fold_convert_const_fixed_from_real (type, arg1);
2129 return NULL_TREE;
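/* Summary of the dispatch above (sketch): rows are the class of TYPE,
   columns the kind of constant ARG1.

     TYPE \ ARG1        INTEGER_CST     REAL_CST         FIXED_CST
     integral/ptr/off   int_from_int    int_from_real    int_from_fixed
     REAL_TYPE          real_from_int*  real_from_real   real_from_fixed
     FIXED_POINT_TYPE   fixed_from_int  fixed_from_real  fixed_from_fixed

   (*) via build_real_from_int_cst.  Every other combination returns
   NULL_TREE, i.e. "no simplification possible".  */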
2132 /* Construct a vector of zero elements of vector type TYPE. */
2134 static tree
2135 build_zero_vector (tree type)
2137 tree t;
2139 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2140 return build_vector_from_val (type, t);
2143 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2145 bool
2146 fold_convertible_p (const_tree type, const_tree arg)
2148 tree orig = TREE_TYPE (arg);
2150 if (type == orig)
2151 return true;
2153 if (TREE_CODE (arg) == ERROR_MARK
2154 || TREE_CODE (type) == ERROR_MARK
2155 || TREE_CODE (orig) == ERROR_MARK)
2156 return false;
2158 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2159 return true;
2161 switch (TREE_CODE (type))
2163 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2164 case POINTER_TYPE: case REFERENCE_TYPE:
2165 case OFFSET_TYPE:
2166 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2167 || TREE_CODE (orig) == OFFSET_TYPE)
2168 return true;
2169 return (TREE_CODE (orig) == VECTOR_TYPE
2170 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2172 case REAL_TYPE:
2173 case FIXED_POINT_TYPE:
2174 case COMPLEX_TYPE:
2175 case VECTOR_TYPE:
2176 case VOID_TYPE:
2177 return TREE_CODE (type) == TREE_CODE (orig);
2179 default:
2180 return false;
2184 /* Convert expression ARG to type TYPE. Used by the middle-end for
2185 simple conversions in preference to calling the front-end's convert. */
2187 tree
2188 fold_convert_loc (location_t loc, tree type, tree arg)
2190 tree orig = TREE_TYPE (arg);
2191 tree tem;
2193 if (type == orig)
2194 return arg;
2196 if (TREE_CODE (arg) == ERROR_MARK
2197 || TREE_CODE (type) == ERROR_MARK
2198 || TREE_CODE (orig) == ERROR_MARK)
2199 return error_mark_node;
2201 switch (TREE_CODE (type))
2203 case POINTER_TYPE:
2204 case REFERENCE_TYPE:
2205 /* Handle conversions between pointers to different address spaces. */
2206 if (POINTER_TYPE_P (orig)
2207 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2208 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2209 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2210 /* fall through */
2212 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2213 case OFFSET_TYPE:
2214 if (TREE_CODE (arg) == INTEGER_CST)
2216 tem = fold_convert_const (NOP_EXPR, type, arg);
2217 if (tem != NULL_TREE)
2218 return tem;
2220 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2221 || TREE_CODE (orig) == OFFSET_TYPE)
2222 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2223 if (TREE_CODE (orig) == COMPLEX_TYPE)
2224 return fold_convert_loc (loc, type,
2225 fold_build1_loc (loc, REALPART_EXPR,
2226 TREE_TYPE (orig), arg));
2227 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2228 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2231 case REAL_TYPE:
2232 if (TREE_CODE (arg) == INTEGER_CST)
2234 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2236 return tem;
2238 else if (TREE_CODE (arg) == REAL_CST)
2240 tem = fold_convert_const (NOP_EXPR, type, arg);
2241 if (tem != NULL_TREE)
2242 return tem;
2244 else if (TREE_CODE (arg) == FIXED_CST)
2246 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2247 if (tem != NULL_TREE)
2248 return tem;
2251 switch (TREE_CODE (orig))
2253 case INTEGER_TYPE:
2254 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2255 case POINTER_TYPE: case REFERENCE_TYPE:
2256 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2258 case REAL_TYPE:
2259 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2261 case FIXED_POINT_TYPE:
2262 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2264 case COMPLEX_TYPE:
2265 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2266 return fold_convert_loc (loc, type, tem);
2268 default:
2269 gcc_unreachable ();
2272 case FIXED_POINT_TYPE:
2273 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2274 || TREE_CODE (arg) == REAL_CST)
2276 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2277 if (tem != NULL_TREE)
2278 goto fold_convert_exit;
2281 switch (TREE_CODE (orig))
2283 case FIXED_POINT_TYPE:
2284 case INTEGER_TYPE:
2285 case ENUMERAL_TYPE:
2286 case BOOLEAN_TYPE:
2287 case REAL_TYPE:
2288 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2290 case COMPLEX_TYPE:
2291 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2292 return fold_convert_loc (loc, type, tem);
2294 default:
2295 gcc_unreachable ();
2298 case COMPLEX_TYPE:
2299 switch (TREE_CODE (orig))
2301 case INTEGER_TYPE:
2302 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2303 case POINTER_TYPE: case REFERENCE_TYPE:
2304 case REAL_TYPE:
2305 case FIXED_POINT_TYPE:
2306 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2307 fold_convert_loc (loc, TREE_TYPE (type), arg),
2308 fold_convert_loc (loc, TREE_TYPE (type),
2309 integer_zero_node));
2310 case COMPLEX_TYPE:
2312 tree rpart, ipart;
2314 if (TREE_CODE (arg) == COMPLEX_EXPR)
2316 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2317 TREE_OPERAND (arg, 0));
2318 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2319 TREE_OPERAND (arg, 1));
2320 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2323 arg = save_expr (arg);
2324 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2325 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2326 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2327 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2328 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2331 default:
2332 gcc_unreachable ();
2335 case VECTOR_TYPE:
2336 if (integer_zerop (arg))
2337 return build_zero_vector (type);
2338 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2339 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2340 || TREE_CODE (orig) == VECTOR_TYPE);
2341 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2343 case VOID_TYPE:
2344 tem = fold_ignored_result (arg);
2345 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2347 default:
2348 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2349 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2350 gcc_unreachable ();
2352 fold_convert_exit:
2353 protected_set_expr_location_unshare (tem, loc);
2354 return tem;
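/* Examples of trees produced above (sketch, GENERIC written loosely):

     (double) c, c a _Complex double  =>  (double) REALPART_EXPR <c>
     (double) 3, 3 an INTEGER_CST     =>  3.0, via fold_convert_const
     (v4si) v, v a same-sized v4sf    =>  VIEW_CONVERT_EXPR <v4si> (v)
     (void) e                         =>  NOP_EXPR of the ignored
                                          result of e  */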
2357 /* Return false if expr can be assumed not to be an lvalue, true
2358 otherwise. */
2360 static bool
2361 maybe_lvalue_p (const_tree x)
2363 /* We only need to wrap lvalue tree codes. */
2364 switch (TREE_CODE (x))
2366 case VAR_DECL:
2367 case PARM_DECL:
2368 case RESULT_DECL:
2369 case LABEL_DECL:
2370 case FUNCTION_DECL:
2371 case SSA_NAME:
2373 case COMPONENT_REF:
2374 case MEM_REF:
2375 case INDIRECT_REF:
2376 case ARRAY_REF:
2377 case ARRAY_RANGE_REF:
2378 case BIT_FIELD_REF:
2379 case OBJ_TYPE_REF:
2381 case REALPART_EXPR:
2382 case IMAGPART_EXPR:
2383 case PREINCREMENT_EXPR:
2384 case PREDECREMENT_EXPR:
2385 case SAVE_EXPR:
2386 case TRY_CATCH_EXPR:
2387 case WITH_CLEANUP_EXPR:
2388 case COMPOUND_EXPR:
2389 case MODIFY_EXPR:
2390 case TARGET_EXPR:
2391 case COND_EXPR:
2392 case BIND_EXPR:
2393 break;
2395 default:
2396 /* Assume the worst for front-end tree codes. */
2397 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2398 break;
2399 return false;
2402 return true;
2405 /* Return an expr equal to X but certainly not valid as an lvalue. */
2407 tree
2408 non_lvalue_loc (location_t loc, tree x)
2410 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2411 us. */
2412 if (in_gimple_form)
2413 return x;
2415 if (! maybe_lvalue_p (x))
2416 return x;
2417 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2420 /* When pedantic, return an expr equal to X but certainly not valid as a
2421 pedantic lvalue. Otherwise, return X. */
2423 static tree
2424 pedantic_non_lvalue_loc (location_t loc, tree x)
2426 return protected_set_expr_location_unshare (x, loc);
2429 /* Given a tree comparison code, return the code that is the logical inverse.
2430 It is generally not safe to do this for floating-point comparisons, except
2431 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2432 ERROR_MARK in this case. */
2434 enum tree_code
2435 invert_tree_comparison (enum tree_code code, bool honor_nans)
2437 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2438 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2439 return ERROR_MARK;
2441 switch (code)
2443 case EQ_EXPR:
2444 return NE_EXPR;
2445 case NE_EXPR:
2446 return EQ_EXPR;
2447 case GT_EXPR:
2448 return honor_nans ? UNLE_EXPR : LE_EXPR;
2449 case GE_EXPR:
2450 return honor_nans ? UNLT_EXPR : LT_EXPR;
2451 case LT_EXPR:
2452 return honor_nans ? UNGE_EXPR : GE_EXPR;
2453 case LE_EXPR:
2454 return honor_nans ? UNGT_EXPR : GT_EXPR;
2455 case LTGT_EXPR:
2456 return UNEQ_EXPR;
2457 case UNEQ_EXPR:
2458 return LTGT_EXPR;
2459 case UNGT_EXPR:
2460 return LE_EXPR;
2461 case UNGE_EXPR:
2462 return LT_EXPR;
2463 case UNLT_EXPR:
2464 return GE_EXPR;
2465 case UNLE_EXPR:
2466 return GT_EXPR;
2467 case ORDERED_EXPR:
2468 return UNORDERED_EXPR;
2469 case UNORDERED_EXPR:
2470 return ORDERED_EXPR;
2471 default:
2472 gcc_unreachable ();
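/* Illustrative sketch (not part of GCC): why the inversion above must
   be NaN-aware.  When an operand is NaN, x < y is false, so its
   logical inverse is true -- which is UNGE_EXPR (unordered or greater
   or equal), not GE_EXPR.  In plain C:  */
#if 0
#include <assert.h>
#include <math.h>

static void
demo (void)
{
  double x = NAN, y = 1.0;
  assert (!(x < y));			/* NaN compares false...  */
  assert (!(x >= y));			/* ...and so does the naive
					   inverse GE.  */
  assert (isunordered (x, y) || x >= y);  /* UNGE: the true inverse.  */
}
#endif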
2476 /* Similar, but return the comparison that results if the operands are
2477 swapped. This is safe for floating-point. */
2479 enum tree_code
2480 swap_tree_comparison (enum tree_code code)
2482 switch (code)
2484 case EQ_EXPR:
2485 case NE_EXPR:
2486 case ORDERED_EXPR:
2487 case UNORDERED_EXPR:
2488 case LTGT_EXPR:
2489 case UNEQ_EXPR:
2490 return code;
2491 case GT_EXPR:
2492 return LT_EXPR;
2493 case GE_EXPR:
2494 return LE_EXPR;
2495 case LT_EXPR:
2496 return GT_EXPR;
2497 case LE_EXPR:
2498 return GE_EXPR;
2499 case UNGT_EXPR:
2500 return UNLT_EXPR;
2501 case UNGE_EXPR:
2502 return UNLE_EXPR;
2503 case UNLT_EXPR:
2504 return UNGT_EXPR;
2505 case UNLE_EXPR:
2506 return UNGE_EXPR;
2507 default:
2508 gcc_unreachable ();
2513 /* Convert a comparison tree code from an enum tree_code representation
2514 into a compcode bit-based encoding. This function is the inverse of
2515 compcode_to_comparison. */
2517 static enum comparison_code
2518 comparison_to_compcode (enum tree_code code)
2520 switch (code)
2522 case LT_EXPR:
2523 return COMPCODE_LT;
2524 case EQ_EXPR:
2525 return COMPCODE_EQ;
2526 case LE_EXPR:
2527 return COMPCODE_LE;
2528 case GT_EXPR:
2529 return COMPCODE_GT;
2530 case NE_EXPR:
2531 return COMPCODE_NE;
2532 case GE_EXPR:
2533 return COMPCODE_GE;
2534 case ORDERED_EXPR:
2535 return COMPCODE_ORD;
2536 case UNORDERED_EXPR:
2537 return COMPCODE_UNORD;
2538 case UNLT_EXPR:
2539 return COMPCODE_UNLT;
2540 case UNEQ_EXPR:
2541 return COMPCODE_UNEQ;
2542 case UNLE_EXPR:
2543 return COMPCODE_UNLE;
2544 case UNGT_EXPR:
2545 return COMPCODE_UNGT;
2546 case LTGT_EXPR:
2547 return COMPCODE_LTGT;
2548 case UNGE_EXPR:
2549 return COMPCODE_UNGE;
2550 default:
2551 gcc_unreachable ();
2555 /* Convert a compcode bit-based encoding of a comparison operator back
2556 to GCC's enum tree_code representation. This function is the
2557 inverse of comparison_to_compcode. */
2559 static enum tree_code
2560 compcode_to_comparison (enum comparison_code code)
2562 switch (code)
2564 case COMPCODE_LT:
2565 return LT_EXPR;
2566 case COMPCODE_EQ:
2567 return EQ_EXPR;
2568 case COMPCODE_LE:
2569 return LE_EXPR;
2570 case COMPCODE_GT:
2571 return GT_EXPR;
2572 case COMPCODE_NE:
2573 return NE_EXPR;
2574 case COMPCODE_GE:
2575 return GE_EXPR;
2576 case COMPCODE_ORD:
2577 return ORDERED_EXPR;
2578 case COMPCODE_UNORD:
2579 return UNORDERED_EXPR;
2580 case COMPCODE_UNLT:
2581 return UNLT_EXPR;
2582 case COMPCODE_UNEQ:
2583 return UNEQ_EXPR;
2584 case COMPCODE_UNLE:
2585 return UNLE_EXPR;
2586 case COMPCODE_UNGT:
2587 return UNGT_EXPR;
2588 case COMPCODE_LTGT:
2589 return LTGT_EXPR;
2590 case COMPCODE_UNGE:
2591 return UNGE_EXPR;
2592 default:
2593 gcc_unreachable ();
2597 /* Return a tree for the comparison which is the combination of
2598 doing the AND or OR (depending on CODE) of the two operations LCODE
2599 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2600 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2601 if this makes the transformation invalid. */
2603 tree
2604 combine_comparisons (location_t loc,
2605 enum tree_code code, enum tree_code lcode,
2606 enum tree_code rcode, tree truth_type,
2607 tree ll_arg, tree lr_arg)
2609 bool honor_nans = HONOR_NANS (ll_arg);
2610 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2611 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2612 int compcode;
2614 switch (code)
2616 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2617 compcode = lcompcode & rcompcode;
2618 break;
2620 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2621 compcode = lcompcode | rcompcode;
2622 break;
2624 default:
2625 return NULL_TREE;
2628 if (!honor_nans)
2630 /* Eliminate unordered comparisons, as well as LTGT and ORD
2631 which are not used unless the mode has NaNs. */
2632 compcode &= ~COMPCODE_UNORD;
2633 if (compcode == COMPCODE_LTGT)
2634 compcode = COMPCODE_NE;
2635 else if (compcode == COMPCODE_ORD)
2636 compcode = COMPCODE_TRUE;
2638 else if (flag_trapping_math)
2640 /* Check that the original operation and the optimized ones will trap
2641 under the same condition. */
2642 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2643 && (lcompcode != COMPCODE_EQ)
2644 && (lcompcode != COMPCODE_ORD);
2645 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2646 && (rcompcode != COMPCODE_EQ)
2647 && (rcompcode != COMPCODE_ORD);
2648 bool trap = (compcode & COMPCODE_UNORD) == 0
2649 && (compcode != COMPCODE_EQ)
2650 && (compcode != COMPCODE_ORD);
2652 /* In a short-circuited boolean expression the LHS might be
2653 such that the RHS, if evaluated, will never trap. For
2654 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2655 if neither x nor y is NaN. (This is a mixed blessing: for
2656 example, the expression above will never trap, hence
2657 optimizing it to x < y would be invalid). */
2658 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2659 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2660 rtrap = false;
2662 /* If the comparison was short-circuited, and only the RHS
2663 trapped, we may now generate a spurious trap. */
2664 if (rtrap && !ltrap
2665 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2666 return NULL_TREE;
2668 /* If we changed the conditions that cause a trap, we lose. */
2669 if ((ltrap || rtrap) != trap)
2670 return NULL_TREE;
2673 if (compcode == COMPCODE_TRUE)
2674 return constant_boolean_node (true, truth_type);
2675 else if (compcode == COMPCODE_FALSE)
2676 return constant_boolean_node (false, truth_type);
2677 else
2679 enum tree_code tcode;
2681 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2682 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
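/* Worked examples (sketch): with identical operands the bitwise
   compcode arithmetic above computes the combined predicate directly:

     (x < y) || (x == y)   -->  x <= y   (OR of the two codes)
     (x <= y) && (x >= y)  -->  x == y   (AND of the two codes)
     (x < y) && (x > y)    -->  false    (the codes share no bits)

   With floating-point operands and -ftrapping-math, the trap checks
   above can veto a rewrite: ORD (x, y) && x < y is left alone, since
   the short-circuited original never traps while x < y alone may.  */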
2686 /* Return nonzero if two operands (typically of the same tree node)
2687 are necessarily equal. If either argument has side-effects this
2688 function returns zero. FLAGS modifies behavior as follows:
2690 If OEP_ONLY_CONST is set, only return nonzero for constants.
2691 This function tests whether the operands are indistinguishable;
2692 it does not test whether they are equal using C's == operation.
2693 The distinction is important for IEEE floating point, because
2694 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2695 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2697 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2698 even though it may hold multiple values during a function.
2699 This is because a GCC tree node guarantees that nothing else is
2700 executed between the evaluation of its "operands" (which may often
2701 be evaluated in arbitrary order). Hence if the operands themselves
2702 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2703 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2704 unset means assuming isochronic (or instantaneous) tree equivalence.
2705 Unless comparing arbitrary expression trees, such as from different
2706 statements, this flag can usually be left unset.
2708 If OEP_PURE_SAME is set, then pure functions with identical arguments
2709 are considered the same. It is used when the caller has other ways
2710 to ensure that global memory is unchanged in between.
2712 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2713 not values of expressions. OEP_CONSTANT_ADDRESS_OF in addition to
2714 OEP_ADDRESS_OF is used for ADDR_EXPR with TREE_CONSTANT flag set and we
2715 further ignore any side effects on SAVE_EXPRs then. */
2717 int
2718 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2720 /* If either is ERROR_MARK, they aren't equal. */
2721 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2722 || TREE_TYPE (arg0) == error_mark_node
2723 || TREE_TYPE (arg1) == error_mark_node)
2724 return 0;
2726 /* Similar, if either does not have a type (like a released SSA name),
2727 they aren't equal. */
2728 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2729 return 0;
2731 /* Check equality of integer constants before bailing out due to
2732 precision differences. */
2733 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2735 /* The address of an INTEGER_CST is not defined; check that we did not forget
2736 to drop the OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2737 gcc_checking_assert (!(flags
2738 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2739 return tree_int_cst_equal (arg0, arg1);
2742 if (!(flags & OEP_ADDRESS_OF))
2744 /* If the two types don't have the same signedness, then we can't consider
2745 them equal. We must check this before the STRIP_NOPS calls
2746 because they may change the signedness of the arguments. As pointers
2747 strictly don't have a signedness, require either two pointers or
2748 two non-pointers as well. */
2749 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2750 || POINTER_TYPE_P (TREE_TYPE (arg0))
2751 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2752 return 0;
2754 /* We cannot consider pointers to different address space equal. */
2755 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2756 && POINTER_TYPE_P (TREE_TYPE (arg1))
2757 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2758 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2759 return 0;
2761 /* If the two types don't have the same precision, then it is not safe
2762 to strip NOPs. */
2763 if (element_precision (TREE_TYPE (arg0))
2764 != element_precision (TREE_TYPE (arg1)))
2765 return 0;
2767 STRIP_NOPS (arg0);
2768 STRIP_NOPS (arg1);
2770 #if 0
2771 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2772 sanity check once the issue is solved. */
2773 else
2774 /* Addresses of conversions and SSA_NAMEs (and many other things)
2775 are not defined. Check that we did not forget to drop the
2776 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2777 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2778 && TREE_CODE (arg0) != SSA_NAME);
2779 #endif
2781 /* In case both args are comparisons but with different comparison
2782 code, try to swap the comparison operands of one arg to produce
2783 a match and compare that variant. */
2784 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2785 && COMPARISON_CLASS_P (arg0)
2786 && COMPARISON_CLASS_P (arg1))
2788 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2790 if (TREE_CODE (arg0) == swap_code)
2791 return operand_equal_p (TREE_OPERAND (arg0, 0),
2792 TREE_OPERAND (arg1, 1), flags)
2793 && operand_equal_p (TREE_OPERAND (arg0, 1),
2794 TREE_OPERAND (arg1, 0), flags);
2797 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2799 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2800 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2802 else if (flags & OEP_ADDRESS_OF)
2804 /* If we are interested in comparing addresses, ignore
2805 MEM_REF wrappings of the base that can appear just for
2806 TBAA reasons. */
2807 if (TREE_CODE (arg0) == MEM_REF
2808 && DECL_P (arg1)
2809 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2810 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2811 && integer_zerop (TREE_OPERAND (arg0, 1)))
2812 return 1;
2813 else if (TREE_CODE (arg1) == MEM_REF
2814 && DECL_P (arg0)
2815 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2816 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2817 && integer_zerop (TREE_OPERAND (arg1, 1)))
2818 return 1;
2819 return 0;
2821 else
2822 return 0;
2825 /* When not checking addresses, this is needed for conversions and for
2826 COMPONENT_REF. Might as well play it safe and always test this. */
2827 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2828 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2829 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2830 && !(flags & OEP_ADDRESS_OF)))
2831 return 0;
2833 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2834 We don't care about side effects in that case because the SAVE_EXPR
2835 takes care of that for us. In all other cases, two expressions are
2836 equal if they have no side effects. If we have two identical
2837 expressions with side effects that should be treated the same due
2838 to the only side effects being identical SAVE_EXPR's, that will
2839 be detected in the recursive calls below.
2840 If we are taking an invariant address of two identical objects
2841 they are necessarily equal as well. */
2842 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2843 && (TREE_CODE (arg0) == SAVE_EXPR
2844 || (flags & OEP_CONSTANT_ADDRESS_OF)
2845 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2846 return 1;
2848 /* Next handle constant cases, those for which we can return 1 even
2849 if ONLY_CONST is set. */
2850 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2851 switch (TREE_CODE (arg0))
2853 case INTEGER_CST:
2854 return tree_int_cst_equal (arg0, arg1);
2856 case FIXED_CST:
2857 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2858 TREE_FIXED_CST (arg1));
2860 case REAL_CST:
2861 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2862 return 1;
2865 if (!HONOR_SIGNED_ZEROS (arg0))
2867 /* If we do not distinguish between signed and unsigned zero,
2868 consider them equal. */
2869 if (real_zerop (arg0) && real_zerop (arg1))
2870 return 1;
2872 return 0;
2874 case VECTOR_CST:
2876 unsigned i;
2878 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2879 return 0;
2881 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2883 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2884 VECTOR_CST_ELT (arg1, i), flags))
2885 return 0;
2887 return 1;
2890 case COMPLEX_CST:
2891 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2892 flags)
2893 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2894 flags));
2896 case STRING_CST:
2897 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2898 && ! memcmp (TREE_STRING_POINTER (arg0),
2899 TREE_STRING_POINTER (arg1),
2900 TREE_STRING_LENGTH (arg0)));
2902 case ADDR_EXPR:
2903 gcc_checking_assert (!(flags
2904 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2905 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2906 flags | OEP_ADDRESS_OF
2907 | OEP_CONSTANT_ADDRESS_OF);
2908 case CONSTRUCTOR:
2909 /* In GIMPLE empty constructors are allowed in initializers of
2910 aggregates. */
2911 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2912 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2913 default:
2914 break;
2917 if (flags & OEP_ONLY_CONST)
2918 return 0;
2920 /* Define macros to test an operand from arg0 and arg1 for equality and a
2921 variant that allows null and views null as being different from any
2922 non-null value. In the latter case, if either is null, they both
2923 must be; otherwise, do the normal comparison. */
2924 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2925 TREE_OPERAND (arg1, N), flags)
2927 #define OP_SAME_WITH_NULL(N) \
2928 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2929 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2931 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2933 case tcc_unary:
2934 /* Two conversions are equal only if signedness and modes match. */
2935 switch (TREE_CODE (arg0))
2937 CASE_CONVERT:
2938 case FIX_TRUNC_EXPR:
2939 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2940 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2941 return 0;
2942 break;
2943 default:
2944 break;
2947 return OP_SAME (0);
2950 case tcc_comparison:
2951 case tcc_binary:
2952 if (OP_SAME (0) && OP_SAME (1))
2953 return 1;
2955 /* For commutative ops, allow the other order. */
2956 return (commutative_tree_code (TREE_CODE (arg0))
2957 && operand_equal_p (TREE_OPERAND (arg0, 0),
2958 TREE_OPERAND (arg1, 1), flags)
2959 && operand_equal_p (TREE_OPERAND (arg0, 1),
2960 TREE_OPERAND (arg1, 0), flags));
2962 case tcc_reference:
2963 /* If either of the pointer (or reference) expressions we are
2964 dereferencing contain a side effect, these cannot be equal,
2965 but their addresses can be. */
2966 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2967 && (TREE_SIDE_EFFECTS (arg0)
2968 || TREE_SIDE_EFFECTS (arg1)))
2969 return 0;
2971 switch (TREE_CODE (arg0))
2973 case INDIRECT_REF:
2974 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF))
2975 && (TYPE_ALIGN (TREE_TYPE (arg0))
2976 != TYPE_ALIGN (TREE_TYPE (arg1))))
2977 return 0;
2978 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2979 return OP_SAME (0);
2981 case REALPART_EXPR:
2982 case IMAGPART_EXPR:
2983 case VIEW_CONVERT_EXPR:
2984 return OP_SAME (0);
2986 case TARGET_MEM_REF:
2987 case MEM_REF:
2988 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)))
2990 /* Require equal access sizes */
2991 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2992 && (!TYPE_SIZE (TREE_TYPE (arg0))
2993 || !TYPE_SIZE (TREE_TYPE (arg1))
2994 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2995 TYPE_SIZE (TREE_TYPE (arg1)),
2996 flags)))
2997 return 0;
2998 /* Verify that access happens in similar types. */
2999 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3000 return 0;
3001 /* Verify that accesses are TBAA compatible. */
3002 if (flag_strict_aliasing
3003 && (!alias_ptr_types_compatible_p
3004 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3005 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3006 || (MR_DEPENDENCE_CLIQUE (arg0)
3007 != MR_DEPENDENCE_CLIQUE (arg1))
3008 || (MR_DEPENDENCE_BASE (arg0)
3009 != MR_DEPENDENCE_BASE (arg1))))
3010 return 0;
3011 /* Verify that alignment is compatible. */
3012 if (TYPE_ALIGN (TREE_TYPE (arg0))
3013 != TYPE_ALIGN (TREE_TYPE (arg1)))
3014 return 0;
3016 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3017 return (OP_SAME (0) && OP_SAME (1)
3018 /* TARGET_MEM_REFs require equal extra operands. */
3019 && (TREE_CODE (arg0) != TARGET_MEM_REF
3020 || (OP_SAME_WITH_NULL (2)
3021 && OP_SAME_WITH_NULL (3)
3022 && OP_SAME_WITH_NULL (4))));
3024 case ARRAY_REF:
3025 case ARRAY_RANGE_REF:
3026 /* Operands 2 and 3 may be null.
3027 Compare the array index by value first if it is constant, as we
3028 may have different types but the same value here. */
3029 if (!OP_SAME (0))
3030 return 0;
3031 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3032 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3033 TREE_OPERAND (arg1, 1))
3034 || OP_SAME (1))
3035 && OP_SAME_WITH_NULL (2)
3036 && OP_SAME_WITH_NULL (3));
3038 case COMPONENT_REF:
3039 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3040 may be NULL when we're called to compare MEM_EXPRs. */
3041 if (!OP_SAME_WITH_NULL (0)
3042 || !OP_SAME (1))
3043 return 0;
3044 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3045 return OP_SAME_WITH_NULL (2);
3047 case BIT_FIELD_REF:
3048 if (!OP_SAME (0))
3049 return 0;
3050 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3051 return OP_SAME (1) && OP_SAME (2);
3053 default:
3054 return 0;
3057 case tcc_expression:
3058 switch (TREE_CODE (arg0))
3060 case ADDR_EXPR:
3061 /* Be sure we pass the right ADDRESS_OF flag. */
3062 gcc_checking_assert (!(flags
3063 & (OEP_ADDRESS_OF
3064 | OEP_CONSTANT_ADDRESS_OF)));
3065 return operand_equal_p (TREE_OPERAND (arg0, 0),
3066 TREE_OPERAND (arg1, 0),
3067 flags | OEP_ADDRESS_OF);
3069 case TRUTH_NOT_EXPR:
3070 return OP_SAME (0);
3072 case TRUTH_ANDIF_EXPR:
3073 case TRUTH_ORIF_EXPR:
3074 return OP_SAME (0) && OP_SAME (1);
3076 case FMA_EXPR:
3077 case WIDEN_MULT_PLUS_EXPR:
3078 case WIDEN_MULT_MINUS_EXPR:
3079 if (!OP_SAME (2))
3080 return 0;
3081 /* The multiplication operands are commutative. */
3082 /* FALLTHRU */
3084 case TRUTH_AND_EXPR:
3085 case TRUTH_OR_EXPR:
3086 case TRUTH_XOR_EXPR:
3087 if (OP_SAME (0) && OP_SAME (1))
3088 return 1;
3090 /* Otherwise take into account this is a commutative operation. */
3091 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3092 TREE_OPERAND (arg1, 1), flags)
3093 && operand_equal_p (TREE_OPERAND (arg0, 1),
3094 TREE_OPERAND (arg1, 0), flags));
3096 case COND_EXPR:
3097 case VEC_COND_EXPR:
3098 case DOT_PROD_EXPR:
3099 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3101 default:
3102 return 0;
3105 case tcc_vl_exp:
3106 switch (TREE_CODE (arg0))
3108 case CALL_EXPR:
3109 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3110 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3111 /* If the two CALL_EXPRs are not both internal or both normal
3112 function calls, then they are not equal. */
3113 return 0;
3114 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3116 /* If the CALL_EXPRs call different internal functions, then they
3117 are not equal. */
3118 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3119 return 0;
3121 else
3123 /* If the CALL_EXPRs call different functions, then they are not
3124 equal. */
3125 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3126 flags))
3127 return 0;
3131 unsigned int cef = call_expr_flags (arg0);
3132 if (flags & OEP_PURE_SAME)
3133 cef &= ECF_CONST | ECF_PURE;
3134 else
3135 cef &= ECF_CONST;
3136 if (!cef)
3137 return 0;
3140 /* Now see if all the arguments are the same. */
3142 const_call_expr_arg_iterator iter0, iter1;
3143 const_tree a0, a1;
3144 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3145 a1 = first_const_call_expr_arg (arg1, &iter1);
3146 a0 && a1;
3147 a0 = next_const_call_expr_arg (&iter0),
3148 a1 = next_const_call_expr_arg (&iter1))
3149 if (! operand_equal_p (a0, a1, flags))
3150 return 0;
3152 /* If we get here and both argument lists are exhausted
3153 then the CALL_EXPRs are equal. */
3154 return ! (a0 || a1);
3156 default:
3157 return 0;
3160 case tcc_declaration:
3161 /* Consider __builtin_sqrt equal to sqrt. */
3162 return (TREE_CODE (arg0) == FUNCTION_DECL
3163 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3164 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3165 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3167 case tcc_exceptional:
3168 if (TREE_CODE (arg0) == CONSTRUCTOR)
3170 /* In GIMPLE constructors are used only to build vectors from
3171 elements. Individual elements in the constructor must be
3172 indexed in increasing order and form an initial sequence.
3174 We make no effort to compare constructors in generic.
3175 (see sem_variable::equals in ipa-icf which can do so for
3176 constants). */
3177 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3178 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3179 return 0;
3181 /* Be sure that the constructed vectors have the same representation.
3182 So far we have only tested that element precision and modes match.
3183 Vectors may be BLKmode, so also check that the number of
3184 parts matches. */
3185 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3186 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3187 return 0;
3189 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3190 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3191 unsigned int len = vec_safe_length (v0);
3193 if (len != vec_safe_length (v1))
3194 return 0;
3196 for (unsigned int i = 0; i < len; i++)
3198 constructor_elt *c0 = &(*v0)[i];
3199 constructor_elt *c1 = &(*v1)[i];
3201 if (!operand_equal_p (c0->value, c1->value, flags)
3202 /* In GIMPLE the indexes can be either NULL or matching i.
3203 Double check this so we won't get false
3204 positives for GENERIC. */
3205 || (c0->index
3206 && (TREE_CODE (c0->index) != INTEGER_CST
3207 || !compare_tree_int (c0->index, i)))
3208 || (c1->index
3209 && (TREE_CODE (c1->index) != INTEGER_CST
3210 || !compare_tree_int (c1->index, i))))
3211 return 0;
3213 return 1;
3215 return 0;
3217 default:
3218 return 0;
3221 #undef OP_SAME
3222 #undef OP_SAME_WITH_NULL
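/* Usage sketch (hypothetical operands, not a unit test):

     operand_equal_p (a + b, b + a, 0)   returns 1: PLUS_EXPR is
       commutative, so the swapped order matches.
     operand_equal_p (f (), f (), 0)     returns 0 unless F is const
       (or pure and OEP_PURE_SAME is set): the calls have effects.
     operand_equal_p (-0.0, 0.0, 0)      returns 0 when signed zeros
       are honored, even though -0.0 == 0.0 numerically; with
       -fno-signed-zeros the two are treated as equal.
     operand_equal_p (d, MEM[&d, 0], OEP_ADDRESS_OF)
                                         returns 1: for addresses the
       TBAA-only MEM_REF wrapper around the decl is ignored.  */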
3225 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3226 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3228 When in doubt, return 0. */
3230 static int
3231 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3233 int unsignedp1, unsignedpo;
3234 tree primarg0, primarg1, primother;
3235 unsigned int correct_width;
3237 if (operand_equal_p (arg0, arg1, 0))
3238 return 1;
3240 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3241 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3242 return 0;
3244 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3245 and see if the inner values are the same. This removes any
3246 signedness comparison, which doesn't matter here. */
3247 primarg0 = arg0, primarg1 = arg1;
3248 STRIP_NOPS (primarg0);
3249 STRIP_NOPS (primarg1);
3250 if (operand_equal_p (primarg0, primarg1, 0))
3251 return 1;
3253 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3254 actual comparison operand, ARG0.
3256 First throw away any conversions to wider types
3257 already present in the operands. */
3259 primarg1 = get_narrower (arg1, &unsignedp1);
3260 primother = get_narrower (other, &unsignedpo);
3262 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3263 if (unsignedp1 == unsignedpo
3264 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3265 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3267 tree type = TREE_TYPE (arg0);
3269 /* Make sure shorter operand is extended the right way
3270 to match the longer operand. */
3271 primarg1 = fold_convert (signed_or_unsigned_type_for
3272 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3274 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3275 return 1;
3278 return 0;
3281 /* See if ARG is an expression that is either a comparison or is performing
3282 arithmetic on comparisons. The comparisons must only be comparing
3283 two different values, which will be stored in *CVAL1 and *CVAL2; if
3284 they are nonzero it means that some operands have already been found.
3285 No variables may be used anywhere else in the expression except in the
3286 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3287 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3289 If this is true, return 1. Otherwise, return zero. */
3291 static int
3292 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3294 enum tree_code code = TREE_CODE (arg);
3295 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3297 /* We can handle some of the tcc_expression cases here. */
3298 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3299 tclass = tcc_unary;
3300 else if (tclass == tcc_expression
3301 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3302 || code == COMPOUND_EXPR))
3303 tclass = tcc_binary;
3305 else if (tclass == tcc_expression && code == SAVE_EXPR
3306 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3308 /* If we've already found a CVAL1 or CVAL2, this expression is
3309 too complex to handle. */
3310 if (*cval1 || *cval2)
3311 return 0;
3313 tclass = tcc_unary;
3314 *save_p = 1;
3317 switch (tclass)
3319 case tcc_unary:
3320 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3322 case tcc_binary:
3323 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3324 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3325 cval1, cval2, save_p));
3327 case tcc_constant:
3328 return 1;
3330 case tcc_expression:
3331 if (code == COND_EXPR)
3332 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3333 cval1, cval2, save_p)
3334 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3335 cval1, cval2, save_p)
3336 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3337 cval1, cval2, save_p));
3338 return 0;
3340 case tcc_comparison:
3341 /* First see if we can handle the first operand, then the second. For
3342 the second operand, we know *CVAL1 can't be zero. It must be that
3343 one side of the comparison is each of the values; test for the
3344 case where this isn't true by failing if the two operands
3345 are the same. */
3347 if (operand_equal_p (TREE_OPERAND (arg, 0),
3348 TREE_OPERAND (arg, 1), 0))
3349 return 0;
3351 if (*cval1 == 0)
3352 *cval1 = TREE_OPERAND (arg, 0);
3353 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3355 else if (*cval2 == 0)
3356 *cval2 = TREE_OPERAND (arg, 0);
3357 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3359 else
3360 return 0;
3362 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3364 else if (*cval2 == 0)
3365 *cval2 = TREE_OPERAND (arg, 1);
3366 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3368 else
3369 return 0;
3371 return 1;
3373 default:
3374 return 0;
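/* Example (sketch): for ARG of the form (a < b) && (b == a),
   twoval_comparison_p succeeds with *CVAL1 = a and *CVAL2 = b, since
   every comparison in the tree mentions only those two values.  It
   fails for (a < b) && (a < c), which involves a third value.  */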
3378 /* ARG is a tree that is known to contain just arithmetic operations and
3379 comparisons. Evaluate the operations in the tree substituting NEW0 for
3380 any occurrence of OLD0 as an operand of a comparison and likewise for
3381 NEW1 and OLD1. */
3383 static tree
3384 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3385 tree old1, tree new1)
3387 tree type = TREE_TYPE (arg);
3388 enum tree_code code = TREE_CODE (arg);
3389 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3391 /* We can handle some of the tcc_expression cases here. */
3392 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3393 tclass = tcc_unary;
3394 else if (tclass == tcc_expression
3395 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3396 tclass = tcc_binary;
3398 switch (tclass)
3400 case tcc_unary:
3401 return fold_build1_loc (loc, code, type,
3402 eval_subst (loc, TREE_OPERAND (arg, 0),
3403 old0, new0, old1, new1));
3405 case tcc_binary:
3406 return fold_build2_loc (loc, code, type,
3407 eval_subst (loc, TREE_OPERAND (arg, 0),
3408 old0, new0, old1, new1),
3409 eval_subst (loc, TREE_OPERAND (arg, 1),
3410 old0, new0, old1, new1));
3412 case tcc_expression:
3413 switch (code)
3415 case SAVE_EXPR:
3416 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3417 old1, new1);
3419 case COMPOUND_EXPR:
3420 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3421 old1, new1);
3423 case COND_EXPR:
3424 return fold_build3_loc (loc, code, type,
3425 eval_subst (loc, TREE_OPERAND (arg, 0),
3426 old0, new0, old1, new1),
3427 eval_subst (loc, TREE_OPERAND (arg, 1),
3428 old0, new0, old1, new1),
3429 eval_subst (loc, TREE_OPERAND (arg, 2),
3430 old0, new0, old1, new1));
3431 default:
3432 break;
3434 /* Fall through - ??? */
3436 case tcc_comparison:
3438 tree arg0 = TREE_OPERAND (arg, 0);
3439 tree arg1 = TREE_OPERAND (arg, 1);
3441 /* We need to check both for exact equality and tree equality. The
3442 former will be true if the operand has a side-effect. In that
3443 case, we know the operand occurred exactly once. */
3445 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3446 arg0 = new0;
3447 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3448 arg0 = new1;
3450 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3451 arg1 = new0;
3452 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3453 arg1 = new1;
3455 return fold_build2_loc (loc, code, type, arg0, arg1);
3458 default:
3459 return arg;
3463 /* Return a tree for the case when the result of an expression is RESULT
3464 converted to TYPE and OMITTED was previously an operand of the expression
3465 but is now not needed (e.g., we folded OMITTED * 0).
3467 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3468 the conversion of RESULT to TYPE. */
3470 tree
3471 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3473 tree t = fold_convert_loc (loc, type, result);
3475 /* If the resulting operand is an empty statement, just return the omitted
3476 statement cast to void. */
3477 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3478 return build1_loc (loc, NOP_EXPR, void_type_node,
3479 fold_ignored_result (omitted));
3481 if (TREE_SIDE_EFFECTS (omitted))
3482 return build2_loc (loc, COMPOUND_EXPR, type,
3483 fold_ignored_result (omitted), t);
3485 return non_lvalue_loc (loc, t);
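/* Example (sketch): when fold rewrites f () * 0 to 0, the call still
   has to be evaluated, so omit_one_operand_loc yields the GENERIC
   equivalent of

     (f (), 0)

   i.e. a COMPOUND_EXPR of the omitted operand and the result.  With a
   side-effect-free omitted operand it is simply (TYPE) RESULT.  */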
3488 /* Return a tree for the case when the result of an expression is RESULT
3489 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3490 of the expression but are now not needed.
3492 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3493 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3494 evaluated before OMITTED2. Otherwise, if neither has side effects,
3495 just do the conversion of RESULT to TYPE. */
3497 tree
3498 omit_two_operands_loc (location_t loc, tree type, tree result,
3499 tree omitted1, tree omitted2)
3501 tree t = fold_convert_loc (loc, type, result);
3503 if (TREE_SIDE_EFFECTS (omitted2))
3504 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3505 if (TREE_SIDE_EFFECTS (omitted1))
3506 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3508 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3512 /* Return a simplified tree node for the truth-negation of ARG. This
3513 never alters ARG itself. We assume that ARG is an operation that
3514 returns a truth value (0 or 1).
3516 FIXME: one would think we would fold the result, but it causes
3517 problems with the dominator optimizer. */
3519 static tree
3520 fold_truth_not_expr (location_t loc, tree arg)
3522 tree type = TREE_TYPE (arg);
3523 enum tree_code code = TREE_CODE (arg);
3524 location_t loc1, loc2;
3526 /* If this is a comparison, we can simply invert it, except for
3527 floating-point non-equality comparisons, in which case we just
3528 enclose a TRUTH_NOT_EXPR around what we have. */
3530 if (TREE_CODE_CLASS (code) == tcc_comparison)
3532 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3533 if (FLOAT_TYPE_P (op_type)
3534 && flag_trapping_math
3535 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3536 && code != NE_EXPR && code != EQ_EXPR)
3537 return NULL_TREE;
3539 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3540 if (code == ERROR_MARK)
3541 return NULL_TREE;
3543 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3544 TREE_OPERAND (arg, 1));
3547 switch (code)
3549 case INTEGER_CST:
3550 return constant_boolean_node (integer_zerop (arg), type);
3552 case TRUTH_AND_EXPR:
3553 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3554 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3555 return build2_loc (loc, TRUTH_OR_EXPR, type,
3556 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3557 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3559 case TRUTH_OR_EXPR:
3560 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3561 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3562 return build2_loc (loc, TRUTH_AND_EXPR, type,
3563 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3564 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3566 case TRUTH_XOR_EXPR:
3567 /* Here we can invert either operand. We invert the first operand
3568 unless the second operand is a TRUTH_NOT_EXPR in which case our
3569 result is the XOR of the first operand with the inside of the
3570 negation of the second operand. */
3572 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3573 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3574 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3575 else
3576 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3577 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3578 TREE_OPERAND (arg, 1));
3580 case TRUTH_ANDIF_EXPR:
3581 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3582 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3583 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3584 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3585 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3587 case TRUTH_ORIF_EXPR:
3588 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3589 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3590 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3591 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3592 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3594 case TRUTH_NOT_EXPR:
3595 return TREE_OPERAND (arg, 0);
3597 case COND_EXPR:
3599 tree arg1 = TREE_OPERAND (arg, 1);
3600 tree arg2 = TREE_OPERAND (arg, 2);
3602 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3603 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3605 /* A COND_EXPR may have a throw as one operand, which
3606 then has void type. Just leave void operands
3607 as they are. */
3608 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3609 VOID_TYPE_P (TREE_TYPE (arg1))
3610 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3611 VOID_TYPE_P (TREE_TYPE (arg2))
3612 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3615 case COMPOUND_EXPR:
3616 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3617 return build2_loc (loc, COMPOUND_EXPR, type,
3618 TREE_OPERAND (arg, 0),
3619 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3621 case NON_LVALUE_EXPR:
3622 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3623 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3625 CASE_CONVERT:
3626 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3627 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3629 /* ... fall through ... */
3631 case FLOAT_EXPR:
3632 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3633 return build1_loc (loc, TREE_CODE (arg), type,
3634 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3636 case BIT_AND_EXPR:
3637 if (!integer_onep (TREE_OPERAND (arg, 1)))
3638 return NULL_TREE;
3639 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3641 case SAVE_EXPR:
3642 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3644 case CLEANUP_POINT_EXPR:
3645 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3646 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3647 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3649 default:
3650 return NULL_TREE;
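/* Examples of the rewrites above (sketch):

     !(a && b)     -->  !a || !b           (De Morgan)
     !(a ? b : c)  -->  a ? !b : !c        (arms inverted, not the
                                            condition)
     !(x & 1)      -->  (x & 1) == 0
     !(f < g)      -->  NULL_TREE          for floats under
                                           -ftrapping-math; only a
                                           TRUTH_NOT_EXPR wrapper is
                                           safe there.  */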
3654 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3655 assume that ARG is an operation that returns a truth value (0 or 1
3656 for scalars, 0 or -1 for vectors). Return the folded expression if
3657 folding is successful. Otherwise, return NULL_TREE. */
3659 static tree
3660 fold_invert_truthvalue (location_t loc, tree arg)
3662 tree type = TREE_TYPE (arg);
3663 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3664 ? BIT_NOT_EXPR
3665 : TRUTH_NOT_EXPR,
3666 type, arg);
3669 /* Return a simplified tree node for the truth-negation of ARG. This
3670 never alters ARG itself. We assume that ARG is an operation that
3671 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3673 tree
3674 invert_truthvalue_loc (location_t loc, tree arg)
3676 if (TREE_CODE (arg) == ERROR_MARK)
3677 return arg;
3679 tree type = TREE_TYPE (arg);
3680 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3681 ? BIT_NOT_EXPR
3682 : TRUTH_NOT_EXPR,
3683 type, arg);
3686 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3687 with code CODE. This optimization is unsafe. */
3688 static tree
3689 distribute_real_division (location_t loc, enum tree_code code, tree type,
3690 tree arg0, tree arg1)
3692 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3693 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3695 /* (A / C) +- (B / C) -> (A +- B) / C. */
3696 if (mul0 == mul1
3697 && operand_equal_p (TREE_OPERAND (arg0, 1),
3698 TREE_OPERAND (arg1, 1), 0))
3699 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3700 fold_build2_loc (loc, code, type,
3701 TREE_OPERAND (arg0, 0),
3702 TREE_OPERAND (arg1, 0)),
3703 TREE_OPERAND (arg0, 1));
3705 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3706 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3707 TREE_OPERAND (arg1, 0), 0)
3708 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3709 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3711 REAL_VALUE_TYPE r0, r1;
3712 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3713 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3714 if (!mul0)
3715 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3716 if (!mul1)
3717 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3718 real_arithmetic (&r0, code, &r0, &r1);
3719 return fold_build2_loc (loc, MULT_EXPR, type,
3720 TREE_OPERAND (arg0, 0),
3721 build_real (type, r0));
3724 return NULL_TREE;
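/* Illustrative sketch (not part of GCC): why the comment above calls
   this unsafe.  Each division rounds separately, so the rewritten
   form can differ from the original in the last bits.  Hypothetical
   values, for demonstration only:  */
#if 0
#include <stdio.h>

int
main (void)
{
  double a = 1.0, b = 1e-16, c = 3.0;
  /* a/c + b/c versus (a + b)/c: two roundings versus one, so the
     printed values need not agree exactly.  */
  printf ("%.17g\n%.17g\n", a / c + b / c, (a + b) / c);
  return 0;
}
#endif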
3727 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3728 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3730 static tree
3731 make_bit_field_ref (location_t loc, tree inner, tree type,
3732 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3734 tree result, bftype;
3736 if (bitpos == 0)
3738 tree size = TYPE_SIZE (TREE_TYPE (inner));
3739 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3740 || POINTER_TYPE_P (TREE_TYPE (inner)))
3741 && tree_fits_shwi_p (size)
3742 && tree_to_shwi (size) == bitsize)
3743 return fold_convert_loc (loc, type, inner);
3746 bftype = type;
3747 if (TYPE_PRECISION (bftype) != bitsize
3748 || TYPE_UNSIGNED (bftype) == !unsignedp)
3749 bftype = build_nonstandard_integer_type (bitsize, 0);
3751 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3752 size_int (bitsize), bitsize_int (bitpos));
3754 if (bftype != type)
3755 result = fold_convert_loc (loc, type, result);
3757 return result;
3760 /* Optimize a bit-field compare.
3762 There are two cases: First is a compare against a constant and the
3763 second is a comparison of two items where the fields are at the same
3764 bit position relative to the start of a chunk (byte, halfword, word)
3765 large enough to contain it. In these cases we can avoid the shift
3766 implicit in bitfield extractions.
3768 For constants, we emit a compare of the shifted constant with the
3769 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3770 compared. For two fields at the same position, we do the ANDs with the
3771 similar mask and compare the result of the ANDs.
3773 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3774 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3775 are the left and right operands of the comparison, respectively.
3777 If the optimization described above can be done, we return the resulting
3778 tree. Otherwise we return zero. */
3780 static tree
3781 optimize_bit_field_compare (location_t loc, enum tree_code code,
3782 tree compare_type, tree lhs, tree rhs)
3784 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3785 tree type = TREE_TYPE (lhs);
3786 tree unsigned_type;
3787 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3788 machine_mode lmode, rmode, nmode;
3789 int lunsignedp, runsignedp;
3790 int lvolatilep = 0, rvolatilep = 0;
3791 tree linner, rinner = NULL_TREE;
3792 tree mask;
3793 tree offset;
3795 /* Get all the information about the extractions being done. If the bit size
3796 is the same as the size of the underlying object, we aren't doing an
3797 extraction at all and so can do nothing. We also don't want to
3798 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3799 then will no longer be able to replace it. */
3800 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3801 &lunsignedp, &lvolatilep, false);
3802 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3803 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3804 return 0;
3806 if (!const_p)
3808 /* If this is not a constant, we can only do something if bit positions,
3809 sizes, and signedness are the same. */
3810 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3811 &runsignedp, &rvolatilep, false);
3813 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3814 || lunsignedp != runsignedp || offset != 0
3815 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3816 return 0;
3819 /* See if we can find a mode to refer to this field. We should be able to,
3820 but fail if we can't. */
3821 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3822 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3823 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3824 TYPE_ALIGN (TREE_TYPE (rinner))),
3825 word_mode, false);
3826 if (nmode == VOIDmode)
3827 return 0;
3829 /* Set signed and unsigned types of the precision of this mode for the
3830 shifts below. */
3831 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3833 /* Compute the bit position and size for the new reference and our offset
3834 within it. If the new reference is the same size as the original, we
3835 won't optimize anything, so return zero. */
3836 nbitsize = GET_MODE_BITSIZE (nmode);
3837 nbitpos = lbitpos & ~ (nbitsize - 1);
3838 lbitpos -= nbitpos;
3839 if (nbitsize == lbitsize)
3840 return 0;
3842 if (BYTES_BIG_ENDIAN)
3843 lbitpos = nbitsize - lbitsize - lbitpos;
3845 /* Make the mask to be used against the extracted field. */
3846 mask = build_int_cst_type (unsigned_type, -1);
3847 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3848 mask = const_binop (RSHIFT_EXPR, mask,
3849 size_int (nbitsize - lbitsize - lbitpos));
3851 if (! const_p)
3852 /* If not comparing with constant, just rework the comparison
3853 and return. */
3854 return fold_build2_loc (loc, code, compare_type,
3855 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3856 make_bit_field_ref (loc, linner,
3857 unsigned_type,
3858 nbitsize, nbitpos,
3859 1),
3860 mask),
3861 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3862 make_bit_field_ref (loc, rinner,
3863 unsigned_type,
3864 nbitsize, nbitpos,
3865 1),
3866 mask));
3868 /* Otherwise, we are handling the constant case. See if the constant is too
3869 big for the field. Warn and return a tree for 0 (false) if so. We do
3870 this not only for its own sake, but to avoid having to test for this
3871 error case below. If we didn't, we might generate wrong code.
3873 For unsigned fields, the constant shifted right by the field length should
3874 be all zero. For signed fields, the high-order bits should agree with
3875 the sign bit. */
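/* Hypothetical instances of the check below: for an unsigned 3-bit
   field, a comparison against 9 is decided by 9 >> 3 != 0, so "f == 9"
   folds to false and "f != 9" to true.  For a signed 3-bit field
   (range [-4, 3]), a comparison against 5 is caught because the bits
   from the sign bit upward (5 >> 2 == 1) are neither all zeros nor
   all ones.  */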
3877 if (lunsignedp)
3879 if (wi::lrshift (rhs, lbitsize) != 0)
3881 warning (0, "comparison is always %d due to width of bit-field",
3882 code == NE_EXPR);
3883 return constant_boolean_node (code == NE_EXPR, compare_type);
3886 else
3888 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3889 if (tem != 0 && tem != -1)
3891 warning (0, "comparison is always %d due to width of bit-field",
3892 code == NE_EXPR);
3893 return constant_boolean_node (code == NE_EXPR, compare_type);
3897 /* Single-bit compares should always be against zero. */
3898 if (lbitsize == 1 && ! integer_zerop (rhs))
3900 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3901 rhs = build_int_cst (type, 0);
3904 /* Make a new bitfield reference, shift the constant over the
3905 appropriate number of bits and mask it with the computed mask
3906 (in case this was a signed field). If we changed it, make a new one. */
3907 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3909 rhs = const_binop (BIT_AND_EXPR,
3910 const_binop (LSHIFT_EXPR,
3911 fold_convert_loc (loc, unsigned_type, rhs),
3912 size_int (lbitpos)),
3913 mask);
3915 lhs = build2_loc (loc, code, compare_type,
3916 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3917 return lhs;
3920 /* Subroutine for fold_truth_andor_1: decode a field reference.
3922 If EXP is a comparison reference, we return the innermost reference.
3924 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3925 set to the starting bit number.
3927 If the innermost field can be completely contained in a mode-sized
3928 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3930 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3931 otherwise it is not changed.
3933 *PUNSIGNEDP is set to the signedness of the field.
3935 *PMASK is set to the mask used. This is either contained in a
3936 BIT_AND_EXPR or derived from the width of the field.
3938 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3940 Return 0 if this is not a component reference or is one that we can't
3941 do anything with. */
3943 static tree
3944 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3945 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3946 int *punsignedp, int *pvolatilep,
3947 tree *pmask, tree *pand_mask)
3949 tree outer_type = 0;
3950 tree and_mask = 0;
3951 tree mask, inner, offset;
3952 tree unsigned_type;
3953 unsigned int precision;
3955 /* All the optimizations using this function assume integer fields.
3956 There are problems with FP fields since the type_for_size call
3957 below can fail for, e.g., XFmode. */
3958 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3959 return 0;
3961 /* We are interested in the bare arrangement of bits, so strip everything
3962 that doesn't affect the machine mode. However, record the type of the
3963 outermost expression if it may matter below. */
3964 if (CONVERT_EXPR_P (exp)
3965 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3966 outer_type = TREE_TYPE (exp);
3967 STRIP_NOPS (exp);
3969 if (TREE_CODE (exp) == BIT_AND_EXPR)
3971 and_mask = TREE_OPERAND (exp, 1);
3972 exp = TREE_OPERAND (exp, 0);
3973 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3974 if (TREE_CODE (and_mask) != INTEGER_CST)
3975 return 0;
3978 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3979 punsignedp, pvolatilep, false);
3980 if ((inner == exp && and_mask == 0)
3981 || *pbitsize < 0 || offset != 0
3982 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3983 return 0;
3985 /* If the number of bits in the reference is the same as the bitsize of
3986 the outer type, then the outer type gives the signedness. Otherwise
3987 (in case of a small bitfield) the signedness is unchanged. */
3988 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3989 *punsignedp = TYPE_UNSIGNED (outer_type);
3991 /* Compute the mask to access the bitfield. */
3992 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3993 precision = TYPE_PRECISION (unsigned_type);
3995 mask = build_int_cst_type (unsigned_type, -1);
3997 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3998 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4000 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4001 if (and_mask != 0)
4002 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4003 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4005 *pmask = mask;
4006 *pand_mask = and_mask;
4007 return inner;
4010 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4011 bit positions and the type of MASK is signed.
4013 static int
4014 all_ones_mask_p (const_tree mask, unsigned int size)
4016 tree type = TREE_TYPE (mask);
4017 unsigned int precision = TYPE_PRECISION (type);
4019 /* If this function returns true when the type of the mask is
4020 UNSIGNED, then there will be errors. In particular see
4021 gcc.c-torture/execute/990326-1.c. There does not appear to be
4022 any documentation paper trail as to why this is so. But the pre
4023 wide-int code worked with that restriction and it has been preserved
4024 here. */
4025 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4026 return false;
4028 return wi::mask (size, false, precision) == mask;
4031 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4032 represents the sign bit of EXP's type. If EXP represents a sign
4033 or zero extension, also test VAL against the unextended type.
4034 The return value is the (sub)expression whose sign bit is VAL,
4035 or NULL_TREE otherwise. */
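/* For example (hypothetically, with a signed 8-bit EXP): VAL == -128,
   bit pattern 0x80, is exactly the sign bit, so EXP is returned; any
   other constant yields NULL_TREE.  */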
4037 tree
4038 sign_bit_p (tree exp, const_tree val)
4040 int width;
4041 tree t;
4043 /* Tree EXP must have an integral type. */
4044 t = TREE_TYPE (exp);
4045 if (! INTEGRAL_TYPE_P (t))
4046 return NULL_TREE;
4048 /* Tree VAL must be an integer constant. */
4049 if (TREE_CODE (val) != INTEGER_CST
4050 || TREE_OVERFLOW (val))
4051 return NULL_TREE;
4053 width = TYPE_PRECISION (t);
4054 if (wi::only_sign_bit_p (val, width))
4055 return exp;
4057 /* Handle extension from a narrower type. */
4058 if (TREE_CODE (exp) == NOP_EXPR
4059 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4060 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4062 return NULL_TREE;
4065 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4066 to be evaluated unconditionally. */
4068 static int
4069 simple_operand_p (const_tree exp)
4071 /* Strip any conversions that don't change the machine mode. */
4072 STRIP_NOPS (exp);
4074 return (CONSTANT_CLASS_P (exp)
4075 || TREE_CODE (exp) == SSA_NAME
4076 || (DECL_P (exp)
4077 && ! TREE_ADDRESSABLE (exp)
4078 && ! TREE_THIS_VOLATILE (exp)
4079 && ! DECL_NONLOCAL (exp)
4080 /* Don't regard global variables as simple. They may be
4081 allocated in ways unknown to the compiler (shared memory,
4082 #pragma weak, etc). */
4083 && ! TREE_PUBLIC (exp)
4084 && ! DECL_EXTERNAL (exp)
4085 /* Weakrefs are not safe to be read, since they can be NULL.
4086 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4087 have DECL_WEAK flag set. */
4088 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4089 /* Loading a static variable is unduly expensive, but global
4090 registers aren't expensive. */
4091 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4094 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4095 to be evaluated unconditionally.
4096 In addition to simple_operand_p, we assume that comparisons, conversions,
4097 and logic-not operations are simple, if their operands are simple, too. */
4099 static bool
4100 simple_operand_p_2 (tree exp)
4102 enum tree_code code;
4104 if (TREE_SIDE_EFFECTS (exp)
4105 || tree_could_trap_p (exp))
4106 return false;
4108 while (CONVERT_EXPR_P (exp))
4109 exp = TREE_OPERAND (exp, 0);
4111 code = TREE_CODE (exp);
4113 if (TREE_CODE_CLASS (code) == tcc_comparison)
4114 return (simple_operand_p (TREE_OPERAND (exp, 0))
4115 && simple_operand_p (TREE_OPERAND (exp, 1)));
4117 if (code == TRUTH_NOT_EXPR)
4118 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4120 return simple_operand_p (exp);
4124 /* The following functions are subroutines to fold_range_test and allow it to
4125 try to change a logical combination of comparisons into a range test.
4127 For example, both
4128 X == 2 || X == 3 || X == 4 || X == 5
4129 and
4130 X >= 2 && X <= 5
4131 are converted to
4132 (unsigned) (X - 2) <= 3
4134 We describe each set of comparisons as being either inside or outside
4135 a range, using a variable named like IN_P, and then describe the
4136 range with a lower and upper bound. If one of the bounds is omitted,
4137 it represents either the highest or lowest value of the type.
4139 In the comments below, we represent a range by two numbers in brackets
4140 preceded by a "+" to designate being inside that range, or a "-" to
4141 designate being outside that range, so the condition can be inverted by
4142 flipping the prefix. An omitted bound is represented by a "-". For
4143 example, "- [-, 10]" means being outside the range starting at the lowest
4144 possible value and ending at 10, in other words, being greater than 10.
4145 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4146 always false.
4148 We set up things so that the missing bounds are handled in a consistent
4149 manner so neither a missing bound nor "true" and "false" need to be
4150 handled using a special case. */
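/* To make the example above concrete (assuming a 32-bit unsigned X):

     X == 2 || X == 3 || X == 4 || X == 5

   is the range "+ [2, 5]", and the rewrite

     (unsigned) (X - 2) <= 3

   works because subtracting 2 maps [2, 5] onto [0, 3] while any X < 2
   wraps around to a huge unsigned value and any X > 5 stays above 3;
   one subtraction and one comparison replace four tests.  */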
4152 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4153 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4154 and UPPER1_P are nonzero if the respective argument is an upper bound
4155 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4156 must be specified for a comparison. ARG1 will be converted to ARG0's
4157 type if both are specified. */
4159 static tree
4160 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4161 tree arg1, int upper1_p)
4163 tree tem;
4164 int result;
4165 int sgn0, sgn1;
4167 /* If neither arg represents infinity, do the normal operation.
4168 Else, if not a comparison, return infinity. Else handle the special
4169 comparison rules. Note that most of the cases below won't occur, but
4170 are handled for consistency. */
4172 if (arg0 != 0 && arg1 != 0)
4174 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4175 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4176 STRIP_NOPS (tem);
4177 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4180 if (TREE_CODE_CLASS (code) != tcc_comparison)
4181 return 0;
4183 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4184 for neither. In real maths, we cannot assume open-ended ranges are
4185 the same. But, this is computer arithmetic, where numbers are finite.
4186 We can therefore make the transformation of any unbounded range with
4187 the value Z, Z being greater than any representable number. This permits
4188 us to treat unbounded ranges as equal. */
4189 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4190 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4191 switch (code)
4193 case EQ_EXPR:
4194 result = sgn0 == sgn1;
4195 break;
4196 case NE_EXPR:
4197 result = sgn0 != sgn1;
4198 break;
4199 case LT_EXPR:
4200 result = sgn0 < sgn1;
4201 break;
4202 case LE_EXPR:
4203 result = sgn0 <= sgn1;
4204 break;
4205 case GT_EXPR:
4206 result = sgn0 > sgn1;
4207 break;
4208 case GE_EXPR:
4209 result = sgn0 >= sgn1;
4210 break;
4211 default:
4212 gcc_unreachable ();
4215 return constant_boolean_node (result, type);
4218 /* Helper routine for make_range. Perform one step for it, return
4219 new expression if the loop should continue or NULL_TREE if it should
4220 stop. */
4222 tree
4223 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4224 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4225 bool *strict_overflow_p)
4227 tree arg0_type = TREE_TYPE (arg0);
4228 tree n_low, n_high, low = *p_low, high = *p_high;
4229 int in_p = *p_in_p, n_in_p;
4231 switch (code)
4233 case TRUTH_NOT_EXPR:
4234 /* We can only do something if the range is testing for zero. */
4235 if (low == NULL_TREE || high == NULL_TREE
4236 || ! integer_zerop (low) || ! integer_zerop (high))
4237 return NULL_TREE;
4238 *p_in_p = ! in_p;
4239 return arg0;
4241 case EQ_EXPR: case NE_EXPR:
4242 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4243 /* We can only do something if the range is testing for zero
4244 and if the second operand is an integer constant. Note that
4245 saying something is "in" the range we make is done by
4246 complementing IN_P since it will set in the initial case of
4247 being not equal to zero; "out" is leaving it alone. */
4248 if (low == NULL_TREE || high == NULL_TREE
4249 || ! integer_zerop (low) || ! integer_zerop (high)
4250 || TREE_CODE (arg1) != INTEGER_CST)
4251 return NULL_TREE;
4253 switch (code)
4255 case NE_EXPR: /* - [c, c] */
4256 low = high = arg1;
4257 break;
4258 case EQ_EXPR: /* + [c, c] */
4259 in_p = ! in_p, low = high = arg1;
4260 break;
4261 case GT_EXPR: /* - [-, c] */
4262 low = 0, high = arg1;
4263 break;
4264 case GE_EXPR: /* + [c, -] */
4265 in_p = ! in_p, low = arg1, high = 0;
4266 break;
4267 case LT_EXPR: /* - [c, -] */
4268 low = arg1, high = 0;
4269 break;
4270 case LE_EXPR: /* + [-, c] */
4271 in_p = ! in_p, low = 0, high = arg1;
4272 break;
4273 default:
4274 gcc_unreachable ();
4277 /* If this is an unsigned comparison, we also know that EXP is
4278 greater than or equal to zero. We base the range tests we make
4279 on that fact, so we record it here so we can parse existing
4280 range tests. We test arg0_type since often the return type
4281 of, e.g. EQ_EXPR, is boolean. */
4282 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4284 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4285 in_p, low, high, 1,
4286 build_int_cst (arg0_type, 0),
4287 NULL_TREE))
4288 return NULL_TREE;
4290 in_p = n_in_p, low = n_low, high = n_high;
4292 /* If the high bound is missing, but we have a nonzero low
4293 bound, reverse the range so it goes from zero to the low bound
4294 minus 1. */
4295 if (high == 0 && low && ! integer_zerop (low))
4297 in_p = ! in_p;
4298 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4299 build_int_cst (TREE_TYPE (low), 1), 0);
4300 low = build_int_cst (arg0_type, 0);
4304 *p_low = low;
4305 *p_high = high;
4306 *p_in_p = in_p;
4307 return arg0;
4309 case NEGATE_EXPR:
4310 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4311 low and high are non-NULL, then normalize will DTRT. */
4312 if (!TYPE_UNSIGNED (arg0_type)
4313 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4315 if (low == NULL_TREE)
4316 low = TYPE_MIN_VALUE (arg0_type);
4317 if (high == NULL_TREE)
4318 high = TYPE_MAX_VALUE (arg0_type);
4321 /* (-x) IN [a,b] -> x in [-b, -a] */
4322 n_low = range_binop (MINUS_EXPR, exp_type,
4323 build_int_cst (exp_type, 0),
4324 0, high, 1);
4325 n_high = range_binop (MINUS_EXPR, exp_type,
4326 build_int_cst (exp_type, 0),
4327 0, low, 0);
4328 if (n_high != 0 && TREE_OVERFLOW (n_high))
4329 return NULL_TREE;
4330 goto normalize;
4332 case BIT_NOT_EXPR:
4333 /* ~ X -> -X - 1 */
4334 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4335 build_int_cst (exp_type, 1));
4337 case PLUS_EXPR:
4338 case MINUS_EXPR:
4339 if (TREE_CODE (arg1) != INTEGER_CST)
4340 return NULL_TREE;
4342 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4343 move a constant to the other side. */
4344 if (!TYPE_UNSIGNED (arg0_type)
4345 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4346 return NULL_TREE;
4348 /* If EXP is signed, any overflow in the computation is undefined,
4349 so we don't worry about it so long as our computations on
4350 the bounds don't overflow. For unsigned, overflow is defined
4351 and this is exactly the right thing. */
4352 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4353 arg0_type, low, 0, arg1, 0);
4354 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4355 arg0_type, high, 1, arg1, 0);
4356 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4357 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4358 return NULL_TREE;
4360 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4361 *strict_overflow_p = true;
4363 normalize:
4364 /* Check for an unsigned range which has wrapped around the maximum
4365 value thus making n_high < n_low, and normalize it. */
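/* For instance (a hypothetical unsigned char range): if the bounds
   come out as n_low == 250 and n_high == 3, the wrapped range
   "+ [250, 3]" is rewritten below as its complement "- [4, 249]",
   i.e. low = n_high + 1 and high = n_low - 1 with IN_P inverted.  */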
4366 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4368 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4369 build_int_cst (TREE_TYPE (n_high), 1), 0);
4370 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4371 build_int_cst (TREE_TYPE (n_low), 1), 0);
4373 /* If the range is of the form +/- [ x+1, x ], we won't
4374 be able to normalize it. But then, it represents the
4375 whole range or the empty set, so make it
4376 +/- [ -, - ]. */
4377 if (tree_int_cst_equal (n_low, low)
4378 && tree_int_cst_equal (n_high, high))
4379 low = high = 0;
4380 else
4381 in_p = ! in_p;
4383 else
4384 low = n_low, high = n_high;
4386 *p_low = low;
4387 *p_high = high;
4388 *p_in_p = in_p;
4389 return arg0;
4391 CASE_CONVERT:
4392 case NON_LVALUE_EXPR:
4393 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4394 return NULL_TREE;
4396 if (! INTEGRAL_TYPE_P (arg0_type)
4397 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4398 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4399 return NULL_TREE;
4401 n_low = low, n_high = high;
4403 if (n_low != 0)
4404 n_low = fold_convert_loc (loc, arg0_type, n_low);
4406 if (n_high != 0)
4407 n_high = fold_convert_loc (loc, arg0_type, n_high);
4409 /* If we're converting arg0 from an unsigned type to exp,
4410 a signed type, we will be doing the comparison as unsigned.
4411 The tests above have already verified that LOW and HIGH
4412 are both positive.
4414 So we have to ensure that we will handle large unsigned
4415 values the same way that the current signed bounds treat
4416 negative values. */
4418 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4420 tree high_positive;
4421 tree equiv_type;
4422 /* For fixed-point modes, we need to pass the saturating flag
4423 as the 2nd parameter. */
4424 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4425 equiv_type
4426 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4427 TYPE_SATURATING (arg0_type));
4428 else
4429 equiv_type
4430 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4432 /* A range without an upper bound is, naturally, unbounded.
4433 Since convert would have cropped a very large value, use
4434 the max value for the destination type. */
4435 high_positive
4436 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4437 : TYPE_MAX_VALUE (arg0_type);
4439 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4440 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4441 fold_convert_loc (loc, arg0_type,
4442 high_positive),
4443 build_int_cst (arg0_type, 1));
4445 /* If the low bound is specified, "and" the range with the
4446 range for which the original unsigned value will be
4447 positive. */
4448 if (low != 0)
4450 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4451 1, fold_convert_loc (loc, arg0_type,
4452 integer_zero_node),
4453 high_positive))
4454 return NULL_TREE;
4456 in_p = (n_in_p == in_p);
4458 else
4460 /* Otherwise, "or" the range with the range of the input
4461 that will be interpreted as negative. */
4462 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4463 1, fold_convert_loc (loc, arg0_type,
4464 integer_zero_node),
4465 high_positive))
4466 return NULL_TREE;
4468 in_p = (in_p != n_in_p);
4472 *p_low = n_low;
4473 *p_high = n_high;
4474 *p_in_p = in_p;
4475 return arg0;
4477 default:
4478 return NULL_TREE;
4482 /* Given EXP, a logical expression, set the range it is testing into
4483 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4484 actually being tested. *PLOW and *PHIGH will be made of the same
4485 type as the returned expression. If EXP is not a comparison, we
4486 will most likely not be returning a useful value and range. Set
4487 *STRICT_OVERFLOW_P to true if the return value is only valid
4488 because signed overflow is undefined; otherwise, do not change
4489 *STRICT_OVERFLOW_P. */
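/* A hypothetical trace (32-bit unsigned x): for EXP = x + 3 > 5, the
   comparison step records "- [-, 5]" for x + 3, which the unsigned
   handling turns into "+ [6, -]" and then into the bounded "- [0, 5]".
   The PLUS_EXPR step subtracts 3 from both bounds, wrapping to
   "- [0xfffffffd, 2]", which normalizes to "+ [3, 0xfffffffc]" for x
   itself.  */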
4491 tree
4492 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4493 bool *strict_overflow_p)
4495 enum tree_code code;
4496 tree arg0, arg1 = NULL_TREE;
4497 tree exp_type, nexp;
4498 int in_p;
4499 tree low, high;
4500 location_t loc = EXPR_LOCATION (exp);
4502 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4503 and see if we can refine the range. Some of the cases below may not
4504 happen, but it doesn't seem worth worrying about this. We "continue"
4505 the outer loop when we've changed something; otherwise we "break"
4506 the switch, which will "break" the while. */
4508 in_p = 0;
4509 low = high = build_int_cst (TREE_TYPE (exp), 0);
4511 while (1)
4513 code = TREE_CODE (exp);
4514 exp_type = TREE_TYPE (exp);
4515 arg0 = NULL_TREE;
4517 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4519 if (TREE_OPERAND_LENGTH (exp) > 0)
4520 arg0 = TREE_OPERAND (exp, 0);
4521 if (TREE_CODE_CLASS (code) == tcc_binary
4522 || TREE_CODE_CLASS (code) == tcc_comparison
4523 || (TREE_CODE_CLASS (code) == tcc_expression
4524 && TREE_OPERAND_LENGTH (exp) > 1))
4525 arg1 = TREE_OPERAND (exp, 1);
4527 if (arg0 == NULL_TREE)
4528 break;
4530 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4531 &high, &in_p, strict_overflow_p);
4532 if (nexp == NULL_TREE)
4533 break;
4534 exp = nexp;
4537 /* If EXP is a constant, we can evaluate whether this is true or false. */
4538 if (TREE_CODE (exp) == INTEGER_CST)
4540 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4541 exp, 0, low, 0))
4542 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4543 exp, 1, high, 1)));
4544 low = high = 0;
4545 exp = 0;
4548 *pin_p = in_p, *plow = low, *phigh = high;
4549 return exp;
4552 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4553 type, TYPE, return an expression to test if EXP is in (or out of, depending
4554 on IN_P) the range. Return 0 if the test couldn't be created. */
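/* Illustrative shapes of the result (see the cases below): "+ [3, 3]"
   becomes EXP == 3, "+ [-, 10]" becomes EXP <= 10, "+ [5, -]" becomes
   EXP >= 5, and a "-" range is built by inverting the corresponding
   "+" test.  */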
4556 tree
4557 build_range_check (location_t loc, tree type, tree exp, int in_p,
4558 tree low, tree high)
4560 tree etype = TREE_TYPE (exp), value;
4562 /* Disable this optimization for function pointer expressions
4563 on targets that require function pointer canonicalization. */
4564 if (targetm.have_canonicalize_funcptr_for_compare ()
4565 && TREE_CODE (etype) == POINTER_TYPE
4566 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4567 return NULL_TREE;
4569 if (! in_p)
4571 value = build_range_check (loc, type, exp, 1, low, high);
4572 if (value != 0)
4573 return invert_truthvalue_loc (loc, value);
4575 return 0;
4578 if (low == 0 && high == 0)
4579 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4581 if (low == 0)
4582 return fold_build2_loc (loc, LE_EXPR, type, exp,
4583 fold_convert_loc (loc, etype, high));
4585 if (high == 0)
4586 return fold_build2_loc (loc, GE_EXPR, type, exp,
4587 fold_convert_loc (loc, etype, low));
4589 if (operand_equal_p (low, high, 0))
4590 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4591 fold_convert_loc (loc, etype, low));
4593 if (integer_zerop (low))
4595 if (! TYPE_UNSIGNED (etype))
4597 etype = unsigned_type_for (etype);
4598 high = fold_convert_loc (loc, etype, high);
4599 exp = fold_convert_loc (loc, etype, exp);
4601 return build_range_check (loc, type, exp, 1, 0, high);
4604 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4605 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4607 int prec = TYPE_PRECISION (etype);
4609 if (wi::mask (prec - 1, false, prec) == high)
4611 if (TYPE_UNSIGNED (etype))
4613 tree signed_etype = signed_type_for (etype);
4614 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4615 etype
4616 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4617 else
4618 etype = signed_etype;
4619 exp = fold_convert_loc (loc, etype, exp);
4621 return fold_build2_loc (loc, GT_EXPR, type, exp,
4622 build_int_cst (etype, 0));
4626 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4627 This requires wrap-around arithmetic for the type of the expression.
4628 First make sure that arithmetic in this type is valid, then make sure
4629 that it wraps around. */
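/* A hypothetical instance: testing a signed char X against [-5, 100]
   can be done in unsigned char, where overflow wraps:

     (unsigned char) (X - (-5)) <= 105

   Any X below -5 wraps to a value well above 105, so the single
   unsigned comparison is exact.  */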
4630 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4631 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4632 TYPE_UNSIGNED (etype));
4634 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4636 tree utype, minv, maxv;
4638 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4639 for the type in question, as we rely on this here. */
4640 utype = unsigned_type_for (etype);
4641 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4642 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4643 build_int_cst (TREE_TYPE (maxv), 1), 1);
4644 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4646 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4647 minv, 1, maxv, 1)))
4648 etype = utype;
4649 else
4650 return 0;
4653 high = fold_convert_loc (loc, etype, high);
4654 low = fold_convert_loc (loc, etype, low);
4655 exp = fold_convert_loc (loc, etype, exp);
4657 value = const_binop (MINUS_EXPR, high, low);
4660 if (POINTER_TYPE_P (etype))
4662 if (value != 0 && !TREE_OVERFLOW (value))
4664 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4665 return build_range_check (loc, type,
4666 fold_build_pointer_plus_loc (loc, exp, low),
4667 1, build_int_cst (etype, 0), value);
4669 return 0;
4672 if (value != 0 && !TREE_OVERFLOW (value))
4673 return build_range_check (loc, type,
4674 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4675 1, build_int_cst (etype, 0), value);
4677 return 0;
4680 /* Return the predecessor of VAL in its type, handling the infinite case. */
4682 static tree
4683 range_predecessor (tree val)
4685 tree type = TREE_TYPE (val);
4687 if (INTEGRAL_TYPE_P (type)
4688 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4689 return 0;
4690 else
4691 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4692 build_int_cst (TREE_TYPE (val), 1), 0);
4695 /* Return the successor of VAL in its type, handling the infinite case. */
4697 static tree
4698 range_successor (tree val)
4700 tree type = TREE_TYPE (val);
4702 if (INTEGRAL_TYPE_P (type)
4703 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4704 return 0;
4705 else
4706 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4707 build_int_cst (TREE_TYPE (val), 1), 0);
4710 /* Given two ranges, see if we can merge them into one. Return 1 if we
4711 can, 0 if we can't. Set the output range into the specified parameters. */
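/* Two hypothetical merges by way of illustration: the including ranges
   "+ [2, 5]" and "+ [4, 8]" intersect to "+ [4, 5]" (from the start of
   the second to the end of the first), while "+ [2, 5]" and "+ [7, 9]"
   do not overlap, so their conjunction is the always-false
   "- [-, -]".  */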
4713 bool
4714 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4715 tree high0, int in1_p, tree low1, tree high1)
4717 int no_overlap;
4718 int subset;
4719 int temp;
4720 tree tem;
4721 int in_p;
4722 tree low, high;
4723 int lowequal = ((low0 == 0 && low1 == 0)
4724 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4725 low0, 0, low1, 0)));
4726 int highequal = ((high0 == 0 && high1 == 0)
4727 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4728 high0, 1, high1, 1)));
4730 /* Make range 0 be the range that starts first, or ends last if they
4731 start at the same value. Swap them if it isn't. */
4732 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4733 low0, 0, low1, 0))
4734 || (lowequal
4735 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4736 high1, 1, high0, 1))))
4738 temp = in0_p, in0_p = in1_p, in1_p = temp;
4739 tem = low0, low0 = low1, low1 = tem;
4740 tem = high0, high0 = high1, high1 = tem;
4743 /* Now flag two cases, whether the ranges are disjoint or whether the
4744 second range is totally subsumed in the first. Note that the tests
4745 below are simplified by the ones above. */
4746 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4747 high0, 1, low1, 0));
4748 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4749 high1, 1, high0, 1));
4751 /* We now have four cases, depending on whether we are including or
4752 excluding the two ranges. */
4753 if (in0_p && in1_p)
4755 /* If they don't overlap, the result is false. If the second range
4756 is a subset it is the result. Otherwise, the range is from the start
4757 of the second to the end of the first. */
4758 if (no_overlap)
4759 in_p = 0, low = high = 0;
4760 else if (subset)
4761 in_p = 1, low = low1, high = high1;
4762 else
4763 in_p = 1, low = low1, high = high0;
4766 else if (in0_p && ! in1_p)
4768 /* If they don't overlap, the result is the first range. If they are
4769 equal, the result is false. If the second range is a subset of the
4770 first, and the ranges begin at the same place, we go from just after
4771 the end of the second range to the end of the first. If the second
4772 range is not a subset of the first, or if it is a subset and both
4773 ranges end at the same place, the range starts at the start of the
4774 first range and ends just before the second range.
4775 Otherwise, we can't describe this as a single range. */
4776 if (no_overlap)
4777 in_p = 1, low = low0, high = high0;
4778 else if (lowequal && highequal)
4779 in_p = 0, low = high = 0;
4780 else if (subset && lowequal)
4782 low = range_successor (high1);
4783 high = high0;
4784 in_p = 1;
4785 if (low == 0)
4787 /* We are in the weird situation where high0 > high1 but
4788 high1 has no successor. Punt. */
4789 return 0;
4792 else if (! subset || highequal)
4794 low = low0;
4795 high = range_predecessor (low1);
4796 in_p = 1;
4797 if (high == 0)
4799 /* low0 < low1 but low1 has no predecessor. Punt. */
4800 return 0;
4803 else
4804 return 0;
4807 else if (! in0_p && in1_p)
4809 /* If they don't overlap, the result is the second range. If the second
4810 is a subset of the first, the result is false. Otherwise,
4811 the range starts just after the first range and ends at the
4812 end of the second. */
4813 if (no_overlap)
4814 in_p = 1, low = low1, high = high1;
4815 else if (subset || highequal)
4816 in_p = 0, low = high = 0;
4817 else
4819 low = range_successor (high0);
4820 high = high1;
4821 in_p = 1;
4822 if (low == 0)
4824 /* high1 > high0 but high0 has no successor. Punt. */
4825 return 0;
4830 else
4832 /* The case where we are excluding both ranges. Here the complex case
4833 is if they don't overlap. In that case, the only time we have a
4834 range is if they are adjacent. If the second is a subset of the
4835 first, the result is the first. Otherwise, the range to exclude
4836 starts at the beginning of the first range and ends at the end of the
4837 second. */
4838 if (no_overlap)
4840 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4841 range_successor (high0),
4842 1, low1, 0)))
4843 in_p = 0, low = low0, high = high1;
4844 else
4846 /* Canonicalize - [min, x] into - [-, x]. */
4847 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4848 switch (TREE_CODE (TREE_TYPE (low0)))
4850 case ENUMERAL_TYPE:
4851 if (TYPE_PRECISION (TREE_TYPE (low0))
4852 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4853 break;
4854 /* FALLTHROUGH */
4855 case INTEGER_TYPE:
4856 if (tree_int_cst_equal (low0,
4857 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4858 low0 = 0;
4859 break;
4860 case POINTER_TYPE:
4861 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4862 && integer_zerop (low0))
4863 low0 = 0;
4864 break;
4865 default:
4866 break;
4869 /* Canonicalize - [x, max] into - [x, -]. */
4870 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4871 switch (TREE_CODE (TREE_TYPE (high1)))
4873 case ENUMERAL_TYPE:
4874 if (TYPE_PRECISION (TREE_TYPE (high1))
4875 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4876 break;
4877 /* FALLTHROUGH */
4878 case INTEGER_TYPE:
4879 if (tree_int_cst_equal (high1,
4880 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4881 high1 = 0;
4882 break;
4883 case POINTER_TYPE:
4884 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4885 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4886 high1, 1,
4887 build_int_cst (TREE_TYPE (high1), 1),
4888 1)))
4889 high1 = 0;
4890 break;
4891 default:
4892 break;
4895 /* The ranges might also be adjacent between the maximum and
4896 minimum values of the given type. For
4897 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4898 return + [x + 1, y - 1]. */
4899 if (low0 == 0 && high1 == 0)
4901 low = range_successor (high0);
4902 high = range_predecessor (low1);
4903 if (low == 0 || high == 0)
4904 return 0;
4906 in_p = 1;
4908 else
4909 return 0;
4912 else if (subset)
4913 in_p = 0, low = low0, high = high0;
4914 else
4915 in_p = 0, low = low0, high = high1;
4918 *pin_p = in_p, *plow = low, *phigh = high;
4919 return 1;
4923 /* Subroutine of fold, looking inside expressions of the form
4924 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4925 of the COND_EXPR. This function is also used to optimize
4926 A op B ? C : A, by reversing the comparison first.
4928 Return a folded expression whose code is not a COND_EXPR
4929 anymore, or NULL_TREE if no folding opportunity is found. */
4931 static tree
4932 fold_cond_expr_with_comparison (location_t loc, tree type,
4933 tree arg0, tree arg1, tree arg2)
4935 enum tree_code comp_code = TREE_CODE (arg0);
4936 tree arg00 = TREE_OPERAND (arg0, 0);
4937 tree arg01 = TREE_OPERAND (arg0, 1);
4938 tree arg1_type = TREE_TYPE (arg1);
4939 tree tem;
4941 STRIP_NOPS (arg1);
4942 STRIP_NOPS (arg2);
4944 /* If we have A op 0 ? A : -A, consider applying the following
4945 transformations:
4947 A == 0? A : -A same as -A
4948 A != 0? A : -A same as A
4949 A >= 0? A : -A same as abs (A)
4950 A > 0? A : -A same as abs (A)
4951 A <= 0? A : -A same as -abs (A)
4952 A < 0? A : -A same as -abs (A)
4954 None of these transformations work for modes with signed
4955 zeros. If A is +/-0, the first two transformations will
4956 change the sign of the result (from +0 to -0, or vice
4957 versa). The last four will fix the sign of the result,
4958 even though the original expressions could be positive or
4959 negative, depending on the sign of A.
4961 Note that all these transformations are correct if A is
4962 NaN, since the two alternatives (A and -A) are also NaNs. */
4963 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4964 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4965 ? real_zerop (arg01)
4966 : integer_zerop (arg01))
4967 && ((TREE_CODE (arg2) == NEGATE_EXPR
4968 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4969 /* In the case that A is of the form X-Y, '-A' (arg2) may
4970 have already been folded to Y-X, check for that. */
4971 || (TREE_CODE (arg1) == MINUS_EXPR
4972 && TREE_CODE (arg2) == MINUS_EXPR
4973 && operand_equal_p (TREE_OPERAND (arg1, 0),
4974 TREE_OPERAND (arg2, 1), 0)
4975 && operand_equal_p (TREE_OPERAND (arg1, 1),
4976 TREE_OPERAND (arg2, 0), 0))))
4977 switch (comp_code)
4979 case EQ_EXPR:
4980 case UNEQ_EXPR:
4981 tem = fold_convert_loc (loc, arg1_type, arg1);
4982 return pedantic_non_lvalue_loc (loc,
4983 fold_convert_loc (loc, type,
4984 negate_expr (tem)));
4985 case NE_EXPR:
4986 case LTGT_EXPR:
4987 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4988 case UNGE_EXPR:
4989 case UNGT_EXPR:
4990 if (flag_trapping_math)
4991 break;
4992 /* Fall through. */
4993 case GE_EXPR:
4994 case GT_EXPR:
4995 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4996 arg1 = fold_convert_loc (loc, signed_type_for
4997 (TREE_TYPE (arg1)), arg1);
4998 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4999 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5000 case UNLE_EXPR:
5001 case UNLT_EXPR:
5002 if (flag_trapping_math)
5003 break;
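/* Fall through.  */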
5004 case LE_EXPR:
5005 case LT_EXPR:
5006 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5007 arg1 = fold_convert_loc (loc, signed_type_for
5008 (TREE_TYPE (arg1)), arg1);
5009 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5010 return negate_expr (fold_convert_loc (loc, type, tem));
5011 default:
5012 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5013 break;
5016 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5017 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5018 both transformations are correct when A is NaN: A != 0
5019 is then true, and A == 0 is false. */
5021 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5022 && integer_zerop (arg01) && integer_zerop (arg2))
5024 if (comp_code == NE_EXPR)
5025 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5026 else if (comp_code == EQ_EXPR)
5027 return build_zero_cst (type);
5030 /* Try some transformations of A op B ? A : B.
5032 A == B? A : B same as B
5033 A != B? A : B same as A
5034 A >= B? A : B same as max (A, B)
5035 A > B? A : B same as max (B, A)
5036 A <= B? A : B same as min (A, B)
5037 A < B? A : B same as min (B, A)
5039 As above, these transformations don't work in the presence
5040 of signed zeros. For example, if A and B are zeros of
5041 opposite sign, the first two transformations will change
5042 the sign of the result. In the last four, the original
5043 expressions give different results for (A=+0, B=-0) and
5044 (A=-0, B=+0), but the transformed expressions do not.
5046 The first two transformations are correct if either A or B
5047 is a NaN. In the first transformation, the condition will
5048 be false, and B will indeed be chosen. In the case of the
5049 second transformation, the condition A != B will be true,
5050 and A will be chosen.
5052 The conversions to max() and min() are not correct if B is
5053 a number and A is not. The conditions in the original
5054 expressions will be false, so all four give B. The min()
5055 and max() versions would give a NaN instead. */
5056 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5057 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5058 /* Avoid these transformations if the COND_EXPR may be used
5059 as an lvalue in the C++ front-end. PR c++/19199. */
5060 && (in_gimple_form
5061 || VECTOR_TYPE_P (type)
5062 || (! lang_GNU_CXX ()
5063 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5064 || ! maybe_lvalue_p (arg1)
5065 || ! maybe_lvalue_p (arg2)))
5067 tree comp_op0 = arg00;
5068 tree comp_op1 = arg01;
5069 tree comp_type = TREE_TYPE (comp_op0);
5071 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5072 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5074 comp_type = type;
5075 comp_op0 = arg1;
5076 comp_op1 = arg2;
5079 switch (comp_code)
5081 case EQ_EXPR:
5082 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5083 case NE_EXPR:
5084 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5085 case LE_EXPR:
5086 case LT_EXPR:
5087 case UNLE_EXPR:
5088 case UNLT_EXPR:
5089 /* In C++ a ?: expression can be an lvalue, so put the
5090 operand which will be used if they are equal first
5091 so that we can convert this back to the
5092 corresponding COND_EXPR. */
5093 if (!HONOR_NANS (arg1))
5095 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5096 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5097 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5098 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5099 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5100 comp_op1, comp_op0);
5101 return pedantic_non_lvalue_loc (loc,
5102 fold_convert_loc (loc, type, tem));
5104 break;
5105 case GE_EXPR:
5106 case GT_EXPR:
5107 case UNGE_EXPR:
5108 case UNGT_EXPR:
5109 if (!HONOR_NANS (arg1))
5111 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5112 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5113 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5114 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5115 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5116 comp_op1, comp_op0);
5117 return pedantic_non_lvalue_loc (loc,
5118 fold_convert_loc (loc, type, tem));
5120 break;
5121 case UNEQ_EXPR:
5122 if (!HONOR_NANS (arg1))
5123 return pedantic_non_lvalue_loc (loc,
5124 fold_convert_loc (loc, type, arg2));
5125 break;
5126 case LTGT_EXPR:
5127 if (!HONOR_NANS (arg1))
5128 return pedantic_non_lvalue_loc (loc,
5129 fold_convert_loc (loc, type, arg1));
5130 break;
5131 default:
5132 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5133 break;
5137 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5138 we might still be able to simplify this. For example,
5139 if C1 is one less or one more than C2, this might have started
5140 out as a MIN or MAX and been transformed by this function.
5141 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
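/* For example (hypothetically, with signed ints): "x < 4 ? x : 3" has
   C1 == C2 + 1 and is recognized below as MIN (x, 3), while
   "x > 2 ? x : 3" has C1 == C2 - 1 and becomes MAX (x, 3).  */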
5143 if (INTEGRAL_TYPE_P (type)
5144 && TREE_CODE (arg01) == INTEGER_CST
5145 && TREE_CODE (arg2) == INTEGER_CST)
5146 switch (comp_code)
5148 case EQ_EXPR:
5149 if (TREE_CODE (arg1) == INTEGER_CST)
5150 break;
5151 /* We can replace A with C1 in this case. */
5152 arg1 = fold_convert_loc (loc, type, arg01);
5153 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5155 case LT_EXPR:
5156 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5157 MIN_EXPR, to preserve the signedness of the comparison. */
5158 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5159 OEP_ONLY_CONST)
5160 && operand_equal_p (arg01,
5161 const_binop (PLUS_EXPR, arg2,
5162 build_int_cst (type, 1)),
5163 OEP_ONLY_CONST))
5165 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5166 fold_convert_loc (loc, TREE_TYPE (arg00),
5167 arg2));
5168 return pedantic_non_lvalue_loc (loc,
5169 fold_convert_loc (loc, type, tem));
5171 break;
5173 case LE_EXPR:
5174 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5175 as above. */
5176 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5177 OEP_ONLY_CONST)
5178 && operand_equal_p (arg01,
5179 const_binop (MINUS_EXPR, arg2,
5180 build_int_cst (type, 1)),
5181 OEP_ONLY_CONST))
5183 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5184 fold_convert_loc (loc, TREE_TYPE (arg00),
5185 arg2));
5186 return pedantic_non_lvalue_loc (loc,
5187 fold_convert_loc (loc, type, tem));
5189 break;
5191 case GT_EXPR:
5192 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5193 MAX_EXPR, to preserve the signedness of the comparison. */
5194 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5195 OEP_ONLY_CONST)
5196 && operand_equal_p (arg01,
5197 const_binop (MINUS_EXPR, arg2,
5198 build_int_cst (type, 1)),
5199 OEP_ONLY_CONST))
5201 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5202 fold_convert_loc (loc, TREE_TYPE (arg00),
5203 arg2));
5204 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5206 break;
5208 case GE_EXPR:
5209 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5210 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5211 OEP_ONLY_CONST)
5212 && operand_equal_p (arg01,
5213 const_binop (PLUS_EXPR, arg2,
5214 build_int_cst (type, 1)),
5215 OEP_ONLY_CONST))
5217 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5218 fold_convert_loc (loc, TREE_TYPE (arg00),
5219 arg2));
5220 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5222 break;
5223 case NE_EXPR:
5224 break;
5225 default:
5226 gcc_unreachable ();
5229 return NULL_TREE;
5234 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5235 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5236 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5237 false) >= 2)
5238 #endif
5240 /* EXP is some logical combination of boolean tests. See if we can
5241 merge it into some range test. Return the new tree if so. */
5243 static tree
5244 fold_range_test (location_t loc, enum tree_code code, tree type,
5245 tree op0, tree op1)
5247 int or_op = (code == TRUTH_ORIF_EXPR
5248 || code == TRUTH_OR_EXPR);
5249 int in0_p, in1_p, in_p;
5250 tree low0, low1, low, high0, high1, high;
5251 bool strict_overflow_p = false;
5252 tree tem, lhs, rhs;
5253 const char * const warnmsg = G_("assuming signed overflow does not occur "
5254 "when simplifying range test");
5256 if (!INTEGRAL_TYPE_P (type))
5257 return 0;
5259 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5260 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5262 /* If this is an OR operation, invert both sides; we will invert
5263 again at the end. */
5264 if (or_op)
5265 in0_p = ! in0_p, in1_p = ! in1_p;
5267 /* If both expressions are the same, if we can merge the ranges, and we
5268 can build the range test, return it or it inverted. If one of the
5269 ranges is always true or always false, consider it to be the same
5270 expression as the other. */
5271 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5272 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5273 in1_p, low1, high1)
5274 && 0 != (tem = (build_range_check (loc, type,
5275 lhs != 0 ? lhs
5276 : rhs != 0 ? rhs : integer_zero_node,
5277 in_p, low, high))))
5279 if (strict_overflow_p)
5280 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5281 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5284 /* On machines where the branch cost is expensive, if this is a
5285 short-circuited branch and the underlying object on both sides
5286 is the same, make a non-short-circuit operation. */
5287 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5288 && lhs != 0 && rhs != 0
5289 && (code == TRUTH_ANDIF_EXPR
5290 || code == TRUTH_ORIF_EXPR)
5291 && operand_equal_p (lhs, rhs, 0))
5293 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5294 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5295 which cases we can't do this. */
5296 if (simple_operand_p (lhs))
5297 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5298 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5299 type, op0, op1);
5301 else if (!lang_hooks.decls.global_bindings_p ()
5302 && !CONTAINS_PLACEHOLDER_P (lhs))
5304 tree common = save_expr (lhs);
5306 if (0 != (lhs = build_range_check (loc, type, common,
5307 or_op ? ! in0_p : in0_p,
5308 low0, high0))
5309 && (0 != (rhs = build_range_check (loc, type, common,
5310 or_op ? ! in1_p : in1_p,
5311 low1, high1))))
5313 if (strict_overflow_p)
5314 fold_overflow_warning (warnmsg,
5315 WARN_STRICT_OVERFLOW_COMPARISON);
5316 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5317 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5318 type, lhs, rhs);
5323 return 0;
5326 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5327 bit value. Arrange things so the extra bits will be set to zero if and
5328 only if C is sign-extended to its full width. If MASK is nonzero,
5329 it is an INTEGER_CST that should be AND'ed with the extra bits. */
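/* A hypothetical trace with P == 4 in an 8-bit mode, ignoring MASK:
   for the already sign-extended constant C == 0b11111010, the sign bit
   of the low four bits is propagated into bits 4..7 (0b11110000) and
   XORed away, leaving 0b00001010 with the extra bits clear.  Had C not
   been sign-extended (C == 0b00001010), the same XOR would instead set
   the extra bits, giving 0b11111010.  */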
5331 static tree
5332 unextend (tree c, int p, int unsignedp, tree mask)
5334 tree type = TREE_TYPE (c);
5335 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5336 tree temp;
5338 if (p == modesize || unsignedp)
5339 return c;
5341 /* We work by getting just the sign bit into the low-order bit, then
5342 into the high-order bit, then sign-extend. We then XOR that value
5343 with C. */
5344 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5346 /* We must use a signed type in order to get an arithmetic right shift.
5347 However, we must also avoid introducing accidental overflows, so that
5348 a subsequent call to integer_zerop will work. Hence we must
5349 do the type conversion here. At this point, the constant is either
5350 zero or one, and the conversion to a signed type can never overflow.
5351 We could get an overflow if this conversion is done anywhere else. */
5352 if (TYPE_UNSIGNED (type))
5353 temp = fold_convert (signed_type_for (type), temp);
5355 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5356 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5357 if (mask != 0)
5358 temp = const_binop (BIT_AND_EXPR, temp,
5359 fold_convert (TREE_TYPE (c), mask));
5360 /* If necessary, convert the type back to match the type of C. */
5361 if (TYPE_UNSIGNED (type))
5362 temp = fold_convert (type, temp);
5364 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5367 /* For an expression that has the form
5368 (A && B) || ~B
5369 or
5370 (A || B) && ~B,
5371 we can drop one of the inner expressions and simplify to
5372 A || ~B
5373 or
5374 A && ~B
5375 LOC is the location of the resulting expression. OP is the inner
5376 logical operation; the left-hand side in the examples above, while CMPOP
5377 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5378 removing a condition that guards another, as in
5379 (A != NULL && A->...) || A == NULL
5380 which we must not transform. If RHS_ONLY is true, only eliminate the
5381 right-most operand of the inner logical operation. */
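/* A hypothetical instance: in

     (x > 0 && y != 0) || x <= 0

   the inverse of the outer comparison (x > 0) already appears inside
   the inner AND, so that operand is redundant and the expression
   simplifies to

     y != 0 || x <= 0  */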
5383 static tree
5384 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5385 bool rhs_only)
5387 tree type = TREE_TYPE (cmpop);
5388 enum tree_code code = TREE_CODE (cmpop);
5389 enum tree_code truthop_code = TREE_CODE (op);
5390 tree lhs = TREE_OPERAND (op, 0);
5391 tree rhs = TREE_OPERAND (op, 1);
5392 tree orig_lhs = lhs, orig_rhs = rhs;
5393 enum tree_code rhs_code = TREE_CODE (rhs);
5394 enum tree_code lhs_code = TREE_CODE (lhs);
5395 enum tree_code inv_code;
5397 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5398 return NULL_TREE;
5400 if (TREE_CODE_CLASS (code) != tcc_comparison)
5401 return NULL_TREE;
5403 if (rhs_code == truthop_code)
5405 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5406 if (newrhs != NULL_TREE)
5408 rhs = newrhs;
5409 rhs_code = TREE_CODE (rhs);
5412 if (lhs_code == truthop_code && !rhs_only)
5414 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5415 if (newlhs != NULL_TREE)
5417 lhs = newlhs;
5418 lhs_code = TREE_CODE (lhs);
5422 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5423 if (inv_code == rhs_code
5424 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5425 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5426 return lhs;
5427 if (!rhs_only && inv_code == lhs_code
5428 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5429 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5430 return rhs;
5431 if (rhs != orig_rhs || lhs != orig_lhs)
5432 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5433 lhs, rhs);
5434 return NULL_TREE;
5437 /* Find ways of folding logical expressions of LHS and RHS:
5438 Try to merge two comparisons to the same innermost item.
5439 Look for range tests like "ch >= '0' && ch <= '9'".
5440 Look for combinations of simple terms on machines with expensive branches
5441 and evaluate the RHS unconditionally.
5443 For example, if we have p->a == 2 && p->b == 4 and we can make an
5444 object large enough to span both A and B, we can do this with a comparison
5445 against the object ANDed with the a mask.
5447 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5448 operations to do this with one comparison.
5450 We check for both normal comparisons and the BIT_AND_EXPRs made by
5451 this function and the one above.
5453 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5454 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5456 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5457 two operands.
5459 We return the simplified tree or 0 if no optimization is possible. */
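/* A hypothetical example of the merge (byte order and padding make the
   actual masks target-specific): with

     struct s { unsigned char a, b; } *p;

   the test "p->a == 2 && p->b == 4" can become a single halfword
   comparison, e.g. on a little-endian target

     (*(unsigned short *) p) == 0x0402

   i.e. one load and one comparison instead of two of each.  */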
5461 static tree
5462 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5463 tree lhs, tree rhs)
5465 /* If this is the "or" of two comparisons, we can do something if
5466 the comparisons are NE_EXPR. If this is the "and", we can do something
5467 if the comparisons are EQ_EXPR. I.e.,
5468 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5470 WANTED_CODE is this operation code. For single bit fields, we can
5471 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5472 comparison for one-bit fields. */
5474 enum tree_code wanted_code;
5475 enum tree_code lcode, rcode;
5476 tree ll_arg, lr_arg, rl_arg, rr_arg;
5477 tree ll_inner, lr_inner, rl_inner, rr_inner;
5478 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5479 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5480 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5481 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5482 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5483 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5484 machine_mode lnmode, rnmode;
5485 tree ll_mask, lr_mask, rl_mask, rr_mask;
5486 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5487 tree l_const, r_const;
5488 tree lntype, rntype, result;
5489 HOST_WIDE_INT first_bit, end_bit;
5490 int volatilep;
5492 /* Start by getting the comparison codes. Fail if anything is volatile.
5493 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5494 it were surrounded with a NE_EXPR. */
5496 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5497 return 0;
5499 lcode = TREE_CODE (lhs);
5500 rcode = TREE_CODE (rhs);
5502 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5504 lhs = build2 (NE_EXPR, truth_type, lhs,
5505 build_int_cst (TREE_TYPE (lhs), 0));
5506 lcode = NE_EXPR;
5509 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5511 rhs = build2 (NE_EXPR, truth_type, rhs,
5512 build_int_cst (TREE_TYPE (rhs), 0));
5513 rcode = NE_EXPR;
5516 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5517 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5518 return 0;
5520 ll_arg = TREE_OPERAND (lhs, 0);
5521 lr_arg = TREE_OPERAND (lhs, 1);
5522 rl_arg = TREE_OPERAND (rhs, 0);
5523 rr_arg = TREE_OPERAND (rhs, 1);
5525 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5526 if (simple_operand_p (ll_arg)
5527 && simple_operand_p (lr_arg))
5529 if (operand_equal_p (ll_arg, rl_arg, 0)
5530 && operand_equal_p (lr_arg, rr_arg, 0))
5532 result = combine_comparisons (loc, code, lcode, rcode,
5533 truth_type, ll_arg, lr_arg);
5534 if (result)
5535 return result;
5537 else if (operand_equal_p (ll_arg, rr_arg, 0)
5538 && operand_equal_p (lr_arg, rl_arg, 0))
5540 result = combine_comparisons (loc, code, lcode,
5541 swap_tree_comparison (rcode),
5542 truth_type, ll_arg, lr_arg);
5543 if (result)
5544 return result;
5548 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5549 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5551 /* If the RHS can be evaluated unconditionally and its operands are
5552 simple, it wins to evaluate the RHS unconditionally on machines
5553 with expensive branches. In this case, this isn't a comparison
5554 that can be merged. */
5556 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5557 false) >= 2
5558 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5559 && simple_operand_p (rl_arg)
5560 && simple_operand_p (rr_arg))
5562 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5563 if (code == TRUTH_OR_EXPR
5564 && lcode == NE_EXPR && integer_zerop (lr_arg)
5565 && rcode == NE_EXPR && integer_zerop (rr_arg)
5566 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5567 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5568 return build2_loc (loc, NE_EXPR, truth_type,
5569 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5570 ll_arg, rl_arg),
5571 build_int_cst (TREE_TYPE (ll_arg), 0));
5573 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5574 if (code == TRUTH_AND_EXPR
5575 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5576 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5577 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5578 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5579 return build2_loc (loc, EQ_EXPR, truth_type,
5580 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5581 ll_arg, rl_arg),
5582 build_int_cst (TREE_TYPE (ll_arg), 0));
5585 /* See if the comparisons can be merged. Then get all the parameters for
5586 each side. */
5588 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5589 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5590 return 0;
5592 volatilep = 0;
5593 ll_inner = decode_field_reference (loc, ll_arg,
5594 &ll_bitsize, &ll_bitpos, &ll_mode,
5595 &ll_unsignedp, &volatilep, &ll_mask,
5596 &ll_and_mask);
5597 lr_inner = decode_field_reference (loc, lr_arg,
5598 &lr_bitsize, &lr_bitpos, &lr_mode,
5599 &lr_unsignedp, &volatilep, &lr_mask,
5600 &lr_and_mask);
5601 rl_inner = decode_field_reference (loc, rl_arg,
5602 &rl_bitsize, &rl_bitpos, &rl_mode,
5603 &rl_unsignedp, &volatilep, &rl_mask,
5604 &rl_and_mask);
5605 rr_inner = decode_field_reference (loc, rr_arg,
5606 &rr_bitsize, &rr_bitpos, &rr_mode,
5607 &rr_unsignedp, &volatilep, &rr_mask,
5608 &rr_and_mask);
5610 The inner operation on the lhs of each comparison must be the same
5611 if we are to be able to do anything.
5612 Then see if we have constants. If not, the same must be true for
5613 the rhs's. */
5614 if (volatilep || ll_inner == 0 || rl_inner == 0
5615 || ! operand_equal_p (ll_inner, rl_inner, 0))
5616 return 0;
5618 if (TREE_CODE (lr_arg) == INTEGER_CST
5619 && TREE_CODE (rr_arg) == INTEGER_CST)
5620 l_const = lr_arg, r_const = rr_arg;
5621 else if (lr_inner == 0 || rr_inner == 0
5622 || ! operand_equal_p (lr_inner, rr_inner, 0))
5623 return 0;
5624 else
5625 l_const = r_const = 0;
5627 /* If either comparison code is not correct for our logical operation,
5628 fail. However, we can convert a one-bit comparison against zero into
5629 the opposite comparison against that bit being set in the field. */
5631 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5632 if (lcode != wanted_code)
5634 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5636 /* Make the left operand unsigned, since we are only interested
5637 in the value of one bit. Otherwise we are doing the wrong
5638 thing below. */
5639 ll_unsignedp = 1;
5640 l_const = ll_mask;
5642 else
5643 return 0;
5646 /* This is analogous to the code for l_const above. */
5647 if (rcode != wanted_code)
5649 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5651 rl_unsignedp = 1;
5652 r_const = rl_mask;
5654 else
5655 return 0;
5658 /* See if we can find a mode that contains both fields being compared on
5659 the left. If we can't, fail. Otherwise, update all constants and masks
5660 to be relative to a field of that size. */
5661 first_bit = MIN (ll_bitpos, rl_bitpos);
5662 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5663 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5664 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5665 volatilep);
5666 if (lnmode == VOIDmode)
5667 return 0;
5669 lnbitsize = GET_MODE_BITSIZE (lnmode);
5670 lnbitpos = first_bit & ~ (lnbitsize - 1);
5671 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5672 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5674 if (BYTES_BIG_ENDIAN)
5676 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5677 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5680 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5681 size_int (xll_bitpos));
5682 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5683 size_int (xrl_bitpos));
5685 if (l_const)
5687 l_const = fold_convert_loc (loc, lntype, l_const);
5688 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5689 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5690 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5691 fold_build1_loc (loc, BIT_NOT_EXPR,
5692 lntype, ll_mask))))
5694 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5696 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5699 if (r_const)
5701 r_const = fold_convert_loc (loc, lntype, r_const);
5702 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5703 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5704 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5705 fold_build1_loc (loc, BIT_NOT_EXPR,
5706 lntype, rl_mask))))
5708 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5710 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5714 /* If the right sides are not constant, do the same for it. Also,
5715 disallow this optimization if a size or signedness mismatch occurs
5716 between the left and right sides. */
5717 if (l_const == 0)
5719 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5720 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5721 /* Make sure the two fields on the right
5722 correspond to the left without being swapped. */
5723 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5724 return 0;
5726 first_bit = MIN (lr_bitpos, rr_bitpos);
5727 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5728 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5729 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5730 volatilep);
5731 if (rnmode == VOIDmode)
5732 return 0;
5734 rnbitsize = GET_MODE_BITSIZE (rnmode);
5735 rnbitpos = first_bit & ~ (rnbitsize - 1);
5736 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5737 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5739 if (BYTES_BIG_ENDIAN)
5741 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5742 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5745 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5746 rntype, lr_mask),
5747 size_int (xlr_bitpos));
5748 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5749 rntype, rr_mask),
5750 size_int (xrr_bitpos));
5752 /* Make a mask that corresponds to both fields being compared.
5753 Do this for both items being compared. If the operands are the
5754 same size and the bits being compared are in the same position
5755 then we can do this by masking both and comparing the masked
5756 results. */
5757 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5758 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5759 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5761 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5762 ll_unsignedp || rl_unsignedp);
5763 if (! all_ones_mask_p (ll_mask, lnbitsize))
5764 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5766 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5767 lr_unsignedp || rr_unsignedp);
5768 if (! all_ones_mask_p (lr_mask, rnbitsize))
5769 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5771 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5774 /* There is still another way we can do something: If both pairs of
5775 fields being compared are adjacent, we may be able to make a wider
5776 field containing them both.
5778 Note that we still must mask the lhs/rhs expressions. Furthermore,
5779 the mask must be shifted to account for the shift done by
5780 make_bit_field_ref. */
5781 if ((ll_bitsize + ll_bitpos == rl_bitpos
5782 && lr_bitsize + lr_bitpos == rr_bitpos)
5783 || (ll_bitpos == rl_bitpos + rl_bitsize
5784 && lr_bitpos == rr_bitpos + rr_bitsize))
5786 tree type;
5788 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5789 ll_bitsize + rl_bitsize,
5790 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5791 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5792 lr_bitsize + rr_bitsize,
5793 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5795 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5796 size_int (MIN (xll_bitpos, xrl_bitpos)));
5797 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5798 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5800 /* Convert to the smaller type before masking out unwanted bits. */
5801 type = lntype;
5802 if (lntype != rntype)
5804 if (lnbitsize > rnbitsize)
5806 lhs = fold_convert_loc (loc, rntype, lhs);
5807 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5808 type = rntype;
5810 else if (lnbitsize < rnbitsize)
5812 rhs = fold_convert_loc (loc, lntype, rhs);
5813 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5814 type = lntype;
5818 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5819 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5821 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5822 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5824 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5827 return 0;
5830 /* Handle the case of comparisons with constants. If there is something in
5831 common between the masks, those bits of the constants must be the same.
5832 If not, the condition is always true or always false. Test for this
5833 to avoid generating incorrect code below. */
5834 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5835 if (! integer_zerop (result)
5836 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5837 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5839 if (wanted_code == NE_EXPR)
5841 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5842 return constant_boolean_node (true, truth_type);
5844 else
5846 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5847 return constant_boolean_node (false, truth_type);
5851 /* Construct the expression we will return. First get the component
5852 reference we will make. Unless the mask is all ones the width of
5853 that field, perform the mask operation. Then compare with the
5854 merged constant. */
5855 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5856 ll_unsignedp || rl_unsignedp);
5858 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5859 if (! all_ones_mask_p (ll_mask, lnbitsize))
5860 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5862 return build2_loc (loc, wanted_code, truth_type, result,
5863 const_binop (BIT_IOR_EXPR, l_const, r_const));
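/* A worked instance of the constant path above, assuming two adjacent
   8-bit fields and little-endian bit numbering: for "p->a == 2 &&
   p->b == 4" the masks become 0x00ff and 0xff00, the shifted constants
   0x0002 and 0x0400, and the merged test is

     (<16-bit word containing both fields> & 0xffff) == 0x0402

   with wanted_code == EQ_EXPR since the outer operation was an AND.  */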
5866 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5867 constant. */
5869 static tree
5870 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5871 tree op0, tree op1)
5873 tree arg0 = op0;
5874 enum tree_code op_code;
5875 tree comp_const;
5876 tree minmax_const;
5877 int consts_equal, consts_lt;
5878 tree inner;
5880 STRIP_SIGN_NOPS (arg0);
5882 op_code = TREE_CODE (arg0);
5883 minmax_const = TREE_OPERAND (arg0, 1);
5884 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5885 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5886 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5887 inner = TREE_OPERAND (arg0, 0);
5889 /* If something does not permit us to optimize, return NULL_TREE. */
5890 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5891 || TREE_CODE (comp_const) != INTEGER_CST
5892 || TREE_OVERFLOW (comp_const)
5893 || TREE_CODE (minmax_const) != INTEGER_CST
5894 || TREE_OVERFLOW (minmax_const))
5895 return NULL_TREE;
5897 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5898 and GT_EXPR, doing the rest with recursive calls using logical
5899 simplifications. */
5900 switch (code)
5902 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5904 tree tem
5905 = optimize_minmax_comparison (loc,
5906 invert_tree_comparison (code, false),
5907 type, op0, op1);
5908 if (tem)
5909 return invert_truthvalue_loc (loc, tem);
5910 return NULL_TREE;
5913 case GE_EXPR:
5914 return
5915 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5916 optimize_minmax_comparison
5917 (loc, EQ_EXPR, type, arg0, comp_const),
5918 optimize_minmax_comparison
5919 (loc, GT_EXPR, type, arg0, comp_const));
5921 case EQ_EXPR:
5922 if (op_code == MAX_EXPR && consts_equal)
5923 /* MAX (X, 0) == 0 -> X <= 0 */
5924 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5926 else if (op_code == MAX_EXPR && consts_lt)
5927 /* MAX (X, 0) == 5 -> X == 5 */
5928 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5930 else if (op_code == MAX_EXPR)
5931 /* MAX (X, 0) == -1 -> false */
5932 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5934 else if (consts_equal)
5935 /* MIN (X, 0) == 0 -> X >= 0 */
5936 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5938 else if (consts_lt)
5939 /* MIN (X, 0) == 5 -> false */
5940 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5942 else
5943 /* MIN (X, 0) == -1 -> X == -1 */
5944 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5946 case GT_EXPR:
5947 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5948 /* MAX (X, 0) > 0 -> X > 0
5949 MAX (X, 0) > 5 -> X > 5 */
5950 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5952 else if (op_code == MAX_EXPR)
5953 /* MAX (X, 0) > -1 -> true */
5954 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5956 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5957 /* MIN (X, 0) > 0 -> false
5958 MIN (X, 0) > 5 -> false */
5959 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5961 else
5962 /* MIN (X, 0) > -1 -> X > -1 */
5963 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5965 default:
5966 return NULL_TREE;
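/* For reference, the full set of folds performed above, shown with 0
   as the constant inside the MIN/MAX:

     MAX (X, 0) == 0   ->  X <= 0       MIN (X, 0) == 0   ->  X >= 0
     MAX (X, 0) == 5   ->  X == 5       MIN (X, 0) == 5   ->  false
     MAX (X, 0) == -1  ->  false        MIN (X, 0) == -1  ->  X == -1
     MAX (X, 0) > 0    ->  X > 0        MIN (X, 0) > 0    ->  false
     MAX (X, 0) > -1   ->  true         MIN (X, 0) > -1   ->  X > -1

   NE, LT and LE are handled by inverting the comparison, and GE as the
   OR of the EQ and GT cases.  */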
5970 /* T is an integer expression that is being multiplied, divided, or taken a
5971 modulus (CODE says which and what kind of divide or modulus) by a
5972 constant C. See if we can eliminate that operation by folding it with
5973 other operations already in T. WIDE_TYPE, if non-null, is a type that
5974 should be used for the computation if wider than our type.
5976 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5977 (X * 2) + (Y * 4). We must, however, be assured that either the original
5978 expression would not overflow or that overflow is undefined for the type
5979 in the language in question.
5981 If we return a non-null expression, it is an equivalent form of the
5982 original computation, but need not be in the original type.
5984 We set *STRICT_OVERFLOW_P to true if the return value depends on
5985 signed overflow being undefined. Otherwise we do not change
5986 *STRICT_OVERFLOW_P. */
5988 static tree
5989 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5990 bool *strict_overflow_p)
5992 /* To avoid exponential search depth, refuse to allow recursion past
5993 three levels. Beyond that (1) it's highly unlikely that we'll find
5994 something interesting and (2) we've probably processed it before
5995 when we built the inner expression. */
5997 static int depth;
5998 tree ret;
6000 if (depth > 3)
6001 return NULL;
6003 depth++;
6004 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6005 depth--;
6007 return ret;
6010 static tree
6011 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6012 bool *strict_overflow_p)
6014 tree type = TREE_TYPE (t);
6015 enum tree_code tcode = TREE_CODE (t);
6016 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6017 > GET_MODE_SIZE (TYPE_MODE (type)))
6018 ? wide_type : type);
6019 tree t1, t2;
6020 int same_p = tcode == code;
6021 tree op0 = NULL_TREE, op1 = NULL_TREE;
6022 bool sub_strict_overflow_p;
6024 /* Don't deal with constants of zero here; they confuse the code below. */
6025 if (integer_zerop (c))
6026 return NULL_TREE;
6028 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6029 op0 = TREE_OPERAND (t, 0);
6031 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6032 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6034 /* Note that we need not handle conditional operations here since fold
6035 already handles those cases. So just do arithmetic here. */
6036 switch (tcode)
6038 case INTEGER_CST:
6039 /* For a constant, we can always simplify if we are a multiply
6040 or (for divide and modulus) if it is a multiple of our constant. */
6041 if (code == MULT_EXPR
6042 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6043 return const_binop (code, fold_convert (ctype, t),
6044 fold_convert (ctype, c));
6045 break;
6047 CASE_CONVERT: case NON_LVALUE_EXPR:
6048 /* If op0 is an expression ... */
6049 if ((COMPARISON_CLASS_P (op0)
6050 || UNARY_CLASS_P (op0)
6051 || BINARY_CLASS_P (op0)
6052 || VL_EXP_CLASS_P (op0)
6053 || EXPRESSION_CLASS_P (op0))
6054 /* ... and has wrapping overflow, and its type is smaller
6055 than ctype, then we cannot pass through as widening. */
6056 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6057 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6058 && (TYPE_PRECISION (ctype)
6059 > TYPE_PRECISION (TREE_TYPE (op0))))
6060 /* ... or this is a truncation (t is narrower than op0),
6061 then we cannot pass through this narrowing. */
6062 || (TYPE_PRECISION (type)
6063 < TYPE_PRECISION (TREE_TYPE (op0)))
6064 /* ... or signedness changes for division or modulus,
6065 then we cannot pass through this conversion. */
6066 || (code != MULT_EXPR
6067 && (TYPE_UNSIGNED (ctype)
6068 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6069 /* ... or has undefined overflow while the converted to
6070 type has not, we cannot do the operation in the inner type
6071 as that would introduce undefined overflow. */
6072 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6073 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6074 && !TYPE_OVERFLOW_UNDEFINED (type))))
6075 break;
6077 /* Pass the constant down and see if we can make a simplification. If
6078 we can, replace this expression with the inner simplification for
6079 possible later conversion to our or some other type. */
6080 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6081 && TREE_CODE (t2) == INTEGER_CST
6082 && !TREE_OVERFLOW (t2)
6083 && (0 != (t1 = extract_muldiv (op0, t2, code,
6084 code == MULT_EXPR
6085 ? ctype : NULL_TREE,
6086 strict_overflow_p))))
6087 return t1;
6088 break;
6090 case ABS_EXPR:
6091 /* If widening the type changes it from signed to unsigned, then we
6092 must avoid building ABS_EXPR itself as unsigned. */
6093 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6095 tree cstype = (*signed_type_for) (ctype);
6096 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6097 != 0)
6099 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6100 return fold_convert (ctype, t1);
6102 break;
6104 /* If the constant is negative, we cannot simplify this. */
6105 if (tree_int_cst_sgn (c) == -1)
6106 break;
6107 /* FALLTHROUGH */
6108 case NEGATE_EXPR:
6109 /* For division and modulus, type can't be unsigned, as e.g.
6110 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6111 For signed types, even with wrapping overflow, this is fine. */
6112 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6113 break;
6114 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6115 != 0)
6116 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6117 break;
6119 case MIN_EXPR: case MAX_EXPR:
6120 /* If widening the type changes the signedness, then we can't perform
6121 this optimization as that changes the result. */
6122 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6123 break;
6125 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6126 sub_strict_overflow_p = false;
6127 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6128 &sub_strict_overflow_p)) != 0
6129 && (t2 = extract_muldiv (op1, c, code, wide_type,
6130 &sub_strict_overflow_p)) != 0)
6132 if (tree_int_cst_sgn (c) < 0)
6133 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6134 if (sub_strict_overflow_p)
6135 *strict_overflow_p = true;
6136 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6137 fold_convert (ctype, t2));
6139 break;
6141 case LSHIFT_EXPR: case RSHIFT_EXPR:
6142 /* If the second operand is constant, this is a multiplication
6143 or floor division, by a power of two, so we can treat it that
6144 way unless the multiplier or divisor overflows. Signed
6145 left-shift overflow is implementation-defined rather than
6146 undefined in C90, so do not convert signed left shift into
6147 multiplication. */
6148 if (TREE_CODE (op1) == INTEGER_CST
6149 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6150 /* const_binop may not detect overflow correctly,
6151 so check for it explicitly here. */
6152 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6153 && 0 != (t1 = fold_convert (ctype,
6154 const_binop (LSHIFT_EXPR,
6155 size_one_node,
6156 op1)))
6157 && !TREE_OVERFLOW (t1))
6158 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6159 ? MULT_EXPR : FLOOR_DIV_EXPR,
6160 ctype,
6161 fold_convert (ctype, op0),
6162 t1),
6163 c, code, wide_type, strict_overflow_p);
6164 break;
6166 case PLUS_EXPR: case MINUS_EXPR:
6167 /* See if we can eliminate the operation on both sides. If we can, we
6168 can return a new PLUS or MINUS. If we can't, the only remaining
6169 cases where we can do anything are if the second operand is a
6170 constant. */
6171 sub_strict_overflow_p = false;
6172 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6173 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6174 if (t1 != 0 && t2 != 0
6175 && (code == MULT_EXPR
6176 /* If not multiplication, we can only do this if both operands
6177 are divisible by c. */
6178 || (multiple_of_p (ctype, op0, c)
6179 && multiple_of_p (ctype, op1, c))))
6181 if (sub_strict_overflow_p)
6182 *strict_overflow_p = true;
6183 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6184 fold_convert (ctype, t2));
6187 /* If this was a subtraction, negate OP1 and set it to be an addition.
6188 This simplifies the logic below. */
6189 if (tcode == MINUS_EXPR)
6191 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6192 /* If OP1 was not easily negatable, the constant may be OP0. */
6193 if (TREE_CODE (op0) == INTEGER_CST)
6195 std::swap (op0, op1);
6196 std::swap (t1, t2);
6200 if (TREE_CODE (op1) != INTEGER_CST)
6201 break;
6203 /* If either OP1 or C are negative, this optimization is not safe for
6204 some of the division and remainder types while for others we need
6205 to change the code. */
6206 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6208 if (code == CEIL_DIV_EXPR)
6209 code = FLOOR_DIV_EXPR;
6210 else if (code == FLOOR_DIV_EXPR)
6211 code = CEIL_DIV_EXPR;
6212 else if (code != MULT_EXPR
6213 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6214 break;
6217 /* If it's a multiply or a division/modulus operation of a multiple
6218 of our constant, do the operation and verify it doesn't overflow. */
6219 if (code == MULT_EXPR
6220 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6222 op1 = const_binop (code, fold_convert (ctype, op1),
6223 fold_convert (ctype, c));
6224 /* We allow the constant to overflow with wrapping semantics. */
6225 if (op1 == 0
6226 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6227 break;
6229 else
6230 break;
6232 /* If we have an unsigned type, we cannot widen the operation since it
6233 will change the result if the original computation overflowed. */
6234 if (TYPE_UNSIGNED (ctype) && ctype != type)
6235 break;
6237 /* If we were able to eliminate our operation from the first side,
6238 apply our operation to the second side and reform the PLUS. */
6239 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6240 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6242 /* The last case is if we are a multiply. In that case, we can
6243 apply the distributive law to commute the multiply and addition
6244 if the multiplication of the constants doesn't overflow
6245 and overflow is defined. With undefined overflow
6246 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6247 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6248 return fold_build2 (tcode, ctype,
6249 fold_build2 (code, ctype,
6250 fold_convert (ctype, op0),
6251 fold_convert (ctype, c)),
6252 op1);
6254 break;
6256 case MULT_EXPR:
6257 /* We have a special case here if we are doing something like
6258 (C * 8) % 4 since we know that's zero. */
6259 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6260 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6261 /* If the multiplication can overflow we cannot optimize this. */
6262 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6263 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6264 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6266 *strict_overflow_p = true;
6267 return omit_one_operand (type, integer_zero_node, op0);
6270 /* ... fall through ... */
6272 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6273 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6274 /* If we can extract our operation from the LHS, do so and return a
6275 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6276 do something only if the second operand is a constant. */
6277 if (same_p
6278 && (t1 = extract_muldiv (op0, c, code, wide_type,
6279 strict_overflow_p)) != 0)
6280 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6281 fold_convert (ctype, op1));
6282 else if (tcode == MULT_EXPR && code == MULT_EXPR
6283 && (t1 = extract_muldiv (op1, c, code, wide_type,
6284 strict_overflow_p)) != 0)
6285 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6286 fold_convert (ctype, t1));
6287 else if (TREE_CODE (op1) != INTEGER_CST)
6288 return 0;
6290 /* If these are the same operation types, we can associate them
6291 assuming no overflow. */
6292 if (tcode == code)
6294 bool overflow_p = false;
6295 bool overflow_mul_p;
6296 signop sign = TYPE_SIGN (ctype);
6297 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6298 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6299 if (overflow_mul_p
6300 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6301 overflow_p = true;
6302 if (!overflow_p)
6304 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6305 TYPE_SIGN (TREE_TYPE (op1)));
6306 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6307 wide_int_to_tree (ctype, mul));
6311 /* If these operations "cancel" each other, we have the main
6312 optimizations of this pass, which occur when either constant is a
6313 multiple of the other, in which case we replace this with an
6314 operation of either CODE or TCODE.
6316 If we have an unsigned type, we cannot do this since it will change
6317 the result if the original computation overflowed. */
6318 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6319 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6320 || (tcode == MULT_EXPR
6321 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6322 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6323 && code != MULT_EXPR)))
6325 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6327 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6328 *strict_overflow_p = true;
6329 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6330 fold_convert (ctype,
6331 const_binop (TRUNC_DIV_EXPR,
6332 op1, c)));
6334 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6336 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6337 *strict_overflow_p = true;
6338 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6339 fold_convert (ctype,
6340 const_binop (TRUNC_DIV_EXPR,
6341 c, op1)));
6344 break;
6346 default:
6347 break;
6350 return 0;
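/* A sketch of the "cancel" case above, assuming a signed type where
   overflow is undefined:

     (X * 12) / 4  ->  X * 3     op1 is a multiple of c
     (X * 4) / 12  ->  X / 3     c is a multiple of op1

   Both folds set *STRICT_OVERFLOW_P, since X * 12 may overflow where
   the simplified form does not.  */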
6353 /* Return a node which has the indicated constant VALUE (either 0 or
6354 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6355 and is of the indicated TYPE. */
6357 tree
6358 constant_boolean_node (bool value, tree type)
6360 if (type == integer_type_node)
6361 return value ? integer_one_node : integer_zero_node;
6362 else if (type == boolean_type_node)
6363 return value ? boolean_true_node : boolean_false_node;
6364 else if (TREE_CODE (type) == VECTOR_TYPE)
6365 return build_vector_from_val (type,
6366 build_int_cst (TREE_TYPE (type),
6367 value ? -1 : 0));
6368 else
6369 return fold_convert (type, value ? integer_one_node : integer_zero_node);
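/* For instance, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a vector comparison type it yields a
   vector with all elements -1, the all-ones mask convention used by
   VEC_COND_EXPR.  */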
6373 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6374 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6375 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6376 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6377 COND is the first argument to CODE; otherwise (as in the example
6378 given here), it is the second argument. TYPE is the type of the
6379 original expression. Return NULL_TREE if no simplification is
6380 possible. */
6382 static tree
6383 fold_binary_op_with_conditional_arg (location_t loc,
6384 enum tree_code code,
6385 tree type, tree op0, tree op1,
6386 tree cond, tree arg, int cond_first_p)
6388 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6389 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6390 tree test, true_value, false_value;
6391 tree lhs = NULL_TREE;
6392 tree rhs = NULL_TREE;
6393 enum tree_code cond_code = COND_EXPR;
6395 if (TREE_CODE (cond) == COND_EXPR
6396 || TREE_CODE (cond) == VEC_COND_EXPR)
6398 test = TREE_OPERAND (cond, 0);
6399 true_value = TREE_OPERAND (cond, 1);
6400 false_value = TREE_OPERAND (cond, 2);
6401 /* If this operand is an expression that throws (and hence has
6402 void type), it does not make sense to try to perform a logical
6403 or arithmetic operation involving it. */
6404 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6405 lhs = true_value;
6406 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6407 rhs = false_value;
6409 else
6411 tree testtype = TREE_TYPE (cond);
6412 test = cond;
6413 true_value = constant_boolean_node (true, testtype);
6414 false_value = constant_boolean_node (false, testtype);
6417 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6418 cond_code = VEC_COND_EXPR;
6420 /* This transformation is only worthwhile if we don't have to wrap ARG
6421 in a SAVE_EXPR and the operation can be simplified without recursing
6422 on at least one of the branches once it's pushed inside the COND_EXPR. */
6423 if (!TREE_CONSTANT (arg)
6424 && (TREE_SIDE_EFFECTS (arg)
6425 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6426 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6427 return NULL_TREE;
6429 arg = fold_convert_loc (loc, arg_type, arg);
6430 if (lhs == 0)
6432 true_value = fold_convert_loc (loc, cond_type, true_value);
6433 if (cond_first_p)
6434 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6435 else
6436 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6438 if (rhs == 0)
6440 false_value = fold_convert_loc (loc, cond_type, false_value);
6441 if (cond_first_p)
6442 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6443 else
6444 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6447 /* Check that we have simplified at least one of the branches. */
6448 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6449 return NULL_TREE;
6451 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6455 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6457 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6458 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6459 ADDEND is the same as X.
6461 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6462 and finite. The problematic cases are when X is zero, and its mode
6463 has signed zeros. In the case of rounding towards -infinity,
6464 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6465 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6467 bool
6468 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6470 if (!real_zerop (addend))
6471 return false;
6473 /* Don't allow the fold with -fsignaling-nans. */
6474 if (HONOR_SNANS (element_mode (type)))
6475 return false;
6477 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6478 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6479 return true;
6481 /* In a vector or complex, we would need to check the sign of all zeros. */
6482 if (TREE_CODE (addend) != REAL_CST)
6483 return false;
6485 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6486 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6487 negate = !negate;
6489 /* The mode has signed zeros, and we have to honor their sign.
6490 In this situation, there is only one case we can return true for.
6491 X - 0 is the same as X unless rounding towards -infinity is
6492 supported. */
6493 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
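/* Concretely: if X is -0.0, then X + 0.0 is +0.0 under round-to-nearest,
   so the fold "X + 0.0 -> X" would change the sign of zero.  X - 0.0
   preserves X for both zeros, except under rounding towards -infinity,
   where (+0.0) - 0.0 yields -0.0; hence the fold is allowed only for
   subtraction and only when sign-dependent rounding is not honored.  */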
6496 /* Subroutine of fold() that optimizes comparisons of a division by
6497 a nonzero integer constant against an integer constant, i.e.
6498 X/C1 op C2.
6500 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6501 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6502 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6504 The function returns the constant folded tree if a simplification
6505 can be made, and NULL_TREE otherwise. */
6507 static tree
6508 fold_div_compare (location_t loc,
6509 enum tree_code code, tree type, tree arg0, tree arg1)
6511 tree prod, tmp, hi, lo;
6512 tree arg00 = TREE_OPERAND (arg0, 0);
6513 tree arg01 = TREE_OPERAND (arg0, 1);
6514 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6515 bool neg_overflow = false;
6516 bool overflow;
6518 /* We have to do this the hard way to detect unsigned overflow.
6519 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6520 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6521 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6522 neg_overflow = false;
6524 if (sign == UNSIGNED)
6526 tmp = int_const_binop (MINUS_EXPR, arg01,
6527 build_int_cst (TREE_TYPE (arg01), 1));
6528 lo = prod;
6530 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6531 val = wi::add (prod, tmp, sign, &overflow);
6532 hi = force_fit_type (TREE_TYPE (arg00), val,
6533 -1, overflow | TREE_OVERFLOW (prod));
6535 else if (tree_int_cst_sgn (arg01) >= 0)
6537 tmp = int_const_binop (MINUS_EXPR, arg01,
6538 build_int_cst (TREE_TYPE (arg01), 1));
6539 switch (tree_int_cst_sgn (arg1))
6541 case -1:
6542 neg_overflow = true;
6543 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6544 hi = prod;
6545 break;
6547 case 0:
6548 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6549 hi = tmp;
6550 break;
6552 case 1:
6553 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6554 lo = prod;
6555 break;
6557 default:
6558 gcc_unreachable ();
6561 else
6563 /* A negative divisor reverses the relational operators. */
6564 code = swap_tree_comparison (code);
6566 tmp = int_const_binop (PLUS_EXPR, arg01,
6567 build_int_cst (TREE_TYPE (arg01), 1));
6568 switch (tree_int_cst_sgn (arg1))
6570 case -1:
6571 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6572 lo = prod;
6573 break;
6575 case 0:
6576 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6577 lo = tmp;
6578 break;
6580 case 1:
6581 neg_overflow = true;
6582 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6583 hi = prod;
6584 break;
6586 default:
6587 gcc_unreachable ();
6591 switch (code)
6593 case EQ_EXPR:
6594 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6595 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6596 if (TREE_OVERFLOW (hi))
6597 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6598 if (TREE_OVERFLOW (lo))
6599 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6600 return build_range_check (loc, type, arg00, 1, lo, hi);
6602 case NE_EXPR:
6603 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6604 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6605 if (TREE_OVERFLOW (hi))
6606 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6607 if (TREE_OVERFLOW (lo))
6608 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6609 return build_range_check (loc, type, arg00, 0, lo, hi);
6611 case LT_EXPR:
6612 if (TREE_OVERFLOW (lo))
6614 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6615 return omit_one_operand_loc (loc, type, tmp, arg00);
6617 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6619 case LE_EXPR:
6620 if (TREE_OVERFLOW (hi))
6622 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6623 return omit_one_operand_loc (loc, type, tmp, arg00);
6625 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6627 case GT_EXPR:
6628 if (TREE_OVERFLOW (hi))
6630 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6631 return omit_one_operand_loc (loc, type, tmp, arg00);
6633 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6635 case GE_EXPR:
6636 if (TREE_OVERFLOW (lo))
6638 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6639 return omit_one_operand_loc (loc, type, tmp, arg00);
6641 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6643 default:
6644 break;
6647 return NULL_TREE;
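/* A worked instance, assuming signed int: for "x / 3 == 2" we get
   prod = 6, tmp = 2, lo = 6 and hi = 8, so the comparison becomes the
   range check "x >= 6 && x <= 8" via build_range_check.  For
   "x / 3 > 2" only HI matters and the result is "x > 8".  */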
6651 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6652 equality/inequality test, then return a simplified form of the test
6653 using a sign test. Otherwise return NULL. TYPE is the desired
6654 result type. */
6656 static tree
6657 fold_single_bit_test_into_sign_test (location_t loc,
6658 enum tree_code code, tree arg0, tree arg1,
6659 tree result_type)
6661 /* If this is testing a single bit, we can optimize the test. */
6662 if ((code == NE_EXPR || code == EQ_EXPR)
6663 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6664 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6666 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6667 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6668 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6670 if (arg00 != NULL_TREE
6671 /* This is only a win if casting to a signed type is cheap,
6672 i.e. when arg00's type is not a partial mode. */
6673 && TYPE_PRECISION (TREE_TYPE (arg00))
6674 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6676 tree stype = signed_type_for (TREE_TYPE (arg00));
6677 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6678 result_type,
6679 fold_convert_loc (loc, stype, arg00),
6680 build_int_cst (stype, 0));
6684 return NULL_TREE;
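/* E.g. for 32-bit unsigned A, "(A & 0x80000000) != 0" tests exactly
   the sign bit and becomes "(int) A < 0"; the EQ_EXPR form becomes
   "(int) A >= 0".  */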
6687 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6688 equality/inequality test, then return a simplified form of
6689 the test using shifts and logical operations. Otherwise return
6690 NULL. TYPE is the desired result type. */
6692 tree
6693 fold_single_bit_test (location_t loc, enum tree_code code,
6694 tree arg0, tree arg1, tree result_type)
6696 /* If this is testing a single bit, we can optimize the test. */
6697 if ((code == NE_EXPR || code == EQ_EXPR)
6698 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6699 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6701 tree inner = TREE_OPERAND (arg0, 0);
6702 tree type = TREE_TYPE (arg0);
6703 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6704 machine_mode operand_mode = TYPE_MODE (type);
6705 int ops_unsigned;
6706 tree signed_type, unsigned_type, intermediate_type;
6707 tree tem, one;
6709 /* First, see if we can fold the single bit test into a sign-bit
6710 test. */
6711 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6712 result_type);
6713 if (tem)
6714 return tem;
6716 /* Otherwise we have (A & C) != 0 where C is a single bit,
6717 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6718 Similarly for (A & C) == 0. */
6720 /* If INNER is a right shift of a constant and it plus BITNUM does
6721 not overflow, adjust BITNUM and INNER. */
6722 if (TREE_CODE (inner) == RSHIFT_EXPR
6723 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6724 && bitnum < TYPE_PRECISION (type)
6725 && wi::ltu_p (TREE_OPERAND (inner, 1),
6726 TYPE_PRECISION (type) - bitnum))
6728 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6729 inner = TREE_OPERAND (inner, 0);
6732 /* If we are going to be able to omit the AND below, we must do our
6733 operations as unsigned. If we must use the AND, we have a choice.
6734 Normally unsigned is faster, but for some machines signed is. */
6735 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6736 && !flag_syntax_only) ? 0 : 1;
6738 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6739 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6740 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6741 inner = fold_convert_loc (loc, intermediate_type, inner);
6743 if (bitnum != 0)
6744 inner = build2 (RSHIFT_EXPR, intermediate_type,
6745 inner, size_int (bitnum));
6747 one = build_int_cst (intermediate_type, 1);
6749 if (code == EQ_EXPR)
6750 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6752 /* Put the AND last so it can combine with more things. */
6753 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6755 /* Make sure to return the proper type. */
6756 inner = fold_convert_loc (loc, result_type, inner);
6758 return inner;
6760 return NULL_TREE;
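/* E.g. "(A & 8) != 0" becomes "((A >> 3) & 1)" and "(A & 8) == 0"
   becomes "(((A >> 3) ^ 1) & 1)", with the AND kept outermost so it
   can combine with surrounding code.  */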
6763 /* Check whether we are allowed to reorder operands arg0 and arg1,
6764 such that the evaluation of arg1 occurs before arg0. */
6766 static bool
6767 reorder_operands_p (const_tree arg0, const_tree arg1)
6769 if (! flag_evaluation_order)
6770 return true;
6771 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6772 return true;
6773 return ! TREE_SIDE_EFFECTS (arg0)
6774 && ! TREE_SIDE_EFFECTS (arg1);
6777 /* Test whether it is preferable to swap two operands, ARG0 and
6778 ARG1, for example because ARG0 is an integer constant and ARG1
6779 isn't. If REORDER is true, only recommend swapping if we can
6780 evaluate the operands in reverse order. */
6782 bool
6783 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6785 if (CONSTANT_CLASS_P (arg1))
6786 return 0;
6787 if (CONSTANT_CLASS_P (arg0))
6788 return 1;
6790 STRIP_NOPS (arg0);
6791 STRIP_NOPS (arg1);
6793 if (TREE_CONSTANT (arg1))
6794 return 0;
6795 if (TREE_CONSTANT (arg0))
6796 return 1;
6798 if (reorder && flag_evaluation_order
6799 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6800 return 0;
6802 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6803 for commutative and comparison operators. Ensuring a canonical
6804 form allows the optimizers to find additional redundancies without
6805 having to explicitly check for both orderings. */
6806 if (TREE_CODE (arg0) == SSA_NAME
6807 && TREE_CODE (arg1) == SSA_NAME
6808 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6809 return 1;
6811 /* Put SSA_NAMEs last. */
6812 if (TREE_CODE (arg1) == SSA_NAME)
6813 return 0;
6814 if (TREE_CODE (arg0) == SSA_NAME)
6815 return 1;
6817 /* Put variables last. */
6818 if (DECL_P (arg1))
6819 return 0;
6820 if (DECL_P (arg0))
6821 return 1;
6823 return 0;
6827 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6828 means A >= Y && A != MAX, but in this case we know that
6829 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6831 static tree
6832 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6834 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6836 if (TREE_CODE (bound) == LT_EXPR)
6837 a = TREE_OPERAND (bound, 0);
6838 else if (TREE_CODE (bound) == GT_EXPR)
6839 a = TREE_OPERAND (bound, 1);
6840 else
6841 return NULL_TREE;
6843 typea = TREE_TYPE (a);
6844 if (!INTEGRAL_TYPE_P (typea)
6845 && !POINTER_TYPE_P (typea))
6846 return NULL_TREE;
6848 if (TREE_CODE (ineq) == LT_EXPR)
6850 a1 = TREE_OPERAND (ineq, 1);
6851 y = TREE_OPERAND (ineq, 0);
6853 else if (TREE_CODE (ineq) == GT_EXPR)
6855 a1 = TREE_OPERAND (ineq, 0);
6856 y = TREE_OPERAND (ineq, 1);
6858 else
6859 return NULL_TREE;
6861 if (TREE_TYPE (a1) != typea)
6862 return NULL_TREE;
6864 if (POINTER_TYPE_P (typea))
6866 /* Convert the pointer types into integer before taking the difference. */
6867 tree ta = fold_convert_loc (loc, ssizetype, a);
6868 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6869 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6871 else
6872 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6874 if (!diff || !integer_onep (diff))
6875 return NULL_TREE;
6877 return fold_build2_loc (loc, GE_EXPR, type, a, y);
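/* The key step: BOUND establishes A < X, and X <= MAX for the type,
   so A cannot be MAX.  The only value for which A + 1 > Y differs
   from A >= Y is A == MAX, where A + 1 wraps; since that value is
   excluded, the nonsharp form is safe.  */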
6880 /* Fold a sum or difference of at least one multiplication.
6881 Returns the folded tree or NULL if no simplification could be made. */
6883 static tree
6884 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6885 tree arg0, tree arg1)
6887 tree arg00, arg01, arg10, arg11;
6888 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6890 /* (A * C) +- (B * C) -> (A+-B) * C.
6891 (A * C) +- A -> A * (C+-1).
6892 We are most concerned about the case where C is a constant,
6893 but other combinations show up during loop reduction. Since
6894 it is not difficult, try all four possibilities. */
6896 if (TREE_CODE (arg0) == MULT_EXPR)
6898 arg00 = TREE_OPERAND (arg0, 0);
6899 arg01 = TREE_OPERAND (arg0, 1);
6901 else if (TREE_CODE (arg0) == INTEGER_CST)
6903 arg00 = build_one_cst (type);
6904 arg01 = arg0;
6906 else
6908 /* We cannot generate constant 1 for fract. */
6909 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6910 return NULL_TREE;
6911 arg00 = arg0;
6912 arg01 = build_one_cst (type);
6914 if (TREE_CODE (arg1) == MULT_EXPR)
6916 arg10 = TREE_OPERAND (arg1, 0);
6917 arg11 = TREE_OPERAND (arg1, 1);
6919 else if (TREE_CODE (arg1) == INTEGER_CST)
6921 arg10 = build_one_cst (type);
6922 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6923 the purpose of this canonicalization. */
6924 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6925 && negate_expr_p (arg1)
6926 && code == PLUS_EXPR)
6928 arg11 = negate_expr (arg1);
6929 code = MINUS_EXPR;
6931 else
6932 arg11 = arg1;
6934 else
6936 /* We cannot generate constant 1 for fract. */
6937 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6938 return NULL_TREE;
6939 arg10 = arg1;
6940 arg11 = build_one_cst (type);
6942 same = NULL_TREE;
6944 if (operand_equal_p (arg01, arg11, 0))
6945 same = arg01, alt0 = arg00, alt1 = arg10;
6946 else if (operand_equal_p (arg00, arg10, 0))
6947 same = arg00, alt0 = arg01, alt1 = arg11;
6948 else if (operand_equal_p (arg00, arg11, 0))
6949 same = arg00, alt0 = arg01, alt1 = arg10;
6950 else if (operand_equal_p (arg01, arg10, 0))
6951 same = arg01, alt0 = arg00, alt1 = arg11;
6953 /* No identical multiplicands; see if we can find a common
6954 power-of-two factor in non-power-of-two multiplies. This
6955 can help in multi-dimensional array access. */
6956 else if (tree_fits_shwi_p (arg01)
6957 && tree_fits_shwi_p (arg11))
6959 HOST_WIDE_INT int01, int11, tmp;
6960 bool swap = false;
6961 tree maybe_same;
6962 int01 = tree_to_shwi (arg01);
6963 int11 = tree_to_shwi (arg11);
6965 /* Move min of absolute values to int11. */
6966 if (absu_hwi (int01) < absu_hwi (int11))
6968 tmp = int01, int01 = int11, int11 = tmp;
6969 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6970 maybe_same = arg01;
6971 swap = true;
6973 else
6974 maybe_same = arg11;
6976 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6977 /* The remainder should not be a constant, otherwise we
6978 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6979 increase the number of multiplications necessary. */
6980 && TREE_CODE (arg10) != INTEGER_CST)
6982 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6983 build_int_cst (TREE_TYPE (arg00),
6984 int01 / int11));
6985 alt1 = arg10;
6986 same = maybe_same;
6987 if (swap)
6988 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6992 if (same)
6993 return fold_build2_loc (loc, MULT_EXPR, type,
6994 fold_build2_loc (loc, code, type,
6995 fold_convert_loc (loc, type, alt0),
6996 fold_convert_loc (loc, type, alt1)),
6997 fold_convert_loc (loc, type, same));
6999 return NULL_TREE;
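/* Examples of the cases handled above, for an integral TYPE:

     (i * 4) + (j * 4)  ->  (i + j) * 4        identical multiplicands
     (i * 4) + i        ->  i * 5              the C +- 1 form
     (i * 8) + (j * 4)  ->  (i * 2 + j) * 4    common power-of-two factor

   The last form is rejected when the other addend is a constant, so
   that i * 4 + 2 is not turned into (i * 2 + 1) * 2.  */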
7002 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7003 specified by EXPR into the buffer PTR of length LEN bytes.
7004 Return the number of bytes placed in the buffer, or zero
7005 upon failure. */
7007 static int
7008 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7010 tree type = TREE_TYPE (expr);
7011 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7012 int byte, offset, word, words;
7013 unsigned char value;
7015 if ((off == -1 && total_bytes > len)
7016 || off >= total_bytes)
7017 return 0;
7018 if (off == -1)
7019 off = 0;
7020 words = total_bytes / UNITS_PER_WORD;
7022 for (byte = 0; byte < total_bytes; byte++)
7024 int bitpos = byte * BITS_PER_UNIT;
7025 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7026 number of bytes. */
7027 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7029 if (total_bytes > UNITS_PER_WORD)
7031 word = byte / UNITS_PER_WORD;
7032 if (WORDS_BIG_ENDIAN)
7033 word = (words - 1) - word;
7034 offset = word * UNITS_PER_WORD;
7035 if (BYTES_BIG_ENDIAN)
7036 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7037 else
7038 offset += byte % UNITS_PER_WORD;
7040 else
7041 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7042 if (offset >= off
7043 && offset - off < len)
7044 ptr[offset - off] = value;
7046 return MIN (len, total_bytes - off);
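/* For illustration, assuming a 32-bit little-endian target: encoding
   the INTEGER_CST 0x01020304 stores the bytes {04, 03, 02, 01} at PTR
   and returns 4.  With OFF == 2 and LEN == 2 only {02, 01} are
   written.  */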
7050 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7051 specified by EXPR into the buffer PTR of length LEN bytes.
7052 Return the number of bytes placed in the buffer, or zero
7053 upon failure. */
7055 static int
7056 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7058 tree type = TREE_TYPE (expr);
7059 machine_mode mode = TYPE_MODE (type);
7060 int total_bytes = GET_MODE_SIZE (mode);
7061 FIXED_VALUE_TYPE value;
7062 tree i_value, i_type;
7064 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7065 return 0;
7067 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7069 if (NULL_TREE == i_type
7070 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7071 return 0;
7073 value = TREE_FIXED_CST (expr);
7074 i_value = double_int_to_tree (i_type, value.data);
7076 return native_encode_int (i_value, ptr, len, off);
7080 /* Subroutine of native_encode_expr. Encode the REAL_CST
7081 specified by EXPR into the buffer PTR of length LEN bytes.
7082 Return the number of bytes placed in the buffer, or zero
7083 upon failure. */
7085 static int
7086 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7088 tree type = TREE_TYPE (expr);
7089 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7090 int byte, offset, word, words, bitpos;
7091 unsigned char value;
7093 /* There are always 32 bits in each long, no matter the size of
7094 the host's long. We handle floating point representations with
7095 up to 192 bits. */
7096 long tmp[6];
7098 if ((off == -1 && total_bytes > len)
7099 || off >= total_bytes)
7100 return 0;
7101 if (off == -1)
7102 off = 0;
7103 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7105 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7107 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7108 bitpos += BITS_PER_UNIT)
7110 byte = (bitpos / BITS_PER_UNIT) & 3;
7111 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7113 if (UNITS_PER_WORD < 4)
7115 word = byte / UNITS_PER_WORD;
7116 if (WORDS_BIG_ENDIAN)
7117 word = (words - 1) - word;
7118 offset = word * UNITS_PER_WORD;
7119 if (BYTES_BIG_ENDIAN)
7120 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7121 else
7122 offset += byte % UNITS_PER_WORD;
7124 else
7125 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7126 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7127 if (offset >= off
7128 && offset - off < len)
7129 ptr[offset - off] = value;
7131 return MIN (len, total_bytes - off);
7134 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7135 specified by EXPR into the buffer PTR of length LEN bytes.
7136 Return the number of bytes placed in the buffer, or zero
7137 upon failure. */
7139 static int
7140 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7142 int rsize, isize;
7143 tree part;
7145 part = TREE_REALPART (expr);
7146 rsize = native_encode_expr (part, ptr, len, off);
7147 if (off == -1
7148 && rsize == 0)
7149 return 0;
7150 part = TREE_IMAGPART (expr);
7151 if (off != -1)
7152 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7153 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7154 if (off == -1
7155 && isize != rsize)
7156 return 0;
7157 return rsize + isize;
7161 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7162 specified by EXPR into the buffer PTR of length LEN bytes.
7163 Return the number of bytes placed in the buffer, or zero
7164 upon failure. */
7166 static int
7167 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7169 unsigned i, count;
7170 int size, offset;
7171 tree itype, elem;
7173 offset = 0;
7174 count = VECTOR_CST_NELTS (expr);
7175 itype = TREE_TYPE (TREE_TYPE (expr));
7176 size = GET_MODE_SIZE (TYPE_MODE (itype));
7177 for (i = 0; i < count; i++)
7179 if (off >= size)
7181 off -= size;
7182 continue;
7184 elem = VECTOR_CST_ELT (expr, i);
7185 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7186 if ((off == -1 && res != size)
7187 || res == 0)
7188 return 0;
7189 offset += res;
7190 if (offset >= len)
7191 return offset;
7192 if (off != -1)
7193 off = 0;
7195 return offset;
7199 /* Subroutine of native_encode_expr. Encode the STRING_CST
7200 specified by EXPR into the buffer PTR of length LEN bytes.
7201 Return the number of bytes placed in the buffer, or zero
7202 upon failure. */
7204 static int
7205 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7207 tree type = TREE_TYPE (expr);
7208 HOST_WIDE_INT total_bytes;
7210 if (TREE_CODE (type) != ARRAY_TYPE
7211 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7212 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7213 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7214 return 0;
7215 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7216 if ((off == -1 && total_bytes > len)
7217 || off >= total_bytes)
7218 return 0;
7219 if (off == -1)
7220 off = 0;
7221 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7223 int written = 0;
7224 if (off < TREE_STRING_LENGTH (expr))
7226 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7227 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7229 memset (ptr + written, 0,
7230 MIN (total_bytes - written, len - written));
7232 else
7233 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7234 return MIN (total_bytes - off, len);
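/* Note the padding case above: for a string constant shorter than its
   array type, e.g. char buf[8] = "ab", the bytes past
   TREE_STRING_LENGTH are encoded as zeros, matching the implicit
   zero-initialization of the trailing array elements.  */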
7238 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7239 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7240 buffer PTR of length LEN bytes. If OFF is not -1 then start
7241 the encoding at byte offset OFF and encode at most LEN bytes.
7242 Return the number of bytes placed in the buffer, or zero upon failure. */
7244 int
7245 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7247 /* We don't support starting at negative offset and -1 is special. */
7248 if (off < -1)
7249 return 0;
7251 switch (TREE_CODE (expr))
7253 case INTEGER_CST:
7254 return native_encode_int (expr, ptr, len, off);
7256 case REAL_CST:
7257 return native_encode_real (expr, ptr, len, off);
7259 case FIXED_CST:
7260 return native_encode_fixed (expr, ptr, len, off);
7262 case COMPLEX_CST:
7263 return native_encode_complex (expr, ptr, len, off);
7265 case VECTOR_CST:
7266 return native_encode_vector (expr, ptr, len, off);
7268 case STRING_CST:
7269 return native_encode_string (expr, ptr, len, off);
7271 default:
7272 return 0;
7277 /* Subroutine of native_interpret_expr. Interpret the contents of
7278 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7279 If the buffer cannot be interpreted, return NULL_TREE. */
7281 static tree
7282 native_interpret_int (tree type, const unsigned char *ptr, int len)
7284 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7286 if (total_bytes > len
7287 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7288 return NULL_TREE;
7290 wide_int result = wi::from_buffer (ptr, total_bytes);
7292 return wide_int_to_tree (type, result);
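/* Illustrative sketch (editorial addition, not part of GCC):
   wi::from_buffer reassembles the integer from its target-order bytes.
   Host-side analogue for a little-endian target and widths up to
   64 bits:  */
#include <stdint.h>

static uint64_t
interpret_le_int_sketch (const unsigned char *ptr, int total_bytes)
{
  uint64_t result = 0;
  for (int i = 0; i < total_bytes; i++)
    result |= (uint64_t) ptr[i] << (8 * i);  /* Byte i is bits 8i..8i+7.  */
  return result;
}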
7296 /* Subroutine of native_interpret_expr. Interpret the contents of
7297 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7298 If the buffer cannot be interpreted, return NULL_TREE. */
7300 static tree
7301 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7303 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7304 double_int result;
7305 FIXED_VALUE_TYPE fixed_value;
7307 if (total_bytes > len
7308 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7309 return NULL_TREE;
7311 result = double_int::from_buffer (ptr, total_bytes);
7312 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7314 return build_fixed (type, fixed_value);
7318 /* Subroutine of native_interpret_expr. Interpret the contents of
7319 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7320 If the buffer cannot be interpreted, return NULL_TREE. */
7322 static tree
7323 native_interpret_real (tree type, const unsigned char *ptr, int len)
7325 machine_mode mode = TYPE_MODE (type);
7326 int total_bytes = GET_MODE_SIZE (mode);
7327 unsigned char value;
7328 /* There are always 32 bits in each long, no matter the size of
7329 the host's long. We handle floating point representations with
7330 up to 192 bits. */
7331 REAL_VALUE_TYPE r;
7332 long tmp[6];
7334 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7335 if (total_bytes > len || total_bytes > 24)
7336 return NULL_TREE;
7337 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7339 memset (tmp, 0, sizeof (tmp));
7340 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7341 bitpos += BITS_PER_UNIT)
7343 /* Both OFFSET and BYTE index within a long;
7344 bitpos indexes the whole float. */
7345 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7346 if (UNITS_PER_WORD < 4)
7348 int word = byte / UNITS_PER_WORD;
7349 if (WORDS_BIG_ENDIAN)
7350 word = (words - 1) - word;
7351 offset = word * UNITS_PER_WORD;
7352 if (BYTES_BIG_ENDIAN)
7353 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7354 else
7355 offset += byte % UNITS_PER_WORD;
7357 else
7359 offset = byte;
7360 if (BYTES_BIG_ENDIAN)
7362 /* Reverse bytes within each long, or within the entire float
7363 if it's smaller than a long (for HFmode). */
7364 offset = MIN (3, total_bytes - 1) - offset;
7365 gcc_assert (offset >= 0);
7368 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7370 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7373 real_from_target (&r, tmp, mode);
7374 return build_real (type, r);
7378 /* Subroutine of native_interpret_expr. Interpret the contents of
7379 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7380 If the buffer cannot be interpreted, return NULL_TREE. */
7382 static tree
7383 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7385 tree etype, rpart, ipart;
7386 int size;
7388 etype = TREE_TYPE (type);
7389 size = GET_MODE_SIZE (TYPE_MODE (etype));
7390 if (size * 2 > len)
7391 return NULL_TREE;
7392 rpart = native_interpret_expr (etype, ptr, size);
7393 if (!rpart)
7394 return NULL_TREE;
7395 ipart = native_interpret_expr (etype, ptr+size, size);
7396 if (!ipart)
7397 return NULL_TREE;
7398 return build_complex (type, rpart, ipart);
7402 /* Subroutine of native_interpret_expr. Interpret the contents of
7403 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7404 If the buffer cannot be interpreted, return NULL_TREE. */
7406 static tree
7407 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7409 tree etype, elem;
7410 int i, size, count;
7411 tree *elements;
7413 etype = TREE_TYPE (type);
7414 size = GET_MODE_SIZE (TYPE_MODE (etype));
7415 count = TYPE_VECTOR_SUBPARTS (type);
7416 if (size * count > len)
7417 return NULL_TREE;
7419 elements = XALLOCAVEC (tree, count);
7420 for (i = count - 1; i >= 0; i--)
7422 elem = native_interpret_expr (etype, ptr+(i*size), size);
7423 if (!elem)
7424 return NULL_TREE;
7425 elements[i] = elem;
7427 return build_vector (type, elements);
7431 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7432 the buffer PTR of length LEN as a constant of type TYPE. For
7433 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7434 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7435 return NULL_TREE. */
7437 tree
7438 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7440 switch (TREE_CODE (type))
7442 case INTEGER_TYPE:
7443 case ENUMERAL_TYPE:
7444 case BOOLEAN_TYPE:
7445 case POINTER_TYPE:
7446 case REFERENCE_TYPE:
7447 return native_interpret_int (type, ptr, len);
7449 case REAL_TYPE:
7450 return native_interpret_real (type, ptr, len);
7452 case FIXED_POINT_TYPE:
7453 return native_interpret_fixed (type, ptr, len);
7455 case COMPLEX_TYPE:
7456 return native_interpret_complex (type, ptr, len);
7458 case VECTOR_TYPE:
7459 return native_interpret_vector (type, ptr, len);
7461 default:
7462 return NULL_TREE;
7466 /* Returns true if we can interpret the contents of a native encoding
7467 as TYPE. */
7469 static bool
7470 can_native_interpret_type_p (tree type)
7472 switch (TREE_CODE (type))
7474 case INTEGER_TYPE:
7475 case ENUMERAL_TYPE:
7476 case BOOLEAN_TYPE:
7477 case POINTER_TYPE:
7478 case REFERENCE_TYPE:
7479 case FIXED_POINT_TYPE:
7480 case REAL_TYPE:
7481 case COMPLEX_TYPE:
7482 case VECTOR_TYPE:
7483 return true;
7484 default:
7485 return false;
7489 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7490 TYPE at compile-time. If we're unable to perform the conversion
7491 return NULL_TREE. */
7493 static tree
7494 fold_view_convert_expr (tree type, tree expr)
7496 /* We support up to 512-bit values (for V8DFmode). */
7497 unsigned char buffer[64];
7498 int len;
7500 /* Check that the host and target are sane. */
7501 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7502 return NULL_TREE;
7504 len = native_encode_expr (expr, buffer, sizeof (buffer));
7505 if (len == 0)
7506 return NULL_TREE;
7508 return native_interpret_expr (type, buffer, len);
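/* Illustrative sketch (editorial addition, not part of GCC): folding a
   VIEW_CONVERT_EXPR is compile-time type punning through a byte buffer --
   encode the source constant, then reinterpret the bytes as the
   destination type.  Host-side analogue for float -> uint32_t on an
   IEEE 754 host:  */
#include <stdint.h>
#include <string.h>

static uint32_t
view_convert_sketch (float f)
{
  unsigned char buffer[sizeof (float)];   /* Scratch buffer.  */
  uint32_t u;
  memcpy (buffer, &f, sizeof (float));    /* "native_encode_expr".  */
  memcpy (&u, buffer, sizeof (uint32_t)); /* "native_interpret_expr".  */
  return u;                               /* 1.0f yields 0x3f800000.  */
}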
7511 /* Build an expression for the address of T. Folds away INDIRECT_REF
7512 to avoid confusing the gimplify process. */
7514 tree
7515 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7517 /* The size of the object is not relevant when talking about its address. */
7518 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7519 t = TREE_OPERAND (t, 0);
7521 if (TREE_CODE (t) == INDIRECT_REF)
7523 t = TREE_OPERAND (t, 0);
7525 if (TREE_TYPE (t) != ptrtype)
7526 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7528 else if (TREE_CODE (t) == MEM_REF
7529 && integer_zerop (TREE_OPERAND (t, 1)))
7530 return TREE_OPERAND (t, 0);
7531 else if (TREE_CODE (t) == MEM_REF
7532 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7533 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7534 TREE_OPERAND (t, 0),
7535 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7536 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7538 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7540 if (TREE_TYPE (t) != ptrtype)
7541 t = fold_convert_loc (loc, ptrtype, t);
7543 else
7544 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7546 return t;
7549 /* Build an expression for the address of T. */
7551 tree
7552 build_fold_addr_expr_loc (location_t loc, tree t)
7554 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7556 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7559 /* Fold a unary expression of code CODE and type TYPE with operand
7560 OP0. Return the folded expression if folding is successful.
7561 Otherwise, return NULL_TREE. */
7563 tree
7564 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7566 tree tem;
7567 tree arg0;
7568 enum tree_code_class kind = TREE_CODE_CLASS (code);
7570 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7571 && TREE_CODE_LENGTH (code) == 1);
7573 arg0 = op0;
7574 if (arg0)
7576 if (CONVERT_EXPR_CODE_P (code)
7577 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7579 /* Don't use STRIP_NOPS, because signedness of argument type
7580 matters. */
7581 STRIP_SIGN_NOPS (arg0);
7583 else
7585 /* Strip any conversions that don't change the mode. This
7586 is safe for every expression, except for a comparison
7587 expression because its signedness is derived from its
7588 operands.
7590 Note that this is done as an internal manipulation within
7591 the constant folder, in order to find the simplest
7592 representation of the arguments so that their form can be
7593 studied. In any case, the appropriate type conversions
7594 should be put back in the tree that will get out of the
7595 constant folder. */
7596 STRIP_NOPS (arg0);
7599 if (CONSTANT_CLASS_P (arg0))
7601 tree tem = const_unop (code, type, arg0);
7602 if (tem)
7604 if (TREE_TYPE (tem) != type)
7605 tem = fold_convert_loc (loc, type, tem);
7606 return tem;
7611 tem = generic_simplify (loc, code, type, op0);
7612 if (tem)
7613 return tem;
7615 if (TREE_CODE_CLASS (code) == tcc_unary)
7617 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7618 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7619 fold_build1_loc (loc, code, type,
7620 fold_convert_loc (loc, TREE_TYPE (op0),
7621 TREE_OPERAND (arg0, 1))));
7622 else if (TREE_CODE (arg0) == COND_EXPR)
7624 tree arg01 = TREE_OPERAND (arg0, 1);
7625 tree arg02 = TREE_OPERAND (arg0, 2);
7626 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7627 arg01 = fold_build1_loc (loc, code, type,
7628 fold_convert_loc (loc,
7629 TREE_TYPE (op0), arg01));
7630 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7631 arg02 = fold_build1_loc (loc, code, type,
7632 fold_convert_loc (loc,
7633 TREE_TYPE (op0), arg02));
7634 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7635 arg01, arg02);
7637 /* If this was a conversion, and all we did was to move it
7638 inside the COND_EXPR, bring it back out. But leave it if
7639 it is a conversion from integer to integer and the
7640 result precision is no wider than a word since such a
7641 conversion is cheap and may be optimized away by combine,
7642 while it couldn't if it were outside the COND_EXPR. Then return
7643 so we don't get into an infinite recursion loop taking the
7644 conversion out and then back in. */
7646 if ((CONVERT_EXPR_CODE_P (code)
7647 || code == NON_LVALUE_EXPR)
7648 && TREE_CODE (tem) == COND_EXPR
7649 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7650 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7651 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7652 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7653 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7654 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7655 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7656 && (INTEGRAL_TYPE_P
7657 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7658 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7659 || flag_syntax_only))
7660 tem = build1_loc (loc, code, type,
7661 build3 (COND_EXPR,
7662 TREE_TYPE (TREE_OPERAND
7663 (TREE_OPERAND (tem, 1), 0)),
7664 TREE_OPERAND (tem, 0),
7665 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7666 TREE_OPERAND (TREE_OPERAND (tem, 2),
7667 0)));
7668 return tem;
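/* Illustrative sketch (editorial addition, not part of GCC; shown inline
   for reference): the distribution above moves a unary operation over a
   conditional, e.g. -(c ? x : y) into c ? -x : -y.  Standalone check:  */
static int
negate_cond_sketch (int c, int x, int y)
{
  int before = -(c ? x : y);
  int after = c ? -x : -y;      /* Distributed form.  */
  return before == after;       /* Always 1 (absent INT_MIN negation).  */
}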
7672 switch (code)
7674 case NON_LVALUE_EXPR:
7675 if (!maybe_lvalue_p (op0))
7676 return fold_convert_loc (loc, type, op0);
7677 return NULL_TREE;
7679 CASE_CONVERT:
7680 case FLOAT_EXPR:
7681 case FIX_TRUNC_EXPR:
7682 if (COMPARISON_CLASS_P (op0))
7684 /* If we have (type) (a CMP b) and type is an integral type, return a
7685 new expression involving the new type. Canonicalize
7686 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7687 non-integral type.
7688 Do not fold the result as that would not simplify further; also,
7689 folding it again would result in infinite recursion. */
7690 if (TREE_CODE (type) == BOOLEAN_TYPE)
7691 return build2_loc (loc, TREE_CODE (op0), type,
7692 TREE_OPERAND (op0, 0),
7693 TREE_OPERAND (op0, 1));
7694 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7695 && TREE_CODE (type) != VECTOR_TYPE)
7696 return build3_loc (loc, COND_EXPR, type, op0,
7697 constant_boolean_node (true, type),
7698 constant_boolean_node (false, type));
7701 /* Handle (T *)&A.B.C for A being of type T and B and C
7702 living at offset zero. This occurs frequently in
7703 C++ upcasting and then accessing the base. */
7704 if (TREE_CODE (op0) == ADDR_EXPR
7705 && POINTER_TYPE_P (type)
7706 && handled_component_p (TREE_OPERAND (op0, 0)))
7708 HOST_WIDE_INT bitsize, bitpos;
7709 tree offset;
7710 machine_mode mode;
7711 int unsignedp, volatilep;
7712 tree base = TREE_OPERAND (op0, 0);
7713 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7714 &mode, &unsignedp, &volatilep, false);
7715 /* If the reference was to a (constant) zero offset, we can use
7716 the address of the base if it has the same base type
7717 as the result type and the pointer type is unqualified. */
7718 if (! offset && bitpos == 0
7719 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7720 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7721 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7722 return fold_convert_loc (loc, type,
7723 build_fold_addr_expr_loc (loc, base));
7726 if (TREE_CODE (op0) == MODIFY_EXPR
7727 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7728 /* Detect assigning a bitfield. */
7729 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7730 && DECL_BIT_FIELD
7731 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7733 /* Don't leave an assignment inside a conversion
7734 unless assigning a bitfield. */
7735 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7736 /* First do the assignment, then return converted constant. */
7737 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7738 TREE_NO_WARNING (tem) = 1;
7739 TREE_USED (tem) = 1;
7740 return tem;
7743 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7744 constant (if x has signed type, the sign bit cannot be set
7745 in c). This folds extension into the BIT_AND_EXPR.
7746 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7747 very likely don't have maximal range for their precision and this
7748 transformation effectively doesn't preserve non-maximal ranges. */
7749 if (TREE_CODE (type) == INTEGER_TYPE
7750 && TREE_CODE (op0) == BIT_AND_EXPR
7751 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7753 tree and_expr = op0;
7754 tree and0 = TREE_OPERAND (and_expr, 0);
7755 tree and1 = TREE_OPERAND (and_expr, 1);
7756 int change = 0;
7758 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7759 || (TYPE_PRECISION (type)
7760 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7761 change = 1;
7762 else if (TYPE_PRECISION (TREE_TYPE (and1))
7763 <= HOST_BITS_PER_WIDE_INT
7764 && tree_fits_uhwi_p (and1))
7766 unsigned HOST_WIDE_INT cst;
7768 cst = tree_to_uhwi (and1);
7769 cst &= HOST_WIDE_INT_M1U
7770 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7771 change = (cst == 0);
7772 if (change
7773 && !flag_syntax_only
7774 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7775 == ZERO_EXTEND))
7777 tree uns = unsigned_type_for (TREE_TYPE (and0));
7778 and0 = fold_convert_loc (loc, uns, and0);
7779 and1 = fold_convert_loc (loc, uns, and1);
7782 if (change)
7784 tem = force_fit_type (type, wi::to_widest (and1), 0,
7785 TREE_OVERFLOW (and1));
7786 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7787 fold_convert_loc (loc, type, and0), tem);
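/* Illustrative sketch (editorial addition, not part of GCC; shown inline
   for reference): the transformation above pushes a widening conversion
   into BIT_AND_EXPR when the mask's sign bit is clear, e.g.
   (int) (us & 0x7f) equals ((int) us & 0x7f) for unsigned short US:  */
#include <stdint.h>

static int
convert_of_and_sketch (uint16_t us)
{
  int32_t before = (int32_t) (uint16_t) (us & 0x7f);
  int32_t after = (int32_t) us & 0x7f;  /* Folded form.  */
  return before == after;               /* Always 1.  */
}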
7791 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7792 when one of the new casts will fold away. Conservatively we assume
7793 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7794 if (POINTER_TYPE_P (type)
7795 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7796 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7797 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7798 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7799 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7801 tree arg00 = TREE_OPERAND (arg0, 0);
7802 tree arg01 = TREE_OPERAND (arg0, 1);
7804 return fold_build_pointer_plus_loc
7805 (loc, fold_convert_loc (loc, type, arg00), arg01);
7808 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7809 of the same precision, and X has an integer type not narrower than
7810 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7811 if (INTEGRAL_TYPE_P (type)
7812 && TREE_CODE (op0) == BIT_NOT_EXPR
7813 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7814 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7815 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7817 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7818 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7819 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7820 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7821 fold_convert_loc (loc, type, tem));
7824 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7825 type of X and Y (integer types only). */
7826 if (INTEGRAL_TYPE_P (type)
7827 && TREE_CODE (op0) == MULT_EXPR
7828 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7829 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7831 /* Be careful not to introduce new overflows. */
7832 tree mult_type;
7833 if (TYPE_OVERFLOW_WRAPS (type))
7834 mult_type = type;
7835 else
7836 mult_type = unsigned_type_for (type);
7838 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7840 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7841 fold_convert_loc (loc, mult_type,
7842 TREE_OPERAND (op0, 0)),
7843 fold_convert_loc (loc, mult_type,
7844 TREE_OPERAND (op0, 1)));
7845 return fold_convert_loc (loc, type, tem);
7849 return NULL_TREE;
7851 case VIEW_CONVERT_EXPR:
7852 if (TREE_CODE (op0) == MEM_REF)
7853 return fold_build2_loc (loc, MEM_REF, type,
7854 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7856 return NULL_TREE;
7858 case NEGATE_EXPR:
7859 tem = fold_negate_expr (loc, arg0);
7860 if (tem)
7861 return fold_convert_loc (loc, type, tem);
7862 return NULL_TREE;
7864 case ABS_EXPR:
7865 /* Convert fabs((double)float) into (double)fabsf(float). */
7866 if (TREE_CODE (arg0) == NOP_EXPR
7867 && TREE_CODE (type) == REAL_TYPE)
7869 tree targ0 = strip_float_extensions (arg0);
7870 if (targ0 != arg0)
7871 return fold_convert_loc (loc, type,
7872 fold_build1_loc (loc, ABS_EXPR,
7873 TREE_TYPE (targ0),
7874 targ0));
7876 return NULL_TREE;
7878 case BIT_NOT_EXPR:
7879 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7880 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7881 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7882 fold_convert_loc (loc, type,
7883 TREE_OPERAND (arg0, 0)))))
7884 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7885 fold_convert_loc (loc, type,
7886 TREE_OPERAND (arg0, 1)));
7887 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7888 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7889 fold_convert_loc (loc, type,
7890 TREE_OPERAND (arg0, 1)))))
7891 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7892 fold_convert_loc (loc, type,
7893 TREE_OPERAND (arg0, 0)), tem);
7895 return NULL_TREE;
7897 case TRUTH_NOT_EXPR:
7898 /* Note that the operand of this must be an int
7899 and its values must be 0 or 1.
7900 ("true" is a fixed value perhaps depending on the language,
7901 but we don't handle values other than 1 correctly yet.) */
7902 tem = fold_truth_not_expr (loc, arg0);
7903 if (!tem)
7904 return NULL_TREE;
7905 return fold_convert_loc (loc, type, tem);
7907 case INDIRECT_REF:
7908 /* Fold *&X to X if X is an lvalue. */
7909 if (TREE_CODE (op0) == ADDR_EXPR)
7911 tree op00 = TREE_OPERAND (op0, 0);
7912 if ((TREE_CODE (op00) == VAR_DECL
7913 || TREE_CODE (op00) == PARM_DECL
7914 || TREE_CODE (op00) == RESULT_DECL)
7915 && !TREE_READONLY (op00))
7916 return op00;
7918 return NULL_TREE;
7920 default:
7921 return NULL_TREE;
7922 } /* switch (code) */
7926 /* If the operation was a conversion do _not_ mark a resulting constant
7927 with TREE_OVERFLOW if the original constant was not. These conversions
7928 have implementation defined behavior and retaining the TREE_OVERFLOW
7929 flag here would confuse later passes such as VRP. */
7930 tree
7931 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7932 tree type, tree op0)
7934 tree res = fold_unary_loc (loc, code, type, op0);
7935 if (res
7936 && TREE_CODE (res) == INTEGER_CST
7937 && TREE_CODE (op0) == INTEGER_CST
7938 && CONVERT_EXPR_CODE_P (code))
7939 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7941 return res;
7944 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7945 operands OP0 and OP1. LOC is the location of the resulting expression.
7946 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7947 Return the folded expression if folding is successful. Otherwise,
7948 return NULL_TREE. */
7949 static tree
7950 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7951 tree arg0, tree arg1, tree op0, tree op1)
7953 tree tem;
7955 /* We only do these simplifications if we are optimizing. */
7956 if (!optimize)
7957 return NULL_TREE;
7959 /* Check for things like (A || B) && (A || C). We can convert this
7960 to A || (B && C). Note that either operator can be any of the four
7961 truth and/or operations and the transformation will still be
7962 valid. Also note that we only care about order for the
7963 ANDIF and ORIF operators. If B contains side effects, this
7964 might change the truth-value of A. */
7965 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7966 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7967 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7968 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7969 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7970 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7972 tree a00 = TREE_OPERAND (arg0, 0);
7973 tree a01 = TREE_OPERAND (arg0, 1);
7974 tree a10 = TREE_OPERAND (arg1, 0);
7975 tree a11 = TREE_OPERAND (arg1, 1);
7976 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7977 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7978 && (code == TRUTH_AND_EXPR
7979 || code == TRUTH_OR_EXPR));
7981 if (operand_equal_p (a00, a10, 0))
7982 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7983 fold_build2_loc (loc, code, type, a01, a11));
7984 else if (commutative && operand_equal_p (a00, a11, 0))
7985 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7986 fold_build2_loc (loc, code, type, a01, a10));
7987 else if (commutative && operand_equal_p (a01, a10, 0))
7988 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7989 fold_build2_loc (loc, code, type, a00, a11));
7991 /* This case is tricky because we must either have commutative
7992 operators or else A10 must not have side-effects. */
7994 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7995 && operand_equal_p (a01, a11, 0))
7996 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7997 fold_build2_loc (loc, code, type, a00, a10),
7998 a01);
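/* Illustrative sketch (editorial addition, not part of GCC; shown inline
   for reference): the factoring above relies on the Boolean distributive
   laws, e.g. (a || b) && (a || c) == a || (b && c), which is safe when
   the shared operand has no side effects:  */
static int
truth_factor_sketch (int a, int b, int c)
{
  int before = (a || b) && (a || c);
  int after = a || (b && c);    /* Factored form.  */
  return before == after;       /* Always 1.  */
}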
8001 /* See if we can build a range comparison. */
8002 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8003 return tem;
8005 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8006 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8008 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8009 if (tem)
8010 return fold_build2_loc (loc, code, type, tem, arg1);
8013 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8014 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8016 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8017 if (tem)
8018 return fold_build2_loc (loc, code, type, arg0, tem);
8021 /* Check for the possibility of merging component references. If our
8022 lhs is another similar operation, try to merge its rhs with our
8023 rhs. Then try to merge our lhs and rhs. */
8024 if (TREE_CODE (arg0) == code
8025 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8026 TREE_OPERAND (arg0, 1), arg1)))
8027 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8029 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8030 return tem;
8032 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8033 && (code == TRUTH_AND_EXPR
8034 || code == TRUTH_ANDIF_EXPR
8035 || code == TRUTH_OR_EXPR
8036 || code == TRUTH_ORIF_EXPR))
8038 enum tree_code ncode, icode;
8040 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8041 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8042 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8044 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8045 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8046 We don't want to pack more than two leaves into a non-IF AND/OR
8047 expression.
8048 If the tree code of the left-hand operand isn't an AND/OR-IF code
8049 and isn't equal to IF-CODE, then we don't want to add the right-hand
8050 operand. If the inner right-hand side of the left-hand operand has
8051 side effects, or isn't simple, then we can't add to it,
8052 as otherwise we might destroy the if-sequence. */
8053 if (TREE_CODE (arg0) == icode
8054 && simple_operand_p_2 (arg1)
8055 /* Needed for sequence points to handle trapping and
8056 side effects. */
8057 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8059 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8060 arg1);
8061 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8062 tem);
8064 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8065 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8066 else if (TREE_CODE (arg1) == icode
8067 && simple_operand_p_2 (arg0)
8068 /* Needed for sequence points to handle trapping and
8069 side effects. */
8070 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8072 tem = fold_build2_loc (loc, ncode, type,
8073 arg0, TREE_OPERAND (arg1, 0));
8074 return fold_build2_loc (loc, icode, type, tem,
8075 TREE_OPERAND (arg1, 1));
8077 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8078 into (A OR B).
8079 For sequence point consistency, we need to check for trapping
8080 and side effects. */
8081 else if (code == icode && simple_operand_p_2 (arg0)
8082 && simple_operand_p_2 (arg1))
8083 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8086 return NULL_TREE;
8089 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8090 by changing CODE to reduce the magnitude of constants involved in
8091 ARG0 of the comparison.
8092 Returns a canonicalized comparison tree if a simplification was
8093 possible, otherwise returns NULL_TREE.
8094 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8095 valid if signed overflow is undefined. */
8097 static tree
8098 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8099 tree arg0, tree arg1,
8100 bool *strict_overflow_p)
8102 enum tree_code code0 = TREE_CODE (arg0);
8103 tree t, cst0 = NULL_TREE;
8104 int sgn0;
8106 /* Match A +- CST code arg1. We can change this only if overflow
8107 is undefined. */
8108 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8109 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8110 /* In principle pointers also have undefined overflow behavior,
8111 but that causes problems elsewhere. */
8112 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8113 && (code0 == MINUS_EXPR
8114 || code0 == PLUS_EXPR)
8115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8116 return NULL_TREE;
8118 /* Identify the constant in arg0 and its sign. */
8119 cst0 = TREE_OPERAND (arg0, 1);
8120 sgn0 = tree_int_cst_sgn (cst0);
8122 /* Overflowed constants and zero will cause problems. */
8123 if (integer_zerop (cst0)
8124 || TREE_OVERFLOW (cst0))
8125 return NULL_TREE;
8127 /* See if we can reduce the magnitude of the constant in
8128 arg0 by changing the comparison code. */
8129 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8130 if (code == LT_EXPR
8131 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8132 code = LE_EXPR;
8133 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8134 else if (code == GT_EXPR
8135 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8136 code = GE_EXPR;
8137 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8138 else if (code == LE_EXPR
8139 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8140 code = LT_EXPR;
8141 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8142 else if (code == GE_EXPR
8143 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8144 code = GT_EXPR;
8145 else
8146 return NULL_TREE;
8147 *strict_overflow_p = true;
8149 /* Now build the constant reduced in magnitude. But not if that
8150 would produce one outside of its type's range. */
8151 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8152 && ((sgn0 == 1
8153 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8154 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8155 || (sgn0 == -1
8156 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8157 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8158 return NULL_TREE;
8160 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8161 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8162 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8163 t = fold_convert (TREE_TYPE (arg1), t);
8165 return fold_build2_loc (loc, code, type, t, arg1);
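/* Illustrative sketch (editorial addition, not part of GCC): with signed
   overflow taken as undefined, the canonicalization above rewrites e.g.
   a - 10 < b as a - 9 <= b, reducing the constant's magnitude.
   Standalone check, valid while neither subtraction wraps:  */
static int
canonicalize_cmp_sketch (int a, int b)
{
  int before = (a - 10) < b;
  int after = (a - 9) <= b;     /* Reduced-magnitude form.  */
  return before == after;       /* 1 whenever neither subtraction wraps.  */
}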
8168 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8169 overflow further. Try to decrease the magnitude of constants involved
8170 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8171 and put sole constants at the second argument position.
8172 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8174 static tree
8175 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8176 tree arg0, tree arg1)
8178 tree t;
8179 bool strict_overflow_p;
8180 const char * const warnmsg = G_("assuming signed overflow does not occur "
8181 "when reducing constant in comparison");
8183 /* Try canonicalization by simplifying arg0. */
8184 strict_overflow_p = false;
8185 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8186 &strict_overflow_p);
8187 if (t)
8189 if (strict_overflow_p)
8190 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8191 return t;
8194 /* Try canonicalization by simplifying arg1 using the swapped
8195 comparison. */
8196 code = swap_tree_comparison (code);
8197 strict_overflow_p = false;
8198 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8199 &strict_overflow_p);
8200 if (t && strict_overflow_p)
8201 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8202 return t;
8205 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8206 space. This is used to avoid issuing overflow warnings for
8207 expressions like &p->x which cannot wrap. */
8209 static bool
8210 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8212 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8213 return true;
8215 if (bitpos < 0)
8216 return true;
8218 wide_int wi_offset;
8219 int precision = TYPE_PRECISION (TREE_TYPE (base));
8220 if (offset == NULL_TREE)
8221 wi_offset = wi::zero (precision);
8222 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8223 return true;
8224 else
8225 wi_offset = offset;
8227 bool overflow;
8228 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8229 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8230 if (overflow)
8231 return true;
8233 if (!wi::fits_uhwi_p (total))
8234 return true;
8236 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8237 if (size <= 0)
8238 return true;
8240 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8241 array. */
8242 if (TREE_CODE (base) == ADDR_EXPR)
8244 HOST_WIDE_INT base_size;
8246 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8247 if (base_size > 0 && size < base_size)
8248 size = base_size;
8251 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8254 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8255 kind INTEGER_CST. This makes sure to properly sign-extend the
8256 constant. */
8258 static HOST_WIDE_INT
8259 size_low_cst (const_tree t)
8261 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8262 int prec = TYPE_PRECISION (TREE_TYPE (t));
8263 if (prec < HOST_BITS_PER_WIDE_INT)
8264 return sext_hwi (w, prec);
8265 return w;
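/* Illustrative sketch (editorial addition, not part of GCC): sext_hwi
   sign-extends a value from PREC bits, which the classic xor/subtract
   idiom below also achieves for 1 <= PREC <= 64:  */
#include <stdint.h>

static int64_t
sext_sketch (int64_t w, int prec)
{
  if (prec >= 64)
    return w;
  uint64_t m = (uint64_t) 1 << (prec - 1);          /* PREC's sign bit.  */
  uint64_t u = (uint64_t) w & (((uint64_t) 1 << prec) - 1);
  return (int64_t) ((u ^ m) - m);                   /* Extend the sign.  */
}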
8268 /* Subroutine of fold_binary. This routine performs all of the
8269 transformations that are common to the equality/inequality
8270 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8271 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8272 fold_binary should call fold_binary, not this function. Fold a
8273 comparison with tree code CODE and type TYPE with operands OP0 and
8274 OP1. Return the folded comparison or NULL_TREE. */
8276 static tree
8277 fold_comparison (location_t loc, enum tree_code code, tree type,
8278 tree op0, tree op1)
8280 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8281 tree arg0, arg1, tem;
8283 arg0 = op0;
8284 arg1 = op1;
8286 STRIP_SIGN_NOPS (arg0);
8287 STRIP_SIGN_NOPS (arg1);
8289 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8290 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8291 && (equality_code
8292 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8293 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8295 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8296 && TREE_CODE (arg1) == INTEGER_CST
8297 && !TREE_OVERFLOW (arg1))
8299 const enum tree_code
8300 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8301 tree const1 = TREE_OPERAND (arg0, 1);
8302 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8303 tree variable = TREE_OPERAND (arg0, 0);
8304 tree new_const = int_const_binop (reverse_op, const2, const1);
8306 /* If the constant operation overflowed this can be
8307 simplified as a comparison against INT_MAX/INT_MIN. */
8308 if (TREE_OVERFLOW (new_const)
8309 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8311 int const1_sgn = tree_int_cst_sgn (const1);
8312 enum tree_code code2 = code;
8314 /* Get the sign of the constant on the lhs if the
8315 operation were VARIABLE + CONST1. */
8316 if (TREE_CODE (arg0) == MINUS_EXPR)
8317 const1_sgn = -const1_sgn;
8319 /* The sign of the constant determines if we overflowed
8320 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8321 Canonicalize to the INT_MIN overflow by swapping the comparison
8322 if necessary. */
8323 if (const1_sgn == -1)
8324 code2 = swap_tree_comparison (code);
8326 /* We now can look at the canonicalized case
8327 VARIABLE + 1 CODE2 INT_MIN
8328 and decide on the result. */
8329 switch (code2)
8331 case EQ_EXPR:
8332 case LT_EXPR:
8333 case LE_EXPR:
8334 return
8335 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8337 case NE_EXPR:
8338 case GE_EXPR:
8339 case GT_EXPR:
8340 return
8341 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8343 default:
8344 gcc_unreachable ();
8347 else
8349 if (!equality_code)
8350 fold_overflow_warning ("assuming signed overflow does not occur "
8351 "when changing X +- C1 cmp C2 to "
8352 "X cmp C2 -+ C1",
8353 WARN_STRICT_OVERFLOW_COMPARISON);
8354 return fold_build2_loc (loc, code, type, variable, new_const);
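/* Illustrative sketch (editorial addition, not part of GCC; shown inline
   for reference): the transformation above turns e.g. x + 5 < 20 into
   x < 15 when signed overflow is undefined:  */
static int
move_const_sketch (int x)
{
  int before = x + 5 < 20;
  int after = x < 15;           /* Folded form.  */
  return before == after;       /* 1 whenever x + 5 does not wrap.  */
}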
8358 /* For comparisons of pointers we can decompose them into a compile-time
8359 comparison of the base objects and the offsets into the object.
8360 This requires at least one operand being an ADDR_EXPR or a
8361 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8362 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8363 && (TREE_CODE (arg0) == ADDR_EXPR
8364 || TREE_CODE (arg1) == ADDR_EXPR
8365 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8366 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8368 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8369 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8370 machine_mode mode;
8371 int volatilep, unsignedp;
8372 bool indirect_base0 = false, indirect_base1 = false;
8374 /* Get base and offset for the access. Strip ADDR_EXPR for
8375 get_inner_reference, but put it back by stripping INDIRECT_REF
8376 off the base object if possible. indirect_baseN will be true
8377 if baseN is not an address but refers to the object itself. */
8378 base0 = arg0;
8379 if (TREE_CODE (arg0) == ADDR_EXPR)
8381 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8382 &bitsize, &bitpos0, &offset0, &mode,
8383 &unsignedp, &volatilep, false);
8384 if (TREE_CODE (base0) == INDIRECT_REF)
8385 base0 = TREE_OPERAND (base0, 0);
8386 else
8387 indirect_base0 = true;
8389 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8391 base0 = TREE_OPERAND (arg0, 0);
8392 STRIP_SIGN_NOPS (base0);
8393 if (TREE_CODE (base0) == ADDR_EXPR)
8395 base0 = TREE_OPERAND (base0, 0);
8396 indirect_base0 = true;
8398 offset0 = TREE_OPERAND (arg0, 1);
8399 if (tree_fits_shwi_p (offset0))
8401 HOST_WIDE_INT off = size_low_cst (offset0);
8402 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8403 * BITS_PER_UNIT)
8404 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8406 bitpos0 = off * BITS_PER_UNIT;
8407 offset0 = NULL_TREE;
8412 base1 = arg1;
8413 if (TREE_CODE (arg1) == ADDR_EXPR)
8415 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8416 &bitsize, &bitpos1, &offset1, &mode,
8417 &unsignedp, &volatilep, false);
8418 if (TREE_CODE (base1) == INDIRECT_REF)
8419 base1 = TREE_OPERAND (base1, 0);
8420 else
8421 indirect_base1 = true;
8423 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8425 base1 = TREE_OPERAND (arg1, 0);
8426 STRIP_SIGN_NOPS (base1);
8427 if (TREE_CODE (base1) == ADDR_EXPR)
8429 base1 = TREE_OPERAND (base1, 0);
8430 indirect_base1 = true;
8432 offset1 = TREE_OPERAND (arg1, 1);
8433 if (tree_fits_shwi_p (offset1))
8435 HOST_WIDE_INT off = size_low_cst (offset1);
8436 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8437 * BITS_PER_UNIT)
8438 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8440 bitpos1 = off * BITS_PER_UNIT;
8441 offset1 = NULL_TREE;
8446 /* If we have equivalent bases we might be able to simplify. */
8447 if (indirect_base0 == indirect_base1
8448 && operand_equal_p (base0, base1,
8449 indirect_base0 ? OEP_ADDRESS_OF : 0))
8451 /* We can fold this expression to a constant if the non-constant
8452 offset parts are equal. */
8453 if ((offset0 == offset1
8454 || (offset0 && offset1
8455 && operand_equal_p (offset0, offset1, 0)))
8456 && (code == EQ_EXPR
8457 || code == NE_EXPR
8458 || (indirect_base0 && DECL_P (base0))
8459 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8462 if (!equality_code
8463 && bitpos0 != bitpos1
8464 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8465 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8466 fold_overflow_warning (("assuming pointer wraparound does not "
8467 "occur when comparing P +- C1 with "
8468 "P +- C2"),
8469 WARN_STRICT_OVERFLOW_CONDITIONAL);
8471 switch (code)
8473 case EQ_EXPR:
8474 return constant_boolean_node (bitpos0 == bitpos1, type);
8475 case NE_EXPR:
8476 return constant_boolean_node (bitpos0 != bitpos1, type);
8477 case LT_EXPR:
8478 return constant_boolean_node (bitpos0 < bitpos1, type);
8479 case LE_EXPR:
8480 return constant_boolean_node (bitpos0 <= bitpos1, type);
8481 case GE_EXPR:
8482 return constant_boolean_node (bitpos0 >= bitpos1, type);
8483 case GT_EXPR:
8484 return constant_boolean_node (bitpos0 > bitpos1, type);
8485 default:;
8488 /* We can simplify the comparison to a comparison of the variable
8489 offset parts if the constant offset parts are equal.
8490 Be careful to use signed sizetype here because otherwise we
8491 mess with array offsets in the wrong way. This is possible
8492 because pointer arithmetic is restricted to remain within an
8493 object and overflow on pointer differences is undefined as of
8494 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8495 else if (bitpos0 == bitpos1
8496 && (equality_code
8497 || (indirect_base0 && DECL_P (base0))
8498 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8500 /* By converting to signed sizetype we cover middle-end pointer
8501 arithmetic, which operates on unsigned pointer types of sizetype
8502 size, and ARRAY_REF offsets, which are properly sign- or
8503 zero-extended from their type in case it is narrower than
8504 sizetype. */
8505 if (offset0 == NULL_TREE)
8506 offset0 = build_int_cst (ssizetype, 0);
8507 else
8508 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8509 if (offset1 == NULL_TREE)
8510 offset1 = build_int_cst (ssizetype, 0);
8511 else
8512 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8514 if (!equality_code
8515 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8516 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8517 fold_overflow_warning (("assuming pointer wraparound does not "
8518 "occur when comparing P +- C1 with "
8519 "P +- C2"),
8520 WARN_STRICT_OVERFLOW_COMPARISON);
8522 return fold_build2_loc (loc, code, type, offset0, offset1);
8525 /* For equal offsets we can simplify to a comparison of the
8526 base addresses. */
8527 else if (bitpos0 == bitpos1
8528 && (indirect_base0
8529 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8530 && (indirect_base1
8531 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8532 && ((offset0 == offset1)
8533 || (offset0 && offset1
8534 && operand_equal_p (offset0, offset1, 0))))
8536 if (indirect_base0)
8537 base0 = build_fold_addr_expr_loc (loc, base0);
8538 if (indirect_base1)
8539 base1 = build_fold_addr_expr_loc (loc, base1);
8540 return fold_build2_loc (loc, code, type, base0, base1);
8544 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8545 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8546 the resulting offset is smaller in absolute value than the
8547 original one and has the same sign. */
8548 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8549 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8550 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8551 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8552 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8553 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8554 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8555 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8557 tree const1 = TREE_OPERAND (arg0, 1);
8558 tree const2 = TREE_OPERAND (arg1, 1);
8559 tree variable1 = TREE_OPERAND (arg0, 0);
8560 tree variable2 = TREE_OPERAND (arg1, 0);
8561 tree cst;
8562 const char * const warnmsg = G_("assuming signed overflow does not "
8563 "occur when combining constants around "
8564 "a comparison");
8566 /* Put the constant on the side where it doesn't overflow and is
8567 of lower absolute value and of the same sign as before. */
8568 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8569 ? MINUS_EXPR : PLUS_EXPR,
8570 const2, const1);
8571 if (!TREE_OVERFLOW (cst)
8572 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8573 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8575 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8576 return fold_build2_loc (loc, code, type,
8577 variable1,
8578 fold_build2_loc (loc, TREE_CODE (arg1),
8579 TREE_TYPE (arg1),
8580 variable2, cst));
8583 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8584 ? MINUS_EXPR : PLUS_EXPR,
8585 const1, const2);
8586 if (!TREE_OVERFLOW (cst)
8587 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8588 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8590 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8591 return fold_build2_loc (loc, code, type,
8592 fold_build2_loc (loc, TREE_CODE (arg0),
8593 TREE_TYPE (arg0),
8594 variable1, cst),
8595 variable2);
8599 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8600 if (tem)
8601 return tem;
8603 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8604 constant, we can simplify it. */
8605 if (TREE_CODE (arg1) == INTEGER_CST
8606 && (TREE_CODE (arg0) == MIN_EXPR
8607 || TREE_CODE (arg0) == MAX_EXPR)
8608 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8610 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8611 if (tem)
8612 return tem;
8615 /* If we are comparing an expression that just has comparisons
8616 of two integer values, arithmetic expressions of those comparisons,
8617 and constants, we can simplify it. There are only three cases
8618 to check: the two values can either be equal, the first can be
8619 greater, or the second can be greater. Fold the expression for
8620 those three values. Since each value must be 0 or 1, we have
8621 eight possibilities, each of which corresponds to the constant 0
8622 or 1 or one of the six possible comparisons.
8624 This handles common cases like (a > b) == 0 but also handles
8625 expressions like ((x > y) - (y > x)) > 0, which supposedly
8626 occur in macroized code. */
8628 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8630 tree cval1 = 0, cval2 = 0;
8631 int save_p = 0;
8633 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8634 /* Don't handle degenerate cases here; they should already
8635 have been handled anyway. */
8636 && cval1 != 0 && cval2 != 0
8637 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8638 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8639 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8640 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8641 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8642 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8643 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8645 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8646 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8648 /* We can't just pass T to eval_subst in case cval1 or cval2
8649 was the same as ARG1. */
8651 tree high_result
8652 = fold_build2_loc (loc, code, type,
8653 eval_subst (loc, arg0, cval1, maxval,
8654 cval2, minval),
8655 arg1);
8656 tree equal_result
8657 = fold_build2_loc (loc, code, type,
8658 eval_subst (loc, arg0, cval1, maxval,
8659 cval2, maxval),
8660 arg1);
8661 tree low_result
8662 = fold_build2_loc (loc, code, type,
8663 eval_subst (loc, arg0, cval1, minval,
8664 cval2, maxval),
8665 arg1);
8667 /* All three of these results should be 0 or 1. Confirm they are.
8668 Then use those values to select the proper code to use. */
8670 if (TREE_CODE (high_result) == INTEGER_CST
8671 && TREE_CODE (equal_result) == INTEGER_CST
8672 && TREE_CODE (low_result) == INTEGER_CST)
8674 /* Make a 3-bit mask with the high-order bit being the
8675 value for `>', the next for '=', and the low for '<'. */
8676 switch ((integer_onep (high_result) * 4)
8677 + (integer_onep (equal_result) * 2)
8678 + integer_onep (low_result))
8680 case 0:
8681 /* Always false. */
8682 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8683 case 1:
8684 code = LT_EXPR;
8685 break;
8686 case 2:
8687 code = EQ_EXPR;
8688 break;
8689 case 3:
8690 code = LE_EXPR;
8691 break;
8692 case 4:
8693 code = GT_EXPR;
8694 break;
8695 case 5:
8696 code = NE_EXPR;
8697 break;
8698 case 6:
8699 code = GE_EXPR;
8700 break;
8701 case 7:
8702 /* Always true. */
8703 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8706 if (save_p)
8708 tem = save_expr (build2 (code, type, cval1, cval2));
8709 SET_EXPR_LOCATION (tem, loc);
8710 return tem;
8712 return fold_build2_loc (loc, code, type, cval1, cval2);
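/* Illustrative sketch (editorial addition, not part of GCC; shown inline
   for reference): for (a > b) == 0 the three probes give high = 0,
   equal = 1, low = 1, i.e. mask 3, which selects LE_EXPR, so the whole
   expression folds to a <= b:  */
static int
threebit_mask_sketch (int a, int b)
{
  int before = (a > b) == 0;
  int after = a <= b;           /* Mask 3 selects LE_EXPR.  */
  return before == after;       /* Always 1.  */
}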
8717 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8718 into a single range test. */
8719 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8720 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8721 && TREE_CODE (arg1) == INTEGER_CST
8722 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8723 && !integer_zerop (TREE_OPERAND (arg0, 1))
8724 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8725 && !TREE_OVERFLOW (arg1))
8727 tem = fold_div_compare (loc, code, type, arg0, arg1);
8728 if (tem != NULL_TREE)
8729 return tem;
8732 return NULL_TREE;
8736 /* Subroutine of fold_binary. Optimize complex multiplications of the
8737 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8738 argument EXPR represents the expression "z" of type TYPE. */
8740 static tree
8741 fold_mult_zconjz (location_t loc, tree type, tree expr)
8743 tree itype = TREE_TYPE (type);
8744 tree rpart, ipart, tem;
8746 if (TREE_CODE (expr) == COMPLEX_EXPR)
8748 rpart = TREE_OPERAND (expr, 0);
8749 ipart = TREE_OPERAND (expr, 1);
8751 else if (TREE_CODE (expr) == COMPLEX_CST)
8753 rpart = TREE_REALPART (expr);
8754 ipart = TREE_IMAGPART (expr);
8756 else
8758 expr = save_expr (expr);
8759 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8760 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8763 rpart = save_expr (rpart);
8764 ipart = save_expr (ipart);
8765 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8766 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8767 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8768 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8769 build_zero_cst (itype));
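/* Illustrative sketch (editorial addition, not part of GCC): z * conj(z)
   has a zero imaginary part and real part re*re + im*im, which is
   exactly what the folding above builds.  Host-side check, equal for
   finite, non-NaN inputs:  */
#include <complex.h>

static int
zconjz_sketch (double _Complex z)
{
  double _Complex before = z * conj (z);
  double re = creal (z), im = cimag (z);
  double _Complex after = re * re + im * im;  /* Folded form.  */
  return before == after;       /* 1 for finite, non-NaN inputs.  */
}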
8773 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8774 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8776 static bool
8777 vec_cst_ctor_to_array (tree arg, tree *elts)
8779 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8781 if (TREE_CODE (arg) == VECTOR_CST)
8783 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8784 elts[i] = VECTOR_CST_ELT (arg, i);
8786 else if (TREE_CODE (arg) == CONSTRUCTOR)
8788 constructor_elt *elt;
8790 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8791 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8792 return false;
8793 else
8794 elts[i] = elt->value;
8796 else
8797 return false;
8798 for (; i < nelts; i++)
8799 elts[i]
8800 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8801 return true;
8804 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8805 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8806 NULL_TREE otherwise. */
8808 static tree
8809 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8811 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8812 tree *elts;
8813 bool need_ctor = false;
8815 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8816 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8817 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8818 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8819 return NULL_TREE;
8821 elts = XALLOCAVEC (tree, nelts * 3);
8822 if (!vec_cst_ctor_to_array (arg0, elts)
8823 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8824 return NULL_TREE;
8826 for (i = 0; i < nelts; i++)
8828 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8829 need_ctor = true;
8830 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8833 if (need_ctor)
8835 vec<constructor_elt, va_gc> *v;
8836 vec_alloc (v, nelts);
8837 for (i = 0; i < nelts; i++)
8838 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8839 return build_constructor (type, v);
8841 else
8842 return build_vector (type, &elts[2 * nelts]);
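/* Illustrative sketch (editorial addition, not part of GCC): the selector
   indexes the concatenation of both input vectors, exactly as
   elts[sel[i]] does above.  Host-side analogue for four-element int
   vectors:  */
static void
vec_perm_sketch (const int *arg0, const int *arg1,
                 const unsigned char *sel, int *out)
{
  int elts[8];                  /* arg0 followed by arg1.  */
  for (int i = 0; i < 4; i++)
    {
      elts[i] = arg0[i];
      elts[i + 4] = arg1[i];
    }
  for (int i = 0; i < 4; i++)
    out[i] = elts[sel[i] & 7];  /* Indices 0-3 pick arg0, 4-7 pick arg1.  */
}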
8845 /* Try to fold a pointer difference of type TYPE between two address expressions of
8846 array references AREF0 and AREF1 using location LOC. Return a
8847 simplified expression for the difference or NULL_TREE. */
8849 static tree
8850 fold_addr_of_array_ref_difference (location_t loc, tree type,
8851 tree aref0, tree aref1)
8853 tree base0 = TREE_OPERAND (aref0, 0);
8854 tree base1 = TREE_OPERAND (aref1, 0);
8855 tree base_offset = build_int_cst (type, 0);
8857 /* If the bases are array references as well, recurse. If the bases
8858 are pointer indirections compute the difference of the pointers.
8859 If the bases are equal, we are set. */
8860 if ((TREE_CODE (base0) == ARRAY_REF
8861 && TREE_CODE (base1) == ARRAY_REF
8862 && (base_offset
8863 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8864 || (INDIRECT_REF_P (base0)
8865 && INDIRECT_REF_P (base1)
8866 && (base_offset
8867 = fold_binary_loc (loc, MINUS_EXPR, type,
8868 fold_convert (type, TREE_OPERAND (base0, 0)),
8869 fold_convert (type,
8870 TREE_OPERAND (base1, 0)))))
8871 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8873 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8874 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8875 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8876 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8877 return fold_build2_loc (loc, PLUS_EXPR, type,
8878 base_offset,
8879 fold_build2_loc (loc, MULT_EXPR, type,
8880 diff, esz));
8882 return NULL_TREE;
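/* Illustrative sketch (editorial addition, not part of GCC): for a common
   base the folding above yields (i - j) * sizeof (element) as the byte
   difference, matching the C-level identity below, where the pointer
   difference already counts elements:  */
static int
array_ref_diff_sketch (double *a, long i, long j)
{
  long before = (long) (&a[i] - &a[j]); /* Difference in elements.  */
  long after = i - j;                   /* Folded form.  */
  return before == after;               /* Always 1 within one array.  */
}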
8885 /* If the real or vector real constant CST of type TYPE has an exact
8886 inverse, return it, else return NULL_TREE. */
8888 tree
8889 exact_inverse (tree type, tree cst)
8891 REAL_VALUE_TYPE r;
8892 tree unit_type, *elts;
8893 machine_mode mode;
8894 unsigned vec_nelts, i;
8896 switch (TREE_CODE (cst))
8898 case REAL_CST:
8899 r = TREE_REAL_CST (cst);
8901 if (exact_real_inverse (TYPE_MODE (type), &r))
8902 return build_real (type, r);
8904 return NULL_TREE;
8906 case VECTOR_CST:
8907 vec_nelts = VECTOR_CST_NELTS (cst);
8908 elts = XALLOCAVEC (tree, vec_nelts);
8909 unit_type = TREE_TYPE (type);
8910 mode = TYPE_MODE (unit_type);
8912 for (i = 0; i < vec_nelts; i++)
8914 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8915 if (!exact_real_inverse (mode, &r))
8916 return NULL_TREE;
8917 elts[i] = build_real (unit_type, r);
8920 return build_vector (type, elts);
8922 default:
8923 return NULL_TREE;
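/* Illustrative sketch (editorial addition, not part of GCC):
   exact_real_inverse only succeeds for powers of two (e.g. 0.25 -> 4.0),
   since any other significand makes 1/x inexact.  A host-side
   power-of-two test using frexp (range checks omitted):  */
#include <math.h>

static int
has_exact_inverse_sketch (double x)
{
  int exp;
  double m = frexp (x, &exp);   /* x == m * 2^exp with 0.5 <= |m| < 1.  */
  return x != 0.0 && (m == 0.5 || m == -0.5);
}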
8927 /* Mask out the tz least significant bits of X of type TYPE where
8928 tz is the number of trailing zeroes in Y. */
8929 static wide_int
8930 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8932 int tz = wi::ctz (y);
8933 if (tz > 0)
8934 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8935 return x;
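/* Illustrative sketch (editorial addition, not part of GCC): if Y ends in
   TZ zero bits, then X & Y can never have those low bits set, so they
   may be cleared from X up front.  Host-side analogue for 32-bit
   operands (__builtin_ctz is undefined for zero, so bail out
   conservatively in that case):  */
#include <stdint.h>

static uint32_t
mask_with_tz_sketch (uint32_t x, uint32_t y)
{
  if (y == 0)
    return x;                    /* Avoid __builtin_ctz (0).  */
  int tz = __builtin_ctz (y);    /* Trailing zeroes of Y (GCC builtin).  */
  return x & ~((1u << tz) - 1);  /* Clear the TZ low bits of X.  */
}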
8938 /* Return true when T is an address and is known to be nonzero.
8939 For floating point we further ensure that T is not denormal.
8940 Similar logic is present in nonzero_address in rtlanal.h.
8942 If the return value is based on the assumption that signed overflow
8943 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8944 change *STRICT_OVERFLOW_P. */
8946 static bool
8947 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8949 tree type = TREE_TYPE (t);
8950 enum tree_code code;
8952 /* Doing something useful for floating point would need more work. */
8953 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8954 return false;
8956 code = TREE_CODE (t);
8957 switch (TREE_CODE_CLASS (code))
8959 case tcc_unary:
8960 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8961 strict_overflow_p);
8962 case tcc_binary:
8963 case tcc_comparison:
8964 return tree_binary_nonzero_warnv_p (code, type,
8965 TREE_OPERAND (t, 0),
8966 TREE_OPERAND (t, 1),
8967 strict_overflow_p);
8968 case tcc_constant:
8969 case tcc_declaration:
8970 case tcc_reference:
8971 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8973 default:
8974 break;
8977 switch (code)
8979 case TRUTH_NOT_EXPR:
8980 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8981 strict_overflow_p);
8983 case TRUTH_AND_EXPR:
8984 case TRUTH_OR_EXPR:
8985 case TRUTH_XOR_EXPR:
8986 return tree_binary_nonzero_warnv_p (code, type,
8987 TREE_OPERAND (t, 0),
8988 TREE_OPERAND (t, 1),
8989 strict_overflow_p);
8991 case COND_EXPR:
8992 case CONSTRUCTOR:
8993 case OBJ_TYPE_REF:
8994 case ASSERT_EXPR:
8995 case ADDR_EXPR:
8996 case WITH_SIZE_EXPR:
8997 case SSA_NAME:
8998 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9000 case COMPOUND_EXPR:
9001 case MODIFY_EXPR:
9002 case BIND_EXPR:
9003 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9004 strict_overflow_p);
9006 case SAVE_EXPR:
9007 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9008 strict_overflow_p);
9010 case CALL_EXPR:
9012 tree fndecl = get_callee_fndecl (t);
9013 if (!fndecl) return false;
9014 if (flag_delete_null_pointer_checks && !flag_check_new
9015 && DECL_IS_OPERATOR_NEW (fndecl)
9016 && !TREE_NOTHROW (fndecl))
9017 return true;
9018 if (flag_delete_null_pointer_checks
9019 && lookup_attribute ("returns_nonnull",
9020 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9021 return true;
9022 return alloca_call_p (t);
9025 default:
9026 break;
9028 return false;
9031 /* Return true when T is an address and is known to be nonzero.
9032 Handle warnings about undefined signed overflow. */
9034 static bool
9035 tree_expr_nonzero_p (tree t)
9037 bool ret, strict_overflow_p;
9039 strict_overflow_p = false;
9040 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9041 if (strict_overflow_p)
9042 fold_overflow_warning (("assuming signed overflow does not occur when "
9043 "determining that expression is always "
9044 "non-zero"),
9045 WARN_STRICT_OVERFLOW_MISC);
9046 return ret;
9049 /* Fold a binary expression of code CODE and type TYPE with operands
9050 OP0 and OP1. LOC is the location of the resulting expression.
9051 Return the folded expression if folding is successful. Otherwise,
9052 return NULL_TREE. */
9054 tree
9055 fold_binary_loc (location_t loc,
9056 enum tree_code code, tree type, tree op0, tree op1)
9058 enum tree_code_class kind = TREE_CODE_CLASS (code);
9059 tree arg0, arg1, tem;
9060 tree t1 = NULL_TREE;
9061 bool strict_overflow_p;
9062 unsigned int prec;
9064 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9065 && TREE_CODE_LENGTH (code) == 2
9066 && op0 != NULL_TREE
9067 && op1 != NULL_TREE);
9069 arg0 = op0;
9070 arg1 = op1;
9072 /* Strip any conversions that don't change the mode. This is
9073 safe for every expression, except for a comparison expression
9074 because its signedness is derived from its operands. So, in
9075 the latter case, only strip conversions that don't change the
9076 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9077 preserved.
9079 Note that this is done as an internal manipulation within the
9080 constant folder, in order to find the simplest representation
9081 of the arguments so that their form can be studied. In any
9082 case, the appropriate type conversions should be put back in
9083 the tree that will get out of the constant folder. */
9085 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9087 STRIP_SIGN_NOPS (arg0);
9088 STRIP_SIGN_NOPS (arg1);
9090 else
9092 STRIP_NOPS (arg0);
9093 STRIP_NOPS (arg1);
9096 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9097 constant but we can't do arithmetic on them. */
9098 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9100 tem = const_binop (code, type, arg0, arg1);
9101 if (tem != NULL_TREE)
9103 if (TREE_TYPE (tem) != type)
9104 tem = fold_convert_loc (loc, type, tem);
9105 return tem;
9109 /* If this is a commutative operation, and ARG0 is a constant, move it
9110 to ARG1 to reduce the number of tests below. */
9111 if (commutative_tree_code (code)
9112 && tree_swap_operands_p (arg0, arg1, true))
9113 return fold_build2_loc (loc, code, type, op1, op0);
9115 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9116 to ARG1 to reduce the number of tests below. */
9117 if (kind == tcc_comparison
9118 && tree_swap_operands_p (arg0, arg1, true))
9119 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9121 tem = generic_simplify (loc, code, type, op0, op1);
9122 if (tem)
9123 return tem;
9125 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9127 First check for cases where an arithmetic operation is applied to a
9128 compound, conditional, or comparison operation. Push the arithmetic
9129 operation inside the compound or conditional to see if any folding
9130 can then be done. Convert comparison to conditional for this purpose.
9131 This also optimizes non-constant cases that used to be done in
9132 expand_expr.
9134 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9135 one of the operands is a comparison and the other is a comparison, a
9136 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9137 code below would make the expression more complex. Change it to a
9138 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9139 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9141 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9142 || code == EQ_EXPR || code == NE_EXPR)
9143 && TREE_CODE (type) != VECTOR_TYPE
9144 && ((truth_value_p (TREE_CODE (arg0))
9145 && (truth_value_p (TREE_CODE (arg1))
9146 || (TREE_CODE (arg1) == BIT_AND_EXPR
9147 && integer_onep (TREE_OPERAND (arg1, 1)))))
9148 || (truth_value_p (TREE_CODE (arg1))
9149 && (truth_value_p (TREE_CODE (arg0))
9150 || (TREE_CODE (arg0) == BIT_AND_EXPR
9151 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9153 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9154 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9155 : TRUTH_XOR_EXPR,
9156 boolean_type_node,
9157 fold_convert_loc (loc, boolean_type_node, arg0),
9158 fold_convert_loc (loc, boolean_type_node, arg1));
9160 if (code == EQ_EXPR)
9161 tem = invert_truthvalue_loc (loc, tem);
9163 return fold_convert_loc (loc, type, tem);
9166 if (TREE_CODE_CLASS (code) == tcc_binary
9167 || TREE_CODE_CLASS (code) == tcc_comparison)
9169 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9171 tem = fold_build2_loc (loc, code, type,
9172 fold_convert_loc (loc, TREE_TYPE (op0),
9173 TREE_OPERAND (arg0, 1)), op1);
9174 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9175 tem);
9177 if (TREE_CODE (arg1) == COMPOUND_EXPR
9178 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9180 tem = fold_build2_loc (loc, code, type, op0,
9181 fold_convert_loc (loc, TREE_TYPE (op1),
9182 TREE_OPERAND (arg1, 1)));
9183 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9184 tem);
9187 if (TREE_CODE (arg0) == COND_EXPR
9188 || TREE_CODE (arg0) == VEC_COND_EXPR
9189 || COMPARISON_CLASS_P (arg0))
9191 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9192 arg0, arg1,
9193 /*cond_first_p=*/1);
9194 if (tem != NULL_TREE)
9195 return tem;
9198 if (TREE_CODE (arg1) == COND_EXPR
9199 || TREE_CODE (arg1) == VEC_COND_EXPR
9200 || COMPARISON_CLASS_P (arg1))
9202 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9203 arg1, arg0,
9204 /*cond_first_p=*/0);
9205 if (tem != NULL_TREE)
9206 return tem;
9210 switch (code)
9212 case MEM_REF:
9213 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9214 if (TREE_CODE (arg0) == ADDR_EXPR
9215 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9217 tree iref = TREE_OPERAND (arg0, 0);
9218 return fold_build2 (MEM_REF, type,
9219 TREE_OPERAND (iref, 0),
9220 int_const_binop (PLUS_EXPR, arg1,
9221 TREE_OPERAND (iref, 1)));
9224 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9225 if (TREE_CODE (arg0) == ADDR_EXPR
9226 && handled_component_p (TREE_OPERAND (arg0, 0)))
9228 tree base;
9229 HOST_WIDE_INT coffset;
9230 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9231 &coffset);
9232 if (!base)
9233 return NULL_TREE;
9234 return fold_build2 (MEM_REF, type,
9235 build_fold_addr_expr (base),
9236 int_const_binop (PLUS_EXPR, arg1,
9237 size_int (coffset)));
9240 return NULL_TREE;
9242 case POINTER_PLUS_EXPR:
9243 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9244 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9245 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9246 return fold_convert_loc (loc, type,
9247 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9248 fold_convert_loc (loc, sizetype,
9249 arg1),
9250 fold_convert_loc (loc, sizetype,
9251 arg0)));
9253 return NULL_TREE;
9255 case PLUS_EXPR:
9256 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9258 /* X + (X / CST) * -CST is X % CST. */
9259 if (TREE_CODE (arg1) == MULT_EXPR
9260 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9261 && operand_equal_p (arg0,
9262 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9264 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9265 tree cst1 = TREE_OPERAND (arg1, 1);
9266 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9267 cst1, cst0);
9268 if (sum && integer_zerop (sum))
9269 return fold_convert_loc (loc, type,
9270 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9271 TREE_TYPE (arg0), arg0,
9272 cst0));
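/* E.g. X + (X / 4) * -4 folds to X % 4: here cst0 == 4, cst1 == -4,
   and cst0 + cst1 == 0.  Illustrative constants.  */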
9276 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9277 one. Make sure the type is not saturating and has the signedness of
9278 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9279 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9280 if ((TREE_CODE (arg0) == MULT_EXPR
9281 || TREE_CODE (arg1) == MULT_EXPR)
9282 && !TYPE_SATURATING (type)
9283 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9284 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9285 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9287 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9288 if (tem)
9289 return tem;
9292 if (! FLOAT_TYPE_P (type))
9294 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9295 (plus (plus (mult) (mult)) (foo)) so that we can
9296 take advantage of the factoring cases below. */
9297 if (ANY_INTEGRAL_TYPE_P (type)
9298 && TYPE_OVERFLOW_WRAPS (type)
9299 && (((TREE_CODE (arg0) == PLUS_EXPR
9300 || TREE_CODE (arg0) == MINUS_EXPR)
9301 && TREE_CODE (arg1) == MULT_EXPR)
9302 || ((TREE_CODE (arg1) == PLUS_EXPR
9303 || TREE_CODE (arg1) == MINUS_EXPR)
9304 && TREE_CODE (arg0) == MULT_EXPR)))
9306 tree parg0, parg1, parg, marg;
9307 enum tree_code pcode;
9309 if (TREE_CODE (arg1) == MULT_EXPR)
9310 parg = arg0, marg = arg1;
9311 else
9312 parg = arg1, marg = arg0;
9313 pcode = TREE_CODE (parg);
9314 parg0 = TREE_OPERAND (parg, 0);
9315 parg1 = TREE_OPERAND (parg, 1);
9316 STRIP_NOPS (parg0);
9317 STRIP_NOPS (parg1);
9319 if (TREE_CODE (parg0) == MULT_EXPR
9320 && TREE_CODE (parg1) != MULT_EXPR)
9321 return fold_build2_loc (loc, pcode, type,
9322 fold_build2_loc (loc, PLUS_EXPR, type,
9323 fold_convert_loc (loc, type,
9324 parg0),
9325 fold_convert_loc (loc, type,
9326 marg)),
9327 fold_convert_loc (loc, type, parg1));
9328 if (TREE_CODE (parg0) != MULT_EXPR
9329 && TREE_CODE (parg1) == MULT_EXPR)
9330 return
9331 fold_build2_loc (loc, PLUS_EXPR, type,
9332 fold_convert_loc (loc, type, parg0),
9333 fold_build2_loc (loc, pcode, type,
9334 fold_convert_loc (loc, type, marg),
9335 fold_convert_loc (loc, type,
9336 parg1)));
9339 else
9341 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9342 to __complex__ ( x, y ). This is not the same for SNaNs or
9343 if signed zeros are involved. */
9344 if (!HONOR_SNANS (element_mode (arg0))
9345 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9346 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9348 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9349 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9350 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9351 bool arg0rz = false, arg0iz = false;
9352 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9353 || (arg0i && (arg0iz = real_zerop (arg0i))))
9355 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9356 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9357 if (arg0rz && arg1i && real_zerop (arg1i))
9359 tree rp = arg1r ? arg1r
9360 : build1 (REALPART_EXPR, rtype, arg1);
9361 tree ip = arg0i ? arg0i
9362 : build1 (IMAGPART_EXPR, rtype, arg0);
9363 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9365 else if (arg0iz && arg1r && real_zerop (arg1r))
9367 tree rp = arg0r ? arg0r
9368 : build1 (REALPART_EXPR, rtype, arg0);
9369 tree ip = arg1i ? arg1i
9370 : build1 (IMAGPART_EXPR, rtype, arg1);
9371 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
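/* The signed-zero guard above matters: e.g. if arg0's imaginary part
   is +0.0 and y == -0.0, the true sum 0.0 + -0.0 is +0.0, while the
   folded form would keep -0.0.  Illustrative values.  */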
9376 if (flag_unsafe_math_optimizations
9377 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9378 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9379 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9380 return tem;
9382 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9383 We associate floats only if the user has specified
9384 -fassociative-math. */
9385 if (flag_associative_math
9386 && TREE_CODE (arg1) == PLUS_EXPR
9387 && TREE_CODE (arg0) != MULT_EXPR)
9389 tree tree10 = TREE_OPERAND (arg1, 0);
9390 tree tree11 = TREE_OPERAND (arg1, 1);
9391 if (TREE_CODE (tree11) == MULT_EXPR
9392 && TREE_CODE (tree10) == MULT_EXPR)
9394 tree tree0;
9395 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9396 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9399 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9400 We associate floats only if the user has specified
9401 -fassociative-math. */
9402 if (flag_associative_math
9403 && TREE_CODE (arg0) == PLUS_EXPR
9404 && TREE_CODE (arg1) != MULT_EXPR)
9406 tree tree00 = TREE_OPERAND (arg0, 0);
9407 tree tree01 = TREE_OPERAND (arg0, 1);
9408 if (TREE_CODE (tree01) == MULT_EXPR
9409 && TREE_CODE (tree00) == MULT_EXPR)
9411 tree tree0;
9412 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9413 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9418 bit_rotate:
9419 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9420 is a rotate of A by C1 bits. */
9421 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9422 is a rotate of A by B bits. */
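/* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) is recognized
   as a left-rotate of A by 3, since 3 + 29 == 32.  Illustrative
   width and counts.  */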
9424 enum tree_code code0, code1;
9425 tree rtype;
9426 code0 = TREE_CODE (arg0);
9427 code1 = TREE_CODE (arg1);
9428 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9429 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9430 && operand_equal_p (TREE_OPERAND (arg0, 0),
9431 TREE_OPERAND (arg1, 0), 0)
9432 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9433 TYPE_UNSIGNED (rtype))
9434 /* Only create rotates in complete modes. Other cases are not
9435 expanded properly. */
9436 && (element_precision (rtype)
9437 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9439 tree tree01, tree11;
9440 enum tree_code code01, code11;
9442 tree01 = TREE_OPERAND (arg0, 1);
9443 tree11 = TREE_OPERAND (arg1, 1);
9444 STRIP_NOPS (tree01);
9445 STRIP_NOPS (tree11);
9446 code01 = TREE_CODE (tree01);
9447 code11 = TREE_CODE (tree11);
9448 if (code01 == INTEGER_CST
9449 && code11 == INTEGER_CST
9450 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9451 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9453 tem = build2_loc (loc, LROTATE_EXPR,
9454 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9455 TREE_OPERAND (arg0, 0),
9456 code0 == LSHIFT_EXPR
9457 ? TREE_OPERAND (arg0, 1)
9458 : TREE_OPERAND (arg1, 1));
9459 return fold_convert_loc (loc, type, tem);
9461 else if (code11 == MINUS_EXPR)
9463 tree tree110, tree111;
9464 tree110 = TREE_OPERAND (tree11, 0);
9465 tree111 = TREE_OPERAND (tree11, 1);
9466 STRIP_NOPS (tree110);
9467 STRIP_NOPS (tree111);
9468 if (TREE_CODE (tree110) == INTEGER_CST
9469 && 0 == compare_tree_int (tree110,
9470 element_precision
9471 (TREE_TYPE (TREE_OPERAND
9472 (arg0, 0))))
9473 && operand_equal_p (tree01, tree111, 0))
9474 return
9475 fold_convert_loc (loc, type,
9476 build2 ((code0 == LSHIFT_EXPR
9477 ? LROTATE_EXPR
9478 : RROTATE_EXPR),
9479 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9480 TREE_OPERAND (arg0, 0),
9481 TREE_OPERAND (arg0, 1)));
9483 else if (code01 == MINUS_EXPR)
9485 tree tree010, tree011;
9486 tree010 = TREE_OPERAND (tree01, 0);
9487 tree011 = TREE_OPERAND (tree01, 1);
9488 STRIP_NOPS (tree010);
9489 STRIP_NOPS (tree011);
9490 if (TREE_CODE (tree010) == INTEGER_CST
9491 && 0 == compare_tree_int (tree010,
9492 element_precision
9493 (TREE_TYPE (TREE_OPERAND
9494 (arg0, 0))))
9495 && operand_equal_p (tree11, tree011, 0))
9496 return fold_convert_loc
9497 (loc, type,
9498 build2 ((code0 != LSHIFT_EXPR
9499 ? LROTATE_EXPR
9500 : RROTATE_EXPR),
9501 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9502 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9507 associate:
9508 /* In most languages, we can't associate operations on floats through
9509 parentheses. Rather than remember where the parentheses were, we
9510 don't associate floats at all, unless the user has specified
9511 -fassociative-math.
9512 And, we need to make sure the type is not saturating. */
9514 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9515 && !TYPE_SATURATING (type))
9517 tree var0, con0, lit0, minus_lit0;
9518 tree var1, con1, lit1, minus_lit1;
9519 tree atype = type;
9520 bool ok = true;
9522 /* Split both trees into variables, constants, and literals. Then
9523 associate each group together, the constants with literals,
9524 then the result with variables. This increases the chances of
9525 literals being recombined later and of generating relocatable
9526 expressions for the sum of a constant and literal. */
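/* E.g. for (X + 1) + (Y + 2), split_tree yields variables X and Y
   and literals 1 and 2, which recombine below as (X + Y) + 3.
   Illustrative operands.  */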
9527 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9528 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9529 code == MINUS_EXPR);
9531 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9532 if (code == MINUS_EXPR)
9533 code = PLUS_EXPR;
9535 /* With undefined overflow prefer doing association in a type
9536 which wraps on overflow, if that is one of the operand types. */
9537 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9538 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9540 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9541 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9542 atype = TREE_TYPE (arg0);
9543 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9544 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9545 atype = TREE_TYPE (arg1);
9546 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9549 /* With undefined overflow we can only associate constants with one
9550 variable, and constants whose association doesn't overflow. */
9551 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9552 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9554 if (var0 && var1)
9556 tree tmp0 = var0;
9557 tree tmp1 = var1;
9558 bool one_neg = false;
9560 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9562 tmp0 = TREE_OPERAND (tmp0, 0);
9563 one_neg = !one_neg;
9565 if (CONVERT_EXPR_P (tmp0)
9566 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9567 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9568 <= TYPE_PRECISION (atype)))
9569 tmp0 = TREE_OPERAND (tmp0, 0);
9570 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9572 tmp1 = TREE_OPERAND (tmp1, 0);
9573 one_neg = !one_neg;
9575 if (CONVERT_EXPR_P (tmp1)
9576 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9577 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9578 <= TYPE_PRECISION (atype)))
9579 tmp1 = TREE_OPERAND (tmp1, 0);
9580 /* The only case we can still associate with two variables
9581 is if they cancel out. */
9582 if (!one_neg
9583 || !operand_equal_p (tmp0, tmp1, 0))
9584 ok = false;
9588 /* Only do something if we found more than two objects. Otherwise,
9589 nothing has changed and we risk infinite recursion. */
9590 if (ok
9591 && (2 < ((var0 != 0) + (var1 != 0)
9592 + (con0 != 0) + (con1 != 0)
9593 + (lit0 != 0) + (lit1 != 0)
9594 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9596 bool any_overflows = false;
9597 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9598 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9599 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9600 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9601 var0 = associate_trees (loc, var0, var1, code, atype);
9602 con0 = associate_trees (loc, con0, con1, code, atype);
9603 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9604 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9605 code, atype);
9607 /* Preserve the MINUS_EXPR if the negative part of the literal is
9608 greater than the positive part. Otherwise, the multiplicative
9609 folding code (i.e. extract_muldiv) may be fooled in case
9610 unsigned constants are subtracted, like in the following
9611 example: ((X*2 + 4) - 8U)/2. */
9612 if (minus_lit0 && lit0)
9614 if (TREE_CODE (lit0) == INTEGER_CST
9615 && TREE_CODE (minus_lit0) == INTEGER_CST
9616 && tree_int_cst_lt (lit0, minus_lit0))
9618 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9619 MINUS_EXPR, atype);
9620 lit0 = 0;
9622 else
9624 lit0 = associate_trees (loc, lit0, minus_lit0,
9625 MINUS_EXPR, atype);
9626 minus_lit0 = 0;
9630 /* Don't introduce overflows through reassociation. */
9631 if (!any_overflows
9632 && ((lit0 && TREE_OVERFLOW_P (lit0))
9633 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9634 return NULL_TREE;
9636 if (minus_lit0)
9638 if (con0 == 0)
9639 return
9640 fold_convert_loc (loc, type,
9641 associate_trees (loc, var0, minus_lit0,
9642 MINUS_EXPR, atype));
9643 else
9645 con0 = associate_trees (loc, con0, minus_lit0,
9646 MINUS_EXPR, atype);
9647 return
9648 fold_convert_loc (loc, type,
9649 associate_trees (loc, var0, con0,
9650 PLUS_EXPR, atype));
9654 con0 = associate_trees (loc, con0, lit0, code, atype);
9655 return
9656 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9657 code, atype));
9661 return NULL_TREE;
9663 case MINUS_EXPR:
9664 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9665 if (TREE_CODE (arg0) == NEGATE_EXPR
9666 && negate_expr_p (arg1)
9667 && reorder_operands_p (arg0, arg1))
9668 return fold_build2_loc (loc, MINUS_EXPR, type,
9669 fold_convert_loc (loc, type,
9670 negate_expr (arg1)),
9671 fold_convert_loc (loc, type,
9672 TREE_OPERAND (arg0, 0)));
9674 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9675 __complex__ ( x, -y ). This is not the same for SNaNs or if
9676 signed zeros are involved. */
9677 if (!HONOR_SNANS (element_mode (arg0))
9678 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9679 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9681 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9682 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9683 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9684 bool arg0rz = false, arg0iz = false;
9685 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9686 || (arg0i && (arg0iz = real_zerop (arg0i))))
9688 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9689 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9690 if (arg0rz && arg1i && real_zerop (arg1i))
9692 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9693 arg1r ? arg1r
9694 : build1 (REALPART_EXPR, rtype, arg1));
9695 tree ip = arg0i ? arg0i
9696 : build1 (IMAGPART_EXPR, rtype, arg0);
9697 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9699 else if (arg0iz && arg1r && real_zerop (arg1r))
9701 tree rp = arg0r ? arg0r
9702 : build1 (REALPART_EXPR, rtype, arg0);
9703 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9704 arg1i ? arg1i
9705 : build1 (IMAGPART_EXPR, rtype, arg1));
9706 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9711 /* A - B -> A + (-B) if B is easily negatable. */
9712 if (negate_expr_p (arg1)
9713 && !TYPE_OVERFLOW_SANITIZED (type)
9714 && ((FLOAT_TYPE_P (type)
9715 /* Avoid this transformation if B is a positive REAL_CST. */
9716 && (TREE_CODE (arg1) != REAL_CST
9717 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9718 || INTEGRAL_TYPE_P (type)))
9719 return fold_build2_loc (loc, PLUS_EXPR, type,
9720 fold_convert_loc (loc, type, arg0),
9721 fold_convert_loc (loc, type,
9722 negate_expr (arg1)));
9724 /* Fold &a[i] - &a[j] to i-j. */
9725 if (TREE_CODE (arg0) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9727 && TREE_CODE (arg1) == ADDR_EXPR
9728 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9730 tree tem = fold_addr_of_array_ref_difference (loc, type,
9731 TREE_OPERAND (arg0, 0),
9732 TREE_OPERAND (arg1, 0));
9733 if (tem)
9734 return tem;
9737 if (FLOAT_TYPE_P (type)
9738 && flag_unsafe_math_optimizations
9739 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9740 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9741 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9742 return tem;
9744 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9745 one. Make sure the type is not saturating and has the signedness of
9746 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9747 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9748 if ((TREE_CODE (arg0) == MULT_EXPR
9749 || TREE_CODE (arg1) == MULT_EXPR)
9750 && !TYPE_SATURATING (type)
9751 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9752 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9753 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9755 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9756 if (tem)
9757 return tem;
9760 goto associate;
9762 case MULT_EXPR:
9763 if (! FLOAT_TYPE_P (type))
9765 /* Transform x * -C into -x * C if x is easily negatable. */
9766 if (TREE_CODE (arg1) == INTEGER_CST
9767 && tree_int_cst_sgn (arg1) == -1
9768 && negate_expr_p (arg0)
9769 && (tem = negate_expr (arg1)) != arg1
9770 && !TREE_OVERFLOW (tem))
9771 return fold_build2_loc (loc, MULT_EXPR, type,
9772 fold_convert_loc (loc, type,
9773 negate_expr (arg0)),
9774 tem);
9776 /* (A + A) * C -> A * 2 * C */
9777 if (TREE_CODE (arg0) == PLUS_EXPR
9778 && TREE_CODE (arg1) == INTEGER_CST
9779 && operand_equal_p (TREE_OPERAND (arg0, 0),
9780 TREE_OPERAND (arg0, 1), 0))
9781 return fold_build2_loc (loc, MULT_EXPR, type,
9782 omit_one_operand_loc (loc, type,
9783 TREE_OPERAND (arg0, 0),
9784 TREE_OPERAND (arg0, 1)),
9785 fold_build2_loc (loc, MULT_EXPR, type,
9786 build_int_cst (type, 2), arg1));
9788 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9789 sign-changing only. */
9790 if (TREE_CODE (arg1) == INTEGER_CST
9791 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9792 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9793 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9795 strict_overflow_p = false;
9796 if (TREE_CODE (arg1) == INTEGER_CST
9797 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9798 &strict_overflow_p)))
9800 if (strict_overflow_p)
9801 fold_overflow_warning (("assuming signed overflow does not "
9802 "occur when simplifying "
9803 "multiplication"),
9804 WARN_STRICT_OVERFLOW_MISC);
9805 return fold_convert_loc (loc, type, tem);
9808 /* Optimize z * conj(z) for integer complex numbers. */
9809 if (TREE_CODE (arg0) == CONJ_EXPR
9810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9811 return fold_mult_zconjz (loc, type, arg1);
9812 if (TREE_CODE (arg1) == CONJ_EXPR
9813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9814 return fold_mult_zconjz (loc, type, arg0);
9816 else
9818 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9819 This is not the same for NaNs or if signed zeros are
9820 involved. */
9821 if (!HONOR_NANS (arg0)
9822 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9823 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9824 && TREE_CODE (arg1) == COMPLEX_CST
9825 && real_zerop (TREE_REALPART (arg1)))
9827 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9828 if (real_onep (TREE_IMAGPART (arg1)))
9829 return
9830 fold_build2_loc (loc, COMPLEX_EXPR, type,
9831 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9832 rtype, arg0)),
9833 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9834 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9835 return
9836 fold_build2_loc (loc, COMPLEX_EXPR, type,
9837 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9838 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9839 rtype, arg0)));
9842 /* Optimize z * conj(z) for floating point complex numbers.
9843 Guarded by flag_unsafe_math_optimizations as non-finite
9844 imaginary components don't produce scalar results. */
9845 if (flag_unsafe_math_optimizations
9846 && TREE_CODE (arg0) == CONJ_EXPR
9847 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9848 return fold_mult_zconjz (loc, type, arg1);
9849 if (flag_unsafe_math_optimizations
9850 && TREE_CODE (arg1) == CONJ_EXPR
9851 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9852 return fold_mult_zconjz (loc, type, arg0);
9854 if (flag_unsafe_math_optimizations)
9857 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9858 if (!in_gimple_form
9859 && optimize
9860 && operand_equal_p (arg0, arg1, 0))
9862 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9864 if (powfn)
9866 tree arg = build_real (type, dconst2);
9867 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9872 goto associate;
9874 case BIT_IOR_EXPR:
9875 /* Canonicalize (X & C1) | C2. */
9876 if (TREE_CODE (arg0) == BIT_AND_EXPR
9877 && TREE_CODE (arg1) == INTEGER_CST
9878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9880 int width = TYPE_PRECISION (type), w;
9881 wide_int c1 = TREE_OPERAND (arg0, 1);
9882 wide_int c2 = arg1;
9884 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9885 if ((c1 & c2) == c1)
9886 return omit_one_operand_loc (loc, type, arg1,
9887 TREE_OPERAND (arg0, 0));
9889 wide_int msk = wi::mask (width, false,
9890 TYPE_PRECISION (TREE_TYPE (arg1)));
9892 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9893 if (msk.and_not (c1 | c2) == 0)
9894 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9895 TREE_OPERAND (arg0, 0), arg1);
9897 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9898 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9899 mode which allows further optimizations. */
9900 c1 &= msk;
9901 c2 &= msk;
9902 wide_int c3 = c1.and_not (c2);
9903 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9905 wide_int mask = wi::mask (w, false,
9906 TYPE_PRECISION (type));
9907 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9909 c3 = mask;
9910 break;
9914 if (c3 != c1)
9915 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9916 fold_build2_loc (loc, BIT_AND_EXPR, type,
9917 TREE_OPERAND (arg0, 0),
9918 wide_int_to_tree (type,
9919 c3)),
9920 arg1);
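/* Concrete instances for an 8-bit type (illustrative constants):
   (X & 0x03) | 0x0F becomes 0x0F, since (C1 & C2) == C1;
   (X & 0xF0) | 0x0F becomes X | 0x0F, since (C1 | C2) == ~0;
   (X & 0x0F) | 0x06 becomes (X & 0x09) | 0x06, minimizing C1.  */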
9923 /* See if this can be simplified into a rotate first. If that
9924 is unsuccessful continue in the association code. */
9925 goto bit_rotate;
9927 case BIT_XOR_EXPR:
9928 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9929 if (TREE_CODE (arg0) == BIT_AND_EXPR
9930 && INTEGRAL_TYPE_P (type)
9931 && integer_onep (TREE_OPERAND (arg0, 1))
9932 && integer_onep (arg1))
9933 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9934 build_zero_cst (TREE_TYPE (arg0)));
9936 /* See if this can be simplified into a rotate first. If that
9937 is unsuccessful continue in the association code. */
9938 goto bit_rotate;
9940 case BIT_AND_EXPR:
9941 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9942 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9943 && INTEGRAL_TYPE_P (type)
9944 && integer_onep (TREE_OPERAND (arg0, 1))
9945 && integer_onep (arg1))
9947 tree tem2;
9948 tem = TREE_OPERAND (arg0, 0);
9949 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9950 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9951 tem, tem2);
9952 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9953 build_zero_cst (TREE_TYPE (tem)));
9955 /* Fold ~X & 1 as (X & 1) == 0. */
9956 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9957 && INTEGRAL_TYPE_P (type)
9958 && integer_onep (arg1))
9960 tree tem2;
9961 tem = TREE_OPERAND (arg0, 0);
9962 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9963 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9964 tem, tem2);
9965 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9966 build_zero_cst (TREE_TYPE (tem)));
9968 /* Fold !X & 1 as X == 0. */
9969 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9970 && integer_onep (arg1))
9972 tem = TREE_OPERAND (arg0, 0);
9973 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9974 build_zero_cst (TREE_TYPE (tem)));
9977 /* Fold (X ^ Y) & Y as ~X & Y. */
9978 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9979 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9981 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9982 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9983 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9984 fold_convert_loc (loc, type, arg1));
9986 /* Fold (X ^ Y) & X as ~Y & X. */
9987 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9988 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9989 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9991 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9992 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9993 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9994 fold_convert_loc (loc, type, arg1));
9996 /* Fold X & (X ^ Y) as X & ~Y. */
9997 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9998 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10000 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10001 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10002 fold_convert_loc (loc, type, arg0),
10003 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10005 /* Fold X & (Y ^ X) as ~Y & X. */
10006 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10007 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10008 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10010 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10011 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10012 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10013 fold_convert_loc (loc, type, arg0));
10016 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10017 multiple of 1 << CST. */
10018 if (TREE_CODE (arg1) == INTEGER_CST)
10020 wide_int cst1 = arg1;
10021 wide_int ncst1 = -cst1;
10022 if ((cst1 & ncst1) == ncst1
10023 && multiple_of_p (type, arg0,
10024 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10025 return fold_convert_loc (loc, type, arg0);
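/* E.g. (X * 12) & -4 folds to X * 12: -4 masks off two trailing
   bits and 12 is a multiple of 1 << 2.  Illustrative constants.  */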
10028 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10029 bits from CST2. */
10030 if (TREE_CODE (arg1) == INTEGER_CST
10031 && TREE_CODE (arg0) == MULT_EXPR
10032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10034 wide_int warg1 = arg1;
10035 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10037 if (masked == 0)
10038 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10039 arg0, arg1);
10040 else if (masked != warg1)
10042 /* Avoid the transform if arg1 is a mask of some
10043 mode which allows further optimizations. */
10044 int pop = wi::popcount (warg1);
10045 if (!(pop >= BITS_PER_UNIT
10046 && exact_log2 (pop) != -1
10047 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10048 return fold_build2_loc (loc, code, type, op0,
10049 wide_int_to_tree (type, masked));
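/* E.g. (X * 8) & 0x1D becomes (X * 8) & 0x18: the product has at
   least three trailing zero bits, so the low bits of CST2 are
   dropped; (X * 8) & 0x07 folds to 0.  Illustrative constants.  */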
10053 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10054 ((A & N) + B) & M -> (A + B) & M
10055 Similarly if (N & M) == 0,
10056 ((A | N) + B) & M -> (A + B) & M
10057 and for - instead of + (or unary - instead of +)
10058 and/or ^ instead of |.
10059 If B is constant and (B & M) == 0, fold into A & M. */
10060 if (TREE_CODE (arg1) == INTEGER_CST)
10062 wide_int cst1 = arg1;
10063 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10064 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10065 && (TREE_CODE (arg0) == PLUS_EXPR
10066 || TREE_CODE (arg0) == MINUS_EXPR
10067 || TREE_CODE (arg0) == NEGATE_EXPR)
10068 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10069 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10071 tree pmop[2];
10072 int which = 0;
10073 wide_int cst0;
10075 /* Now we know that arg0 is (C + D) or (C - D) or
10076 -C and arg1 (M) == (1LL << cst) - 1.
10077 Store C into PMOP[0] and D into PMOP[1]. */
10078 pmop[0] = TREE_OPERAND (arg0, 0);
10079 pmop[1] = NULL;
10080 if (TREE_CODE (arg0) != NEGATE_EXPR)
10082 pmop[1] = TREE_OPERAND (arg0, 1);
10083 which = 1;
10086 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10087 which = -1;
10089 for (; which >= 0; which--)
10090 switch (TREE_CODE (pmop[which]))
10092 case BIT_AND_EXPR:
10093 case BIT_IOR_EXPR:
10094 case BIT_XOR_EXPR:
10095 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10096 != INTEGER_CST)
10097 break;
10098 cst0 = TREE_OPERAND (pmop[which], 1);
10099 cst0 &= cst1;
10100 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10102 if (cst0 != cst1)
10103 break;
10105 else if (cst0 != 0)
10106 break;
10107 /* If C or D is of the form (A & N) where
10108 (N & M) == M, or of the form (A | N) or
10109 (A ^ N) where (N & M) == 0, replace it with A. */
10110 pmop[which] = TREE_OPERAND (pmop[which], 0);
10111 break;
10112 case INTEGER_CST:
10113 /* If C or D is a constant N where (N & M) == 0, it can be
10114 omitted (assumed 0). */
10115 if ((TREE_CODE (arg0) == PLUS_EXPR
10116 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10117 && (cst1 & pmop[which]) == 0)
10118 pmop[which] = NULL;
10119 break;
10120 default:
10121 break;
10124 /* Only build anything new if we optimized one or both arguments
10125 above. */
10126 if (pmop[0] != TREE_OPERAND (arg0, 0)
10127 || (TREE_CODE (arg0) != NEGATE_EXPR
10128 && pmop[1] != TREE_OPERAND (arg0, 1)))
10130 tree utype = TREE_TYPE (arg0);
10131 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10133 /* Perform the operations in a type that has defined
10134 overflow behavior. */
10135 utype = unsigned_type_for (TREE_TYPE (arg0));
10136 if (pmop[0] != NULL)
10137 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10138 if (pmop[1] != NULL)
10139 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10142 if (TREE_CODE (arg0) == NEGATE_EXPR)
10143 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10144 else if (TREE_CODE (arg0) == PLUS_EXPR)
10146 if (pmop[0] != NULL && pmop[1] != NULL)
10147 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10148 pmop[0], pmop[1]);
10149 else if (pmop[0] != NULL)
10150 tem = pmop[0];
10151 else if (pmop[1] != NULL)
10152 tem = pmop[1];
10153 else
10154 return build_int_cst (type, 0);
10156 else if (pmop[0] == NULL)
10157 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10158 else
10159 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10160 pmop[0], pmop[1]);
10161 /* TEM is now the new binary +, - or unary - replacement. */
10162 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10163 fold_convert_loc (loc, utype, arg1));
10164 return fold_convert_loc (loc, type, tem);
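/* Worked instance with M == 7 (illustrative constants):
   ((A & 15) + B) & 7 -> (A + B) & 7, since 15 & 7 == 7;
   ((A | 8) - B) & 7 -> (A - B) & 7, since 8 & 7 == 0.  */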
10169 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10170 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10171 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10173 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10175 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10176 if (mask == -1)
10177 return
10178 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10181 goto associate;
10183 case RDIV_EXPR:
10184 /* Don't touch a floating-point divide by zero unless the mode
10185 of the constant can represent infinity. */
10186 if (TREE_CODE (arg1) == REAL_CST
10187 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10188 && real_zerop (arg1))
10189 return NULL_TREE;
10191 /* (-A) / (-B) -> A / B */
10192 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10193 return fold_build2_loc (loc, RDIV_EXPR, type,
10194 TREE_OPERAND (arg0, 0),
10195 negate_expr (arg1));
10196 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10197 return fold_build2_loc (loc, RDIV_EXPR, type,
10198 negate_expr (arg0),
10199 TREE_OPERAND (arg1, 0));
10201 /* Convert A/B/C to A/(B*C). */
10202 if (flag_reciprocal_math
10203 && TREE_CODE (arg0) == RDIV_EXPR)
10204 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10205 fold_build2_loc (loc, MULT_EXPR, type,
10206 TREE_OPERAND (arg0, 1), arg1));
10208 /* Convert A/(B/C) to (A/B)*C. */
10209 if (flag_reciprocal_math
10210 && TREE_CODE (arg1) == RDIV_EXPR)
10211 return fold_build2_loc (loc, MULT_EXPR, type,
10212 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10213 TREE_OPERAND (arg1, 0)),
10214 TREE_OPERAND (arg1, 1));
10216 /* Convert C1/(X*C2) into (C1/C2)/X. */
10217 if (flag_reciprocal_math
10218 && TREE_CODE (arg1) == MULT_EXPR
10219 && TREE_CODE (arg0) == REAL_CST
10220 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10222 tree tem = const_binop (RDIV_EXPR, arg0,
10223 TREE_OPERAND (arg1, 1));
10224 if (tem)
10225 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10226 TREE_OPERAND (arg1, 0));
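/* E.g. with -freciprocal-math, 6.0 / (X * 2.0) becomes 3.0 / X,
   trading the divide-by-a-product for a single division.
   Illustrative constants.  */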
10229 return NULL_TREE;
10231 case TRUNC_DIV_EXPR:
10232 /* Optimize (X & (-A)) / A where A is a power of 2,
10233 to X >> log2(A) */
10234 if (TREE_CODE (arg0) == BIT_AND_EXPR
10235 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10236 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10238 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10239 arg1, TREE_OPERAND (arg0, 1));
10240 if (sum && integer_zerop (sum)) {
10241 tree pow2 = build_int_cst (integer_type_node,
10242 wi::exact_log2 (arg1));
10243 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10244 TREE_OPERAND (arg0, 0), pow2);
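/* E.g. for signed X, (X & -8) / 8 becomes X >> 3: the AND clears
   the low three bits, so the division is exact.  Illustrative
   constants.  */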
10248 /* Fall through */
10250 case FLOOR_DIV_EXPR:
10251 /* Simplify A / (B << N) where A and B are positive and B is
10252 a power of 2, to A >> (N + log2(B)). */
10253 strict_overflow_p = false;
10254 if (TREE_CODE (arg1) == LSHIFT_EXPR
10255 && (TYPE_UNSIGNED (type)
10256 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10258 tree sval = TREE_OPERAND (arg1, 0);
10259 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10261 tree sh_cnt = TREE_OPERAND (arg1, 1);
10262 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10263 wi::exact_log2 (sval));
10265 if (strict_overflow_p)
10266 fold_overflow_warning (("assuming signed overflow does not "
10267 "occur when simplifying A / (B << N)"),
10268 WARN_STRICT_OVERFLOW_MISC);
10270 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10271 sh_cnt, pow2);
10272 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10273 fold_convert_loc (loc, type, arg0), sh_cnt);
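/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).
   Illustrative constants.  */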
10277 /* Fall through */
10279 case ROUND_DIV_EXPR:
10280 case CEIL_DIV_EXPR:
10281 case EXACT_DIV_EXPR:
10282 if (integer_zerop (arg1))
10283 return NULL_TREE;
10285 /* Convert -A / -B to A / B when the type is signed and overflow is
10286 undefined. */
10287 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10288 && TREE_CODE (arg0) == NEGATE_EXPR
10289 && negate_expr_p (arg1))
10291 if (INTEGRAL_TYPE_P (type))
10292 fold_overflow_warning (("assuming signed overflow does not occur "
10293 "when distributing negation across "
10294 "division"),
10295 WARN_STRICT_OVERFLOW_MISC);
10296 return fold_build2_loc (loc, code, type,
10297 fold_convert_loc (loc, type,
10298 TREE_OPERAND (arg0, 0)),
10299 fold_convert_loc (loc, type,
10300 negate_expr (arg1)));
10302 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10303 && TREE_CODE (arg1) == NEGATE_EXPR
10304 && negate_expr_p (arg0))
10306 if (INTEGRAL_TYPE_P (type))
10307 fold_overflow_warning (("assuming signed overflow does not occur "
10308 "when distributing negation across "
10309 "division"),
10310 WARN_STRICT_OVERFLOW_MISC);
10311 return fold_build2_loc (loc, code, type,
10312 fold_convert_loc (loc, type,
10313 negate_expr (arg0)),
10314 fold_convert_loc (loc, type,
10315 TREE_OPERAND (arg1, 0)));
10318 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10319 operation, EXACT_DIV_EXPR.
10321 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10322 At one time others generated faster code, but it's not clear if they
10323 still do after the last round of changes to the DIV code in expmed.c. */
10324 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10325 && multiple_of_p (type, arg0, arg1))
10326 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10327 fold_convert (type, arg0),
10328 fold_convert (type, arg1));
10330 strict_overflow_p = false;
10331 if (TREE_CODE (arg1) == INTEGER_CST
10332 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10333 &strict_overflow_p)))
10335 if (strict_overflow_p)
10336 fold_overflow_warning (("assuming signed overflow does not occur "
10337 "when simplifying division"),
10338 WARN_STRICT_OVERFLOW_MISC);
10339 return fold_convert_loc (loc, type, tem);
10342 return NULL_TREE;
10344 case CEIL_MOD_EXPR:
10345 case FLOOR_MOD_EXPR:
10346 case ROUND_MOD_EXPR:
10347 case TRUNC_MOD_EXPR:
10348 strict_overflow_p = false;
10349 if (TREE_CODE (arg1) == INTEGER_CST
10350 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10351 &strict_overflow_p)))
10353 if (strict_overflow_p)
10354 fold_overflow_warning (("assuming signed overflow does not occur "
10355 "when simplifying modulus"),
10356 WARN_STRICT_OVERFLOW_MISC);
10357 return fold_convert_loc (loc, type, tem);
10360 return NULL_TREE;
10362 case LROTATE_EXPR:
10363 case RROTATE_EXPR:
10364 case RSHIFT_EXPR:
10365 case LSHIFT_EXPR:
10366 /* Since a negative shift count is not well-defined,
10367 don't try to compute it in the compiler. */
10368 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10369 return NULL_TREE;
10371 prec = element_precision (type);
10373 /* If we have a rotate of a bit operation with the rotate count and
10374 the second operand of the bit operation both constant,
10375 permute the two operations. */
10376 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10377 && (TREE_CODE (arg0) == BIT_AND_EXPR
10378 || TREE_CODE (arg0) == BIT_IOR_EXPR
10379 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10381 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10382 fold_build2_loc (loc, code, type,
10383 TREE_OPERAND (arg0, 0), arg1),
10384 fold_build2_loc (loc, code, type,
10385 TREE_OPERAND (arg0, 1), arg1));
10387 /* Two consecutive rotates adding up to some integer
10388 multiple of the precision of the type can be ignored. */
10389 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10390 && TREE_CODE (arg0) == RROTATE_EXPR
10391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10392 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10393 prec) == 0)
10394 return TREE_OPERAND (arg0, 0);
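/* E.g. for a 32-bit type, rotating X right by 10 and then right by
   22 folds back to X, since 10 + 22 == 32.  Illustrative counts.  */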
10396 return NULL_TREE;
10398 case MIN_EXPR:
10399 case MAX_EXPR:
10400 goto associate;
10402 case TRUTH_ANDIF_EXPR:
10403 /* Note that the operands of this must be ints
10404 and their values must be 0 or 1.
10405 ("true" is a fixed value perhaps depending on the language.) */
10406 /* If first arg is constant zero, return it. */
10407 if (integer_zerop (arg0))
10408 return fold_convert_loc (loc, type, arg0);
10409 case TRUTH_AND_EXPR:
10410 /* If either arg is constant true, drop it. */
10411 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10413 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10414 /* Preserve sequence points. */
10415 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10416 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10417 /* If second arg is constant zero, result is zero, but first arg
10418 must be evaluated. */
10419 if (integer_zerop (arg1))
10420 return omit_one_operand_loc (loc, type, arg1, arg0);
10421 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10422 case will be handled here. */
10423 if (integer_zerop (arg0))
10424 return omit_one_operand_loc (loc, type, arg0, arg1);
10426 /* !X && X is always false. */
10427 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10428 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10429 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10430 /* X && !X is always false. */
10431 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10433 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10435 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10436 means A >= Y && A != MAX, but in this case we know that
10437 A < X <= MAX. */
10439 if (!TREE_SIDE_EFFECTS (arg0)
10440 && !TREE_SIDE_EFFECTS (arg1))
10442 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10443 if (tem && !operand_equal_p (tem, arg0, 0))
10444 return fold_build2_loc (loc, code, type, tem, arg1);
10446 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10447 if (tem && !operand_equal_p (tem, arg1, 0))
10448 return fold_build2_loc (loc, code, type, arg0, tem);
10451 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10452 != NULL_TREE)
10453 return tem;
10455 return NULL_TREE;
10457 case TRUTH_ORIF_EXPR:
10458 /* Note that the operands of this must be ints
10459 and their values must be 0 or true.
10460 ("true" is a fixed value perhaps depending on the language.) */
10461 /* If first arg is constant true, return it. */
10462 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10463 return fold_convert_loc (loc, type, arg0);
10464 case TRUTH_OR_EXPR:
10465 /* If either arg is constant zero, drop it. */
10466 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10467 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10468 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10469 /* Preserve sequence points. */
10470 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10471 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10472 /* If second arg is constant true, result is true, but we must
10473 evaluate first arg. */
10474 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10475 return omit_one_operand_loc (loc, type, arg1, arg0);
10476 /* Likewise for first arg, but note this only occurs here for
10477 TRUTH_OR_EXPR. */
10478 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10479 return omit_one_operand_loc (loc, type, arg0, arg1);
10481 /* !X || X is always true. */
10482 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10483 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10484 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10485 /* X || !X is always true. */
10486 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10487 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10488 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10490 /* (X && !Y) || (!X && Y) is X ^ Y */
10491 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10492 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10494 tree a0, a1, l0, l1, n0, n1;
10496 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10497 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10499 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10500 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10502 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10503 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10505 if ((operand_equal_p (n0, a0, 0)
10506 && operand_equal_p (n1, a1, 0))
10507 || (operand_equal_p (n0, a1, 0)
10508 && operand_equal_p (n1, a0, 0)))
10509 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10512 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10513 != NULL_TREE)
10514 return tem;
10516 return NULL_TREE;
10518 case TRUTH_XOR_EXPR:
10519 /* If the second arg is constant zero, drop it. */
10520 if (integer_zerop (arg1))
10521 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10522 /* If the second arg is constant true, this is a logical inversion. */
10523 if (integer_onep (arg1))
10525 tem = invert_truthvalue_loc (loc, arg0);
10526 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10528 /* Identical arguments cancel to zero. */
10529 if (operand_equal_p (arg0, arg1, 0))
10530 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10532 /* !X ^ X is always true. */
10533 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10534 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10535 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10537 /* X ^ !X is always true. */
10538 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10539 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10540 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10542 return NULL_TREE;
10544 case EQ_EXPR:
10545 case NE_EXPR:
10546 STRIP_NOPS (arg0);
10547 STRIP_NOPS (arg1);
10549 tem = fold_comparison (loc, code, type, op0, op1);
10550 if (tem != NULL_TREE)
10551 return tem;
10553 /* bool_var != 1 becomes !bool_var. */
10554 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10555 && code == NE_EXPR)
10556 return fold_convert_loc (loc, type,
10557 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10558 TREE_TYPE (arg0), arg0));
10560 /* bool_var == 0 becomes !bool_var. */
10561 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10562 && code == EQ_EXPR)
10563 return fold_convert_loc (loc, type,
10564 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10565 TREE_TYPE (arg0), arg0));
10567 /* !exp != 0 becomes !exp */
10568 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10569 && code == NE_EXPR)
10570 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10572 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10573 if ((TREE_CODE (arg0) == PLUS_EXPR
10574 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10575 || TREE_CODE (arg0) == MINUS_EXPR)
10576 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10577 0)),
10578 arg1, 0)
10579 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10580 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10582 tree val = TREE_OPERAND (arg0, 1);
10583 return omit_two_operands_loc (loc, type,
10584 fold_build2_loc (loc, code, type,
10585 val,
10586 build_int_cst (TREE_TYPE (val),
10587 0)),
10588 TREE_OPERAND (arg0, 0), arg1);
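/* E.g. (X + Y) == X folds to Y == 0; X is kept only for its side
   effects via omit_two_operands.  Illustrative form.  */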
10591 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10592 if (TREE_CODE (arg0) == MINUS_EXPR
10593 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10594 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10595 1)),
10596 arg1, 0)
10597 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10599 return omit_two_operands_loc (loc, type,
10600 code == NE_EXPR
10601 ? boolean_true_node : boolean_false_node,
10602 TREE_OPERAND (arg0, 1), arg1);
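/* E.g. 7 - X == X would require 2*X == 7, which no integer X can
   satisfy when C is odd, so EQ folds to false and NE to true.  */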
10605 /* If this is an EQ or NE comparison with zero and ARG0 is
10606 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10607 two operations, but the latter can be done in one less insn
10608 on machines that have only two-operand insns or on which a
10609 constant cannot be the first operand. */
10610 if (TREE_CODE (arg0) == BIT_AND_EXPR
10611 && integer_zerop (arg1))
10613 tree arg00 = TREE_OPERAND (arg0, 0);
10614 tree arg01 = TREE_OPERAND (arg0, 1);
10615 if (TREE_CODE (arg00) == LSHIFT_EXPR
10616 && integer_onep (TREE_OPERAND (arg00, 0)))
10618 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10619 arg01, TREE_OPERAND (arg00, 1));
10620 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10621 build_int_cst (TREE_TYPE (arg0), 1));
10622 return fold_build2_loc (loc, code, type,
10623 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10624 arg1);
10626 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10627 && integer_onep (TREE_OPERAND (arg01, 0)))
10629 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10630 arg00, TREE_OPERAND (arg01, 1));
10631 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10632 build_int_cst (TREE_TYPE (arg0), 1));
10633 return fold_build2_loc (loc, code, type,
10634 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10635 arg1);
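/* E.g. ((1 << N) & X) == 0 is rewritten as ((X >> N) & 1) == 0,
   avoiding a constant first operand.  Illustrative form.  */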
10639 /* If this is an NE or EQ comparison of zero against the result of a
10640 signed MOD operation whose second operand is a power of 2, make
10641 the MOD operation unsigned since it is simpler and equivalent. */
10642 if (integer_zerop (arg1)
10643 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10644 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10645 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10646 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10647 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10648 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10650 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10651 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10652 fold_convert_loc (loc, newtype,
10653 TREE_OPERAND (arg0, 0)),
10654 fold_convert_loc (loc, newtype,
10655 TREE_OPERAND (arg0, 1)));
10657 return fold_build2_loc (loc, code, type, newmod,
10658 fold_convert_loc (loc, newtype, arg1));
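/* E.g. for signed X, X % 4 == 0 is equivalent to
   (unsigned) X % 4 == 0, and the unsigned modulus can expand as a
   simple mask.  Illustrative constant.  */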
10661 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10662 C1 is a valid shift constant, and C2 is a power of two, i.e.
10663 a single bit. */
10664 if (TREE_CODE (arg0) == BIT_AND_EXPR
10665 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10666 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10667 == INTEGER_CST
10668 && integer_pow2p (TREE_OPERAND (arg0, 1))
10669 && integer_zerop (arg1))
10671 tree itype = TREE_TYPE (arg0);
10672 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10673 prec = TYPE_PRECISION (itype);
10675 /* Check for a valid shift count. */
10676 if (wi::ltu_p (arg001, prec))
10678 tree arg01 = TREE_OPERAND (arg0, 1);
10679 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10680 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10681 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10682 can be rewritten as (X & (C2 << C1)) != 0. */
10683 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10685 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10686 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10687 return fold_build2_loc (loc, code, type, tem,
10688 fold_convert_loc (loc, itype, arg1));
10690 /* Otherwise, for signed (arithmetic) shifts,
10691 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10692 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10693 else if (!TYPE_UNSIGNED (itype))
10694 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10695 arg000, build_int_cst (itype, 0));
10696 /* Otherwise, for unsigned (logical) shifts,
10697 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10698 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10699 else
10700 return omit_one_operand_loc (loc, type,
10701 code == EQ_EXPR ? integer_one_node
10702 : integer_zero_node,
10703 arg000);
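#if 0
/* Illustrative sketch, not part of fold-const.c: when C2 << C1 does
   not overflow, testing bit C2 of X >> C1 is the same as testing bit
   C2 << C1 of X directly, which is the first rewrite above.  */
#include <assert.h>

static void
example_shifted_bit_test (unsigned x)
{
  const unsigned c1 = 3, c2 = 4;  /* C2 is a single bit.  */
  assert ((((x >> c1) & c2) != 0) == ((x & (c2 << c1)) != 0));
}
#endif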
10707 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10708 Similarly for NE_EXPR. */
10709 if (TREE_CODE (arg0) == BIT_AND_EXPR
10710 && TREE_CODE (arg1) == INTEGER_CST
10711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10713 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10714 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10715 TREE_OPERAND (arg0, 1));
10716 tree dandnotc
10717 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10718 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10719 notc);
10720 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10721 if (integer_nonzerop (dandnotc))
10722 return omit_one_operand_loc (loc, type, rslt, arg0);
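#if 0
/* Illustrative sketch, not part of fold-const.c: a concrete instance
   of the fold above.  (X & 0x0f) == 0x10 can never hold because 0x10
   has a bit set outside the mask, i.e. D & ~C != 0.  */
#include <assert.h>

static void
example_mask_mismatch (unsigned x)
{
  assert (((x & 0x0fu) == 0x10u) == 0);
}
#endif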
10725 /* If this is a comparison of a field, we may be able to simplify it. */
10726 if ((TREE_CODE (arg0) == COMPONENT_REF
10727 || TREE_CODE (arg0) == BIT_FIELD_REF)
10728 /* Handle the constant case even without -O
10729 to make sure the warnings are given. */
10730 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10732 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10733 if (t1)
10734 return t1;
10737 /* Optimize comparisons of strlen vs zero to a compare of the
10738 first character of the string vs zero. To wit,
10739 strlen(ptr) == 0 => *ptr == 0
10740 strlen(ptr) != 0 => *ptr != 0
10741 Other cases should reduce to one of these two (or a constant)
10742 due to the return value of strlen being unsigned. */
10743 if (TREE_CODE (arg0) == CALL_EXPR
10744 && integer_zerop (arg1))
10746 tree fndecl = get_callee_fndecl (arg0);
10748 if (fndecl
10749 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10750 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10751 && call_expr_nargs (arg0) == 1
10752 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10754 tree iref = build_fold_indirect_ref_loc (loc,
10755 CALL_EXPR_ARG (arg0, 0));
10756 return fold_build2_loc (loc, code, type, iref,
10757 build_int_cst (TREE_TYPE (iref), 0));
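#if 0
/* Illustrative sketch, not part of fold-const.c: strlen (p) == 0
   holds exactly when the first character is the terminating NUL, so
   the library call can be folded to a single load and compare.  */
#include <assert.h>
#include <string.h>

static void
example_strlen_zero (const char *p)
{
  assert ((strlen (p) == 0) == (*p == '\0'));
}
#endif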
10761 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10762 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10763 if (TREE_CODE (arg0) == RSHIFT_EXPR
10764 && integer_zerop (arg1)
10765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10767 tree arg00 = TREE_OPERAND (arg0, 0);
10768 tree arg01 = TREE_OPERAND (arg0, 1);
10769 tree itype = TREE_TYPE (arg00);
10770 if (wi::eq_p (arg01, element_precision (itype) - 1))
10772 if (TYPE_UNSIGNED (itype))
10774 itype = signed_type_for (itype);
10775 arg00 = fold_convert_loc (loc, itype, arg00);
10777 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10778 type, arg00, build_zero_cst (itype));
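#if 0
/* Illustrative sketch, not part of fold-const.c: shifting a 32-bit
   value right by 31 leaves only the sign bit, so the result is
   nonzero exactly for negative values.  GCC shifts signed values
   arithmetically, which the fold above relies on; ISO C leaves the
   right shift of a negative value implementation-defined.  */
#include <assert.h>
#include <stdint.h>

static void
example_sign_shift (int32_t x)
{
  assert (((x >> 31) != 0) == (x < 0));
}
#endif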
10782 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10783 (X & C) == 0 when C is a single bit. */
10784 if (TREE_CODE (arg0) == BIT_AND_EXPR
10785 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10786 && integer_zerop (arg1)
10787 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10789 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10790 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10791 TREE_OPERAND (arg0, 1));
10792 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10793 type, tem,
10794 fold_convert_loc (loc, TREE_TYPE (arg0),
10795 arg1));
10798 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10799 constant C is a power of two, i.e. a single bit. */
10800 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10801 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10802 && integer_zerop (arg1)
10803 && integer_pow2p (TREE_OPERAND (arg0, 1))
10804 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10805 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10807 tree arg00 = TREE_OPERAND (arg0, 0);
10808 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10809 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10812 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10813 when C is a power of two, i.e. a single bit. */
10814 if (TREE_CODE (arg0) == BIT_AND_EXPR
10815 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10816 && integer_zerop (arg1)
10817 && integer_pow2p (TREE_OPERAND (arg0, 1))
10818 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10819 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10821 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10822 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10823 arg000, TREE_OPERAND (arg0, 1));
10824 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10825 tem, build_int_cst (TREE_TYPE (tem), 0));
10828 if (integer_zerop (arg1)
10829 && tree_expr_nonzero_p (arg0))
10831 tree res = constant_boolean_node (code == NE_EXPR, type);
10832 return omit_one_operand_loc (loc, type, res, arg0);
10835 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10836 if (TREE_CODE (arg0) == BIT_AND_EXPR
10837 && TREE_CODE (arg1) == BIT_AND_EXPR)
10839 tree arg00 = TREE_OPERAND (arg0, 0);
10840 tree arg01 = TREE_OPERAND (arg0, 1);
10841 tree arg10 = TREE_OPERAND (arg1, 0);
10842 tree arg11 = TREE_OPERAND (arg1, 1);
10843 tree itype = TREE_TYPE (arg0);
10845 if (operand_equal_p (arg01, arg11, 0))
10846 return fold_build2_loc (loc, code, type,
10847 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10848 fold_build2_loc (loc,
10849 BIT_XOR_EXPR, itype,
10850 arg00, arg10),
10851 arg01),
10852 build_zero_cst (itype));
10854 if (operand_equal_p (arg01, arg10, 0))
10855 return fold_build2_loc (loc, code, type,
10856 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10857 fold_build2_loc (loc,
10858 BIT_XOR_EXPR, itype,
10859 arg00, arg11),
10860 arg01),
10861 build_zero_cst (itype));
10863 if (operand_equal_p (arg00, arg11, 0))
10864 return fold_build2_loc (loc, code, type,
10865 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10866 fold_build2_loc (loc,
10867 BIT_XOR_EXPR, itype,
10868 arg01, arg10),
10869 arg00),
10870 build_zero_cst (itype));
10872 if (operand_equal_p (arg00, arg10, 0))
10873 return fold_build2_loc (loc, code, type,
10874 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10875 fold_build2_loc (loc,
10876 BIT_XOR_EXPR, itype,
10877 arg01, arg11),
10878 arg00),
10879 build_zero_cst (itype));
10882 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10883 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10885 tree arg00 = TREE_OPERAND (arg0, 0);
10886 tree arg01 = TREE_OPERAND (arg0, 1);
10887 tree arg10 = TREE_OPERAND (arg1, 0);
10888 tree arg11 = TREE_OPERAND (arg1, 1);
10889 tree itype = TREE_TYPE (arg0);
10891 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10892 operand_equal_p guarantees no side-effects so we don't need
10893 to use omit_one_operand on Z. */
10894 if (operand_equal_p (arg01, arg11, 0))
10895 return fold_build2_loc (loc, code, type, arg00,
10896 fold_convert_loc (loc, TREE_TYPE (arg00),
10897 arg10));
10898 if (operand_equal_p (arg01, arg10, 0))
10899 return fold_build2_loc (loc, code, type, arg00,
10900 fold_convert_loc (loc, TREE_TYPE (arg00),
10901 arg11));
10902 if (operand_equal_p (arg00, arg11, 0))
10903 return fold_build2_loc (loc, code, type, arg01,
10904 fold_convert_loc (loc, TREE_TYPE (arg01),
10905 arg10));
10906 if (operand_equal_p (arg00, arg10, 0))
10907 return fold_build2_loc (loc, code, type, arg01,
10908 fold_convert_loc (loc, TREE_TYPE (arg01),
10909 arg11));
10911 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10912 if (TREE_CODE (arg01) == INTEGER_CST
10913 && TREE_CODE (arg11) == INTEGER_CST)
10915 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10916 fold_convert_loc (loc, itype, arg11));
10917 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10918 return fold_build2_loc (loc, code, type, tem,
10919 fold_convert_loc (loc, itype, arg10));
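#if 0
/* Illustrative sketch, not part of fold-const.c: XOR is its own
   inverse, so (X ^ C1) == (Y ^ C2) holds exactly when
   (X ^ (C1 ^ C2)) == Y; the fold above merges the two constants into
   one.  The constants below are arbitrary.  */
#include <assert.h>

static void
example_xor_merge (unsigned x, unsigned y)
{
  const unsigned c1 = 0x5au, c2 = 0xa5u;
  assert (((x ^ c1) == (y ^ c2)) == ((x ^ (c1 ^ c2)) == y));
}
#endif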
10923 /* Attempt to simplify equality/inequality comparisons of complex
10924 values. Only lower the comparison if the result is known or
10925 can be simplified to a single scalar comparison. */
10926 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10927 || TREE_CODE (arg0) == COMPLEX_CST)
10928 && (TREE_CODE (arg1) == COMPLEX_EXPR
10929 || TREE_CODE (arg1) == COMPLEX_CST))
10931 tree real0, imag0, real1, imag1;
10932 tree rcond, icond;
10934 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10936 real0 = TREE_OPERAND (arg0, 0);
10937 imag0 = TREE_OPERAND (arg0, 1);
10939 else
10941 real0 = TREE_REALPART (arg0);
10942 imag0 = TREE_IMAGPART (arg0);
10945 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10947 real1 = TREE_OPERAND (arg1, 0);
10948 imag1 = TREE_OPERAND (arg1, 1);
10950 else
10952 real1 = TREE_REALPART (arg1);
10953 imag1 = TREE_IMAGPART (arg1);
10956 rcond = fold_binary_loc (loc, code, type, real0, real1);
10957 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10959 if (integer_zerop (rcond))
10961 if (code == EQ_EXPR)
10962 return omit_two_operands_loc (loc, type, boolean_false_node,
10963 imag0, imag1);
10964 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10966 else
10968 if (code == NE_EXPR)
10969 return omit_two_operands_loc (loc, type, boolean_true_node,
10970 imag0, imag1);
10971 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10975 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10976 if (icond && TREE_CODE (icond) == INTEGER_CST)
10978 if (integer_zerop (icond))
10980 if (code == EQ_EXPR)
10981 return omit_two_operands_loc (loc, type, boolean_false_node,
10982 real0, real1);
10983 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10985 else
10987 if (code == NE_EXPR)
10988 return omit_two_operands_loc (loc, type, boolean_true_node,
10989 real0, real1);
10990 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10995 return NULL_TREE;
10997 case LT_EXPR:
10998 case GT_EXPR:
10999 case LE_EXPR:
11000 case GE_EXPR:
11001 tem = fold_comparison (loc, code, type, op0, op1);
11002 if (tem != NULL_TREE)
11003 return tem;
11005 /* Transform comparisons of the form X +- C CMP X. */
11006 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11008 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11009 && !HONOR_SNANS (arg0))
11010 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11011 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11013 tree arg01 = TREE_OPERAND (arg0, 1);
11014 enum tree_code code0 = TREE_CODE (arg0);
11015 int is_positive;
11017 if (TREE_CODE (arg01) == REAL_CST)
11018 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11019 else
11020 is_positive = tree_int_cst_sgn (arg01);
11022 /* (X - c) > X becomes false. */
11023 if (code == GT_EXPR
11024 && ((code0 == MINUS_EXPR && is_positive >= 0)
11025 || (code0 == PLUS_EXPR && is_positive <= 0)))
11027 if (TREE_CODE (arg01) == INTEGER_CST
11028 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 fold_overflow_warning (("assuming signed overflow does not "
11030 "occur when assuming that (X - c) > X "
11031 "is always false"),
11032 WARN_STRICT_OVERFLOW_ALL);
11033 return constant_boolean_node (0, type);
11036 /* Likewise (X + c) < X becomes false. */
11037 if (code == LT_EXPR
11038 && ((code0 == PLUS_EXPR && is_positive >= 0)
11039 || (code0 == MINUS_EXPR && is_positive <= 0)))
11041 if (TREE_CODE (arg01) == INTEGER_CST
11042 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11043 fold_overflow_warning (("assuming signed overflow does not "
11044 "occur when assuming that "
11045 "(X + c) < X is always false"),
11046 WARN_STRICT_OVERFLOW_ALL);
11047 return constant_boolean_node (0, type);
11050 /* Convert (X - c) <= X to true. */
11051 if (!HONOR_NANS (arg1)
11052 && code == LE_EXPR
11053 && ((code0 == MINUS_EXPR && is_positive >= 0)
11054 || (code0 == PLUS_EXPR && is_positive <= 0)))
11056 if (TREE_CODE (arg01) == INTEGER_CST
11057 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11058 fold_overflow_warning (("assuming signed overflow does not "
11059 "occur when assuming that "
11060 "(X - c) <= X is always true"),
11061 WARN_STRICT_OVERFLOW_ALL);
11062 return constant_boolean_node (1, type);
11065 /* Convert (X + c) >= X to true. */
11066 if (!HONOR_NANS (arg1)
11067 && code == GE_EXPR
11068 && ((code0 == PLUS_EXPR && is_positive >= 0)
11069 || (code0 == MINUS_EXPR && is_positive <= 0)))
11071 if (TREE_CODE (arg01) == INTEGER_CST
11072 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11073 fold_overflow_warning (("assuming signed overflow does not "
11074 "occur when assuming that "
11075 "(X + c) >= X is always true"),
11076 WARN_STRICT_OVERFLOW_ALL);
11077 return constant_boolean_node (1, type);
11080 if (TREE_CODE (arg01) == INTEGER_CST)
11082 /* Convert X + c > X and X - c < X to true for integers. */
11083 if (code == GT_EXPR
11084 && ((code0 == PLUS_EXPR && is_positive > 0)
11085 || (code0 == MINUS_EXPR && is_positive < 0)))
11087 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11088 fold_overflow_warning (("assuming signed overflow does "
11089 "not occur when assuming that "
11090 "(X + c) > X is always true"),
11091 WARN_STRICT_OVERFLOW_ALL);
11092 return constant_boolean_node (1, type);
11095 if (code == LT_EXPR
11096 && ((code0 == MINUS_EXPR && is_positive > 0)
11097 || (code0 == PLUS_EXPR && is_positive < 0)))
11099 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11100 fold_overflow_warning (("assuming signed overflow does "
11101 "not occur when assuming that "
11102 "(X - c) < X is always true"),
11103 WARN_STRICT_OVERFLOW_ALL);
11104 return constant_boolean_node (1, type);
11107 /* Convert X + c <= X and X - c >= X to false for integers. */
11108 if (code == LE_EXPR
11109 && ((code0 == PLUS_EXPR && is_positive > 0)
11110 || (code0 == MINUS_EXPR && is_positive < 0)))
11112 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11113 fold_overflow_warning (("assuming signed overflow does "
11114 "not occur when assuming that "
11115 "(X + c) <= X is always false"),
11116 WARN_STRICT_OVERFLOW_ALL);
11117 return constant_boolean_node (0, type);
11120 if (code == GE_EXPR
11121 && ((code0 == MINUS_EXPR && is_positive > 0)
11122 || (code0 == PLUS_EXPR && is_positive < 0)))
11124 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11125 fold_overflow_warning (("assuming signed overflow does "
11126 "not occur when assuming that "
11127 "(X - c) >= X is always false"),
11128 WARN_STRICT_OVERFLOW_ALL);
11129 return constant_boolean_node (0, type);
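#if 0
/* Illustrative sketch, not part of fold-const.c: with signed overflow
   undefined (TYPE_OVERFLOW_UNDEFINED above), a function like this may
   be folded to "return 1;", and -Wstrict-overflow reports the
   assumption via fold_overflow_warning.  Under -fwrapv the fold does
   not apply, since INT_MAX + 1 wraps to INT_MIN.  */
static int
example_strict_overflow (int x)
{
  return x + 1 > x;
}
#endif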
11134 /* If we are comparing an ABS_EXPR with a constant, we can
11135 convert all the cases into explicit comparisons, but they may
11136 well not be faster than doing the ABS and one comparison.
11137 But ABS (X) <= C is a range comparison, which becomes a subtraction
11138 and a comparison, and is probably faster. */
11139 if (code == LE_EXPR
11140 && TREE_CODE (arg1) == INTEGER_CST
11141 && TREE_CODE (arg0) == ABS_EXPR
11142 && ! TREE_SIDE_EFFECTS (arg0)
11143 && (0 != (tem = negate_expr (arg1)))
11144 && TREE_CODE (tem) == INTEGER_CST
11145 && !TREE_OVERFLOW (tem))
11146 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11147 build2 (GE_EXPR, type,
11148 TREE_OPERAND (arg0, 0), tem),
11149 build2 (LE_EXPR, type,
11150 TREE_OPERAND (arg0, 0), arg1));
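#if 0
/* Illustrative sketch, not part of fold-const.c: abs (x) <= c is the
   range test -c <= x && x <= c, avoiding the absolute value entirely.
   The negate_expr/TREE_OVERFLOW checks above ensure -C is
   representable; the guard below sidesteps abs (INT_MIN), which
   overflows.  */
#include <assert.h>
#include <limits.h>
#include <stdlib.h>

static void
example_abs_range (int x)
{
  const int c = 100;
  if (x == INT_MIN)
    return;
  assert ((abs (x) <= c) == (x >= -c && x <= c));
}
#endif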
11152 /* Convert ABS_EXPR<x> >= 0 to true. */
11153 strict_overflow_p = false;
11154 if (code == GE_EXPR
11155 && (integer_zerop (arg1)
11156 || (! HONOR_NANS (arg0)
11157 && real_zerop (arg1)))
11158 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11160 if (strict_overflow_p)
11161 fold_overflow_warning (("assuming signed overflow does not occur "
11162 "when simplifying comparison of "
11163 "absolute value and zero"),
11164 WARN_STRICT_OVERFLOW_CONDITIONAL);
11165 return omit_one_operand_loc (loc, type,
11166 constant_boolean_node (true, type),
11167 arg0);
11170 /* Convert ABS_EXPR<x> < 0 to false. */
11171 strict_overflow_p = false;
11172 if (code == LT_EXPR
11173 && (integer_zerop (arg1) || real_zerop (arg1))
11174 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11176 if (strict_overflow_p)
11177 fold_overflow_warning (("assuming signed overflow does not occur "
11178 "when simplifying comparison of "
11179 "absolute value and zero"),
11180 WARN_STRICT_OVERFLOW_CONDITIONAL);
11181 return omit_one_operand_loc (loc, type,
11182 constant_boolean_node (false, type),
11183 arg0);
11186 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11187 and similarly for >= into !=. */
11188 if ((code == LT_EXPR || code == GE_EXPR)
11189 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11190 && TREE_CODE (arg1) == LSHIFT_EXPR
11191 && integer_onep (TREE_OPERAND (arg1, 0)))
11192 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11193 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11194 TREE_OPERAND (arg1, 1)),
11195 build_zero_cst (TREE_TYPE (arg0)));
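#if 0
/* Illustrative sketch, not part of fold-const.c: for unsigned X,
   X < (1 << Y) says no bit at position Y or above is set, which is
   exactly X >> Y == 0; the >= form is the complement.  */
#include <assert.h>

static void
example_lt_power_of_two (unsigned x, unsigned y)
{
  assert (y < sizeof (unsigned) * 8);
  assert ((x < (1u << y)) == ((x >> y) == 0));
}
#endif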
11197 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11198 otherwise Y might be >= # of bits in X's type and thus e.g.
11199 (unsigned char) (1 << Y) for Y 15 might be 0.
11200 If the cast is widening, then 1 << Y should have unsigned type,
11201 otherwise if Y is number of bits in the signed shift type minus 1,
11202 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11203 31 might be 0xffffffff80000000. */
11204 if ((code == LT_EXPR || code == GE_EXPR)
11205 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11206 && CONVERT_EXPR_P (arg1)
11207 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11208 && (element_precision (TREE_TYPE (arg1))
11209 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11210 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11211 || (element_precision (TREE_TYPE (arg1))
11212 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11213 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11215 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11216 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11217 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11218 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11219 build_zero_cst (TREE_TYPE (arg0)));
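#if 0
/* Illustrative sketch, not part of fold-const.c: why a narrowing cast
   defeats the transformation.  On a host with 32-bit int,
   (unsigned char) (1 << 15) truncates to 0, not to a power of two, so
   a shift-based rewrite of the comparison would be wrong.  */
#include <assert.h>

static void
example_narrowing_hazard (void)
{
  unsigned char truncated = (unsigned char) (1 << 15);
  assert (truncated == 0);
}
#endif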
11222 return NULL_TREE;
11224 case UNORDERED_EXPR:
11225 case ORDERED_EXPR:
11226 case UNLT_EXPR:
11227 case UNLE_EXPR:
11228 case UNGT_EXPR:
11229 case UNGE_EXPR:
11230 case UNEQ_EXPR:
11231 case LTGT_EXPR:
11232 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11234 tree targ0 = strip_float_extensions (arg0);
11235 tree targ1 = strip_float_extensions (arg1);
11236 tree newtype = TREE_TYPE (targ0);
11238 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11239 newtype = TREE_TYPE (targ1);
11241 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11242 return fold_build2_loc (loc, code, type,
11243 fold_convert_loc (loc, newtype, targ0),
11244 fold_convert_loc (loc, newtype, targ1));
11247 return NULL_TREE;
11249 case COMPOUND_EXPR:
11250 /* When pedantic, a compound expression can be neither an lvalue
11251 nor an integer constant expression. */
11252 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11253 return NULL_TREE;
11254 /* Don't let (0, 0) be a null pointer constant. */
11255 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11256 : fold_convert_loc (loc, type, arg1);
11257 return pedantic_non_lvalue_loc (loc, tem);
11259 case ASSERT_EXPR:
11260 /* An ASSERT_EXPR should never be passed to fold_binary. */
11261 gcc_unreachable ();
11263 default:
11264 return NULL_TREE;
11265 } /* switch (code) */
11268 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11269 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11270 of GOTO_EXPR. */
11272 static tree
11273 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11275 switch (TREE_CODE (*tp))
11277 case LABEL_EXPR:
11278 return *tp;
11280 case GOTO_EXPR:
11281 *walk_subtrees = 0;
11283 /* ... fall through ... */
11285 default:
11286 return NULL_TREE;
11290 /* Return whether the sub-tree ST contains a label which is accessible from
11291 outside the sub-tree. */
11293 static bool
11294 contains_label_p (tree st)
11296 return
11297 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11300 /* Fold a ternary expression of code CODE and type TYPE with operands
11301 OP0, OP1, and OP2. Return the folded expression if folding is
11302 successful. Otherwise, return NULL_TREE. */
11304 tree
11305 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11306 tree op0, tree op1, tree op2)
11308 tree tem;
11309 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11310 enum tree_code_class kind = TREE_CODE_CLASS (code);
11312 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11313 && TREE_CODE_LENGTH (code) == 3);
11315 /* If this is a commutative operation, and OP0 is a constant, move it
11316 to OP1 to reduce the number of tests below. */
11317 if (commutative_ternary_tree_code (code)
11318 && tree_swap_operands_p (op0, op1, true))
11319 return fold_build3_loc (loc, code, type, op1, op0, op2);
11321 tem = generic_simplify (loc, code, type, op0, op1, op2);
11322 if (tem)
11323 return tem;
11325 /* Strip any conversions that don't change the mode. This is safe
11326 for every expression, except for a comparison expression because
11327 its signedness is derived from its operands. So, in the latter
11328 case, only strip conversions that don't change the signedness.
11330 Note that this is done as an internal manipulation within the
11331 constant folder, in order to find the simplest representation of
11332 the arguments so that their form can be studied. In any cases,
11333 the appropriate type conversions should be put back in the tree
11334 that will get out of the constant folder. */
11335 if (op0)
11337 arg0 = op0;
11338 STRIP_NOPS (arg0);
11341 if (op1)
11343 arg1 = op1;
11344 STRIP_NOPS (arg1);
11347 if (op2)
11349 arg2 = op2;
11350 STRIP_NOPS (arg2);
11353 switch (code)
11355 case COMPONENT_REF:
11356 if (TREE_CODE (arg0) == CONSTRUCTOR
11357 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11359 unsigned HOST_WIDE_INT idx;
11360 tree field, value;
11361 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11362 if (field == arg1)
11363 return value;
11365 return NULL_TREE;
11367 case COND_EXPR:
11368 case VEC_COND_EXPR:
11369 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11370 so all simple results must be passed through pedantic_non_lvalue. */
11371 if (TREE_CODE (arg0) == INTEGER_CST)
11373 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11374 tem = integer_zerop (arg0) ? op2 : op1;
11375 /* Only optimize constant conditions when the selected branch
11376 has the same type as the COND_EXPR. This avoids optimizing
11377 away "c ? x : throw", where the throw has a void type.
11378 Avoid throwing away an operand that contains a label. */
11379 if ((!TREE_SIDE_EFFECTS (unused_op)
11380 || !contains_label_p (unused_op))
11381 && (! VOID_TYPE_P (TREE_TYPE (tem))
11382 || VOID_TYPE_P (type)))
11383 return pedantic_non_lvalue_loc (loc, tem);
11384 return NULL_TREE;
11386 else if (TREE_CODE (arg0) == VECTOR_CST)
11388 if ((TREE_CODE (arg1) == VECTOR_CST
11389 || TREE_CODE (arg1) == CONSTRUCTOR)
11390 && (TREE_CODE (arg2) == VECTOR_CST
11391 || TREE_CODE (arg2) == CONSTRUCTOR))
11393 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11394 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11395 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11396 for (i = 0; i < nelts; i++)
11398 tree val = VECTOR_CST_ELT (arg0, i);
11399 if (integer_all_onesp (val))
11400 sel[i] = i;
11401 else if (integer_zerop (val))
11402 sel[i] = nelts + i;
11403 else /* Currently unreachable. */
11404 return NULL_TREE;
11406 tree t = fold_vec_perm (type, arg1, arg2, sel);
11407 if (t != NULL_TREE)
11408 return t;
11412 /* If we have A op B ? A : C, we may be able to convert this to a
11413 simpler expression, depending on the operation and the values
11414 of B and C. Signed zeros prevent all of these transformations,
11415 for reasons given above each one.
11417 Also try swapping the arguments and inverting the conditional. */
11418 if (COMPARISON_CLASS_P (arg0)
11419 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11420 arg1, TREE_OPERAND (arg0, 1))
11421 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11423 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11424 if (tem)
11425 return tem;
11428 if (COMPARISON_CLASS_P (arg0)
11429 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11430 op2,
11431 TREE_OPERAND (arg0, 1))
11432 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11434 location_t loc0 = expr_location_or (arg0, loc);
11435 tem = fold_invert_truthvalue (loc0, arg0);
11436 if (tem && COMPARISON_CLASS_P (tem))
11438 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11439 if (tem)
11440 return tem;
11444 /* If the second operand is simpler than the third, swap them
11445 since that produces better jump optimization results. */
11446 if (truth_value_p (TREE_CODE (arg0))
11447 && tree_swap_operands_p (op1, op2, false))
11449 location_t loc0 = expr_location_or (arg0, loc);
11450 /* See if this can be inverted. If it can't, possibly because
11451 it was a floating-point inequality comparison, don't do
11452 anything. */
11453 tem = fold_invert_truthvalue (loc0, arg0);
11454 if (tem)
11455 return fold_build3_loc (loc, code, type, tem, op2, op1);
11458 /* Convert A ? 1 : 0 to simply A. */
11459 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11460 : (integer_onep (op1)
11461 && !VECTOR_TYPE_P (type)))
11462 && integer_zerop (op2)
11463 /* If we try to convert OP0 to our type, the
11464 call to fold will try to move the conversion inside
11465 a COND, which will recurse. In that case, the COND_EXPR
11466 is probably the best choice, so leave it alone. */
11467 && type == TREE_TYPE (arg0))
11468 return pedantic_non_lvalue_loc (loc, arg0);
11470 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11471 over COND_EXPR in cases such as floating point comparisons. */
11472 if (integer_zerop (op1)
11473 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11474 : (integer_onep (op2)
11475 && !VECTOR_TYPE_P (type)))
11476 && truth_value_p (TREE_CODE (arg0)))
11477 return pedantic_non_lvalue_loc (loc,
11478 fold_convert_loc (loc, type,
11479 invert_truthvalue_loc (loc,
11480 arg0)));
11482 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11483 if (TREE_CODE (arg0) == LT_EXPR
11484 && integer_zerop (TREE_OPERAND (arg0, 1))
11485 && integer_zerop (op2)
11486 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11488 /* sign_bit_p looks through both zero and sign extensions,
11489 but for this optimization only sign extensions are
11490 usable. */
11491 tree tem2 = TREE_OPERAND (arg0, 0);
11492 while (tem != tem2)
11494 if (TREE_CODE (tem2) != NOP_EXPR
11495 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11497 tem = NULL_TREE;
11498 break;
11500 tem2 = TREE_OPERAND (tem2, 0);
11502 /* sign_bit_p only checks ARG1 bits within A's precision.
11503 If <sign bit of A> has wider type than A, bits outside
11504 of A's precision in <sign bit of A> need to be checked.
11505 If they are all 0, this optimization needs to be done
11506 in unsigned A's type; if they are all 1, in signed A's type;
11507 otherwise this can't be done. */
11508 if (tem
11509 && TYPE_PRECISION (TREE_TYPE (tem))
11510 < TYPE_PRECISION (TREE_TYPE (arg1))
11511 && TYPE_PRECISION (TREE_TYPE (tem))
11512 < TYPE_PRECISION (type))
11514 int inner_width, outer_width;
11515 tree tem_type;
11517 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11518 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11519 if (outer_width > TYPE_PRECISION (type))
11520 outer_width = TYPE_PRECISION (type);
11522 wide_int mask = wi::shifted_mask
11523 (inner_width, outer_width - inner_width, false,
11524 TYPE_PRECISION (TREE_TYPE (arg1)));
11526 wide_int common = mask & arg1;
11527 if (common == mask)
11529 tem_type = signed_type_for (TREE_TYPE (tem));
11530 tem = fold_convert_loc (loc, tem_type, tem);
11532 else if (common == 0)
11534 tem_type = unsigned_type_for (TREE_TYPE (tem));
11535 tem = fold_convert_loc (loc, tem_type, tem);
11537 else
11538 tem = NULL;
11541 if (tem)
11542 return
11543 fold_convert_loc (loc, type,
11544 fold_build2_loc (loc, BIT_AND_EXPR,
11545 TREE_TYPE (tem), tem,
11546 fold_convert_loc (loc,
11547 TREE_TYPE (tem),
11548 arg1)));
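#if 0
/* Illustrative sketch, not part of fold-const.c: selecting the sign
   bit with a comparison equals masking it out directly, since the
   sign bit is set exactly when X < 0.  Fixed 32-bit types keep the
   example concrete.  */
#include <assert.h>
#include <stdint.h>

static void
example_sign_bit_select (int32_t x)
{
  const uint32_t sign = 0x80000000u;
  assert ((x < 0 ? sign : 0u) == ((uint32_t) x & sign));
}
#endif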
11551 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11552 already handled above. */
11553 if (TREE_CODE (arg0) == BIT_AND_EXPR
11554 && integer_onep (TREE_OPERAND (arg0, 1))
11555 && integer_zerop (op2)
11556 && integer_pow2p (arg1))
11558 tree tem = TREE_OPERAND (arg0, 0);
11559 STRIP_NOPS (tem);
11560 if (TREE_CODE (tem) == RSHIFT_EXPR
11561 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11562 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11563 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11564 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11565 TREE_OPERAND (tem, 0), arg1);
11568 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11569 is probably obsolete because the first operand should be a
11570 truth value (that's why we have the two cases above), but let's
11571 leave it in until we can confirm this for all front-ends. */
11572 if (integer_zerop (op2)
11573 && TREE_CODE (arg0) == NE_EXPR
11574 && integer_zerop (TREE_OPERAND (arg0, 1))
11575 && integer_pow2p (arg1)
11576 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11577 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11578 arg1, OEP_ONLY_CONST))
11579 return pedantic_non_lvalue_loc (loc,
11580 fold_convert_loc (loc, type,
11581 TREE_OPERAND (arg0, 0)));
11583 /* Disable the transformations below for vectors, since
11584 fold_binary_op_with_conditional_arg may undo them immediately,
11585 yielding an infinite loop. */
11586 if (code == VEC_COND_EXPR)
11587 return NULL_TREE;
11589 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11590 if (integer_zerop (op2)
11591 && truth_value_p (TREE_CODE (arg0))
11592 && truth_value_p (TREE_CODE (arg1))
11593 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11594 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11595 : TRUTH_ANDIF_EXPR,
11596 type, fold_convert_loc (loc, type, arg0), arg1);
11598 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11599 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11600 && truth_value_p (TREE_CODE (arg0))
11601 && truth_value_p (TREE_CODE (arg1))
11602 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11604 location_t loc0 = expr_location_or (arg0, loc);
11605 /* Only perform transformation if ARG0 is easily inverted. */
11606 tem = fold_invert_truthvalue (loc0, arg0);
11607 if (tem)
11608 return fold_build2_loc (loc, code == VEC_COND_EXPR
11609 ? BIT_IOR_EXPR
11610 : TRUTH_ORIF_EXPR,
11611 type, fold_convert_loc (loc, type, tem),
11612 arg1);
11615 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11616 if (integer_zerop (arg1)
11617 && truth_value_p (TREE_CODE (arg0))
11618 && truth_value_p (TREE_CODE (op2))
11619 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11621 location_t loc0 = expr_location_or (arg0, loc);
11622 /* Only perform transformation if ARG0 is easily inverted. */
11623 tem = fold_invert_truthvalue (loc0, arg0);
11624 if (tem)
11625 return fold_build2_loc (loc, code == VEC_COND_EXPR
11626 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11627 type, fold_convert_loc (loc, type, tem),
11628 op2);
11631 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11632 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11633 && truth_value_p (TREE_CODE (arg0))
11634 && truth_value_p (TREE_CODE (op2))
11635 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11636 return fold_build2_loc (loc, code == VEC_COND_EXPR
11637 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11638 type, fold_convert_loc (loc, type, arg0), op2);
11640 return NULL_TREE;
11642 case CALL_EXPR:
11643 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11644 of fold_ternary on them. */
11645 gcc_unreachable ();
11647 case BIT_FIELD_REF:
11648 if ((TREE_CODE (arg0) == VECTOR_CST
11649 || (TREE_CODE (arg0) == CONSTRUCTOR
11650 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11651 && (type == TREE_TYPE (TREE_TYPE (arg0))
11652 || (TREE_CODE (type) == VECTOR_TYPE
11653 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11655 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11656 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11657 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11658 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11660 if (n != 0
11661 && (idx % width) == 0
11662 && (n % width) == 0
11663 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11665 idx = idx / width;
11666 n = n / width;
11668 if (TREE_CODE (arg0) == VECTOR_CST)
11670 if (n == 1)
11671 return VECTOR_CST_ELT (arg0, idx);
11673 tree *vals = XALLOCAVEC (tree, n);
11674 for (unsigned i = 0; i < n; ++i)
11675 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11676 return build_vector (type, vals);
11679 /* Constructor elements can be subvectors. */
11680 unsigned HOST_WIDE_INT k = 1;
11681 if (CONSTRUCTOR_NELTS (arg0) != 0)
11683 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11684 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11685 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11688 /* We keep an exact subset of the constructor elements. */
11689 if ((idx % k) == 0 && (n % k) == 0)
11691 if (CONSTRUCTOR_NELTS (arg0) == 0)
11692 return build_constructor (type, NULL);
11693 idx /= k;
11694 n /= k;
11695 if (n == 1)
11697 if (idx < CONSTRUCTOR_NELTS (arg0))
11698 return CONSTRUCTOR_ELT (arg0, idx)->value;
11699 return build_zero_cst (type);
11702 vec<constructor_elt, va_gc> *vals;
11703 vec_alloc (vals, n);
11704 for (unsigned i = 0;
11705 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11706 ++i)
11707 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11708 CONSTRUCTOR_ELT
11709 (arg0, idx + i)->value);
11710 return build_constructor (type, vals);
11712 /* The bitfield references a single constructor element. */
11713 else if (idx + n <= (idx / k + 1) * k)
11715 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11716 return build_zero_cst (type);
11717 else if (n == k)
11718 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11719 else
11720 return fold_build3_loc (loc, code, type,
11721 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11722 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11727 /* A bit-field-ref that references the full argument can be stripped. */
11728 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11729 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11730 && integer_zerop (op2))
11731 return fold_convert_loc (loc, type, arg0);
11733 /* On constants we can use native encode/interpret to constant
11734 fold (nearly) all BIT_FIELD_REFs. */
11735 if (CONSTANT_CLASS_P (arg0)
11736 && can_native_interpret_type_p (type)
11737 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11738 /* This limitation should not be necessary; we just need to
11739 round this up to the mode size. */
11740 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11741 /* Need bit-shifting of the buffer to relax the following. */
11742 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11744 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11745 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11746 unsigned HOST_WIDE_INT clen;
11747 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11748 /* ??? We cannot tell native_encode_expr to start at
11749 some random byte only. So limit ourselves to a reasonable amount
11750 of work. */
11751 if (clen <= 4096)
11753 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11754 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11755 if (len > 0
11756 && len * BITS_PER_UNIT >= bitpos + bitsize)
11758 tree v = native_interpret_expr (type,
11759 b + bitpos / BITS_PER_UNIT,
11760 bitsize / BITS_PER_UNIT);
11761 if (v)
11762 return v;
11767 return NULL_TREE;
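#if 0
/* Illustrative sketch, not part of fold-const.c: the native
   encode/interpret pair is morally a byte-level type pun.  Taking
   bits 0..31 of a double constant folds to whatever reinterpreting
   its first four bytes as an integer yields -- here in host byte
   order, whereas native_encode_expr uses the target's.  */
#include <stdint.h>
#include <string.h>

static uint32_t
example_native_interpret (double d)
{
  unsigned char buf[sizeof d];
  uint32_t lo;
  memcpy (buf, &d, sizeof d);    /* native_encode_expr analogue.  */
  memcpy (&lo, buf, sizeof lo);  /* native_interpret_expr analogue.  */
  return lo;
}
#endif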
11769 case FMA_EXPR:
11770 /* For integers we can decompose the FMA if possible. */
11771 if (TREE_CODE (arg0) == INTEGER_CST
11772 && TREE_CODE (arg1) == INTEGER_CST)
11773 return fold_build2_loc (loc, PLUS_EXPR, type,
11774 const_binop (MULT_EXPR, arg0, arg1), arg2);
11775 if (integer_zerop (arg2))
11776 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11778 return fold_fma (loc, type, arg0, arg1, arg2);
11780 case VEC_PERM_EXPR:
11781 if (TREE_CODE (arg2) == VECTOR_CST)
11783 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11784 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11785 unsigned char *sel2 = sel + nelts;
11786 bool need_mask_canon = false;
11787 bool need_mask_canon2 = false;
11788 bool all_in_vec0 = true;
11789 bool all_in_vec1 = true;
11790 bool maybe_identity = true;
11791 bool single_arg = (op0 == op1);
11792 bool changed = false;
11794 mask2 = 2 * nelts - 1;
11795 mask = single_arg ? (nelts - 1) : mask2;
11796 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11797 for (i = 0; i < nelts; i++)
11799 tree val = VECTOR_CST_ELT (arg2, i);
11800 if (TREE_CODE (val) != INTEGER_CST)
11801 return NULL_TREE;
11803 /* Make sure that the perm value is in an acceptable
11804 range. */
11805 wide_int t = val;
11806 need_mask_canon |= wi::gtu_p (t, mask);
11807 need_mask_canon2 |= wi::gtu_p (t, mask2);
11808 sel[i] = t.to_uhwi () & mask;
11809 sel2[i] = t.to_uhwi () & mask2;
11811 if (sel[i] < nelts)
11812 all_in_vec1 = false;
11813 else
11814 all_in_vec0 = false;
11816 if ((sel[i] & (nelts-1)) != i)
11817 maybe_identity = false;
11820 if (maybe_identity)
11822 if (all_in_vec0)
11823 return op0;
11824 if (all_in_vec1)
11825 return op1;
11828 if (all_in_vec0)
11829 op1 = op0;
11830 else if (all_in_vec1)
11832 op0 = op1;
11833 for (i = 0; i < nelts; i++)
11834 sel[i] -= nelts;
11835 need_mask_canon = true;
11838 if ((TREE_CODE (op0) == VECTOR_CST
11839 || TREE_CODE (op0) == CONSTRUCTOR)
11840 && (TREE_CODE (op1) == VECTOR_CST
11841 || TREE_CODE (op1) == CONSTRUCTOR))
11843 tree t = fold_vec_perm (type, op0, op1, sel);
11844 if (t != NULL_TREE)
11845 return t;
11848 if (op0 == op1 && !single_arg)
11849 changed = true;
11851 /* Some targets are deficient and fail to expand a single
11852 argument permutation while still allowing an equivalent
11853 2-argument version. */
11854 if (need_mask_canon && arg2 == op2
11855 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11856 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11858 need_mask_canon = need_mask_canon2;
11859 sel = sel2;
11862 if (need_mask_canon && arg2 == op2)
11864 tree *tsel = XALLOCAVEC (tree, nelts);
11865 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11866 for (i = 0; i < nelts; i++)
11867 tsel[i] = build_int_cst (eltype, sel[i]);
11868 op2 = build_vector (TREE_TYPE (arg2), tsel);
11869 changed = true;
11872 if (changed)
11873 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11875 return NULL_TREE;
11877 default:
11878 return NULL_TREE;
11879 } /* switch (code) */
11882 /* Perform constant folding and related simplification of EXPR.
11883 The related simplifications include x*1 => x, x*0 => 0, etc.,
11884 and application of the associative law.
11885 NOP_EXPR conversions may be removed freely (as long as we
11886 are careful not to change the type of the overall expression).
11887 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11888 but we can constant-fold them if they have constant operands. */
11890 #ifdef ENABLE_FOLD_CHECKING
11891 # define fold(x) fold_1 (x)
11892 static tree fold_1 (tree);
11893 static
11894 #endif
11895 tree
11896 fold (tree expr)
11898 const tree t = expr;
11899 enum tree_code code = TREE_CODE (t);
11900 enum tree_code_class kind = TREE_CODE_CLASS (code);
11901 tree tem;
11902 location_t loc = EXPR_LOCATION (expr);
11904 /* Return right away if a constant. */
11905 if (kind == tcc_constant)
11906 return t;
11908 /* CALL_EXPR-like objects with variable numbers of operands are
11909 treated specially. */
11910 if (kind == tcc_vl_exp)
11912 if (code == CALL_EXPR)
11914 tem = fold_call_expr (loc, expr, false);
11915 return tem ? tem : expr;
11917 return expr;
11920 if (IS_EXPR_CODE_CLASS (kind))
11922 tree type = TREE_TYPE (t);
11923 tree op0, op1, op2;
11925 switch (TREE_CODE_LENGTH (code))
11927 case 1:
11928 op0 = TREE_OPERAND (t, 0);
11929 tem = fold_unary_loc (loc, code, type, op0);
11930 return tem ? tem : expr;
11931 case 2:
11932 op0 = TREE_OPERAND (t, 0);
11933 op1 = TREE_OPERAND (t, 1);
11934 tem = fold_binary_loc (loc, code, type, op0, op1);
11935 return tem ? tem : expr;
11936 case 3:
11937 op0 = TREE_OPERAND (t, 0);
11938 op1 = TREE_OPERAND (t, 1);
11939 op2 = TREE_OPERAND (t, 2);
11940 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11941 return tem ? tem : expr;
11942 default:
11943 break;
11947 switch (code)
11949 case ARRAY_REF:
11951 tree op0 = TREE_OPERAND (t, 0);
11952 tree op1 = TREE_OPERAND (t, 1);
11954 if (TREE_CODE (op1) == INTEGER_CST
11955 && TREE_CODE (op0) == CONSTRUCTOR
11956 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11958 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
11959 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
11960 unsigned HOST_WIDE_INT begin = 0;
11962 /* Find a matching index by means of a binary search. */
11963 while (begin != end)
11965 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
11966 tree index = (*elts)[middle].index;
11968 if (TREE_CODE (index) == INTEGER_CST
11969 && tree_int_cst_lt (index, op1))
11970 begin = middle + 1;
11971 else if (TREE_CODE (index) == INTEGER_CST
11972 && tree_int_cst_lt (op1, index))
11973 end = middle;
11974 else if (TREE_CODE (index) == RANGE_EXPR
11975 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
11976 begin = middle + 1;
11977 else if (TREE_CODE (index) == RANGE_EXPR
11978 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
11979 end = middle;
11980 else
11981 return (*elts)[middle].value;
11985 return t;
11988 /* Return a VECTOR_CST if possible. */
11989 case CONSTRUCTOR:
11991 tree type = TREE_TYPE (t);
11992 if (TREE_CODE (type) != VECTOR_TYPE)
11993 return t;
11995 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11996 unsigned HOST_WIDE_INT idx, pos = 0;
11997 tree value;
11999 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12001 if (!CONSTANT_CLASS_P (value))
12002 return t;
12003 if (TREE_CODE (value) == VECTOR_CST)
12005 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12006 vec[pos++] = VECTOR_CST_ELT (value, i);
12008 else
12009 vec[pos++] = value;
12011 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12012 vec[pos] = build_zero_cst (TREE_TYPE (type));
12014 return build_vector (type, vec);
12017 case CONST_DECL:
12018 return fold (DECL_INITIAL (t));
12020 default:
12021 return t;
12022 } /* switch (code) */
12025 #ifdef ENABLE_FOLD_CHECKING
12026 #undef fold
12028 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12029 hash_table<nofree_ptr_hash<const tree_node> > *);
12030 static void fold_check_failed (const_tree, const_tree);
12031 void print_fold_checksum (const_tree);
12033 /* When --enable-checking=fold, compute a digest of EXPR before
12034 and after the actual fold call to verify that fold did not
12035 accidentally change the original expr. */
12037 tree
12038 fold (tree expr)
12040 tree ret;
12041 struct md5_ctx ctx;
12042 unsigned char checksum_before[16], checksum_after[16];
12043 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12045 md5_init_ctx (&ctx);
12046 fold_checksum_tree (expr, &ctx, &ht);
12047 md5_finish_ctx (&ctx, checksum_before);
12048 ht.empty ();
12050 ret = fold_1 (expr);
12052 md5_init_ctx (&ctx);
12053 fold_checksum_tree (expr, &ctx, &ht);
12054 md5_finish_ctx (&ctx, checksum_after);
12056 if (memcmp (checksum_before, checksum_after, 16))
12057 fold_check_failed (expr, ret);
12059 return ret;
12062 void
12063 print_fold_checksum (const_tree expr)
12065 struct md5_ctx ctx;
12066 unsigned char checksum[16], cnt;
12067 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12069 md5_init_ctx (&ctx);
12070 fold_checksum_tree (expr, &ctx, &ht);
12071 md5_finish_ctx (&ctx, checksum);
12072 for (cnt = 0; cnt < 16; ++cnt)
12073 fprintf (stderr, "%02x", checksum[cnt]);
12074 putc ('\n', stderr);
12077 static void
12078 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12080 internal_error ("fold check: original tree changed by fold");
12083 static void
12084 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12085 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12087 const tree_node **slot;
12088 enum tree_code code;
12089 union tree_node buf;
12090 int i, len;
12092 recursive_label:
12093 if (expr == NULL)
12094 return;
12095 slot = ht->find_slot (expr, INSERT);
12096 if (*slot != NULL)
12097 return;
12098 *slot = expr;
12099 code = TREE_CODE (expr);
12100 if (TREE_CODE_CLASS (code) == tcc_declaration
12101 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12103 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12104 memcpy ((char *) &buf, expr, tree_size (expr));
12105 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12106 buf.decl_with_vis.symtab_node = NULL;
12107 expr = (tree) &buf;
12109 else if (TREE_CODE_CLASS (code) == tcc_type
12110 && (TYPE_POINTER_TO (expr)
12111 || TYPE_REFERENCE_TO (expr)
12112 || TYPE_CACHED_VALUES_P (expr)
12113 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12114 || TYPE_NEXT_VARIANT (expr)))
12116 /* Allow these fields to be modified. */
12117 tree tmp;
12118 memcpy ((char *) &buf, expr, tree_size (expr));
12119 expr = tmp = (tree) &buf;
12120 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12121 TYPE_POINTER_TO (tmp) = NULL;
12122 TYPE_REFERENCE_TO (tmp) = NULL;
12123 TYPE_NEXT_VARIANT (tmp) = NULL;
12124 if (TYPE_CACHED_VALUES_P (tmp))
12126 TYPE_CACHED_VALUES_P (tmp) = 0;
12127 TYPE_CACHED_VALUES (tmp) = NULL;
12130 md5_process_bytes (expr, tree_size (expr), ctx);
12131 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12132 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12133 if (TREE_CODE_CLASS (code) != tcc_type
12134 && TREE_CODE_CLASS (code) != tcc_declaration
12135 && code != TREE_LIST
12136 && code != SSA_NAME
12137 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12138 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12139 switch (TREE_CODE_CLASS (code))
12141 case tcc_constant:
12142 switch (code)
12144 case STRING_CST:
12145 md5_process_bytes (TREE_STRING_POINTER (expr),
12146 TREE_STRING_LENGTH (expr), ctx);
12147 break;
12148 case COMPLEX_CST:
12149 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12150 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12151 break;
12152 case VECTOR_CST:
12153 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12154 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12155 break;
12156 default:
12157 break;
12159 break;
12160 case tcc_exceptional:
12161 switch (code)
12163 case TREE_LIST:
12164 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12165 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12166 expr = TREE_CHAIN (expr);
12167 goto recursive_label;
12168 break;
12169 case TREE_VEC:
12170 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12171 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12172 break;
12173 default:
12174 break;
12176 break;
12177 case tcc_expression:
12178 case tcc_reference:
12179 case tcc_comparison:
12180 case tcc_unary:
12181 case tcc_binary:
12182 case tcc_statement:
12183 case tcc_vl_exp:
12184 len = TREE_OPERAND_LENGTH (expr);
12185 for (i = 0; i < len; ++i)
12186 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12187 break;
12188 case tcc_declaration:
12189 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12190 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12191 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12193 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12194 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12195 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12196 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12197 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12200 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12202 if (TREE_CODE (expr) == FUNCTION_DECL)
12204 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12205 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12207 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12209 break;
12210 case tcc_type:
12211 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12212 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12213 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12214 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12215 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12216 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12217 if (INTEGRAL_TYPE_P (expr)
12218 || SCALAR_FLOAT_TYPE_P (expr))
12220 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12221 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12223 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12224 if (TREE_CODE (expr) == RECORD_TYPE
12225 || TREE_CODE (expr) == UNION_TYPE
12226 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12227 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12228 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12229 break;
12230 default:
12231 break;
12235 /* Helper function for outputting the checksum of a tree T. When
12236 debugging with gdb, you can "define mynext" to be "next" followed
12237 by "call debug_fold_checksum (op0)", then just trace down till the
12238 outputs differ. */
12240 DEBUG_FUNCTION void
12241 debug_fold_checksum (const_tree t)
12243 int i;
12244 unsigned char checksum[16];
12245 struct md5_ctx ctx;
12246 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12248 md5_init_ctx (&ctx);
12249 fold_checksum_tree (t, &ctx, &ht);
12250 md5_finish_ctx (&ctx, checksum);
12251 ht.empty ();
12253 for (i = 0; i < 16; i++)
12254 fprintf (stderr, "%d ", checksum[i]);
12256 fprintf (stderr, "\n");
12259 #endif
12261 /* Fold a unary tree expression with code CODE of type TYPE with an
12262 operand OP0. LOC is the location of the resulting expression.
12263 Return a folded expression if successful. Otherwise, return a tree
12264 expression with code CODE of type TYPE with an operand OP0. */
12266 tree
12267 fold_build1_stat_loc (location_t loc,
12268 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12270 tree tem;
12271 #ifdef ENABLE_FOLD_CHECKING
12272 unsigned char checksum_before[16], checksum_after[16];
12273 struct md5_ctx ctx;
12274 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12276 md5_init_ctx (&ctx);
12277 fold_checksum_tree (op0, &ctx, &ht);
12278 md5_finish_ctx (&ctx, checksum_before);
12279 ht.empty ();
12280 #endif
12282 tem = fold_unary_loc (loc, code, type, op0);
12283 if (!tem)
12284 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12286 #ifdef ENABLE_FOLD_CHECKING
12287 md5_init_ctx (&ctx);
12288 fold_checksum_tree (op0, &ctx, &ht);
12289 md5_finish_ctx (&ctx, checksum_after);
12291 if (memcmp (checksum_before, checksum_after, 16))
12292 fold_check_failed (op0, tem);
12293 #endif
12294 return tem;
12297 /* Fold a binary tree expression with code CODE of type TYPE with
12298 operands OP0 and OP1. LOC is the location of the resulting
12299 expression. Return a folded expression if successful. Otherwise,
12300 return a tree expression with code CODE of type TYPE with operands
12301 OP0 and OP1. */
12303 tree
12304 fold_build2_stat_loc (location_t loc,
12305 enum tree_code code, tree type, tree op0, tree op1
12306 MEM_STAT_DECL)
12308 tree tem;
12309 #ifdef ENABLE_FOLD_CHECKING
12310 unsigned char checksum_before_op0[16],
12311 checksum_before_op1[16],
12312 checksum_after_op0[16],
12313 checksum_after_op1[16];
12314 struct md5_ctx ctx;
12315 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (op0, &ctx, &ht);
12319 md5_finish_ctx (&ctx, checksum_before_op0);
12320 ht.empty ();
12322 md5_init_ctx (&ctx);
12323 fold_checksum_tree (op1, &ctx, &ht);
12324 md5_finish_ctx (&ctx, checksum_before_op1);
12325 ht.empty ();
12326 #endif
12328 tem = fold_binary_loc (loc, code, type, op0, op1);
12329 if (!tem)
12330 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12332 #ifdef ENABLE_FOLD_CHECKING
12333 md5_init_ctx (&ctx);
12334 fold_checksum_tree (op0, &ctx, &ht);
12335 md5_finish_ctx (&ctx, checksum_after_op0);
12336 ht.empty ();
12338 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12339 fold_check_failed (op0, tem);
12341 md5_init_ctx (&ctx);
12342 fold_checksum_tree (op1, &ctx, &ht);
12343 md5_finish_ctx (&ctx, checksum_after_op1);
12345 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12346 fold_check_failed (op1, tem);
12347 #endif
12348 return tem;
12351 /* Fold a ternary tree expression with code CODE of type TYPE with
12352 operands OP0, OP1, and OP2. Return a folded expression if
12353 successful. Otherwise, return a tree expression with code CODE of
12354 type TYPE with operands OP0, OP1, and OP2. */
12356 tree
12357 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12358 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12360 tree tem;
12361 #ifdef ENABLE_FOLD_CHECKING
12362 unsigned char checksum_before_op0[16],
12363 checksum_before_op1[16],
12364 checksum_before_op2[16],
12365 checksum_after_op0[16],
12366 checksum_after_op1[16],
12367 checksum_after_op2[16];
12368 struct md5_ctx ctx;
12369 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12371 md5_init_ctx (&ctx);
12372 fold_checksum_tree (op0, &ctx, &ht);
12373 md5_finish_ctx (&ctx, checksum_before_op0);
12374 ht.empty ();
12376 md5_init_ctx (&ctx);
12377 fold_checksum_tree (op1, &ctx, &ht);
12378 md5_finish_ctx (&ctx, checksum_before_op1);
12379 ht.empty ();
12381 md5_init_ctx (&ctx);
12382 fold_checksum_tree (op2, &ctx, &ht);
12383 md5_finish_ctx (&ctx, checksum_before_op2);
12384 ht.empty ();
12385 #endif
12387 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12388 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12389 if (!tem)
12390 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12392 #ifdef ENABLE_FOLD_CHECKING
12393 md5_init_ctx (&ctx);
12394 fold_checksum_tree (op0, &ctx, &ht);
12395 md5_finish_ctx (&ctx, checksum_after_op0);
12396 ht.empty ();
12398 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12399 fold_check_failed (op0, tem);
12401 md5_init_ctx (&ctx);
12402 fold_checksum_tree (op1, &ctx, &ht);
12403 md5_finish_ctx (&ctx, checksum_after_op1);
12404 ht.empty ();
12406 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12407 fold_check_failed (op1, tem);
12409 md5_init_ctx (&ctx);
12410 fold_checksum_tree (op2, &ctx, &ht);
12411 md5_finish_ctx (&ctx, checksum_after_op2);
12413 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12414 fold_check_failed (op2, tem);
12415 #endif
12416 return tem;
12419 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
12420 arguments in ARGARRAY, and a null static chain.
12421 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12422 of type TYPE from the given operands as constructed by build_call_array. */
12424 tree
12425 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12426 int nargs, tree *argarray)
12428 tree tem;
12429 #ifdef ENABLE_FOLD_CHECKING
12430 unsigned char checksum_before_fn[16],
12431 checksum_before_arglist[16],
12432 checksum_after_fn[16],
12433 checksum_after_arglist[16];
12434 struct md5_ctx ctx;
12435 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12436 int i;
12438 md5_init_ctx (&ctx);
12439 fold_checksum_tree (fn, &ctx, &ht);
12440 md5_finish_ctx (&ctx, checksum_before_fn);
12441 ht.empty ();
12443 md5_init_ctx (&ctx);
12444 for (i = 0; i < nargs; i++)
12445 fold_checksum_tree (argarray[i], &ctx, &ht);
12446 md5_finish_ctx (&ctx, checksum_before_arglist);
12447 ht.empty ();
12448 #endif
12450 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12451 if (!tem)
12452 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12454 #ifdef ENABLE_FOLD_CHECKING
12455 md5_init_ctx (&ctx);
12456 fold_checksum_tree (fn, &ctx, &ht);
12457 md5_finish_ctx (&ctx, checksum_after_fn);
12458 ht.empty ();
12460 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12461 fold_check_failed (fn, tem);
12463 md5_init_ctx (&ctx);
12464 for (i = 0; i < nargs; i++)
12465 fold_checksum_tree (argarray[i], &ctx, &ht);
12466 md5_finish_ctx (&ctx, checksum_after_arglist);
12468 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12469 fold_check_failed (NULL_TREE, tem);
12470 #endif
12471 return tem;
12474 /* Perform constant folding and related simplification of initializer
12475 expression EXPR. These behave identically to "fold_buildN" but ignore
12476 potential run-time traps and exceptions that fold must preserve. */
12478 #define START_FOLD_INIT \
12479 int saved_signaling_nans = flag_signaling_nans;\
12480 int saved_trapping_math = flag_trapping_math;\
12481 int saved_rounding_math = flag_rounding_math;\
12482 int saved_trapv = flag_trapv;\
12483 int saved_folding_initializer = folding_initializer;\
12484 flag_signaling_nans = 0;\
12485 flag_trapping_math = 0;\
12486 flag_rounding_math = 0;\
12487 flag_trapv = 0;\
12488 folding_initializer = 1;
12490 #define END_FOLD_INIT \
12491 flag_signaling_nans = saved_signaling_nans;\
12492 flag_trapping_math = saved_trapping_math;\
12493 flag_rounding_math = saved_rounding_math;\
12494 flag_trapv = saved_trapv;\
12495 folding_initializer = saved_folding_initializer;
12497 tree
12498 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12499 tree type, tree op)
12501 tree result;
12502 START_FOLD_INIT;
12504 result = fold_build1_loc (loc, code, type, op);
12506 END_FOLD_INIT;
12507 return result;
12510 tree
12511 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12512 tree type, tree op0, tree op1)
12514 tree result;
12515 START_FOLD_INIT;
12517 result = fold_build2_loc (loc, code, type, op0, op1);
12519 END_FOLD_INIT;
12520 return result;
12523 tree
12524 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12525 int nargs, tree *argarray)
12527 tree result;
12528 START_FOLD_INIT;
12530 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12532 END_FOLD_INIT;
12533 return result;
12536 #undef START_FOLD_INIT
12537 #undef END_FOLD_INIT
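/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: 1.0/3.0 is inexact, so with flag_rounding_math
   set, plain fold_build2_loc must preserve the division; the
   initializer variant clears the flag around the fold and can return
   a REAL_CST.  */

static tree ATTRIBUTE_UNUSED
example_fold_initializer_division (location_t loc)
{
  REAL_VALUE_TYPE r;
  real_from_integer (&r, VOIDmode, 3, SIGNED);
  tree one = build_real (double_type_node, dconst1);
  tree three = build_real (double_type_node, r);
  return fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
				      one, three);
}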
12539 /* Determine if first argument is a multiple of second argument. Return 0 if
12540 it is not, or we cannot easily determine it to be.
12542 An example of the sort of thing we care about (at this point; this routine
12543 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12544 fold cases do now) is discovering that
12546 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12548 is a multiple of
12550 SAVE_EXPR (J * 8)
12552 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12554 This code also handles discovering that
12556 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12558 is a multiple of 8 so we don't have to worry about dealing with a
12559 possible remainder.
12561 Note that we *look* inside a SAVE_EXPR only to determine how it was
12562 calculated; it is not safe for fold to do much of anything else with the
12563 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12564 at run time. For example, the latter example above *cannot* be implemented
12565 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12566 evaluation time of the original SAVE_EXPR is not necessarily the same at
12567 the time the new expression is evaluated. The only optimization of this
12568 sort that would be valid is changing
12570 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12572 divided by 8 to
12574 SAVE_EXPR (I) * SAVE_EXPR (J)
12576 (where the same SAVE_EXPR (J) is used in the original and the
12577 transformed version). */
12580 multiple_of_p (tree type, const_tree top, const_tree bottom)
12582 if (operand_equal_p (top, bottom, 0))
12583 return 1;
12585 if (TREE_CODE (type) != INTEGER_TYPE)
12586 return 0;
12588 switch (TREE_CODE (top))
12590 case BIT_AND_EXPR:
12591 /* Bitwise and provides a power of two multiple. If the mask is
12592 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12593 if (!integer_pow2p (bottom))
12594 return 0;
12595 /* FALLTHRU */
12597 case MULT_EXPR:
12598 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12599 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12601 case PLUS_EXPR:
12602 case MINUS_EXPR:
12603 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12604 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12606 case LSHIFT_EXPR:
12607 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12609 tree op1, t1;
12611 op1 = TREE_OPERAND (top, 1);
12612 /* const_binop may not detect overflow correctly,
12613 so check for it explicitly here. */
12614 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12615 && 0 != (t1 = fold_convert (type,
12616 const_binop (LSHIFT_EXPR,
12617 size_one_node,
12618 op1)))
12619 && !TREE_OVERFLOW (t1))
12620 return multiple_of_p (type, t1, bottom);
12622 return 0;
12624 case NOP_EXPR:
12625 /* Can't handle conversions from non-integral or wider integral type. */
12626 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12627 || (TYPE_PRECISION (type)
12628 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12629 return 0;
12631 /* ... fall through ...  */
12633 case SAVE_EXPR:
12634 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12636 case COND_EXPR:
12637 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12638 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12640 case INTEGER_CST:
12641 if (TREE_CODE (bottom) != INTEGER_CST
12642 || integer_zerop (bottom)
12643 || (TYPE_UNSIGNED (type)
12644 && (tree_int_cst_sgn (top) < 0
12645 || tree_int_cst_sgn (bottom) < 0)))
12646 return 0;
12647 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12648 SIGNED);
12650 default:
12651 return 0;
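/* A hypothetical query sketch (not part of fold-const.c), added for
   illustration only, mirroring the comment above: TOP is I * (J * 8)
   and BOTTOM is the very same J * 8 tree, so the MULT_EXPR case
   succeeds by operand equality without knowing J's value.  I and J
   are assumed to be sizetype trees that fold does not simplify
   further.  */

static bool ATTRIBUTE_UNUSED
example_multiple_query (tree i, tree j)
{
  tree j8 = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR, sizetype,
			     j, size_int (8));
  tree top = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR, sizetype,
			      i, j8);
  return multiple_of_p (sizetype, top, j8);
}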
12655 #define tree_expr_nonnegative_warnv_p(X, Y) \
12656 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12658 #define RECURSE(X) \
12659 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12661 /* Return true if CODE or TYPE is known to be non-negative. */
12663 static bool
12664 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12666 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12667 && truth_value_p (code))
12668 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12669 have a signed:1 type (where the values are -1 and 0).  */
12670 return true;
12671 return false;
12674 /* Return true if (CODE OP0) is known to be non-negative. If the return
12675 value is based on the assumption that signed overflow is undefined,
12676 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12677 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12679 bool
12680 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12681 bool *strict_overflow_p, int depth)
12683 if (TYPE_UNSIGNED (type))
12684 return true;
12686 switch (code)
12688 case ABS_EXPR:
12689 /* We can't return 1 if flag_wrapv is set because
12690 ABS_EXPR<INT_MIN> = INT_MIN. */
12691 if (!ANY_INTEGRAL_TYPE_P (type))
12692 return true;
12693 if (TYPE_OVERFLOW_UNDEFINED (type))
12695 *strict_overflow_p = true;
12696 return true;
12698 break;
12700 case NON_LVALUE_EXPR:
12701 case FLOAT_EXPR:
12702 case FIX_TRUNC_EXPR:
12703 return RECURSE (op0);
12705 CASE_CONVERT:
12707 tree inner_type = TREE_TYPE (op0);
12708 tree outer_type = type;
12710 if (TREE_CODE (outer_type) == REAL_TYPE)
12712 if (TREE_CODE (inner_type) == REAL_TYPE)
12713 return RECURSE (op0);
12714 if (INTEGRAL_TYPE_P (inner_type))
12716 if (TYPE_UNSIGNED (inner_type))
12717 return true;
12718 return RECURSE (op0);
12721 else if (INTEGRAL_TYPE_P (outer_type))
12723 if (TREE_CODE (inner_type) == REAL_TYPE)
12724 return RECURSE (op0);
12725 if (INTEGRAL_TYPE_P (inner_type))
12726 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12727 && TYPE_UNSIGNED (inner_type);
12730 break;
12732 default:
12733 return tree_simple_nonnegative_warnv_p (code, type);
12736 /* We don't know the sign of `t', so be conservative and return false. */
12737 return false;
12740 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12741 value is based on the assumption that signed overflow is undefined,
12742 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12743 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12745 bool
12746 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12747 tree op1, bool *strict_overflow_p,
12748 int depth)
12750 if (TYPE_UNSIGNED (type))
12751 return true;
12753 switch (code)
12755 case POINTER_PLUS_EXPR:
12756 case PLUS_EXPR:
12757 if (FLOAT_TYPE_P (type))
12758 return RECURSE (op0) && RECURSE (op1);
12760 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12761 both unsigned and at least 2 bits shorter than the result. */
12762 if (TREE_CODE (type) == INTEGER_TYPE
12763 && TREE_CODE (op0) == NOP_EXPR
12764 && TREE_CODE (op1) == NOP_EXPR)
12766 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12767 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12768 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12769 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12771 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12772 TYPE_PRECISION (inner2)) + 1;
12773 return prec < TYPE_PRECISION (type);
12776 break;
12778 case MULT_EXPR:
12779 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12781 /* x * x is always non-negative for floating point x
12782 or in the absence of overflow.  */
12783 if (operand_equal_p (op0, op1, 0)
12784 || (RECURSE (op0) && RECURSE (op1)))
12786 if (ANY_INTEGRAL_TYPE_P (type)
12787 && TYPE_OVERFLOW_UNDEFINED (type))
12788 *strict_overflow_p = true;
12789 return true;
12793 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12794 both unsigned and their combined width is less than the result's.  */
12795 if (TREE_CODE (type) == INTEGER_TYPE
12796 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12797 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12799 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12800 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12801 : TREE_TYPE (op0);
12802 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12803 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12804 : TREE_TYPE (op1);
12806 bool unsigned0 = TYPE_UNSIGNED (inner0);
12807 bool unsigned1 = TYPE_UNSIGNED (inner1);
12809 if (TREE_CODE (op0) == INTEGER_CST)
12810 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12812 if (TREE_CODE (op1) == INTEGER_CST)
12813 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12815 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12816 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12818 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12819 ? tree_int_cst_min_precision (op0, UNSIGNED)
12820 : TYPE_PRECISION (inner0);
12822 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12823 ? tree_int_cst_min_precision (op1, UNSIGNED)
12824 : TYPE_PRECISION (inner1);
12826 return precision0 + precision1 < TYPE_PRECISION (type);
12829 return false;
12831 case BIT_AND_EXPR:
12832 case MAX_EXPR:
12833 return RECURSE (op0) || RECURSE (op1);
12835 case BIT_IOR_EXPR:
12836 case BIT_XOR_EXPR:
12837 case MIN_EXPR:
12838 case RDIV_EXPR:
12839 case TRUNC_DIV_EXPR:
12840 case CEIL_DIV_EXPR:
12841 case FLOOR_DIV_EXPR:
12842 case ROUND_DIV_EXPR:
12843 return RECURSE (op0) && RECURSE (op1);
12845 case TRUNC_MOD_EXPR:
12846 return RECURSE (op0);
12848 case FLOOR_MOD_EXPR:
12849 return RECURSE (op1);
12851 case CEIL_MOD_EXPR:
12852 case ROUND_MOD_EXPR:
12853 default:
12854 return tree_simple_nonnegative_warnv_p (code, type);
12857 /* We don't know the sign of `t', so be conservative and return false. */
12858 return false;
12861 /* Return true if T is known to be non-negative. If the return
12862 value is based on the assumption that signed overflow is undefined,
12863 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12864 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12866 bool
12867 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12869 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12870 return true;
12872 switch (TREE_CODE (t))
12874 case INTEGER_CST:
12875 return tree_int_cst_sgn (t) >= 0;
12877 case REAL_CST:
12878 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12880 case FIXED_CST:
12881 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12883 case COND_EXPR:
12884 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12886 case SSA_NAME:
12887 /* Limit the depth of recursion to avoid quadratic behavior.
12888 This is expected to catch almost all occurrences in practice.
12889 If this code misses important cases that unbounded recursion
12890 would not, passes that need this information could be revised
12891 to provide it through dataflow propagation. */
12892 return (!name_registered_for_update_p (t)
12893 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12894 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12895 strict_overflow_p, depth));
12897 default:
12898 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12902 /* Return true if T is known to be non-negative. If the return
12903 value is based on the assumption that signed overflow is undefined,
12904 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12905 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12907 bool
12908 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
12909 bool *strict_overflow_p, int depth)
12911 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12912 switch (DECL_FUNCTION_CODE (fndecl))
12914 CASE_FLT_FN (BUILT_IN_ACOS):
12915 CASE_FLT_FN (BUILT_IN_ACOSH):
12916 CASE_FLT_FN (BUILT_IN_CABS):
12917 CASE_FLT_FN (BUILT_IN_COSH):
12918 CASE_FLT_FN (BUILT_IN_ERFC):
12919 CASE_FLT_FN (BUILT_IN_EXP):
12920 CASE_FLT_FN (BUILT_IN_EXP10):
12921 CASE_FLT_FN (BUILT_IN_EXP2):
12922 CASE_FLT_FN (BUILT_IN_FABS):
12923 CASE_FLT_FN (BUILT_IN_FDIM):
12924 CASE_FLT_FN (BUILT_IN_HYPOT):
12925 CASE_FLT_FN (BUILT_IN_POW10):
12926 CASE_INT_FN (BUILT_IN_FFS):
12927 CASE_INT_FN (BUILT_IN_PARITY):
12928 CASE_INT_FN (BUILT_IN_POPCOUNT):
12929 CASE_INT_FN (BUILT_IN_CLZ):
12930 CASE_INT_FN (BUILT_IN_CLRSB):
12931 case BUILT_IN_BSWAP32:
12932 case BUILT_IN_BSWAP64:
12933 /* Always true. */
12934 return true;
12936 CASE_FLT_FN (BUILT_IN_SQRT):
12937 /* sqrt(-0.0) is -0.0. */
12938 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12939 return true;
12940 return RECURSE (arg0);
12942 CASE_FLT_FN (BUILT_IN_ASINH):
12943 CASE_FLT_FN (BUILT_IN_ATAN):
12944 CASE_FLT_FN (BUILT_IN_ATANH):
12945 CASE_FLT_FN (BUILT_IN_CBRT):
12946 CASE_FLT_FN (BUILT_IN_CEIL):
12947 CASE_FLT_FN (BUILT_IN_ERF):
12948 CASE_FLT_FN (BUILT_IN_EXPM1):
12949 CASE_FLT_FN (BUILT_IN_FLOOR):
12950 CASE_FLT_FN (BUILT_IN_FMOD):
12951 CASE_FLT_FN (BUILT_IN_FREXP):
12952 CASE_FLT_FN (BUILT_IN_ICEIL):
12953 CASE_FLT_FN (BUILT_IN_IFLOOR):
12954 CASE_FLT_FN (BUILT_IN_IRINT):
12955 CASE_FLT_FN (BUILT_IN_IROUND):
12956 CASE_FLT_FN (BUILT_IN_LCEIL):
12957 CASE_FLT_FN (BUILT_IN_LDEXP):
12958 CASE_FLT_FN (BUILT_IN_LFLOOR):
12959 CASE_FLT_FN (BUILT_IN_LLCEIL):
12960 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12961 CASE_FLT_FN (BUILT_IN_LLRINT):
12962 CASE_FLT_FN (BUILT_IN_LLROUND):
12963 CASE_FLT_FN (BUILT_IN_LRINT):
12964 CASE_FLT_FN (BUILT_IN_LROUND):
12965 CASE_FLT_FN (BUILT_IN_MODF):
12966 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12967 CASE_FLT_FN (BUILT_IN_RINT):
12968 CASE_FLT_FN (BUILT_IN_ROUND):
12969 CASE_FLT_FN (BUILT_IN_SCALB):
12970 CASE_FLT_FN (BUILT_IN_SCALBLN):
12971 CASE_FLT_FN (BUILT_IN_SCALBN):
12972 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12973 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
12974 CASE_FLT_FN (BUILT_IN_SINH):
12975 CASE_FLT_FN (BUILT_IN_TANH):
12976 CASE_FLT_FN (BUILT_IN_TRUNC):
12977 /* True if the 1st argument is nonnegative. */
12978 return RECURSE (arg0);
12980 CASE_FLT_FN (BUILT_IN_FMAX):
12981 /* True if the 1st OR 2nd arguments are nonnegative. */
12982 return RECURSE (arg0) || RECURSE (arg1);
12984 CASE_FLT_FN (BUILT_IN_FMIN):
12985 /* True if the 1st AND 2nd arguments are nonnegative. */
12986 return RECURSE (arg0) && RECURSE (arg1);
12988 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12989 /* True if the 2nd argument is nonnegative. */
12990 return RECURSE (arg1);
12992 CASE_FLT_FN (BUILT_IN_POWI):
12993 /* True if the 1st argument is nonnegative or the second
12994 argument is an even integer. */
12995 if (TREE_CODE (arg1) == INTEGER_CST
12996 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12997 return true;
12998 return RECURSE (arg0);
13000 CASE_FLT_FN (BUILT_IN_POW):
13001 /* True if the 1st argument is nonnegative or the second
13002 argument is an even integer-valued real.  */
13003 if (TREE_CODE (arg1) == REAL_CST)
13005 REAL_VALUE_TYPE c;
13006 HOST_WIDE_INT n;
13008 c = TREE_REAL_CST (arg1);
13009 n = real_to_integer (&c);
13010 if ((n & 1) == 0)
13012 REAL_VALUE_TYPE cint;
13013 real_from_integer (&cint, VOIDmode, n, SIGNED);
13014 if (real_identical (&c, &cint))
13015 return true;
13018 return RECURSE (arg0);
13020 default:
13021 break;
13023 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
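/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: pow (x, 2.0) is recognized as non-negative for
   any X because the REAL_CST exponent is an even integer, exercising
   the BUILT_IN_POW case above.  */

static bool ATTRIBUTE_UNUSED
example_pow_nonnegative (tree x)
{
  tree powfn = builtin_decl_explicit (BUILT_IN_POW);
  tree two = build_real (double_type_node, dconst2);
  return tree_expr_nonnegative_p (build_call_expr (powfn, 2, x, two));
}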
13026 /* Return true if T is known to be non-negative. If the return
13027 value is based on the assumption that signed overflow is undefined,
13028 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13029 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13031 static bool
13032 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13034 enum tree_code code = TREE_CODE (t);
13035 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13036 return true;
13038 switch (code)
13040 case TARGET_EXPR:
13042 tree temp = TARGET_EXPR_SLOT (t);
13043 t = TARGET_EXPR_INITIAL (t);
13045 /* If the initializer is non-void, then it's a normal expression
13046 that will be assigned to the slot. */
13047 if (!VOID_TYPE_P (t))
13048 return RECURSE (t);
13050 /* Otherwise, the initializer sets the slot in some way. One common
13051 way is an assignment statement at the end of the initializer. */
13052 while (1)
13054 if (TREE_CODE (t) == BIND_EXPR)
13055 t = expr_last (BIND_EXPR_BODY (t));
13056 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13057 || TREE_CODE (t) == TRY_CATCH_EXPR)
13058 t = expr_last (TREE_OPERAND (t, 0));
13059 else if (TREE_CODE (t) == STATEMENT_LIST)
13060 t = expr_last (t);
13061 else
13062 break;
13064 if (TREE_CODE (t) == MODIFY_EXPR
13065 && TREE_OPERAND (t, 0) == temp)
13066 return RECURSE (TREE_OPERAND (t, 1));
13068 return false;
13071 case CALL_EXPR:
13073 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13074 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13076 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13077 get_callee_fndecl (t),
13078 arg0,
13079 arg1,
13080 strict_overflow_p, depth);
13082 case COMPOUND_EXPR:
13083 case MODIFY_EXPR:
13084 return RECURSE (TREE_OPERAND (t, 1));
13086 case BIND_EXPR:
13087 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13089 case SAVE_EXPR:
13090 return RECURSE (TREE_OPERAND (t, 0));
13092 default:
13093 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13097 #undef RECURSE
13098 #undef tree_expr_nonnegative_warnv_p
13100 /* Return true if T is known to be non-negative. If the return
13101 value is based on the assumption that signed overflow is undefined,
13102 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13103 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13105 bool
13106 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13108 enum tree_code code;
13109 if (t == error_mark_node)
13110 return false;
13112 code = TREE_CODE (t);
13113 switch (TREE_CODE_CLASS (code))
13115 case tcc_binary:
13116 case tcc_comparison:
13117 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13118 TREE_TYPE (t),
13119 TREE_OPERAND (t, 0),
13120 TREE_OPERAND (t, 1),
13121 strict_overflow_p, depth);
13123 case tcc_unary:
13124 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13125 TREE_TYPE (t),
13126 TREE_OPERAND (t, 0),
13127 strict_overflow_p, depth);
13129 case tcc_constant:
13130 case tcc_declaration:
13131 case tcc_reference:
13132 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13134 default:
13135 break;
13138 switch (code)
13140 case TRUTH_AND_EXPR:
13141 case TRUTH_OR_EXPR:
13142 case TRUTH_XOR_EXPR:
13143 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13144 TREE_TYPE (t),
13145 TREE_OPERAND (t, 0),
13146 TREE_OPERAND (t, 1),
13147 strict_overflow_p, depth);
13148 case TRUTH_NOT_EXPR:
13149 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13150 TREE_TYPE (t),
13151 TREE_OPERAND (t, 0),
13152 strict_overflow_p, depth);
13154 case COND_EXPR:
13155 case CONSTRUCTOR:
13156 case OBJ_TYPE_REF:
13157 case ASSERT_EXPR:
13158 case ADDR_EXPR:
13159 case WITH_SIZE_EXPR:
13160 case SSA_NAME:
13161 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13163 default:
13164 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13168 /* Return true if `t' is known to be non-negative. Handle warnings
13169 about undefined signed overflow. */
13171 bool
13172 tree_expr_nonnegative_p (tree t)
13174 bool ret, strict_overflow_p;
13176 strict_overflow_p = false;
13177 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13178 if (strict_overflow_p)
13179 fold_overflow_warning (("assuming signed overflow does not occur when "
13180 "determining that expression is always "
13181 "non-negative"),
13182 WARN_STRICT_OVERFLOW_MISC);
13183 return ret;
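/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: INTEGER_CST queries need no overflow assumption,
   so the entry point above issues no -Wstrict-overflow warning for
   them.  */

static void ATTRIBUTE_UNUSED
example_nonnegative_queries (void)
{
  gcc_assert (tree_expr_nonnegative_p
		(build_int_cst (integer_type_node, 42)));
  gcc_assert (!tree_expr_nonnegative_p
		(build_int_cst (integer_type_node, -7)));
}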
13187 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13188 For floating point we further ensure that T is not denormal.
13189 Similar logic is present in nonzero_address in rtlanal.c.
13191 If the return value is based on the assumption that signed overflow
13192 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13193 change *STRICT_OVERFLOW_P. */
13195 bool
13196 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13197 bool *strict_overflow_p)
13199 switch (code)
13201 case ABS_EXPR:
13202 return tree_expr_nonzero_warnv_p (op0,
13203 strict_overflow_p);
13205 case NOP_EXPR:
13207 tree inner_type = TREE_TYPE (op0);
13208 tree outer_type = type;
13210 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13211 && tree_expr_nonzero_warnv_p (op0,
13212 strict_overflow_p));
13214 break;
13216 case NON_LVALUE_EXPR:
13217 return tree_expr_nonzero_warnv_p (op0,
13218 strict_overflow_p);
13220 default:
13221 break;
13224 return false;
13227 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13228 For floating point we further ensure that T is not denormal.
13229 Similar logic is present in nonzero_address in rtlanal.c.
13231 If the return value is based on the assumption that signed overflow
13232 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13233 change *STRICT_OVERFLOW_P. */
13235 bool
13236 tree_binary_nonzero_warnv_p (enum tree_code code,
13237 tree type,
13238 tree op0,
13239 tree op1, bool *strict_overflow_p)
13241 bool sub_strict_overflow_p;
13242 switch (code)
13244 case POINTER_PLUS_EXPR:
13245 case PLUS_EXPR:
13246 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13248 /* In the presence of negative values it is hard
13249 to say anything definite.  */
13250 sub_strict_overflow_p = false;
13251 if (!tree_expr_nonnegative_warnv_p (op0,
13252 &sub_strict_overflow_p)
13253 || !tree_expr_nonnegative_warnv_p (op1,
13254 &sub_strict_overflow_p))
13255 return false;
13256 /* One of the operands must be positive and the other non-negative.  */
13257 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13258 overflows, on a twos-complement machine the sum of two
13259 nonnegative numbers can never be zero. */
13260 return (tree_expr_nonzero_warnv_p (op0,
13261 strict_overflow_p)
13262 || tree_expr_nonzero_warnv_p (op1,
13263 strict_overflow_p));
13265 break;
13267 case MULT_EXPR:
13268 if (TYPE_OVERFLOW_UNDEFINED (type))
13270 if (tree_expr_nonzero_warnv_p (op0,
13271 strict_overflow_p)
13272 && tree_expr_nonzero_warnv_p (op1,
13273 strict_overflow_p))
13275 *strict_overflow_p = true;
13276 return true;
13279 break;
13281 case MIN_EXPR:
13282 sub_strict_overflow_p = false;
13283 if (tree_expr_nonzero_warnv_p (op0,
13284 &sub_strict_overflow_p)
13285 && tree_expr_nonzero_warnv_p (op1,
13286 &sub_strict_overflow_p))
13288 if (sub_strict_overflow_p)
13289 *strict_overflow_p = true;
13291 break;
13293 case MAX_EXPR:
13294 sub_strict_overflow_p = false;
13295 if (tree_expr_nonzero_warnv_p (op0,
13296 &sub_strict_overflow_p))
13298 if (sub_strict_overflow_p)
13299 *strict_overflow_p = true;
13301 /* When both operands are nonzero, then MAX must be too. */
13302 if (tree_expr_nonzero_warnv_p (op1,
13303 strict_overflow_p))
13304 return true;
13306 /* MAX where operand 0 is positive is positive. */
13307 return tree_expr_nonnegative_warnv_p (op0,
13308 strict_overflow_p);
13310 /* MAX where operand 1 is positive is positive. */
13311 else if (tree_expr_nonzero_warnv_p (op1,
13312 &sub_strict_overflow_p)
13313 && tree_expr_nonnegative_warnv_p (op1,
13314 &sub_strict_overflow_p))
13316 if (sub_strict_overflow_p)
13317 *strict_overflow_p = true;
13318 return true;
13320 break;
13322 case BIT_IOR_EXPR:
13323 return (tree_expr_nonzero_warnv_p (op1,
13324 strict_overflow_p)
13325 || tree_expr_nonzero_warnv_p (op0,
13326 strict_overflow_p));
13328 default:
13329 break;
13332 return false;
13335 /* Return true when T is an address and is known to be nonzero.
13336 For floating point we further ensure that T is not denormal.
13337 Similar logic is present in nonzero_address in rtlanal.c.
13339 If the return value is based on the assumption that signed overflow
13340 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13341 change *STRICT_OVERFLOW_P. */
13343 bool
13344 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13346 bool sub_strict_overflow_p;
13347 switch (TREE_CODE (t))
13349 case INTEGER_CST:
13350 return !integer_zerop (t);
13352 case ADDR_EXPR:
13354 tree base = TREE_OPERAND (t, 0);
13356 if (!DECL_P (base))
13357 base = get_base_address (base);
13359 if (!base)
13360 return false;
13362 /* For objects in symbol table check if we know they are non-zero.
13363 Don't do anything for variables and functions before symtab is built;
13364 it is quite possible that they will be declared weak later. */
13365 if (DECL_P (base) && decl_in_symtab_p (base))
13367 struct symtab_node *symbol;
13369 symbol = symtab_node::get_create (base);
13370 if (symbol)
13371 return symbol->nonzero_address ();
13372 else
13373 return false;
13376 /* Function local objects are never NULL. */
13377 if (DECL_P (base)
13378 && (DECL_CONTEXT (base)
13379 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13380 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13381 return true;
13383 /* Constants are never weak. */
13384 if (CONSTANT_CLASS_P (base))
13385 return true;
13387 return false;
13390 case COND_EXPR:
13391 sub_strict_overflow_p = false;
13392 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13393 &sub_strict_overflow_p)
13394 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13395 &sub_strict_overflow_p))
13397 if (sub_strict_overflow_p)
13398 *strict_overflow_p = true;
13399 return true;
13401 break;
13403 default:
13404 break;
13406 return false;
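/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: DECL is assumed to be an auto variable of the
   current function, so its address takes the "function local objects
   are never NULL" path in the ADDR_EXPR case above.  */

static bool ATTRIBUTE_UNUSED
example_local_addr_nonzero (tree decl)
{
  bool strict_ovf = false;
  return tree_single_nonzero_warnv_p (build_fold_addr_expr (decl),
				      &strict_ovf);
}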
13409 #define integer_valued_real_p(X) \
13410 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13412 #define RECURSE(X) \
13413 ((integer_valued_real_p) (X, depth + 1))
13415 /* Return true if the floating point result of (CODE OP0) has an
13416 integer value. We also allow +Inf, -Inf and NaN to be considered
13417 integer values.
13419 DEPTH is the current nesting depth of the query. */
13421 bool
13422 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13424 switch (code)
13426 case FLOAT_EXPR:
13427 return true;
13429 case ABS_EXPR:
13430 return RECURSE (op0);
13432 CASE_CONVERT:
13434 tree type = TREE_TYPE (op0);
13435 if (TREE_CODE (type) == INTEGER_TYPE)
13436 return true;
13437 if (TREE_CODE (type) == REAL_TYPE)
13438 return RECURSE (op0);
13439 break;
13442 default:
13443 break;
13445 return false;
13448 /* Return true if the floating point result of (CODE OP0 OP1) has an
13449 integer value. We also allow +Inf, -Inf and NaN to be considered
13450 integer values.
13452 DEPTH is the current nesting depth of the query. */
13454 bool
13455 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13457 switch (code)
13459 case PLUS_EXPR:
13460 case MINUS_EXPR:
13461 case MULT_EXPR:
13462 case MIN_EXPR:
13463 case MAX_EXPR:
13464 return RECURSE (op0) && RECURSE (op1);
13466 default:
13467 break;
13469 return false;
13472 /* Return true if the floating point result of calling FNDECL with arguments
13473 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13474 considered integer values. If FNDECL takes fewer than 2 arguments,
13475 the remaining ARGn are null.
13477 DEPTH is the current nesting depth of the query. */
13479 bool
13480 integer_valued_real_call_p (tree fndecl, tree arg0, tree arg1, int depth)
13482 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13483 switch (DECL_FUNCTION_CODE (fndecl))
13485 CASE_FLT_FN (BUILT_IN_CEIL):
13486 CASE_FLT_FN (BUILT_IN_FLOOR):
13487 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13488 CASE_FLT_FN (BUILT_IN_RINT):
13489 CASE_FLT_FN (BUILT_IN_ROUND):
13490 CASE_FLT_FN (BUILT_IN_TRUNC):
13491 return true;
13493 CASE_FLT_FN (BUILT_IN_FMIN):
13494 CASE_FLT_FN (BUILT_IN_FMAX):
13495 return RECURSE (arg0) && RECURSE (arg1);
13497 default:
13498 break;
13500 return false;
13503 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13504 has an integer value. We also allow +Inf, -Inf and NaN to be
13505 considered integer values.
13507 DEPTH is the current nesting depth of the query. */
13509 bool
13510 integer_valued_real_single_p (tree t, int depth)
13512 switch (TREE_CODE (t))
13514 case REAL_CST:
13515 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13517 case COND_EXPR:
13518 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13520 case SSA_NAME:
13521 /* Limit the depth of recursion to avoid quadratic behavior.
13522 This is expected to catch almost all occurrences in practice.
13523 If this code misses important cases that unbounded recursion
13524 would not, passes that need this information could be revised
13525 to provide it through dataflow propagation. */
13526 return (!name_registered_for_update_p (t)
13527 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13528 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13529 depth));
13531 default:
13532 break;
13534 return false;
13537 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13538 has an integer value. We also allow +Inf, -Inf and NaN to be
13539 considered integer values.
13541 DEPTH is the current nesting depth of the query. */
13543 static bool
13544 integer_valued_real_invalid_p (tree t, int depth)
13546 switch (TREE_CODE (t))
13548 case COMPOUND_EXPR:
13549 case MODIFY_EXPR:
13550 case BIND_EXPR:
13551 return RECURSE (TREE_OPERAND (t, 1));
13553 case SAVE_EXPR:
13554 return RECURSE (TREE_OPERAND (t, 0));
13556 default:
13557 break;
13559 return false;
13562 #undef RECURSE
13563 #undef integer_valued_real_p
13565 /* Return true if the floating point expression T has an integer value.
13566 We also allow +Inf, -Inf and NaN to be considered integer values.
13568 DEPTH is the current nesting depth of the query. */
13570 bool
13571 integer_valued_real_p (tree t, int depth)
13573 if (t == error_mark_node)
13574 return false;
13576 tree_code code = TREE_CODE (t);
13577 switch (TREE_CODE_CLASS (code))
13579 case tcc_binary:
13580 case tcc_comparison:
13581 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13582 TREE_OPERAND (t, 1), depth);
13584 case tcc_unary:
13585 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13587 case tcc_constant:
13588 case tcc_declaration:
13589 case tcc_reference:
13590 return integer_valued_real_single_p (t, depth);
13592 default:
13593 break;
13596 switch (code)
13598 case COND_EXPR:
13599 case SSA_NAME:
13600 return integer_valued_real_single_p (t, depth);
13602 case CALL_EXPR:
13604 tree arg0 = (call_expr_nargs (t) > 0
13605 ? CALL_EXPR_ARG (t, 0)
13606 : NULL_TREE);
13607 tree arg1 = (call_expr_nargs (t) > 1
13608 ? CALL_EXPR_ARG (t, 1)
13609 : NULL_TREE);
13610 return integer_valued_real_call_p (get_callee_fndecl (t),
13611 arg0, arg1, depth);
13614 default:
13615 return integer_valued_real_invalid_p (t, depth);
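/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: REAL_CST queries against the entry point above.
   2.0 is integer valued; 0.5 is not.  */

static void ATTRIBUTE_UNUSED
example_integer_valued_queries (void)
{
  gcc_assert (integer_valued_real_p
		(build_real (double_type_node, dconst2), 0));
  gcc_assert (!integer_valued_real_p
		(build_real (double_type_node, dconsthalf), 0));
}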
13619 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13620 attempt to fold the expression to a constant without modifying TYPE,
13621 OP0 or OP1.
13623 If the expression could be simplified to a constant, then return
13624 the constant. If the expression would not be simplified to a
13625 constant, then return NULL_TREE. */
13627 tree
13628 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13630 tree tem = fold_binary (code, type, op0, op1);
13631 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13634 /* Given the components of a unary expression CODE, TYPE and OP0,
13635 attempt to fold the expression to a constant without modifying
13636 TYPE or OP0.
13638 If the expression could be simplified to a constant, then return
13639 the constant. If the expression would not be simplified to a
13640 constant, then return NULL_TREE. */
13642 tree
13643 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13645 tree tem = fold_unary (code, type, op0);
13646 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
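/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: the helpers above return either a constant or
   NULL_TREE, never a partially simplified tree.  */

static void ATTRIBUTE_UNUSED
example_fold_to_constant (void)
{
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				      build_int_cst (integer_type_node, 2),
				      build_int_cst (integer_type_node, 3));
  gcc_assert (sum && tree_to_shwi (sum) == 5);
}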
13649 /* If EXP represents referencing an element in a constant string
13650 (either via pointer arithmetic or array indexing), return the
13651 tree representing the value accessed, otherwise return NULL. */
13653 tree
13654 fold_read_from_constant_string (tree exp)
13656 if ((TREE_CODE (exp) == INDIRECT_REF
13657 || TREE_CODE (exp) == ARRAY_REF)
13658 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13660 tree exp1 = TREE_OPERAND (exp, 0);
13661 tree index;
13662 tree string;
13663 location_t loc = EXPR_LOCATION (exp);
13665 if (TREE_CODE (exp) == INDIRECT_REF)
13666 string = string_constant (exp1, &index);
13667 else
13669 tree low_bound = array_ref_low_bound (exp);
13670 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13672 /* Optimize the special-case of a zero lower bound.
13674 We convert the low_bound to sizetype to avoid some problems
13675 with constant folding. (E.g. suppose the lower bound is 1,
13676 and its mode is QI.  Without the conversion, (ARRAY
13677 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13678 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13679 if (! integer_zerop (low_bound))
13680 index = size_diffop_loc (loc, index,
13681 fold_convert_loc (loc, sizetype, low_bound));
13683 string = exp1;
13686 if (string
13687 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13688 && TREE_CODE (string) == STRING_CST
13689 && TREE_CODE (index) == INTEGER_CST
13690 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13691 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13692 == MODE_INT)
13693 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13694 return build_int_cst_type (TREE_TYPE (exp),
13695 (TREE_STRING_POINTER (string)
13696 [TREE_INT_CST_LOW (index)]));
13698 return NULL;
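/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: reading element 1 of a constant string through
   an ARRAY_REF yields the character 'b' as an INTEGER_CST.  */

static void ATTRIBUTE_UNUSED
example_read_constant_string (void)
{
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node,
				      build_index_type (size_int (3)));
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		     NULL_TREE, NULL_TREE);
  tree c = fold_read_from_constant_string (ref);
  gcc_assert (c && tree_to_shwi (c) == 'b');
}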
13701 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13702 an integer constant, real, or fixed-point constant.
13704 TYPE is the type of the result. */
13706 static tree
13707 fold_negate_const (tree arg0, tree type)
13709 tree t = NULL_TREE;
13711 switch (TREE_CODE (arg0))
13713 case INTEGER_CST:
13715 bool overflow;
13716 wide_int val = wi::neg (arg0, &overflow);
13717 t = force_fit_type (type, val, 1,
13718 (overflow | TREE_OVERFLOW (arg0))
13719 && !TYPE_UNSIGNED (type));
13720 break;
13723 case REAL_CST:
13724 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13725 break;
13727 case FIXED_CST:
13729 FIXED_VALUE_TYPE f;
13730 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13731 &(TREE_FIXED_CST (arg0)), NULL,
13732 TYPE_SATURATING (type));
13733 t = build_fixed (type, f);
13734 /* Propagate overflow flags. */
13735 if (overflow_p | TREE_OVERFLOW (arg0))
13736 TREE_OVERFLOW (t) = 1;
13737 break;
13740 default:
13741 gcc_unreachable ();
13744 return t;
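/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: negating the most negative int wraps back to
   itself, and force_fit_type records the overflow on the returned
   constant instead of dropping it.  */

static void ATTRIBUTE_UNUSED
example_negate_int_min (void)
{
  tree t = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
			      integer_type_node);
  gcc_assert (TREE_OVERFLOW (t));
}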
13747 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13748 an integer constant or real constant.
13750 TYPE is the type of the result. */
13752 tree
13753 fold_abs_const (tree arg0, tree type)
13755 tree t = NULL_TREE;
13757 switch (TREE_CODE (arg0))
13759 case INTEGER_CST:
13761 /* If the value is unsigned or non-negative, then the absolute value
13762 is the same as the ordinary value. */
13763 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13764 t = arg0;
13766 /* If the value is negative, then the absolute value is
13767 its negation. */
13768 else
13770 bool overflow;
13771 wide_int val = wi::neg (arg0, &overflow);
13772 t = force_fit_type (type, val, -1,
13773 overflow | TREE_OVERFLOW (arg0));
13776 break;
13778 case REAL_CST:
13779 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13780 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13781 else
13782 t = arg0;
13783 break;
13785 default:
13786 gcc_unreachable ();
13789 return t;
13792 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13793 constant. TYPE is the type of the result. */
13795 static tree
13796 fold_not_const (const_tree arg0, tree type)
13798 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13800 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13803 /* Given CODE, a relational operator, the target type, TYPE and two
13804 constant operands OP0 and OP1, return the result of the
13805 relational operation. If the result is not a compile time
13806 constant, then return NULL_TREE. */
13808 static tree
13809 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13811 int result, invert;
13813 /* From here on, the only cases we handle are when the result is
13814 known to be a constant. */
13816 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13818 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13819 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13821 /* Handle the cases where either operand is a NaN. */
13822 if (real_isnan (c0) || real_isnan (c1))
13824 switch (code)
13826 case EQ_EXPR:
13827 case ORDERED_EXPR:
13828 result = 0;
13829 break;
13831 case NE_EXPR:
13832 case UNORDERED_EXPR:
13833 case UNLT_EXPR:
13834 case UNLE_EXPR:
13835 case UNGT_EXPR:
13836 case UNGE_EXPR:
13837 case UNEQ_EXPR:
13838 result = 1;
13839 break;
13841 case LT_EXPR:
13842 case LE_EXPR:
13843 case GT_EXPR:
13844 case GE_EXPR:
13845 case LTGT_EXPR:
13846 if (flag_trapping_math)
13847 return NULL_TREE;
13848 result = 0;
13849 break;
13851 default:
13852 gcc_unreachable ();
13855 return constant_boolean_node (result, type);
13858 return constant_boolean_node (real_compare (code, c0, c1), type);
13861 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13863 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13864 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13865 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13868 /* Handle equality/inequality of complex constants. */
13869 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13871 tree rcond = fold_relational_const (code, type,
13872 TREE_REALPART (op0),
13873 TREE_REALPART (op1));
13874 tree icond = fold_relational_const (code, type,
13875 TREE_IMAGPART (op0),
13876 TREE_IMAGPART (op1));
13877 if (code == EQ_EXPR)
13878 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13879 else if (code == NE_EXPR)
13880 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13881 else
13882 return NULL_TREE;
13885 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13887 unsigned count = VECTOR_CST_NELTS (op0);
13888 tree *elts = XALLOCAVEC (tree, count);
13889 gcc_assert (VECTOR_CST_NELTS (op1) == count
13890 && TYPE_VECTOR_SUBPARTS (type) == count);
13892 for (unsigned i = 0; i < count; i++)
13894 tree elem_type = TREE_TYPE (type);
13895 tree elem0 = VECTOR_CST_ELT (op0, i);
13896 tree elem1 = VECTOR_CST_ELT (op1, i);
13898 tree tem = fold_relational_const (code, elem_type,
13899 elem0, elem1);
13901 if (tem == NULL_TREE)
13902 return NULL_TREE;
13904 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13907 return build_vector (type, elts);
13910 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13912 To compute GT, swap the arguments and do LT.
13913 To compute GE, do LT and invert the result.
13914 To compute LE, swap the arguments, do LT and invert the result.
13915 To compute NE, do EQ and invert the result.
13917 Therefore, the code below must handle only EQ and LT. */
13919 if (code == LE_EXPR || code == GT_EXPR)
13921 std::swap (op0, op1);
13922 code = swap_tree_comparison (code);
13925 /* Note that it is safe to invert for real values here because we
13926 have already handled the one case where it matters.  */
13928 invert = 0;
13929 if (code == NE_EXPR || code == GE_EXPR)
13931 invert = 1;
13932 code = invert_tree_comparison (code, false);
13935 /* Compute a result for LT or EQ if args permit;
13936 otherwise return NULL_TREE.  */
13937 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13939 if (code == EQ_EXPR)
13940 result = tree_int_cst_equal (op0, op1);
13941 else
13942 result = tree_int_cst_lt (op0, op1);
13944 else
13945 return NULL_TREE;
13947 if (invert)
13948 result ^= 1;
13949 return constant_boolean_node (result, type);
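/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: NaN operands exercise the unordered paths above.
   NaN == NaN folds to false and NaN != NaN folds to true, while an
   ordered comparison such as < is left alone when flag_trapping_math
   is set.  */

static void ATTRIBUTE_UNUSED
example_relational_nan (void)
{
  REAL_VALUE_TYPE rnan;
  real_nan (&rnan, "", 1, TYPE_MODE (double_type_node));
  tree n = build_real (double_type_node, rnan);
  tree eq = fold_relational_const (EQ_EXPR, boolean_type_node, n, n);
  tree ne = fold_relational_const (NE_EXPR, boolean_type_node, n, n);
  gcc_assert (integer_zerop (eq) && integer_onep (ne));
}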
13952 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13953 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13954 itself. */
13956 tree
13957 fold_build_cleanup_point_expr (tree type, tree expr)
13959 /* If the expression does not have side effects then we don't have to wrap
13960 it with a cleanup point expression. */
13961 if (!TREE_SIDE_EFFECTS (expr))
13962 return expr;
13964 /* If the expression is a return, check to see if the expression inside the
13965 return, or the right-hand side of the modify expression inside the
13966 return, has no side effects.  If either one has none, we don't need to
13967 wrap the expression in a cleanup point expression.  Note we don't check the
13968 left-hand side of the modify because it should always be the return decl. */
13969 if (TREE_CODE (expr) == RETURN_EXPR)
13971 tree op = TREE_OPERAND (expr, 0);
13972 if (!op || !TREE_SIDE_EFFECTS (op))
13973 return expr;
13974 op = TREE_OPERAND (op, 1);
13975 if (!TREE_SIDE_EFFECTS (op))
13976 return expr;
13979 return build1 (CLEANUP_POINT_EXPR, type, expr);
13982 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13983 of an indirection through OP0, or NULL_TREE if no simplification is
13984 possible. */
13986 tree
13987 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13989 tree sub = op0;
13990 tree subtype;
13992 STRIP_NOPS (sub);
13993 subtype = TREE_TYPE (sub);
13994 if (!POINTER_TYPE_P (subtype))
13995 return NULL_TREE;
13997 if (TREE_CODE (sub) == ADDR_EXPR)
13999 tree op = TREE_OPERAND (sub, 0);
14000 tree optype = TREE_TYPE (op);
14001 /* *&CONST_DECL folds to the value of the const decl.  */
14002 if (TREE_CODE (op) == CONST_DECL)
14003 return DECL_INITIAL (op);
14004 /* *&p => p; make sure to handle *&"str"[cst] here. */
14005 if (type == optype)
14007 tree fop = fold_read_from_constant_string (op);
14008 if (fop)
14009 return fop;
14010 else
14011 return op;
14013 /* *(foo *)&fooarray => fooarray[0] */
14014 else if (TREE_CODE (optype) == ARRAY_TYPE
14015 && type == TREE_TYPE (optype)
14016 && (!in_gimple_form
14017 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14019 tree type_domain = TYPE_DOMAIN (optype);
14020 tree min_val = size_zero_node;
14021 if (type_domain && TYPE_MIN_VALUE (type_domain))
14022 min_val = TYPE_MIN_VALUE (type_domain);
14023 if (in_gimple_form
14024 && TREE_CODE (min_val) != INTEGER_CST)
14025 return NULL_TREE;
14026 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14027 NULL_TREE, NULL_TREE);
14029 /* *(foo *)&complexfoo => __real__ complexfoo */
14030 else if (TREE_CODE (optype) == COMPLEX_TYPE
14031 && type == TREE_TYPE (optype))
14032 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14033 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14034 else if (TREE_CODE (optype) == VECTOR_TYPE
14035 && type == TREE_TYPE (optype))
14037 tree part_width = TYPE_SIZE (type);
14038 tree index = bitsize_int (0);
14039 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14043 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14044 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14046 tree op00 = TREE_OPERAND (sub, 0);
14047 tree op01 = TREE_OPERAND (sub, 1);
14049 STRIP_NOPS (op00);
14050 if (TREE_CODE (op00) == ADDR_EXPR)
14052 tree op00type;
14053 op00 = TREE_OPERAND (op00, 0);
14054 op00type = TREE_TYPE (op00);
14056 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14057 if (TREE_CODE (op00type) == VECTOR_TYPE
14058 && type == TREE_TYPE (op00type))
14060 HOST_WIDE_INT offset = tree_to_shwi (op01);
14061 tree part_width = TYPE_SIZE (type);
14062 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14063 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14064 tree index = bitsize_int (indexi);
14066 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14067 return fold_build3_loc (loc,
14068 BIT_FIELD_REF, type, op00,
14069 part_width, index);
14072 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14073 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14074 && type == TREE_TYPE (op00type))
14076 tree size = TYPE_SIZE_UNIT (type);
14077 if (tree_int_cst_equal (size, op01))
14078 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14080 /* ((foo *)&fooarray)[1] => fooarray[1] */
14081 else if (TREE_CODE (op00type) == ARRAY_TYPE
14082 && type == TREE_TYPE (op00type))
14084 tree type_domain = TYPE_DOMAIN (op00type);
14085 tree min_val = size_zero_node;
14086 if (type_domain && TYPE_MIN_VALUE (type_domain))
14087 min_val = TYPE_MIN_VALUE (type_domain);
14088 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14089 TYPE_SIZE_UNIT (type));
14090 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14091 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14092 NULL_TREE, NULL_TREE);
14097 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14098 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14099 && type == TREE_TYPE (TREE_TYPE (subtype))
14100 && (!in_gimple_form
14101 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14103 tree type_domain;
14104 tree min_val = size_zero_node;
14105 sub = build_fold_indirect_ref_loc (loc, sub);
14106 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14107 if (type_domain && TYPE_MIN_VALUE (type_domain))
14108 min_val = TYPE_MIN_VALUE (type_domain);
14109 if (in_gimple_form
14110 && TREE_CODE (min_val) != INTEGER_CST)
14111 return NULL_TREE;
14112 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14113 NULL_TREE);
14116 return NULL_TREE;
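/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: C is assumed to be a variable of type complex
   double, so dereferencing a double * view of its address matches the
   "*(foo *)&complexfoo" pattern above and folds to __real__ C.  */

static tree ATTRIBUTE_UNUSED
example_fold_complex_deref (location_t loc, tree c)
{
  tree addr = fold_convert (build_pointer_type (double_type_node),
			    build_fold_addr_expr (c));
  return fold_indirect_ref_1 (loc, double_type_node, addr);
}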
14119 /* Builds an expression for an indirection through T, simplifying some
14120 cases. */
14122 tree
14123 build_fold_indirect_ref_loc (location_t loc, tree t)
14125 tree type = TREE_TYPE (TREE_TYPE (t));
14126 tree sub = fold_indirect_ref_1 (loc, type, t);
14128 if (sub)
14129 return sub;
14131 return build1_loc (loc, INDIRECT_REF, type, t);
14134 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14136 tree
14137 fold_indirect_ref_loc (location_t loc, tree t)
14139 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14141 if (sub)
14142 return sub;
14143 else
14144 return t;
14147 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14148 whose result is ignored. The type of the returned tree need not be
14149 the same as the original expression. */
14151 tree
14152 fold_ignored_result (tree t)
14154 if (!TREE_SIDE_EFFECTS (t))
14155 return integer_zero_node;
14157 for (;;)
14158 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14160 case tcc_unary:
14161 t = TREE_OPERAND (t, 0);
14162 break;
14164 case tcc_binary:
14165 case tcc_comparison:
14166 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14167 t = TREE_OPERAND (t, 0);
14168 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14169 t = TREE_OPERAND (t, 1);
14170 else
14171 return t;
14172 break;
14174 case tcc_expression:
14175 switch (TREE_CODE (t))
14177 case COMPOUND_EXPR:
14178 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14179 return t;
14180 t = TREE_OPERAND (t, 0);
14181 break;
14183 case COND_EXPR:
14184 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14185 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14186 return t;
14187 t = TREE_OPERAND (t, 0);
14188 break;
14190 default:
14191 return t;
14193 break;
14195 default:
14196 return t;
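/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: a side-effect-free expression whose value is
   ignored reduces to integer_zero_node, so callers can discard it.  */

static void ATTRIBUTE_UNUSED
example_ignored_result (void)
{
  tree t = fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
			    integer_type_node,
			    build_int_cst (integer_type_node, 1),
			    build_int_cst (integer_type_node, 2));
  gcc_assert (fold_ignored_result (t) == integer_zero_node);
}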
14200 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14202 tree
14203 round_up_loc (location_t loc, tree value, unsigned int divisor)
14205 tree div = NULL_TREE;
14207 if (divisor == 1)
14208 return value;
14210 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14211 have to do anything.  Only do this when VALUE is not a constant,
14212 because for a constant this check is more expensive than just
14213 doing the rounding.  */
14214 if (TREE_CODE (value) != INTEGER_CST)
14216 div = build_int_cst (TREE_TYPE (value), divisor);
14218 if (multiple_of_p (TREE_TYPE (value), value, div))
14219 return value;
14222 /* If divisor is a power of two, simplify this to bit manipulation. */
14223 if (divisor == (divisor & -divisor))
14225 if (TREE_CODE (value) == INTEGER_CST)
14227 wide_int val = value;
14228 bool overflow_p;
14230 if ((val & (divisor - 1)) == 0)
14231 return value;
14233 overflow_p = TREE_OVERFLOW (value);
14234 val += divisor - 1;
14235 val &= - (int) divisor;
14236 if (val == 0)
14237 overflow_p = true;
14239 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14241 else
14243 tree t;
14245 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14246 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14247 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14248 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14251 else
14253 if (!div)
14254 div = build_int_cst (TREE_TYPE (value), divisor);
14255 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14256 value = size_binop_loc (loc, MULT_EXPR, value, div);
14259 return value;
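/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: rounding the constant 37 up to a multiple of 8
   takes the power-of-two path, computing (37 + 7) & -8 == 40.  */

static void ATTRIBUTE_UNUSED
example_round_up (void)
{
  tree r = round_up_loc (UNKNOWN_LOCATION, size_int (37), 8);
  gcc_assert (tree_to_shwi (r) == 40);
}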
14262 /* Likewise, but round down. */
14264 tree
14265 round_down_loc (location_t loc, tree value, int divisor)
14267 tree div = NULL_TREE;
14269 gcc_assert (divisor > 0);
14270 if (divisor == 1)
14271 return value;
14273 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14274 have to do anything.  Only do this when VALUE is not a constant,
14275 because for a constant this check is more expensive than just
14276 doing the rounding.  */
14277 if (TREE_CODE (value) != INTEGER_CST)
14279 div = build_int_cst (TREE_TYPE (value), divisor);
14281 if (multiple_of_p (TREE_TYPE (value), value, div))
14282 return value;
14285 /* If divisor is a power of two, simplify this to bit manipulation. */
14286 if (divisor == (divisor & -divisor))
14288 tree t;
14290 t = build_int_cst (TREE_TYPE (value), -divisor);
14291 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14293 else
14295 if (!div)
14296 div = build_int_cst (TREE_TYPE (value), divisor);
14297 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14298 value = size_binop_loc (loc, MULT_EXPR, value, div);
14301 return value;
14304 /* Returns the pointer to the base of the object addressed by EXP and
14305 extracts the information about the offset of the access, storing it
14306 to PBITPOS and POFFSET. */
14308 static tree
14309 split_address_to_core_and_offset (tree exp,
14310 HOST_WIDE_INT *pbitpos, tree *poffset)
14312 tree core;
14313 machine_mode mode;
14314 int unsignedp, volatilep;
14315 HOST_WIDE_INT bitsize;
14316 location_t loc = EXPR_LOCATION (exp);
14318 if (TREE_CODE (exp) == ADDR_EXPR)
14320 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14321 poffset, &mode, &unsignedp, &volatilep,
14322 false);
14323 core = build_fold_addr_expr_loc (loc, core);
14325 else
14327 core = exp;
14328 *pbitpos = 0;
14329 *poffset = NULL_TREE;
14332 return core;
14335 /* Returns true if addresses of E1 and E2 differ by a constant, false
14336 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14338 bool
14339 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14341 tree core1, core2;
14342 HOST_WIDE_INT bitpos1, bitpos2;
14343 tree toffset1, toffset2, tdiff, type;
14345 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14346 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14348 if (bitpos1 % BITS_PER_UNIT != 0
14349 || bitpos2 % BITS_PER_UNIT != 0
14350 || !operand_equal_p (core1, core2, 0))
14351 return false;
14353 if (toffset1 && toffset2)
14355 type = TREE_TYPE (toffset1);
14356 if (type != TREE_TYPE (toffset2))
14357 toffset2 = fold_convert (type, toffset2);
14359 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14360 if (!cst_and_fits_in_hwi (tdiff))
14361 return false;
14363 *diff = int_cst_value (tdiff);
14365 else if (toffset1 || toffset2)
14367 /* If only one of the offsets is non-constant, the difference cannot
14368 be a constant. */
14369 return false;
14371 else
14372 *diff = 0;
14374 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14375 return true;
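/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: &"abc"[3] and &"abc"[1] share the same
   STRING_CST core, so their difference is the compile-time
   constant 2.  */

static void ATTRIBUTE_UNUSED
example_ptr_difference (void)
{
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node,
				      build_index_type (size_int (3)));
  tree r1 = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		    NULL_TREE, NULL_TREE);
  tree r3 = build4 (ARRAY_REF, char_type_node, str, size_int (3),
		    NULL_TREE, NULL_TREE);
  HOST_WIDE_INT diff;
  if (ptr_difference_const (build_fold_addr_expr (r3),
			    build_fold_addr_expr (r1), &diff))
    gcc_assert (diff == 2);
}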
14378 /* Return OFF converted to a pointer offset type suitable as offset for
14379 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14380 tree
14381 convert_to_ptrofftype_loc (location_t loc, tree off)
14383 return fold_convert_loc (loc, sizetype, off);
14386 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14387 tree
14388 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14390 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14391 ptr, convert_to_ptrofftype_loc (loc, off));
14394 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14395 tree
14396 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14398 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14399 ptr, size_int (off));
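/* A hypothetical usage sketch (not part of fold-const.c), added for
   illustration only: the wrappers above convert the offset to
   sizetype, which POINTER_PLUS_EXPR requires of its second operand,
   so callers may pass an offset of any integer type.  */

static tree ATTRIBUTE_UNUSED
example_pointer_plus (location_t loc, tree ptr)
{
  /* Equivalent to fold_build_pointer_plus_hwi_loc (loc, ptr, 16).  */
  return fold_build_pointer_plus_loc (loc, ptr,
				      build_int_cst (integer_type_node,
						     16));
}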