Daily bump.
[official-gcc.git] / gcc / fold-const.c
blob0cb09f4736e4f9c6bc814c6edabaed8ffa08f3f1
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
88 /* The following constants represent a bit based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (location_t, tree, tree, enum tree_code,
113 tree *, tree *, tree *, int);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int operand_equal_for_comparison_p (tree, tree, tree);
118 static int twoval_comparison_p (tree, tree *, tree *, int *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree make_bit_field_ref (location_t, tree, tree,
121 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
122 static tree optimize_bit_field_compare (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
125 HOST_WIDE_INT *,
126 machine_mode *, int *, int *, int *,
127 tree *, tree *);
128 static int simple_operand_p (const_tree);
129 static bool simple_operand_p_2 (tree);
130 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
131 static tree range_predecessor (tree);
132 static tree range_successor (tree);
133 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
135 static tree unextend (tree, int, int, tree);
136 static tree optimize_minmax_comparison (location_t, enum tree_code,
137 tree, tree, tree);
138 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
139 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
140 static tree fold_binary_op_with_conditional_arg (location_t,
141 enum tree_code, tree,
142 tree, tree,
143 tree, tree, int);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static bool vec_cst_ctor_to_array (tree, tree *);
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
157 static location_t
158 expr_location_or (tree t, location_t loc)
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
164 /* Similar to protected_set_expr_location, but never modify x in place,
165 if location can and needs to be set, unshare it. */
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
179 return x;
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
189 widest_int quo;
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
195 return NULL_TREE;
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
207 static int fold_deferring_overflow_warnings;
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
214 static const char* fold_deferred_overflow_warning;
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
224 void
225 fold_defer_overflow_warnings (void)
227 ++fold_deferring_overflow_warnings;
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
242 const char *warnmsg;
243 location_t locus;
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
259 if (!issue || warnmsg == NULL)
260 return;
262 if (gimple_no_warning_p (stmt))
263 return;
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
270 if (!issue_strict_overflow_warning (code))
271 return;
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
286 fold_undefer_overflow_warnings (false, NULL, 0);
289 /* Whether we are deferring overflow warnings. */
291 bool
292 fold_deferring_overflow_warnings_p (void)
294 return fold_deferring_overflow_warnings > 0;
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
303 if (fold_deferring_overflow_warnings > 0)
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
316 /* Return true if the built-in mathematical function specified by CODE
317 is odd, i.e. -f(x) == f(-x). */
319 bool
320 negate_mathfn_p (combined_fn fn)
322 switch (fn)
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_SIN:
343 CASE_CFN_SINH:
344 CASE_CFN_TAN:
345 CASE_CFN_TANH:
346 CASE_CFN_TRUNC:
347 return true;
349 CASE_CFN_LLRINT:
350 CASE_CFN_LRINT:
351 CASE_CFN_NEARBYINT:
352 CASE_CFN_RINT:
353 return !flag_rounding_math;
355 default:
356 break;
358 return false;
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
364 bool
365 may_negate_without_overflow_p (const_tree t)
367 tree type;
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
375 return !wi::only_sign_bit_p (t);
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
381 static bool
382 negate_expr_p (tree t)
384 tree type;
386 if (t == 0)
387 return false;
389 type = TREE_TYPE (t);
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
394 case INTEGER_CST:
395 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
396 return true;
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
404 case FIXED_CST:
405 return true;
407 case NEGATE_EXPR:
408 return !TYPE_OVERFLOW_SANITIZED (type);
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
419 case VECTOR_CST:
421 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
422 return true;
424 int count = TYPE_VECTOR_SUBPARTS (type), i;
426 for (i = 0; i < count; i++)
427 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
428 return false;
430 return true;
433 case COMPLEX_EXPR:
434 return negate_expr_p (TREE_OPERAND (t, 0))
435 && negate_expr_p (TREE_OPERAND (t, 1));
437 case CONJ_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0));
440 case PLUS_EXPR:
441 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
442 || HONOR_SIGNED_ZEROS (element_mode (type))
443 || (INTEGRAL_TYPE_P (type)
444 && ! TYPE_OVERFLOW_WRAPS (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && (! INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (type))
465 break;
466 /* INT_MIN/n * n doesn't overflow while negating one operand it does
467 if n is a power of two. */
468 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
469 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
470 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
471 && ! integer_pow2p (TREE_OPERAND (t, 0)))
472 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
473 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
474 break;
476 /* Fall through. */
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 if (negate_expr_p (TREE_OPERAND (t, 0)))
490 return true;
491 /* In general we can't negate B in A / B, because if A is INT_MIN and
492 B is 1, we may turn this into INT_MIN / -1 which is undefined
493 and actually traps on some architectures. */
494 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
495 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_onep (TREE_OPERAND (t, 1))))
498 return negate_expr_p (TREE_OPERAND (t, 1));
499 break;
501 case NOP_EXPR:
502 /* Negate -((double)float) as (double)(-float). */
503 if (TREE_CODE (type) == REAL_TYPE)
505 tree tem = strip_float_extensions (t);
506 if (tem != t)
507 return negate_expr_p (tem);
509 break;
511 case CALL_EXPR:
512 /* Negate -f(x) as f(-x). */
513 if (negate_mathfn_p (get_call_combined_fn (t)))
514 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 break;
517 case RSHIFT_EXPR:
518 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
519 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
521 tree op1 = TREE_OPERAND (t, 1);
522 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
523 return true;
525 break;
527 default:
528 break;
530 return false;
533 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
534 simplification is possible.
535 If negate_expr_p would return true for T, NULL_TREE will never be
536 returned. */
538 static tree
539 fold_negate_expr (location_t loc, tree t)
541 tree type = TREE_TYPE (t);
542 tree tem;
544 switch (TREE_CODE (t))
546 /* Convert - (~A) to A + 1. */
547 case BIT_NOT_EXPR:
548 if (INTEGRAL_TYPE_P (type))
549 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
550 build_one_cst (type));
551 break;
553 case INTEGER_CST:
554 tem = fold_negate_const (t, type);
555 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
556 || (ANY_INTEGRAL_TYPE_P (type)
557 && !TYPE_OVERFLOW_TRAPS (type)
558 && TYPE_OVERFLOW_WRAPS (type))
559 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
560 return tem;
561 break;
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
567 case FIXED_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
571 case COMPLEX_CST:
573 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
574 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
575 if (rpart && ipart)
576 return build_complex (type, rpart, ipart);
578 break;
580 case VECTOR_CST:
582 int count = TYPE_VECTOR_SUBPARTS (type), i;
583 tree *elts = XALLOCAVEC (tree, count);
585 for (i = 0; i < count; i++)
587 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
588 if (elts[i] == NULL_TREE)
589 return NULL_TREE;
592 return build_vector (type, elts);
595 case COMPLEX_EXPR:
596 if (negate_expr_p (t))
597 return fold_build2_loc (loc, COMPLEX_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
599 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
600 break;
602 case CONJ_EXPR:
603 if (negate_expr_p (t))
604 return fold_build1_loc (loc, CONJ_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
606 break;
608 case NEGATE_EXPR:
609 if (!TYPE_OVERFLOW_SANITIZED (type))
610 return TREE_OPERAND (t, 0);
611 break;
613 case PLUS_EXPR:
614 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
615 && !HONOR_SIGNED_ZEROS (element_mode (type)))
617 /* -(A + B) -> (-B) - A. */
618 if (negate_expr_p (TREE_OPERAND (t, 1))
619 && reorder_operands_p (TREE_OPERAND (t, 0),
620 TREE_OPERAND (t, 1)))
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
635 break;
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
640 && !HONOR_SIGNED_ZEROS (element_mode (type))
641 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
642 return fold_build2_loc (loc, MINUS_EXPR, type,
643 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
644 break;
646 case MULT_EXPR:
647 if (TYPE_UNSIGNED (type))
648 break;
650 /* Fall through. */
652 case RDIV_EXPR:
653 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 TREE_OPERAND (t, 0), negate_expr (tem));
659 tem = TREE_OPERAND (t, 0);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 negate_expr (tem), TREE_OPERAND (t, 1));
664 break;
666 case TRUNC_DIV_EXPR:
667 case ROUND_DIV_EXPR:
668 case EXACT_DIV_EXPR:
669 if (TYPE_UNSIGNED (type))
670 break;
671 if (negate_expr_p (TREE_OPERAND (t, 0)))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 negate_expr (TREE_OPERAND (t, 0)),
674 TREE_OPERAND (t, 1));
675 /* In general we can't negate B in A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. */
678 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
679 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
680 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
681 && ! integer_onep (TREE_OPERAND (t, 1))))
682 && negate_expr_p (TREE_OPERAND (t, 1)))
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0),
685 negate_expr (TREE_OPERAND (t, 1)));
686 break;
688 case NOP_EXPR:
689 /* Convert -((double)float) into (double)(-float). */
690 if (TREE_CODE (type) == REAL_TYPE)
692 tem = strip_float_extensions (t);
693 if (tem != t && negate_expr_p (tem))
694 return fold_convert_loc (loc, type, negate_expr (tem));
696 break;
698 case CALL_EXPR:
699 /* Negate -f(x) as f(-x). */
700 if (negate_mathfn_p (get_call_combined_fn (t))
701 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
703 tree fndecl, arg;
705 fndecl = get_callee_fndecl (t);
706 arg = negate_expr (CALL_EXPR_ARG (t, 0));
707 return build_call_expr_loc (loc, fndecl, 1, arg);
709 break;
711 case RSHIFT_EXPR:
712 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
713 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
715 tree op1 = TREE_OPERAND (t, 1);
716 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
726 break;
728 default:
729 break;
732 return NULL_TREE;
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
737 return NULL_TREE. */
739 static tree
740 negate_expr (tree t)
742 tree type, tem;
743 location_t loc;
745 if (t == NULL_TREE)
746 return NULL_TREE;
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead. If a variable part is of pointer
772 type, it is negated after converting to TYPE. This prevents us from
773 generating illegal MINUS pointer expression. LOC is the location of
774 the converted variable part.
776 If IN is itself a literal or constant, return it as appropriate.
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
781 static tree
782 split_tree (location_t loc, tree in, tree type, enum tree_code code,
783 tree *conp, tree *litp, tree *minus_litp, int negate_p)
785 tree var = 0;
787 *conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
828 var = in;
829 else if (op0 != 0)
830 var = op0;
831 else
832 var = op1, neg_var_p = neg1_p;
834 /* Now do any needed negations. */
835 if (neg_litp_p)
836 *minus_litp = *litp, *litp = 0;
837 if (neg_conp_p)
838 *conp = negate_expr (*conp);
839 if (neg_var_p && var)
841 /* Convert to TYPE before negating. */
842 var = fold_convert_loc (loc, type, var);
843 var = negate_expr (var);
846 else if (TREE_CODE (in) == BIT_NOT_EXPR
847 && code == PLUS_EXPR)
849 /* -X - 1 is folded to ~X, undo that here. */
850 *minus_litp = build_one_cst (TREE_TYPE (in));
851 var = negate_expr (TREE_OPERAND (in, 0));
853 else if (TREE_CONSTANT (in))
854 *conp = in;
855 else
856 var = in;
858 if (negate_p)
860 if (*litp)
861 *minus_litp = *litp, *litp = 0;
862 else if (*minus_litp)
863 *litp = *minus_litp, *minus_litp = 0;
864 *conp = negate_expr (*conp);
865 if (var)
867 /* Convert to TYPE before negating. */
868 var = fold_convert_loc (loc, type, var);
869 var = negate_expr (var);
873 return var;
876 /* Re-associate trees split by the above function. T1 and T2 are
877 either expressions to associate or null. Return the new
878 expression, if any. LOC is the location of the new expression. If
879 we build an operation, do it in TYPE and with CODE. */
881 static tree
882 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
884 if (t1 == 0)
885 return t2;
886 else if (t2 == 0)
887 return t1;
889 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
890 try to fold this since we will have infinite recursion. But do
891 deal with any NEGATE_EXPRs. */
892 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
893 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
895 if (code == PLUS_EXPR)
897 if (TREE_CODE (t1) == NEGATE_EXPR)
898 return build2_loc (loc, MINUS_EXPR, type,
899 fold_convert_loc (loc, type, t2),
900 fold_convert_loc (loc, type,
901 TREE_OPERAND (t1, 0)));
902 else if (TREE_CODE (t2) == NEGATE_EXPR)
903 return build2_loc (loc, MINUS_EXPR, type,
904 fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type,
906 TREE_OPERAND (t2, 0)));
907 else if (integer_zerop (t2))
908 return fold_convert_loc (loc, type, t1);
910 else if (code == MINUS_EXPR)
912 if (integer_zerop (t2))
913 return fold_convert_loc (loc, type, t1);
916 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
917 fold_convert_loc (loc, type, t2));
920 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
921 fold_convert_loc (loc, type, t2));
924 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
925 for use in int_const_binop, size_binop and size_diffop. */
927 static bool
928 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
930 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
931 return false;
932 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
933 return false;
935 switch (code)
937 case LSHIFT_EXPR:
938 case RSHIFT_EXPR:
939 case LROTATE_EXPR:
940 case RROTATE_EXPR:
941 return true;
943 default:
944 break;
947 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
948 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
949 && TYPE_MODE (type1) == TYPE_MODE (type2);
953 /* Combine two integer constants ARG1 and ARG2 under operation CODE
954 to produce a new constant. Return NULL_TREE if we don't know how
955 to evaluate CODE at compile-time. */
957 static tree
958 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
959 int overflowable)
961 wide_int res;
962 tree t;
963 tree type = TREE_TYPE (arg1);
964 signop sign = TYPE_SIGN (type);
965 bool overflow = false;
967 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
968 TYPE_SIGN (TREE_TYPE (parg2)));
970 switch (code)
972 case BIT_IOR_EXPR:
973 res = wi::bit_or (arg1, arg2);
974 break;
976 case BIT_XOR_EXPR:
977 res = wi::bit_xor (arg1, arg2);
978 break;
980 case BIT_AND_EXPR:
981 res = wi::bit_and (arg1, arg2);
982 break;
984 case RSHIFT_EXPR:
985 case LSHIFT_EXPR:
986 if (wi::neg_p (arg2))
988 arg2 = -arg2;
989 if (code == RSHIFT_EXPR)
990 code = LSHIFT_EXPR;
991 else
992 code = RSHIFT_EXPR;
995 if (code == RSHIFT_EXPR)
996 /* It's unclear from the C standard whether shifts can overflow.
997 The following code ignores overflow; perhaps a C standard
998 interpretation ruling is needed. */
999 res = wi::rshift (arg1, arg2, sign);
1000 else
1001 res = wi::lshift (arg1, arg2);
1002 break;
1004 case RROTATE_EXPR:
1005 case LROTATE_EXPR:
1006 if (wi::neg_p (arg2))
1008 arg2 = -arg2;
1009 if (code == RROTATE_EXPR)
1010 code = LROTATE_EXPR;
1011 else
1012 code = RROTATE_EXPR;
1015 if (code == RROTATE_EXPR)
1016 res = wi::rrotate (arg1, arg2);
1017 else
1018 res = wi::lrotate (arg1, arg2);
1019 break;
1021 case PLUS_EXPR:
1022 res = wi::add (arg1, arg2, sign, &overflow);
1023 break;
1025 case MINUS_EXPR:
1026 res = wi::sub (arg1, arg2, sign, &overflow);
1027 break;
1029 case MULT_EXPR:
1030 res = wi::mul (arg1, arg2, sign, &overflow);
1031 break;
1033 case MULT_HIGHPART_EXPR:
1034 res = wi::mul_high (arg1, arg2, sign);
1035 break;
1037 case TRUNC_DIV_EXPR:
1038 case EXACT_DIV_EXPR:
1039 if (arg2 == 0)
1040 return NULL_TREE;
1041 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1042 break;
1044 case FLOOR_DIV_EXPR:
1045 if (arg2 == 0)
1046 return NULL_TREE;
1047 res = wi::div_floor (arg1, arg2, sign, &overflow);
1048 break;
1050 case CEIL_DIV_EXPR:
1051 if (arg2 == 0)
1052 return NULL_TREE;
1053 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1054 break;
1056 case ROUND_DIV_EXPR:
1057 if (arg2 == 0)
1058 return NULL_TREE;
1059 res = wi::div_round (arg1, arg2, sign, &overflow);
1060 break;
1062 case TRUNC_MOD_EXPR:
1063 if (arg2 == 0)
1064 return NULL_TREE;
1065 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1066 break;
1068 case FLOOR_MOD_EXPR:
1069 if (arg2 == 0)
1070 return NULL_TREE;
1071 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1072 break;
1074 case CEIL_MOD_EXPR:
1075 if (arg2 == 0)
1076 return NULL_TREE;
1077 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1078 break;
1080 case ROUND_MOD_EXPR:
1081 if (arg2 == 0)
1082 return NULL_TREE;
1083 res = wi::mod_round (arg1, arg2, sign, &overflow);
1084 break;
1086 case MIN_EXPR:
1087 res = wi::min (arg1, arg2, sign);
1088 break;
1090 case MAX_EXPR:
1091 res = wi::max (arg1, arg2, sign);
1092 break;
1094 default:
1095 return NULL_TREE;
1098 t = force_fit_type (type, res, overflowable,
1099 (((sign == SIGNED || overflowable == -1)
1100 && overflow)
1101 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1103 return t;
1106 tree
1107 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1109 return int_const_binop_1 (code, arg1, arg2, 1);
1112 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1113 constant. We assume ARG1 and ARG2 have the same data type, or at least
1114 are the same kind of constant and the same machine mode. Return zero if
1115 combining the constants is not allowed in the current operating mode. */
1117 static tree
1118 const_binop (enum tree_code code, tree arg1, tree arg2)
1120 /* Sanity check for the recursive cases. */
1121 if (!arg1 || !arg2)
1122 return NULL_TREE;
1124 STRIP_NOPS (arg1);
1125 STRIP_NOPS (arg2);
1127 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1129 if (code == POINTER_PLUS_EXPR)
1130 return int_const_binop (PLUS_EXPR,
1131 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1133 return int_const_binop (code, arg1, arg2);
1136 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1138 machine_mode mode;
1139 REAL_VALUE_TYPE d1;
1140 REAL_VALUE_TYPE d2;
1141 REAL_VALUE_TYPE value;
1142 REAL_VALUE_TYPE result;
1143 bool inexact;
1144 tree t, type;
1146 /* The following codes are handled by real_arithmetic. */
1147 switch (code)
1149 case PLUS_EXPR:
1150 case MINUS_EXPR:
1151 case MULT_EXPR:
1152 case RDIV_EXPR:
1153 case MIN_EXPR:
1154 case MAX_EXPR:
1155 break;
1157 default:
1158 return NULL_TREE;
1161 d1 = TREE_REAL_CST (arg1);
1162 d2 = TREE_REAL_CST (arg2);
1164 type = TREE_TYPE (arg1);
1165 mode = TYPE_MODE (type);
1167 /* Don't perform operation if we honor signaling NaNs and
1168 either operand is a signaling NaN. */
1169 if (HONOR_SNANS (mode)
1170 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1171 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1172 return NULL_TREE;
1174 /* Don't perform operation if it would raise a division
1175 by zero exception. */
1176 if (code == RDIV_EXPR
1177 && real_equal (&d2, &dconst0)
1178 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1179 return NULL_TREE;
1181 /* If either operand is a NaN, just return it. Otherwise, set up
1182 for floating-point trap; we return an overflow. */
1183 if (REAL_VALUE_ISNAN (d1))
1185 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1186 is off. */
1187 d1.signalling = 0;
1188 t = build_real (type, d1);
1189 return t;
1191 else if (REAL_VALUE_ISNAN (d2))
1193 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1194 is off. */
1195 d2.signalling = 0;
1196 t = build_real (type, d2);
1197 return t;
1200 inexact = real_arithmetic (&value, code, &d1, &d2);
1201 real_convert (&result, mode, &value);
1203 /* Don't constant fold this floating point operation if
1204 the result has overflowed and flag_trapping_math. */
1205 if (flag_trapping_math
1206 && MODE_HAS_INFINITIES (mode)
1207 && REAL_VALUE_ISINF (result)
1208 && !REAL_VALUE_ISINF (d1)
1209 && !REAL_VALUE_ISINF (d2))
1210 return NULL_TREE;
1212 /* Don't constant fold this floating point operation if the
1213 result may dependent upon the run-time rounding mode and
1214 flag_rounding_math is set, or if GCC's software emulation
1215 is unable to accurately represent the result. */
1216 if ((flag_rounding_math
1217 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1218 && (inexact || !real_identical (&result, &value)))
1219 return NULL_TREE;
1221 t = build_real (type, result);
1223 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1224 return t;
1227 if (TREE_CODE (arg1) == FIXED_CST)
1229 FIXED_VALUE_TYPE f1;
1230 FIXED_VALUE_TYPE f2;
1231 FIXED_VALUE_TYPE result;
1232 tree t, type;
1233 int sat_p;
1234 bool overflow_p;
1236 /* The following codes are handled by fixed_arithmetic. */
1237 switch (code)
1239 case PLUS_EXPR:
1240 case MINUS_EXPR:
1241 case MULT_EXPR:
1242 case TRUNC_DIV_EXPR:
1243 if (TREE_CODE (arg2) != FIXED_CST)
1244 return NULL_TREE;
1245 f2 = TREE_FIXED_CST (arg2);
1246 break;
1248 case LSHIFT_EXPR:
1249 case RSHIFT_EXPR:
1251 if (TREE_CODE (arg2) != INTEGER_CST)
1252 return NULL_TREE;
1253 wide_int w2 = arg2;
1254 f2.data.high = w2.elt (1);
1255 f2.data.low = w2.elt (0);
1256 f2.mode = SImode;
1258 break;
1260 default:
1261 return NULL_TREE;
1264 f1 = TREE_FIXED_CST (arg1);
1265 type = TREE_TYPE (arg1);
1266 sat_p = TYPE_SATURATING (type);
1267 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1268 t = build_fixed (type, result);
1269 /* Propagate overflow flags. */
1270 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1271 TREE_OVERFLOW (t) = 1;
1272 return t;
1275 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1277 tree type = TREE_TYPE (arg1);
1278 tree r1 = TREE_REALPART (arg1);
1279 tree i1 = TREE_IMAGPART (arg1);
1280 tree r2 = TREE_REALPART (arg2);
1281 tree i2 = TREE_IMAGPART (arg2);
1282 tree real, imag;
1284 switch (code)
1286 case PLUS_EXPR:
1287 case MINUS_EXPR:
1288 real = const_binop (code, r1, r2);
1289 imag = const_binop (code, i1, i2);
1290 break;
1292 case MULT_EXPR:
1293 if (COMPLEX_FLOAT_TYPE_P (type))
1294 return do_mpc_arg2 (arg1, arg2, type,
1295 /* do_nonfinite= */ folding_initializer,
1296 mpc_mul);
1298 real = const_binop (MINUS_EXPR,
1299 const_binop (MULT_EXPR, r1, r2),
1300 const_binop (MULT_EXPR, i1, i2));
1301 imag = const_binop (PLUS_EXPR,
1302 const_binop (MULT_EXPR, r1, i2),
1303 const_binop (MULT_EXPR, i1, r2));
1304 break;
1306 case RDIV_EXPR:
1307 if (COMPLEX_FLOAT_TYPE_P (type))
1308 return do_mpc_arg2 (arg1, arg2, type,
1309 /* do_nonfinite= */ folding_initializer,
1310 mpc_div);
1311 /* Fallthru ... */
1312 case TRUNC_DIV_EXPR:
1313 case CEIL_DIV_EXPR:
1314 case FLOOR_DIV_EXPR:
1315 case ROUND_DIV_EXPR:
1316 if (flag_complex_method == 0)
1318 /* Keep this algorithm in sync with
1319 tree-complex.c:expand_complex_div_straight().
1321 Expand complex division to scalars, straightforward algorithm.
1322 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1323 t = br*br + bi*bi
1325 tree magsquared
1326 = const_binop (PLUS_EXPR,
1327 const_binop (MULT_EXPR, r2, r2),
1328 const_binop (MULT_EXPR, i2, i2));
1329 tree t1
1330 = const_binop (PLUS_EXPR,
1331 const_binop (MULT_EXPR, r1, r2),
1332 const_binop (MULT_EXPR, i1, i2));
1333 tree t2
1334 = const_binop (MINUS_EXPR,
1335 const_binop (MULT_EXPR, i1, r2),
1336 const_binop (MULT_EXPR, r1, i2));
1338 real = const_binop (code, t1, magsquared);
1339 imag = const_binop (code, t2, magsquared);
1341 else
1343 /* Keep this algorithm in sync with
1344 tree-complex.c:expand_complex_div_wide().
1346 Expand complex division to scalars, modified algorithm to minimize
1347 overflow with wide input ranges. */
1348 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1349 fold_abs_const (r2, TREE_TYPE (type)),
1350 fold_abs_const (i2, TREE_TYPE (type)));
1352 if (integer_nonzerop (compare))
1354 /* In the TRUE branch, we compute
1355 ratio = br/bi;
1356 div = (br * ratio) + bi;
1357 tr = (ar * ratio) + ai;
1358 ti = (ai * ratio) - ar;
1359 tr = tr / div;
1360 ti = ti / div; */
1361 tree ratio = const_binop (code, r2, i2);
1362 tree div = const_binop (PLUS_EXPR, i2,
1363 const_binop (MULT_EXPR, r2, ratio));
1364 real = const_binop (MULT_EXPR, r1, ratio);
1365 real = const_binop (PLUS_EXPR, real, i1);
1366 real = const_binop (code, real, div);
1368 imag = const_binop (MULT_EXPR, i1, ratio);
1369 imag = const_binop (MINUS_EXPR, imag, r1);
1370 imag = const_binop (code, imag, div);
1372 else
1374 /* In the FALSE branch, we compute
1375 ratio = d/c;
1376 divisor = (d * ratio) + c;
1377 tr = (b * ratio) + a;
1378 ti = b - (a * ratio);
1379 tr = tr / div;
1380 ti = ti / div; */
1381 tree ratio = const_binop (code, i2, r2);
1382 tree div = const_binop (PLUS_EXPR, r2,
1383 const_binop (MULT_EXPR, i2, ratio));
1385 real = const_binop (MULT_EXPR, i1, ratio);
1386 real = const_binop (PLUS_EXPR, real, r1);
1387 real = const_binop (code, real, div);
1389 imag = const_binop (MULT_EXPR, r1, ratio);
1390 imag = const_binop (MINUS_EXPR, i1, imag);
1391 imag = const_binop (code, imag, div);
1394 break;
1396 default:
1397 return NULL_TREE;
1400 if (real && imag)
1401 return build_complex (type, real, imag);
1404 if (TREE_CODE (arg1) == VECTOR_CST
1405 && TREE_CODE (arg2) == VECTOR_CST)
1407 tree type = TREE_TYPE (arg1);
1408 int count = TYPE_VECTOR_SUBPARTS (type), i;
1409 tree *elts = XALLOCAVEC (tree, count);
1411 for (i = 0; i < count; i++)
1413 tree elem1 = VECTOR_CST_ELT (arg1, i);
1414 tree elem2 = VECTOR_CST_ELT (arg2, i);
1416 elts[i] = const_binop (code, elem1, elem2);
1418 /* It is possible that const_binop cannot handle the given
1419 code and return NULL_TREE */
1420 if (elts[i] == NULL_TREE)
1421 return NULL_TREE;
1424 return build_vector (type, elts);
1427 /* Shifts allow a scalar offset for a vector. */
1428 if (TREE_CODE (arg1) == VECTOR_CST
1429 && TREE_CODE (arg2) == INTEGER_CST)
1431 tree type = TREE_TYPE (arg1);
1432 int count = TYPE_VECTOR_SUBPARTS (type), i;
1433 tree *elts = XALLOCAVEC (tree, count);
1435 for (i = 0; i < count; i++)
1437 tree elem1 = VECTOR_CST_ELT (arg1, i);
1439 elts[i] = const_binop (code, elem1, arg2);
1441 /* It is possible that const_binop cannot handle the given
1442 code and return NULL_TREE. */
1443 if (elts[i] == NULL_TREE)
1444 return NULL_TREE;
1447 return build_vector (type, elts);
1449 return NULL_TREE;
1452 /* Overload that adds a TYPE parameter to be able to dispatch
1453 to fold_relational_const. */
1455 tree
1456 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1458 if (TREE_CODE_CLASS (code) == tcc_comparison)
1459 return fold_relational_const (code, type, arg1, arg2);
1461 /* ??? Until we make the const_binop worker take the type of the
1462 result as argument put those cases that need it here. */
1463 switch (code)
1465 case COMPLEX_EXPR:
1466 if ((TREE_CODE (arg1) == REAL_CST
1467 && TREE_CODE (arg2) == REAL_CST)
1468 || (TREE_CODE (arg1) == INTEGER_CST
1469 && TREE_CODE (arg2) == INTEGER_CST))
1470 return build_complex (type, arg1, arg2);
1471 return NULL_TREE;
1473 case VEC_PACK_TRUNC_EXPR:
1474 case VEC_PACK_FIX_TRUNC_EXPR:
1476 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1477 tree *elts;
1479 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1480 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1481 if (TREE_CODE (arg1) != VECTOR_CST
1482 || TREE_CODE (arg2) != VECTOR_CST)
1483 return NULL_TREE;
1485 elts = XALLOCAVEC (tree, nelts);
1486 if (!vec_cst_ctor_to_array (arg1, elts)
1487 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1488 return NULL_TREE;
1490 for (i = 0; i < nelts; i++)
1492 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1493 ? NOP_EXPR : FIX_TRUNC_EXPR,
1494 TREE_TYPE (type), elts[i]);
1495 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1496 return NULL_TREE;
1499 return build_vector (type, elts);
1502 case VEC_WIDEN_MULT_LO_EXPR:
1503 case VEC_WIDEN_MULT_HI_EXPR:
1504 case VEC_WIDEN_MULT_EVEN_EXPR:
1505 case VEC_WIDEN_MULT_ODD_EXPR:
1507 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1508 unsigned int out, ofs, scale;
1509 tree *elts;
1511 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1512 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1513 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1514 return NULL_TREE;
1516 elts = XALLOCAVEC (tree, nelts * 4);
1517 if (!vec_cst_ctor_to_array (arg1, elts)
1518 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1519 return NULL_TREE;
1521 if (code == VEC_WIDEN_MULT_LO_EXPR)
1522 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1523 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1524 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1525 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1526 scale = 1, ofs = 0;
1527 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1528 scale = 1, ofs = 1;
1530 for (out = 0; out < nelts; out++)
1532 unsigned int in1 = (out << scale) + ofs;
1533 unsigned int in2 = in1 + nelts * 2;
1534 tree t1, t2;
1536 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1537 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1539 if (t1 == NULL_TREE || t2 == NULL_TREE)
1540 return NULL_TREE;
1541 elts[out] = const_binop (MULT_EXPR, t1, t2);
1542 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1543 return NULL_TREE;
1546 return build_vector (type, elts);
1549 default:;
1552 if (TREE_CODE_CLASS (code) != tcc_binary)
1553 return NULL_TREE;
1555 /* Make sure type and arg0 have the same saturating flag. */
1556 gcc_checking_assert (TYPE_SATURATING (type)
1557 == TYPE_SATURATING (TREE_TYPE (arg1)));
1559 return const_binop (code, arg1, arg2);
1562 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1563 Return zero if computing the constants is not possible. */
1565 tree
1566 const_unop (enum tree_code code, tree type, tree arg0)
1568 /* Don't perform the operation, other than NEGATE and ABS, if
1569 flag_signaling_nans is on and the operand is a signaling NaN. */
1570 if (TREE_CODE (arg0) == REAL_CST
1571 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1572 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1573 && code != NEGATE_EXPR
1574 && code != ABS_EXPR)
1575 return NULL_TREE;
1577 switch (code)
1579 CASE_CONVERT:
1580 case FLOAT_EXPR:
1581 case FIX_TRUNC_EXPR:
1582 case FIXED_CONVERT_EXPR:
1583 return fold_convert_const (code, type, arg0);
1585 case ADDR_SPACE_CONVERT_EXPR:
1586 /* If the source address is 0, and the source address space
1587 cannot have a valid object at 0, fold to dest type null. */
1588 if (integer_zerop (arg0)
1589 && !(targetm.addr_space.zero_address_valid
1590 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1591 return fold_convert_const (code, type, arg0);
1592 break;
1594 case VIEW_CONVERT_EXPR:
1595 return fold_view_convert_expr (type, arg0);
1597 case NEGATE_EXPR:
1599 /* Can't call fold_negate_const directly here as that doesn't
1600 handle all cases and we might not be able to negate some
1601 constants. */
1602 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1603 if (tem && CONSTANT_CLASS_P (tem))
1604 return tem;
1605 break;
1608 case ABS_EXPR:
1609 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1610 return fold_abs_const (arg0, type);
1611 break;
1613 case CONJ_EXPR:
1614 if (TREE_CODE (arg0) == COMPLEX_CST)
1616 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1617 TREE_TYPE (type));
1618 return build_complex (type, TREE_REALPART (arg0), ipart);
1620 break;
1622 case BIT_NOT_EXPR:
1623 if (TREE_CODE (arg0) == INTEGER_CST)
1624 return fold_not_const (arg0, type);
1625 /* Perform BIT_NOT_EXPR on each element individually. */
1626 else if (TREE_CODE (arg0) == VECTOR_CST)
1628 tree *elements;
1629 tree elem;
1630 unsigned count = VECTOR_CST_NELTS (arg0), i;
1632 elements = XALLOCAVEC (tree, count);
1633 for (i = 0; i < count; i++)
1635 elem = VECTOR_CST_ELT (arg0, i);
1636 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1637 if (elem == NULL_TREE)
1638 break;
1639 elements[i] = elem;
1641 if (i == count)
1642 return build_vector (type, elements);
1644 break;
1646 case TRUTH_NOT_EXPR:
1647 if (TREE_CODE (arg0) == INTEGER_CST)
1648 return constant_boolean_node (integer_zerop (arg0), type);
1649 break;
1651 case REALPART_EXPR:
1652 if (TREE_CODE (arg0) == COMPLEX_CST)
1653 return fold_convert (type, TREE_REALPART (arg0));
1654 break;
1656 case IMAGPART_EXPR:
1657 if (TREE_CODE (arg0) == COMPLEX_CST)
1658 return fold_convert (type, TREE_IMAGPART (arg0));
1659 break;
1661 case VEC_UNPACK_LO_EXPR:
1662 case VEC_UNPACK_HI_EXPR:
1663 case VEC_UNPACK_FLOAT_LO_EXPR:
1664 case VEC_UNPACK_FLOAT_HI_EXPR:
1666 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1667 tree *elts;
1668 enum tree_code subcode;
1670 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1671 if (TREE_CODE (arg0) != VECTOR_CST)
1672 return NULL_TREE;
1674 elts = XALLOCAVEC (tree, nelts * 2);
1675 if (!vec_cst_ctor_to_array (arg0, elts))
1676 return NULL_TREE;
1678 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1679 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1680 elts += nelts;
1682 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1683 subcode = NOP_EXPR;
1684 else
1685 subcode = FLOAT_EXPR;
1687 for (i = 0; i < nelts; i++)
1689 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1690 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1691 return NULL_TREE;
1694 return build_vector (type, elts);
1697 case REDUC_MIN_EXPR:
1698 case REDUC_MAX_EXPR:
1699 case REDUC_PLUS_EXPR:
1701 unsigned int nelts, i;
1702 tree *elts;
1703 enum tree_code subcode;
1705 if (TREE_CODE (arg0) != VECTOR_CST)
1706 return NULL_TREE;
1707 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1709 elts = XALLOCAVEC (tree, nelts);
1710 if (!vec_cst_ctor_to_array (arg0, elts))
1711 return NULL_TREE;
1713 switch (code)
1715 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1716 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1717 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1718 default: gcc_unreachable ();
1721 for (i = 1; i < nelts; i++)
1723 elts[0] = const_binop (subcode, elts[0], elts[i]);
1724 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1725 return NULL_TREE;
1728 return elts[0];
1731 default:
1732 break;
1735 return NULL_TREE;
1738 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1739 indicates which particular sizetype to create. */
1741 tree
1742 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1744 return build_int_cst (sizetype_tab[(int) kind], number);
1747 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1748 is a tree code. The type of the result is taken from the operands.
1749 Both must be equivalent integer types, ala int_binop_types_match_p.
1750 If the operands are constant, so is the result. */
1752 tree
1753 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1755 tree type = TREE_TYPE (arg0);
1757 if (arg0 == error_mark_node || arg1 == error_mark_node)
1758 return error_mark_node;
1760 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1761 TREE_TYPE (arg1)));
1763 /* Handle the special case of two integer constants faster. */
1764 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1766 /* And some specific cases even faster than that. */
1767 if (code == PLUS_EXPR)
1769 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1770 return arg1;
1771 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1772 return arg0;
1774 else if (code == MINUS_EXPR)
1776 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1777 return arg0;
1779 else if (code == MULT_EXPR)
1781 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1782 return arg1;
1785 /* Handle general case of two integer constants. For sizetype
1786 constant calculations we always want to know about overflow,
1787 even in the unsigned case. */
1788 return int_const_binop_1 (code, arg0, arg1, -1);
1791 return fold_build2_loc (loc, code, type, arg0, arg1);
1794 /* Given two values, either both of sizetype or both of bitsizetype,
1795 compute the difference between the two values. Return the value
1796 in signed type corresponding to the type of the operands. */
1798 tree
1799 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1801 tree type = TREE_TYPE (arg0);
1802 tree ctype;
1804 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1805 TREE_TYPE (arg1)));
1807 /* If the type is already signed, just do the simple thing. */
1808 if (!TYPE_UNSIGNED (type))
1809 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1811 if (type == sizetype)
1812 ctype = ssizetype;
1813 else if (type == bitsizetype)
1814 ctype = sbitsizetype;
1815 else
1816 ctype = signed_type_for (type);
1818 /* If either operand is not a constant, do the conversions to the signed
1819 type and subtract. The hardware will do the right thing with any
1820 overflow in the subtraction. */
1821 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1822 return size_binop_loc (loc, MINUS_EXPR,
1823 fold_convert_loc (loc, ctype, arg0),
1824 fold_convert_loc (loc, ctype, arg1));
1826 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1827 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1828 overflow) and negate (which can't either). Special-case a result
1829 of zero while we're here. */
1830 if (tree_int_cst_equal (arg0, arg1))
1831 return build_int_cst (ctype, 0);
1832 else if (tree_int_cst_lt (arg1, arg0))
1833 return fold_convert_loc (loc, ctype,
1834 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1835 else
1836 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1837 fold_convert_loc (loc, ctype,
1838 size_binop_loc (loc,
1839 MINUS_EXPR,
1840 arg1, arg0)));
1843 /* A subroutine of fold_convert_const handling conversions of an
1844 INTEGER_CST to another integer type. */
1846 static tree
1847 fold_convert_const_int_from_int (tree type, const_tree arg1)
1849 /* Given an integer constant, make new constant with new type,
1850 appropriately sign-extended or truncated. Use widest_int
1851 so that any extension is done according ARG1's type. */
1852 return force_fit_type (type, wi::to_widest (arg1),
1853 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1854 TREE_OVERFLOW (arg1));
1857 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1858 to an integer type. */
1860 static tree
1861 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1863 bool overflow = false;
1864 tree t;
1866 /* The following code implements the floating point to integer
1867 conversion rules required by the Java Language Specification,
1868 that IEEE NaNs are mapped to zero and values that overflow
1869 the target precision saturate, i.e. values greater than
1870 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1871 are mapped to INT_MIN. These semantics are allowed by the
1872 C and C++ standards that simply state that the behavior of
1873 FP-to-integer conversion is unspecified upon overflow. */
1875 wide_int val;
1876 REAL_VALUE_TYPE r;
1877 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1879 switch (code)
1881 case FIX_TRUNC_EXPR:
1882 real_trunc (&r, VOIDmode, &x);
1883 break;
1885 default:
1886 gcc_unreachable ();
1889 /* If R is NaN, return zero and show we have an overflow. */
1890 if (REAL_VALUE_ISNAN (r))
1892 overflow = true;
1893 val = wi::zero (TYPE_PRECISION (type));
1896 /* See if R is less than the lower bound or greater than the
1897 upper bound. */
1899 if (! overflow)
1901 tree lt = TYPE_MIN_VALUE (type);
1902 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1903 if (real_less (&r, &l))
1905 overflow = true;
1906 val = lt;
1910 if (! overflow)
1912 tree ut = TYPE_MAX_VALUE (type);
1913 if (ut)
1915 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1916 if (real_less (&u, &r))
1918 overflow = true;
1919 val = ut;
1924 if (! overflow)
1925 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1927 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1928 return t;
1931 /* A subroutine of fold_convert_const handling conversions of a
1932 FIXED_CST to an integer type. */
1934 static tree
1935 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1937 tree t;
1938 double_int temp, temp_trunc;
1939 unsigned int mode;
1941 /* Right shift FIXED_CST to temp by fbit. */
1942 temp = TREE_FIXED_CST (arg1).data;
1943 mode = TREE_FIXED_CST (arg1).mode;
1944 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1946 temp = temp.rshift (GET_MODE_FBIT (mode),
1947 HOST_BITS_PER_DOUBLE_INT,
1948 SIGNED_FIXED_POINT_MODE_P (mode));
1950 /* Left shift temp to temp_trunc by fbit. */
1951 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1952 HOST_BITS_PER_DOUBLE_INT,
1953 SIGNED_FIXED_POINT_MODE_P (mode));
1955 else
1957 temp = double_int_zero;
1958 temp_trunc = double_int_zero;
1961 /* If FIXED_CST is negative, we need to round the value toward 0.
1962 By checking if the fractional bits are not zero to add 1 to temp. */
1963 if (SIGNED_FIXED_POINT_MODE_P (mode)
1964 && temp_trunc.is_negative ()
1965 && TREE_FIXED_CST (arg1).data != temp_trunc)
1966 temp += double_int_one;
1968 /* Given a fixed-point constant, make new constant with new type,
1969 appropriately sign-extended or truncated. */
1970 t = force_fit_type (type, temp, -1,
1971 (temp.is_negative ()
1972 && (TYPE_UNSIGNED (type)
1973 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1974 | TREE_OVERFLOW (arg1));
1976 return t;
1979 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1980 to another floating point type. */
1982 static tree
1983 fold_convert_const_real_from_real (tree type, const_tree arg1)
1985 REAL_VALUE_TYPE value;
1986 tree t;
1988 /* Don't perform the operation if flag_signaling_nans is on
1989 and the operand is a signaling NaN. */
1990 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1991 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1992 return NULL_TREE;
1994 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1995 t = build_real (type, value);
1997 /* If converting an infinity or NAN to a representation that doesn't
1998 have one, set the overflow bit so that we can produce some kind of
1999 error message at the appropriate point if necessary. It's not the
2000 most user-friendly message, but it's better than nothing. */
2001 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2002 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2003 TREE_OVERFLOW (t) = 1;
2004 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2005 && !MODE_HAS_NANS (TYPE_MODE (type)))
2006 TREE_OVERFLOW (t) = 1;
2007 /* Regular overflow, conversion produced an infinity in a mode that
2008 can't represent them. */
2009 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2010 && REAL_VALUE_ISINF (value)
2011 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2012 TREE_OVERFLOW (t) = 1;
2013 else
2014 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2015 return t;
2018 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2019 to a floating point type. */
2021 static tree
2022 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2024 REAL_VALUE_TYPE value;
2025 tree t;
2027 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2028 t = build_real (type, value);
2030 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2031 return t;
2034 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2035 to another fixed-point type. */
2037 static tree
2038 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2040 FIXED_VALUE_TYPE value;
2041 tree t;
2042 bool overflow_p;
2044 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2045 TYPE_SATURATING (type));
2046 t = build_fixed (type, value);
2048 /* Propagate overflow flags. */
2049 if (overflow_p | TREE_OVERFLOW (arg1))
2050 TREE_OVERFLOW (t) = 1;
2051 return t;
2054 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2055 to a fixed-point type. */
2057 static tree
2058 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2060 FIXED_VALUE_TYPE value;
2061 tree t;
2062 bool overflow_p;
2063 double_int di;
2065 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2067 di.low = TREE_INT_CST_ELT (arg1, 0);
2068 if (TREE_INT_CST_NUNITS (arg1) == 1)
2069 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2070 else
2071 di.high = TREE_INT_CST_ELT (arg1, 1);
2073 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2074 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2075 TYPE_SATURATING (type));
2076 t = build_fixed (type, value);
2078 /* Propagate overflow flags. */
2079 if (overflow_p | TREE_OVERFLOW (arg1))
2080 TREE_OVERFLOW (t) = 1;
2081 return t;
2084 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2085 to a fixed-point type. */
2087 static tree
2088 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2090 FIXED_VALUE_TYPE value;
2091 tree t;
2092 bool overflow_p;
2094 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2095 &TREE_REAL_CST (arg1),
2096 TYPE_SATURATING (type));
2097 t = build_fixed (type, value);
2099 /* Propagate overflow flags. */
2100 if (overflow_p | TREE_OVERFLOW (arg1))
2101 TREE_OVERFLOW (t) = 1;
2102 return t;
2105 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2106 type TYPE. If no simplification can be done return NULL_TREE. */
2108 static tree
2109 fold_convert_const (enum tree_code code, tree type, tree arg1)
2111 if (TREE_TYPE (arg1) == type)
2112 return arg1;
2114 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2115 || TREE_CODE (type) == OFFSET_TYPE)
2117 if (TREE_CODE (arg1) == INTEGER_CST)
2118 return fold_convert_const_int_from_int (type, arg1);
2119 else if (TREE_CODE (arg1) == REAL_CST)
2120 return fold_convert_const_int_from_real (code, type, arg1);
2121 else if (TREE_CODE (arg1) == FIXED_CST)
2122 return fold_convert_const_int_from_fixed (type, arg1);
2124 else if (TREE_CODE (type) == REAL_TYPE)
2126 if (TREE_CODE (arg1) == INTEGER_CST)
2127 return build_real_from_int_cst (type, arg1);
2128 else if (TREE_CODE (arg1) == REAL_CST)
2129 return fold_convert_const_real_from_real (type, arg1);
2130 else if (TREE_CODE (arg1) == FIXED_CST)
2131 return fold_convert_const_real_from_fixed (type, arg1);
2133 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2135 if (TREE_CODE (arg1) == FIXED_CST)
2136 return fold_convert_const_fixed_from_fixed (type, arg1);
2137 else if (TREE_CODE (arg1) == INTEGER_CST)
2138 return fold_convert_const_fixed_from_int (type, arg1);
2139 else if (TREE_CODE (arg1) == REAL_CST)
2140 return fold_convert_const_fixed_from_real (type, arg1);
2142 else if (TREE_CODE (type) == VECTOR_TYPE)
2144 if (TREE_CODE (arg1) == VECTOR_CST
2145 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2147 int len = TYPE_VECTOR_SUBPARTS (type);
2148 tree elttype = TREE_TYPE (type);
2149 tree *v = XALLOCAVEC (tree, len);
2150 for (int i = 0; i < len; ++i)
2152 tree elt = VECTOR_CST_ELT (arg1, i);
2153 tree cvt = fold_convert_const (code, elttype, elt);
2154 if (cvt == NULL_TREE)
2155 return NULL_TREE;
2156 v[i] = cvt;
2158 return build_vector (type, v);
2161 return NULL_TREE;
2164 /* Construct a vector of zero elements of vector type TYPE. */
2166 static tree
2167 build_zero_vector (tree type)
2169 tree t;
2171 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2172 return build_vector_from_val (type, t);
2175 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2177 bool
2178 fold_convertible_p (const_tree type, const_tree arg)
2180 tree orig = TREE_TYPE (arg);
2182 if (type == orig)
2183 return true;
2185 if (TREE_CODE (arg) == ERROR_MARK
2186 || TREE_CODE (type) == ERROR_MARK
2187 || TREE_CODE (orig) == ERROR_MARK)
2188 return false;
2190 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2191 return true;
2193 switch (TREE_CODE (type))
2195 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2196 case POINTER_TYPE: case REFERENCE_TYPE:
2197 case OFFSET_TYPE:
2198 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2199 || TREE_CODE (orig) == OFFSET_TYPE);
2201 case REAL_TYPE:
2202 case FIXED_POINT_TYPE:
2203 case COMPLEX_TYPE:
2204 case VECTOR_TYPE:
2205 case VOID_TYPE:
2206 return TREE_CODE (type) == TREE_CODE (orig);
2208 default:
2209 return false;
2213 /* Convert expression ARG to type TYPE. Used by the middle-end for
2214 simple conversions in preference to calling the front-end's convert. */
2216 tree
2217 fold_convert_loc (location_t loc, tree type, tree arg)
2219 tree orig = TREE_TYPE (arg);
2220 tree tem;
2222 if (type == orig)
2223 return arg;
2225 if (TREE_CODE (arg) == ERROR_MARK
2226 || TREE_CODE (type) == ERROR_MARK
2227 || TREE_CODE (orig) == ERROR_MARK)
2228 return error_mark_node;
2230 switch (TREE_CODE (type))
2232 case POINTER_TYPE:
2233 case REFERENCE_TYPE:
2234 /* Handle conversions between pointers to different address spaces. */
2235 if (POINTER_TYPE_P (orig)
2236 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2237 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2238 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2239 /* fall through */
2241 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2242 case OFFSET_TYPE:
2243 if (TREE_CODE (arg) == INTEGER_CST)
2245 tem = fold_convert_const (NOP_EXPR, type, arg);
2246 if (tem != NULL_TREE)
2247 return tem;
2249 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2250 || TREE_CODE (orig) == OFFSET_TYPE)
2251 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2252 if (TREE_CODE (orig) == COMPLEX_TYPE)
2253 return fold_convert_loc (loc, type,
2254 fold_build1_loc (loc, REALPART_EXPR,
2255 TREE_TYPE (orig), arg));
2256 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2257 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2258 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2260 case REAL_TYPE:
2261 if (TREE_CODE (arg) == INTEGER_CST)
2263 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 return tem;
2267 else if (TREE_CODE (arg) == REAL_CST)
2269 tem = fold_convert_const (NOP_EXPR, type, arg);
2270 if (tem != NULL_TREE)
2271 return tem;
2273 else if (TREE_CODE (arg) == FIXED_CST)
2275 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2276 if (tem != NULL_TREE)
2277 return tem;
2280 switch (TREE_CODE (orig))
2282 case INTEGER_TYPE:
2283 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2284 case POINTER_TYPE: case REFERENCE_TYPE:
2285 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2287 case REAL_TYPE:
2288 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2290 case FIXED_POINT_TYPE:
2291 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2293 case COMPLEX_TYPE:
2294 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2295 return fold_convert_loc (loc, type, tem);
2297 default:
2298 gcc_unreachable ();
2301 case FIXED_POINT_TYPE:
2302 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2303 || TREE_CODE (arg) == REAL_CST)
2305 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2306 if (tem != NULL_TREE)
2307 goto fold_convert_exit;
2310 switch (TREE_CODE (orig))
2312 case FIXED_POINT_TYPE:
2313 case INTEGER_TYPE:
2314 case ENUMERAL_TYPE:
2315 case BOOLEAN_TYPE:
2316 case REAL_TYPE:
2317 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2319 case COMPLEX_TYPE:
2320 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2321 return fold_convert_loc (loc, type, tem);
2323 default:
2324 gcc_unreachable ();
2327 case COMPLEX_TYPE:
2328 switch (TREE_CODE (orig))
2330 case INTEGER_TYPE:
2331 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2332 case POINTER_TYPE: case REFERENCE_TYPE:
2333 case REAL_TYPE:
2334 case FIXED_POINT_TYPE:
2335 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2336 fold_convert_loc (loc, TREE_TYPE (type), arg),
2337 fold_convert_loc (loc, TREE_TYPE (type),
2338 integer_zero_node));
2339 case COMPLEX_TYPE:
2341 tree rpart, ipart;
2343 if (TREE_CODE (arg) == COMPLEX_EXPR)
2345 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2346 TREE_OPERAND (arg, 0));
2347 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2348 TREE_OPERAND (arg, 1));
2349 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2352 arg = save_expr (arg);
2353 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2354 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2355 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2356 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2357 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2360 default:
2361 gcc_unreachable ();
2364 case VECTOR_TYPE:
2365 if (integer_zerop (arg))
2366 return build_zero_vector (type);
2367 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2368 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2369 || TREE_CODE (orig) == VECTOR_TYPE);
2370 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2372 case VOID_TYPE:
2373 tem = fold_ignored_result (arg);
2374 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2376 default:
2377 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2378 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2379 gcc_unreachable ();
2381 fold_convert_exit:
2382 protected_set_expr_location_unshare (tem, loc);
2383 return tem;
2386 /* Return false if expr can be assumed not to be an lvalue, true
2387 otherwise. */
2389 static bool
2390 maybe_lvalue_p (const_tree x)
2392 /* We only need to wrap lvalue tree codes. */
2393 switch (TREE_CODE (x))
2395 case VAR_DECL:
2396 case PARM_DECL:
2397 case RESULT_DECL:
2398 case LABEL_DECL:
2399 case FUNCTION_DECL:
2400 case SSA_NAME:
2402 case COMPONENT_REF:
2403 case MEM_REF:
2404 case INDIRECT_REF:
2405 case ARRAY_REF:
2406 case ARRAY_RANGE_REF:
2407 case BIT_FIELD_REF:
2408 case OBJ_TYPE_REF:
2410 case REALPART_EXPR:
2411 case IMAGPART_EXPR:
2412 case PREINCREMENT_EXPR:
2413 case PREDECREMENT_EXPR:
2414 case SAVE_EXPR:
2415 case TRY_CATCH_EXPR:
2416 case WITH_CLEANUP_EXPR:
2417 case COMPOUND_EXPR:
2418 case MODIFY_EXPR:
2419 case TARGET_EXPR:
2420 case COND_EXPR:
2421 case BIND_EXPR:
2422 break;
2424 default:
2425 /* Assume the worst for front-end tree codes. */
2426 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2427 break;
2428 return false;
2431 return true;
2434 /* Return an expr equal to X but certainly not valid as an lvalue. */
2436 tree
2437 non_lvalue_loc (location_t loc, tree x)
2439 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2440 us. */
2441 if (in_gimple_form)
2442 return x;
2444 if (! maybe_lvalue_p (x))
2445 return x;
2446 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2449 /* When pedantic, return an expr equal to X but certainly not valid as a
2450 pedantic lvalue. Otherwise, return X. */
2452 static tree
2453 pedantic_non_lvalue_loc (location_t loc, tree x)
2455 return protected_set_expr_location_unshare (x, loc);
2458 /* Given a tree comparison code, return the code that is the logical inverse.
2459 It is generally not safe to do this for floating-point comparisons, except
2460 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2461 ERROR_MARK in this case. */
2463 enum tree_code
2464 invert_tree_comparison (enum tree_code code, bool honor_nans)
2466 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2467 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2468 return ERROR_MARK;
2470 switch (code)
2472 case EQ_EXPR:
2473 return NE_EXPR;
2474 case NE_EXPR:
2475 return EQ_EXPR;
2476 case GT_EXPR:
2477 return honor_nans ? UNLE_EXPR : LE_EXPR;
2478 case GE_EXPR:
2479 return honor_nans ? UNLT_EXPR : LT_EXPR;
2480 case LT_EXPR:
2481 return honor_nans ? UNGE_EXPR : GE_EXPR;
2482 case LE_EXPR:
2483 return honor_nans ? UNGT_EXPR : GT_EXPR;
2484 case LTGT_EXPR:
2485 return UNEQ_EXPR;
2486 case UNEQ_EXPR:
2487 return LTGT_EXPR;
2488 case UNGT_EXPR:
2489 return LE_EXPR;
2490 case UNGE_EXPR:
2491 return LT_EXPR;
2492 case UNLT_EXPR:
2493 return GE_EXPR;
2494 case UNLE_EXPR:
2495 return GT_EXPR;
2496 case ORDERED_EXPR:
2497 return UNORDERED_EXPR;
2498 case UNORDERED_EXPR:
2499 return ORDERED_EXPR;
2500 default:
2501 gcc_unreachable ();
2505 /* Similar, but return the comparison that results if the operands are
2506 swapped. This is safe for floating-point. */
2508 enum tree_code
2509 swap_tree_comparison (enum tree_code code)
2511 switch (code)
2513 case EQ_EXPR:
2514 case NE_EXPR:
2515 case ORDERED_EXPR:
2516 case UNORDERED_EXPR:
2517 case LTGT_EXPR:
2518 case UNEQ_EXPR:
2519 return code;
2520 case GT_EXPR:
2521 return LT_EXPR;
2522 case GE_EXPR:
2523 return LE_EXPR;
2524 case LT_EXPR:
2525 return GT_EXPR;
2526 case LE_EXPR:
2527 return GE_EXPR;
2528 case UNGT_EXPR:
2529 return UNLT_EXPR;
2530 case UNGE_EXPR:
2531 return UNLE_EXPR;
2532 case UNLT_EXPR:
2533 return UNGT_EXPR;
2534 case UNLE_EXPR:
2535 return UNGE_EXPR;
2536 default:
2537 gcc_unreachable ();
2542 /* Convert a comparison tree code from an enum tree_code representation
2543 into a compcode bit-based encoding. This function is the inverse of
2544 compcode_to_comparison. */
2546 static enum comparison_code
2547 comparison_to_compcode (enum tree_code code)
2549 switch (code)
2551 case LT_EXPR:
2552 return COMPCODE_LT;
2553 case EQ_EXPR:
2554 return COMPCODE_EQ;
2555 case LE_EXPR:
2556 return COMPCODE_LE;
2557 case GT_EXPR:
2558 return COMPCODE_GT;
2559 case NE_EXPR:
2560 return COMPCODE_NE;
2561 case GE_EXPR:
2562 return COMPCODE_GE;
2563 case ORDERED_EXPR:
2564 return COMPCODE_ORD;
2565 case UNORDERED_EXPR:
2566 return COMPCODE_UNORD;
2567 case UNLT_EXPR:
2568 return COMPCODE_UNLT;
2569 case UNEQ_EXPR:
2570 return COMPCODE_UNEQ;
2571 case UNLE_EXPR:
2572 return COMPCODE_UNLE;
2573 case UNGT_EXPR:
2574 return COMPCODE_UNGT;
2575 case LTGT_EXPR:
2576 return COMPCODE_LTGT;
2577 case UNGE_EXPR:
2578 return COMPCODE_UNGE;
2579 default:
2580 gcc_unreachable ();
2584 /* Convert a compcode bit-based encoding of a comparison operator back
2585 to GCC's enum tree_code representation. This function is the
2586 inverse of comparison_to_compcode. */
2588 static enum tree_code
2589 compcode_to_comparison (enum comparison_code code)
2591 switch (code)
2593 case COMPCODE_LT:
2594 return LT_EXPR;
2595 case COMPCODE_EQ:
2596 return EQ_EXPR;
2597 case COMPCODE_LE:
2598 return LE_EXPR;
2599 case COMPCODE_GT:
2600 return GT_EXPR;
2601 case COMPCODE_NE:
2602 return NE_EXPR;
2603 case COMPCODE_GE:
2604 return GE_EXPR;
2605 case COMPCODE_ORD:
2606 return ORDERED_EXPR;
2607 case COMPCODE_UNORD:
2608 return UNORDERED_EXPR;
2609 case COMPCODE_UNLT:
2610 return UNLT_EXPR;
2611 case COMPCODE_UNEQ:
2612 return UNEQ_EXPR;
2613 case COMPCODE_UNLE:
2614 return UNLE_EXPR;
2615 case COMPCODE_UNGT:
2616 return UNGT_EXPR;
2617 case COMPCODE_LTGT:
2618 return LTGT_EXPR;
2619 case COMPCODE_UNGE:
2620 return UNGE_EXPR;
2621 default:
2622 gcc_unreachable ();
2626 /* Return a tree for the comparison which is the combination of
2627 doing the AND or OR (depending on CODE) of the two operations LCODE
2628 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2629 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2630 if this makes the transformation invalid. */
2632 tree
2633 combine_comparisons (location_t loc,
2634 enum tree_code code, enum tree_code lcode,
2635 enum tree_code rcode, tree truth_type,
2636 tree ll_arg, tree lr_arg)
2638 bool honor_nans = HONOR_NANS (ll_arg);
2639 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2640 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2641 int compcode;
2643 switch (code)
2645 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2646 compcode = lcompcode & rcompcode;
2647 break;
2649 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2650 compcode = lcompcode | rcompcode;
2651 break;
2653 default:
2654 return NULL_TREE;
2657 if (!honor_nans)
2659 /* Eliminate unordered comparisons, as well as LTGT and ORD
2660 which are not used unless the mode has NaNs. */
2661 compcode &= ~COMPCODE_UNORD;
2662 if (compcode == COMPCODE_LTGT)
2663 compcode = COMPCODE_NE;
2664 else if (compcode == COMPCODE_ORD)
2665 compcode = COMPCODE_TRUE;
2667 else if (flag_trapping_math)
2669 /* Check that the original operation and the optimized ones will trap
2670 under the same condition. */
2671 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2672 && (lcompcode != COMPCODE_EQ)
2673 && (lcompcode != COMPCODE_ORD);
2674 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2675 && (rcompcode != COMPCODE_EQ)
2676 && (rcompcode != COMPCODE_ORD);
2677 bool trap = (compcode & COMPCODE_UNORD) == 0
2678 && (compcode != COMPCODE_EQ)
2679 && (compcode != COMPCODE_ORD);
2681 /* In a short-circuited boolean expression the LHS might be
2682 such that the RHS, if evaluated, will never trap. For
2683 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2684 if neither x nor y is NaN. (This is a mixed blessing: for
2685 example, the expression above will never trap, hence
2686 optimizing it to x < y would be invalid). */
2687 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2688 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2689 rtrap = false;
2691 /* If the comparison was short-circuited, and only the RHS
2692 trapped, we may now generate a spurious trap. */
2693 if (rtrap && !ltrap
2694 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2695 return NULL_TREE;
2697 /* If we changed the conditions that cause a trap, we lose. */
2698 if ((ltrap || rtrap) != trap)
2699 return NULL_TREE;
2702 if (compcode == COMPCODE_TRUE)
2703 return constant_boolean_node (true, truth_type);
2704 else if (compcode == COMPCODE_FALSE)
2705 return constant_boolean_node (false, truth_type);
2706 else
2708 enum tree_code tcode;
2710 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2711 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2715 /* Return nonzero if two operands (typically of the same tree node)
2716 are necessarily equal. FLAGS modifies behavior as follows:
2718 If OEP_ONLY_CONST is set, only return nonzero for constants.
2719 This function tests whether the operands are indistinguishable;
2720 it does not test whether they are equal using C's == operation.
2721 The distinction is important for IEEE floating point, because
2722 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2723 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2725 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2726 even though it may hold multiple values during a function.
2727 This is because a GCC tree node guarantees that nothing else is
2728 executed between the evaluation of its "operands" (which may often
2729 be evaluated in arbitrary order). Hence if the operands themselves
2730 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2731 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2732 unset means assuming isochronic (or instantaneous) tree equivalence.
2733 Unless comparing arbitrary expression trees, such as from different
2734 statements, this flag can usually be left unset.
2736 If OEP_PURE_SAME is set, then pure functions with identical arguments
2737 are considered the same. It is used when the caller has other ways
2738 to ensure that global memory is unchanged in between.
2740 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2741 not values of expressions.
2743 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2744 any operand with side effect. This is unnecesarily conservative in the
2745 case we know that arg0 and arg1 are in disjoint code paths (such as in
2746 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2747 addresses with TREE_CONSTANT flag set so we know that &var == &var
2748 even if var is volatile. */
2751 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2753 /* If either is ERROR_MARK, they aren't equal. */
2754 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2755 || TREE_TYPE (arg0) == error_mark_node
2756 || TREE_TYPE (arg1) == error_mark_node)
2757 return 0;
2759 /* Similar, if either does not have a type (like a released SSA name),
2760 they aren't equal. */
2761 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2762 return 0;
2764 /* We cannot consider pointers to different address space equal. */
2765 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2766 && POINTER_TYPE_P (TREE_TYPE (arg1))
2767 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2768 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2769 return 0;
2771 /* Check equality of integer constants before bailing out due to
2772 precision differences. */
2773 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2775 /* Address of INTEGER_CST is not defined; check that we did not forget
2776 to drop the OEP_ADDRESS_OF flags. */
2777 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2778 return tree_int_cst_equal (arg0, arg1);
2781 if (!(flags & OEP_ADDRESS_OF))
2783 /* If both types don't have the same signedness, then we can't consider
2784 them equal. We must check this before the STRIP_NOPS calls
2785 because they may change the signedness of the arguments. As pointers
2786 strictly don't have a signedness, require either two pointers or
2787 two non-pointers as well. */
2788 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2789 || POINTER_TYPE_P (TREE_TYPE (arg0))
2790 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2791 return 0;
2793 /* If both types don't have the same precision, then it is not safe
2794 to strip NOPs. */
2795 if (element_precision (TREE_TYPE (arg0))
2796 != element_precision (TREE_TYPE (arg1)))
2797 return 0;
2799 STRIP_NOPS (arg0);
2800 STRIP_NOPS (arg1);
2802 #if 0
2803 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2804 sanity check once the issue is solved. */
2805 else
2806 /* Addresses of conversions and SSA_NAMEs (and many other things)
2807 are not defined. Check that we did not forget to drop the
2808 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2809 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2810 && TREE_CODE (arg0) != SSA_NAME);
2811 #endif
2813 /* In case both args are comparisons but with different comparison
2814 code, try to swap the comparison operands of one arg to produce
2815 a match and compare that variant. */
2816 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2817 && COMPARISON_CLASS_P (arg0)
2818 && COMPARISON_CLASS_P (arg1))
2820 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2822 if (TREE_CODE (arg0) == swap_code)
2823 return operand_equal_p (TREE_OPERAND (arg0, 0),
2824 TREE_OPERAND (arg1, 1), flags)
2825 && operand_equal_p (TREE_OPERAND (arg0, 1),
2826 TREE_OPERAND (arg1, 0), flags);
2829 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2831 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2832 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2834 else if (flags & OEP_ADDRESS_OF)
2836 /* If we are interested in comparing addresses ignore
2837 MEM_REF wrappings of the base that can appear just for
2838 TBAA reasons. */
2839 if (TREE_CODE (arg0) == MEM_REF
2840 && DECL_P (arg1)
2841 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2842 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2843 && integer_zerop (TREE_OPERAND (arg0, 1)))
2844 return 1;
2845 else if (TREE_CODE (arg1) == MEM_REF
2846 && DECL_P (arg0)
2847 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2848 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2849 && integer_zerop (TREE_OPERAND (arg1, 1)))
2850 return 1;
2851 return 0;
2853 else
2854 return 0;
2857 /* When not checking adddresses, this is needed for conversions and for
2858 COMPONENT_REF. Might as well play it safe and always test this. */
2859 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2860 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2861 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2862 && !(flags & OEP_ADDRESS_OF)))
2863 return 0;
2865 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2866 We don't care about side effects in that case because the SAVE_EXPR
2867 takes care of that for us. In all other cases, two expressions are
2868 equal if they have no side effects. If we have two identical
2869 expressions with side effects that should be treated the same due
2870 to the only side effects being identical SAVE_EXPR's, that will
2871 be detected in the recursive calls below.
2872 If we are taking an invariant address of two identical objects
2873 they are necessarily equal as well. */
2874 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2875 && (TREE_CODE (arg0) == SAVE_EXPR
2876 || (flags & OEP_MATCH_SIDE_EFFECTS)
2877 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2878 return 1;
2880 /* Next handle constant cases, those for which we can return 1 even
2881 if ONLY_CONST is set. */
2882 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2883 switch (TREE_CODE (arg0))
2885 case INTEGER_CST:
2886 return tree_int_cst_equal (arg0, arg1);
2888 case FIXED_CST:
2889 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2890 TREE_FIXED_CST (arg1));
2892 case REAL_CST:
2893 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2894 return 1;
2897 if (!HONOR_SIGNED_ZEROS (arg0))
2899 /* If we do not distinguish between signed and unsigned zero,
2900 consider them equal. */
2901 if (real_zerop (arg0) && real_zerop (arg1))
2902 return 1;
2904 return 0;
2906 case VECTOR_CST:
2908 unsigned i;
2910 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2911 return 0;
2913 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2915 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2916 VECTOR_CST_ELT (arg1, i), flags))
2917 return 0;
2919 return 1;
2922 case COMPLEX_CST:
2923 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2924 flags)
2925 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2926 flags));
2928 case STRING_CST:
2929 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2930 && ! memcmp (TREE_STRING_POINTER (arg0),
2931 TREE_STRING_POINTER (arg1),
2932 TREE_STRING_LENGTH (arg0)));
2934 case ADDR_EXPR:
2935 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2936 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2937 flags | OEP_ADDRESS_OF
2938 | OEP_MATCH_SIDE_EFFECTS);
2939 case CONSTRUCTOR:
2940 /* In GIMPLE empty constructors are allowed in initializers of
2941 aggregates. */
2942 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2943 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2944 default:
2945 break;
2948 if (flags & OEP_ONLY_CONST)
2949 return 0;
2951 /* Define macros to test an operand from arg0 and arg1 for equality and a
2952 variant that allows null and views null as being different from any
2953 non-null value. In the latter case, if either is null, the both
2954 must be; otherwise, do the normal comparison. */
2955 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2956 TREE_OPERAND (arg1, N), flags)
2958 #define OP_SAME_WITH_NULL(N) \
2959 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2960 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2962 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2964 case tcc_unary:
2965 /* Two conversions are equal only if signedness and modes match. */
2966 switch (TREE_CODE (arg0))
2968 CASE_CONVERT:
2969 case FIX_TRUNC_EXPR:
2970 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2971 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2972 return 0;
2973 break;
2974 default:
2975 break;
2978 return OP_SAME (0);
2981 case tcc_comparison:
2982 case tcc_binary:
2983 if (OP_SAME (0) && OP_SAME (1))
2984 return 1;
2986 /* For commutative ops, allow the other order. */
2987 return (commutative_tree_code (TREE_CODE (arg0))
2988 && operand_equal_p (TREE_OPERAND (arg0, 0),
2989 TREE_OPERAND (arg1, 1), flags)
2990 && operand_equal_p (TREE_OPERAND (arg0, 1),
2991 TREE_OPERAND (arg1, 0), flags));
2993 case tcc_reference:
2994 /* If either of the pointer (or reference) expressions we are
2995 dereferencing contain a side effect, these cannot be equal,
2996 but their addresses can be. */
2997 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2998 && (TREE_SIDE_EFFECTS (arg0)
2999 || TREE_SIDE_EFFECTS (arg1)))
3000 return 0;
3002 switch (TREE_CODE (arg0))
3004 case INDIRECT_REF:
3005 if (!(flags & OEP_ADDRESS_OF)
3006 && (TYPE_ALIGN (TREE_TYPE (arg0))
3007 != TYPE_ALIGN (TREE_TYPE (arg1))))
3008 return 0;
3009 flags &= ~OEP_ADDRESS_OF;
3010 return OP_SAME (0);
3012 case IMAGPART_EXPR:
3013 /* Require the same offset. */
3014 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3015 TYPE_SIZE (TREE_TYPE (arg1)),
3016 flags & ~OEP_ADDRESS_OF))
3017 return 0;
3019 /* Fallthru. */
3020 case REALPART_EXPR:
3021 case VIEW_CONVERT_EXPR:
3022 return OP_SAME (0);
3024 case TARGET_MEM_REF:
3025 case MEM_REF:
3026 if (!(flags & OEP_ADDRESS_OF))
3028 /* Require equal access sizes */
3029 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3030 && (!TYPE_SIZE (TREE_TYPE (arg0))
3031 || !TYPE_SIZE (TREE_TYPE (arg1))
3032 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3033 TYPE_SIZE (TREE_TYPE (arg1)),
3034 flags)))
3035 return 0;
3036 /* Verify that access happens in similar types. */
3037 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3038 return 0;
3039 /* Verify that accesses are TBAA compatible. */
3040 if (!alias_ptr_types_compatible_p
3041 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3042 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3043 || (MR_DEPENDENCE_CLIQUE (arg0)
3044 != MR_DEPENDENCE_CLIQUE (arg1))
3045 || (MR_DEPENDENCE_BASE (arg0)
3046 != MR_DEPENDENCE_BASE (arg1)))
3047 return 0;
3048 /* Verify that alignment is compatible. */
3049 if (TYPE_ALIGN (TREE_TYPE (arg0))
3050 != TYPE_ALIGN (TREE_TYPE (arg1)))
3051 return 0;
3053 flags &= ~OEP_ADDRESS_OF;
3054 return (OP_SAME (0) && OP_SAME (1)
3055 /* TARGET_MEM_REF require equal extra operands. */
3056 && (TREE_CODE (arg0) != TARGET_MEM_REF
3057 || (OP_SAME_WITH_NULL (2)
3058 && OP_SAME_WITH_NULL (3)
3059 && OP_SAME_WITH_NULL (4))));
3061 case ARRAY_REF:
3062 case ARRAY_RANGE_REF:
3063 if (!OP_SAME (0))
3064 return 0;
3065 flags &= ~OEP_ADDRESS_OF;
3066 /* Compare the array index by value if it is constant first as we
3067 may have different types but same value here. */
3068 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3069 TREE_OPERAND (arg1, 1))
3070 || OP_SAME (1))
3071 && OP_SAME_WITH_NULL (2)
3072 && OP_SAME_WITH_NULL (3)
3073 /* Compare low bound and element size as with OEP_ADDRESS_OF
3074 we have to account for the offset of the ref. */
3075 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3076 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3077 || (operand_equal_p (array_ref_low_bound
3078 (CONST_CAST_TREE (arg0)),
3079 array_ref_low_bound
3080 (CONST_CAST_TREE (arg1)), flags)
3081 && operand_equal_p (array_ref_element_size
3082 (CONST_CAST_TREE (arg0)),
3083 array_ref_element_size
3084 (CONST_CAST_TREE (arg1)),
3085 flags))));
3087 case COMPONENT_REF:
3088 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3089 may be NULL when we're called to compare MEM_EXPRs. */
3090 if (!OP_SAME_WITH_NULL (0)
3091 || !OP_SAME (1))
3092 return 0;
3093 flags &= ~OEP_ADDRESS_OF;
3094 return OP_SAME_WITH_NULL (2);
3096 case BIT_FIELD_REF:
3097 if (!OP_SAME (0))
3098 return 0;
3099 flags &= ~OEP_ADDRESS_OF;
3100 return OP_SAME (1) && OP_SAME (2);
3102 default:
3103 return 0;
3106 case tcc_expression:
3107 switch (TREE_CODE (arg0))
3109 case ADDR_EXPR:
3110 /* Be sure we pass right ADDRESS_OF flag. */
3111 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3112 return operand_equal_p (TREE_OPERAND (arg0, 0),
3113 TREE_OPERAND (arg1, 0),
3114 flags | OEP_ADDRESS_OF);
3116 case TRUTH_NOT_EXPR:
3117 return OP_SAME (0);
3119 case TRUTH_ANDIF_EXPR:
3120 case TRUTH_ORIF_EXPR:
3121 return OP_SAME (0) && OP_SAME (1);
3123 case FMA_EXPR:
3124 case WIDEN_MULT_PLUS_EXPR:
3125 case WIDEN_MULT_MINUS_EXPR:
3126 if (!OP_SAME (2))
3127 return 0;
3128 /* The multiplcation operands are commutative. */
3129 /* FALLTHRU */
3131 case TRUTH_AND_EXPR:
3132 case TRUTH_OR_EXPR:
3133 case TRUTH_XOR_EXPR:
3134 if (OP_SAME (0) && OP_SAME (1))
3135 return 1;
3137 /* Otherwise take into account this is a commutative operation. */
3138 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3139 TREE_OPERAND (arg1, 1), flags)
3140 && operand_equal_p (TREE_OPERAND (arg0, 1),
3141 TREE_OPERAND (arg1, 0), flags));
3143 case COND_EXPR:
3144 if (! OP_SAME (1) || ! OP_SAME (2))
3145 return 0;
3146 flags &= ~OEP_ADDRESS_OF;
3147 return OP_SAME (0);
3149 case VEC_COND_EXPR:
3150 case DOT_PROD_EXPR:
3151 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3153 default:
3154 return 0;
3157 case tcc_vl_exp:
3158 switch (TREE_CODE (arg0))
3160 case CALL_EXPR:
3161 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3162 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3163 /* If not both CALL_EXPRs are either internal or normal function
3164 functions, then they are not equal. */
3165 return 0;
3166 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3168 /* If the CALL_EXPRs call different internal functions, then they
3169 are not equal. */
3170 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3171 return 0;
3173 else
3175 /* If the CALL_EXPRs call different functions, then they are not
3176 equal. */
3177 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3178 flags))
3179 return 0;
3182 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3184 unsigned int cef = call_expr_flags (arg0);
3185 if (flags & OEP_PURE_SAME)
3186 cef &= ECF_CONST | ECF_PURE;
3187 else
3188 cef &= ECF_CONST;
3189 if (!cef)
3190 return 0;
3193 /* Now see if all the arguments are the same. */
3195 const_call_expr_arg_iterator iter0, iter1;
3196 const_tree a0, a1;
3197 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3198 a1 = first_const_call_expr_arg (arg1, &iter1);
3199 a0 && a1;
3200 a0 = next_const_call_expr_arg (&iter0),
3201 a1 = next_const_call_expr_arg (&iter1))
3202 if (! operand_equal_p (a0, a1, flags))
3203 return 0;
3205 /* If we get here and both argument lists are exhausted
3206 then the CALL_EXPRs are equal. */
3207 return ! (a0 || a1);
3209 default:
3210 return 0;
3213 case tcc_declaration:
3214 /* Consider __builtin_sqrt equal to sqrt. */
3215 return (TREE_CODE (arg0) == FUNCTION_DECL
3216 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3217 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3218 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3220 case tcc_exceptional:
3221 if (TREE_CODE (arg0) == CONSTRUCTOR)
3223 /* In GIMPLE constructors are used only to build vectors from
3224 elements. Individual elements in the constructor must be
3225 indexed in increasing order and form an initial sequence.
3227 We make no effort to compare constructors in generic.
3228 (see sem_variable::equals in ipa-icf which can do so for
3229 constants). */
3230 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3231 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3232 return 0;
3234 /* Be sure that vectors constructed have the same representation.
3235 We only tested element precision and modes to match.
3236 Vectors may be BLKmode and thus also check that the number of
3237 parts match. */
3238 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3239 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3240 return 0;
3242 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3243 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3244 unsigned int len = vec_safe_length (v0);
3246 if (len != vec_safe_length (v1))
3247 return 0;
3249 for (unsigned int i = 0; i < len; i++)
3251 constructor_elt *c0 = &(*v0)[i];
3252 constructor_elt *c1 = &(*v1)[i];
3254 if (!operand_equal_p (c0->value, c1->value, flags)
3255 /* In GIMPLE the indexes can be either NULL or matching i.
3256 Double check this so we won't get false
3257 positives for GENERIC. */
3258 || (c0->index
3259 && (TREE_CODE (c0->index) != INTEGER_CST
3260 || !compare_tree_int (c0->index, i)))
3261 || (c1->index
3262 && (TREE_CODE (c1->index) != INTEGER_CST
3263 || !compare_tree_int (c1->index, i))))
3264 return 0;
3266 return 1;
3268 return 0;
3270 default:
3271 return 0;
3274 #undef OP_SAME
3275 #undef OP_SAME_WITH_NULL
3278 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3279 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3281 When in doubt, return 0. */
3283 static int
3284 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3286 int unsignedp1, unsignedpo;
3287 tree primarg0, primarg1, primother;
3288 unsigned int correct_width;
3290 if (operand_equal_p (arg0, arg1, 0))
3291 return 1;
3293 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3294 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3295 return 0;
3297 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3298 and see if the inner values are the same. This removes any
3299 signedness comparison, which doesn't matter here. */
3300 primarg0 = arg0, primarg1 = arg1;
3301 STRIP_NOPS (primarg0);
3302 STRIP_NOPS (primarg1);
3303 if (operand_equal_p (primarg0, primarg1, 0))
3304 return 1;
3306 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3307 actual comparison operand, ARG0.
3309 First throw away any conversions to wider types
3310 already present in the operands. */
3312 primarg1 = get_narrower (arg1, &unsignedp1);
3313 primother = get_narrower (other, &unsignedpo);
3315 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3316 if (unsignedp1 == unsignedpo
3317 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3318 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3320 tree type = TREE_TYPE (arg0);
3322 /* Make sure shorter operand is extended the right way
3323 to match the longer operand. */
3324 primarg1 = fold_convert (signed_or_unsigned_type_for
3325 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3327 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3328 return 1;
3331 return 0;
3334 /* See if ARG is an expression that is either a comparison or is performing
3335 arithmetic on comparisons. The comparisons must only be comparing
3336 two different values, which will be stored in *CVAL1 and *CVAL2; if
3337 they are nonzero it means that some operands have already been found.
3338 No variables may be used anywhere else in the expression except in the
3339 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3340 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3342 If this is true, return 1. Otherwise, return zero. */
3344 static int
3345 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3347 enum tree_code code = TREE_CODE (arg);
3348 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3350 /* We can handle some of the tcc_expression cases here. */
3351 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3352 tclass = tcc_unary;
3353 else if (tclass == tcc_expression
3354 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3355 || code == COMPOUND_EXPR))
3356 tclass = tcc_binary;
3358 else if (tclass == tcc_expression && code == SAVE_EXPR
3359 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3361 /* If we've already found a CVAL1 or CVAL2, this expression is
3362 two complex to handle. */
3363 if (*cval1 || *cval2)
3364 return 0;
3366 tclass = tcc_unary;
3367 *save_p = 1;
3370 switch (tclass)
3372 case tcc_unary:
3373 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3375 case tcc_binary:
3376 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3377 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3378 cval1, cval2, save_p));
3380 case tcc_constant:
3381 return 1;
3383 case tcc_expression:
3384 if (code == COND_EXPR)
3385 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3386 cval1, cval2, save_p)
3387 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3388 cval1, cval2, save_p)
3389 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3390 cval1, cval2, save_p));
3391 return 0;
3393 case tcc_comparison:
3394 /* First see if we can handle the first operand, then the second. For
3395 the second operand, we know *CVAL1 can't be zero. It must be that
3396 one side of the comparison is each of the values; test for the
3397 case where this isn't true by failing if the two operands
3398 are the same. */
3400 if (operand_equal_p (TREE_OPERAND (arg, 0),
3401 TREE_OPERAND (arg, 1), 0))
3402 return 0;
3404 if (*cval1 == 0)
3405 *cval1 = TREE_OPERAND (arg, 0);
3406 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3408 else if (*cval2 == 0)
3409 *cval2 = TREE_OPERAND (arg, 0);
3410 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3412 else
3413 return 0;
3415 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3417 else if (*cval2 == 0)
3418 *cval2 = TREE_OPERAND (arg, 1);
3419 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3421 else
3422 return 0;
3424 return 1;
3426 default:
3427 return 0;
3431 /* ARG is a tree that is known to contain just arithmetic operations and
3432 comparisons. Evaluate the operations in the tree substituting NEW0 for
3433 any occurrence of OLD0 as an operand of a comparison and likewise for
3434 NEW1 and OLD1. */
3436 static tree
3437 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3438 tree old1, tree new1)
3440 tree type = TREE_TYPE (arg);
3441 enum tree_code code = TREE_CODE (arg);
3442 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3444 /* We can handle some of the tcc_expression cases here. */
3445 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3446 tclass = tcc_unary;
3447 else if (tclass == tcc_expression
3448 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3449 tclass = tcc_binary;
3451 switch (tclass)
3453 case tcc_unary:
3454 return fold_build1_loc (loc, code, type,
3455 eval_subst (loc, TREE_OPERAND (arg, 0),
3456 old0, new0, old1, new1));
3458 case tcc_binary:
3459 return fold_build2_loc (loc, code, type,
3460 eval_subst (loc, TREE_OPERAND (arg, 0),
3461 old0, new0, old1, new1),
3462 eval_subst (loc, TREE_OPERAND (arg, 1),
3463 old0, new0, old1, new1));
3465 case tcc_expression:
3466 switch (code)
3468 case SAVE_EXPR:
3469 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3470 old1, new1);
3472 case COMPOUND_EXPR:
3473 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3474 old1, new1);
3476 case COND_EXPR:
3477 return fold_build3_loc (loc, code, type,
3478 eval_subst (loc, TREE_OPERAND (arg, 0),
3479 old0, new0, old1, new1),
3480 eval_subst (loc, TREE_OPERAND (arg, 1),
3481 old0, new0, old1, new1),
3482 eval_subst (loc, TREE_OPERAND (arg, 2),
3483 old0, new0, old1, new1));
3484 default:
3485 break;
3487 /* Fall through - ??? */
3489 case tcc_comparison:
3491 tree arg0 = TREE_OPERAND (arg, 0);
3492 tree arg1 = TREE_OPERAND (arg, 1);
3494 /* We need to check both for exact equality and tree equality. The
3495 former will be true if the operand has a side-effect. In that
3496 case, we know the operand occurred exactly once. */
3498 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3499 arg0 = new0;
3500 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3501 arg0 = new1;
3503 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3504 arg1 = new0;
3505 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3506 arg1 = new1;
3508 return fold_build2_loc (loc, code, type, arg0, arg1);
3511 default:
3512 return arg;
3516 /* Return a tree for the case when the result of an expression is RESULT
3517 converted to TYPE and OMITTED was previously an operand of the expression
3518 but is now not needed (e.g., we folded OMITTED * 0).
3520 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3521 the conversion of RESULT to TYPE. */
3523 tree
3524 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3526 tree t = fold_convert_loc (loc, type, result);
3528 /* If the resulting operand is an empty statement, just return the omitted
3529 statement casted to void. */
3530 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3531 return build1_loc (loc, NOP_EXPR, void_type_node,
3532 fold_ignored_result (omitted));
3534 if (TREE_SIDE_EFFECTS (omitted))
3535 return build2_loc (loc, COMPOUND_EXPR, type,
3536 fold_ignored_result (omitted), t);
3538 return non_lvalue_loc (loc, t);
3541 /* Return a tree for the case when the result of an expression is RESULT
3542 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3543 of the expression but are now not needed.
3545 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3546 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3547 evaluated before OMITTED2. Otherwise, if neither has side effects,
3548 just do the conversion of RESULT to TYPE. */
3550 tree
3551 omit_two_operands_loc (location_t loc, tree type, tree result,
3552 tree omitted1, tree omitted2)
3554 tree t = fold_convert_loc (loc, type, result);
3556 if (TREE_SIDE_EFFECTS (omitted2))
3557 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3558 if (TREE_SIDE_EFFECTS (omitted1))
3559 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3561 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3565 /* Return a simplified tree node for the truth-negation of ARG. This
3566 never alters ARG itself. We assume that ARG is an operation that
3567 returns a truth value (0 or 1).
3569 FIXME: one would think we would fold the result, but it causes
3570 problems with the dominator optimizer. */
3572 static tree
3573 fold_truth_not_expr (location_t loc, tree arg)
3575 tree type = TREE_TYPE (arg);
3576 enum tree_code code = TREE_CODE (arg);
3577 location_t loc1, loc2;
3579 /* If this is a comparison, we can simply invert it, except for
3580 floating-point non-equality comparisons, in which case we just
3581 enclose a TRUTH_NOT_EXPR around what we have. */
3583 if (TREE_CODE_CLASS (code) == tcc_comparison)
3585 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3586 if (FLOAT_TYPE_P (op_type)
3587 && flag_trapping_math
3588 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3589 && code != NE_EXPR && code != EQ_EXPR)
3590 return NULL_TREE;
3592 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3593 if (code == ERROR_MARK)
3594 return NULL_TREE;
3596 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3597 TREE_OPERAND (arg, 1));
3598 if (TREE_NO_WARNING (arg))
3599 TREE_NO_WARNING (ret) = 1;
3600 return ret;
3603 switch (code)
3605 case INTEGER_CST:
3606 return constant_boolean_node (integer_zerop (arg), type);
3608 case TRUTH_AND_EXPR:
3609 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3610 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3611 return build2_loc (loc, TRUTH_OR_EXPR, type,
3612 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3613 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3615 case TRUTH_OR_EXPR:
3616 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3617 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3618 return build2_loc (loc, TRUTH_AND_EXPR, type,
3619 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3620 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3622 case TRUTH_XOR_EXPR:
3623 /* Here we can invert either operand. We invert the first operand
3624 unless the second operand is a TRUTH_NOT_EXPR in which case our
3625 result is the XOR of the first operand with the inside of the
3626 negation of the second operand. */
3628 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3629 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3630 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3631 else
3632 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3633 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3634 TREE_OPERAND (arg, 1));
3636 case TRUTH_ANDIF_EXPR:
3637 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3638 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3639 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3640 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3641 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3643 case TRUTH_ORIF_EXPR:
3644 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3645 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3646 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3647 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3648 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3650 case TRUTH_NOT_EXPR:
3651 return TREE_OPERAND (arg, 0);
3653 case COND_EXPR:
3655 tree arg1 = TREE_OPERAND (arg, 1);
3656 tree arg2 = TREE_OPERAND (arg, 2);
3658 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3659 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3661 /* A COND_EXPR may have a throw as one operand, which
3662 then has void type. Just leave void operands
3663 as they are. */
3664 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3665 VOID_TYPE_P (TREE_TYPE (arg1))
3666 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3667 VOID_TYPE_P (TREE_TYPE (arg2))
3668 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3671 case COMPOUND_EXPR:
3672 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3673 return build2_loc (loc, COMPOUND_EXPR, type,
3674 TREE_OPERAND (arg, 0),
3675 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3677 case NON_LVALUE_EXPR:
3678 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3679 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3681 CASE_CONVERT:
3682 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3683 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3685 /* ... fall through ... */
3687 case FLOAT_EXPR:
3688 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3689 return build1_loc (loc, TREE_CODE (arg), type,
3690 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3692 case BIT_AND_EXPR:
3693 if (!integer_onep (TREE_OPERAND (arg, 1)))
3694 return NULL_TREE;
3695 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3697 case SAVE_EXPR:
3698 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3700 case CLEANUP_POINT_EXPR:
3701 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3702 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3703 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3705 default:
3706 return NULL_TREE;
3710 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3711 assume that ARG is an operation that returns a truth value (0 or 1
3712 for scalars, 0 or -1 for vectors). Return the folded expression if
3713 folding is successful. Otherwise, return NULL_TREE. */
3715 static tree
3716 fold_invert_truthvalue (location_t loc, tree arg)
3718 tree type = TREE_TYPE (arg);
3719 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3720 ? BIT_NOT_EXPR
3721 : TRUTH_NOT_EXPR,
3722 type, arg);
3725 /* Return a simplified tree node for the truth-negation of ARG. This
3726 never alters ARG itself. We assume that ARG is an operation that
3727 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3729 tree
3730 invert_truthvalue_loc (location_t loc, tree arg)
3732 if (TREE_CODE (arg) == ERROR_MARK)
3733 return arg;
3735 tree type = TREE_TYPE (arg);
3736 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3737 ? BIT_NOT_EXPR
3738 : TRUTH_NOT_EXPR,
3739 type, arg);
3742 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3743 with code CODE. This optimization is unsafe. */
3744 static tree
3745 distribute_real_division (location_t loc, enum tree_code code, tree type,
3746 tree arg0, tree arg1)
3748 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3749 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3751 /* (A / C) +- (B / C) -> (A +- B) / C. */
3752 if (mul0 == mul1
3753 && operand_equal_p (TREE_OPERAND (arg0, 1),
3754 TREE_OPERAND (arg1, 1), 0))
3755 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3756 fold_build2_loc (loc, code, type,
3757 TREE_OPERAND (arg0, 0),
3758 TREE_OPERAND (arg1, 0)),
3759 TREE_OPERAND (arg0, 1));
3761 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3762 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3763 TREE_OPERAND (arg1, 0), 0)
3764 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3765 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3767 REAL_VALUE_TYPE r0, r1;
3768 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3769 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3770 if (!mul0)
3771 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3772 if (!mul1)
3773 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3774 real_arithmetic (&r0, code, &r0, &r1);
3775 return fold_build2_loc (loc, MULT_EXPR, type,
3776 TREE_OPERAND (arg0, 0),
3777 build_real (type, r0));
3780 return NULL_TREE;
3783 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3784 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3785 and uses reverse storage order if REVERSEP is nonzero. */
3787 static tree
3788 make_bit_field_ref (location_t loc, tree inner, tree type,
3789 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3790 int unsignedp, int reversep)
3792 tree result, bftype;
3794 if (bitpos == 0 && !reversep)
3796 tree size = TYPE_SIZE (TREE_TYPE (inner));
3797 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3798 || POINTER_TYPE_P (TREE_TYPE (inner)))
3799 && tree_fits_shwi_p (size)
3800 && tree_to_shwi (size) == bitsize)
3801 return fold_convert_loc (loc, type, inner);
3804 bftype = type;
3805 if (TYPE_PRECISION (bftype) != bitsize
3806 || TYPE_UNSIGNED (bftype) == !unsignedp)
3807 bftype = build_nonstandard_integer_type (bitsize, 0);
3809 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3810 size_int (bitsize), bitsize_int (bitpos));
3811 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3813 if (bftype != type)
3814 result = fold_convert_loc (loc, type, result);
3816 return result;
3819 /* Optimize a bit-field compare.
3821 There are two cases: First is a compare against a constant and the
3822 second is a comparison of two items where the fields are at the same
3823 bit position relative to the start of a chunk (byte, halfword, word)
3824 large enough to contain it. In these cases we can avoid the shift
3825 implicit in bitfield extractions.
3827 For constants, we emit a compare of the shifted constant with the
3828 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3829 compared. For two fields at the same position, we do the ANDs with the
3830 similar mask and compare the result of the ANDs.
3832 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3833 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3834 are the left and right operands of the comparison, respectively.
3836 If the optimization described above can be done, we return the resulting
3837 tree. Otherwise we return zero. */
3839 static tree
3840 optimize_bit_field_compare (location_t loc, enum tree_code code,
3841 tree compare_type, tree lhs, tree rhs)
3843 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3844 tree type = TREE_TYPE (lhs);
3845 tree unsigned_type;
3846 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3847 machine_mode lmode, rmode, nmode;
3848 int lunsignedp, runsignedp;
3849 int lreversep, rreversep;
3850 int lvolatilep = 0, rvolatilep = 0;
3851 tree linner, rinner = NULL_TREE;
3852 tree mask;
3853 tree offset;
3855 /* Get all the information about the extractions being done. If the bit size
3856 if the same as the size of the underlying object, we aren't doing an
3857 extraction at all and so can do nothing. We also don't want to
3858 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3859 then will no longer be able to replace it. */
3860 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3861 &lunsignedp, &lreversep, &lvolatilep, false);
3862 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3863 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3864 return 0;
3866 if (const_p)
3867 rreversep = lreversep;
3868 else
3870 /* If this is not a constant, we can only do something if bit positions,
3871 sizes, signedness and storage order are the same. */
3872 rinner
3873 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3874 &runsignedp, &rreversep, &rvolatilep, false);
3876 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3877 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3878 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3879 return 0;
3882 /* See if we can find a mode to refer to this field. We should be able to,
3883 but fail if we can't. */
3884 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3885 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3886 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3887 TYPE_ALIGN (TREE_TYPE (rinner))),
3888 word_mode, false);
3889 if (nmode == VOIDmode)
3890 return 0;
3892 /* Set signed and unsigned types of the precision of this mode for the
3893 shifts below. */
3894 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3896 /* Compute the bit position and size for the new reference and our offset
3897 within it. If the new reference is the same size as the original, we
3898 won't optimize anything, so return zero. */
3899 nbitsize = GET_MODE_BITSIZE (nmode);
3900 nbitpos = lbitpos & ~ (nbitsize - 1);
3901 lbitpos -= nbitpos;
3902 if (nbitsize == lbitsize)
3903 return 0;
3905 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3906 lbitpos = nbitsize - lbitsize - lbitpos;
3908 /* Make the mask to be used against the extracted field. */
3909 mask = build_int_cst_type (unsigned_type, -1);
3910 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3911 mask = const_binop (RSHIFT_EXPR, mask,
3912 size_int (nbitsize - lbitsize - lbitpos));
3914 if (! const_p)
3915 /* If not comparing with constant, just rework the comparison
3916 and return. */
3917 return fold_build2_loc (loc, code, compare_type,
3918 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3919 make_bit_field_ref (loc, linner,
3920 unsigned_type,
3921 nbitsize, nbitpos,
3922 1, lreversep),
3923 mask),
3924 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3925 make_bit_field_ref (loc, rinner,
3926 unsigned_type,
3927 nbitsize, nbitpos,
3928 1, rreversep),
3929 mask));
3931 /* Otherwise, we are handling the constant case. See if the constant is too
3932 big for the field. Warn and return a tree for 0 (false) if so. We do
3933 this not only for its own sake, but to avoid having to test for this
3934 error case below. If we didn't, we might generate wrong code.
3936 For unsigned fields, the constant shifted right by the field length should
3937 be all zero. For signed fields, the high-order bits should agree with
3938 the sign bit. */
3940 if (lunsignedp)
3942 if (wi::lrshift (rhs, lbitsize) != 0)
3944 warning (0, "comparison is always %d due to width of bit-field",
3945 code == NE_EXPR);
3946 return constant_boolean_node (code == NE_EXPR, compare_type);
3949 else
3951 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3952 if (tem != 0 && tem != -1)
3954 warning (0, "comparison is always %d due to width of bit-field",
3955 code == NE_EXPR);
3956 return constant_boolean_node (code == NE_EXPR, compare_type);
3960 /* Single-bit compares should always be against zero. */
3961 if (lbitsize == 1 && ! integer_zerop (rhs))
3963 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3964 rhs = build_int_cst (type, 0);
3967 /* Make a new bitfield reference, shift the constant over the
3968 appropriate number of bits and mask it with the computed mask
3969 (in case this was a signed field). If we changed it, make a new one. */
3970 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3971 lreversep);
3973 rhs = const_binop (BIT_AND_EXPR,
3974 const_binop (LSHIFT_EXPR,
3975 fold_convert_loc (loc, unsigned_type, rhs),
3976 size_int (lbitpos)),
3977 mask);
3979 lhs = build2_loc (loc, code, compare_type,
3980 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3981 return lhs;
3984 /* Subroutine for fold_truth_andor_1: decode a field reference.
3986 If EXP is a comparison reference, we return the innermost reference.
3988 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3989 set to the starting bit number.
3991 If the innermost field can be completely contained in a mode-sized
3992 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3994 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3995 otherwise it is not changed.
3997 *PUNSIGNEDP is set to the signedness of the field.
3999 *PREVERSEP is set to the storage order of the field.
4001 *PMASK is set to the mask used. This is either contained in a
4002 BIT_AND_EXPR or derived from the width of the field.
4004 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4006 Return 0 if this is not a component reference or is one that we can't
4007 do anything with. */
4009 static tree
4010 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4011 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4012 int *punsignedp, int *preversep, int *pvolatilep,
4013 tree *pmask, tree *pand_mask)
4015 tree outer_type = 0;
4016 tree and_mask = 0;
4017 tree mask, inner, offset;
4018 tree unsigned_type;
4019 unsigned int precision;
4021 /* All the optimizations using this function assume integer fields.
4022 There are problems with FP fields since the type_for_size call
4023 below can fail for, e.g., XFmode. */
4024 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4025 return 0;
4027 /* We are interested in the bare arrangement of bits, so strip everything
4028 that doesn't affect the machine mode. However, record the type of the
4029 outermost expression if it may matter below. */
4030 if (CONVERT_EXPR_P (exp)
4031 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4032 outer_type = TREE_TYPE (exp);
4033 STRIP_NOPS (exp);
4035 if (TREE_CODE (exp) == BIT_AND_EXPR)
4037 and_mask = TREE_OPERAND (exp, 1);
4038 exp = TREE_OPERAND (exp, 0);
4039 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4040 if (TREE_CODE (and_mask) != INTEGER_CST)
4041 return 0;
4044 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4045 punsignedp, preversep, pvolatilep, false);
4046 if ((inner == exp && and_mask == 0)
4047 || *pbitsize < 0 || offset != 0
4048 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4049 return 0;
4051 /* If the number of bits in the reference is the same as the bitsize of
4052 the outer type, then the outer type gives the signedness. Otherwise
4053 (in case of a small bitfield) the signedness is unchanged. */
4054 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4055 *punsignedp = TYPE_UNSIGNED (outer_type);
4057 /* Compute the mask to access the bitfield. */
4058 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4059 precision = TYPE_PRECISION (unsigned_type);
4061 mask = build_int_cst_type (unsigned_type, -1);
4063 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4064 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4066 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4067 if (and_mask != 0)
4068 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4069 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4071 *pmask = mask;
4072 *pand_mask = and_mask;
4073 return inner;
4076 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4077 bit positions and MASK is SIGNED. */
4079 static int
4080 all_ones_mask_p (const_tree mask, unsigned int size)
4082 tree type = TREE_TYPE (mask);
4083 unsigned int precision = TYPE_PRECISION (type);
4085 /* If this function returns true when the type of the mask is
4086 UNSIGNED, then there will be errors. In particular see
4087 gcc.c-torture/execute/990326-1.c. There does not appear to be
4088 any documentation paper trail as to why this is so. But the pre
4089 wide-int worked with that restriction and it has been preserved
4090 here. */
4091 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4092 return false;
4094 return wi::mask (size, false, precision) == mask;
4097 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4098 represents the sign bit of EXP's type. If EXP represents a sign
4099 or zero extension, also test VAL against the unextended type.
4100 The return value is the (sub)expression whose sign bit is VAL,
4101 or NULL_TREE otherwise. */
4103 tree
4104 sign_bit_p (tree exp, const_tree val)
4106 int width;
4107 tree t;
4109 /* Tree EXP must have an integral type. */
4110 t = TREE_TYPE (exp);
4111 if (! INTEGRAL_TYPE_P (t))
4112 return NULL_TREE;
4114 /* Tree VAL must be an integer constant. */
4115 if (TREE_CODE (val) != INTEGER_CST
4116 || TREE_OVERFLOW (val))
4117 return NULL_TREE;
4119 width = TYPE_PRECISION (t);
4120 if (wi::only_sign_bit_p (val, width))
4121 return exp;
4123 /* Handle extension from a narrower type. */
4124 if (TREE_CODE (exp) == NOP_EXPR
4125 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4126 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4128 return NULL_TREE;
4131 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4132 to be evaluated unconditionally. */
4134 static int
4135 simple_operand_p (const_tree exp)
4137 /* Strip any conversions that don't change the machine mode. */
4138 STRIP_NOPS (exp);
4140 return (CONSTANT_CLASS_P (exp)
4141 || TREE_CODE (exp) == SSA_NAME
4142 || (DECL_P (exp)
4143 && ! TREE_ADDRESSABLE (exp)
4144 && ! TREE_THIS_VOLATILE (exp)
4145 && ! DECL_NONLOCAL (exp)
4146 /* Don't regard global variables as simple. They may be
4147 allocated in ways unknown to the compiler (shared memory,
4148 #pragma weak, etc). */
4149 && ! TREE_PUBLIC (exp)
4150 && ! DECL_EXTERNAL (exp)
4151 /* Weakrefs are not safe to be read, since they can be NULL.
4152 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4153 have DECL_WEAK flag set. */
4154 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4155 /* Loading a static variable is unduly expensive, but global
4156 registers aren't expensive. */
4157 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4160 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4161 to be evaluated unconditionally.
4162 I addition to simple_operand_p, we assume that comparisons, conversions,
4163 and logic-not operations are simple, if their operands are simple, too. */
4165 static bool
4166 simple_operand_p_2 (tree exp)
4168 enum tree_code code;
4170 if (TREE_SIDE_EFFECTS (exp)
4171 || tree_could_trap_p (exp))
4172 return false;
4174 while (CONVERT_EXPR_P (exp))
4175 exp = TREE_OPERAND (exp, 0);
4177 code = TREE_CODE (exp);
4179 if (TREE_CODE_CLASS (code) == tcc_comparison)
4180 return (simple_operand_p (TREE_OPERAND (exp, 0))
4181 && simple_operand_p (TREE_OPERAND (exp, 1)));
4183 if (code == TRUTH_NOT_EXPR)
4184 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4186 return simple_operand_p (exp);
4190 /* The following functions are subroutines to fold_range_test and allow it to
4191 try to change a logical combination of comparisons into a range test.
4193 For example, both
4194 X == 2 || X == 3 || X == 4 || X == 5
4196 X >= 2 && X <= 5
4197 are converted to
4198 (unsigned) (X - 2) <= 3
4200 We describe each set of comparisons as being either inside or outside
4201 a range, using a variable named like IN_P, and then describe the
4202 range with a lower and upper bound. If one of the bounds is omitted,
4203 it represents either the highest or lowest value of the type.
4205 In the comments below, we represent a range by two numbers in brackets
4206 preceded by a "+" to designate being inside that range, or a "-" to
4207 designate being outside that range, so the condition can be inverted by
4208 flipping the prefix. An omitted bound is represented by a "-". For
4209 example, "- [-, 10]" means being outside the range starting at the lowest
4210 possible value and ending at 10, in other words, being greater than 10.
4211 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4212 always false.
4214 We set up things so that the missing bounds are handled in a consistent
4215 manner so neither a missing bound nor "true" and "false" need to be
4216 handled using a special case. */
4218 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4219 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4220 and UPPER1_P are nonzero if the respective argument is an upper bound
4221 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4222 must be specified for a comparison. ARG1 will be converted to ARG0's
4223 type if both are specified. */
4225 static tree
4226 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4227 tree arg1, int upper1_p)
4229 tree tem;
4230 int result;
4231 int sgn0, sgn1;
4233 /* If neither arg represents infinity, do the normal operation.
4234 Else, if not a comparison, return infinity. Else handle the special
4235 comparison rules. Note that most of the cases below won't occur, but
4236 are handled for consistency. */
4238 if (arg0 != 0 && arg1 != 0)
4240 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4241 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4242 STRIP_NOPS (tem);
4243 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4246 if (TREE_CODE_CLASS (code) != tcc_comparison)
4247 return 0;
4249 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4250 for neither. In real maths, we cannot assume open ended ranges are
4251 the same. But, this is computer arithmetic, where numbers are finite.
4252 We can therefore make the transformation of any unbounded range with
4253 the value Z, Z being greater than any representable number. This permits
4254 us to treat unbounded ranges as equal. */
4255 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4256 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4257 switch (code)
4259 case EQ_EXPR:
4260 result = sgn0 == sgn1;
4261 break;
4262 case NE_EXPR:
4263 result = sgn0 != sgn1;
4264 break;
4265 case LT_EXPR:
4266 result = sgn0 < sgn1;
4267 break;
4268 case LE_EXPR:
4269 result = sgn0 <= sgn1;
4270 break;
4271 case GT_EXPR:
4272 result = sgn0 > sgn1;
4273 break;
4274 case GE_EXPR:
4275 result = sgn0 >= sgn1;
4276 break;
4277 default:
4278 gcc_unreachable ();
4281 return constant_boolean_node (result, type);
4284 /* Helper routine for make_range. Perform one step for it, return
4285 new expression if the loop should continue or NULL_TREE if it should
4286 stop. */
4288 tree
4289 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4290 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4291 bool *strict_overflow_p)
4293 tree arg0_type = TREE_TYPE (arg0);
4294 tree n_low, n_high, low = *p_low, high = *p_high;
4295 int in_p = *p_in_p, n_in_p;
4297 switch (code)
4299 case TRUTH_NOT_EXPR:
4300 /* We can only do something if the range is testing for zero. */
4301 if (low == NULL_TREE || high == NULL_TREE
4302 || ! integer_zerop (low) || ! integer_zerop (high))
4303 return NULL_TREE;
4304 *p_in_p = ! in_p;
4305 return arg0;
4307 case EQ_EXPR: case NE_EXPR:
4308 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4309 /* We can only do something if the range is testing for zero
4310 and if the second operand is an integer constant. Note that
4311 saying something is "in" the range we make is done by
4312 complementing IN_P since it will set in the initial case of
4313 being not equal to zero; "out" is leaving it alone. */
4314 if (low == NULL_TREE || high == NULL_TREE
4315 || ! integer_zerop (low) || ! integer_zerop (high)
4316 || TREE_CODE (arg1) != INTEGER_CST)
4317 return NULL_TREE;
4319 switch (code)
4321 case NE_EXPR: /* - [c, c] */
4322 low = high = arg1;
4323 break;
4324 case EQ_EXPR: /* + [c, c] */
4325 in_p = ! in_p, low = high = arg1;
4326 break;
4327 case GT_EXPR: /* - [-, c] */
4328 low = 0, high = arg1;
4329 break;
4330 case GE_EXPR: /* + [c, -] */
4331 in_p = ! in_p, low = arg1, high = 0;
4332 break;
4333 case LT_EXPR: /* - [c, -] */
4334 low = arg1, high = 0;
4335 break;
4336 case LE_EXPR: /* + [-, c] */
4337 in_p = ! in_p, low = 0, high = arg1;
4338 break;
4339 default:
4340 gcc_unreachable ();
4343 /* If this is an unsigned comparison, we also know that EXP is
4344 greater than or equal to zero. We base the range tests we make
4345 on that fact, so we record it here so we can parse existing
4346 range tests. We test arg0_type since often the return type
4347 of, e.g. EQ_EXPR, is boolean. */
4348 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4350 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4351 in_p, low, high, 1,
4352 build_int_cst (arg0_type, 0),
4353 NULL_TREE))
4354 return NULL_TREE;
4356 in_p = n_in_p, low = n_low, high = n_high;
4358 /* If the high bound is missing, but we have a nonzero low
4359 bound, reverse the range so it goes from zero to the low bound
4360 minus 1. */
4361 if (high == 0 && low && ! integer_zerop (low))
4363 in_p = ! in_p;
4364 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4365 build_int_cst (TREE_TYPE (low), 1), 0);
4366 low = build_int_cst (arg0_type, 0);
4370 *p_low = low;
4371 *p_high = high;
4372 *p_in_p = in_p;
4373 return arg0;
4375 case NEGATE_EXPR:
4376 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4377 low and high are non-NULL, then normalize will DTRT. */
4378 if (!TYPE_UNSIGNED (arg0_type)
4379 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4381 if (low == NULL_TREE)
4382 low = TYPE_MIN_VALUE (arg0_type);
4383 if (high == NULL_TREE)
4384 high = TYPE_MAX_VALUE (arg0_type);
4387 /* (-x) IN [a,b] -> x in [-b, -a] */
4388 n_low = range_binop (MINUS_EXPR, exp_type,
4389 build_int_cst (exp_type, 0),
4390 0, high, 1);
4391 n_high = range_binop (MINUS_EXPR, exp_type,
4392 build_int_cst (exp_type, 0),
4393 0, low, 0);
4394 if (n_high != 0 && TREE_OVERFLOW (n_high))
4395 return NULL_TREE;
4396 goto normalize;
4398 case BIT_NOT_EXPR:
4399 /* ~ X -> -X - 1 */
4400 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4401 build_int_cst (exp_type, 1));
4403 case PLUS_EXPR:
4404 case MINUS_EXPR:
4405 if (TREE_CODE (arg1) != INTEGER_CST)
4406 return NULL_TREE;
4408 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4409 move a constant to the other side. */
4410 if (!TYPE_UNSIGNED (arg0_type)
4411 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4412 return NULL_TREE;
4414 /* If EXP is signed, any overflow in the computation is undefined,
4415 so we don't worry about it so long as our computations on
4416 the bounds don't overflow. For unsigned, overflow is defined
4417 and this is exactly the right thing. */
4418 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4419 arg0_type, low, 0, arg1, 0);
4420 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4421 arg0_type, high, 1, arg1, 0);
4422 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4423 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4424 return NULL_TREE;
4426 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4427 *strict_overflow_p = true;
4429 normalize:
4430 /* Check for an unsigned range which has wrapped around the maximum
4431 value thus making n_high < n_low, and normalize it. */
4432 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4434 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4435 build_int_cst (TREE_TYPE (n_high), 1), 0);
4436 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4437 build_int_cst (TREE_TYPE (n_low), 1), 0);
4439 /* If the range is of the form +/- [ x+1, x ], we won't
4440 be able to normalize it. But then, it represents the
4441 whole range or the empty set, so make it
4442 +/- [ -, - ]. */
4443 if (tree_int_cst_equal (n_low, low)
4444 && tree_int_cst_equal (n_high, high))
4445 low = high = 0;
4446 else
4447 in_p = ! in_p;
4449 else
4450 low = n_low, high = n_high;
4452 *p_low = low;
4453 *p_high = high;
4454 *p_in_p = in_p;
4455 return arg0;
4457 CASE_CONVERT:
4458 case NON_LVALUE_EXPR:
4459 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4460 return NULL_TREE;
4462 if (! INTEGRAL_TYPE_P (arg0_type)
4463 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4464 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4465 return NULL_TREE;
4467 n_low = low, n_high = high;
4469 if (n_low != 0)
4470 n_low = fold_convert_loc (loc, arg0_type, n_low);
4472 if (n_high != 0)
4473 n_high = fold_convert_loc (loc, arg0_type, n_high);
4475 /* If we're converting arg0 from an unsigned type, to exp,
4476 a signed type, we will be doing the comparison as unsigned.
4477 The tests above have already verified that LOW and HIGH
4478 are both positive.
4480 So we have to ensure that we will handle large unsigned
4481 values the same way that the current signed bounds treat
4482 negative values. */
4484 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4486 tree high_positive;
4487 tree equiv_type;
4488 /* For fixed-point modes, we need to pass the saturating flag
4489 as the 2nd parameter. */
4490 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4491 equiv_type
4492 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4493 TYPE_SATURATING (arg0_type));
4494 else
4495 equiv_type
4496 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4498 /* A range without an upper bound is, naturally, unbounded.
4499 Since convert would have cropped a very large value, use
4500 the max value for the destination type. */
4501 high_positive
4502 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4503 : TYPE_MAX_VALUE (arg0_type);
4505 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4506 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4507 fold_convert_loc (loc, arg0_type,
4508 high_positive),
4509 build_int_cst (arg0_type, 1));
4511 /* If the low bound is specified, "and" the range with the
4512 range for which the original unsigned value will be
4513 positive. */
4514 if (low != 0)
4516 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4517 1, fold_convert_loc (loc, arg0_type,
4518 integer_zero_node),
4519 high_positive))
4520 return NULL_TREE;
4522 in_p = (n_in_p == in_p);
4524 else
4526 /* Otherwise, "or" the range with the range of the input
4527 that will be interpreted as negative. */
4528 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4529 1, fold_convert_loc (loc, arg0_type,
4530 integer_zero_node),
4531 high_positive))
4532 return NULL_TREE;
4534 in_p = (in_p != n_in_p);
4538 *p_low = n_low;
4539 *p_high = n_high;
4540 *p_in_p = in_p;
4541 return arg0;
4543 default:
4544 return NULL_TREE;
4548 /* Given EXP, a logical expression, set the range it is testing into
4549 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4550 actually being tested. *PLOW and *PHIGH will be made of the same
4551 type as the returned expression. If EXP is not a comparison, we
4552 will most likely not be returning a useful value and range. Set
4553 *STRICT_OVERFLOW_P to true if the return value is only valid
4554 because signed overflow is undefined; otherwise, do not change
4555 *STRICT_OVERFLOW_P. */
4557 tree
4558 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4559 bool *strict_overflow_p)
4561 enum tree_code code;
4562 tree arg0, arg1 = NULL_TREE;
4563 tree exp_type, nexp;
4564 int in_p;
4565 tree low, high;
4566 location_t loc = EXPR_LOCATION (exp);
4568 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4569 and see if we can refine the range. Some of the cases below may not
4570 happen, but it doesn't seem worth worrying about this. We "continue"
4571 the outer loop when we've changed something; otherwise we "break"
4572 the switch, which will "break" the while. */
4574 in_p = 0;
4575 low = high = build_int_cst (TREE_TYPE (exp), 0);
4577 while (1)
4579 code = TREE_CODE (exp);
4580 exp_type = TREE_TYPE (exp);
4581 arg0 = NULL_TREE;
4583 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4585 if (TREE_OPERAND_LENGTH (exp) > 0)
4586 arg0 = TREE_OPERAND (exp, 0);
4587 if (TREE_CODE_CLASS (code) == tcc_binary
4588 || TREE_CODE_CLASS (code) == tcc_comparison
4589 || (TREE_CODE_CLASS (code) == tcc_expression
4590 && TREE_OPERAND_LENGTH (exp) > 1))
4591 arg1 = TREE_OPERAND (exp, 1);
4593 if (arg0 == NULL_TREE)
4594 break;
4596 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4597 &high, &in_p, strict_overflow_p);
4598 if (nexp == NULL_TREE)
4599 break;
4600 exp = nexp;
4603 /* If EXP is a constant, we can evaluate whether this is true or false. */
4604 if (TREE_CODE (exp) == INTEGER_CST)
4606 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4607 exp, 0, low, 0))
4608 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4609 exp, 1, high, 1)));
4610 low = high = 0;
4611 exp = 0;
4614 *pin_p = in_p, *plow = low, *phigh = high;
4615 return exp;
4618 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4619 type, TYPE, return an expression to test if EXP is in (or out of, depending
4620 on IN_P) the range. Return 0 if the test couldn't be created. */
4622 tree
4623 build_range_check (location_t loc, tree type, tree exp, int in_p,
4624 tree low, tree high)
4626 tree etype = TREE_TYPE (exp), value;
4628 /* Disable this optimization for function pointer expressions
4629 on targets that require function pointer canonicalization. */
4630 if (targetm.have_canonicalize_funcptr_for_compare ()
4631 && TREE_CODE (etype) == POINTER_TYPE
4632 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4633 return NULL_TREE;
4635 if (! in_p)
4637 value = build_range_check (loc, type, exp, 1, low, high);
4638 if (value != 0)
4639 return invert_truthvalue_loc (loc, value);
4641 return 0;
4644 if (low == 0 && high == 0)
4645 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4647 if (low == 0)
4648 return fold_build2_loc (loc, LE_EXPR, type, exp,
4649 fold_convert_loc (loc, etype, high));
4651 if (high == 0)
4652 return fold_build2_loc (loc, GE_EXPR, type, exp,
4653 fold_convert_loc (loc, etype, low));
4655 if (operand_equal_p (low, high, 0))
4656 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4657 fold_convert_loc (loc, etype, low));
4659 if (integer_zerop (low))
4661 if (! TYPE_UNSIGNED (etype))
4663 etype = unsigned_type_for (etype);
4664 high = fold_convert_loc (loc, etype, high);
4665 exp = fold_convert_loc (loc, etype, exp);
4667 return build_range_check (loc, type, exp, 1, 0, high);
4670 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4671 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4673 int prec = TYPE_PRECISION (etype);
4675 if (wi::mask (prec - 1, false, prec) == high)
4677 if (TYPE_UNSIGNED (etype))
4679 tree signed_etype = signed_type_for (etype);
4680 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4681 etype
4682 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4683 else
4684 etype = signed_etype;
4685 exp = fold_convert_loc (loc, etype, exp);
4687 return fold_build2_loc (loc, GT_EXPR, type, exp,
4688 build_int_cst (etype, 0));
4692 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4693 This requires wrap-around arithmetics for the type of the expression.
4694 First make sure that arithmetics in this type is valid, then make sure
4695 that it wraps around. */
4696 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4697 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4698 TYPE_UNSIGNED (etype));
4700 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4702 tree utype, minv, maxv;
4704 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4705 for the type in question, as we rely on this here. */
4706 utype = unsigned_type_for (etype);
4707 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4708 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4709 build_int_cst (TREE_TYPE (maxv), 1), 1);
4710 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4712 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4713 minv, 1, maxv, 1)))
4714 etype = utype;
4715 else
4716 return 0;
4719 high = fold_convert_loc (loc, etype, high);
4720 low = fold_convert_loc (loc, etype, low);
4721 exp = fold_convert_loc (loc, etype, exp);
4723 value = const_binop (MINUS_EXPR, high, low);
4726 if (POINTER_TYPE_P (etype))
4728 if (value != 0 && !TREE_OVERFLOW (value))
4730 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4731 return build_range_check (loc, type,
4732 fold_build_pointer_plus_loc (loc, exp, low),
4733 1, build_int_cst (etype, 0), value);
4735 return 0;
4738 if (value != 0 && !TREE_OVERFLOW (value))
4739 return build_range_check (loc, type,
4740 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4741 1, build_int_cst (etype, 0), value);
4743 return 0;
4746 /* Return the predecessor of VAL in its type, handling the infinite case. */
4748 static tree
4749 range_predecessor (tree val)
4751 tree type = TREE_TYPE (val);
4753 if (INTEGRAL_TYPE_P (type)
4754 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4755 return 0;
4756 else
4757 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4758 build_int_cst (TREE_TYPE (val), 1), 0);
4761 /* Return the successor of VAL in its type, handling the infinite case. */
4763 static tree
4764 range_successor (tree val)
4766 tree type = TREE_TYPE (val);
4768 if (INTEGRAL_TYPE_P (type)
4769 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4770 return 0;
4771 else
4772 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4773 build_int_cst (TREE_TYPE (val), 1), 0);
4776 /* Given two ranges, see if we can merge them into one. Return 1 if we
4777 can, 0 if we can't. Set the output range into the specified parameters. */
4779 bool
4780 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4781 tree high0, int in1_p, tree low1, tree high1)
4783 int no_overlap;
4784 int subset;
4785 int temp;
4786 tree tem;
4787 int in_p;
4788 tree low, high;
4789 int lowequal = ((low0 == 0 && low1 == 0)
4790 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4791 low0, 0, low1, 0)));
4792 int highequal = ((high0 == 0 && high1 == 0)
4793 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4794 high0, 1, high1, 1)));
4796 /* Make range 0 be the range that starts first, or ends last if they
4797 start at the same value. Swap them if it isn't. */
4798 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4799 low0, 0, low1, 0))
4800 || (lowequal
4801 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4802 high1, 1, high0, 1))))
4804 temp = in0_p, in0_p = in1_p, in1_p = temp;
4805 tem = low0, low0 = low1, low1 = tem;
4806 tem = high0, high0 = high1, high1 = tem;
4809 /* Now flag two cases, whether the ranges are disjoint or whether the
4810 second range is totally subsumed in the first. Note that the tests
4811 below are simplified by the ones above. */
4812 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4813 high0, 1, low1, 0));
4814 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4815 high1, 1, high0, 1));
4817 /* We now have four cases, depending on whether we are including or
4818 excluding the two ranges. */
4819 if (in0_p && in1_p)
4821 /* If they don't overlap, the result is false. If the second range
4822 is a subset it is the result. Otherwise, the range is from the start
4823 of the second to the end of the first. */
4824 if (no_overlap)
4825 in_p = 0, low = high = 0;
4826 else if (subset)
4827 in_p = 1, low = low1, high = high1;
4828 else
4829 in_p = 1, low = low1, high = high0;
4832 else if (in0_p && ! in1_p)
4834 /* If they don't overlap, the result is the first range. If they are
4835 equal, the result is false. If the second range is a subset of the
4836 first, and the ranges begin at the same place, we go from just after
4837 the end of the second range to the end of the first. If the second
4838 range is not a subset of the first, or if it is a subset and both
4839 ranges end at the same place, the range starts at the start of the
4840 first range and ends just before the second range.
4841 Otherwise, we can't describe this as a single range. */
4842 if (no_overlap)
4843 in_p = 1, low = low0, high = high0;
4844 else if (lowequal && highequal)
4845 in_p = 0, low = high = 0;
4846 else if (subset && lowequal)
4848 low = range_successor (high1);
4849 high = high0;
4850 in_p = 1;
4851 if (low == 0)
4853 /* We are in the weird situation where high0 > high1 but
4854 high1 has no successor. Punt. */
4855 return 0;
4858 else if (! subset || highequal)
4860 low = low0;
4861 high = range_predecessor (low1);
4862 in_p = 1;
4863 if (high == 0)
4865 /* low0 < low1 but low1 has no predecessor. Punt. */
4866 return 0;
4869 else
4870 return 0;
4873 else if (! in0_p && in1_p)
4875 /* If they don't overlap, the result is the second range. If the second
4876 is a subset of the first, the result is false. Otherwise,
4877 the range starts just after the first range and ends at the
4878 end of the second. */
4879 if (no_overlap)
4880 in_p = 1, low = low1, high = high1;
4881 else if (subset || highequal)
4882 in_p = 0, low = high = 0;
4883 else
4885 low = range_successor (high0);
4886 high = high1;
4887 in_p = 1;
4888 if (low == 0)
4890 /* high1 > high0 but high0 has no successor. Punt. */
4891 return 0;
4896 else
4898 /* The case where we are excluding both ranges. Here the complex case
4899 is if they don't overlap. In that case, the only time we have a
4900 range is if they are adjacent. If the second is a subset of the
4901 first, the result is the first. Otherwise, the range to exclude
4902 starts at the beginning of the first range and ends at the end of the
4903 second. */
4904 if (no_overlap)
4906 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4907 range_successor (high0),
4908 1, low1, 0)))
4909 in_p = 0, low = low0, high = high1;
4910 else
4912 /* Canonicalize - [min, x] into - [-, x]. */
4913 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4914 switch (TREE_CODE (TREE_TYPE (low0)))
4916 case ENUMERAL_TYPE:
4917 if (TYPE_PRECISION (TREE_TYPE (low0))
4918 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4919 break;
4920 /* FALLTHROUGH */
4921 case INTEGER_TYPE:
4922 if (tree_int_cst_equal (low0,
4923 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4924 low0 = 0;
4925 break;
4926 case POINTER_TYPE:
4927 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4928 && integer_zerop (low0))
4929 low0 = 0;
4930 break;
4931 default:
4932 break;
4935 /* Canonicalize - [x, max] into - [x, -]. */
4936 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4937 switch (TREE_CODE (TREE_TYPE (high1)))
4939 case ENUMERAL_TYPE:
4940 if (TYPE_PRECISION (TREE_TYPE (high1))
4941 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4942 break;
4943 /* FALLTHROUGH */
4944 case INTEGER_TYPE:
4945 if (tree_int_cst_equal (high1,
4946 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4947 high1 = 0;
4948 break;
4949 case POINTER_TYPE:
4950 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4951 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4952 high1, 1,
4953 build_int_cst (TREE_TYPE (high1), 1),
4954 1)))
4955 high1 = 0;
4956 break;
4957 default:
4958 break;
4961 /* The ranges might be also adjacent between the maximum and
4962 minimum values of the given type. For
4963 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4964 return + [x + 1, y - 1]. */
4965 if (low0 == 0 && high1 == 0)
4967 low = range_successor (high0);
4968 high = range_predecessor (low1);
4969 if (low == 0 || high == 0)
4970 return 0;
4972 in_p = 1;
4974 else
4975 return 0;
4978 else if (subset)
4979 in_p = 0, low = low0, high = high0;
4980 else
4981 in_p = 0, low = low0, high = high1;
4984 *pin_p = in_p, *plow = low, *phigh = high;
4985 return 1;
4989 /* Subroutine of fold, looking inside expressions of the form
4990 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4991 of the COND_EXPR. This function is being used also to optimize
4992 A op B ? C : A, by reversing the comparison first.
4994 Return a folded expression whose code is not a COND_EXPR
4995 anymore, or NULL_TREE if no folding opportunity is found. */
4997 static tree
4998 fold_cond_expr_with_comparison (location_t loc, tree type,
4999 tree arg0, tree arg1, tree arg2)
5001 enum tree_code comp_code = TREE_CODE (arg0);
5002 tree arg00 = TREE_OPERAND (arg0, 0);
5003 tree arg01 = TREE_OPERAND (arg0, 1);
5004 tree arg1_type = TREE_TYPE (arg1);
5005 tree tem;
5007 STRIP_NOPS (arg1);
5008 STRIP_NOPS (arg2);
5010 /* If we have A op 0 ? A : -A, consider applying the following
5011 transformations:
5013 A == 0? A : -A same as -A
5014 A != 0? A : -A same as A
5015 A >= 0? A : -A same as abs (A)
5016 A > 0? A : -A same as abs (A)
5017 A <= 0? A : -A same as -abs (A)
5018 A < 0? A : -A same as -abs (A)
5020 None of these transformations work for modes with signed
5021 zeros. If A is +/-0, the first two transformations will
5022 change the sign of the result (from +0 to -0, or vice
5023 versa). The last four will fix the sign of the result,
5024 even though the original expressions could be positive or
5025 negative, depending on the sign of A.
5027 Note that all these transformations are correct if A is
5028 NaN, since the two alternatives (A and -A) are also NaNs. */
5029 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5030 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5031 ? real_zerop (arg01)
5032 : integer_zerop (arg01))
5033 && ((TREE_CODE (arg2) == NEGATE_EXPR
5034 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5035 /* In the case that A is of the form X-Y, '-A' (arg2) may
5036 have already been folded to Y-X, check for that. */
5037 || (TREE_CODE (arg1) == MINUS_EXPR
5038 && TREE_CODE (arg2) == MINUS_EXPR
5039 && operand_equal_p (TREE_OPERAND (arg1, 0),
5040 TREE_OPERAND (arg2, 1), 0)
5041 && operand_equal_p (TREE_OPERAND (arg1, 1),
5042 TREE_OPERAND (arg2, 0), 0))))
5043 switch (comp_code)
5045 case EQ_EXPR:
5046 case UNEQ_EXPR:
5047 tem = fold_convert_loc (loc, arg1_type, arg1);
5048 return pedantic_non_lvalue_loc (loc,
5049 fold_convert_loc (loc, type,
5050 negate_expr (tem)));
5051 case NE_EXPR:
5052 case LTGT_EXPR:
5053 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5054 case UNGE_EXPR:
5055 case UNGT_EXPR:
5056 if (flag_trapping_math)
5057 break;
5058 /* Fall through. */
5059 case GE_EXPR:
5060 case GT_EXPR:
5061 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5062 break;
5063 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5064 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5065 case UNLE_EXPR:
5066 case UNLT_EXPR:
5067 if (flag_trapping_math)
5068 break;
5069 case LE_EXPR:
5070 case LT_EXPR:
5071 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5072 break;
5073 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5074 return negate_expr (fold_convert_loc (loc, type, tem));
5075 default:
5076 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5077 break;
5080 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5081 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5082 both transformations are correct when A is NaN: A != 0
5083 is then true, and A == 0 is false. */
5085 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5086 && integer_zerop (arg01) && integer_zerop (arg2))
5088 if (comp_code == NE_EXPR)
5089 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5090 else if (comp_code == EQ_EXPR)
5091 return build_zero_cst (type);
5094 /* Try some transformations of A op B ? A : B.
5096 A == B? A : B same as B
5097 A != B? A : B same as A
5098 A >= B? A : B same as max (A, B)
5099 A > B? A : B same as max (B, A)
5100 A <= B? A : B same as min (A, B)
5101 A < B? A : B same as min (B, A)
5103 As above, these transformations don't work in the presence
5104 of signed zeros. For example, if A and B are zeros of
5105 opposite sign, the first two transformations will change
5106 the sign of the result. In the last four, the original
5107 expressions give different results for (A=+0, B=-0) and
5108 (A=-0, B=+0), but the transformed expressions do not.
5110 The first two transformations are correct if either A or B
5111 is a NaN. In the first transformation, the condition will
5112 be false, and B will indeed be chosen. In the case of the
5113 second transformation, the condition A != B will be true,
5114 and A will be chosen.
5116 The conversions to max() and min() are not correct if B is
5117 a number and A is not. The conditions in the original
5118 expressions will be false, so all four give B. The min()
5119 and max() versions would give a NaN instead. */
5120 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5121 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5122 /* Avoid these transformations if the COND_EXPR may be used
5123 as an lvalue in the C++ front-end. PR c++/19199. */
5124 && (in_gimple_form
5125 || VECTOR_TYPE_P (type)
5126 || (! lang_GNU_CXX ()
5127 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5128 || ! maybe_lvalue_p (arg1)
5129 || ! maybe_lvalue_p (arg2)))
5131 tree comp_op0 = arg00;
5132 tree comp_op1 = arg01;
5133 tree comp_type = TREE_TYPE (comp_op0);
5135 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5136 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5138 comp_type = type;
5139 comp_op0 = arg1;
5140 comp_op1 = arg2;
5143 switch (comp_code)
5145 case EQ_EXPR:
5146 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5147 case NE_EXPR:
5148 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5149 case LE_EXPR:
5150 case LT_EXPR:
5151 case UNLE_EXPR:
5152 case UNLT_EXPR:
5153 /* In C++ a ?: expression can be an lvalue, so put the
5154 operand which will be used if they are equal first
5155 so that we can convert this back to the
5156 corresponding COND_EXPR. */
5157 if (!HONOR_NANS (arg1))
5159 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5160 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5161 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5162 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5163 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5164 comp_op1, comp_op0);
5165 return pedantic_non_lvalue_loc (loc,
5166 fold_convert_loc (loc, type, tem));
5168 break;
5169 case GE_EXPR:
5170 case GT_EXPR:
5171 case UNGE_EXPR:
5172 case UNGT_EXPR:
5173 if (!HONOR_NANS (arg1))
5175 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5176 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5177 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5178 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5179 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5180 comp_op1, comp_op0);
5181 return pedantic_non_lvalue_loc (loc,
5182 fold_convert_loc (loc, type, tem));
5184 break;
5185 case UNEQ_EXPR:
5186 if (!HONOR_NANS (arg1))
5187 return pedantic_non_lvalue_loc (loc,
5188 fold_convert_loc (loc, type, arg2));
5189 break;
5190 case LTGT_EXPR:
5191 if (!HONOR_NANS (arg1))
5192 return pedantic_non_lvalue_loc (loc,
5193 fold_convert_loc (loc, type, arg1));
5194 break;
5195 default:
5196 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5197 break;
5201 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5202 we might still be able to simplify this. For example,
5203 if C1 is one less or one more than C2, this might have started
5204 out as a MIN or MAX and been transformed by this function.
5205 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5207 if (INTEGRAL_TYPE_P (type)
5208 && TREE_CODE (arg01) == INTEGER_CST
5209 && TREE_CODE (arg2) == INTEGER_CST)
5210 switch (comp_code)
5212 case EQ_EXPR:
5213 if (TREE_CODE (arg1) == INTEGER_CST)
5214 break;
5215 /* We can replace A with C1 in this case. */
5216 arg1 = fold_convert_loc (loc, type, arg01);
5217 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5219 case LT_EXPR:
5220 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5221 MIN_EXPR, to preserve the signedness of the comparison. */
5222 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5223 OEP_ONLY_CONST)
5224 && operand_equal_p (arg01,
5225 const_binop (PLUS_EXPR, arg2,
5226 build_int_cst (type, 1)),
5227 OEP_ONLY_CONST))
5229 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5230 fold_convert_loc (loc, TREE_TYPE (arg00),
5231 arg2));
5232 return pedantic_non_lvalue_loc (loc,
5233 fold_convert_loc (loc, type, tem));
5235 break;
5237 case LE_EXPR:
5238 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5239 as above. */
5240 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5241 OEP_ONLY_CONST)
5242 && operand_equal_p (arg01,
5243 const_binop (MINUS_EXPR, arg2,
5244 build_int_cst (type, 1)),
5245 OEP_ONLY_CONST))
5247 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5248 fold_convert_loc (loc, TREE_TYPE (arg00),
5249 arg2));
5250 return pedantic_non_lvalue_loc (loc,
5251 fold_convert_loc (loc, type, tem));
5253 break;
5255 case GT_EXPR:
5256 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5257 MAX_EXPR, to preserve the signedness of the comparison. */
5258 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5259 OEP_ONLY_CONST)
5260 && operand_equal_p (arg01,
5261 const_binop (MINUS_EXPR, arg2,
5262 build_int_cst (type, 1)),
5263 OEP_ONLY_CONST))
5265 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5266 fold_convert_loc (loc, TREE_TYPE (arg00),
5267 arg2));
5268 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5270 break;
5272 case GE_EXPR:
5273 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5274 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5275 OEP_ONLY_CONST)
5276 && operand_equal_p (arg01,
5277 const_binop (PLUS_EXPR, arg2,
5278 build_int_cst (type, 1)),
5279 OEP_ONLY_CONST))
5281 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5282 fold_convert_loc (loc, TREE_TYPE (arg00),
5283 arg2));
5284 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5286 break;
5287 case NE_EXPR:
5288 break;
5289 default:
5290 gcc_unreachable ();
5293 return NULL_TREE;
5298 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5299 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5300 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5301 false) >= 2)
5302 #endif
5304 /* EXP is some logical combination of boolean tests. See if we can
5305 merge it into some range test. Return the new tree if so. */
5307 static tree
5308 fold_range_test (location_t loc, enum tree_code code, tree type,
5309 tree op0, tree op1)
5311 int or_op = (code == TRUTH_ORIF_EXPR
5312 || code == TRUTH_OR_EXPR);
5313 int in0_p, in1_p, in_p;
5314 tree low0, low1, low, high0, high1, high;
5315 bool strict_overflow_p = false;
5316 tree tem, lhs, rhs;
5317 const char * const warnmsg = G_("assuming signed overflow does not occur "
5318 "when simplifying range test");
5320 if (!INTEGRAL_TYPE_P (type))
5321 return 0;
5323 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5324 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5326 /* If this is an OR operation, invert both sides; we will invert
5327 again at the end. */
5328 if (or_op)
5329 in0_p = ! in0_p, in1_p = ! in1_p;
5331 /* If both expressions are the same, if we can merge the ranges, and we
5332 can build the range test, return it or it inverted. If one of the
5333 ranges is always true or always false, consider it to be the same
5334 expression as the other. */
5335 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5336 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5337 in1_p, low1, high1)
5338 && 0 != (tem = (build_range_check (loc, type,
5339 lhs != 0 ? lhs
5340 : rhs != 0 ? rhs : integer_zero_node,
5341 in_p, low, high))))
5343 if (strict_overflow_p)
5344 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5345 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5348 /* On machines where the branch cost is expensive, if this is a
5349 short-circuited branch and the underlying object on both sides
5350 is the same, make a non-short-circuit operation. */
5351 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5352 && lhs != 0 && rhs != 0
5353 && (code == TRUTH_ANDIF_EXPR
5354 || code == TRUTH_ORIF_EXPR)
5355 && operand_equal_p (lhs, rhs, 0))
5357 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5358 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5359 which cases we can't do this. */
5360 if (simple_operand_p (lhs))
5361 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5362 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5363 type, op0, op1);
5365 else if (!lang_hooks.decls.global_bindings_p ()
5366 && !CONTAINS_PLACEHOLDER_P (lhs))
5368 tree common = save_expr (lhs);
5370 if (0 != (lhs = build_range_check (loc, type, common,
5371 or_op ? ! in0_p : in0_p,
5372 low0, high0))
5373 && (0 != (rhs = build_range_check (loc, type, common,
5374 or_op ? ! in1_p : in1_p,
5375 low1, high1))))
5377 if (strict_overflow_p)
5378 fold_overflow_warning (warnmsg,
5379 WARN_STRICT_OVERFLOW_COMPARISON);
5380 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5381 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5382 type, lhs, rhs);
5387 return 0;
5390 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5391 bit value. Arrange things so the extra bits will be set to zero if and
5392 only if C is signed-extended to its full width. If MASK is nonzero,
5393 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5395 static tree
5396 unextend (tree c, int p, int unsignedp, tree mask)
5398 tree type = TREE_TYPE (c);
5399 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5400 tree temp;
5402 if (p == modesize || unsignedp)
5403 return c;
5405 /* We work by getting just the sign bit into the low-order bit, then
5406 into the high-order bit, then sign-extend. We then XOR that value
5407 with C. */
5408 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5410 /* We must use a signed type in order to get an arithmetic right shift.
5411 However, we must also avoid introducing accidental overflows, so that
5412 a subsequent call to integer_zerop will work. Hence we must
5413 do the type conversion here. At this point, the constant is either
5414 zero or one, and the conversion to a signed type can never overflow.
5415 We could get an overflow if this conversion is done anywhere else. */
5416 if (TYPE_UNSIGNED (type))
5417 temp = fold_convert (signed_type_for (type), temp);
5419 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5420 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5421 if (mask != 0)
5422 temp = const_binop (BIT_AND_EXPR, temp,
5423 fold_convert (TREE_TYPE (c), mask));
5424 /* If necessary, convert the type back to match the type of C. */
5425 if (TYPE_UNSIGNED (type))
5426 temp = fold_convert (type, temp);
5428 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5431 /* For an expression that has the form
5432 (A && B) || ~B
5434 (A || B) && ~B,
5435 we can drop one of the inner expressions and simplify to
5436 A || ~B
5438 A && ~B
5439 LOC is the location of the resulting expression. OP is the inner
5440 logical operation; the left-hand side in the examples above, while CMPOP
5441 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5442 removing a condition that guards another, as in
5443 (A != NULL && A->...) || A == NULL
5444 which we must not transform. If RHS_ONLY is true, only eliminate the
5445 right-most operand of the inner logical operation. */
5447 static tree
5448 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5449 bool rhs_only)
5451 tree type = TREE_TYPE (cmpop);
5452 enum tree_code code = TREE_CODE (cmpop);
5453 enum tree_code truthop_code = TREE_CODE (op);
5454 tree lhs = TREE_OPERAND (op, 0);
5455 tree rhs = TREE_OPERAND (op, 1);
5456 tree orig_lhs = lhs, orig_rhs = rhs;
5457 enum tree_code rhs_code = TREE_CODE (rhs);
5458 enum tree_code lhs_code = TREE_CODE (lhs);
5459 enum tree_code inv_code;
5461 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5462 return NULL_TREE;
5464 if (TREE_CODE_CLASS (code) != tcc_comparison)
5465 return NULL_TREE;
5467 if (rhs_code == truthop_code)
5469 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5470 if (newrhs != NULL_TREE)
5472 rhs = newrhs;
5473 rhs_code = TREE_CODE (rhs);
5476 if (lhs_code == truthop_code && !rhs_only)
5478 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5479 if (newlhs != NULL_TREE)
5481 lhs = newlhs;
5482 lhs_code = TREE_CODE (lhs);
5486 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5487 if (inv_code == rhs_code
5488 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5489 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5490 return lhs;
5491 if (!rhs_only && inv_code == lhs_code
5492 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5493 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5494 return rhs;
5495 if (rhs != orig_rhs || lhs != orig_lhs)
5496 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5497 lhs, rhs);
5498 return NULL_TREE;
5501 /* Find ways of folding logical expressions of LHS and RHS:
5502 Try to merge two comparisons to the same innermost item.
5503 Look for range tests like "ch >= '0' && ch <= '9'".
5504 Look for combinations of simple terms on machines with expensive branches
5505 and evaluate the RHS unconditionally.
5507 For example, if we have p->a == 2 && p->b == 4 and we can make an
5508 object large enough to span both A and B, we can do this with a comparison
5509 against the object ANDed with the a mask.
5511 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5512 operations to do this with one comparison.
5514 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5515 function and the one above.
5517 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5518 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5520 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5521 two operands.
5523 We return the simplified tree or 0 if no optimization is possible. */
5525 static tree
5526 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5527 tree lhs, tree rhs)
5529 /* If this is the "or" of two comparisons, we can do something if
5530 the comparisons are NE_EXPR. If this is the "and", we can do something
5531 if the comparisons are EQ_EXPR. I.e.,
5532 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5534 WANTED_CODE is this operation code. For single bit fields, we can
5535 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5536 comparison for one-bit fields. */
5538 enum tree_code wanted_code;
5539 enum tree_code lcode, rcode;
5540 tree ll_arg, lr_arg, rl_arg, rr_arg;
5541 tree ll_inner, lr_inner, rl_inner, rr_inner;
5542 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5543 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5544 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5545 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5546 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5547 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5548 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5549 machine_mode lnmode, rnmode;
5550 tree ll_mask, lr_mask, rl_mask, rr_mask;
5551 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5552 tree l_const, r_const;
5553 tree lntype, rntype, result;
5554 HOST_WIDE_INT first_bit, end_bit;
5555 int volatilep;
5557 /* Start by getting the comparison codes. Fail if anything is volatile.
5558 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5559 it were surrounded with a NE_EXPR. */
5561 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5562 return 0;
5564 lcode = TREE_CODE (lhs);
5565 rcode = TREE_CODE (rhs);
5567 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5569 lhs = build2 (NE_EXPR, truth_type, lhs,
5570 build_int_cst (TREE_TYPE (lhs), 0));
5571 lcode = NE_EXPR;
5574 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5576 rhs = build2 (NE_EXPR, truth_type, rhs,
5577 build_int_cst (TREE_TYPE (rhs), 0));
5578 rcode = NE_EXPR;
5581 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5582 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5583 return 0;
5585 ll_arg = TREE_OPERAND (lhs, 0);
5586 lr_arg = TREE_OPERAND (lhs, 1);
5587 rl_arg = TREE_OPERAND (rhs, 0);
5588 rr_arg = TREE_OPERAND (rhs, 1);
5590 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5591 if (simple_operand_p (ll_arg)
5592 && simple_operand_p (lr_arg))
5594 if (operand_equal_p (ll_arg, rl_arg, 0)
5595 && operand_equal_p (lr_arg, rr_arg, 0))
5597 result = combine_comparisons (loc, code, lcode, rcode,
5598 truth_type, ll_arg, lr_arg);
5599 if (result)
5600 return result;
5602 else if (operand_equal_p (ll_arg, rr_arg, 0)
5603 && operand_equal_p (lr_arg, rl_arg, 0))
5605 result = combine_comparisons (loc, code, lcode,
5606 swap_tree_comparison (rcode),
5607 truth_type, ll_arg, lr_arg);
5608 if (result)
5609 return result;
5613 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5614 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5616 /* If the RHS can be evaluated unconditionally and its operands are
5617 simple, it wins to evaluate the RHS unconditionally on machines
5618 with expensive branches. In this case, this isn't a comparison
5619 that can be merged. */
5621 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5622 false) >= 2
5623 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5624 && simple_operand_p (rl_arg)
5625 && simple_operand_p (rr_arg))
5627 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5628 if (code == TRUTH_OR_EXPR
5629 && lcode == NE_EXPR && integer_zerop (lr_arg)
5630 && rcode == NE_EXPR && integer_zerop (rr_arg)
5631 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5632 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5633 return build2_loc (loc, NE_EXPR, truth_type,
5634 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5635 ll_arg, rl_arg),
5636 build_int_cst (TREE_TYPE (ll_arg), 0));
5638 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5639 if (code == TRUTH_AND_EXPR
5640 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5641 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5642 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5643 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5644 return build2_loc (loc, EQ_EXPR, truth_type,
5645 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5646 ll_arg, rl_arg),
5647 build_int_cst (TREE_TYPE (ll_arg), 0));
5650 /* See if the comparisons can be merged. Then get all the parameters for
5651 each side. */
5653 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5654 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5655 return 0;
5657 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5658 volatilep = 0;
5659 ll_inner = decode_field_reference (loc, ll_arg,
5660 &ll_bitsize, &ll_bitpos, &ll_mode,
5661 &ll_unsignedp, &ll_reversep, &volatilep,
5662 &ll_mask, &ll_and_mask);
5663 lr_inner = decode_field_reference (loc, lr_arg,
5664 &lr_bitsize, &lr_bitpos, &lr_mode,
5665 &lr_unsignedp, &lr_reversep, &volatilep,
5666 &lr_mask, &lr_and_mask);
5667 rl_inner = decode_field_reference (loc, rl_arg,
5668 &rl_bitsize, &rl_bitpos, &rl_mode,
5669 &rl_unsignedp, &rl_reversep, &volatilep,
5670 &rl_mask, &rl_and_mask);
5671 rr_inner = decode_field_reference (loc, rr_arg,
5672 &rr_bitsize, &rr_bitpos, &rr_mode,
5673 &rr_unsignedp, &rr_reversep, &volatilep,
5674 &rr_mask, &rr_and_mask);
5676 /* It must be true that the inner operation on the lhs of each
5677 comparison must be the same if we are to be able to do anything.
5678 Then see if we have constants. If not, the same must be true for
5679 the rhs's. */
5680 if (volatilep
5681 || ll_reversep != rl_reversep
5682 || ll_inner == 0 || rl_inner == 0
5683 || ! operand_equal_p (ll_inner, rl_inner, 0))
5684 return 0;
5686 if (TREE_CODE (lr_arg) == INTEGER_CST
5687 && TREE_CODE (rr_arg) == INTEGER_CST)
5689 l_const = lr_arg, r_const = rr_arg;
5690 lr_reversep = ll_reversep;
5692 else if (lr_reversep != rr_reversep
5693 || lr_inner == 0 || rr_inner == 0
5694 || ! operand_equal_p (lr_inner, rr_inner, 0))
5695 return 0;
5696 else
5697 l_const = r_const = 0;
5699 /* If either comparison code is not correct for our logical operation,
5700 fail. However, we can convert a one-bit comparison against zero into
5701 the opposite comparison against that bit being set in the field. */
5703 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5704 if (lcode != wanted_code)
5706 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5708 /* Make the left operand unsigned, since we are only interested
5709 in the value of one bit. Otherwise we are doing the wrong
5710 thing below. */
5711 ll_unsignedp = 1;
5712 l_const = ll_mask;
5714 else
5715 return 0;
5718 /* This is analogous to the code for l_const above. */
5719 if (rcode != wanted_code)
5721 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5723 rl_unsignedp = 1;
5724 r_const = rl_mask;
5726 else
5727 return 0;
5730 /* See if we can find a mode that contains both fields being compared on
5731 the left. If we can't, fail. Otherwise, update all constants and masks
5732 to be relative to a field of that size. */
5733 first_bit = MIN (ll_bitpos, rl_bitpos);
5734 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5735 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5736 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5737 volatilep);
5738 if (lnmode == VOIDmode)
5739 return 0;
5741 lnbitsize = GET_MODE_BITSIZE (lnmode);
5742 lnbitpos = first_bit & ~ (lnbitsize - 1);
5743 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5744 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5746 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5748 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5749 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5752 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5753 size_int (xll_bitpos));
5754 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5755 size_int (xrl_bitpos));
5757 if (l_const)
5759 l_const = fold_convert_loc (loc, lntype, l_const);
5760 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5761 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5762 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5763 fold_build1_loc (loc, BIT_NOT_EXPR,
5764 lntype, ll_mask))))
5766 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5768 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5771 if (r_const)
5773 r_const = fold_convert_loc (loc, lntype, r_const);
5774 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5775 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5776 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5777 fold_build1_loc (loc, BIT_NOT_EXPR,
5778 lntype, rl_mask))))
5780 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5782 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5786 /* If the right sides are not constant, do the same for it. Also,
5787 disallow this optimization if a size or signedness mismatch occurs
5788 between the left and right sides. */
5789 if (l_const == 0)
5791 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5792 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5793 /* Make sure the two fields on the right
5794 correspond to the left without being swapped. */
5795 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5796 return 0;
5798 first_bit = MIN (lr_bitpos, rr_bitpos);
5799 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5800 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5801 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5802 volatilep);
5803 if (rnmode == VOIDmode)
5804 return 0;
5806 rnbitsize = GET_MODE_BITSIZE (rnmode);
5807 rnbitpos = first_bit & ~ (rnbitsize - 1);
5808 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5809 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5811 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5813 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5814 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5817 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5818 rntype, lr_mask),
5819 size_int (xlr_bitpos));
5820 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5821 rntype, rr_mask),
5822 size_int (xrr_bitpos));
5824 /* Make a mask that corresponds to both fields being compared.
5825 Do this for both items being compared. If the operands are the
5826 same size and the bits being compared are in the same position
5827 then we can do this by masking both and comparing the masked
5828 results. */
5829 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5830 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5831 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5833 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5834 ll_unsignedp || rl_unsignedp, ll_reversep);
5835 if (! all_ones_mask_p (ll_mask, lnbitsize))
5836 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5838 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5839 lr_unsignedp || rr_unsignedp, lr_reversep);
5840 if (! all_ones_mask_p (lr_mask, rnbitsize))
5841 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5843 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5846 /* There is still another way we can do something: If both pairs of
5847 fields being compared are adjacent, we may be able to make a wider
5848 field containing them both.
5850 Note that we still must mask the lhs/rhs expressions. Furthermore,
5851 the mask must be shifted to account for the shift done by
5852 make_bit_field_ref. */
5853 if ((ll_bitsize + ll_bitpos == rl_bitpos
5854 && lr_bitsize + lr_bitpos == rr_bitpos)
5855 || (ll_bitpos == rl_bitpos + rl_bitsize
5856 && lr_bitpos == rr_bitpos + rr_bitsize))
5858 tree type;
5860 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5861 ll_bitsize + rl_bitsize,
5862 MIN (ll_bitpos, rl_bitpos),
5863 ll_unsignedp, ll_reversep);
5864 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5865 lr_bitsize + rr_bitsize,
5866 MIN (lr_bitpos, rr_bitpos),
5867 lr_unsignedp, lr_reversep);
5869 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5870 size_int (MIN (xll_bitpos, xrl_bitpos)));
5871 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5872 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5874 /* Convert to the smaller type before masking out unwanted bits. */
5875 type = lntype;
5876 if (lntype != rntype)
5878 if (lnbitsize > rnbitsize)
5880 lhs = fold_convert_loc (loc, rntype, lhs);
5881 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5882 type = rntype;
5884 else if (lnbitsize < rnbitsize)
5886 rhs = fold_convert_loc (loc, lntype, rhs);
5887 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5888 type = lntype;
5892 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5893 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5895 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5896 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5898 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5901 return 0;
5904 /* Handle the case of comparisons with constants. If there is something in
5905 common between the masks, those bits of the constants must be the same.
5906 If not, the condition is always false. Test for this to avoid generating
5907 incorrect code below. */
5908 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5909 if (! integer_zerop (result)
5910 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5911 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5913 if (wanted_code == NE_EXPR)
5915 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5916 return constant_boolean_node (true, truth_type);
5918 else
5920 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5921 return constant_boolean_node (false, truth_type);
5925 /* Construct the expression we will return. First get the component
5926 reference we will make. Unless the mask is all ones the width of
5927 that field, perform the mask operation. Then compare with the
5928 merged constant. */
5929 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5930 ll_unsignedp || rl_unsignedp, ll_reversep);
5932 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5933 if (! all_ones_mask_p (ll_mask, lnbitsize))
5934 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5936 return build2_loc (loc, wanted_code, truth_type, result,
5937 const_binop (BIT_IOR_EXPR, l_const, r_const));
5940 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5941 constant. */
5943 static tree
5944 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5945 tree op0, tree op1)
5947 tree arg0 = op0;
5948 enum tree_code op_code;
5949 tree comp_const;
5950 tree minmax_const;
5951 int consts_equal, consts_lt;
5952 tree inner;
5954 STRIP_SIGN_NOPS (arg0);
5956 op_code = TREE_CODE (arg0);
5957 minmax_const = TREE_OPERAND (arg0, 1);
5958 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5959 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5960 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5961 inner = TREE_OPERAND (arg0, 0);
5963 /* If something does not permit us to optimize, return the original tree. */
5964 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5965 || TREE_CODE (comp_const) != INTEGER_CST
5966 || TREE_OVERFLOW (comp_const)
5967 || TREE_CODE (minmax_const) != INTEGER_CST
5968 || TREE_OVERFLOW (minmax_const))
5969 return NULL_TREE;
5971 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5972 and GT_EXPR, doing the rest with recursive calls using logical
5973 simplifications. */
5974 switch (code)
5976 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5978 tree tem
5979 = optimize_minmax_comparison (loc,
5980 invert_tree_comparison (code, false),
5981 type, op0, op1);
5982 if (tem)
5983 return invert_truthvalue_loc (loc, tem);
5984 return NULL_TREE;
5987 case GE_EXPR:
5988 return
5989 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5990 optimize_minmax_comparison
5991 (loc, EQ_EXPR, type, arg0, comp_const),
5992 optimize_minmax_comparison
5993 (loc, GT_EXPR, type, arg0, comp_const));
5995 case EQ_EXPR:
5996 if (op_code == MAX_EXPR && consts_equal)
5997 /* MAX (X, 0) == 0 -> X <= 0 */
5998 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6000 else if (op_code == MAX_EXPR && consts_lt)
6001 /* MAX (X, 0) == 5 -> X == 5 */
6002 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6004 else if (op_code == MAX_EXPR)
6005 /* MAX (X, 0) == -1 -> false */
6006 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6008 else if (consts_equal)
6009 /* MIN (X, 0) == 0 -> X >= 0 */
6010 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6012 else if (consts_lt)
6013 /* MIN (X, 0) == 5 -> false */
6014 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6016 else
6017 /* MIN (X, 0) == -1 -> X == -1 */
6018 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6020 case GT_EXPR:
6021 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6022 /* MAX (X, 0) > 0 -> X > 0
6023 MAX (X, 0) > 5 -> X > 5 */
6024 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6026 else if (op_code == MAX_EXPR)
6027 /* MAX (X, 0) > -1 -> true */
6028 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6030 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6031 /* MIN (X, 0) > 0 -> false
6032 MIN (X, 0) > 5 -> false */
6033 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6035 else
6036 /* MIN (X, 0) > -1 -> X > -1 */
6037 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6039 default:
6040 return NULL_TREE;
6044 /* T is an integer expression that is being multiplied, divided, or taken a
6045 modulus (CODE says which and what kind of divide or modulus) by a
6046 constant C. See if we can eliminate that operation by folding it with
6047 other operations already in T. WIDE_TYPE, if non-null, is a type that
6048 should be used for the computation if wider than our type.
6050 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6051 (X * 2) + (Y * 4). We must, however, be assured that either the original
6052 expression would not overflow or that overflow is undefined for the type
6053 in the language in question.
6055 If we return a non-null expression, it is an equivalent form of the
6056 original computation, but need not be in the original type.
6058 We set *STRICT_OVERFLOW_P to true if the return values depends on
6059 signed overflow being undefined. Otherwise we do not change
6060 *STRICT_OVERFLOW_P. */
6062 static tree
6063 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6064 bool *strict_overflow_p)
6066 /* To avoid exponential search depth, refuse to allow recursion past
6067 three levels. Beyond that (1) it's highly unlikely that we'll find
6068 something interesting and (2) we've probably processed it before
6069 when we built the inner expression. */
6071 static int depth;
6072 tree ret;
6074 if (depth > 3)
6075 return NULL;
6077 depth++;
6078 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6079 depth--;
6081 return ret;
6084 static tree
6085 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6086 bool *strict_overflow_p)
6088 tree type = TREE_TYPE (t);
6089 enum tree_code tcode = TREE_CODE (t);
6090 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6091 > GET_MODE_SIZE (TYPE_MODE (type)))
6092 ? wide_type : type);
6093 tree t1, t2;
6094 int same_p = tcode == code;
6095 tree op0 = NULL_TREE, op1 = NULL_TREE;
6096 bool sub_strict_overflow_p;
6098 /* Don't deal with constants of zero here; they confuse the code below. */
6099 if (integer_zerop (c))
6100 return NULL_TREE;
6102 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6103 op0 = TREE_OPERAND (t, 0);
6105 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6106 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6108 /* Note that we need not handle conditional operations here since fold
6109 already handles those cases. So just do arithmetic here. */
6110 switch (tcode)
6112 case INTEGER_CST:
6113 /* For a constant, we can always simplify if we are a multiply
6114 or (for divide and modulus) if it is a multiple of our constant. */
6115 if (code == MULT_EXPR
6116 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6118 tree tem = const_binop (code, fold_convert (ctype, t),
6119 fold_convert (ctype, c));
6120 /* If the multiplication overflowed, we lost information on it.
6121 See PR68142 and PR69845. */
6122 if (TREE_OVERFLOW (tem))
6123 return NULL_TREE;
6124 return tem;
6126 break;
6128 CASE_CONVERT: case NON_LVALUE_EXPR:
6129 /* If op0 is an expression ... */
6130 if ((COMPARISON_CLASS_P (op0)
6131 || UNARY_CLASS_P (op0)
6132 || BINARY_CLASS_P (op0)
6133 || VL_EXP_CLASS_P (op0)
6134 || EXPRESSION_CLASS_P (op0))
6135 /* ... and has wrapping overflow, and its type is smaller
6136 than ctype, then we cannot pass through as widening. */
6137 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6138 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6139 && (TYPE_PRECISION (ctype)
6140 > TYPE_PRECISION (TREE_TYPE (op0))))
6141 /* ... or this is a truncation (t is narrower than op0),
6142 then we cannot pass through this narrowing. */
6143 || (TYPE_PRECISION (type)
6144 < TYPE_PRECISION (TREE_TYPE (op0)))
6145 /* ... or signedness changes for division or modulus,
6146 then we cannot pass through this conversion. */
6147 || (code != MULT_EXPR
6148 && (TYPE_UNSIGNED (ctype)
6149 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6150 /* ... or has undefined overflow while the converted to
6151 type has not, we cannot do the operation in the inner type
6152 as that would introduce undefined overflow. */
6153 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6154 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6155 && !TYPE_OVERFLOW_UNDEFINED (type))))
6156 break;
6158 /* Pass the constant down and see if we can make a simplification. If
6159 we can, replace this expression with the inner simplification for
6160 possible later conversion to our or some other type. */
6161 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6162 && TREE_CODE (t2) == INTEGER_CST
6163 && !TREE_OVERFLOW (t2)
6164 && (0 != (t1 = extract_muldiv (op0, t2, code,
6165 code == MULT_EXPR
6166 ? ctype : NULL_TREE,
6167 strict_overflow_p))))
6168 return t1;
6169 break;
6171 case ABS_EXPR:
6172 /* If widening the type changes it from signed to unsigned, then we
6173 must avoid building ABS_EXPR itself as unsigned. */
6174 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6176 tree cstype = (*signed_type_for) (ctype);
6177 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6178 != 0)
6180 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6181 return fold_convert (ctype, t1);
6183 break;
6185 /* If the constant is negative, we cannot simplify this. */
6186 if (tree_int_cst_sgn (c) == -1)
6187 break;
6188 /* FALLTHROUGH */
6189 case NEGATE_EXPR:
6190 /* For division and modulus, type can't be unsigned, as e.g.
6191 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6192 For signed types, even with wrapping overflow, this is fine. */
6193 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6194 break;
6195 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6196 != 0)
6197 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6198 break;
6200 case MIN_EXPR: case MAX_EXPR:
6201 /* If widening the type changes the signedness, then we can't perform
6202 this optimization as that changes the result. */
6203 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6204 break;
6206 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6207 sub_strict_overflow_p = false;
6208 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6209 &sub_strict_overflow_p)) != 0
6210 && (t2 = extract_muldiv (op1, c, code, wide_type,
6211 &sub_strict_overflow_p)) != 0)
6213 if (tree_int_cst_sgn (c) < 0)
6214 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6215 if (sub_strict_overflow_p)
6216 *strict_overflow_p = true;
6217 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6218 fold_convert (ctype, t2));
6220 break;
6222 case LSHIFT_EXPR: case RSHIFT_EXPR:
6223 /* If the second operand is constant, this is a multiplication
6224 or floor division, by a power of two, so we can treat it that
6225 way unless the multiplier or divisor overflows. Signed
6226 left-shift overflow is implementation-defined rather than
6227 undefined in C90, so do not convert signed left shift into
6228 multiplication. */
6229 if (TREE_CODE (op1) == INTEGER_CST
6230 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6231 /* const_binop may not detect overflow correctly,
6232 so check for it explicitly here. */
6233 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6234 && 0 != (t1 = fold_convert (ctype,
6235 const_binop (LSHIFT_EXPR,
6236 size_one_node,
6237 op1)))
6238 && !TREE_OVERFLOW (t1))
6239 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6240 ? MULT_EXPR : FLOOR_DIV_EXPR,
6241 ctype,
6242 fold_convert (ctype, op0),
6243 t1),
6244 c, code, wide_type, strict_overflow_p);
6245 break;
6247 case PLUS_EXPR: case MINUS_EXPR:
6248 /* See if we can eliminate the operation on both sides. If we can, we
6249 can return a new PLUS or MINUS. If we can't, the only remaining
6250 cases where we can do anything are if the second operand is a
6251 constant. */
6252 sub_strict_overflow_p = false;
6253 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6254 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6255 if (t1 != 0 && t2 != 0
6256 && (code == MULT_EXPR
6257 /* If not multiplication, we can only do this if both operands
6258 are divisible by c. */
6259 || (multiple_of_p (ctype, op0, c)
6260 && multiple_of_p (ctype, op1, c))))
6262 if (sub_strict_overflow_p)
6263 *strict_overflow_p = true;
6264 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6265 fold_convert (ctype, t2));
6268 /* If this was a subtraction, negate OP1 and set it to be an addition.
6269 This simplifies the logic below. */
6270 if (tcode == MINUS_EXPR)
6272 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6273 /* If OP1 was not easily negatable, the constant may be OP0. */
6274 if (TREE_CODE (op0) == INTEGER_CST)
6276 std::swap (op0, op1);
6277 std::swap (t1, t2);
6281 if (TREE_CODE (op1) != INTEGER_CST)
6282 break;
6284 /* If either OP1 or C are negative, this optimization is not safe for
6285 some of the division and remainder types while for others we need
6286 to change the code. */
6287 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6289 if (code == CEIL_DIV_EXPR)
6290 code = FLOOR_DIV_EXPR;
6291 else if (code == FLOOR_DIV_EXPR)
6292 code = CEIL_DIV_EXPR;
6293 else if (code != MULT_EXPR
6294 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6295 break;
6298 /* If it's a multiply or a division/modulus operation of a multiple
6299 of our constant, do the operation and verify it doesn't overflow. */
6300 if (code == MULT_EXPR
6301 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6303 op1 = const_binop (code, fold_convert (ctype, op1),
6304 fold_convert (ctype, c));
6305 /* We allow the constant to overflow with wrapping semantics. */
6306 if (op1 == 0
6307 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6308 break;
6310 else
6311 break;
6313 /* If we have an unsigned type, we cannot widen the operation since it
6314 will change the result if the original computation overflowed. */
6315 if (TYPE_UNSIGNED (ctype) && ctype != type)
6316 break;
6318 /* If we were able to eliminate our operation from the first side,
6319 apply our operation to the second side and reform the PLUS. */
6320 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6321 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6323 /* The last case is if we are a multiply. In that case, we can
6324 apply the distributive law to commute the multiply and addition
6325 if the multiplication of the constants doesn't overflow
6326 and overflow is defined. With undefined overflow
6327 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6328 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6329 return fold_build2 (tcode, ctype,
6330 fold_build2 (code, ctype,
6331 fold_convert (ctype, op0),
6332 fold_convert (ctype, c)),
6333 op1);
6335 break;
6337 case MULT_EXPR:
6338 /* We have a special case here if we are doing something like
6339 (C * 8) % 4 since we know that's zero. */
6340 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6341 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6342 /* If the multiplication can overflow we cannot optimize this. */
6343 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6344 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6345 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6347 *strict_overflow_p = true;
6348 return omit_one_operand (type, integer_zero_node, op0);
6351 /* ... fall through ... */
6353 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6354 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6355 /* If we can extract our operation from the LHS, do so and return a
6356 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6357 do something only if the second operand is a constant. */
6358 if (same_p
6359 && (t1 = extract_muldiv (op0, c, code, wide_type,
6360 strict_overflow_p)) != 0)
6361 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6362 fold_convert (ctype, op1));
6363 else if (tcode == MULT_EXPR && code == MULT_EXPR
6364 && (t1 = extract_muldiv (op1, c, code, wide_type,
6365 strict_overflow_p)) != 0)
6366 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6367 fold_convert (ctype, t1));
6368 else if (TREE_CODE (op1) != INTEGER_CST)
6369 return 0;
6371 /* If these are the same operation types, we can associate them
6372 assuming no overflow. */
6373 if (tcode == code)
6375 bool overflow_p = false;
6376 bool overflow_mul_p;
6377 signop sign = TYPE_SIGN (ctype);
6378 unsigned prec = TYPE_PRECISION (ctype);
6379 wide_int mul = wi::mul (wide_int::from (op1, prec,
6380 TYPE_SIGN (TREE_TYPE (op1))),
6381 wide_int::from (c, prec,
6382 TYPE_SIGN (TREE_TYPE (c))),
6383 sign, &overflow_mul_p);
6384 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6385 if (overflow_mul_p
6386 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6387 overflow_p = true;
6388 if (!overflow_p)
6389 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6390 wide_int_to_tree (ctype, mul));
6393 /* If these operations "cancel" each other, we have the main
6394 optimizations of this pass, which occur when either constant is a
6395 multiple of the other, in which case we replace this with either an
6396 operation or CODE or TCODE.
6398 If we have an unsigned type, we cannot do this since it will change
6399 the result if the original computation overflowed. */
6400 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6401 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6402 || (tcode == MULT_EXPR
6403 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6404 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6405 && code != MULT_EXPR)))
6407 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6409 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6410 *strict_overflow_p = true;
6411 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6412 fold_convert (ctype,
6413 const_binop (TRUNC_DIV_EXPR,
6414 op1, c)));
6416 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6418 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6419 *strict_overflow_p = true;
6420 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6421 fold_convert (ctype,
6422 const_binop (TRUNC_DIV_EXPR,
6423 c, op1)));
6426 break;
6428 default:
6429 break;
6432 return 0;
6435 /* Return a node which has the indicated constant VALUE (either 0 or
6436 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6437 and is of the indicated TYPE. */
6439 tree
6440 constant_boolean_node (bool value, tree type)
6442 if (type == integer_type_node)
6443 return value ? integer_one_node : integer_zero_node;
6444 else if (type == boolean_type_node)
6445 return value ? boolean_true_node : boolean_false_node;
6446 else if (TREE_CODE (type) == VECTOR_TYPE)
6447 return build_vector_from_val (type,
6448 build_int_cst (TREE_TYPE (type),
6449 value ? -1 : 0));
6450 else
6451 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6455 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6456 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6457 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6458 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6459 COND is the first argument to CODE; otherwise (as in the example
6460 given here), it is the second argument. TYPE is the type of the
6461 original expression. Return NULL_TREE if no simplification is
6462 possible. */
6464 static tree
6465 fold_binary_op_with_conditional_arg (location_t loc,
6466 enum tree_code code,
6467 tree type, tree op0, tree op1,
6468 tree cond, tree arg, int cond_first_p)
6470 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6471 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6472 tree test, true_value, false_value;
6473 tree lhs = NULL_TREE;
6474 tree rhs = NULL_TREE;
6475 enum tree_code cond_code = COND_EXPR;
6477 if (TREE_CODE (cond) == COND_EXPR
6478 || TREE_CODE (cond) == VEC_COND_EXPR)
6480 test = TREE_OPERAND (cond, 0);
6481 true_value = TREE_OPERAND (cond, 1);
6482 false_value = TREE_OPERAND (cond, 2);
6483 /* If this operand throws an expression, then it does not make
6484 sense to try to perform a logical or arithmetic operation
6485 involving it. */
6486 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6487 lhs = true_value;
6488 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6489 rhs = false_value;
6491 else if (!(TREE_CODE (type) != VECTOR_TYPE
6492 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6494 tree testtype = TREE_TYPE (cond);
6495 test = cond;
6496 true_value = constant_boolean_node (true, testtype);
6497 false_value = constant_boolean_node (false, testtype);
6499 else
6500 /* Detect the case of mixing vector and scalar types - bail out. */
6501 return NULL_TREE;
6503 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6504 cond_code = VEC_COND_EXPR;
6506 /* This transformation is only worthwhile if we don't have to wrap ARG
6507 in a SAVE_EXPR and the operation can be simplified without recursing
6508 on at least one of the branches once its pushed inside the COND_EXPR. */
6509 if (!TREE_CONSTANT (arg)
6510 && (TREE_SIDE_EFFECTS (arg)
6511 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6512 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6513 return NULL_TREE;
6515 arg = fold_convert_loc (loc, arg_type, arg);
6516 if (lhs == 0)
6518 true_value = fold_convert_loc (loc, cond_type, true_value);
6519 if (cond_first_p)
6520 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6521 else
6522 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6524 if (rhs == 0)
6526 false_value = fold_convert_loc (loc, cond_type, false_value);
6527 if (cond_first_p)
6528 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6529 else
6530 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6533 /* Check that we have simplified at least one of the branches. */
6534 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6535 return NULL_TREE;
6537 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6541 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6543 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6544 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6545 ADDEND is the same as X.
6547 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6548 and finite. The problematic cases are when X is zero, and its mode
6549 has signed zeros. In the case of rounding towards -infinity,
6550 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6551 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6553 bool
6554 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6556 if (!real_zerop (addend))
6557 return false;
6559 /* Don't allow the fold with -fsignaling-nans. */
6560 if (HONOR_SNANS (element_mode (type)))
6561 return false;
6563 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6564 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6565 return true;
6567 /* In a vector or complex, we would need to check the sign of all zeros. */
6568 if (TREE_CODE (addend) != REAL_CST)
6569 return false;
6571 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6572 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6573 negate = !negate;
6575 /* The mode has signed zeros, and we have to honor their sign.
6576 In this situation, there is only one case we can return true for.
6577 X - 0 is the same as X unless rounding towards -infinity is
6578 supported. */
6579 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6582 /* Subroutine of fold() that optimizes comparisons of a division by
6583 a nonzero integer constant against an integer constant, i.e.
6584 X/C1 op C2.
6586 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6587 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6588 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6590 The function returns the constant folded tree if a simplification
6591 can be made, and NULL_TREE otherwise. */
6593 static tree
6594 fold_div_compare (location_t loc,
6595 enum tree_code code, tree type, tree arg0, tree arg1)
6597 tree prod, tmp, hi, lo;
6598 tree arg00 = TREE_OPERAND (arg0, 0);
6599 tree arg01 = TREE_OPERAND (arg0, 1);
6600 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6601 bool neg_overflow = false;
6602 bool overflow;
6604 /* We have to do this the hard way to detect unsigned overflow.
6605 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6606 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6607 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6608 neg_overflow = false;
6610 if (sign == UNSIGNED)
6612 tmp = int_const_binop (MINUS_EXPR, arg01,
6613 build_int_cst (TREE_TYPE (arg01), 1));
6614 lo = prod;
6616 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6617 val = wi::add (prod, tmp, sign, &overflow);
6618 hi = force_fit_type (TREE_TYPE (arg00), val,
6619 -1, overflow | TREE_OVERFLOW (prod));
6621 else if (tree_int_cst_sgn (arg01) >= 0)
6623 tmp = int_const_binop (MINUS_EXPR, arg01,
6624 build_int_cst (TREE_TYPE (arg01), 1));
6625 switch (tree_int_cst_sgn (arg1))
6627 case -1:
6628 neg_overflow = true;
6629 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6630 hi = prod;
6631 break;
6633 case 0:
6634 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6635 hi = tmp;
6636 break;
6638 case 1:
6639 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6640 lo = prod;
6641 break;
6643 default:
6644 gcc_unreachable ();
6647 else
6649 /* A negative divisor reverses the relational operators. */
6650 code = swap_tree_comparison (code);
6652 tmp = int_const_binop (PLUS_EXPR, arg01,
6653 build_int_cst (TREE_TYPE (arg01), 1));
6654 switch (tree_int_cst_sgn (arg1))
6656 case -1:
6657 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6658 lo = prod;
6659 break;
6661 case 0:
6662 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6663 lo = tmp;
6664 break;
6666 case 1:
6667 neg_overflow = true;
6668 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6669 hi = prod;
6670 break;
6672 default:
6673 gcc_unreachable ();
6677 switch (code)
6679 case EQ_EXPR:
6680 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6681 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6682 if (TREE_OVERFLOW (hi))
6683 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6684 if (TREE_OVERFLOW (lo))
6685 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6686 return build_range_check (loc, type, arg00, 1, lo, hi);
6688 case NE_EXPR:
6689 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6690 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6691 if (TREE_OVERFLOW (hi))
6692 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6693 if (TREE_OVERFLOW (lo))
6694 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6695 return build_range_check (loc, type, arg00, 0, lo, hi);
6697 case LT_EXPR:
6698 if (TREE_OVERFLOW (lo))
6700 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6701 return omit_one_operand_loc (loc, type, tmp, arg00);
6703 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6705 case LE_EXPR:
6706 if (TREE_OVERFLOW (hi))
6708 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6709 return omit_one_operand_loc (loc, type, tmp, arg00);
6711 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6713 case GT_EXPR:
6714 if (TREE_OVERFLOW (hi))
6716 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6717 return omit_one_operand_loc (loc, type, tmp, arg00);
6719 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6721 case GE_EXPR:
6722 if (TREE_OVERFLOW (lo))
6724 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6725 return omit_one_operand_loc (loc, type, tmp, arg00);
6727 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6729 default:
6730 break;
6733 return NULL_TREE;
6737 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6738 equality/inequality test, then return a simplified form of the test
6739 using a sign testing. Otherwise return NULL. TYPE is the desired
6740 result type. */
6742 static tree
6743 fold_single_bit_test_into_sign_test (location_t loc,
6744 enum tree_code code, tree arg0, tree arg1,
6745 tree result_type)
6747 /* If this is testing a single bit, we can optimize the test. */
6748 if ((code == NE_EXPR || code == EQ_EXPR)
6749 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6750 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6752 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6753 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6754 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6756 if (arg00 != NULL_TREE
6757 /* This is only a win if casting to a signed type is cheap,
6758 i.e. when arg00's type is not a partial mode. */
6759 && TYPE_PRECISION (TREE_TYPE (arg00))
6760 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6762 tree stype = signed_type_for (TREE_TYPE (arg00));
6763 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6764 result_type,
6765 fold_convert_loc (loc, stype, arg00),
6766 build_int_cst (stype, 0));
6770 return NULL_TREE;
6773 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6774 equality/inequality test, then return a simplified form of
6775 the test using shifts and logical operations. Otherwise return
6776 NULL. TYPE is the desired result type. */
6778 tree
6779 fold_single_bit_test (location_t loc, enum tree_code code,
6780 tree arg0, tree arg1, tree result_type)
6782 /* If this is testing a single bit, we can optimize the test. */
6783 if ((code == NE_EXPR || code == EQ_EXPR)
6784 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6785 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6787 tree inner = TREE_OPERAND (arg0, 0);
6788 tree type = TREE_TYPE (arg0);
6789 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6790 machine_mode operand_mode = TYPE_MODE (type);
6791 int ops_unsigned;
6792 tree signed_type, unsigned_type, intermediate_type;
6793 tree tem, one;
6795 /* First, see if we can fold the single bit test into a sign-bit
6796 test. */
6797 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6798 result_type);
6799 if (tem)
6800 return tem;
6802 /* Otherwise we have (A & C) != 0 where C is a single bit,
6803 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6804 Similarly for (A & C) == 0. */
6806 /* If INNER is a right shift of a constant and it plus BITNUM does
6807 not overflow, adjust BITNUM and INNER. */
6808 if (TREE_CODE (inner) == RSHIFT_EXPR
6809 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6810 && bitnum < TYPE_PRECISION (type)
6811 && wi::ltu_p (TREE_OPERAND (inner, 1),
6812 TYPE_PRECISION (type) - bitnum))
6814 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6815 inner = TREE_OPERAND (inner, 0);
6818 /* If we are going to be able to omit the AND below, we must do our
6819 operations as unsigned. If we must use the AND, we have a choice.
6820 Normally unsigned is faster, but for some machines signed is. */
6821 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6822 && !flag_syntax_only) ? 0 : 1;
6824 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6825 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6826 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6827 inner = fold_convert_loc (loc, intermediate_type, inner);
6829 if (bitnum != 0)
6830 inner = build2 (RSHIFT_EXPR, intermediate_type,
6831 inner, size_int (bitnum));
6833 one = build_int_cst (intermediate_type, 1);
6835 if (code == EQ_EXPR)
6836 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6838 /* Put the AND last so it can combine with more things. */
6839 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6841 /* Make sure to return the proper type. */
6842 inner = fold_convert_loc (loc, result_type, inner);
6844 return inner;
6846 return NULL_TREE;
6849 /* Check whether we are allowed to reorder operands arg0 and arg1,
6850 such that the evaluation of arg1 occurs before arg0. */
6852 static bool
6853 reorder_operands_p (const_tree arg0, const_tree arg1)
6855 if (! flag_evaluation_order)
6856 return true;
6857 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6858 return true;
6859 return ! TREE_SIDE_EFFECTS (arg0)
6860 && ! TREE_SIDE_EFFECTS (arg1);
6863 /* Test whether it is preferable two swap two operands, ARG0 and
6864 ARG1, for example because ARG0 is an integer constant and ARG1
6865 isn't. If REORDER is true, only recommend swapping if we can
6866 evaluate the operands in reverse order. */
6868 bool
6869 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6871 if (CONSTANT_CLASS_P (arg1))
6872 return 0;
6873 if (CONSTANT_CLASS_P (arg0))
6874 return 1;
6876 STRIP_NOPS (arg0);
6877 STRIP_NOPS (arg1);
6879 if (TREE_CONSTANT (arg1))
6880 return 0;
6881 if (TREE_CONSTANT (arg0))
6882 return 1;
6884 if (reorder && flag_evaluation_order
6885 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6886 return 0;
6888 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6889 for commutative and comparison operators. Ensuring a canonical
6890 form allows the optimizers to find additional redundancies without
6891 having to explicitly check for both orderings. */
6892 if (TREE_CODE (arg0) == SSA_NAME
6893 && TREE_CODE (arg1) == SSA_NAME
6894 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6895 return 1;
6897 /* Put SSA_NAMEs last. */
6898 if (TREE_CODE (arg1) == SSA_NAME)
6899 return 0;
6900 if (TREE_CODE (arg0) == SSA_NAME)
6901 return 1;
6903 /* Put variables last. */
6904 if (DECL_P (arg1))
6905 return 0;
6906 if (DECL_P (arg0))
6907 return 1;
6909 return 0;
6913 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6914 means A >= Y && A != MAX, but in this case we know that
6915 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6917 static tree
6918 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6920 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6922 if (TREE_CODE (bound) == LT_EXPR)
6923 a = TREE_OPERAND (bound, 0);
6924 else if (TREE_CODE (bound) == GT_EXPR)
6925 a = TREE_OPERAND (bound, 1);
6926 else
6927 return NULL_TREE;
6929 typea = TREE_TYPE (a);
6930 if (!INTEGRAL_TYPE_P (typea)
6931 && !POINTER_TYPE_P (typea))
6932 return NULL_TREE;
6934 if (TREE_CODE (ineq) == LT_EXPR)
6936 a1 = TREE_OPERAND (ineq, 1);
6937 y = TREE_OPERAND (ineq, 0);
6939 else if (TREE_CODE (ineq) == GT_EXPR)
6941 a1 = TREE_OPERAND (ineq, 0);
6942 y = TREE_OPERAND (ineq, 1);
6944 else
6945 return NULL_TREE;
6947 if (TREE_TYPE (a1) != typea)
6948 return NULL_TREE;
6950 if (POINTER_TYPE_P (typea))
6952 /* Convert the pointer types into integer before taking the difference. */
6953 tree ta = fold_convert_loc (loc, ssizetype, a);
6954 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6955 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6957 else
6958 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6960 if (!diff || !integer_onep (diff))
6961 return NULL_TREE;
6963 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6966 /* Fold a sum or difference of at least one multiplication.
6967 Returns the folded tree or NULL if no simplification could be made. */
6969 static tree
6970 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6971 tree arg0, tree arg1)
6973 tree arg00, arg01, arg10, arg11;
6974 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6976 /* (A * C) +- (B * C) -> (A+-B) * C.
6977 (A * C) +- A -> A * (C+-1).
6978 We are most concerned about the case where C is a constant,
6979 but other combinations show up during loop reduction. Since
6980 it is not difficult, try all four possibilities. */
6982 if (TREE_CODE (arg0) == MULT_EXPR)
6984 arg00 = TREE_OPERAND (arg0, 0);
6985 arg01 = TREE_OPERAND (arg0, 1);
6987 else if (TREE_CODE (arg0) == INTEGER_CST)
6989 arg00 = build_one_cst (type);
6990 arg01 = arg0;
6992 else
6994 /* We cannot generate constant 1 for fract. */
6995 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6996 return NULL_TREE;
6997 arg00 = arg0;
6998 arg01 = build_one_cst (type);
7000 if (TREE_CODE (arg1) == MULT_EXPR)
7002 arg10 = TREE_OPERAND (arg1, 0);
7003 arg11 = TREE_OPERAND (arg1, 1);
7005 else if (TREE_CODE (arg1) == INTEGER_CST)
7007 arg10 = build_one_cst (type);
7008 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7009 the purpose of this canonicalization. */
7010 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7011 && negate_expr_p (arg1)
7012 && code == PLUS_EXPR)
7014 arg11 = negate_expr (arg1);
7015 code = MINUS_EXPR;
7017 else
7018 arg11 = arg1;
7020 else
7022 /* We cannot generate constant 1 for fract. */
7023 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7024 return NULL_TREE;
7025 arg10 = arg1;
7026 arg11 = build_one_cst (type);
7028 same = NULL_TREE;
7030 if (operand_equal_p (arg01, arg11, 0))
7031 same = arg01, alt0 = arg00, alt1 = arg10;
7032 else if (operand_equal_p (arg00, arg10, 0))
7033 same = arg00, alt0 = arg01, alt1 = arg11;
7034 else if (operand_equal_p (arg00, arg11, 0))
7035 same = arg00, alt0 = arg01, alt1 = arg10;
7036 else if (operand_equal_p (arg01, arg10, 0))
7037 same = arg01, alt0 = arg00, alt1 = arg11;
7039 /* No identical multiplicands; see if we can find a common
7040 power-of-two factor in non-power-of-two multiplies. This
7041 can help in multi-dimensional array access. */
7042 else if (tree_fits_shwi_p (arg01)
7043 && tree_fits_shwi_p (arg11))
7045 HOST_WIDE_INT int01, int11, tmp;
7046 bool swap = false;
7047 tree maybe_same;
7048 int01 = tree_to_shwi (arg01);
7049 int11 = tree_to_shwi (arg11);
7051 /* Move min of absolute values to int11. */
7052 if (absu_hwi (int01) < absu_hwi (int11))
7054 tmp = int01, int01 = int11, int11 = tmp;
7055 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7056 maybe_same = arg01;
7057 swap = true;
7059 else
7060 maybe_same = arg11;
7062 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7063 /* The remainder should not be a constant, otherwise we
7064 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7065 increased the number of multiplications necessary. */
7066 && TREE_CODE (arg10) != INTEGER_CST)
7068 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7069 build_int_cst (TREE_TYPE (arg00),
7070 int01 / int11));
7071 alt1 = arg10;
7072 same = maybe_same;
7073 if (swap)
7074 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7078 if (same)
7079 return fold_build2_loc (loc, MULT_EXPR, type,
7080 fold_build2_loc (loc, code, type,
7081 fold_convert_loc (loc, type, alt0),
7082 fold_convert_loc (loc, type, alt1)),
7083 fold_convert_loc (loc, type, same));
7085 return NULL_TREE;
7088 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7089 specified by EXPR into the buffer PTR of length LEN bytes.
7090 Return the number of bytes placed in the buffer, or zero
7091 upon failure. */
7093 static int
7094 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7096 tree type = TREE_TYPE (expr);
7097 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7098 int byte, offset, word, words;
7099 unsigned char value;
7101 if ((off == -1 && total_bytes > len)
7102 || off >= total_bytes)
7103 return 0;
7104 if (off == -1)
7105 off = 0;
7106 words = total_bytes / UNITS_PER_WORD;
7108 for (byte = 0; byte < total_bytes; byte++)
7110 int bitpos = byte * BITS_PER_UNIT;
7111 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7112 number of bytes. */
7113 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7115 if (total_bytes > UNITS_PER_WORD)
7117 word = byte / UNITS_PER_WORD;
7118 if (WORDS_BIG_ENDIAN)
7119 word = (words - 1) - word;
7120 offset = word * UNITS_PER_WORD;
7121 if (BYTES_BIG_ENDIAN)
7122 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7123 else
7124 offset += byte % UNITS_PER_WORD;
7126 else
7127 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7128 if (offset >= off
7129 && offset - off < len)
7130 ptr[offset - off] = value;
7132 return MIN (len, total_bytes - off);
7136 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7137 specified by EXPR into the buffer PTR of length LEN bytes.
7138 Return the number of bytes placed in the buffer, or zero
7139 upon failure. */
7141 static int
7142 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7144 tree type = TREE_TYPE (expr);
7145 machine_mode mode = TYPE_MODE (type);
7146 int total_bytes = GET_MODE_SIZE (mode);
7147 FIXED_VALUE_TYPE value;
7148 tree i_value, i_type;
7150 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7151 return 0;
7153 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7155 if (NULL_TREE == i_type
7156 || TYPE_PRECISION (i_type) != total_bytes)
7157 return 0;
7159 value = TREE_FIXED_CST (expr);
7160 i_value = double_int_to_tree (i_type, value.data);
7162 return native_encode_int (i_value, ptr, len, off);
7166 /* Subroutine of native_encode_expr. Encode the REAL_CST
7167 specified by EXPR into the buffer PTR of length LEN bytes.
7168 Return the number of bytes placed in the buffer, or zero
7169 upon failure. */
7171 static int
7172 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7174 tree type = TREE_TYPE (expr);
7175 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7176 int byte, offset, word, words, bitpos;
7177 unsigned char value;
7179 /* There are always 32 bits in each long, no matter the size of
7180 the hosts long. We handle floating point representations with
7181 up to 192 bits. */
7182 long tmp[6];
7184 if ((off == -1 && total_bytes > len)
7185 || off >= total_bytes)
7186 return 0;
7187 if (off == -1)
7188 off = 0;
7189 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7191 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7193 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7194 bitpos += BITS_PER_UNIT)
7196 byte = (bitpos / BITS_PER_UNIT) & 3;
7197 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7199 if (UNITS_PER_WORD < 4)
7201 word = byte / UNITS_PER_WORD;
7202 if (WORDS_BIG_ENDIAN)
7203 word = (words - 1) - word;
7204 offset = word * UNITS_PER_WORD;
7205 if (BYTES_BIG_ENDIAN)
7206 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7207 else
7208 offset += byte % UNITS_PER_WORD;
7210 else
7211 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7212 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7213 if (offset >= off
7214 && offset - off < len)
7215 ptr[offset - off] = value;
7217 return MIN (len, total_bytes - off);
7220 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7221 specified by EXPR into the buffer PTR of length LEN bytes.
7222 Return the number of bytes placed in the buffer, or zero
7223 upon failure. */
7225 static int
7226 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7228 int rsize, isize;
7229 tree part;
7231 part = TREE_REALPART (expr);
7232 rsize = native_encode_expr (part, ptr, len, off);
7233 if (off == -1
7234 && rsize == 0)
7235 return 0;
7236 part = TREE_IMAGPART (expr);
7237 if (off != -1)
7238 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7239 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7240 if (off == -1
7241 && isize != rsize)
7242 return 0;
7243 return rsize + isize;
7247 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7248 specified by EXPR into the buffer PTR of length LEN bytes.
7249 Return the number of bytes placed in the buffer, or zero
7250 upon failure. */
7252 static int
7253 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7255 unsigned i, count;
7256 int size, offset;
7257 tree itype, elem;
7259 offset = 0;
7260 count = VECTOR_CST_NELTS (expr);
7261 itype = TREE_TYPE (TREE_TYPE (expr));
7262 size = GET_MODE_SIZE (TYPE_MODE (itype));
7263 for (i = 0; i < count; i++)
7265 if (off >= size)
7267 off -= size;
7268 continue;
7270 elem = VECTOR_CST_ELT (expr, i);
7271 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7272 if ((off == -1 && res != size)
7273 || res == 0)
7274 return 0;
7275 offset += res;
7276 if (offset >= len)
7277 return offset;
7278 if (off != -1)
7279 off = 0;
7281 return offset;
7285 /* Subroutine of native_encode_expr. Encode the STRING_CST
7286 specified by EXPR into the buffer PTR of length LEN bytes.
7287 Return the number of bytes placed in the buffer, or zero
7288 upon failure. */
7290 static int
7291 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7293 tree type = TREE_TYPE (expr);
7294 HOST_WIDE_INT total_bytes;
7296 if (TREE_CODE (type) != ARRAY_TYPE
7297 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7298 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7299 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7300 return 0;
7301 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7302 if ((off == -1 && total_bytes > len)
7303 || off >= total_bytes)
7304 return 0;
7305 if (off == -1)
7306 off = 0;
7307 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7309 int written = 0;
7310 if (off < TREE_STRING_LENGTH (expr))
7312 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7313 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7315 memset (ptr + written, 0,
7316 MIN (total_bytes - written, len - written));
7318 else
7319 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7320 return MIN (total_bytes - off, len);
7324 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7325 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7326 buffer PTR of length LEN bytes. If OFF is not -1 then start
7327 the encoding at byte offset OFF and encode at most LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero upon failure. */
7331 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7333 /* We don't support starting at negative offset and -1 is special. */
7334 if (off < -1)
7335 return 0;
7337 switch (TREE_CODE (expr))
7339 case INTEGER_CST:
7340 return native_encode_int (expr, ptr, len, off);
7342 case REAL_CST:
7343 return native_encode_real (expr, ptr, len, off);
7345 case FIXED_CST:
7346 return native_encode_fixed (expr, ptr, len, off);
7348 case COMPLEX_CST:
7349 return native_encode_complex (expr, ptr, len, off);
7351 case VECTOR_CST:
7352 return native_encode_vector (expr, ptr, len, off);
7354 case STRING_CST:
7355 return native_encode_string (expr, ptr, len, off);
7357 default:
7358 return 0;
7363 /* Subroutine of native_interpret_expr. Interpret the contents of
7364 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7365 If the buffer cannot be interpreted, return NULL_TREE. */
7367 static tree
7368 native_interpret_int (tree type, const unsigned char *ptr, int len)
7370 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7372 if (total_bytes > len
7373 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7374 return NULL_TREE;
7376 wide_int result = wi::from_buffer (ptr, total_bytes);
7378 return wide_int_to_tree (type, result);
7382 /* Subroutine of native_interpret_expr. Interpret the contents of
7383 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7384 If the buffer cannot be interpreted, return NULL_TREE. */
7386 static tree
7387 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7389 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7390 double_int result;
7391 FIXED_VALUE_TYPE fixed_value;
7393 if (total_bytes > len
7394 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7395 return NULL_TREE;
7397 result = double_int::from_buffer (ptr, total_bytes);
7398 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7400 return build_fixed (type, fixed_value);
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7408 static tree
7409 native_interpret_real (tree type, const unsigned char *ptr, int len)
7411 machine_mode mode = TYPE_MODE (type);
7412 int total_bytes = GET_MODE_SIZE (mode);
7413 unsigned char value;
7414 /* There are always 32 bits in each long, no matter the size of
7415 the hosts long. We handle floating point representations with
7416 up to 192 bits. */
7417 REAL_VALUE_TYPE r;
7418 long tmp[6];
7420 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7421 if (total_bytes > len || total_bytes > 24)
7422 return NULL_TREE;
7423 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7425 memset (tmp, 0, sizeof (tmp));
7426 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7427 bitpos += BITS_PER_UNIT)
7429 /* Both OFFSET and BYTE index within a long;
7430 bitpos indexes the whole float. */
7431 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7432 if (UNITS_PER_WORD < 4)
7434 int word = byte / UNITS_PER_WORD;
7435 if (WORDS_BIG_ENDIAN)
7436 word = (words - 1) - word;
7437 offset = word * UNITS_PER_WORD;
7438 if (BYTES_BIG_ENDIAN)
7439 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7440 else
7441 offset += byte % UNITS_PER_WORD;
7443 else
7445 offset = byte;
7446 if (BYTES_BIG_ENDIAN)
7448 /* Reverse bytes within each long, or within the entire float
7449 if it's smaller than a long (for HFmode). */
7450 offset = MIN (3, total_bytes - 1) - offset;
7451 gcc_assert (offset >= 0);
7454 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7456 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7459 real_from_target (&r, tmp, mode);
7460 return build_real (type, r);
7464 /* Subroutine of native_interpret_expr. Interpret the contents of
7465 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7466 If the buffer cannot be interpreted, return NULL_TREE. */
7468 static tree
7469 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7471 tree etype, rpart, ipart;
7472 int size;
7474 etype = TREE_TYPE (type);
7475 size = GET_MODE_SIZE (TYPE_MODE (etype));
7476 if (size * 2 > len)
7477 return NULL_TREE;
7478 rpart = native_interpret_expr (etype, ptr, size);
7479 if (!rpart)
7480 return NULL_TREE;
7481 ipart = native_interpret_expr (etype, ptr+size, size);
7482 if (!ipart)
7483 return NULL_TREE;
7484 return build_complex (type, rpart, ipart);
7488 /* Subroutine of native_interpret_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7490 If the buffer cannot be interpreted, return NULL_TREE. */
7492 static tree
7493 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7495 tree etype, elem;
7496 int i, size, count;
7497 tree *elements;
7499 etype = TREE_TYPE (type);
7500 size = GET_MODE_SIZE (TYPE_MODE (etype));
7501 count = TYPE_VECTOR_SUBPARTS (type);
7502 if (size * count > len)
7503 return NULL_TREE;
7505 elements = XALLOCAVEC (tree, count);
7506 for (i = count - 1; i >= 0; i--)
7508 elem = native_interpret_expr (etype, ptr+(i*size), size);
7509 if (!elem)
7510 return NULL_TREE;
7511 elements[i] = elem;
7513 return build_vector (type, elements);
7517 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7518 the buffer PTR of length LEN as a constant of type TYPE. For
7519 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7520 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7521 return NULL_TREE. */
7523 tree
7524 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7526 switch (TREE_CODE (type))
7528 case INTEGER_TYPE:
7529 case ENUMERAL_TYPE:
7530 case BOOLEAN_TYPE:
7531 case POINTER_TYPE:
7532 case REFERENCE_TYPE:
7533 return native_interpret_int (type, ptr, len);
7535 case REAL_TYPE:
7536 return native_interpret_real (type, ptr, len);
7538 case FIXED_POINT_TYPE:
7539 return native_interpret_fixed (type, ptr, len);
7541 case COMPLEX_TYPE:
7542 return native_interpret_complex (type, ptr, len);
7544 case VECTOR_TYPE:
7545 return native_interpret_vector (type, ptr, len);
7547 default:
7548 return NULL_TREE;
7552 /* Returns true if we can interpret the contents of a native encoding
7553 as TYPE. */
7555 static bool
7556 can_native_interpret_type_p (tree type)
7558 switch (TREE_CODE (type))
7560 case INTEGER_TYPE:
7561 case ENUMERAL_TYPE:
7562 case BOOLEAN_TYPE:
7563 case POINTER_TYPE:
7564 case REFERENCE_TYPE:
7565 case FIXED_POINT_TYPE:
7566 case REAL_TYPE:
7567 case COMPLEX_TYPE:
7568 case VECTOR_TYPE:
7569 return true;
7570 default:
7571 return false;
7575 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7576 TYPE at compile-time. If we're unable to perform the conversion
7577 return NULL_TREE. */
7579 static tree
7580 fold_view_convert_expr (tree type, tree expr)
7582 /* We support up to 512-bit values (for V8DFmode). */
7583 unsigned char buffer[64];
7584 int len;
7586 /* Check that the host and target are sane. */
7587 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7588 return NULL_TREE;
7590 len = native_encode_expr (expr, buffer, sizeof (buffer));
7591 if (len == 0)
7592 return NULL_TREE;
7594 return native_interpret_expr (type, buffer, len);
7597 /* Build an expression for the address of T. Folds away INDIRECT_REF
7598 to avoid confusing the gimplify process. */
7600 tree
7601 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7603 /* The size of the object is not relevant when talking about its address. */
7604 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7605 t = TREE_OPERAND (t, 0);
7607 if (TREE_CODE (t) == INDIRECT_REF)
7609 t = TREE_OPERAND (t, 0);
7611 if (TREE_TYPE (t) != ptrtype)
7612 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7614 else if (TREE_CODE (t) == MEM_REF
7615 && integer_zerop (TREE_OPERAND (t, 1)))
7616 return TREE_OPERAND (t, 0);
7617 else if (TREE_CODE (t) == MEM_REF
7618 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7619 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7620 TREE_OPERAND (t, 0),
7621 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7622 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7624 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7626 if (TREE_TYPE (t) != ptrtype)
7627 t = fold_convert_loc (loc, ptrtype, t);
7629 else
7630 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7632 return t;
7635 /* Build an expression for the address of T. */
7637 tree
7638 build_fold_addr_expr_loc (location_t loc, tree t)
7640 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7642 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7645 /* Fold a unary expression of code CODE and type TYPE with operand
7646 OP0. Return the folded expression if folding is successful.
7647 Otherwise, return NULL_TREE. */
7649 tree
7650 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7652 tree tem;
7653 tree arg0;
7654 enum tree_code_class kind = TREE_CODE_CLASS (code);
7656 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7657 && TREE_CODE_LENGTH (code) == 1);
7659 arg0 = op0;
7660 if (arg0)
7662 if (CONVERT_EXPR_CODE_P (code)
7663 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7665 /* Don't use STRIP_NOPS, because signedness of argument type
7666 matters. */
7667 STRIP_SIGN_NOPS (arg0);
7669 else
7671 /* Strip any conversions that don't change the mode. This
7672 is safe for every expression, except for a comparison
7673 expression because its signedness is derived from its
7674 operands.
7676 Note that this is done as an internal manipulation within
7677 the constant folder, in order to find the simplest
7678 representation of the arguments so that their form can be
7679 studied. In any cases, the appropriate type conversions
7680 should be put back in the tree that will get out of the
7681 constant folder. */
7682 STRIP_NOPS (arg0);
7685 if (CONSTANT_CLASS_P (arg0))
7687 tree tem = const_unop (code, type, arg0);
7688 if (tem)
7690 if (TREE_TYPE (tem) != type)
7691 tem = fold_convert_loc (loc, type, tem);
7692 return tem;
7697 tem = generic_simplify (loc, code, type, op0);
7698 if (tem)
7699 return tem;
7701 if (TREE_CODE_CLASS (code) == tcc_unary)
7703 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7704 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7705 fold_build1_loc (loc, code, type,
7706 fold_convert_loc (loc, TREE_TYPE (op0),
7707 TREE_OPERAND (arg0, 1))));
7708 else if (TREE_CODE (arg0) == COND_EXPR)
7710 tree arg01 = TREE_OPERAND (arg0, 1);
7711 tree arg02 = TREE_OPERAND (arg0, 2);
7712 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7713 arg01 = fold_build1_loc (loc, code, type,
7714 fold_convert_loc (loc,
7715 TREE_TYPE (op0), arg01));
7716 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7717 arg02 = fold_build1_loc (loc, code, type,
7718 fold_convert_loc (loc,
7719 TREE_TYPE (op0), arg02));
7720 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7721 arg01, arg02);
7723 /* If this was a conversion, and all we did was to move into
7724 inside the COND_EXPR, bring it back out. But leave it if
7725 it is a conversion from integer to integer and the
7726 result precision is no wider than a word since such a
7727 conversion is cheap and may be optimized away by combine,
7728 while it couldn't if it were outside the COND_EXPR. Then return
7729 so we don't get into an infinite recursion loop taking the
7730 conversion out and then back in. */
7732 if ((CONVERT_EXPR_CODE_P (code)
7733 || code == NON_LVALUE_EXPR)
7734 && TREE_CODE (tem) == COND_EXPR
7735 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7736 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7737 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7738 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7739 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7740 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7741 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7742 && (INTEGRAL_TYPE_P
7743 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7744 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7745 || flag_syntax_only))
7746 tem = build1_loc (loc, code, type,
7747 build3 (COND_EXPR,
7748 TREE_TYPE (TREE_OPERAND
7749 (TREE_OPERAND (tem, 1), 0)),
7750 TREE_OPERAND (tem, 0),
7751 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7752 TREE_OPERAND (TREE_OPERAND (tem, 2),
7753 0)));
7754 return tem;
7758 switch (code)
7760 case NON_LVALUE_EXPR:
7761 if (!maybe_lvalue_p (op0))
7762 return fold_convert_loc (loc, type, op0);
7763 return NULL_TREE;
7765 CASE_CONVERT:
7766 case FLOAT_EXPR:
7767 case FIX_TRUNC_EXPR:
7768 if (COMPARISON_CLASS_P (op0))
7770 /* If we have (type) (a CMP b) and type is an integral type, return
7771 new expression involving the new type. Canonicalize
7772 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7773 non-integral type.
7774 Do not fold the result as that would not simplify further, also
7775 folding again results in recursions. */
7776 if (TREE_CODE (type) == BOOLEAN_TYPE)
7777 return build2_loc (loc, TREE_CODE (op0), type,
7778 TREE_OPERAND (op0, 0),
7779 TREE_OPERAND (op0, 1));
7780 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7781 && TREE_CODE (type) != VECTOR_TYPE)
7782 return build3_loc (loc, COND_EXPR, type, op0,
7783 constant_boolean_node (true, type),
7784 constant_boolean_node (false, type));
7787 /* Handle (T *)&A.B.C for A being of type T and B and C
7788 living at offset zero. This occurs frequently in
7789 C++ upcasting and then accessing the base. */
7790 if (TREE_CODE (op0) == ADDR_EXPR
7791 && POINTER_TYPE_P (type)
7792 && handled_component_p (TREE_OPERAND (op0, 0)))
7794 HOST_WIDE_INT bitsize, bitpos;
7795 tree offset;
7796 machine_mode mode;
7797 int unsignedp, reversep, volatilep;
7798 tree base
7799 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7800 &offset, &mode, &unsignedp, &reversep,
7801 &volatilep, false);
7802 /* If the reference was to a (constant) zero offset, we can use
7803 the address of the base if it has the same base type
7804 as the result type and the pointer type is unqualified. */
7805 if (! offset && bitpos == 0
7806 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7807 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7808 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7809 return fold_convert_loc (loc, type,
7810 build_fold_addr_expr_loc (loc, base));
7813 if (TREE_CODE (op0) == MODIFY_EXPR
7814 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7815 /* Detect assigning a bitfield. */
7816 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7817 && DECL_BIT_FIELD
7818 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7820 /* Don't leave an assignment inside a conversion
7821 unless assigning a bitfield. */
7822 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7823 /* First do the assignment, then return converted constant. */
7824 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7825 TREE_NO_WARNING (tem) = 1;
7826 TREE_USED (tem) = 1;
7827 return tem;
7830 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7831 constants (if x has signed type, the sign bit cannot be set
7832 in c). This folds extension into the BIT_AND_EXPR.
7833 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7834 very likely don't have maximal range for their precision and this
7835 transformation effectively doesn't preserve non-maximal ranges. */
7836 if (TREE_CODE (type) == INTEGER_TYPE
7837 && TREE_CODE (op0) == BIT_AND_EXPR
7838 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7840 tree and_expr = op0;
7841 tree and0 = TREE_OPERAND (and_expr, 0);
7842 tree and1 = TREE_OPERAND (and_expr, 1);
7843 int change = 0;
7845 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7846 || (TYPE_PRECISION (type)
7847 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7848 change = 1;
7849 else if (TYPE_PRECISION (TREE_TYPE (and1))
7850 <= HOST_BITS_PER_WIDE_INT
7851 && tree_fits_uhwi_p (and1))
7853 unsigned HOST_WIDE_INT cst;
7855 cst = tree_to_uhwi (and1);
7856 cst &= HOST_WIDE_INT_M1U
7857 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7858 change = (cst == 0);
7859 if (change
7860 && !flag_syntax_only
7861 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7862 == ZERO_EXTEND))
7864 tree uns = unsigned_type_for (TREE_TYPE (and0));
7865 and0 = fold_convert_loc (loc, uns, and0);
7866 and1 = fold_convert_loc (loc, uns, and1);
7869 if (change)
7871 tem = force_fit_type (type, wi::to_widest (and1), 0,
7872 TREE_OVERFLOW (and1));
7873 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7874 fold_convert_loc (loc, type, and0), tem);
7878 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7879 cast (T1)X will fold away. We assume that this happens when X itself
7880 is a cast. */
7881 if (POINTER_TYPE_P (type)
7882 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7883 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7885 tree arg00 = TREE_OPERAND (arg0, 0);
7886 tree arg01 = TREE_OPERAND (arg0, 1);
7888 return fold_build_pointer_plus_loc
7889 (loc, fold_convert_loc (loc, type, arg00), arg01);
7892 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7893 of the same precision, and X is an integer type not narrower than
7894 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7895 if (INTEGRAL_TYPE_P (type)
7896 && TREE_CODE (op0) == BIT_NOT_EXPR
7897 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7898 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7899 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7901 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7902 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7903 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7904 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7905 fold_convert_loc (loc, type, tem));
7908 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7909 type of X and Y (integer types only). */
7910 if (INTEGRAL_TYPE_P (type)
7911 && TREE_CODE (op0) == MULT_EXPR
7912 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7913 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7915 /* Be careful not to introduce new overflows. */
7916 tree mult_type;
7917 if (TYPE_OVERFLOW_WRAPS (type))
7918 mult_type = type;
7919 else
7920 mult_type = unsigned_type_for (type);
7922 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7924 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7925 fold_convert_loc (loc, mult_type,
7926 TREE_OPERAND (op0, 0)),
7927 fold_convert_loc (loc, mult_type,
7928 TREE_OPERAND (op0, 1)));
7929 return fold_convert_loc (loc, type, tem);
7933 return NULL_TREE;
7935 case VIEW_CONVERT_EXPR:
7936 if (TREE_CODE (op0) == MEM_REF)
7938 tem = fold_build2_loc (loc, MEM_REF, type,
7939 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7940 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7941 return tem;
7944 return NULL_TREE;
7946 case NEGATE_EXPR:
7947 tem = fold_negate_expr (loc, arg0);
7948 if (tem)
7949 return fold_convert_loc (loc, type, tem);
7950 return NULL_TREE;
7952 case ABS_EXPR:
7953 /* Convert fabs((double)float) into (double)fabsf(float). */
7954 if (TREE_CODE (arg0) == NOP_EXPR
7955 && TREE_CODE (type) == REAL_TYPE)
7957 tree targ0 = strip_float_extensions (arg0);
7958 if (targ0 != arg0)
7959 return fold_convert_loc (loc, type,
7960 fold_build1_loc (loc, ABS_EXPR,
7961 TREE_TYPE (targ0),
7962 targ0));
7964 return NULL_TREE;
7966 case BIT_NOT_EXPR:
7967 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7968 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7969 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7970 fold_convert_loc (loc, type,
7971 TREE_OPERAND (arg0, 0)))))
7972 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7973 fold_convert_loc (loc, type,
7974 TREE_OPERAND (arg0, 1)));
7975 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7976 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7977 fold_convert_loc (loc, type,
7978 TREE_OPERAND (arg0, 1)))))
7979 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7980 fold_convert_loc (loc, type,
7981 TREE_OPERAND (arg0, 0)), tem);
7983 return NULL_TREE;
7985 case TRUTH_NOT_EXPR:
7986 /* Note that the operand of this must be an int
7987 and its values must be 0 or 1.
7988 ("true" is a fixed value perhaps depending on the language,
7989 but we don't handle values other than 1 correctly yet.) */
7990 tem = fold_truth_not_expr (loc, arg0);
7991 if (!tem)
7992 return NULL_TREE;
7993 return fold_convert_loc (loc, type, tem);
7995 case INDIRECT_REF:
7996 /* Fold *&X to X if X is an lvalue. */
7997 if (TREE_CODE (op0) == ADDR_EXPR)
7999 tree op00 = TREE_OPERAND (op0, 0);
8000 if ((TREE_CODE (op00) == VAR_DECL
8001 || TREE_CODE (op00) == PARM_DECL
8002 || TREE_CODE (op00) == RESULT_DECL)
8003 && !TREE_READONLY (op00))
8004 return op00;
8006 return NULL_TREE;
8008 default:
8009 return NULL_TREE;
8010 } /* switch (code) */
8014 /* If the operation was a conversion do _not_ mark a resulting constant
8015 with TREE_OVERFLOW if the original constant was not. These conversions
8016 have implementation defined behavior and retaining the TREE_OVERFLOW
8017 flag here would confuse later passes such as VRP. */
8018 tree
8019 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8020 tree type, tree op0)
8022 tree res = fold_unary_loc (loc, code, type, op0);
8023 if (res
8024 && TREE_CODE (res) == INTEGER_CST
8025 && TREE_CODE (op0) == INTEGER_CST
8026 && CONVERT_EXPR_CODE_P (code))
8027 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8029 return res;
8032 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8033 operands OP0 and OP1. LOC is the location of the resulting expression.
8034 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8035 Return the folded expression if folding is successful. Otherwise,
8036 return NULL_TREE. */
8037 static tree
8038 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8039 tree arg0, tree arg1, tree op0, tree op1)
8041 tree tem;
8043 /* We only do these simplifications if we are optimizing. */
8044 if (!optimize)
8045 return NULL_TREE;
8047 /* Check for things like (A || B) && (A || C). We can convert this
8048 to A || (B && C). Note that either operator can be any of the four
8049 truth and/or operations and the transformation will still be
8050 valid. Also note that we only care about order for the
8051 ANDIF and ORIF operators. If B contains side effects, this
8052 might change the truth-value of A. */
8053 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8054 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8055 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8056 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8057 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8058 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8060 tree a00 = TREE_OPERAND (arg0, 0);
8061 tree a01 = TREE_OPERAND (arg0, 1);
8062 tree a10 = TREE_OPERAND (arg1, 0);
8063 tree a11 = TREE_OPERAND (arg1, 1);
8064 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8065 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8066 && (code == TRUTH_AND_EXPR
8067 || code == TRUTH_OR_EXPR));
8069 if (operand_equal_p (a00, a10, 0))
8070 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8071 fold_build2_loc (loc, code, type, a01, a11));
8072 else if (commutative && operand_equal_p (a00, a11, 0))
8073 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8074 fold_build2_loc (loc, code, type, a01, a10));
8075 else if (commutative && operand_equal_p (a01, a10, 0))
8076 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8077 fold_build2_loc (loc, code, type, a00, a11));
8079 /* This case if tricky because we must either have commutative
8080 operators or else A10 must not have side-effects. */
8082 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8083 && operand_equal_p (a01, a11, 0))
8084 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8085 fold_build2_loc (loc, code, type, a00, a10),
8086 a01);
8089 /* See if we can build a range comparison. */
8090 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8091 return tem;
8093 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8094 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8096 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8097 if (tem)
8098 return fold_build2_loc (loc, code, type, tem, arg1);
8101 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8102 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8104 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8105 if (tem)
8106 return fold_build2_loc (loc, code, type, arg0, tem);
8109 /* Check for the possibility of merging component references. If our
8110 lhs is another similar operation, try to merge its rhs with our
8111 rhs. Then try to merge our lhs and rhs. */
8112 if (TREE_CODE (arg0) == code
8113 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8114 TREE_OPERAND (arg0, 1), arg1)))
8115 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8117 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8118 return tem;
8120 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8121 && (code == TRUTH_AND_EXPR
8122 || code == TRUTH_ANDIF_EXPR
8123 || code == TRUTH_OR_EXPR
8124 || code == TRUTH_ORIF_EXPR))
8126 enum tree_code ncode, icode;
8128 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8129 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8130 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8132 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8133 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8134 We don't want to pack more than two leafs to a non-IF AND/OR
8135 expression.
8136 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8137 equal to IF-CODE, then we don't want to add right-hand operand.
8138 If the inner right-hand side of left-hand operand has
8139 side-effects, or isn't simple, then we can't add to it,
8140 as otherwise we might destroy if-sequence. */
8141 if (TREE_CODE (arg0) == icode
8142 && simple_operand_p_2 (arg1)
8143 /* Needed for sequence points to handle trappings, and
8144 side-effects. */
8145 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8147 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8148 arg1);
8149 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8150 tem);
8152 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8153 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8154 else if (TREE_CODE (arg1) == icode
8155 && simple_operand_p_2 (arg0)
8156 /* Needed for sequence points to handle trappings, and
8157 side-effects. */
8158 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8160 tem = fold_build2_loc (loc, ncode, type,
8161 arg0, TREE_OPERAND (arg1, 0));
8162 return fold_build2_loc (loc, icode, type, tem,
8163 TREE_OPERAND (arg1, 1));
8165 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8166 into (A OR B).
8167 For sequence point consistancy, we need to check for trapping,
8168 and side-effects. */
8169 else if (code == icode && simple_operand_p_2 (arg0)
8170 && simple_operand_p_2 (arg1))
8171 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8174 return NULL_TREE;
8177 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8178 by changing CODE to reduce the magnitude of constants involved in
8179 ARG0 of the comparison.
8180 Returns a canonicalized comparison tree if a simplification was
8181 possible, otherwise returns NULL_TREE.
8182 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8183 valid if signed overflow is undefined. */
8185 static tree
8186 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8187 tree arg0, tree arg1,
8188 bool *strict_overflow_p)
8190 enum tree_code code0 = TREE_CODE (arg0);
8191 tree t, cst0 = NULL_TREE;
8192 int sgn0;
8194 /* Match A +- CST code arg1. We can change this only if overflow
8195 is undefined. */
8196 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8197 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8198 /* In principle pointers also have undefined overflow behavior,
8199 but that causes problems elsewhere. */
8200 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8201 && (code0 == MINUS_EXPR
8202 || code0 == PLUS_EXPR)
8203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8204 return NULL_TREE;
8206 /* Identify the constant in arg0 and its sign. */
8207 cst0 = TREE_OPERAND (arg0, 1);
8208 sgn0 = tree_int_cst_sgn (cst0);
8210 /* Overflowed constants and zero will cause problems. */
8211 if (integer_zerop (cst0)
8212 || TREE_OVERFLOW (cst0))
8213 return NULL_TREE;
8215 /* See if we can reduce the magnitude of the constant in
8216 arg0 by changing the comparison code. */
8217 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8218 if (code == LT_EXPR
8219 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8220 code = LE_EXPR;
8221 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8222 else if (code == GT_EXPR
8223 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8224 code = GE_EXPR;
8225 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8226 else if (code == LE_EXPR
8227 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8228 code = LT_EXPR;
8229 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8230 else if (code == GE_EXPR
8231 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8232 code = GT_EXPR;
8233 else
8234 return NULL_TREE;
8235 *strict_overflow_p = true;
8237 /* Now build the constant reduced in magnitude. But not if that
8238 would produce one outside of its types range. */
8239 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8240 && ((sgn0 == 1
8241 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8242 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8243 || (sgn0 == -1
8244 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8245 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8246 return NULL_TREE;
8248 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8249 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8250 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8251 t = fold_convert (TREE_TYPE (arg1), t);
8253 return fold_build2_loc (loc, code, type, t, arg1);
8256 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8257 overflow further. Try to decrease the magnitude of constants involved
8258 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8259 and put sole constants at the second argument position.
8260 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8262 static tree
8263 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8264 tree arg0, tree arg1)
8266 tree t;
8267 bool strict_overflow_p;
8268 const char * const warnmsg = G_("assuming signed overflow does not occur "
8269 "when reducing constant in comparison");
8271 /* Try canonicalization by simplifying arg0. */
8272 strict_overflow_p = false;
8273 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8274 &strict_overflow_p);
8275 if (t)
8277 if (strict_overflow_p)
8278 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8279 return t;
8282 /* Try canonicalization by simplifying arg1 using the swapped
8283 comparison. */
8284 code = swap_tree_comparison (code);
8285 strict_overflow_p = false;
8286 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8287 &strict_overflow_p);
8288 if (t && strict_overflow_p)
8289 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8290 return t;
8293 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8294 space. This is used to avoid issuing overflow warnings for
8295 expressions like &p->x which can not wrap. */
8297 static bool
8298 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8300 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8301 return true;
8303 if (bitpos < 0)
8304 return true;
8306 wide_int wi_offset;
8307 int precision = TYPE_PRECISION (TREE_TYPE (base));
8308 if (offset == NULL_TREE)
8309 wi_offset = wi::zero (precision);
8310 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8311 return true;
8312 else
8313 wi_offset = offset;
8315 bool overflow;
8316 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8317 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8318 if (overflow)
8319 return true;
8321 if (!wi::fits_uhwi_p (total))
8322 return true;
8324 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8325 if (size <= 0)
8326 return true;
8328 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8329 array. */
8330 if (TREE_CODE (base) == ADDR_EXPR)
8332 HOST_WIDE_INT base_size;
8334 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8335 if (base_size > 0 && size < base_size)
8336 size = base_size;
8339 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8342 /* Return a positive integer when the symbol DECL is known to have
8343 a nonzero address, zero when it's known not to (e.g., it's a weak
8344 symbol), and a negative integer when the symbol is not yet in the
8345 symbol table and so whether or not its address is zero is unknown. */
8346 static int
8347 maybe_nonzero_address (tree decl)
8349 if (DECL_P (decl) && decl_in_symtab_p (decl))
8350 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8351 return symbol->nonzero_address ();
8353 return -1;
8356 /* Subroutine of fold_binary. This routine performs all of the
8357 transformations that are common to the equality/inequality
8358 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8359 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8360 fold_binary should call fold_binary. Fold a comparison with
8361 tree code CODE and type TYPE with operands OP0 and OP1. Return
8362 the folded comparison or NULL_TREE. */
8364 static tree
8365 fold_comparison (location_t loc, enum tree_code code, tree type,
8366 tree op0, tree op1)
8368 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8369 tree arg0, arg1, tem;
8371 arg0 = op0;
8372 arg1 = op1;
8374 STRIP_SIGN_NOPS (arg0);
8375 STRIP_SIGN_NOPS (arg1);
8377 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8378 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8379 && (equality_code
8380 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8381 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8383 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8384 && TREE_CODE (arg1) == INTEGER_CST
8385 && !TREE_OVERFLOW (arg1))
8387 const enum tree_code
8388 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8389 tree const1 = TREE_OPERAND (arg0, 1);
8390 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8391 tree variable = TREE_OPERAND (arg0, 0);
8392 tree new_const = int_const_binop (reverse_op, const2, const1);
8394 /* If the constant operation overflowed this can be
8395 simplified as a comparison against INT_MAX/INT_MIN. */
8396 if (TREE_OVERFLOW (new_const)
8397 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8399 int const1_sgn = tree_int_cst_sgn (const1);
8400 enum tree_code code2 = code;
8402 /* Get the sign of the constant on the lhs if the
8403 operation were VARIABLE + CONST1. */
8404 if (TREE_CODE (arg0) == MINUS_EXPR)
8405 const1_sgn = -const1_sgn;
8407 /* The sign of the constant determines if we overflowed
8408 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8409 Canonicalize to the INT_MIN overflow by swapping the comparison
8410 if necessary. */
8411 if (const1_sgn == -1)
8412 code2 = swap_tree_comparison (code);
8414 /* We now can look at the canonicalized case
8415 VARIABLE + 1 CODE2 INT_MIN
8416 and decide on the result. */
8417 switch (code2)
8419 case EQ_EXPR:
8420 case LT_EXPR:
8421 case LE_EXPR:
8422 return
8423 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8425 case NE_EXPR:
8426 case GE_EXPR:
8427 case GT_EXPR:
8428 return
8429 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8431 default:
8432 gcc_unreachable ();
8435 else
8437 if (!equality_code)
8438 fold_overflow_warning ("assuming signed overflow does not occur "
8439 "when changing X +- C1 cmp C2 to "
8440 "X cmp C2 -+ C1",
8441 WARN_STRICT_OVERFLOW_COMPARISON);
8442 return fold_build2_loc (loc, code, type, variable, new_const);
8446 /* For comparisons of pointers we can decompose it to a compile time
8447 comparison of the base objects and the offsets into the object.
8448 This requires at least one operand being an ADDR_EXPR or a
8449 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8450 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8451 && (TREE_CODE (arg0) == ADDR_EXPR
8452 || TREE_CODE (arg1) == ADDR_EXPR
8453 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8454 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8456 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8457 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8458 machine_mode mode;
8459 int volatilep, reversep, unsignedp;
8460 bool indirect_base0 = false, indirect_base1 = false;
8462 /* Get base and offset for the access. Strip ADDR_EXPR for
8463 get_inner_reference, but put it back by stripping INDIRECT_REF
8464 off the base object if possible. indirect_baseN will be true
8465 if baseN is not an address but refers to the object itself. */
8466 base0 = arg0;
8467 if (TREE_CODE (arg0) == ADDR_EXPR)
8469 base0
8470 = get_inner_reference (TREE_OPERAND (arg0, 0),
8471 &bitsize, &bitpos0, &offset0, &mode,
8472 &unsignedp, &reversep, &volatilep, false);
8473 if (TREE_CODE (base0) == INDIRECT_REF)
8474 base0 = TREE_OPERAND (base0, 0);
8475 else
8476 indirect_base0 = true;
8478 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8480 base0 = TREE_OPERAND (arg0, 0);
8481 STRIP_SIGN_NOPS (base0);
8482 if (TREE_CODE (base0) == ADDR_EXPR)
8484 base0
8485 = get_inner_reference (TREE_OPERAND (base0, 0),
8486 &bitsize, &bitpos0, &offset0, &mode,
8487 &unsignedp, &reversep, &volatilep,
8488 false);
8489 if (TREE_CODE (base0) == INDIRECT_REF)
8490 base0 = TREE_OPERAND (base0, 0);
8491 else
8492 indirect_base0 = true;
8494 if (offset0 == NULL_TREE || integer_zerop (offset0))
8495 offset0 = TREE_OPERAND (arg0, 1);
8496 else
8497 offset0 = size_binop (PLUS_EXPR, offset0,
8498 TREE_OPERAND (arg0, 1));
8499 if (TREE_CODE (offset0) == INTEGER_CST)
8501 offset_int tem = wi::sext (wi::to_offset (offset0),
8502 TYPE_PRECISION (sizetype));
8503 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8504 tem += bitpos0;
8505 if (wi::fits_shwi_p (tem))
8507 bitpos0 = tem.to_shwi ();
8508 offset0 = NULL_TREE;
8513 base1 = arg1;
8514 if (TREE_CODE (arg1) == ADDR_EXPR)
8516 base1
8517 = get_inner_reference (TREE_OPERAND (arg1, 0),
8518 &bitsize, &bitpos1, &offset1, &mode,
8519 &unsignedp, &reversep, &volatilep, false);
8520 if (TREE_CODE (base1) == INDIRECT_REF)
8521 base1 = TREE_OPERAND (base1, 0);
8522 else
8523 indirect_base1 = true;
8525 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8527 base1 = TREE_OPERAND (arg1, 0);
8528 STRIP_SIGN_NOPS (base1);
8529 if (TREE_CODE (base1) == ADDR_EXPR)
8531 base1
8532 = get_inner_reference (TREE_OPERAND (base1, 0),
8533 &bitsize, &bitpos1, &offset1, &mode,
8534 &unsignedp, &reversep, &volatilep,
8535 false);
8536 if (TREE_CODE (base1) == INDIRECT_REF)
8537 base1 = TREE_OPERAND (base1, 0);
8538 else
8539 indirect_base1 = true;
8541 if (offset1 == NULL_TREE || integer_zerop (offset1))
8542 offset1 = TREE_OPERAND (arg1, 1);
8543 else
8544 offset1 = size_binop (PLUS_EXPR, offset1,
8545 TREE_OPERAND (arg1, 1));
8546 if (TREE_CODE (offset1) == INTEGER_CST)
8548 offset_int tem = wi::sext (wi::to_offset (offset1),
8549 TYPE_PRECISION (sizetype));
8550 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8551 tem += bitpos1;
8552 if (wi::fits_shwi_p (tem))
8554 bitpos1 = tem.to_shwi ();
8555 offset1 = NULL_TREE;
8560 /* If we have equivalent bases we might be able to simplify. */
8561 if (indirect_base0 == indirect_base1
8562 && operand_equal_p (base0, base1,
8563 indirect_base0 ? OEP_ADDRESS_OF : 0))
8565 /* We can fold this expression to a constant if the non-constant
8566 offset parts are equal. */
8567 if ((offset0 == offset1
8568 || (offset0 && offset1
8569 && operand_equal_p (offset0, offset1, 0)))
8570 && (code == EQ_EXPR
8571 || code == NE_EXPR
8572 || (indirect_base0 && DECL_P (base0))
8573 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8576 if (!equality_code
8577 && bitpos0 != bitpos1
8578 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8579 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8580 fold_overflow_warning (("assuming pointer wraparound does not "
8581 "occur when comparing P +- C1 with "
8582 "P +- C2"),
8583 WARN_STRICT_OVERFLOW_CONDITIONAL);
8585 switch (code)
8587 case EQ_EXPR:
8588 return constant_boolean_node (bitpos0 == bitpos1, type);
8589 case NE_EXPR:
8590 return constant_boolean_node (bitpos0 != bitpos1, type);
8591 case LT_EXPR:
8592 return constant_boolean_node (bitpos0 < bitpos1, type);
8593 case LE_EXPR:
8594 return constant_boolean_node (bitpos0 <= bitpos1, type);
8595 case GE_EXPR:
8596 return constant_boolean_node (bitpos0 >= bitpos1, type);
8597 case GT_EXPR:
8598 return constant_boolean_node (bitpos0 > bitpos1, type);
8599 default:;
8602 /* We can simplify the comparison to a comparison of the variable
8603 offset parts if the constant offset parts are equal.
8604 Be careful to use signed sizetype here because otherwise we
8605 mess with array offsets in the wrong way. This is possible
8606 because pointer arithmetic is restricted to retain within an
8607 object and overflow on pointer differences is undefined as of
8608 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8609 else if (bitpos0 == bitpos1
8610 && (equality_code
8611 || (indirect_base0 && DECL_P (base0))
8612 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8614 /* By converting to signed sizetype we cover middle-end pointer
8615 arithmetic which operates on unsigned pointer types of size
8616 type size and ARRAY_REF offsets which are properly sign or
8617 zero extended from their type in case it is narrower than
8618 sizetype. */
8619 if (offset0 == NULL_TREE)
8620 offset0 = build_int_cst (ssizetype, 0);
8621 else
8622 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8623 if (offset1 == NULL_TREE)
8624 offset1 = build_int_cst (ssizetype, 0);
8625 else
8626 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8628 if (!equality_code
8629 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8630 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8631 fold_overflow_warning (("assuming pointer wraparound does not "
8632 "occur when comparing P +- C1 with "
8633 "P +- C2"),
8634 WARN_STRICT_OVERFLOW_COMPARISON);
8636 return fold_build2_loc (loc, code, type, offset0, offset1);
8639 /* For equal offsets we can simplify to a comparison of the
8640 base addresses. */
8641 else if (bitpos0 == bitpos1
8642 && (indirect_base0
8643 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8644 && (indirect_base1
8645 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8646 && ((offset0 == offset1)
8647 || (offset0 && offset1
8648 && operand_equal_p (offset0, offset1, 0))))
8650 if (indirect_base0)
8651 base0 = build_fold_addr_expr_loc (loc, base0);
8652 if (indirect_base1)
8653 base1 = build_fold_addr_expr_loc (loc, base1);
8654 return fold_build2_loc (loc, code, type, base0, base1);
8656 /* Comparison between an ordinary (non-weak) symbol and a null
8657 pointer can be eliminated since such symbols must have a non
8658 null address. In C, relational expressions between pointers
8659 to objects and null pointers are undefined. The results
8660 below follow the C++ rules with the additional property that
8661 every object pointer compares greater than a null pointer.
8663 else if (DECL_P (base0)
8664 && maybe_nonzero_address (base0) > 0
8665 /* Avoid folding references to struct members at offset 0 to
8666 prevent tests like '&ptr->firstmember == 0' from getting
8667 eliminated. When ptr is null, although the -> expression
8668 is strictly speaking invalid, GCC retains it as a matter
8669 of QoI. See PR c/44555. */
8670 && (offset0 == NULL_TREE && bitpos0 != 0)
8671 /* The caller guarantees that when one of the arguments is
8672 constant (i.e., null in this case) it is second. */
8673 && integer_zerop (arg1))
8675 switch (code)
8677 case EQ_EXPR:
8678 case LE_EXPR:
8679 case LT_EXPR:
8680 return constant_boolean_node (false, type);
8681 case GE_EXPR:
8682 case GT_EXPR:
8683 case NE_EXPR:
8684 return constant_boolean_node (true, type);
8685 default:
8686 gcc_unreachable ();
8691 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8692 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8693 the resulting offset is smaller in absolute value than the
8694 original one and has the same sign. */
8695 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8696 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8697 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8698 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8699 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8700 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8701 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8702 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8704 tree const1 = TREE_OPERAND (arg0, 1);
8705 tree const2 = TREE_OPERAND (arg1, 1);
8706 tree variable1 = TREE_OPERAND (arg0, 0);
8707 tree variable2 = TREE_OPERAND (arg1, 0);
8708 tree cst;
8709 const char * const warnmsg = G_("assuming signed overflow does not "
8710 "occur when combining constants around "
8711 "a comparison");
8713 /* Put the constant on the side where it doesn't overflow and is
8714 of lower absolute value and of same sign than before. */
8715 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8716 ? MINUS_EXPR : PLUS_EXPR,
8717 const2, const1);
8718 if (!TREE_OVERFLOW (cst)
8719 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8720 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8722 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8723 return fold_build2_loc (loc, code, type,
8724 variable1,
8725 fold_build2_loc (loc, TREE_CODE (arg1),
8726 TREE_TYPE (arg1),
8727 variable2, cst));
8730 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8731 ? MINUS_EXPR : PLUS_EXPR,
8732 const1, const2);
8733 if (!TREE_OVERFLOW (cst)
8734 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8735 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8737 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8738 return fold_build2_loc (loc, code, type,
8739 fold_build2_loc (loc, TREE_CODE (arg0),
8740 TREE_TYPE (arg0),
8741 variable1, cst),
8742 variable2);
8746 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8747 if (tem)
8748 return tem;
8750 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8751 constant, we can simplify it. */
8752 if (TREE_CODE (arg1) == INTEGER_CST
8753 && (TREE_CODE (arg0) == MIN_EXPR
8754 || TREE_CODE (arg0) == MAX_EXPR)
8755 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8757 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8758 if (tem)
8759 return tem;
8762 /* If we are comparing an expression that just has comparisons
8763 of two integer values, arithmetic expressions of those comparisons,
8764 and constants, we can simplify it. There are only three cases
8765 to check: the two values can either be equal, the first can be
8766 greater, or the second can be greater. Fold the expression for
8767 those three values. Since each value must be 0 or 1, we have
8768 eight possibilities, each of which corresponds to the constant 0
8769 or 1 or one of the six possible comparisons.
8771 This handles common cases like (a > b) == 0 but also handles
8772 expressions like ((x > y) - (y > x)) > 0, which supposedly
8773 occur in macroized code. */
8775 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8777 tree cval1 = 0, cval2 = 0;
8778 int save_p = 0;
8780 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8781 /* Don't handle degenerate cases here; they should already
8782 have been handled anyway. */
8783 && cval1 != 0 && cval2 != 0
8784 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8785 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8786 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8787 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8788 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8789 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8790 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8792 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8793 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8795 /* We can't just pass T to eval_subst in case cval1 or cval2
8796 was the same as ARG1. */
8798 tree high_result
8799 = fold_build2_loc (loc, code, type,
8800 eval_subst (loc, arg0, cval1, maxval,
8801 cval2, minval),
8802 arg1);
8803 tree equal_result
8804 = fold_build2_loc (loc, code, type,
8805 eval_subst (loc, arg0, cval1, maxval,
8806 cval2, maxval),
8807 arg1);
8808 tree low_result
8809 = fold_build2_loc (loc, code, type,
8810 eval_subst (loc, arg0, cval1, minval,
8811 cval2, maxval),
8812 arg1);
8814 /* All three of these results should be 0 or 1. Confirm they are.
8815 Then use those values to select the proper code to use. */
8817 if (TREE_CODE (high_result) == INTEGER_CST
8818 && TREE_CODE (equal_result) == INTEGER_CST
8819 && TREE_CODE (low_result) == INTEGER_CST)
8821 /* Make a 3-bit mask with the high-order bit being the
8822 value for `>', the next for '=', and the low for '<'. */
8823 switch ((integer_onep (high_result) * 4)
8824 + (integer_onep (equal_result) * 2)
8825 + integer_onep (low_result))
8827 case 0:
8828 /* Always false. */
8829 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8830 case 1:
8831 code = LT_EXPR;
8832 break;
8833 case 2:
8834 code = EQ_EXPR;
8835 break;
8836 case 3:
8837 code = LE_EXPR;
8838 break;
8839 case 4:
8840 code = GT_EXPR;
8841 break;
8842 case 5:
8843 code = NE_EXPR;
8844 break;
8845 case 6:
8846 code = GE_EXPR;
8847 break;
8848 case 7:
8849 /* Always true. */
8850 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8853 if (save_p)
8855 tem = save_expr (build2 (code, type, cval1, cval2));
8856 SET_EXPR_LOCATION (tem, loc);
8857 return tem;
8859 return fold_build2_loc (loc, code, type, cval1, cval2);
8864 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8865 into a single range test. */
8866 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8867 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8868 && TREE_CODE (arg1) == INTEGER_CST
8869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8870 && !integer_zerop (TREE_OPERAND (arg0, 1))
8871 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8872 && !TREE_OVERFLOW (arg1))
8874 tem = fold_div_compare (loc, code, type, arg0, arg1);
8875 if (tem != NULL_TREE)
8876 return tem;
8879 return NULL_TREE;
8883 /* Subroutine of fold_binary. Optimize complex multiplications of the
8884 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8885 argument EXPR represents the expression "z" of type TYPE. */
8887 static tree
8888 fold_mult_zconjz (location_t loc, tree type, tree expr)
8890 tree itype = TREE_TYPE (type);
8891 tree rpart, ipart, tem;
8893 if (TREE_CODE (expr) == COMPLEX_EXPR)
8895 rpart = TREE_OPERAND (expr, 0);
8896 ipart = TREE_OPERAND (expr, 1);
8898 else if (TREE_CODE (expr) == COMPLEX_CST)
8900 rpart = TREE_REALPART (expr);
8901 ipart = TREE_IMAGPART (expr);
8903 else
8905 expr = save_expr (expr);
8906 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8907 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8910 rpart = save_expr (rpart);
8911 ipart = save_expr (ipart);
8912 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8913 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8914 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8915 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8916 build_zero_cst (itype));
8920 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8921 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8923 static bool
8924 vec_cst_ctor_to_array (tree arg, tree *elts)
8926 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8928 if (TREE_CODE (arg) == VECTOR_CST)
8930 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8931 elts[i] = VECTOR_CST_ELT (arg, i);
8933 else if (TREE_CODE (arg) == CONSTRUCTOR)
8935 constructor_elt *elt;
8937 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8938 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8939 return false;
8940 else
8941 elts[i] = elt->value;
8943 else
8944 return false;
8945 for (; i < nelts; i++)
8946 elts[i]
8947 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8948 return true;
8951 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8952 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8953 NULL_TREE otherwise. */
8955 static tree
8956 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8958 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8959 tree *elts;
8960 bool need_ctor = false;
8962 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8963 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8964 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8965 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8966 return NULL_TREE;
8968 elts = XALLOCAVEC (tree, nelts * 3);
8969 if (!vec_cst_ctor_to_array (arg0, elts)
8970 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8971 return NULL_TREE;
8973 for (i = 0; i < nelts; i++)
8975 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8976 need_ctor = true;
8977 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8980 if (need_ctor)
8982 vec<constructor_elt, va_gc> *v;
8983 vec_alloc (v, nelts);
8984 for (i = 0; i < nelts; i++)
8985 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8986 return build_constructor (type, v);
8988 else
8989 return build_vector (type, &elts[2 * nelts]);
8992 /* Try to fold a pointer difference of type TYPE two address expressions of
8993 array references AREF0 and AREF1 using location LOC. Return a
8994 simplified expression for the difference or NULL_TREE. */
8996 static tree
8997 fold_addr_of_array_ref_difference (location_t loc, tree type,
8998 tree aref0, tree aref1)
9000 tree base0 = TREE_OPERAND (aref0, 0);
9001 tree base1 = TREE_OPERAND (aref1, 0);
9002 tree base_offset = build_int_cst (type, 0);
9004 /* If the bases are array references as well, recurse. If the bases
9005 are pointer indirections compute the difference of the pointers.
9006 If the bases are equal, we are set. */
9007 if ((TREE_CODE (base0) == ARRAY_REF
9008 && TREE_CODE (base1) == ARRAY_REF
9009 && (base_offset
9010 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9011 || (INDIRECT_REF_P (base0)
9012 && INDIRECT_REF_P (base1)
9013 && (base_offset
9014 = fold_binary_loc (loc, MINUS_EXPR, type,
9015 fold_convert (type, TREE_OPERAND (base0, 0)),
9016 fold_convert (type,
9017 TREE_OPERAND (base1, 0)))))
9018 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9020 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9021 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9022 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9023 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9024 return fold_build2_loc (loc, PLUS_EXPR, type,
9025 base_offset,
9026 fold_build2_loc (loc, MULT_EXPR, type,
9027 diff, esz));
9029 return NULL_TREE;
9032 /* If the real or vector real constant CST of type TYPE has an exact
9033 inverse, return it, else return NULL. */
9035 tree
9036 exact_inverse (tree type, tree cst)
9038 REAL_VALUE_TYPE r;
9039 tree unit_type, *elts;
9040 machine_mode mode;
9041 unsigned vec_nelts, i;
9043 switch (TREE_CODE (cst))
9045 case REAL_CST:
9046 r = TREE_REAL_CST (cst);
9048 if (exact_real_inverse (TYPE_MODE (type), &r))
9049 return build_real (type, r);
9051 return NULL_TREE;
9053 case VECTOR_CST:
9054 vec_nelts = VECTOR_CST_NELTS (cst);
9055 elts = XALLOCAVEC (tree, vec_nelts);
9056 unit_type = TREE_TYPE (type);
9057 mode = TYPE_MODE (unit_type);
9059 for (i = 0; i < vec_nelts; i++)
9061 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9062 if (!exact_real_inverse (mode, &r))
9063 return NULL_TREE;
9064 elts[i] = build_real (unit_type, r);
9067 return build_vector (type, elts);
9069 default:
9070 return NULL_TREE;
9074 /* Mask out the tz least significant bits of X of type TYPE where
9075 tz is the number of trailing zeroes in Y. */
9076 static wide_int
9077 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9079 int tz = wi::ctz (y);
9080 if (tz > 0)
9081 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9082 return x;
9085 /* Return true when T is an address and is known to be nonzero.
9086 For floating point we further ensure that T is not denormal.
9087 Similar logic is present in nonzero_address in rtlanal.h.
9089 If the return value is based on the assumption that signed overflow
9090 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9091 change *STRICT_OVERFLOW_P. */
9093 static bool
9094 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9096 tree type = TREE_TYPE (t);
9097 enum tree_code code;
9099 /* Doing something useful for floating point would need more work. */
9100 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9101 return false;
9103 code = TREE_CODE (t);
9104 switch (TREE_CODE_CLASS (code))
9106 case tcc_unary:
9107 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9108 strict_overflow_p);
9109 case tcc_binary:
9110 case tcc_comparison:
9111 return tree_binary_nonzero_warnv_p (code, type,
9112 TREE_OPERAND (t, 0),
9113 TREE_OPERAND (t, 1),
9114 strict_overflow_p);
9115 case tcc_constant:
9116 case tcc_declaration:
9117 case tcc_reference:
9118 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9120 default:
9121 break;
9124 switch (code)
9126 case TRUTH_NOT_EXPR:
9127 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9128 strict_overflow_p);
9130 case TRUTH_AND_EXPR:
9131 case TRUTH_OR_EXPR:
9132 case TRUTH_XOR_EXPR:
9133 return tree_binary_nonzero_warnv_p (code, type,
9134 TREE_OPERAND (t, 0),
9135 TREE_OPERAND (t, 1),
9136 strict_overflow_p);
9138 case COND_EXPR:
9139 case CONSTRUCTOR:
9140 case OBJ_TYPE_REF:
9141 case ASSERT_EXPR:
9142 case ADDR_EXPR:
9143 case WITH_SIZE_EXPR:
9144 case SSA_NAME:
9145 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9147 case COMPOUND_EXPR:
9148 case MODIFY_EXPR:
9149 case BIND_EXPR:
9150 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9151 strict_overflow_p);
9153 case SAVE_EXPR:
9154 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9155 strict_overflow_p);
9157 case CALL_EXPR:
9159 tree fndecl = get_callee_fndecl (t);
9160 if (!fndecl) return false;
9161 if (flag_delete_null_pointer_checks && !flag_check_new
9162 && DECL_IS_OPERATOR_NEW (fndecl)
9163 && !TREE_NOTHROW (fndecl))
9164 return true;
9165 if (flag_delete_null_pointer_checks
9166 && lookup_attribute ("returns_nonnull",
9167 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9168 return true;
9169 return alloca_call_p (t);
9172 default:
9173 break;
9175 return false;
9178 /* Return true when T is an address and is known to be nonzero.
9179 Handle warnings about undefined signed overflow. */
9181 static bool
9182 tree_expr_nonzero_p (tree t)
9184 bool ret, strict_overflow_p;
9186 strict_overflow_p = false;
9187 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9188 if (strict_overflow_p)
9189 fold_overflow_warning (("assuming signed overflow does not occur when "
9190 "determining that expression is always "
9191 "non-zero"),
9192 WARN_STRICT_OVERFLOW_MISC);
9193 return ret;
9196 /* Return true if T is known not to be equal to an integer W. */
9198 bool
9199 expr_not_equal_to (tree t, const wide_int &w)
9201 wide_int min, max, nz;
9202 value_range_type rtype;
9203 switch (TREE_CODE (t))
9205 case INTEGER_CST:
9206 return wi::ne_p (t, w);
9208 case SSA_NAME:
9209 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9210 return false;
9211 rtype = get_range_info (t, &min, &max);
9212 if (rtype == VR_RANGE)
9214 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9215 return true;
9216 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9217 return true;
9219 else if (rtype == VR_ANTI_RANGE
9220 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9221 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9222 return true;
9223 /* If T has some known zero bits and W has any of those bits set,
9224 then T is known not to be equal to W. */
9225 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9226 TYPE_PRECISION (TREE_TYPE (t))), 0))
9227 return true;
9228 return false;
9230 default:
9231 return false;
9235 /* Fold a binary expression of code CODE and type TYPE with operands
9236 OP0 and OP1. LOC is the location of the resulting expression.
9237 Return the folded expression if folding is successful. Otherwise,
9238 return NULL_TREE. */
9240 tree
9241 fold_binary_loc (location_t loc,
9242 enum tree_code code, tree type, tree op0, tree op1)
9244 enum tree_code_class kind = TREE_CODE_CLASS (code);
9245 tree arg0, arg1, tem;
9246 tree t1 = NULL_TREE;
9247 bool strict_overflow_p;
9248 unsigned int prec;
9250 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9251 && TREE_CODE_LENGTH (code) == 2
9252 && op0 != NULL_TREE
9253 && op1 != NULL_TREE);
9255 arg0 = op0;
9256 arg1 = op1;
9258 /* Strip any conversions that don't change the mode. This is
9259 safe for every expression, except for a comparison expression
9260 because its signedness is derived from its operands. So, in
9261 the latter case, only strip conversions that don't change the
9262 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9263 preserved.
9265 Note that this is done as an internal manipulation within the
9266 constant folder, in order to find the simplest representation
9267 of the arguments so that their form can be studied. In any
9268 cases, the appropriate type conversions should be put back in
9269 the tree that will get out of the constant folder. */
9271 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9273 STRIP_SIGN_NOPS (arg0);
9274 STRIP_SIGN_NOPS (arg1);
9276 else
9278 STRIP_NOPS (arg0);
9279 STRIP_NOPS (arg1);
9282 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9283 constant but we can't do arithmetic on them. */
9284 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9286 tem = const_binop (code, type, arg0, arg1);
9287 if (tem != NULL_TREE)
9289 if (TREE_TYPE (tem) != type)
9290 tem = fold_convert_loc (loc, type, tem);
9291 return tem;
9295 /* If this is a commutative operation, and ARG0 is a constant, move it
9296 to ARG1 to reduce the number of tests below. */
9297 if (commutative_tree_code (code)
9298 && tree_swap_operands_p (arg0, arg1, true))
9299 return fold_build2_loc (loc, code, type, op1, op0);
9301 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9302 to ARG1 to reduce the number of tests below. */
9303 if (kind == tcc_comparison
9304 && tree_swap_operands_p (arg0, arg1, true))
9305 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9307 tem = generic_simplify (loc, code, type, op0, op1);
9308 if (tem)
9309 return tem;
9311 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9313 First check for cases where an arithmetic operation is applied to a
9314 compound, conditional, or comparison operation. Push the arithmetic
9315 operation inside the compound or conditional to see if any folding
9316 can then be done. Convert comparison to conditional for this purpose.
9317 The also optimizes non-constant cases that used to be done in
9318 expand_expr.
9320 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9321 one of the operands is a comparison and the other is a comparison, a
9322 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9323 code below would make the expression more complex. Change it to a
9324 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9325 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9327 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9328 || code == EQ_EXPR || code == NE_EXPR)
9329 && TREE_CODE (type) != VECTOR_TYPE
9330 && ((truth_value_p (TREE_CODE (arg0))
9331 && (truth_value_p (TREE_CODE (arg1))
9332 || (TREE_CODE (arg1) == BIT_AND_EXPR
9333 && integer_onep (TREE_OPERAND (arg1, 1)))))
9334 || (truth_value_p (TREE_CODE (arg1))
9335 && (truth_value_p (TREE_CODE (arg0))
9336 || (TREE_CODE (arg0) == BIT_AND_EXPR
9337 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9339 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9340 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9341 : TRUTH_XOR_EXPR,
9342 boolean_type_node,
9343 fold_convert_loc (loc, boolean_type_node, arg0),
9344 fold_convert_loc (loc, boolean_type_node, arg1));
9346 if (code == EQ_EXPR)
9347 tem = invert_truthvalue_loc (loc, tem);
9349 return fold_convert_loc (loc, type, tem);
9352 if (TREE_CODE_CLASS (code) == tcc_binary
9353 || TREE_CODE_CLASS (code) == tcc_comparison)
9355 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9357 tem = fold_build2_loc (loc, code, type,
9358 fold_convert_loc (loc, TREE_TYPE (op0),
9359 TREE_OPERAND (arg0, 1)), op1);
9360 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9361 tem);
9363 if (TREE_CODE (arg1) == COMPOUND_EXPR
9364 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9366 tem = fold_build2_loc (loc, code, type, op0,
9367 fold_convert_loc (loc, TREE_TYPE (op1),
9368 TREE_OPERAND (arg1, 1)));
9369 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9370 tem);
9373 if (TREE_CODE (arg0) == COND_EXPR
9374 || TREE_CODE (arg0) == VEC_COND_EXPR
9375 || COMPARISON_CLASS_P (arg0))
9377 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9378 arg0, arg1,
9379 /*cond_first_p=*/1);
9380 if (tem != NULL_TREE)
9381 return tem;
9384 if (TREE_CODE (arg1) == COND_EXPR
9385 || TREE_CODE (arg1) == VEC_COND_EXPR
9386 || COMPARISON_CLASS_P (arg1))
9388 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9389 arg1, arg0,
9390 /*cond_first_p=*/0);
9391 if (tem != NULL_TREE)
9392 return tem;
9396 switch (code)
9398 case MEM_REF:
9399 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9400 if (TREE_CODE (arg0) == ADDR_EXPR
9401 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9403 tree iref = TREE_OPERAND (arg0, 0);
9404 return fold_build2 (MEM_REF, type,
9405 TREE_OPERAND (iref, 0),
9406 int_const_binop (PLUS_EXPR, arg1,
9407 TREE_OPERAND (iref, 1)));
9410 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9411 if (TREE_CODE (arg0) == ADDR_EXPR
9412 && handled_component_p (TREE_OPERAND (arg0, 0)))
9414 tree base;
9415 HOST_WIDE_INT coffset;
9416 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9417 &coffset);
9418 if (!base)
9419 return NULL_TREE;
9420 return fold_build2 (MEM_REF, type,
9421 build_fold_addr_expr (base),
9422 int_const_binop (PLUS_EXPR, arg1,
9423 size_int (coffset)));
9426 return NULL_TREE;
9428 case POINTER_PLUS_EXPR:
9429 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9430 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9431 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9432 return fold_convert_loc (loc, type,
9433 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9434 fold_convert_loc (loc, sizetype,
9435 arg1),
9436 fold_convert_loc (loc, sizetype,
9437 arg0)));
9439 return NULL_TREE;
9441 case PLUS_EXPR:
9442 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9444 /* X + (X / CST) * -CST is X % CST. */
9445 if (TREE_CODE (arg1) == MULT_EXPR
9446 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9447 && operand_equal_p (arg0,
9448 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9450 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9451 tree cst1 = TREE_OPERAND (arg1, 1);
9452 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9453 cst1, cst0);
9454 if (sum && integer_zerop (sum))
9455 return fold_convert_loc (loc, type,
9456 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9457 TREE_TYPE (arg0), arg0,
9458 cst0));
9462 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9463 one. Make sure the type is not saturating and has the signedness of
9464 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9465 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9466 if ((TREE_CODE (arg0) == MULT_EXPR
9467 || TREE_CODE (arg1) == MULT_EXPR)
9468 && !TYPE_SATURATING (type)
9469 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9470 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9471 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9473 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9474 if (tem)
9475 return tem;
9478 if (! FLOAT_TYPE_P (type))
9480 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9481 (plus (plus (mult) (mult)) (foo)) so that we can
9482 take advantage of the factoring cases below. */
9483 if (ANY_INTEGRAL_TYPE_P (type)
9484 && TYPE_OVERFLOW_WRAPS (type)
9485 && (((TREE_CODE (arg0) == PLUS_EXPR
9486 || TREE_CODE (arg0) == MINUS_EXPR)
9487 && TREE_CODE (arg1) == MULT_EXPR)
9488 || ((TREE_CODE (arg1) == PLUS_EXPR
9489 || TREE_CODE (arg1) == MINUS_EXPR)
9490 && TREE_CODE (arg0) == MULT_EXPR)))
9492 tree parg0, parg1, parg, marg;
9493 enum tree_code pcode;
9495 if (TREE_CODE (arg1) == MULT_EXPR)
9496 parg = arg0, marg = arg1;
9497 else
9498 parg = arg1, marg = arg0;
9499 pcode = TREE_CODE (parg);
9500 parg0 = TREE_OPERAND (parg, 0);
9501 parg1 = TREE_OPERAND (parg, 1);
9502 STRIP_NOPS (parg0);
9503 STRIP_NOPS (parg1);
9505 if (TREE_CODE (parg0) == MULT_EXPR
9506 && TREE_CODE (parg1) != MULT_EXPR)
9507 return fold_build2_loc (loc, pcode, type,
9508 fold_build2_loc (loc, PLUS_EXPR, type,
9509 fold_convert_loc (loc, type,
9510 parg0),
9511 fold_convert_loc (loc, type,
9512 marg)),
9513 fold_convert_loc (loc, type, parg1));
9514 if (TREE_CODE (parg0) != MULT_EXPR
9515 && TREE_CODE (parg1) == MULT_EXPR)
9516 return
9517 fold_build2_loc (loc, PLUS_EXPR, type,
9518 fold_convert_loc (loc, type, parg0),
9519 fold_build2_loc (loc, pcode, type,
9520 fold_convert_loc (loc, type, marg),
9521 fold_convert_loc (loc, type,
9522 parg1)));
9525 else
9527 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9528 to __complex__ ( x, y ). This is not the same for SNaNs or
9529 if signed zeros are involved. */
9530 if (!HONOR_SNANS (element_mode (arg0))
9531 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9532 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9534 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9535 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9536 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9537 bool arg0rz = false, arg0iz = false;
9538 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9539 || (arg0i && (arg0iz = real_zerop (arg0i))))
9541 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9542 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9543 if (arg0rz && arg1i && real_zerop (arg1i))
9545 tree rp = arg1r ? arg1r
9546 : build1 (REALPART_EXPR, rtype, arg1);
9547 tree ip = arg0i ? arg0i
9548 : build1 (IMAGPART_EXPR, rtype, arg0);
9549 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9551 else if (arg0iz && arg1r && real_zerop (arg1r))
9553 tree rp = arg0r ? arg0r
9554 : build1 (REALPART_EXPR, rtype, arg0);
9555 tree ip = arg1i ? arg1i
9556 : build1 (IMAGPART_EXPR, rtype, arg1);
9557 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9562 if (flag_unsafe_math_optimizations
9563 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9564 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9565 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9566 return tem;
9568 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9569 We associate floats only if the user has specified
9570 -fassociative-math. */
9571 if (flag_associative_math
9572 && TREE_CODE (arg1) == PLUS_EXPR
9573 && TREE_CODE (arg0) != MULT_EXPR)
9575 tree tree10 = TREE_OPERAND (arg1, 0);
9576 tree tree11 = TREE_OPERAND (arg1, 1);
9577 if (TREE_CODE (tree11) == MULT_EXPR
9578 && TREE_CODE (tree10) == MULT_EXPR)
9580 tree tree0;
9581 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9582 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9585 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9586 We associate floats only if the user has specified
9587 -fassociative-math. */
9588 if (flag_associative_math
9589 && TREE_CODE (arg0) == PLUS_EXPR
9590 && TREE_CODE (arg1) != MULT_EXPR)
9592 tree tree00 = TREE_OPERAND (arg0, 0);
9593 tree tree01 = TREE_OPERAND (arg0, 1);
9594 if (TREE_CODE (tree01) == MULT_EXPR
9595 && TREE_CODE (tree00) == MULT_EXPR)
9597 tree tree0;
9598 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9599 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9604 bit_rotate:
9605 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9606 is a rotate of A by C1 bits. */
9607 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9608 is a rotate of A by B bits. */
9610 enum tree_code code0, code1;
9611 tree rtype;
9612 code0 = TREE_CODE (arg0);
9613 code1 = TREE_CODE (arg1);
9614 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9615 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9616 && operand_equal_p (TREE_OPERAND (arg0, 0),
9617 TREE_OPERAND (arg1, 0), 0)
9618 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9619 TYPE_UNSIGNED (rtype))
9620 /* Only create rotates in complete modes. Other cases are not
9621 expanded properly. */
9622 && (element_precision (rtype)
9623 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9625 tree tree01, tree11;
9626 enum tree_code code01, code11;
9628 tree01 = TREE_OPERAND (arg0, 1);
9629 tree11 = TREE_OPERAND (arg1, 1);
9630 STRIP_NOPS (tree01);
9631 STRIP_NOPS (tree11);
9632 code01 = TREE_CODE (tree01);
9633 code11 = TREE_CODE (tree11);
9634 if (code01 == INTEGER_CST
9635 && code11 == INTEGER_CST
9636 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9637 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9639 tem = build2_loc (loc, LROTATE_EXPR,
9640 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9641 TREE_OPERAND (arg0, 0),
9642 code0 == LSHIFT_EXPR
9643 ? TREE_OPERAND (arg0, 1)
9644 : TREE_OPERAND (arg1, 1));
9645 return fold_convert_loc (loc, type, tem);
9647 else if (code11 == MINUS_EXPR)
9649 tree tree110, tree111;
9650 tree110 = TREE_OPERAND (tree11, 0);
9651 tree111 = TREE_OPERAND (tree11, 1);
9652 STRIP_NOPS (tree110);
9653 STRIP_NOPS (tree111);
9654 if (TREE_CODE (tree110) == INTEGER_CST
9655 && 0 == compare_tree_int (tree110,
9656 element_precision
9657 (TREE_TYPE (TREE_OPERAND
9658 (arg0, 0))))
9659 && operand_equal_p (tree01, tree111, 0))
9660 return
9661 fold_convert_loc (loc, type,
9662 build2 ((code0 == LSHIFT_EXPR
9663 ? LROTATE_EXPR
9664 : RROTATE_EXPR),
9665 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9666 TREE_OPERAND (arg0, 0),
9667 TREE_OPERAND (arg0, 1)));
9669 else if (code01 == MINUS_EXPR)
9671 tree tree010, tree011;
9672 tree010 = TREE_OPERAND (tree01, 0);
9673 tree011 = TREE_OPERAND (tree01, 1);
9674 STRIP_NOPS (tree010);
9675 STRIP_NOPS (tree011);
9676 if (TREE_CODE (tree010) == INTEGER_CST
9677 && 0 == compare_tree_int (tree010,
9678 element_precision
9679 (TREE_TYPE (TREE_OPERAND
9680 (arg0, 0))))
9681 && operand_equal_p (tree11, tree011, 0))
9682 return fold_convert_loc
9683 (loc, type,
9684 build2 ((code0 != LSHIFT_EXPR
9685 ? LROTATE_EXPR
9686 : RROTATE_EXPR),
9687 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9688 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9693 associate:
9694 /* In most languages, can't associate operations on floats through
9695 parentheses. Rather than remember where the parentheses were, we
9696 don't associate floats at all, unless the user has specified
9697 -fassociative-math.
9698 And, we need to make sure type is not saturating. */
9700 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9701 && !TYPE_SATURATING (type))
9703 tree var0, con0, lit0, minus_lit0;
9704 tree var1, con1, lit1, minus_lit1;
9705 tree atype = type;
9706 bool ok = true;
9708 /* Split both trees into variables, constants, and literals. Then
9709 associate each group together, the constants with literals,
9710 then the result with variables. This increases the chances of
9711 literals being recombined later and of generating relocatable
9712 expressions for the sum of a constant and literal. */
9713 var0 = split_tree (loc, arg0, type, code,
9714 &con0, &lit0, &minus_lit0, 0);
9715 var1 = split_tree (loc, arg1, type, code,
9716 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9718 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9719 if (code == MINUS_EXPR)
9720 code = PLUS_EXPR;
9722 /* With undefined overflow prefer doing association in a type
9723 which wraps on overflow, if that is one of the operand types. */
9724 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9725 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9727 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9728 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9729 atype = TREE_TYPE (arg0);
9730 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9731 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9732 atype = TREE_TYPE (arg1);
9733 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9736 /* With undefined overflow we can only associate constants with one
9737 variable, and constants whose association doesn't overflow. */
9738 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9739 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9741 if (var0 && var1)
9743 tree tmp0 = var0;
9744 tree tmp1 = var1;
9745 bool one_neg = false;
9747 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9749 tmp0 = TREE_OPERAND (tmp0, 0);
9750 one_neg = !one_neg;
9752 if (CONVERT_EXPR_P (tmp0)
9753 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9754 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9755 <= TYPE_PRECISION (atype)))
9756 tmp0 = TREE_OPERAND (tmp0, 0);
9757 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9759 tmp1 = TREE_OPERAND (tmp1, 0);
9760 one_neg = !one_neg;
9762 if (CONVERT_EXPR_P (tmp1)
9763 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9764 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9765 <= TYPE_PRECISION (atype)))
9766 tmp1 = TREE_OPERAND (tmp1, 0);
9767 /* The only case we can still associate with two variables
9768 is if they cancel out. */
9769 if (!one_neg
9770 || !operand_equal_p (tmp0, tmp1, 0))
9771 ok = false;
9775 /* Only do something if we found more than two objects. Otherwise,
9776 nothing has changed and we risk infinite recursion. */
9777 if (ok
9778 && (2 < ((var0 != 0) + (var1 != 0)
9779 + (con0 != 0) + (con1 != 0)
9780 + (lit0 != 0) + (lit1 != 0)
9781 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9783 bool any_overflows = false;
9784 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9785 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9786 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9787 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9788 var0 = associate_trees (loc, var0, var1, code, atype);
9789 con0 = associate_trees (loc, con0, con1, code, atype);
9790 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9791 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9792 code, atype);
9794 /* Preserve the MINUS_EXPR if the negative part of the literal is
9795 greater than the positive part. Otherwise, the multiplicative
9796 folding code (i.e extract_muldiv) may be fooled in case
9797 unsigned constants are subtracted, like in the following
9798 example: ((X*2 + 4) - 8U)/2. */
9799 if (minus_lit0 && lit0)
9801 if (TREE_CODE (lit0) == INTEGER_CST
9802 && TREE_CODE (minus_lit0) == INTEGER_CST
9803 && tree_int_cst_lt (lit0, minus_lit0))
9805 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9806 MINUS_EXPR, atype);
9807 lit0 = 0;
9809 else
9811 lit0 = associate_trees (loc, lit0, minus_lit0,
9812 MINUS_EXPR, atype);
9813 minus_lit0 = 0;
9817 /* Don't introduce overflows through reassociation. */
9818 if (!any_overflows
9819 && ((lit0 && TREE_OVERFLOW_P (lit0))
9820 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9821 return NULL_TREE;
9823 if (minus_lit0)
9825 if (con0 == 0)
9826 return
9827 fold_convert_loc (loc, type,
9828 associate_trees (loc, var0, minus_lit0,
9829 MINUS_EXPR, atype));
9830 else
9832 con0 = associate_trees (loc, con0, minus_lit0,
9833 MINUS_EXPR, atype);
9834 return
9835 fold_convert_loc (loc, type,
9836 associate_trees (loc, var0, con0,
9837 PLUS_EXPR, atype));
9841 con0 = associate_trees (loc, con0, lit0, code, atype);
9842 return
9843 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9844 code, atype));
9848 return NULL_TREE;
9850 case MINUS_EXPR:
9851 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9852 if (TREE_CODE (arg0) == NEGATE_EXPR
9853 && negate_expr_p (op1)
9854 && reorder_operands_p (arg0, arg1))
9855 return fold_build2_loc (loc, MINUS_EXPR, type,
9856 negate_expr (op1),
9857 fold_convert_loc (loc, type,
9858 TREE_OPERAND (arg0, 0)));
9860 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9861 __complex__ ( x, -y ). This is not the same for SNaNs or if
9862 signed zeros are involved. */
9863 if (!HONOR_SNANS (element_mode (arg0))
9864 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9865 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9867 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9868 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9869 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9870 bool arg0rz = false, arg0iz = false;
9871 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9872 || (arg0i && (arg0iz = real_zerop (arg0i))))
9874 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9875 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9876 if (arg0rz && arg1i && real_zerop (arg1i))
9878 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9879 arg1r ? arg1r
9880 : build1 (REALPART_EXPR, rtype, arg1));
9881 tree ip = arg0i ? arg0i
9882 : build1 (IMAGPART_EXPR, rtype, arg0);
9883 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9885 else if (arg0iz && arg1r && real_zerop (arg1r))
9887 tree rp = arg0r ? arg0r
9888 : build1 (REALPART_EXPR, rtype, arg0);
9889 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9890 arg1i ? arg1i
9891 : build1 (IMAGPART_EXPR, rtype, arg1));
9892 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9897 /* A - B -> A + (-B) if B is easily negatable. */
9898 if (negate_expr_p (op1)
9899 && ! TYPE_OVERFLOW_SANITIZED (type)
9900 && ((FLOAT_TYPE_P (type)
9901 /* Avoid this transformation if B is a positive REAL_CST. */
9902 && (TREE_CODE (op1) != REAL_CST
9903 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9904 || INTEGRAL_TYPE_P (type)))
9905 return fold_build2_loc (loc, PLUS_EXPR, type,
9906 fold_convert_loc (loc, type, arg0),
9907 negate_expr (op1));
9909 /* Fold &a[i] - &a[j] to i-j. */
9910 if (TREE_CODE (arg0) == ADDR_EXPR
9911 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9912 && TREE_CODE (arg1) == ADDR_EXPR
9913 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9915 tree tem = fold_addr_of_array_ref_difference (loc, type,
9916 TREE_OPERAND (arg0, 0),
9917 TREE_OPERAND (arg1, 0));
9918 if (tem)
9919 return tem;
9922 if (FLOAT_TYPE_P (type)
9923 && flag_unsafe_math_optimizations
9924 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9925 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9926 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9927 return tem;
9929 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9930 one. Make sure the type is not saturating and has the signedness of
9931 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9932 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9933 if ((TREE_CODE (arg0) == MULT_EXPR
9934 || TREE_CODE (arg1) == MULT_EXPR)
9935 && !TYPE_SATURATING (type)
9936 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9937 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9938 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9940 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9941 if (tem)
9942 return tem;
9945 goto associate;
9947 case MULT_EXPR:
9948 if (! FLOAT_TYPE_P (type))
9950 /* Transform x * -C into -x * C if x is easily negatable. */
9951 if (TREE_CODE (op1) == INTEGER_CST
9952 && tree_int_cst_sgn (op1) == -1
9953 && negate_expr_p (op0)
9954 && (tem = negate_expr (op1)) != op1
9955 && ! TREE_OVERFLOW (tem))
9956 return fold_build2_loc (loc, MULT_EXPR, type,
9957 fold_convert_loc (loc, type,
9958 negate_expr (op0)), tem);
9960 /* (A + A) * C -> A * 2 * C */
9961 if (TREE_CODE (arg0) == PLUS_EXPR
9962 && TREE_CODE (arg1) == INTEGER_CST
9963 && operand_equal_p (TREE_OPERAND (arg0, 0),
9964 TREE_OPERAND (arg0, 1), 0))
9965 return fold_build2_loc (loc, MULT_EXPR, type,
9966 omit_one_operand_loc (loc, type,
9967 TREE_OPERAND (arg0, 0),
9968 TREE_OPERAND (arg0, 1)),
9969 fold_build2_loc (loc, MULT_EXPR, type,
9970 build_int_cst (type, 2) , arg1));
9972 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9973 sign-changing only. */
9974 if (TREE_CODE (arg1) == INTEGER_CST
9975 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9976 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9977 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9979 strict_overflow_p = false;
9980 if (TREE_CODE (arg1) == INTEGER_CST
9981 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9982 &strict_overflow_p)))
9984 if (strict_overflow_p)
9985 fold_overflow_warning (("assuming signed overflow does not "
9986 "occur when simplifying "
9987 "multiplication"),
9988 WARN_STRICT_OVERFLOW_MISC);
9989 return fold_convert_loc (loc, type, tem);
9992 /* Optimize z * conj(z) for integer complex numbers. */
9993 if (TREE_CODE (arg0) == CONJ_EXPR
9994 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9995 return fold_mult_zconjz (loc, type, arg1);
9996 if (TREE_CODE (arg1) == CONJ_EXPR
9997 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9998 return fold_mult_zconjz (loc, type, arg0);
10000 else
10002 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10003 This is not the same for NaNs or if signed zeros are
10004 involved. */
10005 if (!HONOR_NANS (arg0)
10006 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10007 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10008 && TREE_CODE (arg1) == COMPLEX_CST
10009 && real_zerop (TREE_REALPART (arg1)))
10011 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10012 if (real_onep (TREE_IMAGPART (arg1)))
10013 return
10014 fold_build2_loc (loc, COMPLEX_EXPR, type,
10015 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10016 rtype, arg0)),
10017 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10018 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10019 return
10020 fold_build2_loc (loc, COMPLEX_EXPR, type,
10021 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10022 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10023 rtype, arg0)));
10026 /* Optimize z * conj(z) for floating point complex numbers.
10027 Guarded by flag_unsafe_math_optimizations as non-finite
10028 imaginary components don't produce scalar results. */
10029 if (flag_unsafe_math_optimizations
10030 && TREE_CODE (arg0) == CONJ_EXPR
10031 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10032 return fold_mult_zconjz (loc, type, arg1);
10033 if (flag_unsafe_math_optimizations
10034 && TREE_CODE (arg1) == CONJ_EXPR
10035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10036 return fold_mult_zconjz (loc, type, arg0);
10038 if (flag_unsafe_math_optimizations)
10041 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10042 if (!in_gimple_form
10043 && optimize
10044 && operand_equal_p (arg0, arg1, 0))
10046 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10048 if (powfn)
10050 tree arg = build_real (type, dconst2);
10051 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10056 goto associate;
10058 case BIT_IOR_EXPR:
10059 /* Canonicalize (X & C1) | C2. */
10060 if (TREE_CODE (arg0) == BIT_AND_EXPR
10061 && TREE_CODE (arg1) == INTEGER_CST
10062 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10064 int width = TYPE_PRECISION (type), w;
10065 wide_int c1 = TREE_OPERAND (arg0, 1);
10066 wide_int c2 = arg1;
10068 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10069 if ((c1 & c2) == c1)
10070 return omit_one_operand_loc (loc, type, arg1,
10071 TREE_OPERAND (arg0, 0));
10073 wide_int msk = wi::mask (width, false,
10074 TYPE_PRECISION (TREE_TYPE (arg1)));
10076 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10077 if (msk.and_not (c1 | c2) == 0)
10078 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10079 TREE_OPERAND (arg0, 0), arg1);
10081 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10082 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10083 mode which allows further optimizations. */
10084 c1 &= msk;
10085 c2 &= msk;
10086 wide_int c3 = c1.and_not (c2);
10087 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10089 wide_int mask = wi::mask (w, false,
10090 TYPE_PRECISION (type));
10091 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10093 c3 = mask;
10094 break;
10098 if (c3 != c1)
10099 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10100 fold_build2_loc (loc, BIT_AND_EXPR, type,
10101 TREE_OPERAND (arg0, 0),
10102 wide_int_to_tree (type,
10103 c3)),
10104 arg1);
10107 /* See if this can be simplified into a rotate first. If that
10108 is unsuccessful continue in the association code. */
10109 goto bit_rotate;
10111 case BIT_XOR_EXPR:
10112 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10113 if (TREE_CODE (arg0) == BIT_AND_EXPR
10114 && INTEGRAL_TYPE_P (type)
10115 && integer_onep (TREE_OPERAND (arg0, 1))
10116 && integer_onep (arg1))
10117 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10118 build_zero_cst (TREE_TYPE (arg0)));
10120 /* See if this can be simplified into a rotate first. If that
10121 is unsuccessful continue in the association code. */
10122 goto bit_rotate;
10124 case BIT_AND_EXPR:
10125 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10126 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10127 && INTEGRAL_TYPE_P (type)
10128 && integer_onep (TREE_OPERAND (arg0, 1))
10129 && integer_onep (arg1))
10131 tree tem2;
10132 tem = TREE_OPERAND (arg0, 0);
10133 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10134 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10135 tem, tem2);
10136 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10137 build_zero_cst (TREE_TYPE (tem)));
10139 /* Fold ~X & 1 as (X & 1) == 0. */
10140 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10141 && INTEGRAL_TYPE_P (type)
10142 && integer_onep (arg1))
10144 tree tem2;
10145 tem = TREE_OPERAND (arg0, 0);
10146 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10147 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10148 tem, tem2);
10149 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10150 build_zero_cst (TREE_TYPE (tem)));
10152 /* Fold !X & 1 as X == 0. */
10153 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10154 && integer_onep (arg1))
10156 tem = TREE_OPERAND (arg0, 0);
10157 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10158 build_zero_cst (TREE_TYPE (tem)));
10161 /* Fold (X ^ Y) & Y as ~X & Y. */
10162 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10163 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10165 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10166 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10167 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10168 fold_convert_loc (loc, type, arg1));
10170 /* Fold (X ^ Y) & X as ~Y & X. */
10171 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10173 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10175 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10176 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10177 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10178 fold_convert_loc (loc, type, arg1));
10180 /* Fold X & (X ^ Y) as X & ~Y. */
10181 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10184 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10185 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10186 fold_convert_loc (loc, type, arg0),
10187 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10189 /* Fold X & (Y ^ X) as ~Y & X. */
10190 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10191 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10192 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10194 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10195 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10196 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10197 fold_convert_loc (loc, type, arg0));
10200 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10201 multiple of 1 << CST. */
10202 if (TREE_CODE (arg1) == INTEGER_CST)
10204 wide_int cst1 = arg1;
10205 wide_int ncst1 = -cst1;
10206 if ((cst1 & ncst1) == ncst1
10207 && multiple_of_p (type, arg0,
10208 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10209 return fold_convert_loc (loc, type, arg0);
10212 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10213 bits from CST2. */
10214 if (TREE_CODE (arg1) == INTEGER_CST
10215 && TREE_CODE (arg0) == MULT_EXPR
10216 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10218 wide_int warg1 = arg1;
10219 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10221 if (masked == 0)
10222 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10223 arg0, arg1);
10224 else if (masked != warg1)
10226 /* Avoid the transform if arg1 is a mask of some
10227 mode which allows further optimizations. */
10228 int pop = wi::popcount (warg1);
10229 if (!(pop >= BITS_PER_UNIT
10230 && exact_log2 (pop) != -1
10231 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10232 return fold_build2_loc (loc, code, type, op0,
10233 wide_int_to_tree (type, masked));
10237 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10238 ((A & N) + B) & M -> (A + B) & M
10239 Similarly if (N & M) == 0,
10240 ((A | N) + B) & M -> (A + B) & M
10241 and for - instead of + (or unary - instead of +)
10242 and/or ^ instead of |.
10243 If B is constant and (B & M) == 0, fold into A & M. */
10244 if (TREE_CODE (arg1) == INTEGER_CST)
10246 wide_int cst1 = arg1;
10247 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10248 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10249 && (TREE_CODE (arg0) == PLUS_EXPR
10250 || TREE_CODE (arg0) == MINUS_EXPR
10251 || TREE_CODE (arg0) == NEGATE_EXPR)
10252 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10253 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10255 tree pmop[2];
10256 int which = 0;
10257 wide_int cst0;
10259 /* Now we know that arg0 is (C + D) or (C - D) or
10260 -C and arg1 (M) is == (1LL << cst) - 1.
10261 Store C into PMOP[0] and D into PMOP[1]. */
10262 pmop[0] = TREE_OPERAND (arg0, 0);
10263 pmop[1] = NULL;
10264 if (TREE_CODE (arg0) != NEGATE_EXPR)
10266 pmop[1] = TREE_OPERAND (arg0, 1);
10267 which = 1;
10270 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10271 which = -1;
10273 for (; which >= 0; which--)
10274 switch (TREE_CODE (pmop[which]))
10276 case BIT_AND_EXPR:
10277 case BIT_IOR_EXPR:
10278 case BIT_XOR_EXPR:
10279 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10280 != INTEGER_CST)
10281 break;
10282 cst0 = TREE_OPERAND (pmop[which], 1);
10283 cst0 &= cst1;
10284 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10286 if (cst0 != cst1)
10287 break;
10289 else if (cst0 != 0)
10290 break;
10291 /* If C or D is of the form (A & N) where
10292 (N & M) == M, or of the form (A | N) or
10293 (A ^ N) where (N & M) == 0, replace it with A. */
10294 pmop[which] = TREE_OPERAND (pmop[which], 0);
10295 break;
10296 case INTEGER_CST:
10297 /* If C or D is a N where (N & M) == 0, it can be
10298 omitted (assumed 0). */
10299 if ((TREE_CODE (arg0) == PLUS_EXPR
10300 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10301 && (cst1 & pmop[which]) == 0)
10302 pmop[which] = NULL;
10303 break;
10304 default:
10305 break;
10308 /* Only build anything new if we optimized one or both arguments
10309 above. */
10310 if (pmop[0] != TREE_OPERAND (arg0, 0)
10311 || (TREE_CODE (arg0) != NEGATE_EXPR
10312 && pmop[1] != TREE_OPERAND (arg0, 1)))
10314 tree utype = TREE_TYPE (arg0);
10315 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10317 /* Perform the operations in a type that has defined
10318 overflow behavior. */
10319 utype = unsigned_type_for (TREE_TYPE (arg0));
10320 if (pmop[0] != NULL)
10321 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10322 if (pmop[1] != NULL)
10323 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10326 if (TREE_CODE (arg0) == NEGATE_EXPR)
10327 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10328 else if (TREE_CODE (arg0) == PLUS_EXPR)
10330 if (pmop[0] != NULL && pmop[1] != NULL)
10331 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10332 pmop[0], pmop[1]);
10333 else if (pmop[0] != NULL)
10334 tem = pmop[0];
10335 else if (pmop[1] != NULL)
10336 tem = pmop[1];
10337 else
10338 return build_int_cst (type, 0);
10340 else if (pmop[0] == NULL)
10341 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10342 else
10343 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10344 pmop[0], pmop[1]);
10345 /* TEM is now the new binary +, - or unary - replacement. */
10346 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10347 fold_convert_loc (loc, utype, arg1));
10348 return fold_convert_loc (loc, type, tem);
10353 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10354 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10355 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10357 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10359 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10360 if (mask == -1)
10361 return
10362 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10365 goto associate;
10367 case RDIV_EXPR:
10368 /* Don't touch a floating-point divide by zero unless the mode
10369 of the constant can represent infinity. */
10370 if (TREE_CODE (arg1) == REAL_CST
10371 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10372 && real_zerop (arg1))
10373 return NULL_TREE;
10375 /* (-A) / (-B) -> A / B */
10376 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10377 return fold_build2_loc (loc, RDIV_EXPR, type,
10378 TREE_OPERAND (arg0, 0),
10379 negate_expr (arg1));
10380 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10381 return fold_build2_loc (loc, RDIV_EXPR, type,
10382 negate_expr (arg0),
10383 TREE_OPERAND (arg1, 0));
10384 return NULL_TREE;
10386 case TRUNC_DIV_EXPR:
10387 /* Fall through */
10389 case FLOOR_DIV_EXPR:
10390 /* Simplify A / (B << N) where A and B are positive and B is
10391 a power of 2, to A >> (N + log2(B)). */
10392 strict_overflow_p = false;
10393 if (TREE_CODE (arg1) == LSHIFT_EXPR
10394 && (TYPE_UNSIGNED (type)
10395 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10397 tree sval = TREE_OPERAND (arg1, 0);
10398 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10400 tree sh_cnt = TREE_OPERAND (arg1, 1);
10401 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10402 wi::exact_log2 (sval));
10404 if (strict_overflow_p)
10405 fold_overflow_warning (("assuming signed overflow does not "
10406 "occur when simplifying A / (B << N)"),
10407 WARN_STRICT_OVERFLOW_MISC);
10409 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10410 sh_cnt, pow2);
10411 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10412 fold_convert_loc (loc, type, arg0), sh_cnt);
10416 /* Fall through */
10418 case ROUND_DIV_EXPR:
10419 case CEIL_DIV_EXPR:
10420 case EXACT_DIV_EXPR:
10421 if (integer_zerop (arg1))
10422 return NULL_TREE;
10424 /* Convert -A / -B to A / B when the type is signed and overflow is
10425 undefined. */
10426 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10427 && TREE_CODE (arg0) == NEGATE_EXPR
10428 && negate_expr_p (op1))
10430 if (INTEGRAL_TYPE_P (type))
10431 fold_overflow_warning (("assuming signed overflow does not occur "
10432 "when distributing negation across "
10433 "division"),
10434 WARN_STRICT_OVERFLOW_MISC);
10435 return fold_build2_loc (loc, code, type,
10436 fold_convert_loc (loc, type,
10437 TREE_OPERAND (arg0, 0)),
10438 negate_expr (op1));
10440 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10441 && TREE_CODE (arg1) == NEGATE_EXPR
10442 && negate_expr_p (op0))
10444 if (INTEGRAL_TYPE_P (type))
10445 fold_overflow_warning (("assuming signed overflow does not occur "
10446 "when distributing negation across "
10447 "division"),
10448 WARN_STRICT_OVERFLOW_MISC);
10449 return fold_build2_loc (loc, code, type,
10450 negate_expr (op0),
10451 fold_convert_loc (loc, type,
10452 TREE_OPERAND (arg1, 0)));
10455 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10456 operation, EXACT_DIV_EXPR.
10458 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10459 At one time others generated faster code, it's not clear if they do
10460 after the last round to changes to the DIV code in expmed.c. */
10461 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10462 && multiple_of_p (type, arg0, arg1))
10463 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10464 fold_convert (type, arg0),
10465 fold_convert (type, arg1));
10467 strict_overflow_p = false;
10468 if (TREE_CODE (arg1) == INTEGER_CST
10469 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10470 &strict_overflow_p)))
10472 if (strict_overflow_p)
10473 fold_overflow_warning (("assuming signed overflow does not occur "
10474 "when simplifying division"),
10475 WARN_STRICT_OVERFLOW_MISC);
10476 return fold_convert_loc (loc, type, tem);
10479 return NULL_TREE;
10481 case CEIL_MOD_EXPR:
10482 case FLOOR_MOD_EXPR:
10483 case ROUND_MOD_EXPR:
10484 case TRUNC_MOD_EXPR:
10485 strict_overflow_p = false;
10486 if (TREE_CODE (arg1) == INTEGER_CST
10487 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10488 &strict_overflow_p)))
10490 if (strict_overflow_p)
10491 fold_overflow_warning (("assuming signed overflow does not occur "
10492 "when simplifying modulus"),
10493 WARN_STRICT_OVERFLOW_MISC);
10494 return fold_convert_loc (loc, type, tem);
10497 return NULL_TREE;
10499 case LROTATE_EXPR:
10500 case RROTATE_EXPR:
10501 case RSHIFT_EXPR:
10502 case LSHIFT_EXPR:
10503 /* Since negative shift count is not well-defined,
10504 don't try to compute it in the compiler. */
10505 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10506 return NULL_TREE;
10508 prec = element_precision (type);
10510 /* If we have a rotate of a bit operation with the rotate count and
10511 the second operand of the bit operation both constant,
10512 permute the two operations. */
10513 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10514 && (TREE_CODE (arg0) == BIT_AND_EXPR
10515 || TREE_CODE (arg0) == BIT_IOR_EXPR
10516 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10517 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10518 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10519 fold_build2_loc (loc, code, type,
10520 TREE_OPERAND (arg0, 0), arg1),
10521 fold_build2_loc (loc, code, type,
10522 TREE_OPERAND (arg0, 1), arg1));
10524 /* Two consecutive rotates adding up to the some integer
10525 multiple of the precision of the type can be ignored. */
10526 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10527 && TREE_CODE (arg0) == RROTATE_EXPR
10528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10529 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10530 prec) == 0)
10531 return TREE_OPERAND (arg0, 0);
10533 return NULL_TREE;
10535 case MIN_EXPR:
10536 case MAX_EXPR:
10537 goto associate;
10539 case TRUTH_ANDIF_EXPR:
10540 /* Note that the operands of this must be ints
10541 and their values must be 0 or 1.
10542 ("true" is a fixed value perhaps depending on the language.) */
10543 /* If first arg is constant zero, return it. */
10544 if (integer_zerop (arg0))
10545 return fold_convert_loc (loc, type, arg0);
10546 case TRUTH_AND_EXPR:
10547 /* If either arg is constant true, drop it. */
10548 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10549 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10550 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10551 /* Preserve sequence points. */
10552 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10553 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10554 /* If second arg is constant zero, result is zero, but first arg
10555 must be evaluated. */
10556 if (integer_zerop (arg1))
10557 return omit_one_operand_loc (loc, type, arg1, arg0);
10558 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10559 case will be handled here. */
10560 if (integer_zerop (arg0))
10561 return omit_one_operand_loc (loc, type, arg0, arg1);
10563 /* !X && X is always false. */
10564 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10565 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10566 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10567 /* X && !X is always false. */
10568 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10570 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10572 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10573 means A >= Y && A != MAX, but in this case we know that
10574 A < X <= MAX. */
10576 if (!TREE_SIDE_EFFECTS (arg0)
10577 && !TREE_SIDE_EFFECTS (arg1))
10579 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10580 if (tem && !operand_equal_p (tem, arg0, 0))
10581 return fold_build2_loc (loc, code, type, tem, arg1);
10583 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10584 if (tem && !operand_equal_p (tem, arg1, 0))
10585 return fold_build2_loc (loc, code, type, arg0, tem);
10588 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10589 != NULL_TREE)
10590 return tem;
10592 return NULL_TREE;
10594 case TRUTH_ORIF_EXPR:
10595 /* Note that the operands of this must be ints
10596 and their values must be 0 or true.
10597 ("true" is a fixed value perhaps depending on the language.) */
10598 /* If first arg is constant true, return it. */
10599 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10600 return fold_convert_loc (loc, type, arg0);
10601 case TRUTH_OR_EXPR:
10602 /* If either arg is constant zero, drop it. */
10603 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10604 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10605 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10606 /* Preserve sequence points. */
10607 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10608 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10609 /* If second arg is constant true, result is true, but we must
10610 evaluate first arg. */
10611 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10612 return omit_one_operand_loc (loc, type, arg1, arg0);
10613 /* Likewise for first arg, but note this only occurs here for
10614 TRUTH_OR_EXPR. */
10615 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10616 return omit_one_operand_loc (loc, type, arg0, arg1);
10618 /* !X || X is always true. */
10619 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10620 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10621 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10622 /* X || !X is always true. */
10623 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10624 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10625 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10627 /* (X && !Y) || (!X && Y) is X ^ Y */
10628 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10629 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10631 tree a0, a1, l0, l1, n0, n1;
10633 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10634 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10636 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10637 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10639 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10640 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10642 if ((operand_equal_p (n0, a0, 0)
10643 && operand_equal_p (n1, a1, 0))
10644 || (operand_equal_p (n0, a1, 0)
10645 && operand_equal_p (n1, a0, 0)))
10646 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10649 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10650 != NULL_TREE)
10651 return tem;
10653 return NULL_TREE;
10655 case TRUTH_XOR_EXPR:
10656 /* If the second arg is constant zero, drop it. */
10657 if (integer_zerop (arg1))
10658 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10659 /* If the second arg is constant true, this is a logical inversion. */
10660 if (integer_onep (arg1))
10662 tem = invert_truthvalue_loc (loc, arg0);
10663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10665 /* Identical arguments cancel to zero. */
10666 if (operand_equal_p (arg0, arg1, 0))
10667 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10669 /* !X ^ X is always true. */
10670 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10671 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10672 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10674 /* X ^ !X is always true. */
10675 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10676 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10677 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10679 return NULL_TREE;
10681 case EQ_EXPR:
10682 case NE_EXPR:
10683 STRIP_NOPS (arg0);
10684 STRIP_NOPS (arg1);
10686 tem = fold_comparison (loc, code, type, op0, op1);
10687 if (tem != NULL_TREE)
10688 return tem;
10690 /* bool_var != 1 becomes !bool_var. */
10691 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10692 && code == NE_EXPR)
10693 return fold_convert_loc (loc, type,
10694 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10695 TREE_TYPE (arg0), arg0));
10697 /* bool_var == 0 becomes !bool_var. */
10698 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10699 && code == EQ_EXPR)
10700 return fold_convert_loc (loc, type,
10701 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10702 TREE_TYPE (arg0), arg0));
10704 /* !exp != 0 becomes !exp */
10705 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10706 && code == NE_EXPR)
10707 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10709 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10710 if ((TREE_CODE (arg0) == PLUS_EXPR
10711 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10712 || TREE_CODE (arg0) == MINUS_EXPR)
10713 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10714 0)),
10715 arg1, 0)
10716 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10717 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10719 tree val = TREE_OPERAND (arg0, 1);
10720 val = fold_build2_loc (loc, code, type, val,
10721 build_int_cst (TREE_TYPE (val), 0));
10722 return omit_two_operands_loc (loc, type, val,
10723 TREE_OPERAND (arg0, 0), arg1);
10726 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10727 if ((TREE_CODE (arg1) == PLUS_EXPR
10728 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10729 || TREE_CODE (arg1) == MINUS_EXPR)
10730 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10731 0)),
10732 arg0, 0)
10733 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10734 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10736 tree val = TREE_OPERAND (arg1, 1);
10737 val = fold_build2_loc (loc, code, type, val,
10738 build_int_cst (TREE_TYPE (val), 0));
10739 return omit_two_operands_loc (loc, type, val,
10740 TREE_OPERAND (arg1, 0), arg0);
10743 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10744 if (TREE_CODE (arg0) == MINUS_EXPR
10745 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10746 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10747 1)),
10748 arg1, 0)
10749 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10750 return omit_two_operands_loc (loc, type,
10751 code == NE_EXPR
10752 ? boolean_true_node : boolean_false_node,
10753 TREE_OPERAND (arg0, 1), arg1);
10755 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10756 if (TREE_CODE (arg1) == MINUS_EXPR
10757 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10758 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10759 1)),
10760 arg0, 0)
10761 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10762 return omit_two_operands_loc (loc, type,
10763 code == NE_EXPR
10764 ? boolean_true_node : boolean_false_node,
10765 TREE_OPERAND (arg1, 1), arg0);
10767 /* If this is an EQ or NE comparison with zero and ARG0 is
10768 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10769 two operations, but the latter can be done in one less insn
10770 on machines that have only two-operand insns or on which a
10771 constant cannot be the first operand. */
10772 if (TREE_CODE (arg0) == BIT_AND_EXPR
10773 && integer_zerop (arg1))
10775 tree arg00 = TREE_OPERAND (arg0, 0);
10776 tree arg01 = TREE_OPERAND (arg0, 1);
10777 if (TREE_CODE (arg00) == LSHIFT_EXPR
10778 && integer_onep (TREE_OPERAND (arg00, 0)))
10780 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10781 arg01, TREE_OPERAND (arg00, 1));
10782 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10783 build_int_cst (TREE_TYPE (arg0), 1));
10784 return fold_build2_loc (loc, code, type,
10785 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10786 arg1);
10788 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10789 && integer_onep (TREE_OPERAND (arg01, 0)))
10791 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10792 arg00, TREE_OPERAND (arg01, 1));
10793 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10794 build_int_cst (TREE_TYPE (arg0), 1));
10795 return fold_build2_loc (loc, code, type,
10796 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10797 arg1);
10801 /* If this is an NE or EQ comparison of zero against the result of a
10802 signed MOD operation whose second operand is a power of 2, make
10803 the MOD operation unsigned since it is simpler and equivalent. */
10804 if (integer_zerop (arg1)
10805 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10806 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10807 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10808 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10809 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10810 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10812 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10813 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10814 fold_convert_loc (loc, newtype,
10815 TREE_OPERAND (arg0, 0)),
10816 fold_convert_loc (loc, newtype,
10817 TREE_OPERAND (arg0, 1)));
10819 return fold_build2_loc (loc, code, type, newmod,
10820 fold_convert_loc (loc, newtype, arg1));
10823 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10824 C1 is a valid shift constant, and C2 is a power of two, i.e.
10825 a single bit. */
10826 if (TREE_CODE (arg0) == BIT_AND_EXPR
10827 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10828 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10829 == INTEGER_CST
10830 && integer_pow2p (TREE_OPERAND (arg0, 1))
10831 && integer_zerop (arg1))
10833 tree itype = TREE_TYPE (arg0);
10834 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10835 prec = TYPE_PRECISION (itype);
10837 /* Check for a valid shift count. */
10838 if (wi::ltu_p (arg001, prec))
10840 tree arg01 = TREE_OPERAND (arg0, 1);
10841 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10842 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10843 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10844 can be rewritten as (X & (C2 << C1)) != 0. */
10845 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10847 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10848 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10849 return fold_build2_loc (loc, code, type, tem,
10850 fold_convert_loc (loc, itype, arg1));
10852 /* Otherwise, for signed (arithmetic) shifts,
10853 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10854 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10855 else if (!TYPE_UNSIGNED (itype))
10856 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10857 arg000, build_int_cst (itype, 0));
10858 /* Otherwise, of unsigned (logical) shifts,
10859 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10860 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10861 else
10862 return omit_one_operand_loc (loc, type,
10863 code == EQ_EXPR ? integer_one_node
10864 : integer_zero_node,
10865 arg000);
10869 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10870 Similarly for NE_EXPR. */
10871 if (TREE_CODE (arg0) == BIT_AND_EXPR
10872 && TREE_CODE (arg1) == INTEGER_CST
10873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10875 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10876 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10877 TREE_OPERAND (arg0, 1));
10878 tree dandnotc
10879 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10880 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10881 notc);
10882 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10883 if (integer_nonzerop (dandnotc))
10884 return omit_one_operand_loc (loc, type, rslt, arg0);
10887 /* If this is a comparison of a field, we may be able to simplify it. */
10888 if ((TREE_CODE (arg0) == COMPONENT_REF
10889 || TREE_CODE (arg0) == BIT_FIELD_REF)
10890 /* Handle the constant case even without -O
10891 to make sure the warnings are given. */
10892 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10894 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10895 if (t1)
10896 return t1;
10899 /* Optimize comparisons of strlen vs zero to a compare of the
10900 first character of the string vs zero. To wit,
10901 strlen(ptr) == 0 => *ptr == 0
10902 strlen(ptr) != 0 => *ptr != 0
10903 Other cases should reduce to one of these two (or a constant)
10904 due to the return value of strlen being unsigned. */
10905 if (TREE_CODE (arg0) == CALL_EXPR
10906 && integer_zerop (arg1))
10908 tree fndecl = get_callee_fndecl (arg0);
10910 if (fndecl
10911 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10912 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10913 && call_expr_nargs (arg0) == 1
10914 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10916 tree iref = build_fold_indirect_ref_loc (loc,
10917 CALL_EXPR_ARG (arg0, 0));
10918 return fold_build2_loc (loc, code, type, iref,
10919 build_int_cst (TREE_TYPE (iref), 0));
10923 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10924 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10925 if (TREE_CODE (arg0) == RSHIFT_EXPR
10926 && integer_zerop (arg1)
10927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10929 tree arg00 = TREE_OPERAND (arg0, 0);
10930 tree arg01 = TREE_OPERAND (arg0, 1);
10931 tree itype = TREE_TYPE (arg00);
10932 if (wi::eq_p (arg01, element_precision (itype) - 1))
10934 if (TYPE_UNSIGNED (itype))
10936 itype = signed_type_for (itype);
10937 arg00 = fold_convert_loc (loc, itype, arg00);
10939 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10940 type, arg00, build_zero_cst (itype));
10944 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10945 (X & C) == 0 when C is a single bit. */
10946 if (TREE_CODE (arg0) == BIT_AND_EXPR
10947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10948 && integer_zerop (arg1)
10949 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10951 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10952 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10953 TREE_OPERAND (arg0, 1));
10954 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10955 type, tem,
10956 fold_convert_loc (loc, TREE_TYPE (arg0),
10957 arg1));
10960 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10961 constant C is a power of two, i.e. a single bit. */
10962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10964 && integer_zerop (arg1)
10965 && integer_pow2p (TREE_OPERAND (arg0, 1))
10966 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10967 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10969 tree arg00 = TREE_OPERAND (arg0, 0);
10970 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10971 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10974 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10975 when is C is a power of two, i.e. a single bit. */
10976 if (TREE_CODE (arg0) == BIT_AND_EXPR
10977 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10978 && integer_zerop (arg1)
10979 && integer_pow2p (TREE_OPERAND (arg0, 1))
10980 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10981 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10983 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10984 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10985 arg000, TREE_OPERAND (arg0, 1));
10986 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10987 tem, build_int_cst (TREE_TYPE (tem), 0));
10990 if (integer_zerop (arg1)
10991 && tree_expr_nonzero_p (arg0))
10993 tree res = constant_boolean_node (code==NE_EXPR, type);
10994 return omit_one_operand_loc (loc, type, res, arg0);
10997 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10998 if (TREE_CODE (arg0) == BIT_AND_EXPR
10999 && TREE_CODE (arg1) == BIT_AND_EXPR)
11001 tree arg00 = TREE_OPERAND (arg0, 0);
11002 tree arg01 = TREE_OPERAND (arg0, 1);
11003 tree arg10 = TREE_OPERAND (arg1, 0);
11004 tree arg11 = TREE_OPERAND (arg1, 1);
11005 tree itype = TREE_TYPE (arg0);
11007 if (operand_equal_p (arg01, arg11, 0))
11008 return fold_build2_loc (loc, code, type,
11009 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11010 fold_build2_loc (loc,
11011 BIT_XOR_EXPR, itype,
11012 arg00, arg10),
11013 arg01),
11014 build_zero_cst (itype));
11016 if (operand_equal_p (arg01, arg10, 0))
11017 return fold_build2_loc (loc, code, type,
11018 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11019 fold_build2_loc (loc,
11020 BIT_XOR_EXPR, itype,
11021 arg00, arg11),
11022 arg01),
11023 build_zero_cst (itype));
11025 if (operand_equal_p (arg00, arg11, 0))
11026 return fold_build2_loc (loc, code, type,
11027 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11028 fold_build2_loc (loc,
11029 BIT_XOR_EXPR, itype,
11030 arg01, arg10),
11031 arg00),
11032 build_zero_cst (itype));
11034 if (operand_equal_p (arg00, arg10, 0))
11035 return fold_build2_loc (loc, code, type,
11036 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11037 fold_build2_loc (loc,
11038 BIT_XOR_EXPR, itype,
11039 arg01, arg11),
11040 arg00),
11041 build_zero_cst (itype));
11044 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11045 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11047 tree arg00 = TREE_OPERAND (arg0, 0);
11048 tree arg01 = TREE_OPERAND (arg0, 1);
11049 tree arg10 = TREE_OPERAND (arg1, 0);
11050 tree arg11 = TREE_OPERAND (arg1, 1);
11051 tree itype = TREE_TYPE (arg0);
11053 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11054 operand_equal_p guarantees no side-effects so we don't need
11055 to use omit_one_operand on Z. */
11056 if (operand_equal_p (arg01, arg11, 0))
11057 return fold_build2_loc (loc, code, type, arg00,
11058 fold_convert_loc (loc, TREE_TYPE (arg00),
11059 arg10));
11060 if (operand_equal_p (arg01, arg10, 0))
11061 return fold_build2_loc (loc, code, type, arg00,
11062 fold_convert_loc (loc, TREE_TYPE (arg00),
11063 arg11));
11064 if (operand_equal_p (arg00, arg11, 0))
11065 return fold_build2_loc (loc, code, type, arg01,
11066 fold_convert_loc (loc, TREE_TYPE (arg01),
11067 arg10));
11068 if (operand_equal_p (arg00, arg10, 0))
11069 return fold_build2_loc (loc, code, type, arg01,
11070 fold_convert_loc (loc, TREE_TYPE (arg01),
11071 arg11));
11073 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11074 if (TREE_CODE (arg01) == INTEGER_CST
11075 && TREE_CODE (arg11) == INTEGER_CST)
11077 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11078 fold_convert_loc (loc, itype, arg11));
11079 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11080 return fold_build2_loc (loc, code, type, tem,
11081 fold_convert_loc (loc, itype, arg10));
11085 /* Attempt to simplify equality/inequality comparisons of complex
11086 values. Only lower the comparison if the result is known or
11087 can be simplified to a single scalar comparison. */
11088 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11089 || TREE_CODE (arg0) == COMPLEX_CST)
11090 && (TREE_CODE (arg1) == COMPLEX_EXPR
11091 || TREE_CODE (arg1) == COMPLEX_CST))
11093 tree real0, imag0, real1, imag1;
11094 tree rcond, icond;
11096 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11098 real0 = TREE_OPERAND (arg0, 0);
11099 imag0 = TREE_OPERAND (arg0, 1);
11101 else
11103 real0 = TREE_REALPART (arg0);
11104 imag0 = TREE_IMAGPART (arg0);
11107 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11109 real1 = TREE_OPERAND (arg1, 0);
11110 imag1 = TREE_OPERAND (arg1, 1);
11112 else
11114 real1 = TREE_REALPART (arg1);
11115 imag1 = TREE_IMAGPART (arg1);
11118 rcond = fold_binary_loc (loc, code, type, real0, real1);
11119 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11121 if (integer_zerop (rcond))
11123 if (code == EQ_EXPR)
11124 return omit_two_operands_loc (loc, type, boolean_false_node,
11125 imag0, imag1);
11126 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11128 else
11130 if (code == NE_EXPR)
11131 return omit_two_operands_loc (loc, type, boolean_true_node,
11132 imag0, imag1);
11133 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11137 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11138 if (icond && TREE_CODE (icond) == INTEGER_CST)
11140 if (integer_zerop (icond))
11142 if (code == EQ_EXPR)
11143 return omit_two_operands_loc (loc, type, boolean_false_node,
11144 real0, real1);
11145 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11147 else
11149 if (code == NE_EXPR)
11150 return omit_two_operands_loc (loc, type, boolean_true_node,
11151 real0, real1);
11152 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11157 return NULL_TREE;
11159 case LT_EXPR:
11160 case GT_EXPR:
11161 case LE_EXPR:
11162 case GE_EXPR:
11163 tem = fold_comparison (loc, code, type, op0, op1);
11164 if (tem != NULL_TREE)
11165 return tem;
11167 /* Transform comparisons of the form X +- C CMP X. */
11168 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11170 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11171 && !HONOR_SNANS (arg0))
11172 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11173 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11175 tree arg01 = TREE_OPERAND (arg0, 1);
11176 enum tree_code code0 = TREE_CODE (arg0);
11177 int is_positive;
11179 if (TREE_CODE (arg01) == REAL_CST)
11180 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11181 else
11182 is_positive = tree_int_cst_sgn (arg01);
11184 /* (X - c) > X becomes false. */
11185 if (code == GT_EXPR
11186 && ((code0 == MINUS_EXPR && is_positive >= 0)
11187 || (code0 == PLUS_EXPR && is_positive <= 0)))
11189 if (TREE_CODE (arg01) == INTEGER_CST
11190 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11191 fold_overflow_warning (("assuming signed overflow does not "
11192 "occur when assuming that (X - c) > X "
11193 "is always false"),
11194 WARN_STRICT_OVERFLOW_ALL);
11195 return constant_boolean_node (0, type);
11198 /* Likewise (X + c) < X becomes false. */
11199 if (code == LT_EXPR
11200 && ((code0 == PLUS_EXPR && is_positive >= 0)
11201 || (code0 == MINUS_EXPR && is_positive <= 0)))
11203 if (TREE_CODE (arg01) == INTEGER_CST
11204 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11205 fold_overflow_warning (("assuming signed overflow does not "
11206 "occur when assuming that "
11207 "(X + c) < X is always false"),
11208 WARN_STRICT_OVERFLOW_ALL);
11209 return constant_boolean_node (0, type);
11212 /* Convert (X - c) <= X to true. */
11213 if (!HONOR_NANS (arg1)
11214 && code == LE_EXPR
11215 && ((code0 == MINUS_EXPR && is_positive >= 0)
11216 || (code0 == PLUS_EXPR && is_positive <= 0)))
11218 if (TREE_CODE (arg01) == INTEGER_CST
11219 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11220 fold_overflow_warning (("assuming signed overflow does not "
11221 "occur when assuming that "
11222 "(X - c) <= X is always true"),
11223 WARN_STRICT_OVERFLOW_ALL);
11224 return constant_boolean_node (1, type);
11227 /* Convert (X + c) >= X to true. */
11228 if (!HONOR_NANS (arg1)
11229 && code == GE_EXPR
11230 && ((code0 == PLUS_EXPR && is_positive >= 0)
11231 || (code0 == MINUS_EXPR && is_positive <= 0)))
11233 if (TREE_CODE (arg01) == INTEGER_CST
11234 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11235 fold_overflow_warning (("assuming signed overflow does not "
11236 "occur when assuming that "
11237 "(X + c) >= X is always true"),
11238 WARN_STRICT_OVERFLOW_ALL);
11239 return constant_boolean_node (1, type);
11242 if (TREE_CODE (arg01) == INTEGER_CST)
11244 /* Convert X + c > X and X - c < X to true for integers. */
11245 if (code == GT_EXPR
11246 && ((code0 == PLUS_EXPR && is_positive > 0)
11247 || (code0 == MINUS_EXPR && is_positive < 0)))
11249 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11250 fold_overflow_warning (("assuming signed overflow does "
11251 "not occur when assuming that "
11252 "(X + c) > X is always true"),
11253 WARN_STRICT_OVERFLOW_ALL);
11254 return constant_boolean_node (1, type);
11257 if (code == LT_EXPR
11258 && ((code0 == MINUS_EXPR && is_positive > 0)
11259 || (code0 == PLUS_EXPR && is_positive < 0)))
11261 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11262 fold_overflow_warning (("assuming signed overflow does "
11263 "not occur when assuming that "
11264 "(X - c) < X is always true"),
11265 WARN_STRICT_OVERFLOW_ALL);
11266 return constant_boolean_node (1, type);
11269 /* Convert X + c <= X and X - c >= X to false for integers. */
11270 if (code == LE_EXPR
11271 && ((code0 == PLUS_EXPR && is_positive > 0)
11272 || (code0 == MINUS_EXPR && is_positive < 0)))
11274 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11275 fold_overflow_warning (("assuming signed overflow does "
11276 "not occur when assuming that "
11277 "(X + c) <= X is always false"),
11278 WARN_STRICT_OVERFLOW_ALL);
11279 return constant_boolean_node (0, type);
11282 if (code == GE_EXPR
11283 && ((code0 == MINUS_EXPR && is_positive > 0)
11284 || (code0 == PLUS_EXPR && is_positive < 0)))
11286 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11287 fold_overflow_warning (("assuming signed overflow does "
11288 "not occur when assuming that "
11289 "(X - c) >= X is always false"),
11290 WARN_STRICT_OVERFLOW_ALL);
11291 return constant_boolean_node (0, type);
11296 /* If we are comparing an ABS_EXPR with a constant, we can
11297 convert all the cases into explicit comparisons, but they may
11298 well not be faster than doing the ABS and one comparison.
11299 But ABS (X) <= C is a range comparison, which becomes a subtraction
11300 and a comparison, and is probably faster. */
11301 if (code == LE_EXPR
11302 && TREE_CODE (arg1) == INTEGER_CST
11303 && TREE_CODE (arg0) == ABS_EXPR
11304 && ! TREE_SIDE_EFFECTS (arg0)
11305 && (0 != (tem = negate_expr (arg1)))
11306 && TREE_CODE (tem) == INTEGER_CST
11307 && !TREE_OVERFLOW (tem))
11308 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11309 build2 (GE_EXPR, type,
11310 TREE_OPERAND (arg0, 0), tem),
11311 build2 (LE_EXPR, type,
11312 TREE_OPERAND (arg0, 0), arg1));
11314 /* Convert ABS_EXPR<x> >= 0 to true. */
11315 strict_overflow_p = false;
11316 if (code == GE_EXPR
11317 && (integer_zerop (arg1)
11318 || (! HONOR_NANS (arg0)
11319 && real_zerop (arg1)))
11320 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11322 if (strict_overflow_p)
11323 fold_overflow_warning (("assuming signed overflow does not occur "
11324 "when simplifying comparison of "
11325 "absolute value and zero"),
11326 WARN_STRICT_OVERFLOW_CONDITIONAL);
11327 return omit_one_operand_loc (loc, type,
11328 constant_boolean_node (true, type),
11329 arg0);
11332 /* Convert ABS_EXPR<x> < 0 to false. */
11333 strict_overflow_p = false;
11334 if (code == LT_EXPR
11335 && (integer_zerop (arg1) || real_zerop (arg1))
11336 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11338 if (strict_overflow_p)
11339 fold_overflow_warning (("assuming signed overflow does not occur "
11340 "when simplifying comparison of "
11341 "absolute value and zero"),
11342 WARN_STRICT_OVERFLOW_CONDITIONAL);
11343 return omit_one_operand_loc (loc, type,
11344 constant_boolean_node (false, type),
11345 arg0);
11348 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11349 and similarly for >= into !=. */
11350 if ((code == LT_EXPR || code == GE_EXPR)
11351 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11352 && TREE_CODE (arg1) == LSHIFT_EXPR
11353 && integer_onep (TREE_OPERAND (arg1, 0)))
11354 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11355 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11356 TREE_OPERAND (arg1, 1)),
11357 build_zero_cst (TREE_TYPE (arg0)));
11359 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11360 otherwise Y might be >= # of bits in X's type and thus e.g.
11361 (unsigned char) (1 << Y) for Y 15 might be 0.
11362 If the cast is widening, then 1 << Y should have unsigned type,
11363 otherwise if Y is number of bits in the signed shift type minus 1,
11364 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11365 31 might be 0xffffffff80000000. */
11366 if ((code == LT_EXPR || code == GE_EXPR)
11367 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11368 && CONVERT_EXPR_P (arg1)
11369 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11370 && (element_precision (TREE_TYPE (arg1))
11371 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11372 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11373 || (element_precision (TREE_TYPE (arg1))
11374 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11375 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11377 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11378 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11379 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11380 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11381 build_zero_cst (TREE_TYPE (arg0)));
11384 return NULL_TREE;
11386 case UNORDERED_EXPR:
11387 case ORDERED_EXPR:
11388 case UNLT_EXPR:
11389 case UNLE_EXPR:
11390 case UNGT_EXPR:
11391 case UNGE_EXPR:
11392 case UNEQ_EXPR:
11393 case LTGT_EXPR:
11394 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11396 tree targ0 = strip_float_extensions (arg0);
11397 tree targ1 = strip_float_extensions (arg1);
11398 tree newtype = TREE_TYPE (targ0);
11400 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11401 newtype = TREE_TYPE (targ1);
11403 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11404 return fold_build2_loc (loc, code, type,
11405 fold_convert_loc (loc, newtype, targ0),
11406 fold_convert_loc (loc, newtype, targ1));
11409 return NULL_TREE;
11411 case COMPOUND_EXPR:
11412 /* When pedantic, a compound expression can be neither an lvalue
11413 nor an integer constant expression. */
11414 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11415 return NULL_TREE;
11416 /* Don't let (0, 0) be null pointer constant. */
11417 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11418 : fold_convert_loc (loc, type, arg1);
11419 return pedantic_non_lvalue_loc (loc, tem);
11421 case ASSERT_EXPR:
11422 /* An ASSERT_EXPR should never be passed to fold_binary. */
11423 gcc_unreachable ();
11425 default:
11426 return NULL_TREE;
11427 } /* switch (code) */
11430 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11431 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11432 of GOTO_EXPR. */
11434 static tree
11435 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11437 switch (TREE_CODE (*tp))
11439 case LABEL_EXPR:
11440 return *tp;
11442 case GOTO_EXPR:
11443 *walk_subtrees = 0;
11445 /* ... fall through ... */
11447 default:
11448 return NULL_TREE;
11452 /* Return whether the sub-tree ST contains a label which is accessible from
11453 outside the sub-tree. */
11455 static bool
11456 contains_label_p (tree st)
11458 return
11459 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11462 /* Fold a ternary expression of code CODE and type TYPE with operands
11463 OP0, OP1, and OP2. Return the folded expression if folding is
11464 successful. Otherwise, return NULL_TREE. */
11466 tree
11467 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11468 tree op0, tree op1, tree op2)
11470 tree tem;
11471 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11472 enum tree_code_class kind = TREE_CODE_CLASS (code);
11474 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11475 && TREE_CODE_LENGTH (code) == 3);
11477 /* If this is a commutative operation, and OP0 is a constant, move it
11478 to OP1 to reduce the number of tests below. */
11479 if (commutative_ternary_tree_code (code)
11480 && tree_swap_operands_p (op0, op1, true))
11481 return fold_build3_loc (loc, code, type, op1, op0, op2);
11483 tem = generic_simplify (loc, code, type, op0, op1, op2);
11484 if (tem)
11485 return tem;
11487 /* Strip any conversions that don't change the mode. This is safe
11488 for every expression, except for a comparison expression because
11489 its signedness is derived from its operands. So, in the latter
11490 case, only strip conversions that don't change the signedness.
11492 Note that this is done as an internal manipulation within the
11493 constant folder, in order to find the simplest representation of
11494 the arguments so that their form can be studied. In any cases,
11495 the appropriate type conversions should be put back in the tree
11496 that will get out of the constant folder. */
11497 if (op0)
11499 arg0 = op0;
11500 STRIP_NOPS (arg0);
11503 if (op1)
11505 arg1 = op1;
11506 STRIP_NOPS (arg1);
11509 if (op2)
11511 arg2 = op2;
11512 STRIP_NOPS (arg2);
11515 switch (code)
11517 case COMPONENT_REF:
11518 if (TREE_CODE (arg0) == CONSTRUCTOR
11519 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11521 unsigned HOST_WIDE_INT idx;
11522 tree field, value;
11523 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11524 if (field == arg1)
11525 return value;
11527 return NULL_TREE;
11529 case COND_EXPR:
11530 case VEC_COND_EXPR:
11531 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11532 so all simple results must be passed through pedantic_non_lvalue. */
11533 if (TREE_CODE (arg0) == INTEGER_CST)
11535 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11536 tem = integer_zerop (arg0) ? op2 : op1;
11537 /* Only optimize constant conditions when the selected branch
11538 has the same type as the COND_EXPR. This avoids optimizing
11539 away "c ? x : throw", where the throw has a void type.
11540 Avoid throwing away that operand which contains label. */
11541 if ((!TREE_SIDE_EFFECTS (unused_op)
11542 || !contains_label_p (unused_op))
11543 && (! VOID_TYPE_P (TREE_TYPE (tem))
11544 || VOID_TYPE_P (type)))
11545 return pedantic_non_lvalue_loc (loc, tem);
11546 return NULL_TREE;
11548 else if (TREE_CODE (arg0) == VECTOR_CST)
11550 if ((TREE_CODE (arg1) == VECTOR_CST
11551 || TREE_CODE (arg1) == CONSTRUCTOR)
11552 && (TREE_CODE (arg2) == VECTOR_CST
11553 || TREE_CODE (arg2) == CONSTRUCTOR))
11555 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11556 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11557 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11558 for (i = 0; i < nelts; i++)
11560 tree val = VECTOR_CST_ELT (arg0, i);
11561 if (integer_all_onesp (val))
11562 sel[i] = i;
11563 else if (integer_zerop (val))
11564 sel[i] = nelts + i;
11565 else /* Currently unreachable. */
11566 return NULL_TREE;
11568 tree t = fold_vec_perm (type, arg1, arg2, sel);
11569 if (t != NULL_TREE)
11570 return t;
11574 /* If we have A op B ? A : C, we may be able to convert this to a
11575 simpler expression, depending on the operation and the values
11576 of B and C. Signed zeros prevent all of these transformations,
11577 for reasons given above each one.
11579 Also try swapping the arguments and inverting the conditional. */
11580 if (COMPARISON_CLASS_P (arg0)
11581 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11582 arg1, TREE_OPERAND (arg0, 1))
11583 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11585 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11586 if (tem)
11587 return tem;
11590 if (COMPARISON_CLASS_P (arg0)
11591 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11592 op2,
11593 TREE_OPERAND (arg0, 1))
11594 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11596 location_t loc0 = expr_location_or (arg0, loc);
11597 tem = fold_invert_truthvalue (loc0, arg0);
11598 if (tem && COMPARISON_CLASS_P (tem))
11600 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11601 if (tem)
11602 return tem;
11606 /* If the second operand is simpler than the third, swap them
11607 since that produces better jump optimization results. */
11608 if (truth_value_p (TREE_CODE (arg0))
11609 && tree_swap_operands_p (op1, op2, false))
11611 location_t loc0 = expr_location_or (arg0, loc);
11612 /* See if this can be inverted. If it can't, possibly because
11613 it was a floating-point inequality comparison, don't do
11614 anything. */
11615 tem = fold_invert_truthvalue (loc0, arg0);
11616 if (tem)
11617 return fold_build3_loc (loc, code, type, tem, op2, op1);
11620 /* Convert A ? 1 : 0 to simply A. */
11621 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11622 : (integer_onep (op1)
11623 && !VECTOR_TYPE_P (type)))
11624 && integer_zerop (op2)
11625 /* If we try to convert OP0 to our type, the
11626 call to fold will try to move the conversion inside
11627 a COND, which will recurse. In that case, the COND_EXPR
11628 is probably the best choice, so leave it alone. */
11629 && type == TREE_TYPE (arg0))
11630 return pedantic_non_lvalue_loc (loc, arg0);
11632 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11633 over COND_EXPR in cases such as floating point comparisons. */
11634 if (integer_zerop (op1)
11635 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11636 : (integer_onep (op2)
11637 && !VECTOR_TYPE_P (type)))
11638 && truth_value_p (TREE_CODE (arg0)))
11639 return pedantic_non_lvalue_loc (loc,
11640 fold_convert_loc (loc, type,
11641 invert_truthvalue_loc (loc,
11642 arg0)));
11644 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11645 if (TREE_CODE (arg0) == LT_EXPR
11646 && integer_zerop (TREE_OPERAND (arg0, 1))
11647 && integer_zerop (op2)
11648 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11650 /* sign_bit_p looks through both zero and sign extensions,
11651 but for this optimization only sign extensions are
11652 usable. */
11653 tree tem2 = TREE_OPERAND (arg0, 0);
11654 while (tem != tem2)
11656 if (TREE_CODE (tem2) != NOP_EXPR
11657 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11659 tem = NULL_TREE;
11660 break;
11662 tem2 = TREE_OPERAND (tem2, 0);
11664 /* sign_bit_p only checks ARG1 bits within A's precision.
11665 If <sign bit of A> has wider type than A, bits outside
11666 of A's precision in <sign bit of A> need to be checked.
11667 If they are all 0, this optimization needs to be done
11668 in unsigned A's type, if they are all 1 in signed A's type,
11669 otherwise this can't be done. */
11670 if (tem
11671 && TYPE_PRECISION (TREE_TYPE (tem))
11672 < TYPE_PRECISION (TREE_TYPE (arg1))
11673 && TYPE_PRECISION (TREE_TYPE (tem))
11674 < TYPE_PRECISION (type))
11676 int inner_width, outer_width;
11677 tree tem_type;
11679 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11680 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11681 if (outer_width > TYPE_PRECISION (type))
11682 outer_width = TYPE_PRECISION (type);
11684 wide_int mask = wi::shifted_mask
11685 (inner_width, outer_width - inner_width, false,
11686 TYPE_PRECISION (TREE_TYPE (arg1)));
11688 wide_int common = mask & arg1;
11689 if (common == mask)
11691 tem_type = signed_type_for (TREE_TYPE (tem));
11692 tem = fold_convert_loc (loc, tem_type, tem);
11694 else if (common == 0)
11696 tem_type = unsigned_type_for (TREE_TYPE (tem));
11697 tem = fold_convert_loc (loc, tem_type, tem);
11699 else
11700 tem = NULL;
11703 if (tem)
11704 return
11705 fold_convert_loc (loc, type,
11706 fold_build2_loc (loc, BIT_AND_EXPR,
11707 TREE_TYPE (tem), tem,
11708 fold_convert_loc (loc,
11709 TREE_TYPE (tem),
11710 arg1)));
11713 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11714 already handled above. */
11715 if (TREE_CODE (arg0) == BIT_AND_EXPR
11716 && integer_onep (TREE_OPERAND (arg0, 1))
11717 && integer_zerop (op2)
11718 && integer_pow2p (arg1))
11720 tree tem = TREE_OPERAND (arg0, 0);
11721 STRIP_NOPS (tem);
11722 if (TREE_CODE (tem) == RSHIFT_EXPR
11723 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11724 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11725 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11726 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11727 TREE_OPERAND (tem, 0), arg1);
11730 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11731 is probably obsolete because the first operand should be a
11732 truth value (that's why we have the two cases above), but let's
11733 leave it in until we can confirm this for all front-ends. */
11734 if (integer_zerop (op2)
11735 && TREE_CODE (arg0) == NE_EXPR
11736 && integer_zerop (TREE_OPERAND (arg0, 1))
11737 && integer_pow2p (arg1)
11738 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11739 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11740 arg1, OEP_ONLY_CONST))
11741 return pedantic_non_lvalue_loc (loc,
11742 fold_convert_loc (loc, type,
11743 TREE_OPERAND (arg0, 0)));
11745 /* Disable the transformations below for vectors, since
11746 fold_binary_op_with_conditional_arg may undo them immediately,
11747 yielding an infinite loop. */
11748 if (code == VEC_COND_EXPR)
11749 return NULL_TREE;
11751 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11752 if (integer_zerop (op2)
11753 && truth_value_p (TREE_CODE (arg0))
11754 && truth_value_p (TREE_CODE (arg1))
11755 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11756 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11757 : TRUTH_ANDIF_EXPR,
11758 type, fold_convert_loc (loc, type, arg0), arg1);
11760 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11761 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11762 && truth_value_p (TREE_CODE (arg0))
11763 && truth_value_p (TREE_CODE (arg1))
11764 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11766 location_t loc0 = expr_location_or (arg0, loc);
11767 /* Only perform transformation if ARG0 is easily inverted. */
11768 tem = fold_invert_truthvalue (loc0, arg0);
11769 if (tem)
11770 return fold_build2_loc (loc, code == VEC_COND_EXPR
11771 ? BIT_IOR_EXPR
11772 : TRUTH_ORIF_EXPR,
11773 type, fold_convert_loc (loc, type, tem),
11774 arg1);
11777 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11778 if (integer_zerop (arg1)
11779 && truth_value_p (TREE_CODE (arg0))
11780 && truth_value_p (TREE_CODE (op2))
11781 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11783 location_t loc0 = expr_location_or (arg0, loc);
11784 /* Only perform transformation if ARG0 is easily inverted. */
11785 tem = fold_invert_truthvalue (loc0, arg0);
11786 if (tem)
11787 return fold_build2_loc (loc, code == VEC_COND_EXPR
11788 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11789 type, fold_convert_loc (loc, type, tem),
11790 op2);
11793 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11794 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11795 && truth_value_p (TREE_CODE (arg0))
11796 && truth_value_p (TREE_CODE (op2))
11797 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11798 return fold_build2_loc (loc, code == VEC_COND_EXPR
11799 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11800 type, fold_convert_loc (loc, type, arg0), op2);
11802 return NULL_TREE;
11804 case CALL_EXPR:
11805 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11806 of fold_ternary on them. */
11807 gcc_unreachable ();
11809 case BIT_FIELD_REF:
11810 if ((TREE_CODE (arg0) == VECTOR_CST
11811 || (TREE_CODE (arg0) == CONSTRUCTOR
11812 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11813 && (type == TREE_TYPE (TREE_TYPE (arg0))
11814 || (TREE_CODE (type) == VECTOR_TYPE
11815 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11817 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11818 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11819 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11820 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11822 if (n != 0
11823 && (idx % width) == 0
11824 && (n % width) == 0
11825 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11827 idx = idx / width;
11828 n = n / width;
11830 if (TREE_CODE (arg0) == VECTOR_CST)
11832 if (n == 1)
11833 return VECTOR_CST_ELT (arg0, idx);
11835 tree *vals = XALLOCAVEC (tree, n);
11836 for (unsigned i = 0; i < n; ++i)
11837 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11838 return build_vector (type, vals);
11841 /* Constructor elements can be subvectors. */
11842 unsigned HOST_WIDE_INT k = 1;
11843 if (CONSTRUCTOR_NELTS (arg0) != 0)
11845 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11846 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11847 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11850 /* We keep an exact subset of the constructor elements. */
11851 if ((idx % k) == 0 && (n % k) == 0)
11853 if (CONSTRUCTOR_NELTS (arg0) == 0)
11854 return build_constructor (type, NULL);
11855 idx /= k;
11856 n /= k;
11857 if (n == 1)
11859 if (idx < CONSTRUCTOR_NELTS (arg0))
11860 return CONSTRUCTOR_ELT (arg0, idx)->value;
11861 return build_zero_cst (type);
11864 vec<constructor_elt, va_gc> *vals;
11865 vec_alloc (vals, n);
11866 for (unsigned i = 0;
11867 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11868 ++i)
11869 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11870 CONSTRUCTOR_ELT
11871 (arg0, idx + i)->value);
11872 return build_constructor (type, vals);
11874 /* The bitfield references a single constructor element. */
11875 else if (idx + n <= (idx / k + 1) * k)
11877 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11878 return build_zero_cst (type);
11879 else if (n == k)
11880 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11881 else
11882 return fold_build3_loc (loc, code, type,
11883 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11884 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11889 /* A bit-field-ref that referenced the full argument can be stripped. */
11890 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11891 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11892 && integer_zerop (op2))
11893 return fold_convert_loc (loc, type, arg0);
11895 /* On constants we can use native encode/interpret to constant
11896 fold (nearly) all BIT_FIELD_REFs. */
11897 if (CONSTANT_CLASS_P (arg0)
11898 && can_native_interpret_type_p (type)
11899 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11900 /* This limitation should not be necessary, we just need to
11901 round this up to mode size. */
11902 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11903 /* Need bit-shifting of the buffer to relax the following. */
11904 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11906 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11907 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11908 unsigned HOST_WIDE_INT clen;
11909 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11910 /* ??? We cannot tell native_encode_expr to start at
11911 some random byte only. So limit us to a reasonable amount
11912 of work. */
11913 if (clen <= 4096)
11915 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11916 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11917 if (len > 0
11918 && len * BITS_PER_UNIT >= bitpos + bitsize)
11920 tree v = native_interpret_expr (type,
11921 b + bitpos / BITS_PER_UNIT,
11922 bitsize / BITS_PER_UNIT);
11923 if (v)
11924 return v;
11929 return NULL_TREE;
11931 case FMA_EXPR:
11932 /* For integers we can decompose the FMA if possible. */
11933 if (TREE_CODE (arg0) == INTEGER_CST
11934 && TREE_CODE (arg1) == INTEGER_CST)
11935 return fold_build2_loc (loc, PLUS_EXPR, type,
11936 const_binop (MULT_EXPR, arg0, arg1), arg2);
11937 if (integer_zerop (arg2))
11938 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11940 return fold_fma (loc, type, arg0, arg1, arg2);
11942 case VEC_PERM_EXPR:
11943 if (TREE_CODE (arg2) == VECTOR_CST)
11945 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11946 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11947 unsigned char *sel2 = sel + nelts;
11948 bool need_mask_canon = false;
11949 bool need_mask_canon2 = false;
11950 bool all_in_vec0 = true;
11951 bool all_in_vec1 = true;
11952 bool maybe_identity = true;
11953 bool single_arg = (op0 == op1);
11954 bool changed = false;
11956 mask2 = 2 * nelts - 1;
11957 mask = single_arg ? (nelts - 1) : mask2;
11958 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11959 for (i = 0; i < nelts; i++)
11961 tree val = VECTOR_CST_ELT (arg2, i);
11962 if (TREE_CODE (val) != INTEGER_CST)
11963 return NULL_TREE;
11965 /* Make sure that the perm value is in an acceptable
11966 range. */
11967 wide_int t = val;
11968 need_mask_canon |= wi::gtu_p (t, mask);
11969 need_mask_canon2 |= wi::gtu_p (t, mask2);
11970 sel[i] = t.to_uhwi () & mask;
11971 sel2[i] = t.to_uhwi () & mask2;
11973 if (sel[i] < nelts)
11974 all_in_vec1 = false;
11975 else
11976 all_in_vec0 = false;
11978 if ((sel[i] & (nelts-1)) != i)
11979 maybe_identity = false;
11982 if (maybe_identity)
11984 if (all_in_vec0)
11985 return op0;
11986 if (all_in_vec1)
11987 return op1;
11990 if (all_in_vec0)
11991 op1 = op0;
11992 else if (all_in_vec1)
11994 op0 = op1;
11995 for (i = 0; i < nelts; i++)
11996 sel[i] -= nelts;
11997 need_mask_canon = true;
12000 if ((TREE_CODE (op0) == VECTOR_CST
12001 || TREE_CODE (op0) == CONSTRUCTOR)
12002 && (TREE_CODE (op1) == VECTOR_CST
12003 || TREE_CODE (op1) == CONSTRUCTOR))
12005 tree t = fold_vec_perm (type, op0, op1, sel);
12006 if (t != NULL_TREE)
12007 return t;
12010 if (op0 == op1 && !single_arg)
12011 changed = true;
12013 /* Some targets are deficient and fail to expand a single
12014 argument permutation while still allowing an equivalent
12015 2-argument version. */
12016 if (need_mask_canon && arg2 == op2
12017 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12018 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12020 need_mask_canon = need_mask_canon2;
12021 sel = sel2;
12024 if (need_mask_canon && arg2 == op2)
12026 tree *tsel = XALLOCAVEC (tree, nelts);
12027 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12028 for (i = 0; i < nelts; i++)
12029 tsel[i] = build_int_cst (eltype, sel[i]);
12030 op2 = build_vector (TREE_TYPE (arg2), tsel);
12031 changed = true;
12034 if (changed)
12035 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12037 return NULL_TREE;
12039 default:
12040 return NULL_TREE;
12041 } /* switch (code) */
12044 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12045 of an array (or vector). */
12047 tree
12048 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
12050 tree index_type = NULL_TREE;
12051 offset_int low_bound = 0;
12053 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12055 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12056 if (domain_type && TYPE_MIN_VALUE (domain_type))
12058 /* Static constructors for variably sized objects makes no sense. */
12059 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12060 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12061 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12065 if (index_type)
12066 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12067 TYPE_SIGN (index_type));
12069 offset_int index = low_bound - 1;
12070 if (index_type)
12071 index = wi::ext (index, TYPE_PRECISION (index_type),
12072 TYPE_SIGN (index_type));
12074 offset_int max_index;
12075 unsigned HOST_WIDE_INT cnt;
12076 tree cfield, cval;
12078 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12080 /* Array constructor might explicitly set index, or specify a range,
12081 or leave index NULL meaning that it is next index after previous
12082 one. */
12083 if (cfield)
12085 if (TREE_CODE (cfield) == INTEGER_CST)
12086 max_index = index = wi::to_offset (cfield);
12087 else
12089 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12090 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12091 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12094 else
12096 index += 1;
12097 if (index_type)
12098 index = wi::ext (index, TYPE_PRECISION (index_type),
12099 TYPE_SIGN (index_type));
12100 max_index = index;
12103 /* Do we have match? */
12104 if (wi::cmpu (access_index, index) >= 0
12105 && wi::cmpu (access_index, max_index) <= 0)
12106 return cval;
12108 return NULL_TREE;
12111 /* Perform constant folding and related simplification of EXPR.
12112 The related simplifications include x*1 => x, x*0 => 0, etc.,
12113 and application of the associative law.
12114 NOP_EXPR conversions may be removed freely (as long as we
12115 are careful not to change the type of the overall expression).
12116 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12117 but we can constant-fold them if they have constant operands. */
12119 #ifdef ENABLE_FOLD_CHECKING
12120 # define fold(x) fold_1 (x)
12121 static tree fold_1 (tree);
12122 static
12123 #endif
12124 tree
12125 fold (tree expr)
12127 const tree t = expr;
12128 enum tree_code code = TREE_CODE (t);
12129 enum tree_code_class kind = TREE_CODE_CLASS (code);
12130 tree tem;
12131 location_t loc = EXPR_LOCATION (expr);
12133 /* Return right away if a constant. */
12134 if (kind == tcc_constant)
12135 return t;
12137 /* CALL_EXPR-like objects with variable numbers of operands are
12138 treated specially. */
12139 if (kind == tcc_vl_exp)
12141 if (code == CALL_EXPR)
12143 tem = fold_call_expr (loc, expr, false);
12144 return tem ? tem : expr;
12146 return expr;
12149 if (IS_EXPR_CODE_CLASS (kind))
12151 tree type = TREE_TYPE (t);
12152 tree op0, op1, op2;
12154 switch (TREE_CODE_LENGTH (code))
12156 case 1:
12157 op0 = TREE_OPERAND (t, 0);
12158 tem = fold_unary_loc (loc, code, type, op0);
12159 return tem ? tem : expr;
12160 case 2:
12161 op0 = TREE_OPERAND (t, 0);
12162 op1 = TREE_OPERAND (t, 1);
12163 tem = fold_binary_loc (loc, code, type, op0, op1);
12164 return tem ? tem : expr;
12165 case 3:
12166 op0 = TREE_OPERAND (t, 0);
12167 op1 = TREE_OPERAND (t, 1);
12168 op2 = TREE_OPERAND (t, 2);
12169 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12170 return tem ? tem : expr;
12171 default:
12172 break;
12176 switch (code)
12178 case ARRAY_REF:
12180 tree op0 = TREE_OPERAND (t, 0);
12181 tree op1 = TREE_OPERAND (t, 1);
12183 if (TREE_CODE (op1) == INTEGER_CST
12184 && TREE_CODE (op0) == CONSTRUCTOR
12185 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12187 tree val = get_array_ctor_element_at_index (op0,
12188 wi::to_offset (op1));
12189 if (val)
12190 return val;
12193 return t;
12196 /* Return a VECTOR_CST if possible. */
12197 case CONSTRUCTOR:
12199 tree type = TREE_TYPE (t);
12200 if (TREE_CODE (type) != VECTOR_TYPE)
12201 return t;
12203 unsigned i;
12204 tree val;
12205 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12206 if (! CONSTANT_CLASS_P (val))
12207 return t;
12209 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12212 case CONST_DECL:
12213 return fold (DECL_INITIAL (t));
12215 default:
12216 return t;
12217 } /* switch (code) */
12220 #ifdef ENABLE_FOLD_CHECKING
12221 #undef fold
12223 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12224 hash_table<nofree_ptr_hash<const tree_node> > *);
12225 static void fold_check_failed (const_tree, const_tree);
12226 void print_fold_checksum (const_tree);
12228 /* When --enable-checking=fold, compute a digest of expr before
12229 and after actual fold call to see if fold did not accidentally
12230 change original expr. */
12232 tree
12233 fold (tree expr)
12235 tree ret;
12236 struct md5_ctx ctx;
12237 unsigned char checksum_before[16], checksum_after[16];
12238 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12240 md5_init_ctx (&ctx);
12241 fold_checksum_tree (expr, &ctx, &ht);
12242 md5_finish_ctx (&ctx, checksum_before);
12243 ht.empty ();
12245 ret = fold_1 (expr);
12247 md5_init_ctx (&ctx);
12248 fold_checksum_tree (expr, &ctx, &ht);
12249 md5_finish_ctx (&ctx, checksum_after);
12251 if (memcmp (checksum_before, checksum_after, 16))
12252 fold_check_failed (expr, ret);
12254 return ret;
12257 void
12258 print_fold_checksum (const_tree expr)
12260 struct md5_ctx ctx;
12261 unsigned char checksum[16], cnt;
12262 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12264 md5_init_ctx (&ctx);
12265 fold_checksum_tree (expr, &ctx, &ht);
12266 md5_finish_ctx (&ctx, checksum);
12267 for (cnt = 0; cnt < 16; ++cnt)
12268 fprintf (stderr, "%02x", checksum[cnt]);
12269 putc ('\n', stderr);
12272 static void
12273 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12275 internal_error ("fold check: original tree changed by fold");
12278 static void
12279 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12280 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12282 const tree_node **slot;
12283 enum tree_code code;
12284 union tree_node buf;
12285 int i, len;
12287 recursive_label:
12288 if (expr == NULL)
12289 return;
12290 slot = ht->find_slot (expr, INSERT);
12291 if (*slot != NULL)
12292 return;
12293 *slot = expr;
12294 code = TREE_CODE (expr);
12295 if (TREE_CODE_CLASS (code) == tcc_declaration
12296 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12298 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12299 memcpy ((char *) &buf, expr, tree_size (expr));
12300 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12301 buf.decl_with_vis.symtab_node = NULL;
12302 expr = (tree) &buf;
12304 else if (TREE_CODE_CLASS (code) == tcc_type
12305 && (TYPE_POINTER_TO (expr)
12306 || TYPE_REFERENCE_TO (expr)
12307 || TYPE_CACHED_VALUES_P (expr)
12308 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12309 || TYPE_NEXT_VARIANT (expr)))
12311 /* Allow these fields to be modified. */
12312 tree tmp;
12313 memcpy ((char *) &buf, expr, tree_size (expr));
12314 expr = tmp = (tree) &buf;
12315 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12316 TYPE_POINTER_TO (tmp) = NULL;
12317 TYPE_REFERENCE_TO (tmp) = NULL;
12318 TYPE_NEXT_VARIANT (tmp) = NULL;
12319 if (TYPE_CACHED_VALUES_P (tmp))
12321 TYPE_CACHED_VALUES_P (tmp) = 0;
12322 TYPE_CACHED_VALUES (tmp) = NULL;
12325 md5_process_bytes (expr, tree_size (expr), ctx);
12326 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12327 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12328 if (TREE_CODE_CLASS (code) != tcc_type
12329 && TREE_CODE_CLASS (code) != tcc_declaration
12330 && code != TREE_LIST
12331 && code != SSA_NAME
12332 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12333 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12334 switch (TREE_CODE_CLASS (code))
12336 case tcc_constant:
12337 switch (code)
12339 case STRING_CST:
12340 md5_process_bytes (TREE_STRING_POINTER (expr),
12341 TREE_STRING_LENGTH (expr), ctx);
12342 break;
12343 case COMPLEX_CST:
12344 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12345 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12346 break;
12347 case VECTOR_CST:
12348 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12349 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12350 break;
12351 default:
12352 break;
12354 break;
12355 case tcc_exceptional:
12356 switch (code)
12358 case TREE_LIST:
12359 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12360 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12361 expr = TREE_CHAIN (expr);
12362 goto recursive_label;
12363 break;
12364 case TREE_VEC:
12365 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12366 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12367 break;
12368 default:
12369 break;
12371 break;
12372 case tcc_expression:
12373 case tcc_reference:
12374 case tcc_comparison:
12375 case tcc_unary:
12376 case tcc_binary:
12377 case tcc_statement:
12378 case tcc_vl_exp:
12379 len = TREE_OPERAND_LENGTH (expr);
12380 for (i = 0; i < len; ++i)
12381 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12382 break;
12383 case tcc_declaration:
12384 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12385 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12386 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12388 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12389 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12390 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12391 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12392 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12395 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12397 if (TREE_CODE (expr) == FUNCTION_DECL)
12399 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12400 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12402 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12404 break;
12405 case tcc_type:
12406 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12407 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12408 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12409 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12410 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12411 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12412 if (INTEGRAL_TYPE_P (expr)
12413 || SCALAR_FLOAT_TYPE_P (expr))
12415 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12416 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12418 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12419 if (TREE_CODE (expr) == RECORD_TYPE
12420 || TREE_CODE (expr) == UNION_TYPE
12421 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12422 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12423 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12424 break;
12425 default:
12426 break;
12430 /* Helper function for outputting the checksum of a tree T. When
12431 debugging with gdb, you can "define mynext" to be "next" followed
12432 by "call debug_fold_checksum (op0)", then just trace down till the
12433 outputs differ. */
12435 DEBUG_FUNCTION void
12436 debug_fold_checksum (const_tree t)
12438 int i;
12439 unsigned char checksum[16];
12440 struct md5_ctx ctx;
12441 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12443 md5_init_ctx (&ctx);
12444 fold_checksum_tree (t, &ctx, &ht);
12445 md5_finish_ctx (&ctx, checksum);
12446 ht.empty ();
12448 for (i = 0; i < 16; i++)
12449 fprintf (stderr, "%d ", checksum[i]);
12451 fprintf (stderr, "\n");
12454 #endif
12456 /* Fold a unary tree expression with code CODE of type TYPE with an
12457 operand OP0. LOC is the location of the resulting expression.
12458 Return a folded expression if successful. Otherwise, return a tree
12459 expression with code CODE of type TYPE with an operand OP0. */
12461 tree
12462 fold_build1_stat_loc (location_t loc,
12463 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12465 tree tem;
12466 #ifdef ENABLE_FOLD_CHECKING
12467 unsigned char checksum_before[16], checksum_after[16];
12468 struct md5_ctx ctx;
12469 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12471 md5_init_ctx (&ctx);
12472 fold_checksum_tree (op0, &ctx, &ht);
12473 md5_finish_ctx (&ctx, checksum_before);
12474 ht.empty ();
12475 #endif
12477 tem = fold_unary_loc (loc, code, type, op0);
12478 if (!tem)
12479 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12481 #ifdef ENABLE_FOLD_CHECKING
12482 md5_init_ctx (&ctx);
12483 fold_checksum_tree (op0, &ctx, &ht);
12484 md5_finish_ctx (&ctx, checksum_after);
12486 if (memcmp (checksum_before, checksum_after, 16))
12487 fold_check_failed (op0, tem);
12488 #endif
12489 return tem;
12492 /* Fold a binary tree expression with code CODE of type TYPE with
12493 operands OP0 and OP1. LOC is the location of the resulting
12494 expression. Return a folded expression if successful. Otherwise,
12495 return a tree expression with code CODE of type TYPE with operands
12496 OP0 and OP1. */
12498 tree
12499 fold_build2_stat_loc (location_t loc,
12500 enum tree_code code, tree type, tree op0, tree op1
12501 MEM_STAT_DECL)
12503 tree tem;
12504 #ifdef ENABLE_FOLD_CHECKING
12505 unsigned char checksum_before_op0[16],
12506 checksum_before_op1[16],
12507 checksum_after_op0[16],
12508 checksum_after_op1[16];
12509 struct md5_ctx ctx;
12510 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12512 md5_init_ctx (&ctx);
12513 fold_checksum_tree (op0, &ctx, &ht);
12514 md5_finish_ctx (&ctx, checksum_before_op0);
12515 ht.empty ();
12517 md5_init_ctx (&ctx);
12518 fold_checksum_tree (op1, &ctx, &ht);
12519 md5_finish_ctx (&ctx, checksum_before_op1);
12520 ht.empty ();
12521 #endif
12523 tem = fold_binary_loc (loc, code, type, op0, op1);
12524 if (!tem)
12525 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12527 #ifdef ENABLE_FOLD_CHECKING
12528 md5_init_ctx (&ctx);
12529 fold_checksum_tree (op0, &ctx, &ht);
12530 md5_finish_ctx (&ctx, checksum_after_op0);
12531 ht.empty ();
12533 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12534 fold_check_failed (op0, tem);
12536 md5_init_ctx (&ctx);
12537 fold_checksum_tree (op1, &ctx, &ht);
12538 md5_finish_ctx (&ctx, checksum_after_op1);
12540 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12541 fold_check_failed (op1, tem);
12542 #endif
12543 return tem;
12546 /* Fold a ternary tree expression with code CODE of type TYPE with
12547 operands OP0, OP1, and OP2. Return a folded expression if
12548 successful. Otherwise, return a tree expression with code CODE of
12549 type TYPE with operands OP0, OP1, and OP2. */
12551 tree
12552 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12553 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12555 tree tem;
12556 #ifdef ENABLE_FOLD_CHECKING
12557 unsigned char checksum_before_op0[16],
12558 checksum_before_op1[16],
12559 checksum_before_op2[16],
12560 checksum_after_op0[16],
12561 checksum_after_op1[16],
12562 checksum_after_op2[16];
12563 struct md5_ctx ctx;
12564 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12566 md5_init_ctx (&ctx);
12567 fold_checksum_tree (op0, &ctx, &ht);
12568 md5_finish_ctx (&ctx, checksum_before_op0);
12569 ht.empty ();
12571 md5_init_ctx (&ctx);
12572 fold_checksum_tree (op1, &ctx, &ht);
12573 md5_finish_ctx (&ctx, checksum_before_op1);
12574 ht.empty ();
12576 md5_init_ctx (&ctx);
12577 fold_checksum_tree (op2, &ctx, &ht);
12578 md5_finish_ctx (&ctx, checksum_before_op2);
12579 ht.empty ();
12580 #endif
12582 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12583 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12584 if (!tem)
12585 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12587 #ifdef ENABLE_FOLD_CHECKING
12588 md5_init_ctx (&ctx);
12589 fold_checksum_tree (op0, &ctx, &ht);
12590 md5_finish_ctx (&ctx, checksum_after_op0);
12591 ht.empty ();
12593 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12594 fold_check_failed (op0, tem);
12596 md5_init_ctx (&ctx);
12597 fold_checksum_tree (op1, &ctx, &ht);
12598 md5_finish_ctx (&ctx, checksum_after_op1);
12599 ht.empty ();
12601 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12602 fold_check_failed (op1, tem);
12604 md5_init_ctx (&ctx);
12605 fold_checksum_tree (op2, &ctx, &ht);
12606 md5_finish_ctx (&ctx, checksum_after_op2);
12608 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12609 fold_check_failed (op2, tem);
12610 #endif
12611 return tem;
12614 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12615 arguments in ARGARRAY, and a null static chain.
12616 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12617 of type TYPE from the given operands as constructed by build_call_array. */
12619 tree
12620 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12621 int nargs, tree *argarray)
12623 tree tem;
12624 #ifdef ENABLE_FOLD_CHECKING
12625 unsigned char checksum_before_fn[16],
12626 checksum_before_arglist[16],
12627 checksum_after_fn[16],
12628 checksum_after_arglist[16];
12629 struct md5_ctx ctx;
12630 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12631 int i;
12633 md5_init_ctx (&ctx);
12634 fold_checksum_tree (fn, &ctx, &ht);
12635 md5_finish_ctx (&ctx, checksum_before_fn);
12636 ht.empty ();
12638 md5_init_ctx (&ctx);
12639 for (i = 0; i < nargs; i++)
12640 fold_checksum_tree (argarray[i], &ctx, &ht);
12641 md5_finish_ctx (&ctx, checksum_before_arglist);
12642 ht.empty ();
12643 #endif
12645 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12646 if (!tem)
12647 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12649 #ifdef ENABLE_FOLD_CHECKING
12650 md5_init_ctx (&ctx);
12651 fold_checksum_tree (fn, &ctx, &ht);
12652 md5_finish_ctx (&ctx, checksum_after_fn);
12653 ht.empty ();
12655 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12656 fold_check_failed (fn, tem);
12658 md5_init_ctx (&ctx);
12659 for (i = 0; i < nargs; i++)
12660 fold_checksum_tree (argarray[i], &ctx, &ht);
12661 md5_finish_ctx (&ctx, checksum_after_arglist);
12663 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12664 fold_check_failed (NULL_TREE, tem);
12665 #endif
12666 return tem;
12669 /* Perform constant folding and related simplification of initializer
12670 expression EXPR. These behave identically to "fold_buildN" but ignore
12671 potential run-time traps and exceptions that fold must preserve. */
12673 #define START_FOLD_INIT \
12674 int saved_signaling_nans = flag_signaling_nans;\
12675 int saved_trapping_math = flag_trapping_math;\
12676 int saved_rounding_math = flag_rounding_math;\
12677 int saved_trapv = flag_trapv;\
12678 int saved_folding_initializer = folding_initializer;\
12679 flag_signaling_nans = 0;\
12680 flag_trapping_math = 0;\
12681 flag_rounding_math = 0;\
12682 flag_trapv = 0;\
12683 folding_initializer = 1;
12685 #define END_FOLD_INIT \
12686 flag_signaling_nans = saved_signaling_nans;\
12687 flag_trapping_math = saved_trapping_math;\
12688 flag_rounding_math = saved_rounding_math;\
12689 flag_trapv = saved_trapv;\
12690 folding_initializer = saved_folding_initializer;
12692 tree
12693 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12694 tree type, tree op)
12696 tree result;
12697 START_FOLD_INIT;
12699 result = fold_build1_loc (loc, code, type, op);
12701 END_FOLD_INIT;
12702 return result;
12705 tree
12706 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12707 tree type, tree op0, tree op1)
12709 tree result;
12710 START_FOLD_INIT;
12712 result = fold_build2_loc (loc, code, type, op0, op1);
12714 END_FOLD_INIT;
12715 return result;
12718 tree
12719 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12720 int nargs, tree *argarray)
12722 tree result;
12723 START_FOLD_INIT;
12725 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12727 END_FOLD_INIT;
12728 return result;
12731 #undef START_FOLD_INIT
12732 #undef END_FOLD_INIT
12734 /* Determine if first argument is a multiple of second argument. Return 0 if
12735 it is not, or we cannot easily determined it to be.
12737 An example of the sort of thing we care about (at this point; this routine
12738 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12739 fold cases do now) is discovering that
12741 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12743 is a multiple of
12745 SAVE_EXPR (J * 8)
12747 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12749 This code also handles discovering that
12751 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12753 is a multiple of 8 so we don't have to worry about dealing with a
12754 possible remainder.
12756 Note that we *look* inside a SAVE_EXPR only to determine how it was
12757 calculated; it is not safe for fold to do much of anything else with the
12758 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12759 at run time. For example, the latter example above *cannot* be implemented
12760 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12761 evaluation time of the original SAVE_EXPR is not necessarily the same at
12762 the time the new expression is evaluated. The only optimization of this
12763 sort that would be valid is changing
12765 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12767 divided by 8 to
12769 SAVE_EXPR (I) * SAVE_EXPR (J)
12771 (where the same SAVE_EXPR (J) is used in the original and the
12772 transformed version). */
12775 multiple_of_p (tree type, const_tree top, const_tree bottom)
12777 if (operand_equal_p (top, bottom, 0))
12778 return 1;
12780 if (TREE_CODE (type) != INTEGER_TYPE)
12781 return 0;
12783 switch (TREE_CODE (top))
12785 case BIT_AND_EXPR:
12786 /* Bitwise and provides a power of two multiple. If the mask is
12787 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12788 if (!integer_pow2p (bottom))
12789 return 0;
12790 /* FALLTHRU */
12792 case MULT_EXPR:
12793 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12794 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12796 case PLUS_EXPR:
12797 case MINUS_EXPR:
12798 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12799 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12801 case LSHIFT_EXPR:
12802 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12804 tree op1, t1;
12806 op1 = TREE_OPERAND (top, 1);
12807 /* const_binop may not detect overflow correctly,
12808 so check for it explicitly here. */
12809 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12810 && 0 != (t1 = fold_convert (type,
12811 const_binop (LSHIFT_EXPR,
12812 size_one_node,
12813 op1)))
12814 && !TREE_OVERFLOW (t1))
12815 return multiple_of_p (type, t1, bottom);
12817 return 0;
12819 case NOP_EXPR:
12820 /* Can't handle conversions from non-integral or wider integral type. */
12821 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12822 || (TYPE_PRECISION (type)
12823 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12824 return 0;
12826 /* .. fall through ... */
12828 case SAVE_EXPR:
12829 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12831 case COND_EXPR:
12832 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12833 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12835 case INTEGER_CST:
12836 if (TREE_CODE (bottom) != INTEGER_CST
12837 || integer_zerop (bottom)
12838 || (TYPE_UNSIGNED (type)
12839 && (tree_int_cst_sgn (top) < 0
12840 || tree_int_cst_sgn (bottom) < 0)))
12841 return 0;
12842 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12843 SIGNED);
12845 default:
12846 return 0;
12850 #define tree_expr_nonnegative_warnv_p(X, Y) \
12851 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12853 #define RECURSE(X) \
12854 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12856 /* Return true if CODE or TYPE is known to be non-negative. */
12858 static bool
12859 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12861 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12862 && truth_value_p (code))
12863 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12864 have a signed:1 type (where the value is -1 and 0). */
12865 return true;
12866 return false;
12869 /* Return true if (CODE OP0) is known to be non-negative. If the return
12870 value is based on the assumption that signed overflow is undefined,
12871 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12872 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12874 bool
12875 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12876 bool *strict_overflow_p, int depth)
12878 if (TYPE_UNSIGNED (type))
12879 return true;
12881 switch (code)
12883 case ABS_EXPR:
12884 /* We can't return 1 if flag_wrapv is set because
12885 ABS_EXPR<INT_MIN> = INT_MIN. */
12886 if (!ANY_INTEGRAL_TYPE_P (type))
12887 return true;
12888 if (TYPE_OVERFLOW_UNDEFINED (type))
12890 *strict_overflow_p = true;
12891 return true;
12893 break;
12895 case NON_LVALUE_EXPR:
12896 case FLOAT_EXPR:
12897 case FIX_TRUNC_EXPR:
12898 return RECURSE (op0);
12900 CASE_CONVERT:
12902 tree inner_type = TREE_TYPE (op0);
12903 tree outer_type = type;
12905 if (TREE_CODE (outer_type) == REAL_TYPE)
12907 if (TREE_CODE (inner_type) == REAL_TYPE)
12908 return RECURSE (op0);
12909 if (INTEGRAL_TYPE_P (inner_type))
12911 if (TYPE_UNSIGNED (inner_type))
12912 return true;
12913 return RECURSE (op0);
12916 else if (INTEGRAL_TYPE_P (outer_type))
12918 if (TREE_CODE (inner_type) == REAL_TYPE)
12919 return RECURSE (op0);
12920 if (INTEGRAL_TYPE_P (inner_type))
12921 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12922 && TYPE_UNSIGNED (inner_type);
12925 break;
12927 default:
12928 return tree_simple_nonnegative_warnv_p (code, type);
12931 /* We don't know sign of `t', so be conservative and return false. */
12932 return false;
12935 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12936 value is based on the assumption that signed overflow is undefined,
12937 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12938 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12940 bool
12941 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12942 tree op1, bool *strict_overflow_p,
12943 int depth)
12945 if (TYPE_UNSIGNED (type))
12946 return true;
12948 switch (code)
12950 case POINTER_PLUS_EXPR:
12951 case PLUS_EXPR:
12952 if (FLOAT_TYPE_P (type))
12953 return RECURSE (op0) && RECURSE (op1);
12955 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12956 both unsigned and at least 2 bits shorter than the result. */
12957 if (TREE_CODE (type) == INTEGER_TYPE
12958 && TREE_CODE (op0) == NOP_EXPR
12959 && TREE_CODE (op1) == NOP_EXPR)
12961 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12962 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12963 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12964 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12966 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12967 TYPE_PRECISION (inner2)) + 1;
12968 return prec < TYPE_PRECISION (type);
12971 break;
12973 case MULT_EXPR:
12974 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12976 /* x * x is always non-negative for floating point x
12977 or without overflow. */
12978 if (operand_equal_p (op0, op1, 0)
12979 || (RECURSE (op0) && RECURSE (op1)))
12981 if (ANY_INTEGRAL_TYPE_P (type)
12982 && TYPE_OVERFLOW_UNDEFINED (type))
12983 *strict_overflow_p = true;
12984 return true;
12988 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12989 both unsigned and their total bits is shorter than the result. */
12990 if (TREE_CODE (type) == INTEGER_TYPE
12991 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12992 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12994 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12995 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12996 : TREE_TYPE (op0);
12997 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12998 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12999 : TREE_TYPE (op1);
13001 bool unsigned0 = TYPE_UNSIGNED (inner0);
13002 bool unsigned1 = TYPE_UNSIGNED (inner1);
13004 if (TREE_CODE (op0) == INTEGER_CST)
13005 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13007 if (TREE_CODE (op1) == INTEGER_CST)
13008 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13010 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13011 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13013 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13014 ? tree_int_cst_min_precision (op0, UNSIGNED)
13015 : TYPE_PRECISION (inner0);
13017 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13018 ? tree_int_cst_min_precision (op1, UNSIGNED)
13019 : TYPE_PRECISION (inner1);
13021 return precision0 + precision1 < TYPE_PRECISION (type);
13024 return false;
13026 case BIT_AND_EXPR:
13027 case MAX_EXPR:
13028 return RECURSE (op0) || RECURSE (op1);
13030 case BIT_IOR_EXPR:
13031 case BIT_XOR_EXPR:
13032 case MIN_EXPR:
13033 case RDIV_EXPR:
13034 case TRUNC_DIV_EXPR:
13035 case CEIL_DIV_EXPR:
13036 case FLOOR_DIV_EXPR:
13037 case ROUND_DIV_EXPR:
13038 return RECURSE (op0) && RECURSE (op1);
13040 case TRUNC_MOD_EXPR:
13041 return RECURSE (op0);
13043 case FLOOR_MOD_EXPR:
13044 return RECURSE (op1);
13046 case CEIL_MOD_EXPR:
13047 case ROUND_MOD_EXPR:
13048 default:
13049 return tree_simple_nonnegative_warnv_p (code, type);
13052 /* We don't know sign of `t', so be conservative and return false. */
13053 return false;
13056 /* Return true if T is known to be non-negative. If the return
13057 value is based on the assumption that signed overflow is undefined,
13058 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13059 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13061 bool
13062 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13064 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13065 return true;
13067 switch (TREE_CODE (t))
13069 case INTEGER_CST:
13070 return tree_int_cst_sgn (t) >= 0;
13072 case REAL_CST:
13073 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13075 case FIXED_CST:
13076 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13078 case COND_EXPR:
13079 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13081 case SSA_NAME:
13082 /* Limit the depth of recursion to avoid quadratic behavior.
13083 This is expected to catch almost all occurrences in practice.
13084 If this code misses important cases that unbounded recursion
13085 would not, passes that need this information could be revised
13086 to provide it through dataflow propagation. */
13087 return (!name_registered_for_update_p (t)
13088 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13089 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13090 strict_overflow_p, depth));
13092 default:
13093 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13097 /* Return true if T is known to be non-negative. If the return
13098 value is based on the assumption that signed overflow is undefined,
13099 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13100 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13102 bool
13103 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13104 bool *strict_overflow_p, int depth)
13106 switch (fn)
13108 CASE_CFN_ACOS:
13109 CASE_CFN_ACOSH:
13110 CASE_CFN_CABS:
13111 CASE_CFN_COSH:
13112 CASE_CFN_ERFC:
13113 CASE_CFN_EXP:
13114 CASE_CFN_EXP10:
13115 CASE_CFN_EXP2:
13116 CASE_CFN_FABS:
13117 CASE_CFN_FDIM:
13118 CASE_CFN_HYPOT:
13119 CASE_CFN_POW10:
13120 CASE_CFN_FFS:
13121 CASE_CFN_PARITY:
13122 CASE_CFN_POPCOUNT:
13123 CASE_CFN_CLZ:
13124 CASE_CFN_CLRSB:
13125 case CFN_BUILT_IN_BSWAP32:
13126 case CFN_BUILT_IN_BSWAP64:
13127 /* Always true. */
13128 return true;
13130 CASE_CFN_SQRT:
13131 /* sqrt(-0.0) is -0.0. */
13132 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13133 return true;
13134 return RECURSE (arg0);
13136 CASE_CFN_ASINH:
13137 CASE_CFN_ATAN:
13138 CASE_CFN_ATANH:
13139 CASE_CFN_CBRT:
13140 CASE_CFN_CEIL:
13141 CASE_CFN_ERF:
13142 CASE_CFN_EXPM1:
13143 CASE_CFN_FLOOR:
13144 CASE_CFN_FMOD:
13145 CASE_CFN_FREXP:
13146 CASE_CFN_ICEIL:
13147 CASE_CFN_IFLOOR:
13148 CASE_CFN_IRINT:
13149 CASE_CFN_IROUND:
13150 CASE_CFN_LCEIL:
13151 CASE_CFN_LDEXP:
13152 CASE_CFN_LFLOOR:
13153 CASE_CFN_LLCEIL:
13154 CASE_CFN_LLFLOOR:
13155 CASE_CFN_LLRINT:
13156 CASE_CFN_LLROUND:
13157 CASE_CFN_LRINT:
13158 CASE_CFN_LROUND:
13159 CASE_CFN_MODF:
13160 CASE_CFN_NEARBYINT:
13161 CASE_CFN_RINT:
13162 CASE_CFN_ROUND:
13163 CASE_CFN_SCALB:
13164 CASE_CFN_SCALBLN:
13165 CASE_CFN_SCALBN:
13166 CASE_CFN_SIGNBIT:
13167 CASE_CFN_SIGNIFICAND:
13168 CASE_CFN_SINH:
13169 CASE_CFN_TANH:
13170 CASE_CFN_TRUNC:
13171 /* True if the 1st argument is nonnegative. */
13172 return RECURSE (arg0);
13174 CASE_CFN_FMAX:
13175 /* True if the 1st OR 2nd arguments are nonnegative. */
13176 return RECURSE (arg0) || RECURSE (arg1);
13178 CASE_CFN_FMIN:
13179 /* True if the 1st AND 2nd arguments are nonnegative. */
13180 return RECURSE (arg0) && RECURSE (arg1);
13182 CASE_CFN_COPYSIGN:
13183 /* True if the 2nd argument is nonnegative. */
13184 return RECURSE (arg1);
13186 CASE_CFN_POWI:
13187 /* True if the 1st argument is nonnegative or the second
13188 argument is an even integer. */
13189 if (TREE_CODE (arg1) == INTEGER_CST
13190 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13191 return true;
13192 return RECURSE (arg0);
13194 CASE_CFN_POW:
13195 /* True if the 1st argument is nonnegative or the second
13196 argument is an even integer valued real. */
13197 if (TREE_CODE (arg1) == REAL_CST)
13199 REAL_VALUE_TYPE c;
13200 HOST_WIDE_INT n;
13202 c = TREE_REAL_CST (arg1);
13203 n = real_to_integer (&c);
13204 if ((n & 1) == 0)
13206 REAL_VALUE_TYPE cint;
13207 real_from_integer (&cint, VOIDmode, n, SIGNED);
13208 if (real_identical (&c, &cint))
13209 return true;
13212 return RECURSE (arg0);
13214 default:
13215 break;
13217 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13220 /* Return true if T is known to be non-negative. If the return
13221 value is based on the assumption that signed overflow is undefined,
13222 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13223 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13225 static bool
13226 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13228 enum tree_code code = TREE_CODE (t);
13229 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13230 return true;
13232 switch (code)
13234 case TARGET_EXPR:
13236 tree temp = TARGET_EXPR_SLOT (t);
13237 t = TARGET_EXPR_INITIAL (t);
13239 /* If the initializer is non-void, then it's a normal expression
13240 that will be assigned to the slot. */
13241 if (!VOID_TYPE_P (t))
13242 return RECURSE (t);
13244 /* Otherwise, the initializer sets the slot in some way. One common
13245 way is an assignment statement at the end of the initializer. */
13246 while (1)
13248 if (TREE_CODE (t) == BIND_EXPR)
13249 t = expr_last (BIND_EXPR_BODY (t));
13250 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13251 || TREE_CODE (t) == TRY_CATCH_EXPR)
13252 t = expr_last (TREE_OPERAND (t, 0));
13253 else if (TREE_CODE (t) == STATEMENT_LIST)
13254 t = expr_last (t);
13255 else
13256 break;
13258 if (TREE_CODE (t) == MODIFY_EXPR
13259 && TREE_OPERAND (t, 0) == temp)
13260 return RECURSE (TREE_OPERAND (t, 1));
13262 return false;
13265 case CALL_EXPR:
13267 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13268 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13270 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13271 get_call_combined_fn (t),
13272 arg0,
13273 arg1,
13274 strict_overflow_p, depth);
13276 case COMPOUND_EXPR:
13277 case MODIFY_EXPR:
13278 return RECURSE (TREE_OPERAND (t, 1));
13280 case BIND_EXPR:
13281 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13283 case SAVE_EXPR:
13284 return RECURSE (TREE_OPERAND (t, 0));
13286 default:
13287 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13291 #undef RECURSE
13292 #undef tree_expr_nonnegative_warnv_p
13294 /* Return true if T is known to be non-negative. If the return
13295 value is based on the assumption that signed overflow is undefined,
13296 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13297 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13299 bool
13300 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13302 enum tree_code code;
13303 if (t == error_mark_node)
13304 return false;
13306 code = TREE_CODE (t);
13307 switch (TREE_CODE_CLASS (code))
13309 case tcc_binary:
13310 case tcc_comparison:
13311 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13312 TREE_TYPE (t),
13313 TREE_OPERAND (t, 0),
13314 TREE_OPERAND (t, 1),
13315 strict_overflow_p, depth);
13317 case tcc_unary:
13318 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13319 TREE_TYPE (t),
13320 TREE_OPERAND (t, 0),
13321 strict_overflow_p, depth);
13323 case tcc_constant:
13324 case tcc_declaration:
13325 case tcc_reference:
13326 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13328 default:
13329 break;
13332 switch (code)
13334 case TRUTH_AND_EXPR:
13335 case TRUTH_OR_EXPR:
13336 case TRUTH_XOR_EXPR:
13337 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13338 TREE_TYPE (t),
13339 TREE_OPERAND (t, 0),
13340 TREE_OPERAND (t, 1),
13341 strict_overflow_p, depth);
13342 case TRUTH_NOT_EXPR:
13343 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13344 TREE_TYPE (t),
13345 TREE_OPERAND (t, 0),
13346 strict_overflow_p, depth);
13348 case COND_EXPR:
13349 case CONSTRUCTOR:
13350 case OBJ_TYPE_REF:
13351 case ASSERT_EXPR:
13352 case ADDR_EXPR:
13353 case WITH_SIZE_EXPR:
13354 case SSA_NAME:
13355 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13357 default:
13358 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13362 /* Return true if `t' is known to be non-negative. Handle warnings
13363 about undefined signed overflow. */
13365 bool
13366 tree_expr_nonnegative_p (tree t)
13368 bool ret, strict_overflow_p;
13370 strict_overflow_p = false;
13371 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13372 if (strict_overflow_p)
13373 fold_overflow_warning (("assuming signed overflow does not occur when "
13374 "determining that expression is always "
13375 "non-negative"),
13376 WARN_STRICT_OVERFLOW_MISC);
13377 return ret;
13381 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13382 For floating point we further ensure that T is not denormal.
13383 Similar logic is present in nonzero_address in rtlanal.h.
13385 If the return value is based on the assumption that signed overflow
13386 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13387 change *STRICT_OVERFLOW_P. */
13389 bool
13390 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13391 bool *strict_overflow_p)
13393 switch (code)
13395 case ABS_EXPR:
13396 return tree_expr_nonzero_warnv_p (op0,
13397 strict_overflow_p);
13399 case NOP_EXPR:
13401 tree inner_type = TREE_TYPE (op0);
13402 tree outer_type = type;
13404 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13405 && tree_expr_nonzero_warnv_p (op0,
13406 strict_overflow_p));
13408 break;
13410 case NON_LVALUE_EXPR:
13411 return tree_expr_nonzero_warnv_p (op0,
13412 strict_overflow_p);
13414 default:
13415 break;
13418 return false;
13421 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13422 For floating point we further ensure that T is not denormal.
13423 Similar logic is present in nonzero_address in rtlanal.h.
13425 If the return value is based on the assumption that signed overflow
13426 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13427 change *STRICT_OVERFLOW_P. */
13429 bool
13430 tree_binary_nonzero_warnv_p (enum tree_code code,
13431 tree type,
13432 tree op0,
13433 tree op1, bool *strict_overflow_p)
13435 bool sub_strict_overflow_p;
13436 switch (code)
13438 case POINTER_PLUS_EXPR:
13439 case PLUS_EXPR:
13440 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13442 /* With the presence of negative values it is hard
13443 to say something. */
13444 sub_strict_overflow_p = false;
13445 if (!tree_expr_nonnegative_warnv_p (op0,
13446 &sub_strict_overflow_p)
13447 || !tree_expr_nonnegative_warnv_p (op1,
13448 &sub_strict_overflow_p))
13449 return false;
13450 /* One of operands must be positive and the other non-negative. */
13451 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13452 overflows, on a twos-complement machine the sum of two
13453 nonnegative numbers can never be zero. */
13454 return (tree_expr_nonzero_warnv_p (op0,
13455 strict_overflow_p)
13456 || tree_expr_nonzero_warnv_p (op1,
13457 strict_overflow_p));
13459 break;
13461 case MULT_EXPR:
13462 if (TYPE_OVERFLOW_UNDEFINED (type))
13464 if (tree_expr_nonzero_warnv_p (op0,
13465 strict_overflow_p)
13466 && tree_expr_nonzero_warnv_p (op1,
13467 strict_overflow_p))
13469 *strict_overflow_p = true;
13470 return true;
13473 break;
13475 case MIN_EXPR:
13476 sub_strict_overflow_p = false;
13477 if (tree_expr_nonzero_warnv_p (op0,
13478 &sub_strict_overflow_p)
13479 && tree_expr_nonzero_warnv_p (op1,
13480 &sub_strict_overflow_p))
13482 if (sub_strict_overflow_p)
13483 *strict_overflow_p = true;
13485 break;
13487 case MAX_EXPR:
13488 sub_strict_overflow_p = false;
13489 if (tree_expr_nonzero_warnv_p (op0,
13490 &sub_strict_overflow_p))
13492 if (sub_strict_overflow_p)
13493 *strict_overflow_p = true;
13495 /* When both operands are nonzero, then MAX must be too. */
13496 if (tree_expr_nonzero_warnv_p (op1,
13497 strict_overflow_p))
13498 return true;
13500 /* MAX where operand 0 is positive is positive. */
13501 return tree_expr_nonnegative_warnv_p (op0,
13502 strict_overflow_p);
13504 /* MAX where operand 1 is positive is positive. */
13505 else if (tree_expr_nonzero_warnv_p (op1,
13506 &sub_strict_overflow_p)
13507 && tree_expr_nonnegative_warnv_p (op1,
13508 &sub_strict_overflow_p))
13510 if (sub_strict_overflow_p)
13511 *strict_overflow_p = true;
13512 return true;
13514 break;
13516 case BIT_IOR_EXPR:
13517 return (tree_expr_nonzero_warnv_p (op1,
13518 strict_overflow_p)
13519 || tree_expr_nonzero_warnv_p (op0,
13520 strict_overflow_p));
13522 default:
13523 break;
13526 return false;
13529 /* Return true when T is an address and is known to be nonzero.
13530 For floating point we further ensure that T is not denormal.
13531 Similar logic is present in nonzero_address in rtlanal.h.
13533 If the return value is based on the assumption that signed overflow
13534 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13535 change *STRICT_OVERFLOW_P. */
13537 bool
13538 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13540 bool sub_strict_overflow_p;
13541 switch (TREE_CODE (t))
13543 case INTEGER_CST:
13544 return !integer_zerop (t);
13546 case ADDR_EXPR:
13548 tree base = TREE_OPERAND (t, 0);
13550 if (!DECL_P (base))
13551 base = get_base_address (base);
13553 if (base && TREE_CODE (base) == TARGET_EXPR)
13554 base = TARGET_EXPR_SLOT (base);
13556 if (!base)
13557 return false;
13559 /* For objects in symbol table check if we know they are non-zero.
13560 Don't do anything for variables and functions before symtab is built;
13561 it is quite possible that they will be declared weak later. */
13562 int nonzero_addr = maybe_nonzero_address (base);
13563 if (nonzero_addr >= 0)
13564 return nonzero_addr;
13566 /* Function local objects are never NULL. */
13567 if (DECL_P (base)
13568 && (DECL_CONTEXT (base)
13569 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13570 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13571 return true;
13573 /* Constants are never weak. */
13574 if (CONSTANT_CLASS_P (base))
13575 return true;
13577 return false;
13580 case COND_EXPR:
13581 sub_strict_overflow_p = false;
13582 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13583 &sub_strict_overflow_p)
13584 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13585 &sub_strict_overflow_p))
13587 if (sub_strict_overflow_p)
13588 *strict_overflow_p = true;
13589 return true;
13591 break;
13593 default:
13594 break;
13596 return false;
13599 #define integer_valued_real_p(X) \
13600 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13602 #define RECURSE(X) \
13603 ((integer_valued_real_p) (X, depth + 1))
13605 /* Return true if the floating point result of (CODE OP0) has an
13606 integer value. We also allow +Inf, -Inf and NaN to be considered
13607 integer values. Return false for signaling NaN.
13609 DEPTH is the current nesting depth of the query. */
13611 bool
13612 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13614 switch (code)
13616 case FLOAT_EXPR:
13617 return true;
13619 case ABS_EXPR:
13620 return RECURSE (op0);
13622 CASE_CONVERT:
13624 tree type = TREE_TYPE (op0);
13625 if (TREE_CODE (type) == INTEGER_TYPE)
13626 return true;
13627 if (TREE_CODE (type) == REAL_TYPE)
13628 return RECURSE (op0);
13629 break;
13632 default:
13633 break;
13635 return false;
13638 /* Return true if the floating point result of (CODE OP0 OP1) has an
13639 integer value. We also allow +Inf, -Inf and NaN to be considered
13640 integer values. Return false for signaling NaN.
13642 DEPTH is the current nesting depth of the query. */
13644 bool
13645 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13647 switch (code)
13649 case PLUS_EXPR:
13650 case MINUS_EXPR:
13651 case MULT_EXPR:
13652 case MIN_EXPR:
13653 case MAX_EXPR:
13654 return RECURSE (op0) && RECURSE (op1);
13656 default:
13657 break;
13659 return false;
13662 /* Return true if the floating point result of calling FNDECL with arguments
13663 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13664 considered integer values. Return false for signaling NaN. If FNDECL
13665 takes fewer than 2 arguments, the remaining ARGn are null.
13667 DEPTH is the current nesting depth of the query. */
13669 bool
13670 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13672 switch (fn)
13674 CASE_CFN_CEIL:
13675 CASE_CFN_FLOOR:
13676 CASE_CFN_NEARBYINT:
13677 CASE_CFN_RINT:
13678 CASE_CFN_ROUND:
13679 CASE_CFN_TRUNC:
13680 return true;
13682 CASE_CFN_FMIN:
13683 CASE_CFN_FMAX:
13684 return RECURSE (arg0) && RECURSE (arg1);
13686 default:
13687 break;
13689 return false;
13692 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13693 has an integer value. We also allow +Inf, -Inf and NaN to be
13694 considered integer values. Return false for signaling NaN.
13696 DEPTH is the current nesting depth of the query. */
13698 bool
13699 integer_valued_real_single_p (tree t, int depth)
13701 switch (TREE_CODE (t))
13703 case REAL_CST:
13704 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13706 case COND_EXPR:
13707 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13709 case SSA_NAME:
13710 /* Limit the depth of recursion to avoid quadratic behavior.
13711 This is expected to catch almost all occurrences in practice.
13712 If this code misses important cases that unbounded recursion
13713 would not, passes that need this information could be revised
13714 to provide it through dataflow propagation. */
13715 return (!name_registered_for_update_p (t)
13716 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13717 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13718 depth));
13720 default:
13721 break;
13723 return false;
13726 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13727 has an integer value. We also allow +Inf, -Inf and NaN to be
13728 considered integer values. Return false for signaling NaN.
13730 DEPTH is the current nesting depth of the query. */
13732 static bool
13733 integer_valued_real_invalid_p (tree t, int depth)
13735 switch (TREE_CODE (t))
13737 case COMPOUND_EXPR:
13738 case MODIFY_EXPR:
13739 case BIND_EXPR:
13740 return RECURSE (TREE_OPERAND (t, 1));
13742 case SAVE_EXPR:
13743 return RECURSE (TREE_OPERAND (t, 0));
13745 default:
13746 break;
13748 return false;
13751 #undef RECURSE
13752 #undef integer_valued_real_p
13754 /* Return true if the floating point expression T has an integer value.
13755 We also allow +Inf, -Inf and NaN to be considered integer values.
13756 Return false for signaling NaN.
13758 DEPTH is the current nesting depth of the query. */
13760 bool
13761 integer_valued_real_p (tree t, int depth)
13763 if (t == error_mark_node)
13764 return false;
13766 tree_code code = TREE_CODE (t);
13767 switch (TREE_CODE_CLASS (code))
13769 case tcc_binary:
13770 case tcc_comparison:
13771 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13772 TREE_OPERAND (t, 1), depth);
13774 case tcc_unary:
13775 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13777 case tcc_constant:
13778 case tcc_declaration:
13779 case tcc_reference:
13780 return integer_valued_real_single_p (t, depth);
13782 default:
13783 break;
13786 switch (code)
13788 case COND_EXPR:
13789 case SSA_NAME:
13790 return integer_valued_real_single_p (t, depth);
13792 case CALL_EXPR:
13794 tree arg0 = (call_expr_nargs (t) > 0
13795 ? CALL_EXPR_ARG (t, 0)
13796 : NULL_TREE);
13797 tree arg1 = (call_expr_nargs (t) > 1
13798 ? CALL_EXPR_ARG (t, 1)
13799 : NULL_TREE);
13800 return integer_valued_real_call_p (get_call_combined_fn (t),
13801 arg0, arg1, depth);
13804 default:
13805 return integer_valued_real_invalid_p (t, depth);
13809 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13810 attempt to fold the expression to a constant without modifying TYPE,
13811 OP0 or OP1.
13813 If the expression could be simplified to a constant, then return
13814 the constant. If the expression would not be simplified to a
13815 constant, then return NULL_TREE. */
13817 tree
13818 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13820 tree tem = fold_binary (code, type, op0, op1);
13821 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13824 /* Given the components of a unary expression CODE, TYPE and OP0,
13825 attempt to fold the expression to a constant without modifying
13826 TYPE or OP0.
13828 If the expression could be simplified to a constant, then return
13829 the constant. If the expression would not be simplified to a
13830 constant, then return NULL_TREE. */
13832 tree
13833 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13835 tree tem = fold_unary (code, type, op0);
13836 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13839 /* If EXP represents referencing an element in a constant string
13840 (either via pointer arithmetic or array indexing), return the
13841 tree representing the value accessed, otherwise return NULL. */
13843 tree
13844 fold_read_from_constant_string (tree exp)
13846 if ((TREE_CODE (exp) == INDIRECT_REF
13847 || TREE_CODE (exp) == ARRAY_REF)
13848 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13850 tree exp1 = TREE_OPERAND (exp, 0);
13851 tree index;
13852 tree string;
13853 location_t loc = EXPR_LOCATION (exp);
13855 if (TREE_CODE (exp) == INDIRECT_REF)
13856 string = string_constant (exp1, &index);
13857 else
13859 tree low_bound = array_ref_low_bound (exp);
13860 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13862 /* Optimize the special-case of a zero lower bound.
13864 We convert the low_bound to sizetype to avoid some problems
13865 with constant folding. (E.g. suppose the lower bound is 1,
13866 and its mode is QI. Without the conversion,l (ARRAY
13867 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13868 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13869 if (! integer_zerop (low_bound))
13870 index = size_diffop_loc (loc, index,
13871 fold_convert_loc (loc, sizetype, low_bound));
13873 string = exp1;
13876 if (string
13877 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13878 && TREE_CODE (string) == STRING_CST
13879 && TREE_CODE (index) == INTEGER_CST
13880 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13881 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13882 == MODE_INT)
13883 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13884 return build_int_cst_type (TREE_TYPE (exp),
13885 (TREE_STRING_POINTER (string)
13886 [TREE_INT_CST_LOW (index)]));
13888 return NULL;
13891 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13892 an integer constant, real, or fixed-point constant.
13894 TYPE is the type of the result. */
13896 static tree
13897 fold_negate_const (tree arg0, tree type)
13899 tree t = NULL_TREE;
13901 switch (TREE_CODE (arg0))
13903 case INTEGER_CST:
13905 bool overflow;
13906 wide_int val = wi::neg (arg0, &overflow);
13907 t = force_fit_type (type, val, 1,
13908 (overflow | TREE_OVERFLOW (arg0))
13909 && !TYPE_UNSIGNED (type));
13910 break;
13913 case REAL_CST:
13914 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13915 break;
13917 case FIXED_CST:
13919 FIXED_VALUE_TYPE f;
13920 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13921 &(TREE_FIXED_CST (arg0)), NULL,
13922 TYPE_SATURATING (type));
13923 t = build_fixed (type, f);
13924 /* Propagate overflow flags. */
13925 if (overflow_p | TREE_OVERFLOW (arg0))
13926 TREE_OVERFLOW (t) = 1;
13927 break;
13930 default:
13931 gcc_unreachable ();
13934 return t;
13937 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13938 an integer constant or real constant.
13940 TYPE is the type of the result. */
13942 tree
13943 fold_abs_const (tree arg0, tree type)
13945 tree t = NULL_TREE;
13947 switch (TREE_CODE (arg0))
13949 case INTEGER_CST:
13951 /* If the value is unsigned or non-negative, then the absolute value
13952 is the same as the ordinary value. */
13953 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13954 t = arg0;
13956 /* If the value is negative, then the absolute value is
13957 its negation. */
13958 else
13960 bool overflow;
13961 wide_int val = wi::neg (arg0, &overflow);
13962 t = force_fit_type (type, val, -1,
13963 overflow | TREE_OVERFLOW (arg0));
13966 break;
13968 case REAL_CST:
13969 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13970 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13971 else
13972 t = arg0;
13973 break;
13975 default:
13976 gcc_unreachable ();
13979 return t;
13982 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13983 constant. TYPE is the type of the result. */
13985 static tree
13986 fold_not_const (const_tree arg0, tree type)
13988 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13990 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13993 /* Given CODE, a relational operator, the target type, TYPE and two
13994 constant operands OP0 and OP1, return the result of the
13995 relational operation. If the result is not a compile time
13996 constant, then return NULL_TREE. */
13998 static tree
13999 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14001 int result, invert;
14003 /* From here on, the only cases we handle are when the result is
14004 known to be a constant. */
14006 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14008 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14009 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14011 /* Handle the cases where either operand is a NaN. */
14012 if (real_isnan (c0) || real_isnan (c1))
14014 switch (code)
14016 case EQ_EXPR:
14017 case ORDERED_EXPR:
14018 result = 0;
14019 break;
14021 case NE_EXPR:
14022 case UNORDERED_EXPR:
14023 case UNLT_EXPR:
14024 case UNLE_EXPR:
14025 case UNGT_EXPR:
14026 case UNGE_EXPR:
14027 case UNEQ_EXPR:
14028 result = 1;
14029 break;
14031 case LT_EXPR:
14032 case LE_EXPR:
14033 case GT_EXPR:
14034 case GE_EXPR:
14035 case LTGT_EXPR:
14036 if (flag_trapping_math)
14037 return NULL_TREE;
14038 result = 0;
14039 break;
14041 default:
14042 gcc_unreachable ();
14045 return constant_boolean_node (result, type);
14048 return constant_boolean_node (real_compare (code, c0, c1), type);
14051 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14053 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14054 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14055 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14058 /* Handle equality/inequality of complex constants. */
14059 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14061 tree rcond = fold_relational_const (code, type,
14062 TREE_REALPART (op0),
14063 TREE_REALPART (op1));
14064 tree icond = fold_relational_const (code, type,
14065 TREE_IMAGPART (op0),
14066 TREE_IMAGPART (op1));
14067 if (code == EQ_EXPR)
14068 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14069 else if (code == NE_EXPR)
14070 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14071 else
14072 return NULL_TREE;
14075 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14077 if (!VECTOR_TYPE_P (type))
14079 /* Have vector comparison with scalar boolean result. */
14080 bool result = true;
14081 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14082 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14083 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14085 tree elem0 = VECTOR_CST_ELT (op0, i);
14086 tree elem1 = VECTOR_CST_ELT (op1, i);
14087 tree tmp = fold_relational_const (code, type, elem0, elem1);
14088 result &= integer_onep (tmp);
14090 if (code == NE_EXPR)
14091 result = !result;
14092 return constant_boolean_node (result, type);
14094 unsigned count = VECTOR_CST_NELTS (op0);
14095 tree *elts = XALLOCAVEC (tree, count);
14096 gcc_assert (VECTOR_CST_NELTS (op1) == count
14097 && TYPE_VECTOR_SUBPARTS (type) == count);
14099 for (unsigned i = 0; i < count; i++)
14101 tree elem_type = TREE_TYPE (type);
14102 tree elem0 = VECTOR_CST_ELT (op0, i);
14103 tree elem1 = VECTOR_CST_ELT (op1, i);
14105 tree tem = fold_relational_const (code, elem_type,
14106 elem0, elem1);
14108 if (tem == NULL_TREE)
14109 return NULL_TREE;
14111 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14114 return build_vector (type, elts);
14117 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14119 To compute GT, swap the arguments and do LT.
14120 To compute GE, do LT and invert the result.
14121 To compute LE, swap the arguments, do LT and invert the result.
14122 To compute NE, do EQ and invert the result.
14124 Therefore, the code below must handle only EQ and LT. */
14126 if (code == LE_EXPR || code == GT_EXPR)
14128 std::swap (op0, op1);
14129 code = swap_tree_comparison (code);
14132 /* Note that it is safe to invert for real values here because we
14133 have already handled the one case that it matters. */
14135 invert = 0;
14136 if (code == NE_EXPR || code == GE_EXPR)
14138 invert = 1;
14139 code = invert_tree_comparison (code, false);
14142 /* Compute a result for LT or EQ if args permit;
14143 Otherwise return T. */
14144 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14146 if (code == EQ_EXPR)
14147 result = tree_int_cst_equal (op0, op1);
14148 else
14149 result = tree_int_cst_lt (op0, op1);
14151 else
14152 return NULL_TREE;
14154 if (invert)
14155 result ^= 1;
14156 return constant_boolean_node (result, type);
14159 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14160 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14161 itself. */
14163 tree
14164 fold_build_cleanup_point_expr (tree type, tree expr)
14166 /* If the expression does not have side effects then we don't have to wrap
14167 it with a cleanup point expression. */
14168 if (!TREE_SIDE_EFFECTS (expr))
14169 return expr;
14171 /* If the expression is a return, check to see if the expression inside the
14172 return has no side effects or the right hand side of the modify expression
14173 inside the return. If either don't have side effects set we don't need to
14174 wrap the expression in a cleanup point expression. Note we don't check the
14175 left hand side of the modify because it should always be a return decl. */
14176 if (TREE_CODE (expr) == RETURN_EXPR)
14178 tree op = TREE_OPERAND (expr, 0);
14179 if (!op || !TREE_SIDE_EFFECTS (op))
14180 return expr;
14181 op = TREE_OPERAND (op, 1);
14182 if (!TREE_SIDE_EFFECTS (op))
14183 return expr;
14186 return build1 (CLEANUP_POINT_EXPR, type, expr);
14189 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14190 of an indirection through OP0, or NULL_TREE if no simplification is
14191 possible. */
14193 tree
14194 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14196 tree sub = op0;
14197 tree subtype;
14199 STRIP_NOPS (sub);
14200 subtype = TREE_TYPE (sub);
14201 if (!POINTER_TYPE_P (subtype))
14202 return NULL_TREE;
14204 if (TREE_CODE (sub) == ADDR_EXPR)
14206 tree op = TREE_OPERAND (sub, 0);
14207 tree optype = TREE_TYPE (op);
14208 /* *&CONST_DECL -> to the value of the const decl. */
14209 if (TREE_CODE (op) == CONST_DECL)
14210 return DECL_INITIAL (op);
14211 /* *&p => p; make sure to handle *&"str"[cst] here. */
14212 if (type == optype)
14214 tree fop = fold_read_from_constant_string (op);
14215 if (fop)
14216 return fop;
14217 else
14218 return op;
14220 /* *(foo *)&fooarray => fooarray[0] */
14221 else if (TREE_CODE (optype) == ARRAY_TYPE
14222 && type == TREE_TYPE (optype)
14223 && (!in_gimple_form
14224 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14226 tree type_domain = TYPE_DOMAIN (optype);
14227 tree min_val = size_zero_node;
14228 if (type_domain && TYPE_MIN_VALUE (type_domain))
14229 min_val = TYPE_MIN_VALUE (type_domain);
14230 if (in_gimple_form
14231 && TREE_CODE (min_val) != INTEGER_CST)
14232 return NULL_TREE;
14233 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14234 NULL_TREE, NULL_TREE);
14236 /* *(foo *)&complexfoo => __real__ complexfoo */
14237 else if (TREE_CODE (optype) == COMPLEX_TYPE
14238 && type == TREE_TYPE (optype))
14239 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14240 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14241 else if (TREE_CODE (optype) == VECTOR_TYPE
14242 && type == TREE_TYPE (optype))
14244 tree part_width = TYPE_SIZE (type);
14245 tree index = bitsize_int (0);
14246 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14250 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14251 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14253 tree op00 = TREE_OPERAND (sub, 0);
14254 tree op01 = TREE_OPERAND (sub, 1);
14256 STRIP_NOPS (op00);
14257 if (TREE_CODE (op00) == ADDR_EXPR)
14259 tree op00type;
14260 op00 = TREE_OPERAND (op00, 0);
14261 op00type = TREE_TYPE (op00);
14263 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14264 if (TREE_CODE (op00type) == VECTOR_TYPE
14265 && type == TREE_TYPE (op00type))
14267 tree part_width = TYPE_SIZE (type);
14268 unsigned HOST_WIDE_INT max_offset
14269 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14270 * TYPE_VECTOR_SUBPARTS (op00type));
14271 if (tree_int_cst_sign_bit (op01) == 0
14272 && compare_tree_int (op01, max_offset) == -1)
14274 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14275 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14276 tree index = bitsize_int (indexi);
14277 return fold_build3_loc (loc,
14278 BIT_FIELD_REF, type, op00,
14279 part_width, index);
14282 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14283 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14284 && type == TREE_TYPE (op00type))
14286 tree size = TYPE_SIZE_UNIT (type);
14287 if (tree_int_cst_equal (size, op01))
14288 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14290 /* ((foo *)&fooarray)[1] => fooarray[1] */
14291 else if (TREE_CODE (op00type) == ARRAY_TYPE
14292 && type == TREE_TYPE (op00type))
14294 tree type_domain = TYPE_DOMAIN (op00type);
14295 tree min_val = size_zero_node;
14296 if (type_domain && TYPE_MIN_VALUE (type_domain))
14297 min_val = TYPE_MIN_VALUE (type_domain);
14298 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14299 TYPE_SIZE_UNIT (type));
14300 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14301 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14302 NULL_TREE, NULL_TREE);
14307 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14308 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14309 && type == TREE_TYPE (TREE_TYPE (subtype))
14310 && (!in_gimple_form
14311 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14313 tree type_domain;
14314 tree min_val = size_zero_node;
14315 sub = build_fold_indirect_ref_loc (loc, sub);
14316 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14317 if (type_domain && TYPE_MIN_VALUE (type_domain))
14318 min_val = TYPE_MIN_VALUE (type_domain);
14319 if (in_gimple_form
14320 && TREE_CODE (min_val) != INTEGER_CST)
14321 return NULL_TREE;
14322 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14323 NULL_TREE);
14326 return NULL_TREE;
14329 /* Builds an expression for an indirection through T, simplifying some
14330 cases. */
14332 tree
14333 build_fold_indirect_ref_loc (location_t loc, tree t)
14335 tree type = TREE_TYPE (TREE_TYPE (t));
14336 tree sub = fold_indirect_ref_1 (loc, type, t);
14338 if (sub)
14339 return sub;
14341 return build1_loc (loc, INDIRECT_REF, type, t);
14344 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14346 tree
14347 fold_indirect_ref_loc (location_t loc, tree t)
14349 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14351 if (sub)
14352 return sub;
14353 else
14354 return t;
14357 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14358 whose result is ignored. The type of the returned tree need not be
14359 the same as the original expression. */
14361 tree
14362 fold_ignored_result (tree t)
14364 if (!TREE_SIDE_EFFECTS (t))
14365 return integer_zero_node;
14367 for (;;)
14368 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14370 case tcc_unary:
14371 t = TREE_OPERAND (t, 0);
14372 break;
14374 case tcc_binary:
14375 case tcc_comparison:
14376 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14377 t = TREE_OPERAND (t, 0);
14378 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14379 t = TREE_OPERAND (t, 1);
14380 else
14381 return t;
14382 break;
14384 case tcc_expression:
14385 switch (TREE_CODE (t))
14387 case COMPOUND_EXPR:
14388 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14389 return t;
14390 t = TREE_OPERAND (t, 0);
14391 break;
14393 case COND_EXPR:
14394 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14395 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14396 return t;
14397 t = TREE_OPERAND (t, 0);
14398 break;
14400 default:
14401 return t;
14403 break;
14405 default:
14406 return t;
14410 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14412 tree
14413 round_up_loc (location_t loc, tree value, unsigned int divisor)
14415 tree div = NULL_TREE;
14417 if (divisor == 1)
14418 return value;
14420 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14421 have to do anything. Only do this when we are not given a const,
14422 because in that case, this check is more expensive than just
14423 doing it. */
14424 if (TREE_CODE (value) != INTEGER_CST)
14426 div = build_int_cst (TREE_TYPE (value), divisor);
14428 if (multiple_of_p (TREE_TYPE (value), value, div))
14429 return value;
14432 /* If divisor is a power of two, simplify this to bit manipulation. */
14433 if (divisor == (divisor & -divisor))
14435 if (TREE_CODE (value) == INTEGER_CST)
14437 wide_int val = value;
14438 bool overflow_p;
14440 if ((val & (divisor - 1)) == 0)
14441 return value;
14443 overflow_p = TREE_OVERFLOW (value);
14444 val += divisor - 1;
14445 val &= - (int) divisor;
14446 if (val == 0)
14447 overflow_p = true;
14449 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14451 else
14453 tree t;
14455 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14456 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14457 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14458 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14461 else
14463 if (!div)
14464 div = build_int_cst (TREE_TYPE (value), divisor);
14465 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14466 value = size_binop_loc (loc, MULT_EXPR, value, div);
14469 return value;
14472 /* Likewise, but round down. */
14474 tree
14475 round_down_loc (location_t loc, tree value, int divisor)
14477 tree div = NULL_TREE;
14479 gcc_assert (divisor > 0);
14480 if (divisor == 1)
14481 return value;
14483 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14484 have to do anything. Only do this when we are not given a const,
14485 because in that case, this check is more expensive than just
14486 doing it. */
14487 if (TREE_CODE (value) != INTEGER_CST)
14489 div = build_int_cst (TREE_TYPE (value), divisor);
14491 if (multiple_of_p (TREE_TYPE (value), value, div))
14492 return value;
14495 /* If divisor is a power of two, simplify this to bit manipulation. */
14496 if (divisor == (divisor & -divisor))
14498 tree t;
14500 t = build_int_cst (TREE_TYPE (value), -divisor);
14501 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14503 else
14505 if (!div)
14506 div = build_int_cst (TREE_TYPE (value), divisor);
14507 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14508 value = size_binop_loc (loc, MULT_EXPR, value, div);
14511 return value;
14514 /* Returns the pointer to the base of the object addressed by EXP and
14515 extracts the information about the offset of the access, storing it
14516 to PBITPOS and POFFSET. */
14518 static tree
14519 split_address_to_core_and_offset (tree exp,
14520 HOST_WIDE_INT *pbitpos, tree *poffset)
14522 tree core;
14523 machine_mode mode;
14524 int unsignedp, reversep, volatilep;
14525 HOST_WIDE_INT bitsize;
14526 location_t loc = EXPR_LOCATION (exp);
14528 if (TREE_CODE (exp) == ADDR_EXPR)
14530 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14531 poffset, &mode, &unsignedp, &reversep,
14532 &volatilep, false);
14533 core = build_fold_addr_expr_loc (loc, core);
14535 else
14537 core = exp;
14538 *pbitpos = 0;
14539 *poffset = NULL_TREE;
14542 return core;
14545 /* Returns true if addresses of E1 and E2 differ by a constant, false
14546 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14548 bool
14549 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14551 tree core1, core2;
14552 HOST_WIDE_INT bitpos1, bitpos2;
14553 tree toffset1, toffset2, tdiff, type;
14555 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14556 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14558 if (bitpos1 % BITS_PER_UNIT != 0
14559 || bitpos2 % BITS_PER_UNIT != 0
14560 || !operand_equal_p (core1, core2, 0))
14561 return false;
14563 if (toffset1 && toffset2)
14565 type = TREE_TYPE (toffset1);
14566 if (type != TREE_TYPE (toffset2))
14567 toffset2 = fold_convert (type, toffset2);
14569 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14570 if (!cst_and_fits_in_hwi (tdiff))
14571 return false;
14573 *diff = int_cst_value (tdiff);
14575 else if (toffset1 || toffset2)
14577 /* If only one of the offsets is non-constant, the difference cannot
14578 be a constant. */
14579 return false;
14581 else
14582 *diff = 0;
14584 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14585 return true;
14588 /* Return OFF converted to a pointer offset type suitable as offset for
14589 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14590 tree
14591 convert_to_ptrofftype_loc (location_t loc, tree off)
14593 return fold_convert_loc (loc, sizetype, off);
14596 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14597 tree
14598 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14600 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14601 ptr, convert_to_ptrofftype_loc (loc, off));
14604 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14605 tree
14606 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14608 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14609 ptr, size_int (off));
14612 /* Return a char pointer for a C string if it is a string constant
14613 or sum of string constant and integer constant. */
14615 const char *
14616 c_getstr (tree src)
14618 tree offset_node;
14620 src = string_constant (src, &offset_node);
14621 if (src == 0)
14622 return 0;
14624 if (offset_node == 0)
14625 return TREE_STRING_POINTER (src);
14626 else if (!tree_fits_uhwi_p (offset_node)
14627 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14628 return 0;
14630 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);