Wmisleading-indentation: add reproducer for PR c/70085
[official-gcc.git] / gcc / fold-const.c
blob696b4a6996fa735d9bd794549f6cf668a34a1775
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
88 /* The following constants represent a bit based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (location_t, tree, tree, enum tree_code,
113 tree *, tree *, tree *, int);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int operand_equal_for_comparison_p (tree, tree, tree);
118 static int twoval_comparison_p (tree, tree *, tree *, int *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree make_bit_field_ref (location_t, tree, tree,
121 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
122 static tree optimize_bit_field_compare (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
125 HOST_WIDE_INT *,
126 machine_mode *, int *, int *, int *,
127 tree *, tree *);
128 static int simple_operand_p (const_tree);
129 static bool simple_operand_p_2 (tree);
130 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
131 static tree range_predecessor (tree);
132 static tree range_successor (tree);
133 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
135 static tree unextend (tree, int, int, tree);
136 static tree optimize_minmax_comparison (location_t, enum tree_code,
137 tree, tree, tree);
138 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
139 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
140 static tree fold_binary_op_with_conditional_arg (location_t,
141 enum tree_code, tree,
142 tree, tree,
143 tree, tree, int);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static bool vec_cst_ctor_to_array (tree, tree *);
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
157 static location_t
158 expr_location_or (tree t, location_t loc)
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
164 /* Similar to protected_set_expr_location, but never modify x in place,
165 if location can and needs to be set, unshare it. */
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
179 return x;
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
189 widest_int quo;
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
195 return NULL_TREE;
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
207 static int fold_deferring_overflow_warnings;
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
214 static const char* fold_deferred_overflow_warning;
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
224 void
225 fold_defer_overflow_warnings (void)
227 ++fold_deferring_overflow_warnings;
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
242 const char *warnmsg;
243 location_t locus;
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
259 if (!issue || warnmsg == NULL)
260 return;
262 if (gimple_no_warning_p (stmt))
263 return;
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
270 if (!issue_strict_overflow_warning (code))
271 return;
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
286 fold_undefer_overflow_warnings (false, NULL, 0);
289 /* Whether we are deferring overflow warnings. */
291 bool
292 fold_deferring_overflow_warnings_p (void)
294 return fold_deferring_overflow_warnings > 0;
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
303 if (fold_deferring_overflow_warnings > 0)
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
316 /* Return true if the built-in mathematical function specified by CODE
317 is odd, i.e. -f(x) == f(-x). */
319 bool
320 negate_mathfn_p (combined_fn fn)
322 switch (fn)
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_SIN:
343 CASE_CFN_SINH:
344 CASE_CFN_TAN:
345 CASE_CFN_TANH:
346 CASE_CFN_TRUNC:
347 return true;
349 CASE_CFN_LLRINT:
350 CASE_CFN_LRINT:
351 CASE_CFN_NEARBYINT:
352 CASE_CFN_RINT:
353 return !flag_rounding_math;
355 default:
356 break;
358 return false;
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
364 bool
365 may_negate_without_overflow_p (const_tree t)
367 tree type;
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
375 return !wi::only_sign_bit_p (t);
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
381 static bool
382 negate_expr_p (tree t)
384 tree type;
386 if (t == 0)
387 return false;
389 type = TREE_TYPE (t);
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
394 case INTEGER_CST:
395 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
396 return true;
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
404 case FIXED_CST:
405 return true;
407 case NEGATE_EXPR:
408 return !TYPE_OVERFLOW_SANITIZED (type);
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
419 case VECTOR_CST:
421 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
422 return true;
424 int count = TYPE_VECTOR_SUBPARTS (type), i;
426 for (i = 0; i < count; i++)
427 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
428 return false;
430 return true;
433 case COMPLEX_EXPR:
434 return negate_expr_p (TREE_OPERAND (t, 0))
435 && negate_expr_p (TREE_OPERAND (t, 1));
437 case CONJ_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0));
440 case PLUS_EXPR:
441 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
442 || HONOR_SIGNED_ZEROS (element_mode (type))
443 || (INTEGRAL_TYPE_P (type)
444 && ! TYPE_OVERFLOW_WRAPS (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && (! INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (type))
465 break;
466 /* INT_MIN/n * n doesn't overflow while negating one operand it does
467 if n is a power of two. */
468 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
469 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
470 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
471 && ! integer_pow2p (TREE_OPERAND (t, 0)))
472 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
473 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
474 break;
476 /* Fall through. */
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 if (negate_expr_p (TREE_OPERAND (t, 0)))
490 return true;
491 /* In general we can't negate B in A / B, because if A is INT_MIN and
492 B is 1, we may turn this into INT_MIN / -1 which is undefined
493 and actually traps on some architectures. */
494 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
495 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_onep (TREE_OPERAND (t, 1))))
498 return negate_expr_p (TREE_OPERAND (t, 1));
499 break;
501 case NOP_EXPR:
502 /* Negate -((double)float) as (double)(-float). */
503 if (TREE_CODE (type) == REAL_TYPE)
505 tree tem = strip_float_extensions (t);
506 if (tem != t)
507 return negate_expr_p (tem);
509 break;
511 case CALL_EXPR:
512 /* Negate -f(x) as f(-x). */
513 if (negate_mathfn_p (get_call_combined_fn (t)))
514 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 break;
517 case RSHIFT_EXPR:
518 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
519 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
521 tree op1 = TREE_OPERAND (t, 1);
522 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
523 return true;
525 break;
527 default:
528 break;
530 return false;
533 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
534 simplification is possible.
535 If negate_expr_p would return true for T, NULL_TREE will never be
536 returned. */
538 static tree
539 fold_negate_expr (location_t loc, tree t)
541 tree type = TREE_TYPE (t);
542 tree tem;
544 switch (TREE_CODE (t))
546 /* Convert - (~A) to A + 1. */
547 case BIT_NOT_EXPR:
548 if (INTEGRAL_TYPE_P (type))
549 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
550 build_one_cst (type));
551 break;
553 case INTEGER_CST:
554 tem = fold_negate_const (t, type);
555 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
556 || (ANY_INTEGRAL_TYPE_P (type)
557 && !TYPE_OVERFLOW_TRAPS (type)
558 && TYPE_OVERFLOW_WRAPS (type))
559 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
560 return tem;
561 break;
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
567 case FIXED_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
571 case COMPLEX_CST:
573 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
574 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
575 if (rpart && ipart)
576 return build_complex (type, rpart, ipart);
578 break;
580 case VECTOR_CST:
582 int count = TYPE_VECTOR_SUBPARTS (type), i;
583 tree *elts = XALLOCAVEC (tree, count);
585 for (i = 0; i < count; i++)
587 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
588 if (elts[i] == NULL_TREE)
589 return NULL_TREE;
592 return build_vector (type, elts);
595 case COMPLEX_EXPR:
596 if (negate_expr_p (t))
597 return fold_build2_loc (loc, COMPLEX_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
599 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
600 break;
602 case CONJ_EXPR:
603 if (negate_expr_p (t))
604 return fold_build1_loc (loc, CONJ_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
606 break;
608 case NEGATE_EXPR:
609 if (!TYPE_OVERFLOW_SANITIZED (type))
610 return TREE_OPERAND (t, 0);
611 break;
613 case PLUS_EXPR:
614 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
615 && !HONOR_SIGNED_ZEROS (element_mode (type)))
617 /* -(A + B) -> (-B) - A. */
618 if (negate_expr_p (TREE_OPERAND (t, 1))
619 && reorder_operands_p (TREE_OPERAND (t, 0),
620 TREE_OPERAND (t, 1)))
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
635 break;
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
640 && !HONOR_SIGNED_ZEROS (element_mode (type))
641 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
642 return fold_build2_loc (loc, MINUS_EXPR, type,
643 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
644 break;
646 case MULT_EXPR:
647 if (TYPE_UNSIGNED (type))
648 break;
650 /* Fall through. */
652 case RDIV_EXPR:
653 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 TREE_OPERAND (t, 0), negate_expr (tem));
659 tem = TREE_OPERAND (t, 0);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 negate_expr (tem), TREE_OPERAND (t, 1));
664 break;
666 case TRUNC_DIV_EXPR:
667 case ROUND_DIV_EXPR:
668 case EXACT_DIV_EXPR:
669 if (TYPE_UNSIGNED (type))
670 break;
671 if (negate_expr_p (TREE_OPERAND (t, 0)))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 negate_expr (TREE_OPERAND (t, 0)),
674 TREE_OPERAND (t, 1));
675 /* In general we can't negate B in A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. */
678 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
679 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
680 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
681 && ! integer_onep (TREE_OPERAND (t, 1))))
682 && negate_expr_p (TREE_OPERAND (t, 1)))
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0),
685 negate_expr (TREE_OPERAND (t, 1)));
686 break;
688 case NOP_EXPR:
689 /* Convert -((double)float) into (double)(-float). */
690 if (TREE_CODE (type) == REAL_TYPE)
692 tem = strip_float_extensions (t);
693 if (tem != t && negate_expr_p (tem))
694 return fold_convert_loc (loc, type, negate_expr (tem));
696 break;
698 case CALL_EXPR:
699 /* Negate -f(x) as f(-x). */
700 if (negate_mathfn_p (get_call_combined_fn (t))
701 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
703 tree fndecl, arg;
705 fndecl = get_callee_fndecl (t);
706 arg = negate_expr (CALL_EXPR_ARG (t, 0));
707 return build_call_expr_loc (loc, fndecl, 1, arg);
709 break;
711 case RSHIFT_EXPR:
712 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
713 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
715 tree op1 = TREE_OPERAND (t, 1);
716 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
726 break;
728 default:
729 break;
732 return NULL_TREE;
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
737 return NULL_TREE. */
739 static tree
740 negate_expr (tree t)
742 tree type, tem;
743 location_t loc;
745 if (t == NULL_TREE)
746 return NULL_TREE;
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead. If a variable part is of pointer
772 type, it is negated after converting to TYPE. This prevents us from
773 generating illegal MINUS pointer expression. LOC is the location of
774 the converted variable part.
776 If IN is itself a literal or constant, return it as appropriate.
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
781 static tree
782 split_tree (location_t loc, tree in, tree type, enum tree_code code,
783 tree *conp, tree *litp, tree *minus_litp, int negate_p)
785 tree var = 0;
787 *conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
828 var = in;
829 else if (op0 != 0)
830 var = op0;
831 else
832 var = op1, neg_var_p = neg1_p;
834 /* Now do any needed negations. */
835 if (neg_litp_p)
836 *minus_litp = *litp, *litp = 0;
837 if (neg_conp_p)
838 *conp = negate_expr (*conp);
839 if (neg_var_p)
841 /* Convert to TYPE before negating a pointer type expr. */
842 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
843 var = fold_convert_loc (loc, type, var);
844 var = negate_expr (var);
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
850 /* -X - 1 is folded to ~X, undo that here. */
851 *minus_litp = build_one_cst (TREE_TYPE (in));
852 var = negate_expr (TREE_OPERAND (in, 0));
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else
857 var = in;
859 if (negate_p)
861 if (*litp)
862 *minus_litp = *litp, *litp = 0;
863 else if (*minus_litp)
864 *litp = *minus_litp, *minus_litp = 0;
865 *conp = negate_expr (*conp);
866 /* Convert to TYPE before negating a pointer type expr. */
867 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
868 var = fold_convert_loc (loc, type, var);
869 var = negate_expr (var);
872 return var;
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
880 static tree
881 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
883 if (t1 == 0)
884 return t2;
885 else if (t2 == 0)
886 return t1;
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
892 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
894 if (code == PLUS_EXPR)
896 if (TREE_CODE (t1) == NEGATE_EXPR)
897 return build2_loc (loc, MINUS_EXPR, type,
898 fold_convert_loc (loc, type, t2),
899 fold_convert_loc (loc, type,
900 TREE_OPERAND (t1, 0)));
901 else if (TREE_CODE (t2) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t2, 0)));
906 else if (integer_zerop (t2))
907 return fold_convert_loc (loc, type, t1);
909 else if (code == MINUS_EXPR)
911 if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
915 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
919 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
934 switch (code)
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
942 default:
943 break;
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
956 static tree
957 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
958 int overflowable)
960 wide_int res;
961 tree t;
962 tree type = TREE_TYPE (arg1);
963 signop sign = TYPE_SIGN (type);
964 bool overflow = false;
966 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
967 TYPE_SIGN (TREE_TYPE (parg2)));
969 switch (code)
971 case BIT_IOR_EXPR:
972 res = wi::bit_or (arg1, arg2);
973 break;
975 case BIT_XOR_EXPR:
976 res = wi::bit_xor (arg1, arg2);
977 break;
979 case BIT_AND_EXPR:
980 res = wi::bit_and (arg1, arg2);
981 break;
983 case RSHIFT_EXPR:
984 case LSHIFT_EXPR:
985 if (wi::neg_p (arg2))
987 arg2 = -arg2;
988 if (code == RSHIFT_EXPR)
989 code = LSHIFT_EXPR;
990 else
991 code = RSHIFT_EXPR;
994 if (code == RSHIFT_EXPR)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res = wi::rshift (arg1, arg2, sign);
999 else
1000 res = wi::lshift (arg1, arg2);
1001 break;
1003 case RROTATE_EXPR:
1004 case LROTATE_EXPR:
1005 if (wi::neg_p (arg2))
1007 arg2 = -arg2;
1008 if (code == RROTATE_EXPR)
1009 code = LROTATE_EXPR;
1010 else
1011 code = RROTATE_EXPR;
1014 if (code == RROTATE_EXPR)
1015 res = wi::rrotate (arg1, arg2);
1016 else
1017 res = wi::lrotate (arg1, arg2);
1018 break;
1020 case PLUS_EXPR:
1021 res = wi::add (arg1, arg2, sign, &overflow);
1022 break;
1024 case MINUS_EXPR:
1025 res = wi::sub (arg1, arg2, sign, &overflow);
1026 break;
1028 case MULT_EXPR:
1029 res = wi::mul (arg1, arg2, sign, &overflow);
1030 break;
1032 case MULT_HIGHPART_EXPR:
1033 res = wi::mul_high (arg1, arg2, sign);
1034 break;
1036 case TRUNC_DIV_EXPR:
1037 case EXACT_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1041 break;
1043 case FLOOR_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_floor (arg1, arg2, sign, &overflow);
1047 break;
1049 case CEIL_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1053 break;
1055 case ROUND_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_round (arg1, arg2, sign, &overflow);
1059 break;
1061 case TRUNC_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1065 break;
1067 case FLOOR_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1071 break;
1073 case CEIL_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1077 break;
1079 case ROUND_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_round (arg1, arg2, sign, &overflow);
1083 break;
1085 case MIN_EXPR:
1086 res = wi::min (arg1, arg2, sign);
1087 break;
1089 case MAX_EXPR:
1090 res = wi::max (arg1, arg2, sign);
1091 break;
1093 default:
1094 return NULL_TREE;
1097 t = force_fit_type (type, res, overflowable,
1098 (((sign == SIGNED || overflowable == -1)
1099 && overflow)
1100 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1102 return t;
1105 tree
1106 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1108 return int_const_binop_1 (code, arg1, arg2, 1);
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1116 static tree
1117 const_binop (enum tree_code code, tree arg1, tree arg2)
1119 /* Sanity check for the recursive cases. */
1120 if (!arg1 || !arg2)
1121 return NULL_TREE;
1123 STRIP_NOPS (arg1);
1124 STRIP_NOPS (arg2);
1126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1128 if (code == POINTER_PLUS_EXPR)
1129 return int_const_binop (PLUS_EXPR,
1130 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1132 return int_const_binop (code, arg1, arg2);
1135 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1137 machine_mode mode;
1138 REAL_VALUE_TYPE d1;
1139 REAL_VALUE_TYPE d2;
1140 REAL_VALUE_TYPE value;
1141 REAL_VALUE_TYPE result;
1142 bool inexact;
1143 tree t, type;
1145 /* The following codes are handled by real_arithmetic. */
1146 switch (code)
1148 case PLUS_EXPR:
1149 case MINUS_EXPR:
1150 case MULT_EXPR:
1151 case RDIV_EXPR:
1152 case MIN_EXPR:
1153 case MAX_EXPR:
1154 break;
1156 default:
1157 return NULL_TREE;
1160 d1 = TREE_REAL_CST (arg1);
1161 d2 = TREE_REAL_CST (arg2);
1163 type = TREE_TYPE (arg1);
1164 mode = TYPE_MODE (type);
1166 /* Don't perform operation if we honor signaling NaNs and
1167 either operand is a signaling NaN. */
1168 if (HONOR_SNANS (mode)
1169 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1170 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1171 return NULL_TREE;
1173 /* Don't perform operation if it would raise a division
1174 by zero exception. */
1175 if (code == RDIV_EXPR
1176 && real_equal (&d2, &dconst0)
1177 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1178 return NULL_TREE;
1180 /* If either operand is a NaN, just return it. Otherwise, set up
1181 for floating-point trap; we return an overflow. */
1182 if (REAL_VALUE_ISNAN (d1))
1184 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1185 is off. */
1186 d1.signalling = 0;
1187 t = build_real (type, d1);
1188 return t;
1190 else if (REAL_VALUE_ISNAN (d2))
1192 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1193 is off. */
1194 d2.signalling = 0;
1195 t = build_real (type, d2);
1196 return t;
1199 inexact = real_arithmetic (&value, code, &d1, &d2);
1200 real_convert (&result, mode, &value);
1202 /* Don't constant fold this floating point operation if
1203 the result has overflowed and flag_trapping_math. */
1204 if (flag_trapping_math
1205 && MODE_HAS_INFINITIES (mode)
1206 && REAL_VALUE_ISINF (result)
1207 && !REAL_VALUE_ISINF (d1)
1208 && !REAL_VALUE_ISINF (d2))
1209 return NULL_TREE;
1211 /* Don't constant fold this floating point operation if the
1212 result may dependent upon the run-time rounding mode and
1213 flag_rounding_math is set, or if GCC's software emulation
1214 is unable to accurately represent the result. */
1215 if ((flag_rounding_math
1216 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1217 && (inexact || !real_identical (&result, &value)))
1218 return NULL_TREE;
1220 t = build_real (type, result);
1222 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1223 return t;
1226 if (TREE_CODE (arg1) == FIXED_CST)
1228 FIXED_VALUE_TYPE f1;
1229 FIXED_VALUE_TYPE f2;
1230 FIXED_VALUE_TYPE result;
1231 tree t, type;
1232 int sat_p;
1233 bool overflow_p;
1235 /* The following codes are handled by fixed_arithmetic. */
1236 switch (code)
1238 case PLUS_EXPR:
1239 case MINUS_EXPR:
1240 case MULT_EXPR:
1241 case TRUNC_DIV_EXPR:
1242 if (TREE_CODE (arg2) != FIXED_CST)
1243 return NULL_TREE;
1244 f2 = TREE_FIXED_CST (arg2);
1245 break;
1247 case LSHIFT_EXPR:
1248 case RSHIFT_EXPR:
1250 if (TREE_CODE (arg2) != INTEGER_CST)
1251 return NULL_TREE;
1252 wide_int w2 = arg2;
1253 f2.data.high = w2.elt (1);
1254 f2.data.low = w2.elt (0);
1255 f2.mode = SImode;
1257 break;
1259 default:
1260 return NULL_TREE;
1263 f1 = TREE_FIXED_CST (arg1);
1264 type = TREE_TYPE (arg1);
1265 sat_p = TYPE_SATURATING (type);
1266 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1267 t = build_fixed (type, result);
1268 /* Propagate overflow flags. */
1269 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1270 TREE_OVERFLOW (t) = 1;
1271 return t;
1274 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1276 tree type = TREE_TYPE (arg1);
1277 tree r1 = TREE_REALPART (arg1);
1278 tree i1 = TREE_IMAGPART (arg1);
1279 tree r2 = TREE_REALPART (arg2);
1280 tree i2 = TREE_IMAGPART (arg2);
1281 tree real, imag;
1283 switch (code)
1285 case PLUS_EXPR:
1286 case MINUS_EXPR:
1287 real = const_binop (code, r1, r2);
1288 imag = const_binop (code, i1, i2);
1289 break;
1291 case MULT_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_mul);
1297 real = const_binop (MINUS_EXPR,
1298 const_binop (MULT_EXPR, r1, r2),
1299 const_binop (MULT_EXPR, i1, i2));
1300 imag = const_binop (PLUS_EXPR,
1301 const_binop (MULT_EXPR, r1, i2),
1302 const_binop (MULT_EXPR, i1, r2));
1303 break;
1305 case RDIV_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_div);
1310 /* Fallthru ... */
1311 case TRUNC_DIV_EXPR:
1312 case CEIL_DIV_EXPR:
1313 case FLOOR_DIV_EXPR:
1314 case ROUND_DIV_EXPR:
1315 if (flag_complex_method == 0)
1317 /* Keep this algorithm in sync with
1318 tree-complex.c:expand_complex_div_straight().
1320 Expand complex division to scalars, straightforward algorithm.
1321 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1322 t = br*br + bi*bi
1324 tree magsquared
1325 = const_binop (PLUS_EXPR,
1326 const_binop (MULT_EXPR, r2, r2),
1327 const_binop (MULT_EXPR, i2, i2));
1328 tree t1
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r1, r2),
1331 const_binop (MULT_EXPR, i1, i2));
1332 tree t2
1333 = const_binop (MINUS_EXPR,
1334 const_binop (MULT_EXPR, i1, r2),
1335 const_binop (MULT_EXPR, r1, i2));
1337 real = const_binop (code, t1, magsquared);
1338 imag = const_binop (code, t2, magsquared);
1340 else
1342 /* Keep this algorithm in sync with
1343 tree-complex.c:expand_complex_div_wide().
1345 Expand complex division to scalars, modified algorithm to minimize
1346 overflow with wide input ranges. */
1347 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1348 fold_abs_const (r2, TREE_TYPE (type)),
1349 fold_abs_const (i2, TREE_TYPE (type)));
1351 if (integer_nonzerop (compare))
1353 /* In the TRUE branch, we compute
1354 ratio = br/bi;
1355 div = (br * ratio) + bi;
1356 tr = (ar * ratio) + ai;
1357 ti = (ai * ratio) - ar;
1358 tr = tr / div;
1359 ti = ti / div; */
1360 tree ratio = const_binop (code, r2, i2);
1361 tree div = const_binop (PLUS_EXPR, i2,
1362 const_binop (MULT_EXPR, r2, ratio));
1363 real = const_binop (MULT_EXPR, r1, ratio);
1364 real = const_binop (PLUS_EXPR, real, i1);
1365 real = const_binop (code, real, div);
1367 imag = const_binop (MULT_EXPR, i1, ratio);
1368 imag = const_binop (MINUS_EXPR, imag, r1);
1369 imag = const_binop (code, imag, div);
1371 else
1373 /* In the FALSE branch, we compute
1374 ratio = d/c;
1375 divisor = (d * ratio) + c;
1376 tr = (b * ratio) + a;
1377 ti = b - (a * ratio);
1378 tr = tr / div;
1379 ti = ti / div; */
1380 tree ratio = const_binop (code, i2, r2);
1381 tree div = const_binop (PLUS_EXPR, r2,
1382 const_binop (MULT_EXPR, i2, ratio));
1384 real = const_binop (MULT_EXPR, i1, ratio);
1385 real = const_binop (PLUS_EXPR, real, r1);
1386 real = const_binop (code, real, div);
1388 imag = const_binop (MULT_EXPR, r1, ratio);
1389 imag = const_binop (MINUS_EXPR, i1, imag);
1390 imag = const_binop (code, imag, div);
1393 break;
1395 default:
1396 return NULL_TREE;
1399 if (real && imag)
1400 return build_complex (type, real, imag);
1403 if (TREE_CODE (arg1) == VECTOR_CST
1404 && TREE_CODE (arg2) == VECTOR_CST)
1406 tree type = TREE_TYPE (arg1);
1407 int count = TYPE_VECTOR_SUBPARTS (type), i;
1408 tree *elts = XALLOCAVEC (tree, count);
1410 for (i = 0; i < count; i++)
1412 tree elem1 = VECTOR_CST_ELT (arg1, i);
1413 tree elem2 = VECTOR_CST_ELT (arg2, i);
1415 elts[i] = const_binop (code, elem1, elem2);
1417 /* It is possible that const_binop cannot handle the given
1418 code and return NULL_TREE */
1419 if (elts[i] == NULL_TREE)
1420 return NULL_TREE;
1423 return build_vector (type, elts);
1426 /* Shifts allow a scalar offset for a vector. */
1427 if (TREE_CODE (arg1) == VECTOR_CST
1428 && TREE_CODE (arg2) == INTEGER_CST)
1430 tree type = TREE_TYPE (arg1);
1431 int count = TYPE_VECTOR_SUBPARTS (type), i;
1432 tree *elts = XALLOCAVEC (tree, count);
1434 for (i = 0; i < count; i++)
1436 tree elem1 = VECTOR_CST_ELT (arg1, i);
1438 elts[i] = const_binop (code, elem1, arg2);
1440 /* It is possible that const_binop cannot handle the given
1441 code and return NULL_TREE. */
1442 if (elts[i] == NULL_TREE)
1443 return NULL_TREE;
1446 return build_vector (type, elts);
1448 return NULL_TREE;
1451 /* Overload that adds a TYPE parameter to be able to dispatch
1452 to fold_relational_const. */
1454 tree
1455 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1457 if (TREE_CODE_CLASS (code) == tcc_comparison)
1458 return fold_relational_const (code, type, arg1, arg2);
1460 /* ??? Until we make the const_binop worker take the type of the
1461 result as argument put those cases that need it here. */
1462 switch (code)
1464 case COMPLEX_EXPR:
1465 if ((TREE_CODE (arg1) == REAL_CST
1466 && TREE_CODE (arg2) == REAL_CST)
1467 || (TREE_CODE (arg1) == INTEGER_CST
1468 && TREE_CODE (arg2) == INTEGER_CST))
1469 return build_complex (type, arg1, arg2);
1470 return NULL_TREE;
1472 case VEC_PACK_TRUNC_EXPR:
1473 case VEC_PACK_FIX_TRUNC_EXPR:
1475 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1476 tree *elts;
1478 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1479 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1480 if (TREE_CODE (arg1) != VECTOR_CST
1481 || TREE_CODE (arg2) != VECTOR_CST)
1482 return NULL_TREE;
1484 elts = XALLOCAVEC (tree, nelts);
1485 if (!vec_cst_ctor_to_array (arg1, elts)
1486 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1487 return NULL_TREE;
1489 for (i = 0; i < nelts; i++)
1491 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1492 ? NOP_EXPR : FIX_TRUNC_EXPR,
1493 TREE_TYPE (type), elts[i]);
1494 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1495 return NULL_TREE;
1498 return build_vector (type, elts);
1501 case VEC_WIDEN_MULT_LO_EXPR:
1502 case VEC_WIDEN_MULT_HI_EXPR:
1503 case VEC_WIDEN_MULT_EVEN_EXPR:
1504 case VEC_WIDEN_MULT_ODD_EXPR:
1506 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1507 unsigned int out, ofs, scale;
1508 tree *elts;
1510 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1511 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1512 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1513 return NULL_TREE;
1515 elts = XALLOCAVEC (tree, nelts * 4);
1516 if (!vec_cst_ctor_to_array (arg1, elts)
1517 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1518 return NULL_TREE;
1520 if (code == VEC_WIDEN_MULT_LO_EXPR)
1521 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1522 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1523 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1524 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1525 scale = 1, ofs = 0;
1526 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1527 scale = 1, ofs = 1;
1529 for (out = 0; out < nelts; out++)
1531 unsigned int in1 = (out << scale) + ofs;
1532 unsigned int in2 = in1 + nelts * 2;
1533 tree t1, t2;
1535 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1536 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1538 if (t1 == NULL_TREE || t2 == NULL_TREE)
1539 return NULL_TREE;
1540 elts[out] = const_binop (MULT_EXPR, t1, t2);
1541 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1542 return NULL_TREE;
1545 return build_vector (type, elts);
1548 default:;
1551 if (TREE_CODE_CLASS (code) != tcc_binary)
1552 return NULL_TREE;
1554 /* Make sure type and arg0 have the same saturating flag. */
1555 gcc_checking_assert (TYPE_SATURATING (type)
1556 == TYPE_SATURATING (TREE_TYPE (arg1)));
1558 return const_binop (code, arg1, arg2);
1561 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1562 Return zero if computing the constants is not possible. */
1564 tree
1565 const_unop (enum tree_code code, tree type, tree arg0)
1567 /* Don't perform the operation, other than NEGATE and ABS, if
1568 flag_signaling_nans is on and the operand is a signaling NaN. */
1569 if (TREE_CODE (arg0) == REAL_CST
1570 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1571 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1572 && code != NEGATE_EXPR
1573 && code != ABS_EXPR)
1574 return NULL_TREE;
1576 switch (code)
1578 CASE_CONVERT:
1579 case FLOAT_EXPR:
1580 case FIX_TRUNC_EXPR:
1581 case FIXED_CONVERT_EXPR:
1582 return fold_convert_const (code, type, arg0);
1584 case ADDR_SPACE_CONVERT_EXPR:
1585 /* If the source address is 0, and the source address space
1586 cannot have a valid object at 0, fold to dest type null. */
1587 if (integer_zerop (arg0)
1588 && !(targetm.addr_space.zero_address_valid
1589 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1590 return fold_convert_const (code, type, arg0);
1591 break;
1593 case VIEW_CONVERT_EXPR:
1594 return fold_view_convert_expr (type, arg0);
1596 case NEGATE_EXPR:
1598 /* Can't call fold_negate_const directly here as that doesn't
1599 handle all cases and we might not be able to negate some
1600 constants. */
1601 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1602 if (tem && CONSTANT_CLASS_P (tem))
1603 return tem;
1604 break;
1607 case ABS_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1609 return fold_abs_const (arg0, type);
1610 break;
1612 case CONJ_EXPR:
1613 if (TREE_CODE (arg0) == COMPLEX_CST)
1615 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1616 TREE_TYPE (type));
1617 return build_complex (type, TREE_REALPART (arg0), ipart);
1619 break;
1621 case BIT_NOT_EXPR:
1622 if (TREE_CODE (arg0) == INTEGER_CST)
1623 return fold_not_const (arg0, type);
1624 /* Perform BIT_NOT_EXPR on each element individually. */
1625 else if (TREE_CODE (arg0) == VECTOR_CST)
1627 tree *elements;
1628 tree elem;
1629 unsigned count = VECTOR_CST_NELTS (arg0), i;
1631 elements = XALLOCAVEC (tree, count);
1632 for (i = 0; i < count; i++)
1634 elem = VECTOR_CST_ELT (arg0, i);
1635 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1636 if (elem == NULL_TREE)
1637 break;
1638 elements[i] = elem;
1640 if (i == count)
1641 return build_vector (type, elements);
1643 break;
1645 case TRUTH_NOT_EXPR:
1646 if (TREE_CODE (arg0) == INTEGER_CST)
1647 return constant_boolean_node (integer_zerop (arg0), type);
1648 break;
1650 case REALPART_EXPR:
1651 if (TREE_CODE (arg0) == COMPLEX_CST)
1652 return fold_convert (type, TREE_REALPART (arg0));
1653 break;
1655 case IMAGPART_EXPR:
1656 if (TREE_CODE (arg0) == COMPLEX_CST)
1657 return fold_convert (type, TREE_IMAGPART (arg0));
1658 break;
1660 case VEC_UNPACK_LO_EXPR:
1661 case VEC_UNPACK_HI_EXPR:
1662 case VEC_UNPACK_FLOAT_LO_EXPR:
1663 case VEC_UNPACK_FLOAT_HI_EXPR:
1665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1666 tree *elts;
1667 enum tree_code subcode;
1669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1670 if (TREE_CODE (arg0) != VECTOR_CST)
1671 return NULL_TREE;
1673 elts = XALLOCAVEC (tree, nelts * 2);
1674 if (!vec_cst_ctor_to_array (arg0, elts))
1675 return NULL_TREE;
1677 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1678 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1679 elts += nelts;
1681 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1682 subcode = NOP_EXPR;
1683 else
1684 subcode = FLOAT_EXPR;
1686 for (i = 0; i < nelts; i++)
1688 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1689 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1690 return NULL_TREE;
1693 return build_vector (type, elts);
1696 case REDUC_MIN_EXPR:
1697 case REDUC_MAX_EXPR:
1698 case REDUC_PLUS_EXPR:
1700 unsigned int nelts, i;
1701 tree *elts;
1702 enum tree_code subcode;
1704 if (TREE_CODE (arg0) != VECTOR_CST)
1705 return NULL_TREE;
1706 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1708 elts = XALLOCAVEC (tree, nelts);
1709 if (!vec_cst_ctor_to_array (arg0, elts))
1710 return NULL_TREE;
1712 switch (code)
1714 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1715 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1716 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1717 default: gcc_unreachable ();
1720 for (i = 1; i < nelts; i++)
1722 elts[0] = const_binop (subcode, elts[0], elts[i]);
1723 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1724 return NULL_TREE;
1727 return elts[0];
1730 default:
1731 break;
1734 return NULL_TREE;
1737 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1738 indicates which particular sizetype to create. */
1740 tree
1741 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1743 return build_int_cst (sizetype_tab[(int) kind], number);
1746 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1747 is a tree code. The type of the result is taken from the operands.
1748 Both must be equivalent integer types, ala int_binop_types_match_p.
1749 If the operands are constant, so is the result. */
1751 tree
1752 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1754 tree type = TREE_TYPE (arg0);
1756 if (arg0 == error_mark_node || arg1 == error_mark_node)
1757 return error_mark_node;
1759 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1760 TREE_TYPE (arg1)));
1762 /* Handle the special case of two integer constants faster. */
1763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1765 /* And some specific cases even faster than that. */
1766 if (code == PLUS_EXPR)
1768 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1771 return arg0;
1773 else if (code == MINUS_EXPR)
1775 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1776 return arg0;
1778 else if (code == MULT_EXPR)
1780 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1781 return arg1;
1784 /* Handle general case of two integer constants. For sizetype
1785 constant calculations we always want to know about overflow,
1786 even in the unsigned case. */
1787 return int_const_binop_1 (code, arg0, arg1, -1);
1790 return fold_build2_loc (loc, code, type, arg0, arg1);
1793 /* Given two values, either both of sizetype or both of bitsizetype,
1794 compute the difference between the two values. Return the value
1795 in signed type corresponding to the type of the operands. */
1797 tree
1798 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1800 tree type = TREE_TYPE (arg0);
1801 tree ctype;
1803 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1804 TREE_TYPE (arg1)));
1806 /* If the type is already signed, just do the simple thing. */
1807 if (!TYPE_UNSIGNED (type))
1808 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1810 if (type == sizetype)
1811 ctype = ssizetype;
1812 else if (type == bitsizetype)
1813 ctype = sbitsizetype;
1814 else
1815 ctype = signed_type_for (type);
1817 /* If either operand is not a constant, do the conversions to the signed
1818 type and subtract. The hardware will do the right thing with any
1819 overflow in the subtraction. */
1820 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1821 return size_binop_loc (loc, MINUS_EXPR,
1822 fold_convert_loc (loc, ctype, arg0),
1823 fold_convert_loc (loc, ctype, arg1));
1825 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1826 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1827 overflow) and negate (which can't either). Special-case a result
1828 of zero while we're here. */
1829 if (tree_int_cst_equal (arg0, arg1))
1830 return build_int_cst (ctype, 0);
1831 else if (tree_int_cst_lt (arg1, arg0))
1832 return fold_convert_loc (loc, ctype,
1833 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1834 else
1835 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1836 fold_convert_loc (loc, ctype,
1837 size_binop_loc (loc,
1838 MINUS_EXPR,
1839 arg1, arg0)));
1842 /* A subroutine of fold_convert_const handling conversions of an
1843 INTEGER_CST to another integer type. */
1845 static tree
1846 fold_convert_const_int_from_int (tree type, const_tree arg1)
1848 /* Given an integer constant, make new constant with new type,
1849 appropriately sign-extended or truncated. Use widest_int
1850 so that any extension is done according ARG1's type. */
1851 return force_fit_type (type, wi::to_widest (arg1),
1852 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1853 TREE_OVERFLOW (arg1));
1856 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1857 to an integer type. */
1859 static tree
1860 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1862 bool overflow = false;
1863 tree t;
1865 /* The following code implements the floating point to integer
1866 conversion rules required by the Java Language Specification,
1867 that IEEE NaNs are mapped to zero and values that overflow
1868 the target precision saturate, i.e. values greater than
1869 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1870 are mapped to INT_MIN. These semantics are allowed by the
1871 C and C++ standards that simply state that the behavior of
1872 FP-to-integer conversion is unspecified upon overflow. */
1874 wide_int val;
1875 REAL_VALUE_TYPE r;
1876 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1878 switch (code)
1880 case FIX_TRUNC_EXPR:
1881 real_trunc (&r, VOIDmode, &x);
1882 break;
1884 default:
1885 gcc_unreachable ();
1888 /* If R is NaN, return zero and show we have an overflow. */
1889 if (REAL_VALUE_ISNAN (r))
1891 overflow = true;
1892 val = wi::zero (TYPE_PRECISION (type));
1895 /* See if R is less than the lower bound or greater than the
1896 upper bound. */
1898 if (! overflow)
1900 tree lt = TYPE_MIN_VALUE (type);
1901 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1902 if (real_less (&r, &l))
1904 overflow = true;
1905 val = lt;
1909 if (! overflow)
1911 tree ut = TYPE_MAX_VALUE (type);
1912 if (ut)
1914 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1915 if (real_less (&u, &r))
1917 overflow = true;
1918 val = ut;
1923 if (! overflow)
1924 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1926 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1927 return t;
1930 /* A subroutine of fold_convert_const handling conversions of a
1931 FIXED_CST to an integer type. */
1933 static tree
1934 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1936 tree t;
1937 double_int temp, temp_trunc;
1938 unsigned int mode;
1940 /* Right shift FIXED_CST to temp by fbit. */
1941 temp = TREE_FIXED_CST (arg1).data;
1942 mode = TREE_FIXED_CST (arg1).mode;
1943 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1945 temp = temp.rshift (GET_MODE_FBIT (mode),
1946 HOST_BITS_PER_DOUBLE_INT,
1947 SIGNED_FIXED_POINT_MODE_P (mode));
1949 /* Left shift temp to temp_trunc by fbit. */
1950 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1951 HOST_BITS_PER_DOUBLE_INT,
1952 SIGNED_FIXED_POINT_MODE_P (mode));
1954 else
1956 temp = double_int_zero;
1957 temp_trunc = double_int_zero;
1960 /* If FIXED_CST is negative, we need to round the value toward 0.
1961 By checking if the fractional bits are not zero to add 1 to temp. */
1962 if (SIGNED_FIXED_POINT_MODE_P (mode)
1963 && temp_trunc.is_negative ()
1964 && TREE_FIXED_CST (arg1).data != temp_trunc)
1965 temp += double_int_one;
1967 /* Given a fixed-point constant, make new constant with new type,
1968 appropriately sign-extended or truncated. */
1969 t = force_fit_type (type, temp, -1,
1970 (temp.is_negative ()
1971 && (TYPE_UNSIGNED (type)
1972 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1973 | TREE_OVERFLOW (arg1));
1975 return t;
1978 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1979 to another floating point type. */
1981 static tree
1982 fold_convert_const_real_from_real (tree type, const_tree arg1)
1984 REAL_VALUE_TYPE value;
1985 tree t;
1987 /* Don't perform the operation if flag_signaling_nans is on
1988 and the operand is a signaling NaN. */
1989 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1990 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1991 return NULL_TREE;
1993 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1994 t = build_real (type, value);
1996 /* If converting an infinity or NAN to a representation that doesn't
1997 have one, set the overflow bit so that we can produce some kind of
1998 error message at the appropriate point if necessary. It's not the
1999 most user-friendly message, but it's better than nothing. */
2000 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2001 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2002 TREE_OVERFLOW (t) = 1;
2003 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2004 && !MODE_HAS_NANS (TYPE_MODE (type)))
2005 TREE_OVERFLOW (t) = 1;
2006 /* Regular overflow, conversion produced an infinity in a mode that
2007 can't represent them. */
2008 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2009 && REAL_VALUE_ISINF (value)
2010 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2011 TREE_OVERFLOW (t) = 1;
2012 else
2013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 return t;
2017 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2018 to a floating point type. */
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2023 REAL_VALUE_TYPE value;
2024 tree t;
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2033 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2034 to another fixed-point type. */
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2053 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2054 to a fixed-point type. */
2056 static tree
2057 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2059 FIXED_VALUE_TYPE value;
2060 tree t;
2061 bool overflow_p;
2062 double_int di;
2064 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2066 di.low = TREE_INT_CST_ELT (arg1, 0);
2067 if (TREE_INT_CST_NUNITS (arg1) == 1)
2068 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2069 else
2070 di.high = TREE_INT_CST_ELT (arg1, 1);
2072 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2073 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2074 TYPE_SATURATING (type));
2075 t = build_fixed (type, value);
2077 /* Propagate overflow flags. */
2078 if (overflow_p | TREE_OVERFLOW (arg1))
2079 TREE_OVERFLOW (t) = 1;
2080 return t;
2083 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2084 to a fixed-point type. */
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2104 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2105 type TYPE. If no simplification can be done return NULL_TREE. */
2107 static tree
2108 fold_convert_const (enum tree_code code, tree type, tree arg1)
2110 if (TREE_TYPE (arg1) == type)
2111 return arg1;
2113 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2114 || TREE_CODE (type) == OFFSET_TYPE)
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return fold_convert_const_int_from_int (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_int_from_real (code, type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_int_from_fixed (type, arg1);
2123 else if (TREE_CODE (type) == REAL_TYPE)
2125 if (TREE_CODE (arg1) == INTEGER_CST)
2126 return build_real_from_int_cst (type, arg1);
2127 else if (TREE_CODE (arg1) == REAL_CST)
2128 return fold_convert_const_real_from_real (type, arg1);
2129 else if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_real_from_fixed (type, arg1);
2132 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2134 if (TREE_CODE (arg1) == FIXED_CST)
2135 return fold_convert_const_fixed_from_fixed (type, arg1);
2136 else if (TREE_CODE (arg1) == INTEGER_CST)
2137 return fold_convert_const_fixed_from_int (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_fixed_from_real (type, arg1);
2141 else if (TREE_CODE (type) == VECTOR_TYPE)
2143 if (TREE_CODE (arg1) == VECTOR_CST
2144 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2146 int len = TYPE_VECTOR_SUBPARTS (type);
2147 tree elttype = TREE_TYPE (type);
2148 tree *v = XALLOCAVEC (tree, len);
2149 for (int i = 0; i < len; ++i)
2151 tree elt = VECTOR_CST_ELT (arg1, i);
2152 tree cvt = fold_convert_const (code, elttype, elt);
2153 if (cvt == NULL_TREE)
2154 return NULL_TREE;
2155 v[i] = cvt;
2157 return build_vector (type, v);
2160 return NULL_TREE;
2163 /* Construct a vector of zero elements of vector type TYPE. */
2165 static tree
2166 build_zero_vector (tree type)
2168 tree t;
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2174 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2179 tree orig = TREE_TYPE (arg);
2181 if (type == orig)
2182 return true;
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2192 switch (TREE_CODE (type))
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2207 default:
2208 return false;
2212 /* Convert expression ARG to type TYPE. Used by the middle-end for
2213 simple conversions in preference to calling the front-end's convert. */
2215 tree
2216 fold_convert_loc (location_t loc, tree type, tree arg)
2218 tree orig = TREE_TYPE (arg);
2219 tree tem;
2221 if (type == orig)
2222 return arg;
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
2227 return error_mark_node;
2229 switch (TREE_CODE (type))
2231 case POINTER_TYPE:
2232 case REFERENCE_TYPE:
2233 /* Handle conversions between pointers to different address spaces. */
2234 if (POINTER_TYPE_P (orig)
2235 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2236 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2237 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2238 /* fall through */
2240 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2241 case OFFSET_TYPE:
2242 if (TREE_CODE (arg) == INTEGER_CST)
2244 tem = fold_convert_const (NOP_EXPR, type, arg);
2245 if (tem != NULL_TREE)
2246 return tem;
2248 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2249 || TREE_CODE (orig) == OFFSET_TYPE)
2250 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2251 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 return fold_convert_loc (loc, type,
2253 fold_build1_loc (loc, REALPART_EXPR,
2254 TREE_TYPE (orig), arg));
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2266 else if (TREE_CODE (arg) == REAL_CST)
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2272 else if (TREE_CODE (arg) == FIXED_CST)
2274 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2279 switch (TREE_CODE (orig))
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2286 case REAL_TYPE:
2287 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2289 case FIXED_POINT_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2296 default:
2297 gcc_unreachable ();
2300 case FIXED_POINT_TYPE:
2301 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2302 || TREE_CODE (arg) == REAL_CST)
2304 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2305 if (tem != NULL_TREE)
2306 goto fold_convert_exit;
2309 switch (TREE_CODE (orig))
2311 case FIXED_POINT_TYPE:
2312 case INTEGER_TYPE:
2313 case ENUMERAL_TYPE:
2314 case BOOLEAN_TYPE:
2315 case REAL_TYPE:
2316 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2318 case COMPLEX_TYPE:
2319 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 return fold_convert_loc (loc, type, tem);
2322 default:
2323 gcc_unreachable ();
2326 case COMPLEX_TYPE:
2327 switch (TREE_CODE (orig))
2329 case INTEGER_TYPE:
2330 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case REAL_TYPE:
2333 case FIXED_POINT_TYPE:
2334 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2335 fold_convert_loc (loc, TREE_TYPE (type), arg),
2336 fold_convert_loc (loc, TREE_TYPE (type),
2337 integer_zero_node));
2338 case COMPLEX_TYPE:
2340 tree rpart, ipart;
2342 if (TREE_CODE (arg) == COMPLEX_EXPR)
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2345 TREE_OPERAND (arg, 0));
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2347 TREE_OPERAND (arg, 1));
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2351 arg = save_expr (arg);
2352 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2353 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2359 default:
2360 gcc_unreachable ();
2363 case VECTOR_TYPE:
2364 if (integer_zerop (arg))
2365 return build_zero_vector (type);
2366 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2367 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2368 || TREE_CODE (orig) == VECTOR_TYPE);
2369 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2371 case VOID_TYPE:
2372 tem = fold_ignored_result (arg);
2373 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2375 default:
2376 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2377 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2378 gcc_unreachable ();
2380 fold_convert_exit:
2381 protected_set_expr_location_unshare (tem, loc);
2382 return tem;
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2430 return true;
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2448 /* When pedantic, return an expr equal to X but certainly not valid as a
2449 pedantic lvalue. Otherwise, return X. */
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2454 return protected_set_expr_location_unshare (x, loc);
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2469 switch (code)
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2510 switch (code)
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2548 switch (code)
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2590 switch (code)
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
2631 tree
2632 combine_comparisons (location_t loc,
2633 enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2637 bool honor_nans = HONOR_NANS (ll_arg);
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 int compcode;
2642 switch (code)
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2646 break;
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2650 break;
2652 default:
2653 return NULL_TREE;
2656 if (!honor_nans)
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2666 else if (flag_trapping_math)
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2688 rtrap = false;
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2692 if (rtrap && !ltrap
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2694 return NULL_TREE;
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
2698 return NULL_TREE;
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2705 else
2707 enum tree_code tcode;
2709 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2710 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743 any operand with side effect. This is unnecesarily conservative in the
2744 case we know that arg0 and arg1 are in disjoint code paths (such as in
2745 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2752 /* If either is ERROR_MARK, they aren't equal. */
2753 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2754 || TREE_TYPE (arg0) == error_mark_node
2755 || TREE_TYPE (arg1) == error_mark_node)
2756 return 0;
2758 /* Similar, if either does not have a type (like a released SSA name),
2759 they aren't equal. */
2760 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2761 return 0;
2763 /* We cannot consider pointers to different address space equal. */
2764 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2765 && POINTER_TYPE_P (TREE_TYPE (arg1))
2766 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2767 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2768 return 0;
2770 /* Check equality of integer constants before bailing out due to
2771 precision differences. */
2772 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2774 /* Address of INTEGER_CST is not defined; check that we did not forget
2775 to drop the OEP_ADDRESS_OF flags. */
2776 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2777 return tree_int_cst_equal (arg0, arg1);
2780 if (!(flags & OEP_ADDRESS_OF))
2782 /* If both types don't have the same signedness, then we can't consider
2783 them equal. We must check this before the STRIP_NOPS calls
2784 because they may change the signedness of the arguments. As pointers
2785 strictly don't have a signedness, require either two pointers or
2786 two non-pointers as well. */
2787 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2788 || POINTER_TYPE_P (TREE_TYPE (arg0))
2789 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2792 /* If both types don't have the same precision, then it is not safe
2793 to strip NOPs. */
2794 if (element_precision (TREE_TYPE (arg0))
2795 != element_precision (TREE_TYPE (arg1)))
2796 return 0;
2798 STRIP_NOPS (arg0);
2799 STRIP_NOPS (arg1);
2801 #if 0
2802 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2803 sanity check once the issue is solved. */
2804 else
2805 /* Addresses of conversions and SSA_NAMEs (and many other things)
2806 are not defined. Check that we did not forget to drop the
2807 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2808 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2809 && TREE_CODE (arg0) != SSA_NAME);
2810 #endif
2812 /* In case both args are comparisons but with different comparison
2813 code, try to swap the comparison operands of one arg to produce
2814 a match and compare that variant. */
2815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2816 && COMPARISON_CLASS_P (arg0)
2817 && COMPARISON_CLASS_P (arg1))
2819 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2821 if (TREE_CODE (arg0) == swap_code)
2822 return operand_equal_p (TREE_OPERAND (arg0, 0),
2823 TREE_OPERAND (arg1, 1), flags)
2824 && operand_equal_p (TREE_OPERAND (arg0, 1),
2825 TREE_OPERAND (arg1, 0), flags);
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2830 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2831 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2833 else if (flags & OEP_ADDRESS_OF)
2835 /* If we are interested in comparing addresses ignore
2836 MEM_REF wrappings of the base that can appear just for
2837 TBAA reasons. */
2838 if (TREE_CODE (arg0) == MEM_REF
2839 && DECL_P (arg1)
2840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2841 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2842 && integer_zerop (TREE_OPERAND (arg0, 1)))
2843 return 1;
2844 else if (TREE_CODE (arg1) == MEM_REF
2845 && DECL_P (arg0)
2846 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2847 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2848 && integer_zerop (TREE_OPERAND (arg1, 1)))
2849 return 1;
2850 return 0;
2852 else
2853 return 0;
2856 /* When not checking adddresses, this is needed for conversions and for
2857 COMPONENT_REF. Might as well play it safe and always test this. */
2858 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2859 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2860 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2861 && !(flags & OEP_ADDRESS_OF)))
2862 return 0;
2864 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2865 We don't care about side effects in that case because the SAVE_EXPR
2866 takes care of that for us. In all other cases, two expressions are
2867 equal if they have no side effects. If we have two identical
2868 expressions with side effects that should be treated the same due
2869 to the only side effects being identical SAVE_EXPR's, that will
2870 be detected in the recursive calls below.
2871 If we are taking an invariant address of two identical objects
2872 they are necessarily equal as well. */
2873 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2874 && (TREE_CODE (arg0) == SAVE_EXPR
2875 || (flags & OEP_MATCH_SIDE_EFFECTS)
2876 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2877 return 1;
2879 /* Next handle constant cases, those for which we can return 1 even
2880 if ONLY_CONST is set. */
2881 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2882 switch (TREE_CODE (arg0))
2884 case INTEGER_CST:
2885 return tree_int_cst_equal (arg0, arg1);
2887 case FIXED_CST:
2888 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2889 TREE_FIXED_CST (arg1));
2891 case REAL_CST:
2892 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2893 return 1;
2896 if (!HONOR_SIGNED_ZEROS (arg0))
2898 /* If we do not distinguish between signed and unsigned zero,
2899 consider them equal. */
2900 if (real_zerop (arg0) && real_zerop (arg1))
2901 return 1;
2903 return 0;
2905 case VECTOR_CST:
2907 unsigned i;
2909 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2910 return 0;
2912 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2914 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2915 VECTOR_CST_ELT (arg1, i), flags))
2916 return 0;
2918 return 1;
2921 case COMPLEX_CST:
2922 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2923 flags)
2924 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2925 flags));
2927 case STRING_CST:
2928 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2929 && ! memcmp (TREE_STRING_POINTER (arg0),
2930 TREE_STRING_POINTER (arg1),
2931 TREE_STRING_LENGTH (arg0)));
2933 case ADDR_EXPR:
2934 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2935 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2936 flags | OEP_ADDRESS_OF
2937 | OEP_MATCH_SIDE_EFFECTS);
2938 case CONSTRUCTOR:
2939 /* In GIMPLE empty constructors are allowed in initializers of
2940 aggregates. */
2941 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2942 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2943 default:
2944 break;
2947 if (flags & OEP_ONLY_CONST)
2948 return 0;
2950 /* Define macros to test an operand from arg0 and arg1 for equality and a
2951 variant that allows null and views null as being different from any
2952 non-null value. In the latter case, if either is null, the both
2953 must be; otherwise, do the normal comparison. */
2954 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2955 TREE_OPERAND (arg1, N), flags)
2957 #define OP_SAME_WITH_NULL(N) \
2958 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2959 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2961 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2963 case tcc_unary:
2964 /* Two conversions are equal only if signedness and modes match. */
2965 switch (TREE_CODE (arg0))
2967 CASE_CONVERT:
2968 case FIX_TRUNC_EXPR:
2969 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2970 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2971 return 0;
2972 break;
2973 default:
2974 break;
2977 return OP_SAME (0);
2980 case tcc_comparison:
2981 case tcc_binary:
2982 if (OP_SAME (0) && OP_SAME (1))
2983 return 1;
2985 /* For commutative ops, allow the other order. */
2986 return (commutative_tree_code (TREE_CODE (arg0))
2987 && operand_equal_p (TREE_OPERAND (arg0, 0),
2988 TREE_OPERAND (arg1, 1), flags)
2989 && operand_equal_p (TREE_OPERAND (arg0, 1),
2990 TREE_OPERAND (arg1, 0), flags));
2992 case tcc_reference:
2993 /* If either of the pointer (or reference) expressions we are
2994 dereferencing contain a side effect, these cannot be equal,
2995 but their addresses can be. */
2996 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2997 && (TREE_SIDE_EFFECTS (arg0)
2998 || TREE_SIDE_EFFECTS (arg1)))
2999 return 0;
3001 switch (TREE_CODE (arg0))
3003 case INDIRECT_REF:
3004 if (!(flags & OEP_ADDRESS_OF)
3005 && (TYPE_ALIGN (TREE_TYPE (arg0))
3006 != TYPE_ALIGN (TREE_TYPE (arg1))))
3007 return 0;
3008 flags &= ~OEP_ADDRESS_OF;
3009 return OP_SAME (0);
3011 case IMAGPART_EXPR:
3012 /* Require the same offset. */
3013 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags & ~OEP_ADDRESS_OF))
3016 return 0;
3018 /* Fallthru. */
3019 case REALPART_EXPR:
3020 case VIEW_CONVERT_EXPR:
3021 return OP_SAME (0);
3023 case TARGET_MEM_REF:
3024 case MEM_REF:
3025 if (!(flags & OEP_ADDRESS_OF))
3027 /* Require equal access sizes */
3028 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3029 && (!TYPE_SIZE (TREE_TYPE (arg0))
3030 || !TYPE_SIZE (TREE_TYPE (arg1))
3031 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3032 TYPE_SIZE (TREE_TYPE (arg1)),
3033 flags)))
3034 return 0;
3035 /* Verify that access happens in similar types. */
3036 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3037 return 0;
3038 /* Verify that accesses are TBAA compatible. */
3039 if (!alias_ptr_types_compatible_p
3040 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3041 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3042 || (MR_DEPENDENCE_CLIQUE (arg0)
3043 != MR_DEPENDENCE_CLIQUE (arg1))
3044 || (MR_DEPENDENCE_BASE (arg0)
3045 != MR_DEPENDENCE_BASE (arg1)))
3046 return 0;
3047 /* Verify that alignment is compatible. */
3048 if (TYPE_ALIGN (TREE_TYPE (arg0))
3049 != TYPE_ALIGN (TREE_TYPE (arg1)))
3050 return 0;
3052 flags &= ~OEP_ADDRESS_OF;
3053 return (OP_SAME (0) && OP_SAME (1)
3054 /* TARGET_MEM_REF require equal extra operands. */
3055 && (TREE_CODE (arg0) != TARGET_MEM_REF
3056 || (OP_SAME_WITH_NULL (2)
3057 && OP_SAME_WITH_NULL (3)
3058 && OP_SAME_WITH_NULL (4))));
3060 case ARRAY_REF:
3061 case ARRAY_RANGE_REF:
3062 if (!OP_SAME (0))
3063 return 0;
3064 flags &= ~OEP_ADDRESS_OF;
3065 /* Compare the array index by value if it is constant first as we
3066 may have different types but same value here. */
3067 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3068 TREE_OPERAND (arg1, 1))
3069 || OP_SAME (1))
3070 && OP_SAME_WITH_NULL (2)
3071 && OP_SAME_WITH_NULL (3)
3072 /* Compare low bound and element size as with OEP_ADDRESS_OF
3073 we have to account for the offset of the ref. */
3074 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3075 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3076 || (operand_equal_p (array_ref_low_bound
3077 (CONST_CAST_TREE (arg0)),
3078 array_ref_low_bound
3079 (CONST_CAST_TREE (arg1)), flags)
3080 && operand_equal_p (array_ref_element_size
3081 (CONST_CAST_TREE (arg0)),
3082 array_ref_element_size
3083 (CONST_CAST_TREE (arg1)),
3084 flags))));
3086 case COMPONENT_REF:
3087 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3088 may be NULL when we're called to compare MEM_EXPRs. */
3089 if (!OP_SAME_WITH_NULL (0)
3090 || !OP_SAME (1))
3091 return 0;
3092 flags &= ~OEP_ADDRESS_OF;
3093 return OP_SAME_WITH_NULL (2);
3095 case BIT_FIELD_REF:
3096 if (!OP_SAME (0))
3097 return 0;
3098 flags &= ~OEP_ADDRESS_OF;
3099 return OP_SAME (1) && OP_SAME (2);
3101 default:
3102 return 0;
3105 case tcc_expression:
3106 switch (TREE_CODE (arg0))
3108 case ADDR_EXPR:
3109 /* Be sure we pass right ADDRESS_OF flag. */
3110 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3111 return operand_equal_p (TREE_OPERAND (arg0, 0),
3112 TREE_OPERAND (arg1, 0),
3113 flags | OEP_ADDRESS_OF);
3115 case TRUTH_NOT_EXPR:
3116 return OP_SAME (0);
3118 case TRUTH_ANDIF_EXPR:
3119 case TRUTH_ORIF_EXPR:
3120 return OP_SAME (0) && OP_SAME (1);
3122 case FMA_EXPR:
3123 case WIDEN_MULT_PLUS_EXPR:
3124 case WIDEN_MULT_MINUS_EXPR:
3125 if (!OP_SAME (2))
3126 return 0;
3127 /* The multiplcation operands are commutative. */
3128 /* FALLTHRU */
3130 case TRUTH_AND_EXPR:
3131 case TRUTH_OR_EXPR:
3132 case TRUTH_XOR_EXPR:
3133 if (OP_SAME (0) && OP_SAME (1))
3134 return 1;
3136 /* Otherwise take into account this is a commutative operation. */
3137 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3138 TREE_OPERAND (arg1, 1), flags)
3139 && operand_equal_p (TREE_OPERAND (arg0, 1),
3140 TREE_OPERAND (arg1, 0), flags));
3142 case COND_EXPR:
3143 if (! OP_SAME (1) || ! OP_SAME (2))
3144 return 0;
3145 flags &= ~OEP_ADDRESS_OF;
3146 return OP_SAME (0);
3148 case VEC_COND_EXPR:
3149 case DOT_PROD_EXPR:
3150 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3152 default:
3153 return 0;
3156 case tcc_vl_exp:
3157 switch (TREE_CODE (arg0))
3159 case CALL_EXPR:
3160 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3161 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3162 /* If not both CALL_EXPRs are either internal or normal function
3163 functions, then they are not equal. */
3164 return 0;
3165 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3167 /* If the CALL_EXPRs call different internal functions, then they
3168 are not equal. */
3169 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3170 return 0;
3172 else
3174 /* If the CALL_EXPRs call different functions, then they are not
3175 equal. */
3176 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3177 flags))
3178 return 0;
3181 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3183 unsigned int cef = call_expr_flags (arg0);
3184 if (flags & OEP_PURE_SAME)
3185 cef &= ECF_CONST | ECF_PURE;
3186 else
3187 cef &= ECF_CONST;
3188 if (!cef)
3189 return 0;
3192 /* Now see if all the arguments are the same. */
3194 const_call_expr_arg_iterator iter0, iter1;
3195 const_tree a0, a1;
3196 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3197 a1 = first_const_call_expr_arg (arg1, &iter1);
3198 a0 && a1;
3199 a0 = next_const_call_expr_arg (&iter0),
3200 a1 = next_const_call_expr_arg (&iter1))
3201 if (! operand_equal_p (a0, a1, flags))
3202 return 0;
3204 /* If we get here and both argument lists are exhausted
3205 then the CALL_EXPRs are equal. */
3206 return ! (a0 || a1);
3208 default:
3209 return 0;
3212 case tcc_declaration:
3213 /* Consider __builtin_sqrt equal to sqrt. */
3214 return (TREE_CODE (arg0) == FUNCTION_DECL
3215 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3216 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3217 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3219 case tcc_exceptional:
3220 if (TREE_CODE (arg0) == CONSTRUCTOR)
3222 /* In GIMPLE constructors are used only to build vectors from
3223 elements. Individual elements in the constructor must be
3224 indexed in increasing order and form an initial sequence.
3226 We make no effort to compare constructors in generic.
3227 (see sem_variable::equals in ipa-icf which can do so for
3228 constants). */
3229 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3230 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3231 return 0;
3233 /* Be sure that vectors constructed have the same representation.
3234 We only tested element precision and modes to match.
3235 Vectors may be BLKmode and thus also check that the number of
3236 parts match. */
3237 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3238 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3239 return 0;
3241 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3242 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3243 unsigned int len = vec_safe_length (v0);
3245 if (len != vec_safe_length (v1))
3246 return 0;
3248 for (unsigned int i = 0; i < len; i++)
3250 constructor_elt *c0 = &(*v0)[i];
3251 constructor_elt *c1 = &(*v1)[i];
3253 if (!operand_equal_p (c0->value, c1->value, flags)
3254 /* In GIMPLE the indexes can be either NULL or matching i.
3255 Double check this so we won't get false
3256 positives for GENERIC. */
3257 || (c0->index
3258 && (TREE_CODE (c0->index) != INTEGER_CST
3259 || !compare_tree_int (c0->index, i)))
3260 || (c1->index
3261 && (TREE_CODE (c1->index) != INTEGER_CST
3262 || !compare_tree_int (c1->index, i))))
3263 return 0;
3265 return 1;
3267 return 0;
3269 default:
3270 return 0;
3273 #undef OP_SAME
3274 #undef OP_SAME_WITH_NULL
3277 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3278 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3280 When in doubt, return 0. */
3282 static int
3283 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3285 int unsignedp1, unsignedpo;
3286 tree primarg0, primarg1, primother;
3287 unsigned int correct_width;
3289 if (operand_equal_p (arg0, arg1, 0))
3290 return 1;
3292 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3293 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3294 return 0;
3296 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3297 and see if the inner values are the same. This removes any
3298 signedness comparison, which doesn't matter here. */
3299 primarg0 = arg0, primarg1 = arg1;
3300 STRIP_NOPS (primarg0);
3301 STRIP_NOPS (primarg1);
3302 if (operand_equal_p (primarg0, primarg1, 0))
3303 return 1;
3305 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3306 actual comparison operand, ARG0.
3308 First throw away any conversions to wider types
3309 already present in the operands. */
3311 primarg1 = get_narrower (arg1, &unsignedp1);
3312 primother = get_narrower (other, &unsignedpo);
3314 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3315 if (unsignedp1 == unsignedpo
3316 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3317 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3319 tree type = TREE_TYPE (arg0);
3321 /* Make sure shorter operand is extended the right way
3322 to match the longer operand. */
3323 primarg1 = fold_convert (signed_or_unsigned_type_for
3324 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3326 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3327 return 1;
3330 return 0;
3333 /* See if ARG is an expression that is either a comparison or is performing
3334 arithmetic on comparisons. The comparisons must only be comparing
3335 two different values, which will be stored in *CVAL1 and *CVAL2; if
3336 they are nonzero it means that some operands have already been found.
3337 No variables may be used anywhere else in the expression except in the
3338 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3339 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3341 If this is true, return 1. Otherwise, return zero. */
3343 static int
3344 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3346 enum tree_code code = TREE_CODE (arg);
3347 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3349 /* We can handle some of the tcc_expression cases here. */
3350 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3351 tclass = tcc_unary;
3352 else if (tclass == tcc_expression
3353 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3354 || code == COMPOUND_EXPR))
3355 tclass = tcc_binary;
3357 else if (tclass == tcc_expression && code == SAVE_EXPR
3358 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3360 /* If we've already found a CVAL1 or CVAL2, this expression is
3361 two complex to handle. */
3362 if (*cval1 || *cval2)
3363 return 0;
3365 tclass = tcc_unary;
3366 *save_p = 1;
3369 switch (tclass)
3371 case tcc_unary:
3372 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3374 case tcc_binary:
3375 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3376 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3377 cval1, cval2, save_p));
3379 case tcc_constant:
3380 return 1;
3382 case tcc_expression:
3383 if (code == COND_EXPR)
3384 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3385 cval1, cval2, save_p)
3386 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3387 cval1, cval2, save_p)
3388 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3389 cval1, cval2, save_p));
3390 return 0;
3392 case tcc_comparison:
3393 /* First see if we can handle the first operand, then the second. For
3394 the second operand, we know *CVAL1 can't be zero. It must be that
3395 one side of the comparison is each of the values; test for the
3396 case where this isn't true by failing if the two operands
3397 are the same. */
3399 if (operand_equal_p (TREE_OPERAND (arg, 0),
3400 TREE_OPERAND (arg, 1), 0))
3401 return 0;
3403 if (*cval1 == 0)
3404 *cval1 = TREE_OPERAND (arg, 0);
3405 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3407 else if (*cval2 == 0)
3408 *cval2 = TREE_OPERAND (arg, 0);
3409 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3411 else
3412 return 0;
3414 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3416 else if (*cval2 == 0)
3417 *cval2 = TREE_OPERAND (arg, 1);
3418 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3420 else
3421 return 0;
3423 return 1;
3425 default:
3426 return 0;
3430 /* ARG is a tree that is known to contain just arithmetic operations and
3431 comparisons. Evaluate the operations in the tree substituting NEW0 for
3432 any occurrence of OLD0 as an operand of a comparison and likewise for
3433 NEW1 and OLD1. */
3435 static tree
3436 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3437 tree old1, tree new1)
3439 tree type = TREE_TYPE (arg);
3440 enum tree_code code = TREE_CODE (arg);
3441 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3443 /* We can handle some of the tcc_expression cases here. */
3444 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3445 tclass = tcc_unary;
3446 else if (tclass == tcc_expression
3447 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3448 tclass = tcc_binary;
3450 switch (tclass)
3452 case tcc_unary:
3453 return fold_build1_loc (loc, code, type,
3454 eval_subst (loc, TREE_OPERAND (arg, 0),
3455 old0, new0, old1, new1));
3457 case tcc_binary:
3458 return fold_build2_loc (loc, code, type,
3459 eval_subst (loc, TREE_OPERAND (arg, 0),
3460 old0, new0, old1, new1),
3461 eval_subst (loc, TREE_OPERAND (arg, 1),
3462 old0, new0, old1, new1));
3464 case tcc_expression:
3465 switch (code)
3467 case SAVE_EXPR:
3468 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3469 old1, new1);
3471 case COMPOUND_EXPR:
3472 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3473 old1, new1);
3475 case COND_EXPR:
3476 return fold_build3_loc (loc, code, type,
3477 eval_subst (loc, TREE_OPERAND (arg, 0),
3478 old0, new0, old1, new1),
3479 eval_subst (loc, TREE_OPERAND (arg, 1),
3480 old0, new0, old1, new1),
3481 eval_subst (loc, TREE_OPERAND (arg, 2),
3482 old0, new0, old1, new1));
3483 default:
3484 break;
3486 /* Fall through - ??? */
3488 case tcc_comparison:
3490 tree arg0 = TREE_OPERAND (arg, 0);
3491 tree arg1 = TREE_OPERAND (arg, 1);
3493 /* We need to check both for exact equality and tree equality. The
3494 former will be true if the operand has a side-effect. In that
3495 case, we know the operand occurred exactly once. */
3497 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3498 arg0 = new0;
3499 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3500 arg0 = new1;
3502 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3503 arg1 = new0;
3504 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3505 arg1 = new1;
3507 return fold_build2_loc (loc, code, type, arg0, arg1);
3510 default:
3511 return arg;
3515 /* Return a tree for the case when the result of an expression is RESULT
3516 converted to TYPE and OMITTED was previously an operand of the expression
3517 but is now not needed (e.g., we folded OMITTED * 0).
3519 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3520 the conversion of RESULT to TYPE. */
3522 tree
3523 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3525 tree t = fold_convert_loc (loc, type, result);
3527 /* If the resulting operand is an empty statement, just return the omitted
3528 statement casted to void. */
3529 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3530 return build1_loc (loc, NOP_EXPR, void_type_node,
3531 fold_ignored_result (omitted));
3533 if (TREE_SIDE_EFFECTS (omitted))
3534 return build2_loc (loc, COMPOUND_EXPR, type,
3535 fold_ignored_result (omitted), t);
3537 return non_lvalue_loc (loc, t);
3540 /* Return a tree for the case when the result of an expression is RESULT
3541 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3542 of the expression but are now not needed.
3544 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3545 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3546 evaluated before OMITTED2. Otherwise, if neither has side effects,
3547 just do the conversion of RESULT to TYPE. */
3549 tree
3550 omit_two_operands_loc (location_t loc, tree type, tree result,
3551 tree omitted1, tree omitted2)
3553 tree t = fold_convert_loc (loc, type, result);
3555 if (TREE_SIDE_EFFECTS (omitted2))
3556 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3557 if (TREE_SIDE_EFFECTS (omitted1))
3558 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3560 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3564 /* Return a simplified tree node for the truth-negation of ARG. This
3565 never alters ARG itself. We assume that ARG is an operation that
3566 returns a truth value (0 or 1).
3568 FIXME: one would think we would fold the result, but it causes
3569 problems with the dominator optimizer. */
3571 static tree
3572 fold_truth_not_expr (location_t loc, tree arg)
3574 tree type = TREE_TYPE (arg);
3575 enum tree_code code = TREE_CODE (arg);
3576 location_t loc1, loc2;
3578 /* If this is a comparison, we can simply invert it, except for
3579 floating-point non-equality comparisons, in which case we just
3580 enclose a TRUTH_NOT_EXPR around what we have. */
3582 if (TREE_CODE_CLASS (code) == tcc_comparison)
3584 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3585 if (FLOAT_TYPE_P (op_type)
3586 && flag_trapping_math
3587 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3588 && code != NE_EXPR && code != EQ_EXPR)
3589 return NULL_TREE;
3591 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3592 if (code == ERROR_MARK)
3593 return NULL_TREE;
3595 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3596 TREE_OPERAND (arg, 1));
3597 if (TREE_NO_WARNING (arg))
3598 TREE_NO_WARNING (ret) = 1;
3599 return ret;
3602 switch (code)
3604 case INTEGER_CST:
3605 return constant_boolean_node (integer_zerop (arg), type);
3607 case TRUTH_AND_EXPR:
3608 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3609 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3610 return build2_loc (loc, TRUTH_OR_EXPR, type,
3611 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3612 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3614 case TRUTH_OR_EXPR:
3615 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3616 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3617 return build2_loc (loc, TRUTH_AND_EXPR, type,
3618 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3619 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3621 case TRUTH_XOR_EXPR:
3622 /* Here we can invert either operand. We invert the first operand
3623 unless the second operand is a TRUTH_NOT_EXPR in which case our
3624 result is the XOR of the first operand with the inside of the
3625 negation of the second operand. */
3627 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3628 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3629 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3630 else
3631 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3632 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3633 TREE_OPERAND (arg, 1));
3635 case TRUTH_ANDIF_EXPR:
3636 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3637 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3638 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3639 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3640 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3642 case TRUTH_ORIF_EXPR:
3643 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3644 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3645 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3646 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3647 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3649 case TRUTH_NOT_EXPR:
3650 return TREE_OPERAND (arg, 0);
3652 case COND_EXPR:
3654 tree arg1 = TREE_OPERAND (arg, 1);
3655 tree arg2 = TREE_OPERAND (arg, 2);
3657 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3658 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3660 /* A COND_EXPR may have a throw as one operand, which
3661 then has void type. Just leave void operands
3662 as they are. */
3663 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3664 VOID_TYPE_P (TREE_TYPE (arg1))
3665 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3666 VOID_TYPE_P (TREE_TYPE (arg2))
3667 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3670 case COMPOUND_EXPR:
3671 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3672 return build2_loc (loc, COMPOUND_EXPR, type,
3673 TREE_OPERAND (arg, 0),
3674 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3676 case NON_LVALUE_EXPR:
3677 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3678 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3680 CASE_CONVERT:
3681 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3682 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3684 /* ... fall through ... */
3686 case FLOAT_EXPR:
3687 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3688 return build1_loc (loc, TREE_CODE (arg), type,
3689 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3691 case BIT_AND_EXPR:
3692 if (!integer_onep (TREE_OPERAND (arg, 1)))
3693 return NULL_TREE;
3694 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3696 case SAVE_EXPR:
3697 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3699 case CLEANUP_POINT_EXPR:
3700 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3701 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3702 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3704 default:
3705 return NULL_TREE;
3709 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3710 assume that ARG is an operation that returns a truth value (0 or 1
3711 for scalars, 0 or -1 for vectors). Return the folded expression if
3712 folding is successful. Otherwise, return NULL_TREE. */
3714 static tree
3715 fold_invert_truthvalue (location_t loc, tree arg)
3717 tree type = TREE_TYPE (arg);
3718 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3719 ? BIT_NOT_EXPR
3720 : TRUTH_NOT_EXPR,
3721 type, arg);
3724 /* Return a simplified tree node for the truth-negation of ARG. This
3725 never alters ARG itself. We assume that ARG is an operation that
3726 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3728 tree
3729 invert_truthvalue_loc (location_t loc, tree arg)
3731 if (TREE_CODE (arg) == ERROR_MARK)
3732 return arg;
3734 tree type = TREE_TYPE (arg);
3735 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3736 ? BIT_NOT_EXPR
3737 : TRUTH_NOT_EXPR,
3738 type, arg);
3741 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3742 with code CODE. This optimization is unsafe. */
3743 static tree
3744 distribute_real_division (location_t loc, enum tree_code code, tree type,
3745 tree arg0, tree arg1)
3747 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3748 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3750 /* (A / C) +- (B / C) -> (A +- B) / C. */
3751 if (mul0 == mul1
3752 && operand_equal_p (TREE_OPERAND (arg0, 1),
3753 TREE_OPERAND (arg1, 1), 0))
3754 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3755 fold_build2_loc (loc, code, type,
3756 TREE_OPERAND (arg0, 0),
3757 TREE_OPERAND (arg1, 0)),
3758 TREE_OPERAND (arg0, 1));
3760 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3761 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3762 TREE_OPERAND (arg1, 0), 0)
3763 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3764 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3766 REAL_VALUE_TYPE r0, r1;
3767 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3768 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3769 if (!mul0)
3770 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3771 if (!mul1)
3772 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3773 real_arithmetic (&r0, code, &r0, &r1);
3774 return fold_build2_loc (loc, MULT_EXPR, type,
3775 TREE_OPERAND (arg0, 0),
3776 build_real (type, r0));
3779 return NULL_TREE;
3782 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3783 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3784 and uses reverse storage order if REVERSEP is nonzero. */
3786 static tree
3787 make_bit_field_ref (location_t loc, tree inner, tree type,
3788 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3789 int unsignedp, int reversep)
3791 tree result, bftype;
3793 if (bitpos == 0 && !reversep)
3795 tree size = TYPE_SIZE (TREE_TYPE (inner));
3796 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3797 || POINTER_TYPE_P (TREE_TYPE (inner)))
3798 && tree_fits_shwi_p (size)
3799 && tree_to_shwi (size) == bitsize)
3800 return fold_convert_loc (loc, type, inner);
3803 bftype = type;
3804 if (TYPE_PRECISION (bftype) != bitsize
3805 || TYPE_UNSIGNED (bftype) == !unsignedp)
3806 bftype = build_nonstandard_integer_type (bitsize, 0);
3808 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3809 size_int (bitsize), bitsize_int (bitpos));
3810 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3812 if (bftype != type)
3813 result = fold_convert_loc (loc, type, result);
3815 return result;
3818 /* Optimize a bit-field compare.
3820 There are two cases: First is a compare against a constant and the
3821 second is a comparison of two items where the fields are at the same
3822 bit position relative to the start of a chunk (byte, halfword, word)
3823 large enough to contain it. In these cases we can avoid the shift
3824 implicit in bitfield extractions.
3826 For constants, we emit a compare of the shifted constant with the
3827 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3828 compared. For two fields at the same position, we do the ANDs with the
3829 similar mask and compare the result of the ANDs.
3831 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3832 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3833 are the left and right operands of the comparison, respectively.
3835 If the optimization described above can be done, we return the resulting
3836 tree. Otherwise we return zero. */
3838 static tree
3839 optimize_bit_field_compare (location_t loc, enum tree_code code,
3840 tree compare_type, tree lhs, tree rhs)
3842 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3843 tree type = TREE_TYPE (lhs);
3844 tree unsigned_type;
3845 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3846 machine_mode lmode, rmode, nmode;
3847 int lunsignedp, runsignedp;
3848 int lreversep, rreversep;
3849 int lvolatilep = 0, rvolatilep = 0;
3850 tree linner, rinner = NULL_TREE;
3851 tree mask;
3852 tree offset;
3854 /* Get all the information about the extractions being done. If the bit size
3855 if the same as the size of the underlying object, we aren't doing an
3856 extraction at all and so can do nothing. We also don't want to
3857 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3858 then will no longer be able to replace it. */
3859 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3860 &lunsignedp, &lreversep, &lvolatilep, false);
3861 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3862 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3863 return 0;
3865 if (const_p)
3866 rreversep = lreversep;
3867 else
3869 /* If this is not a constant, we can only do something if bit positions,
3870 sizes, signedness and storage order are the same. */
3871 rinner
3872 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3873 &runsignedp, &rreversep, &rvolatilep, false);
3875 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3876 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3877 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3878 return 0;
3881 /* See if we can find a mode to refer to this field. We should be able to,
3882 but fail if we can't. */
3883 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3884 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3885 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3886 TYPE_ALIGN (TREE_TYPE (rinner))),
3887 word_mode, false);
3888 if (nmode == VOIDmode)
3889 return 0;
3891 /* Set signed and unsigned types of the precision of this mode for the
3892 shifts below. */
3893 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3895 /* Compute the bit position and size for the new reference and our offset
3896 within it. If the new reference is the same size as the original, we
3897 won't optimize anything, so return zero. */
3898 nbitsize = GET_MODE_BITSIZE (nmode);
3899 nbitpos = lbitpos & ~ (nbitsize - 1);
3900 lbitpos -= nbitpos;
3901 if (nbitsize == lbitsize)
3902 return 0;
3904 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3905 lbitpos = nbitsize - lbitsize - lbitpos;
3907 /* Make the mask to be used against the extracted field. */
3908 mask = build_int_cst_type (unsigned_type, -1);
3909 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3910 mask = const_binop (RSHIFT_EXPR, mask,
3911 size_int (nbitsize - lbitsize - lbitpos));
3913 if (! const_p)
3914 /* If not comparing with constant, just rework the comparison
3915 and return. */
3916 return fold_build2_loc (loc, code, compare_type,
3917 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3918 make_bit_field_ref (loc, linner,
3919 unsigned_type,
3920 nbitsize, nbitpos,
3921 1, lreversep),
3922 mask),
3923 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3924 make_bit_field_ref (loc, rinner,
3925 unsigned_type,
3926 nbitsize, nbitpos,
3927 1, rreversep),
3928 mask));
3930 /* Otherwise, we are handling the constant case. See if the constant is too
3931 big for the field. Warn and return a tree for 0 (false) if so. We do
3932 this not only for its own sake, but to avoid having to test for this
3933 error case below. If we didn't, we might generate wrong code.
3935 For unsigned fields, the constant shifted right by the field length should
3936 be all zero. For signed fields, the high-order bits should agree with
3937 the sign bit. */
3939 if (lunsignedp)
3941 if (wi::lrshift (rhs, lbitsize) != 0)
3943 warning (0, "comparison is always %d due to width of bit-field",
3944 code == NE_EXPR);
3945 return constant_boolean_node (code == NE_EXPR, compare_type);
3948 else
3950 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3951 if (tem != 0 && tem != -1)
3953 warning (0, "comparison is always %d due to width of bit-field",
3954 code == NE_EXPR);
3955 return constant_boolean_node (code == NE_EXPR, compare_type);
3959 /* Single-bit compares should always be against zero. */
3960 if (lbitsize == 1 && ! integer_zerop (rhs))
3962 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3963 rhs = build_int_cst (type, 0);
3966 /* Make a new bitfield reference, shift the constant over the
3967 appropriate number of bits and mask it with the computed mask
3968 (in case this was a signed field). If we changed it, make a new one. */
3969 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3970 lreversep);
3972 rhs = const_binop (BIT_AND_EXPR,
3973 const_binop (LSHIFT_EXPR,
3974 fold_convert_loc (loc, unsigned_type, rhs),
3975 size_int (lbitpos)),
3976 mask);
3978 lhs = build2_loc (loc, code, compare_type,
3979 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3980 return lhs;
3983 /* Subroutine for fold_truth_andor_1: decode a field reference.
3985 If EXP is a comparison reference, we return the innermost reference.
3987 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3988 set to the starting bit number.
3990 If the innermost field can be completely contained in a mode-sized
3991 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3993 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3994 otherwise it is not changed.
3996 *PUNSIGNEDP is set to the signedness of the field.
3998 *PREVERSEP is set to the storage order of the field.
4000 *PMASK is set to the mask used. This is either contained in a
4001 BIT_AND_EXPR or derived from the width of the field.
4003 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4005 Return 0 if this is not a component reference or is one that we can't
4006 do anything with. */
4008 static tree
4009 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4010 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4011 int *punsignedp, int *preversep, int *pvolatilep,
4012 tree *pmask, tree *pand_mask)
4014 tree outer_type = 0;
4015 tree and_mask = 0;
4016 tree mask, inner, offset;
4017 tree unsigned_type;
4018 unsigned int precision;
4020 /* All the optimizations using this function assume integer fields.
4021 There are problems with FP fields since the type_for_size call
4022 below can fail for, e.g., XFmode. */
4023 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4024 return 0;
4026 /* We are interested in the bare arrangement of bits, so strip everything
4027 that doesn't affect the machine mode. However, record the type of the
4028 outermost expression if it may matter below. */
4029 if (CONVERT_EXPR_P (exp)
4030 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4031 outer_type = TREE_TYPE (exp);
4032 STRIP_NOPS (exp);
4034 if (TREE_CODE (exp) == BIT_AND_EXPR)
4036 and_mask = TREE_OPERAND (exp, 1);
4037 exp = TREE_OPERAND (exp, 0);
4038 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4039 if (TREE_CODE (and_mask) != INTEGER_CST)
4040 return 0;
4043 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4044 punsignedp, preversep, pvolatilep, false);
4045 if ((inner == exp && and_mask == 0)
4046 || *pbitsize < 0 || offset != 0
4047 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4048 return 0;
4050 /* If the number of bits in the reference is the same as the bitsize of
4051 the outer type, then the outer type gives the signedness. Otherwise
4052 (in case of a small bitfield) the signedness is unchanged. */
4053 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4054 *punsignedp = TYPE_UNSIGNED (outer_type);
4056 /* Compute the mask to access the bitfield. */
4057 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4058 precision = TYPE_PRECISION (unsigned_type);
4060 mask = build_int_cst_type (unsigned_type, -1);
4062 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4063 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4065 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4066 if (and_mask != 0)
4067 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4068 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4070 *pmask = mask;
4071 *pand_mask = and_mask;
4072 return inner;
4075 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4076 bit positions and MASK is SIGNED. */
4078 static int
4079 all_ones_mask_p (const_tree mask, unsigned int size)
4081 tree type = TREE_TYPE (mask);
4082 unsigned int precision = TYPE_PRECISION (type);
4084 /* If this function returns true when the type of the mask is
4085 UNSIGNED, then there will be errors. In particular see
4086 gcc.c-torture/execute/990326-1.c. There does not appear to be
4087 any documentation paper trail as to why this is so. But the pre
4088 wide-int worked with that restriction and it has been preserved
4089 here. */
4090 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4091 return false;
4093 return wi::mask (size, false, precision) == mask;
4096 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4097 represents the sign bit of EXP's type. If EXP represents a sign
4098 or zero extension, also test VAL against the unextended type.
4099 The return value is the (sub)expression whose sign bit is VAL,
4100 or NULL_TREE otherwise. */
4102 tree
4103 sign_bit_p (tree exp, const_tree val)
4105 int width;
4106 tree t;
4108 /* Tree EXP must have an integral type. */
4109 t = TREE_TYPE (exp);
4110 if (! INTEGRAL_TYPE_P (t))
4111 return NULL_TREE;
4113 /* Tree VAL must be an integer constant. */
4114 if (TREE_CODE (val) != INTEGER_CST
4115 || TREE_OVERFLOW (val))
4116 return NULL_TREE;
4118 width = TYPE_PRECISION (t);
4119 if (wi::only_sign_bit_p (val, width))
4120 return exp;
4122 /* Handle extension from a narrower type. */
4123 if (TREE_CODE (exp) == NOP_EXPR
4124 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4125 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4127 return NULL_TREE;
4130 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4131 to be evaluated unconditionally. */
4133 static int
4134 simple_operand_p (const_tree exp)
4136 /* Strip any conversions that don't change the machine mode. */
4137 STRIP_NOPS (exp);
4139 return (CONSTANT_CLASS_P (exp)
4140 || TREE_CODE (exp) == SSA_NAME
4141 || (DECL_P (exp)
4142 && ! TREE_ADDRESSABLE (exp)
4143 && ! TREE_THIS_VOLATILE (exp)
4144 && ! DECL_NONLOCAL (exp)
4145 /* Don't regard global variables as simple. They may be
4146 allocated in ways unknown to the compiler (shared memory,
4147 #pragma weak, etc). */
4148 && ! TREE_PUBLIC (exp)
4149 && ! DECL_EXTERNAL (exp)
4150 /* Weakrefs are not safe to be read, since they can be NULL.
4151 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4152 have DECL_WEAK flag set. */
4153 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4154 /* Loading a static variable is unduly expensive, but global
4155 registers aren't expensive. */
4156 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4159 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4160 to be evaluated unconditionally.
4161 I addition to simple_operand_p, we assume that comparisons, conversions,
4162 and logic-not operations are simple, if their operands are simple, too. */
4164 static bool
4165 simple_operand_p_2 (tree exp)
4167 enum tree_code code;
4169 if (TREE_SIDE_EFFECTS (exp)
4170 || tree_could_trap_p (exp))
4171 return false;
4173 while (CONVERT_EXPR_P (exp))
4174 exp = TREE_OPERAND (exp, 0);
4176 code = TREE_CODE (exp);
4178 if (TREE_CODE_CLASS (code) == tcc_comparison)
4179 return (simple_operand_p (TREE_OPERAND (exp, 0))
4180 && simple_operand_p (TREE_OPERAND (exp, 1)));
4182 if (code == TRUTH_NOT_EXPR)
4183 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4185 return simple_operand_p (exp);
4189 /* The following functions are subroutines to fold_range_test and allow it to
4190 try to change a logical combination of comparisons into a range test.
4192 For example, both
4193 X == 2 || X == 3 || X == 4 || X == 5
4195 X >= 2 && X <= 5
4196 are converted to
4197 (unsigned) (X - 2) <= 3
4199 We describe each set of comparisons as being either inside or outside
4200 a range, using a variable named like IN_P, and then describe the
4201 range with a lower and upper bound. If one of the bounds is omitted,
4202 it represents either the highest or lowest value of the type.
4204 In the comments below, we represent a range by two numbers in brackets
4205 preceded by a "+" to designate being inside that range, or a "-" to
4206 designate being outside that range, so the condition can be inverted by
4207 flipping the prefix. An omitted bound is represented by a "-". For
4208 example, "- [-, 10]" means being outside the range starting at the lowest
4209 possible value and ending at 10, in other words, being greater than 10.
4210 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4211 always false.
4213 We set up things so that the missing bounds are handled in a consistent
4214 manner so neither a missing bound nor "true" and "false" need to be
4215 handled using a special case. */
4217 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4218 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4219 and UPPER1_P are nonzero if the respective argument is an upper bound
4220 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4221 must be specified for a comparison. ARG1 will be converted to ARG0's
4222 type if both are specified. */
4224 static tree
4225 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4226 tree arg1, int upper1_p)
4228 tree tem;
4229 int result;
4230 int sgn0, sgn1;
4232 /* If neither arg represents infinity, do the normal operation.
4233 Else, if not a comparison, return infinity. Else handle the special
4234 comparison rules. Note that most of the cases below won't occur, but
4235 are handled for consistency. */
4237 if (arg0 != 0 && arg1 != 0)
4239 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4240 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4241 STRIP_NOPS (tem);
4242 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4245 if (TREE_CODE_CLASS (code) != tcc_comparison)
4246 return 0;
4248 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4249 for neither. In real maths, we cannot assume open ended ranges are
4250 the same. But, this is computer arithmetic, where numbers are finite.
4251 We can therefore make the transformation of any unbounded range with
4252 the value Z, Z being greater than any representable number. This permits
4253 us to treat unbounded ranges as equal. */
4254 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4255 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4256 switch (code)
4258 case EQ_EXPR:
4259 result = sgn0 == sgn1;
4260 break;
4261 case NE_EXPR:
4262 result = sgn0 != sgn1;
4263 break;
4264 case LT_EXPR:
4265 result = sgn0 < sgn1;
4266 break;
4267 case LE_EXPR:
4268 result = sgn0 <= sgn1;
4269 break;
4270 case GT_EXPR:
4271 result = sgn0 > sgn1;
4272 break;
4273 case GE_EXPR:
4274 result = sgn0 >= sgn1;
4275 break;
4276 default:
4277 gcc_unreachable ();
4280 return constant_boolean_node (result, type);
4283 /* Helper routine for make_range. Perform one step for it, return
4284 new expression if the loop should continue or NULL_TREE if it should
4285 stop. */
4287 tree
4288 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4289 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4290 bool *strict_overflow_p)
4292 tree arg0_type = TREE_TYPE (arg0);
4293 tree n_low, n_high, low = *p_low, high = *p_high;
4294 int in_p = *p_in_p, n_in_p;
4296 switch (code)
4298 case TRUTH_NOT_EXPR:
4299 /* We can only do something if the range is testing for zero. */
4300 if (low == NULL_TREE || high == NULL_TREE
4301 || ! integer_zerop (low) || ! integer_zerop (high))
4302 return NULL_TREE;
4303 *p_in_p = ! in_p;
4304 return arg0;
4306 case EQ_EXPR: case NE_EXPR:
4307 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4308 /* We can only do something if the range is testing for zero
4309 and if the second operand is an integer constant. Note that
4310 saying something is "in" the range we make is done by
4311 complementing IN_P since it will set in the initial case of
4312 being not equal to zero; "out" is leaving it alone. */
4313 if (low == NULL_TREE || high == NULL_TREE
4314 || ! integer_zerop (low) || ! integer_zerop (high)
4315 || TREE_CODE (arg1) != INTEGER_CST)
4316 return NULL_TREE;
4318 switch (code)
4320 case NE_EXPR: /* - [c, c] */
4321 low = high = arg1;
4322 break;
4323 case EQ_EXPR: /* + [c, c] */
4324 in_p = ! in_p, low = high = arg1;
4325 break;
4326 case GT_EXPR: /* - [-, c] */
4327 low = 0, high = arg1;
4328 break;
4329 case GE_EXPR: /* + [c, -] */
4330 in_p = ! in_p, low = arg1, high = 0;
4331 break;
4332 case LT_EXPR: /* - [c, -] */
4333 low = arg1, high = 0;
4334 break;
4335 case LE_EXPR: /* + [-, c] */
4336 in_p = ! in_p, low = 0, high = arg1;
4337 break;
4338 default:
4339 gcc_unreachable ();
4342 /* If this is an unsigned comparison, we also know that EXP is
4343 greater than or equal to zero. We base the range tests we make
4344 on that fact, so we record it here so we can parse existing
4345 range tests. We test arg0_type since often the return type
4346 of, e.g. EQ_EXPR, is boolean. */
4347 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4349 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4350 in_p, low, high, 1,
4351 build_int_cst (arg0_type, 0),
4352 NULL_TREE))
4353 return NULL_TREE;
4355 in_p = n_in_p, low = n_low, high = n_high;
4357 /* If the high bound is missing, but we have a nonzero low
4358 bound, reverse the range so it goes from zero to the low bound
4359 minus 1. */
4360 if (high == 0 && low && ! integer_zerop (low))
4362 in_p = ! in_p;
4363 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4364 build_int_cst (TREE_TYPE (low), 1), 0);
4365 low = build_int_cst (arg0_type, 0);
4369 *p_low = low;
4370 *p_high = high;
4371 *p_in_p = in_p;
4372 return arg0;
4374 case NEGATE_EXPR:
4375 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4376 low and high are non-NULL, then normalize will DTRT. */
4377 if (!TYPE_UNSIGNED (arg0_type)
4378 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4380 if (low == NULL_TREE)
4381 low = TYPE_MIN_VALUE (arg0_type);
4382 if (high == NULL_TREE)
4383 high = TYPE_MAX_VALUE (arg0_type);
4386 /* (-x) IN [a,b] -> x in [-b, -a] */
4387 n_low = range_binop (MINUS_EXPR, exp_type,
4388 build_int_cst (exp_type, 0),
4389 0, high, 1);
4390 n_high = range_binop (MINUS_EXPR, exp_type,
4391 build_int_cst (exp_type, 0),
4392 0, low, 0);
4393 if (n_high != 0 && TREE_OVERFLOW (n_high))
4394 return NULL_TREE;
4395 goto normalize;
4397 case BIT_NOT_EXPR:
4398 /* ~ X -> -X - 1 */
4399 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4400 build_int_cst (exp_type, 1));
4402 case PLUS_EXPR:
4403 case MINUS_EXPR:
4404 if (TREE_CODE (arg1) != INTEGER_CST)
4405 return NULL_TREE;
4407 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4408 move a constant to the other side. */
4409 if (!TYPE_UNSIGNED (arg0_type)
4410 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4411 return NULL_TREE;
4413 /* If EXP is signed, any overflow in the computation is undefined,
4414 so we don't worry about it so long as our computations on
4415 the bounds don't overflow. For unsigned, overflow is defined
4416 and this is exactly the right thing. */
4417 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4418 arg0_type, low, 0, arg1, 0);
4419 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4420 arg0_type, high, 1, arg1, 0);
4421 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4422 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4423 return NULL_TREE;
4425 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4426 *strict_overflow_p = true;
4428 normalize:
4429 /* Check for an unsigned range which has wrapped around the maximum
4430 value thus making n_high < n_low, and normalize it. */
4431 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4433 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4434 build_int_cst (TREE_TYPE (n_high), 1), 0);
4435 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4436 build_int_cst (TREE_TYPE (n_low), 1), 0);
4438 /* If the range is of the form +/- [ x+1, x ], we won't
4439 be able to normalize it. But then, it represents the
4440 whole range or the empty set, so make it
4441 +/- [ -, - ]. */
4442 if (tree_int_cst_equal (n_low, low)
4443 && tree_int_cst_equal (n_high, high))
4444 low = high = 0;
4445 else
4446 in_p = ! in_p;
4448 else
4449 low = n_low, high = n_high;
4451 *p_low = low;
4452 *p_high = high;
4453 *p_in_p = in_p;
4454 return arg0;
4456 CASE_CONVERT:
4457 case NON_LVALUE_EXPR:
4458 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4459 return NULL_TREE;
4461 if (! INTEGRAL_TYPE_P (arg0_type)
4462 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4463 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4464 return NULL_TREE;
4466 n_low = low, n_high = high;
4468 if (n_low != 0)
4469 n_low = fold_convert_loc (loc, arg0_type, n_low);
4471 if (n_high != 0)
4472 n_high = fold_convert_loc (loc, arg0_type, n_high);
4474 /* If we're converting arg0 from an unsigned type, to exp,
4475 a signed type, we will be doing the comparison as unsigned.
4476 The tests above have already verified that LOW and HIGH
4477 are both positive.
4479 So we have to ensure that we will handle large unsigned
4480 values the same way that the current signed bounds treat
4481 negative values. */
4483 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4485 tree high_positive;
4486 tree equiv_type;
4487 /* For fixed-point modes, we need to pass the saturating flag
4488 as the 2nd parameter. */
4489 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4490 equiv_type
4491 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4492 TYPE_SATURATING (arg0_type));
4493 else
4494 equiv_type
4495 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4497 /* A range without an upper bound is, naturally, unbounded.
4498 Since convert would have cropped a very large value, use
4499 the max value for the destination type. */
4500 high_positive
4501 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4502 : TYPE_MAX_VALUE (arg0_type);
4504 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4505 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4506 fold_convert_loc (loc, arg0_type,
4507 high_positive),
4508 build_int_cst (arg0_type, 1));
4510 /* If the low bound is specified, "and" the range with the
4511 range for which the original unsigned value will be
4512 positive. */
4513 if (low != 0)
4515 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4516 1, fold_convert_loc (loc, arg0_type,
4517 integer_zero_node),
4518 high_positive))
4519 return NULL_TREE;
4521 in_p = (n_in_p == in_p);
4523 else
4525 /* Otherwise, "or" the range with the range of the input
4526 that will be interpreted as negative. */
4527 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4528 1, fold_convert_loc (loc, arg0_type,
4529 integer_zero_node),
4530 high_positive))
4531 return NULL_TREE;
4533 in_p = (in_p != n_in_p);
4537 *p_low = n_low;
4538 *p_high = n_high;
4539 *p_in_p = in_p;
4540 return arg0;
4542 default:
4543 return NULL_TREE;
4547 /* Given EXP, a logical expression, set the range it is testing into
4548 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4549 actually being tested. *PLOW and *PHIGH will be made of the same
4550 type as the returned expression. If EXP is not a comparison, we
4551 will most likely not be returning a useful value and range. Set
4552 *STRICT_OVERFLOW_P to true if the return value is only valid
4553 because signed overflow is undefined; otherwise, do not change
4554 *STRICT_OVERFLOW_P. */
4556 tree
4557 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4558 bool *strict_overflow_p)
4560 enum tree_code code;
4561 tree arg0, arg1 = NULL_TREE;
4562 tree exp_type, nexp;
4563 int in_p;
4564 tree low, high;
4565 location_t loc = EXPR_LOCATION (exp);
4567 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4568 and see if we can refine the range. Some of the cases below may not
4569 happen, but it doesn't seem worth worrying about this. We "continue"
4570 the outer loop when we've changed something; otherwise we "break"
4571 the switch, which will "break" the while. */
4573 in_p = 0;
4574 low = high = build_int_cst (TREE_TYPE (exp), 0);
4576 while (1)
4578 code = TREE_CODE (exp);
4579 exp_type = TREE_TYPE (exp);
4580 arg0 = NULL_TREE;
4582 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4584 if (TREE_OPERAND_LENGTH (exp) > 0)
4585 arg0 = TREE_OPERAND (exp, 0);
4586 if (TREE_CODE_CLASS (code) == tcc_binary
4587 || TREE_CODE_CLASS (code) == tcc_comparison
4588 || (TREE_CODE_CLASS (code) == tcc_expression
4589 && TREE_OPERAND_LENGTH (exp) > 1))
4590 arg1 = TREE_OPERAND (exp, 1);
4592 if (arg0 == NULL_TREE)
4593 break;
4595 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4596 &high, &in_p, strict_overflow_p);
4597 if (nexp == NULL_TREE)
4598 break;
4599 exp = nexp;
4602 /* If EXP is a constant, we can evaluate whether this is true or false. */
4603 if (TREE_CODE (exp) == INTEGER_CST)
4605 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4606 exp, 0, low, 0))
4607 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4608 exp, 1, high, 1)));
4609 low = high = 0;
4610 exp = 0;
4613 *pin_p = in_p, *plow = low, *phigh = high;
4614 return exp;
4617 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4618 type, TYPE, return an expression to test if EXP is in (or out of, depending
4619 on IN_P) the range. Return 0 if the test couldn't be created. */
4621 tree
4622 build_range_check (location_t loc, tree type, tree exp, int in_p,
4623 tree low, tree high)
4625 tree etype = TREE_TYPE (exp), value;
4627 /* Disable this optimization for function pointer expressions
4628 on targets that require function pointer canonicalization. */
4629 if (targetm.have_canonicalize_funcptr_for_compare ()
4630 && TREE_CODE (etype) == POINTER_TYPE
4631 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4632 return NULL_TREE;
4634 if (! in_p)
4636 value = build_range_check (loc, type, exp, 1, low, high);
4637 if (value != 0)
4638 return invert_truthvalue_loc (loc, value);
4640 return 0;
4643 if (low == 0 && high == 0)
4644 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4646 if (low == 0)
4647 return fold_build2_loc (loc, LE_EXPR, type, exp,
4648 fold_convert_loc (loc, etype, high));
4650 if (high == 0)
4651 return fold_build2_loc (loc, GE_EXPR, type, exp,
4652 fold_convert_loc (loc, etype, low));
4654 if (operand_equal_p (low, high, 0))
4655 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4656 fold_convert_loc (loc, etype, low));
4658 if (integer_zerop (low))
4660 if (! TYPE_UNSIGNED (etype))
4662 etype = unsigned_type_for (etype);
4663 high = fold_convert_loc (loc, etype, high);
4664 exp = fold_convert_loc (loc, etype, exp);
4666 return build_range_check (loc, type, exp, 1, 0, high);
4669 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4670 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4672 int prec = TYPE_PRECISION (etype);
4674 if (wi::mask (prec - 1, false, prec) == high)
4676 if (TYPE_UNSIGNED (etype))
4678 tree signed_etype = signed_type_for (etype);
4679 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4680 etype
4681 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4682 else
4683 etype = signed_etype;
4684 exp = fold_convert_loc (loc, etype, exp);
4686 return fold_build2_loc (loc, GT_EXPR, type, exp,
4687 build_int_cst (etype, 0));
4691 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4692 This requires wrap-around arithmetics for the type of the expression.
4693 First make sure that arithmetics in this type is valid, then make sure
4694 that it wraps around. */
4695 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4696 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4697 TYPE_UNSIGNED (etype));
4699 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4701 tree utype, minv, maxv;
4703 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4704 for the type in question, as we rely on this here. */
4705 utype = unsigned_type_for (etype);
4706 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4707 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4708 build_int_cst (TREE_TYPE (maxv), 1), 1);
4709 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4711 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4712 minv, 1, maxv, 1)))
4713 etype = utype;
4714 else
4715 return 0;
4718 high = fold_convert_loc (loc, etype, high);
4719 low = fold_convert_loc (loc, etype, low);
4720 exp = fold_convert_loc (loc, etype, exp);
4722 value = const_binop (MINUS_EXPR, high, low);
4725 if (POINTER_TYPE_P (etype))
4727 if (value != 0 && !TREE_OVERFLOW (value))
4729 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4730 return build_range_check (loc, type,
4731 fold_build_pointer_plus_loc (loc, exp, low),
4732 1, build_int_cst (etype, 0), value);
4734 return 0;
4737 if (value != 0 && !TREE_OVERFLOW (value))
4738 return build_range_check (loc, type,
4739 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4740 1, build_int_cst (etype, 0), value);
4742 return 0;
4745 /* Return the predecessor of VAL in its type, handling the infinite case. */
4747 static tree
4748 range_predecessor (tree val)
4750 tree type = TREE_TYPE (val);
4752 if (INTEGRAL_TYPE_P (type)
4753 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4754 return 0;
4755 else
4756 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4757 build_int_cst (TREE_TYPE (val), 1), 0);
4760 /* Return the successor of VAL in its type, handling the infinite case. */
4762 static tree
4763 range_successor (tree val)
4765 tree type = TREE_TYPE (val);
4767 if (INTEGRAL_TYPE_P (type)
4768 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4769 return 0;
4770 else
4771 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4772 build_int_cst (TREE_TYPE (val), 1), 0);
4775 /* Given two ranges, see if we can merge them into one. Return 1 if we
4776 can, 0 if we can't. Set the output range into the specified parameters. */
4778 bool
4779 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4780 tree high0, int in1_p, tree low1, tree high1)
4782 int no_overlap;
4783 int subset;
4784 int temp;
4785 tree tem;
4786 int in_p;
4787 tree low, high;
4788 int lowequal = ((low0 == 0 && low1 == 0)
4789 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4790 low0, 0, low1, 0)));
4791 int highequal = ((high0 == 0 && high1 == 0)
4792 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4793 high0, 1, high1, 1)));
4795 /* Make range 0 be the range that starts first, or ends last if they
4796 start at the same value. Swap them if it isn't. */
4797 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4798 low0, 0, low1, 0))
4799 || (lowequal
4800 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4801 high1, 1, high0, 1))))
4803 temp = in0_p, in0_p = in1_p, in1_p = temp;
4804 tem = low0, low0 = low1, low1 = tem;
4805 tem = high0, high0 = high1, high1 = tem;
4808 /* Now flag two cases, whether the ranges are disjoint or whether the
4809 second range is totally subsumed in the first. Note that the tests
4810 below are simplified by the ones above. */
4811 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4812 high0, 1, low1, 0));
4813 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4814 high1, 1, high0, 1));
4816 /* We now have four cases, depending on whether we are including or
4817 excluding the two ranges. */
4818 if (in0_p && in1_p)
4820 /* If they don't overlap, the result is false. If the second range
4821 is a subset it is the result. Otherwise, the range is from the start
4822 of the second to the end of the first. */
4823 if (no_overlap)
4824 in_p = 0, low = high = 0;
4825 else if (subset)
4826 in_p = 1, low = low1, high = high1;
4827 else
4828 in_p = 1, low = low1, high = high0;
4831 else if (in0_p && ! in1_p)
4833 /* If they don't overlap, the result is the first range. If they are
4834 equal, the result is false. If the second range is a subset of the
4835 first, and the ranges begin at the same place, we go from just after
4836 the end of the second range to the end of the first. If the second
4837 range is not a subset of the first, or if it is a subset and both
4838 ranges end at the same place, the range starts at the start of the
4839 first range and ends just before the second range.
4840 Otherwise, we can't describe this as a single range. */
4841 if (no_overlap)
4842 in_p = 1, low = low0, high = high0;
4843 else if (lowequal && highequal)
4844 in_p = 0, low = high = 0;
4845 else if (subset && lowequal)
4847 low = range_successor (high1);
4848 high = high0;
4849 in_p = 1;
4850 if (low == 0)
4852 /* We are in the weird situation where high0 > high1 but
4853 high1 has no successor. Punt. */
4854 return 0;
4857 else if (! subset || highequal)
4859 low = low0;
4860 high = range_predecessor (low1);
4861 in_p = 1;
4862 if (high == 0)
4864 /* low0 < low1 but low1 has no predecessor. Punt. */
4865 return 0;
4868 else
4869 return 0;
4872 else if (! in0_p && in1_p)
4874 /* If they don't overlap, the result is the second range. If the second
4875 is a subset of the first, the result is false. Otherwise,
4876 the range starts just after the first range and ends at the
4877 end of the second. */
4878 if (no_overlap)
4879 in_p = 1, low = low1, high = high1;
4880 else if (subset || highequal)
4881 in_p = 0, low = high = 0;
4882 else
4884 low = range_successor (high0);
4885 high = high1;
4886 in_p = 1;
4887 if (low == 0)
4889 /* high1 > high0 but high0 has no successor. Punt. */
4890 return 0;
4895 else
4897 /* The case where we are excluding both ranges. Here the complex case
4898 is if they don't overlap. In that case, the only time we have a
4899 range is if they are adjacent. If the second is a subset of the
4900 first, the result is the first. Otherwise, the range to exclude
4901 starts at the beginning of the first range and ends at the end of the
4902 second. */
4903 if (no_overlap)
4905 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4906 range_successor (high0),
4907 1, low1, 0)))
4908 in_p = 0, low = low0, high = high1;
4909 else
4911 /* Canonicalize - [min, x] into - [-, x]. */
4912 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4913 switch (TREE_CODE (TREE_TYPE (low0)))
4915 case ENUMERAL_TYPE:
4916 if (TYPE_PRECISION (TREE_TYPE (low0))
4917 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4918 break;
4919 /* FALLTHROUGH */
4920 case INTEGER_TYPE:
4921 if (tree_int_cst_equal (low0,
4922 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4923 low0 = 0;
4924 break;
4925 case POINTER_TYPE:
4926 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4927 && integer_zerop (low0))
4928 low0 = 0;
4929 break;
4930 default:
4931 break;
4934 /* Canonicalize - [x, max] into - [x, -]. */
4935 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4936 switch (TREE_CODE (TREE_TYPE (high1)))
4938 case ENUMERAL_TYPE:
4939 if (TYPE_PRECISION (TREE_TYPE (high1))
4940 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4941 break;
4942 /* FALLTHROUGH */
4943 case INTEGER_TYPE:
4944 if (tree_int_cst_equal (high1,
4945 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4946 high1 = 0;
4947 break;
4948 case POINTER_TYPE:
4949 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4950 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4951 high1, 1,
4952 build_int_cst (TREE_TYPE (high1), 1),
4953 1)))
4954 high1 = 0;
4955 break;
4956 default:
4957 break;
4960 /* The ranges might be also adjacent between the maximum and
4961 minimum values of the given type. For
4962 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4963 return + [x + 1, y - 1]. */
4964 if (low0 == 0 && high1 == 0)
4966 low = range_successor (high0);
4967 high = range_predecessor (low1);
4968 if (low == 0 || high == 0)
4969 return 0;
4971 in_p = 1;
4973 else
4974 return 0;
4977 else if (subset)
4978 in_p = 0, low = low0, high = high0;
4979 else
4980 in_p = 0, low = low0, high = high1;
4983 *pin_p = in_p, *plow = low, *phigh = high;
4984 return 1;
4988 /* Subroutine of fold, looking inside expressions of the form
4989 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4990 of the COND_EXPR. This function is being used also to optimize
4991 A op B ? C : A, by reversing the comparison first.
4993 Return a folded expression whose code is not a COND_EXPR
4994 anymore, or NULL_TREE if no folding opportunity is found. */
4996 static tree
4997 fold_cond_expr_with_comparison (location_t loc, tree type,
4998 tree arg0, tree arg1, tree arg2)
5000 enum tree_code comp_code = TREE_CODE (arg0);
5001 tree arg00 = TREE_OPERAND (arg0, 0);
5002 tree arg01 = TREE_OPERAND (arg0, 1);
5003 tree arg1_type = TREE_TYPE (arg1);
5004 tree tem;
5006 STRIP_NOPS (arg1);
5007 STRIP_NOPS (arg2);
5009 /* If we have A op 0 ? A : -A, consider applying the following
5010 transformations:
5012 A == 0? A : -A same as -A
5013 A != 0? A : -A same as A
5014 A >= 0? A : -A same as abs (A)
5015 A > 0? A : -A same as abs (A)
5016 A <= 0? A : -A same as -abs (A)
5017 A < 0? A : -A same as -abs (A)
5019 None of these transformations work for modes with signed
5020 zeros. If A is +/-0, the first two transformations will
5021 change the sign of the result (from +0 to -0, or vice
5022 versa). The last four will fix the sign of the result,
5023 even though the original expressions could be positive or
5024 negative, depending on the sign of A.
5026 Note that all these transformations are correct if A is
5027 NaN, since the two alternatives (A and -A) are also NaNs. */
5028 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5029 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5030 ? real_zerop (arg01)
5031 : integer_zerop (arg01))
5032 && ((TREE_CODE (arg2) == NEGATE_EXPR
5033 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5034 /* In the case that A is of the form X-Y, '-A' (arg2) may
5035 have already been folded to Y-X, check for that. */
5036 || (TREE_CODE (arg1) == MINUS_EXPR
5037 && TREE_CODE (arg2) == MINUS_EXPR
5038 && operand_equal_p (TREE_OPERAND (arg1, 0),
5039 TREE_OPERAND (arg2, 1), 0)
5040 && operand_equal_p (TREE_OPERAND (arg1, 1),
5041 TREE_OPERAND (arg2, 0), 0))))
5042 switch (comp_code)
5044 case EQ_EXPR:
5045 case UNEQ_EXPR:
5046 tem = fold_convert_loc (loc, arg1_type, arg1);
5047 return pedantic_non_lvalue_loc (loc,
5048 fold_convert_loc (loc, type,
5049 negate_expr (tem)));
5050 case NE_EXPR:
5051 case LTGT_EXPR:
5052 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5053 case UNGE_EXPR:
5054 case UNGT_EXPR:
5055 if (flag_trapping_math)
5056 break;
5057 /* Fall through. */
5058 case GE_EXPR:
5059 case GT_EXPR:
5060 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5061 break;
5062 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5063 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5064 case UNLE_EXPR:
5065 case UNLT_EXPR:
5066 if (flag_trapping_math)
5067 break;
5068 case LE_EXPR:
5069 case LT_EXPR:
5070 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5071 break;
5072 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5073 return negate_expr (fold_convert_loc (loc, type, tem));
5074 default:
5075 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5076 break;
5079 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5080 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5081 both transformations are correct when A is NaN: A != 0
5082 is then true, and A == 0 is false. */
5084 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5085 && integer_zerop (arg01) && integer_zerop (arg2))
5087 if (comp_code == NE_EXPR)
5088 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5089 else if (comp_code == EQ_EXPR)
5090 return build_zero_cst (type);
5093 /* Try some transformations of A op B ? A : B.
5095 A == B? A : B same as B
5096 A != B? A : B same as A
5097 A >= B? A : B same as max (A, B)
5098 A > B? A : B same as max (B, A)
5099 A <= B? A : B same as min (A, B)
5100 A < B? A : B same as min (B, A)
5102 As above, these transformations don't work in the presence
5103 of signed zeros. For example, if A and B are zeros of
5104 opposite sign, the first two transformations will change
5105 the sign of the result. In the last four, the original
5106 expressions give different results for (A=+0, B=-0) and
5107 (A=-0, B=+0), but the transformed expressions do not.
5109 The first two transformations are correct if either A or B
5110 is a NaN. In the first transformation, the condition will
5111 be false, and B will indeed be chosen. In the case of the
5112 second transformation, the condition A != B will be true,
5113 and A will be chosen.
5115 The conversions to max() and min() are not correct if B is
5116 a number and A is not. The conditions in the original
5117 expressions will be false, so all four give B. The min()
5118 and max() versions would give a NaN instead. */
5119 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5120 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5121 /* Avoid these transformations if the COND_EXPR may be used
5122 as an lvalue in the C++ front-end. PR c++/19199. */
5123 && (in_gimple_form
5124 || VECTOR_TYPE_P (type)
5125 || (! lang_GNU_CXX ()
5126 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5127 || ! maybe_lvalue_p (arg1)
5128 || ! maybe_lvalue_p (arg2)))
5130 tree comp_op0 = arg00;
5131 tree comp_op1 = arg01;
5132 tree comp_type = TREE_TYPE (comp_op0);
5134 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5135 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5137 comp_type = type;
5138 comp_op0 = arg1;
5139 comp_op1 = arg2;
5142 switch (comp_code)
5144 case EQ_EXPR:
5145 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5146 case NE_EXPR:
5147 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5148 case LE_EXPR:
5149 case LT_EXPR:
5150 case UNLE_EXPR:
5151 case UNLT_EXPR:
5152 /* In C++ a ?: expression can be an lvalue, so put the
5153 operand which will be used if they are equal first
5154 so that we can convert this back to the
5155 corresponding COND_EXPR. */
5156 if (!HONOR_NANS (arg1))
5158 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5159 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5160 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5161 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5162 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5163 comp_op1, comp_op0);
5164 return pedantic_non_lvalue_loc (loc,
5165 fold_convert_loc (loc, type, tem));
5167 break;
5168 case GE_EXPR:
5169 case GT_EXPR:
5170 case UNGE_EXPR:
5171 case UNGT_EXPR:
5172 if (!HONOR_NANS (arg1))
5174 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5175 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5176 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5177 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5178 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5179 comp_op1, comp_op0);
5180 return pedantic_non_lvalue_loc (loc,
5181 fold_convert_loc (loc, type, tem));
5183 break;
5184 case UNEQ_EXPR:
5185 if (!HONOR_NANS (arg1))
5186 return pedantic_non_lvalue_loc (loc,
5187 fold_convert_loc (loc, type, arg2));
5188 break;
5189 case LTGT_EXPR:
5190 if (!HONOR_NANS (arg1))
5191 return pedantic_non_lvalue_loc (loc,
5192 fold_convert_loc (loc, type, arg1));
5193 break;
5194 default:
5195 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5196 break;
5200 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5201 we might still be able to simplify this. For example,
5202 if C1 is one less or one more than C2, this might have started
5203 out as a MIN or MAX and been transformed by this function.
5204 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5206 if (INTEGRAL_TYPE_P (type)
5207 && TREE_CODE (arg01) == INTEGER_CST
5208 && TREE_CODE (arg2) == INTEGER_CST)
5209 switch (comp_code)
5211 case EQ_EXPR:
5212 if (TREE_CODE (arg1) == INTEGER_CST)
5213 break;
5214 /* We can replace A with C1 in this case. */
5215 arg1 = fold_convert_loc (loc, type, arg01);
5216 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5218 case LT_EXPR:
5219 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5220 MIN_EXPR, to preserve the signedness of the comparison. */
5221 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5222 OEP_ONLY_CONST)
5223 && operand_equal_p (arg01,
5224 const_binop (PLUS_EXPR, arg2,
5225 build_int_cst (type, 1)),
5226 OEP_ONLY_CONST))
5228 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5229 fold_convert_loc (loc, TREE_TYPE (arg00),
5230 arg2));
5231 return pedantic_non_lvalue_loc (loc,
5232 fold_convert_loc (loc, type, tem));
5234 break;
5236 case LE_EXPR:
5237 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5238 as above. */
5239 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5240 OEP_ONLY_CONST)
5241 && operand_equal_p (arg01,
5242 const_binop (MINUS_EXPR, arg2,
5243 build_int_cst (type, 1)),
5244 OEP_ONLY_CONST))
5246 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5247 fold_convert_loc (loc, TREE_TYPE (arg00),
5248 arg2));
5249 return pedantic_non_lvalue_loc (loc,
5250 fold_convert_loc (loc, type, tem));
5252 break;
5254 case GT_EXPR:
5255 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5256 MAX_EXPR, to preserve the signedness of the comparison. */
5257 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5258 OEP_ONLY_CONST)
5259 && operand_equal_p (arg01,
5260 const_binop (MINUS_EXPR, arg2,
5261 build_int_cst (type, 1)),
5262 OEP_ONLY_CONST))
5264 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5265 fold_convert_loc (loc, TREE_TYPE (arg00),
5266 arg2));
5267 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5269 break;
5271 case GE_EXPR:
5272 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5273 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5274 OEP_ONLY_CONST)
5275 && operand_equal_p (arg01,
5276 const_binop (PLUS_EXPR, arg2,
5277 build_int_cst (type, 1)),
5278 OEP_ONLY_CONST))
5280 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5281 fold_convert_loc (loc, TREE_TYPE (arg00),
5282 arg2));
5283 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5285 break;
5286 case NE_EXPR:
5287 break;
5288 default:
5289 gcc_unreachable ();
5292 return NULL_TREE;
5297 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5298 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5299 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5300 false) >= 2)
5301 #endif
5303 /* EXP is some logical combination of boolean tests. See if we can
5304 merge it into some range test. Return the new tree if so. */
5306 static tree
5307 fold_range_test (location_t loc, enum tree_code code, tree type,
5308 tree op0, tree op1)
5310 int or_op = (code == TRUTH_ORIF_EXPR
5311 || code == TRUTH_OR_EXPR);
5312 int in0_p, in1_p, in_p;
5313 tree low0, low1, low, high0, high1, high;
5314 bool strict_overflow_p = false;
5315 tree tem, lhs, rhs;
5316 const char * const warnmsg = G_("assuming signed overflow does not occur "
5317 "when simplifying range test");
5319 if (!INTEGRAL_TYPE_P (type))
5320 return 0;
5322 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5323 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5325 /* If this is an OR operation, invert both sides; we will invert
5326 again at the end. */
5327 if (or_op)
5328 in0_p = ! in0_p, in1_p = ! in1_p;
5330 /* If both expressions are the same, if we can merge the ranges, and we
5331 can build the range test, return it or it inverted. If one of the
5332 ranges is always true or always false, consider it to be the same
5333 expression as the other. */
5334 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5335 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5336 in1_p, low1, high1)
5337 && 0 != (tem = (build_range_check (loc, type,
5338 lhs != 0 ? lhs
5339 : rhs != 0 ? rhs : integer_zero_node,
5340 in_p, low, high))))
5342 if (strict_overflow_p)
5343 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5344 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5347 /* On machines where the branch cost is expensive, if this is a
5348 short-circuited branch and the underlying object on both sides
5349 is the same, make a non-short-circuit operation. */
5350 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5351 && lhs != 0 && rhs != 0
5352 && (code == TRUTH_ANDIF_EXPR
5353 || code == TRUTH_ORIF_EXPR)
5354 && operand_equal_p (lhs, rhs, 0))
5356 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5357 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5358 which cases we can't do this. */
5359 if (simple_operand_p (lhs))
5360 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5361 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5362 type, op0, op1);
5364 else if (!lang_hooks.decls.global_bindings_p ()
5365 && !CONTAINS_PLACEHOLDER_P (lhs))
5367 tree common = save_expr (lhs);
5369 if (0 != (lhs = build_range_check (loc, type, common,
5370 or_op ? ! in0_p : in0_p,
5371 low0, high0))
5372 && (0 != (rhs = build_range_check (loc, type, common,
5373 or_op ? ! in1_p : in1_p,
5374 low1, high1))))
5376 if (strict_overflow_p)
5377 fold_overflow_warning (warnmsg,
5378 WARN_STRICT_OVERFLOW_COMPARISON);
5379 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5380 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5381 type, lhs, rhs);
5386 return 0;
5389 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5390 bit value. Arrange things so the extra bits will be set to zero if and
5391 only if C is signed-extended to its full width. If MASK is nonzero,
5392 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5394 static tree
5395 unextend (tree c, int p, int unsignedp, tree mask)
5397 tree type = TREE_TYPE (c);
5398 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5399 tree temp;
5401 if (p == modesize || unsignedp)
5402 return c;
5404 /* We work by getting just the sign bit into the low-order bit, then
5405 into the high-order bit, then sign-extend. We then XOR that value
5406 with C. */
5407 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5409 /* We must use a signed type in order to get an arithmetic right shift.
5410 However, we must also avoid introducing accidental overflows, so that
5411 a subsequent call to integer_zerop will work. Hence we must
5412 do the type conversion here. At this point, the constant is either
5413 zero or one, and the conversion to a signed type can never overflow.
5414 We could get an overflow if this conversion is done anywhere else. */
5415 if (TYPE_UNSIGNED (type))
5416 temp = fold_convert (signed_type_for (type), temp);
5418 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5419 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5420 if (mask != 0)
5421 temp = const_binop (BIT_AND_EXPR, temp,
5422 fold_convert (TREE_TYPE (c), mask));
5423 /* If necessary, convert the type back to match the type of C. */
5424 if (TYPE_UNSIGNED (type))
5425 temp = fold_convert (type, temp);
5427 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5430 /* For an expression that has the form
5431 (A && B) || ~B
5433 (A || B) && ~B,
5434 we can drop one of the inner expressions and simplify to
5435 A || ~B
5437 A && ~B
5438 LOC is the location of the resulting expression. OP is the inner
5439 logical operation; the left-hand side in the examples above, while CMPOP
5440 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5441 removing a condition that guards another, as in
5442 (A != NULL && A->...) || A == NULL
5443 which we must not transform. If RHS_ONLY is true, only eliminate the
5444 right-most operand of the inner logical operation. */
5446 static tree
5447 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5448 bool rhs_only)
5450 tree type = TREE_TYPE (cmpop);
5451 enum tree_code code = TREE_CODE (cmpop);
5452 enum tree_code truthop_code = TREE_CODE (op);
5453 tree lhs = TREE_OPERAND (op, 0);
5454 tree rhs = TREE_OPERAND (op, 1);
5455 tree orig_lhs = lhs, orig_rhs = rhs;
5456 enum tree_code rhs_code = TREE_CODE (rhs);
5457 enum tree_code lhs_code = TREE_CODE (lhs);
5458 enum tree_code inv_code;
5460 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5461 return NULL_TREE;
5463 if (TREE_CODE_CLASS (code) != tcc_comparison)
5464 return NULL_TREE;
5466 if (rhs_code == truthop_code)
5468 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5469 if (newrhs != NULL_TREE)
5471 rhs = newrhs;
5472 rhs_code = TREE_CODE (rhs);
5475 if (lhs_code == truthop_code && !rhs_only)
5477 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5478 if (newlhs != NULL_TREE)
5480 lhs = newlhs;
5481 lhs_code = TREE_CODE (lhs);
5485 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5486 if (inv_code == rhs_code
5487 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5488 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5489 return lhs;
5490 if (!rhs_only && inv_code == lhs_code
5491 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5492 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5493 return rhs;
5494 if (rhs != orig_rhs || lhs != orig_lhs)
5495 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5496 lhs, rhs);
5497 return NULL_TREE;
5500 /* Find ways of folding logical expressions of LHS and RHS:
5501 Try to merge two comparisons to the same innermost item.
5502 Look for range tests like "ch >= '0' && ch <= '9'".
5503 Look for combinations of simple terms on machines with expensive branches
5504 and evaluate the RHS unconditionally.
5506 For example, if we have p->a == 2 && p->b == 4 and we can make an
5507 object large enough to span both A and B, we can do this with a comparison
5508 against the object ANDed with the a mask.
5510 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5511 operations to do this with one comparison.
5513 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5514 function and the one above.
5516 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5517 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5519 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5520 two operands.
5522 We return the simplified tree or 0 if no optimization is possible. */
5524 static tree
5525 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5526 tree lhs, tree rhs)
5528 /* If this is the "or" of two comparisons, we can do something if
5529 the comparisons are NE_EXPR. If this is the "and", we can do something
5530 if the comparisons are EQ_EXPR. I.e.,
5531 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5533 WANTED_CODE is this operation code. For single bit fields, we can
5534 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5535 comparison for one-bit fields. */
5537 enum tree_code wanted_code;
5538 enum tree_code lcode, rcode;
5539 tree ll_arg, lr_arg, rl_arg, rr_arg;
5540 tree ll_inner, lr_inner, rl_inner, rr_inner;
5541 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5542 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5543 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5544 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5545 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5546 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5547 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5548 machine_mode lnmode, rnmode;
5549 tree ll_mask, lr_mask, rl_mask, rr_mask;
5550 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5551 tree l_const, r_const;
5552 tree lntype, rntype, result;
5553 HOST_WIDE_INT first_bit, end_bit;
5554 int volatilep;
5556 /* Start by getting the comparison codes. Fail if anything is volatile.
5557 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5558 it were surrounded with a NE_EXPR. */
5560 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5561 return 0;
5563 lcode = TREE_CODE (lhs);
5564 rcode = TREE_CODE (rhs);
5566 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5568 lhs = build2 (NE_EXPR, truth_type, lhs,
5569 build_int_cst (TREE_TYPE (lhs), 0));
5570 lcode = NE_EXPR;
5573 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5575 rhs = build2 (NE_EXPR, truth_type, rhs,
5576 build_int_cst (TREE_TYPE (rhs), 0));
5577 rcode = NE_EXPR;
5580 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5581 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5582 return 0;
5584 ll_arg = TREE_OPERAND (lhs, 0);
5585 lr_arg = TREE_OPERAND (lhs, 1);
5586 rl_arg = TREE_OPERAND (rhs, 0);
5587 rr_arg = TREE_OPERAND (rhs, 1);
5589 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5590 if (simple_operand_p (ll_arg)
5591 && simple_operand_p (lr_arg))
5593 if (operand_equal_p (ll_arg, rl_arg, 0)
5594 && operand_equal_p (lr_arg, rr_arg, 0))
5596 result = combine_comparisons (loc, code, lcode, rcode,
5597 truth_type, ll_arg, lr_arg);
5598 if (result)
5599 return result;
5601 else if (operand_equal_p (ll_arg, rr_arg, 0)
5602 && operand_equal_p (lr_arg, rl_arg, 0))
5604 result = combine_comparisons (loc, code, lcode,
5605 swap_tree_comparison (rcode),
5606 truth_type, ll_arg, lr_arg);
5607 if (result)
5608 return result;
5612 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5613 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5615 /* If the RHS can be evaluated unconditionally and its operands are
5616 simple, it wins to evaluate the RHS unconditionally on machines
5617 with expensive branches. In this case, this isn't a comparison
5618 that can be merged. */
5620 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5621 false) >= 2
5622 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5623 && simple_operand_p (rl_arg)
5624 && simple_operand_p (rr_arg))
5626 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5627 if (code == TRUTH_OR_EXPR
5628 && lcode == NE_EXPR && integer_zerop (lr_arg)
5629 && rcode == NE_EXPR && integer_zerop (rr_arg)
5630 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5631 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5632 return build2_loc (loc, NE_EXPR, truth_type,
5633 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5634 ll_arg, rl_arg),
5635 build_int_cst (TREE_TYPE (ll_arg), 0));
5637 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5638 if (code == TRUTH_AND_EXPR
5639 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5640 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5641 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5642 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5643 return build2_loc (loc, EQ_EXPR, truth_type,
5644 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5645 ll_arg, rl_arg),
5646 build_int_cst (TREE_TYPE (ll_arg), 0));
5649 /* See if the comparisons can be merged. Then get all the parameters for
5650 each side. */
5652 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5653 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5654 return 0;
5656 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5657 volatilep = 0;
5658 ll_inner = decode_field_reference (loc, ll_arg,
5659 &ll_bitsize, &ll_bitpos, &ll_mode,
5660 &ll_unsignedp, &ll_reversep, &volatilep,
5661 &ll_mask, &ll_and_mask);
5662 lr_inner = decode_field_reference (loc, lr_arg,
5663 &lr_bitsize, &lr_bitpos, &lr_mode,
5664 &lr_unsignedp, &lr_reversep, &volatilep,
5665 &lr_mask, &lr_and_mask);
5666 rl_inner = decode_field_reference (loc, rl_arg,
5667 &rl_bitsize, &rl_bitpos, &rl_mode,
5668 &rl_unsignedp, &rl_reversep, &volatilep,
5669 &rl_mask, &rl_and_mask);
5670 rr_inner = decode_field_reference (loc, rr_arg,
5671 &rr_bitsize, &rr_bitpos, &rr_mode,
5672 &rr_unsignedp, &rr_reversep, &volatilep,
5673 &rr_mask, &rr_and_mask);
5675 /* It must be true that the inner operation on the lhs of each
5676 comparison must be the same if we are to be able to do anything.
5677 Then see if we have constants. If not, the same must be true for
5678 the rhs's. */
5679 if (volatilep
5680 || ll_reversep != rl_reversep
5681 || ll_inner == 0 || rl_inner == 0
5682 || ! operand_equal_p (ll_inner, rl_inner, 0))
5683 return 0;
5685 if (TREE_CODE (lr_arg) == INTEGER_CST
5686 && TREE_CODE (rr_arg) == INTEGER_CST)
5688 l_const = lr_arg, r_const = rr_arg;
5689 lr_reversep = ll_reversep;
5691 else if (lr_reversep != rr_reversep
5692 || lr_inner == 0 || rr_inner == 0
5693 || ! operand_equal_p (lr_inner, rr_inner, 0))
5694 return 0;
5695 else
5696 l_const = r_const = 0;
5698 /* If either comparison code is not correct for our logical operation,
5699 fail. However, we can convert a one-bit comparison against zero into
5700 the opposite comparison against that bit being set in the field. */
5702 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5703 if (lcode != wanted_code)
5705 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5707 /* Make the left operand unsigned, since we are only interested
5708 in the value of one bit. Otherwise we are doing the wrong
5709 thing below. */
5710 ll_unsignedp = 1;
5711 l_const = ll_mask;
5713 else
5714 return 0;
5717 /* This is analogous to the code for l_const above. */
5718 if (rcode != wanted_code)
5720 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5722 rl_unsignedp = 1;
5723 r_const = rl_mask;
5725 else
5726 return 0;
5729 /* See if we can find a mode that contains both fields being compared on
5730 the left. If we can't, fail. Otherwise, update all constants and masks
5731 to be relative to a field of that size. */
5732 first_bit = MIN (ll_bitpos, rl_bitpos);
5733 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5734 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5735 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5736 volatilep);
5737 if (lnmode == VOIDmode)
5738 return 0;
5740 lnbitsize = GET_MODE_BITSIZE (lnmode);
5741 lnbitpos = first_bit & ~ (lnbitsize - 1);
5742 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5743 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5745 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5747 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5748 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5751 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5752 size_int (xll_bitpos));
5753 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5754 size_int (xrl_bitpos));
5756 if (l_const)
5758 l_const = fold_convert_loc (loc, lntype, l_const);
5759 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5760 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5761 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5762 fold_build1_loc (loc, BIT_NOT_EXPR,
5763 lntype, ll_mask))))
5765 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5767 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5770 if (r_const)
5772 r_const = fold_convert_loc (loc, lntype, r_const);
5773 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5774 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5775 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5776 fold_build1_loc (loc, BIT_NOT_EXPR,
5777 lntype, rl_mask))))
5779 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5781 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5785 /* If the right sides are not constant, do the same for it. Also,
5786 disallow this optimization if a size or signedness mismatch occurs
5787 between the left and right sides. */
5788 if (l_const == 0)
5790 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5791 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5792 /* Make sure the two fields on the right
5793 correspond to the left without being swapped. */
5794 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5795 return 0;
5797 first_bit = MIN (lr_bitpos, rr_bitpos);
5798 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5799 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5800 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5801 volatilep);
5802 if (rnmode == VOIDmode)
5803 return 0;
5805 rnbitsize = GET_MODE_BITSIZE (rnmode);
5806 rnbitpos = first_bit & ~ (rnbitsize - 1);
5807 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5808 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5810 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5812 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5813 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5816 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5817 rntype, lr_mask),
5818 size_int (xlr_bitpos));
5819 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5820 rntype, rr_mask),
5821 size_int (xrr_bitpos));
5823 /* Make a mask that corresponds to both fields being compared.
5824 Do this for both items being compared. If the operands are the
5825 same size and the bits being compared are in the same position
5826 then we can do this by masking both and comparing the masked
5827 results. */
5828 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5829 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5830 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5832 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5833 ll_unsignedp || rl_unsignedp, ll_reversep);
5834 if (! all_ones_mask_p (ll_mask, lnbitsize))
5835 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5837 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5838 lr_unsignedp || rr_unsignedp, lr_reversep);
5839 if (! all_ones_mask_p (lr_mask, rnbitsize))
5840 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5842 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5845 /* There is still another way we can do something: If both pairs of
5846 fields being compared are adjacent, we may be able to make a wider
5847 field containing them both.
5849 Note that we still must mask the lhs/rhs expressions. Furthermore,
5850 the mask must be shifted to account for the shift done by
5851 make_bit_field_ref. */
5852 if ((ll_bitsize + ll_bitpos == rl_bitpos
5853 && lr_bitsize + lr_bitpos == rr_bitpos)
5854 || (ll_bitpos == rl_bitpos + rl_bitsize
5855 && lr_bitpos == rr_bitpos + rr_bitsize))
5857 tree type;
5859 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5860 ll_bitsize + rl_bitsize,
5861 MIN (ll_bitpos, rl_bitpos),
5862 ll_unsignedp, ll_reversep);
5863 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5864 lr_bitsize + rr_bitsize,
5865 MIN (lr_bitpos, rr_bitpos),
5866 lr_unsignedp, lr_reversep);
5868 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5869 size_int (MIN (xll_bitpos, xrl_bitpos)));
5870 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5871 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5873 /* Convert to the smaller type before masking out unwanted bits. */
5874 type = lntype;
5875 if (lntype != rntype)
5877 if (lnbitsize > rnbitsize)
5879 lhs = fold_convert_loc (loc, rntype, lhs);
5880 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5881 type = rntype;
5883 else if (lnbitsize < rnbitsize)
5885 rhs = fold_convert_loc (loc, lntype, rhs);
5886 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5887 type = lntype;
5891 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5892 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5894 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5895 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5897 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5900 return 0;
5903 /* Handle the case of comparisons with constants. If there is something in
5904 common between the masks, those bits of the constants must be the same.
5905 If not, the condition is always false. Test for this to avoid generating
5906 incorrect code below. */
5907 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5908 if (! integer_zerop (result)
5909 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5910 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5912 if (wanted_code == NE_EXPR)
5914 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5915 return constant_boolean_node (true, truth_type);
5917 else
5919 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5920 return constant_boolean_node (false, truth_type);
5924 /* Construct the expression we will return. First get the component
5925 reference we will make. Unless the mask is all ones the width of
5926 that field, perform the mask operation. Then compare with the
5927 merged constant. */
5928 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5929 ll_unsignedp || rl_unsignedp, ll_reversep);
5931 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5932 if (! all_ones_mask_p (ll_mask, lnbitsize))
5933 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5935 return build2_loc (loc, wanted_code, truth_type, result,
5936 const_binop (BIT_IOR_EXPR, l_const, r_const));
5939 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5940 constant. */
5942 static tree
5943 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5944 tree op0, tree op1)
5946 tree arg0 = op0;
5947 enum tree_code op_code;
5948 tree comp_const;
5949 tree minmax_const;
5950 int consts_equal, consts_lt;
5951 tree inner;
5953 STRIP_SIGN_NOPS (arg0);
5955 op_code = TREE_CODE (arg0);
5956 minmax_const = TREE_OPERAND (arg0, 1);
5957 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5958 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5959 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5960 inner = TREE_OPERAND (arg0, 0);
5962 /* If something does not permit us to optimize, return the original tree. */
5963 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5964 || TREE_CODE (comp_const) != INTEGER_CST
5965 || TREE_OVERFLOW (comp_const)
5966 || TREE_CODE (minmax_const) != INTEGER_CST
5967 || TREE_OVERFLOW (minmax_const))
5968 return NULL_TREE;
5970 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5971 and GT_EXPR, doing the rest with recursive calls using logical
5972 simplifications. */
5973 switch (code)
5975 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5977 tree tem
5978 = optimize_minmax_comparison (loc,
5979 invert_tree_comparison (code, false),
5980 type, op0, op1);
5981 if (tem)
5982 return invert_truthvalue_loc (loc, tem);
5983 return NULL_TREE;
5986 case GE_EXPR:
5987 return
5988 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5989 optimize_minmax_comparison
5990 (loc, EQ_EXPR, type, arg0, comp_const),
5991 optimize_minmax_comparison
5992 (loc, GT_EXPR, type, arg0, comp_const));
5994 case EQ_EXPR:
5995 if (op_code == MAX_EXPR && consts_equal)
5996 /* MAX (X, 0) == 0 -> X <= 0 */
5997 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5999 else if (op_code == MAX_EXPR && consts_lt)
6000 /* MAX (X, 0) == 5 -> X == 5 */
6001 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6003 else if (op_code == MAX_EXPR)
6004 /* MAX (X, 0) == -1 -> false */
6005 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6007 else if (consts_equal)
6008 /* MIN (X, 0) == 0 -> X >= 0 */
6009 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6011 else if (consts_lt)
6012 /* MIN (X, 0) == 5 -> false */
6013 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6015 else
6016 /* MIN (X, 0) == -1 -> X == -1 */
6017 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6019 case GT_EXPR:
6020 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6021 /* MAX (X, 0) > 0 -> X > 0
6022 MAX (X, 0) > 5 -> X > 5 */
6023 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6025 else if (op_code == MAX_EXPR)
6026 /* MAX (X, 0) > -1 -> true */
6027 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6029 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6030 /* MIN (X, 0) > 0 -> false
6031 MIN (X, 0) > 5 -> false */
6032 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6034 else
6035 /* MIN (X, 0) > -1 -> X > -1 */
6036 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6038 default:
6039 return NULL_TREE;
6043 /* T is an integer expression that is being multiplied, divided, or taken a
6044 modulus (CODE says which and what kind of divide or modulus) by a
6045 constant C. See if we can eliminate that operation by folding it with
6046 other operations already in T. WIDE_TYPE, if non-null, is a type that
6047 should be used for the computation if wider than our type.
6049 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6050 (X * 2) + (Y * 4). We must, however, be assured that either the original
6051 expression would not overflow or that overflow is undefined for the type
6052 in the language in question.
6054 If we return a non-null expression, it is an equivalent form of the
6055 original computation, but need not be in the original type.
6057 We set *STRICT_OVERFLOW_P to true if the return values depends on
6058 signed overflow being undefined. Otherwise we do not change
6059 *STRICT_OVERFLOW_P. */
6061 static tree
6062 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6063 bool *strict_overflow_p)
6065 /* To avoid exponential search depth, refuse to allow recursion past
6066 three levels. Beyond that (1) it's highly unlikely that we'll find
6067 something interesting and (2) we've probably processed it before
6068 when we built the inner expression. */
6070 static int depth;
6071 tree ret;
6073 if (depth > 3)
6074 return NULL;
6076 depth++;
6077 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6078 depth--;
6080 return ret;
6083 static tree
6084 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6085 bool *strict_overflow_p)
6087 tree type = TREE_TYPE (t);
6088 enum tree_code tcode = TREE_CODE (t);
6089 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6090 > GET_MODE_SIZE (TYPE_MODE (type)))
6091 ? wide_type : type);
6092 tree t1, t2;
6093 int same_p = tcode == code;
6094 tree op0 = NULL_TREE, op1 = NULL_TREE;
6095 bool sub_strict_overflow_p;
6097 /* Don't deal with constants of zero here; they confuse the code below. */
6098 if (integer_zerop (c))
6099 return NULL_TREE;
6101 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6102 op0 = TREE_OPERAND (t, 0);
6104 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6105 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6107 /* Note that we need not handle conditional operations here since fold
6108 already handles those cases. So just do arithmetic here. */
6109 switch (tcode)
6111 case INTEGER_CST:
6112 /* For a constant, we can always simplify if we are a multiply
6113 or (for divide and modulus) if it is a multiple of our constant. */
6114 if (code == MULT_EXPR
6115 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6117 tree tem = const_binop (code, fold_convert (ctype, t),
6118 fold_convert (ctype, c));
6119 /* If the multiplication overflowed to INT_MIN then we lost sign
6120 information on it and a subsequent multiplication might
6121 spuriously overflow. See PR68142. */
6122 if (TREE_OVERFLOW (tem)
6123 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6124 return NULL_TREE;
6125 return tem;
6127 break;
6129 CASE_CONVERT: case NON_LVALUE_EXPR:
6130 /* If op0 is an expression ... */
6131 if ((COMPARISON_CLASS_P (op0)
6132 || UNARY_CLASS_P (op0)
6133 || BINARY_CLASS_P (op0)
6134 || VL_EXP_CLASS_P (op0)
6135 || EXPRESSION_CLASS_P (op0))
6136 /* ... and has wrapping overflow, and its type is smaller
6137 than ctype, then we cannot pass through as widening. */
6138 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6139 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6140 && (TYPE_PRECISION (ctype)
6141 > TYPE_PRECISION (TREE_TYPE (op0))))
6142 /* ... or this is a truncation (t is narrower than op0),
6143 then we cannot pass through this narrowing. */
6144 || (TYPE_PRECISION (type)
6145 < TYPE_PRECISION (TREE_TYPE (op0)))
6146 /* ... or signedness changes for division or modulus,
6147 then we cannot pass through this conversion. */
6148 || (code != MULT_EXPR
6149 && (TYPE_UNSIGNED (ctype)
6150 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6151 /* ... or has undefined overflow while the converted to
6152 type has not, we cannot do the operation in the inner type
6153 as that would introduce undefined overflow. */
6154 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6155 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6156 && !TYPE_OVERFLOW_UNDEFINED (type))))
6157 break;
6159 /* Pass the constant down and see if we can make a simplification. If
6160 we can, replace this expression with the inner simplification for
6161 possible later conversion to our or some other type. */
6162 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6163 && TREE_CODE (t2) == INTEGER_CST
6164 && !TREE_OVERFLOW (t2)
6165 && (0 != (t1 = extract_muldiv (op0, t2, code,
6166 code == MULT_EXPR
6167 ? ctype : NULL_TREE,
6168 strict_overflow_p))))
6169 return t1;
6170 break;
6172 case ABS_EXPR:
6173 /* If widening the type changes it from signed to unsigned, then we
6174 must avoid building ABS_EXPR itself as unsigned. */
6175 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6177 tree cstype = (*signed_type_for) (ctype);
6178 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6179 != 0)
6181 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6182 return fold_convert (ctype, t1);
6184 break;
6186 /* If the constant is negative, we cannot simplify this. */
6187 if (tree_int_cst_sgn (c) == -1)
6188 break;
6189 /* FALLTHROUGH */
6190 case NEGATE_EXPR:
6191 /* For division and modulus, type can't be unsigned, as e.g.
6192 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6193 For signed types, even with wrapping overflow, this is fine. */
6194 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6195 break;
6196 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6197 != 0)
6198 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6199 break;
6201 case MIN_EXPR: case MAX_EXPR:
6202 /* If widening the type changes the signedness, then we can't perform
6203 this optimization as that changes the result. */
6204 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6205 break;
6207 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6208 sub_strict_overflow_p = false;
6209 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6210 &sub_strict_overflow_p)) != 0
6211 && (t2 = extract_muldiv (op1, c, code, wide_type,
6212 &sub_strict_overflow_p)) != 0)
6214 if (tree_int_cst_sgn (c) < 0)
6215 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6216 if (sub_strict_overflow_p)
6217 *strict_overflow_p = true;
6218 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6219 fold_convert (ctype, t2));
6221 break;
6223 case LSHIFT_EXPR: case RSHIFT_EXPR:
6224 /* If the second operand is constant, this is a multiplication
6225 or floor division, by a power of two, so we can treat it that
6226 way unless the multiplier or divisor overflows. Signed
6227 left-shift overflow is implementation-defined rather than
6228 undefined in C90, so do not convert signed left shift into
6229 multiplication. */
6230 if (TREE_CODE (op1) == INTEGER_CST
6231 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6232 /* const_binop may not detect overflow correctly,
6233 so check for it explicitly here. */
6234 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6235 && 0 != (t1 = fold_convert (ctype,
6236 const_binop (LSHIFT_EXPR,
6237 size_one_node,
6238 op1)))
6239 && !TREE_OVERFLOW (t1))
6240 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6241 ? MULT_EXPR : FLOOR_DIV_EXPR,
6242 ctype,
6243 fold_convert (ctype, op0),
6244 t1),
6245 c, code, wide_type, strict_overflow_p);
6246 break;
6248 case PLUS_EXPR: case MINUS_EXPR:
6249 /* See if we can eliminate the operation on both sides. If we can, we
6250 can return a new PLUS or MINUS. If we can't, the only remaining
6251 cases where we can do anything are if the second operand is a
6252 constant. */
6253 sub_strict_overflow_p = false;
6254 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6255 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6256 if (t1 != 0 && t2 != 0
6257 && (code == MULT_EXPR
6258 /* If not multiplication, we can only do this if both operands
6259 are divisible by c. */
6260 || (multiple_of_p (ctype, op0, c)
6261 && multiple_of_p (ctype, op1, c))))
6263 if (sub_strict_overflow_p)
6264 *strict_overflow_p = true;
6265 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6266 fold_convert (ctype, t2));
6269 /* If this was a subtraction, negate OP1 and set it to be an addition.
6270 This simplifies the logic below. */
6271 if (tcode == MINUS_EXPR)
6273 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6274 /* If OP1 was not easily negatable, the constant may be OP0. */
6275 if (TREE_CODE (op0) == INTEGER_CST)
6277 std::swap (op0, op1);
6278 std::swap (t1, t2);
6282 if (TREE_CODE (op1) != INTEGER_CST)
6283 break;
6285 /* If either OP1 or C are negative, this optimization is not safe for
6286 some of the division and remainder types while for others we need
6287 to change the code. */
6288 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6290 if (code == CEIL_DIV_EXPR)
6291 code = FLOOR_DIV_EXPR;
6292 else if (code == FLOOR_DIV_EXPR)
6293 code = CEIL_DIV_EXPR;
6294 else if (code != MULT_EXPR
6295 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6296 break;
6299 /* If it's a multiply or a division/modulus operation of a multiple
6300 of our constant, do the operation and verify it doesn't overflow. */
6301 if (code == MULT_EXPR
6302 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6304 op1 = const_binop (code, fold_convert (ctype, op1),
6305 fold_convert (ctype, c));
6306 /* We allow the constant to overflow with wrapping semantics. */
6307 if (op1 == 0
6308 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6309 break;
6311 else
6312 break;
6314 /* If we have an unsigned type, we cannot widen the operation since it
6315 will change the result if the original computation overflowed. */
6316 if (TYPE_UNSIGNED (ctype) && ctype != type)
6317 break;
6319 /* If we were able to eliminate our operation from the first side,
6320 apply our operation to the second side and reform the PLUS. */
6321 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6322 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6324 /* The last case is if we are a multiply. In that case, we can
6325 apply the distributive law to commute the multiply and addition
6326 if the multiplication of the constants doesn't overflow
6327 and overflow is defined. With undefined overflow
6328 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6329 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6330 return fold_build2 (tcode, ctype,
6331 fold_build2 (code, ctype,
6332 fold_convert (ctype, op0),
6333 fold_convert (ctype, c)),
6334 op1);
6336 break;
6338 case MULT_EXPR:
6339 /* We have a special case here if we are doing something like
6340 (C * 8) % 4 since we know that's zero. */
6341 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6342 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6343 /* If the multiplication can overflow we cannot optimize this. */
6344 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6345 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6346 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6348 *strict_overflow_p = true;
6349 return omit_one_operand (type, integer_zero_node, op0);
6352 /* ... fall through ... */
6354 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6355 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6356 /* If we can extract our operation from the LHS, do so and return a
6357 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6358 do something only if the second operand is a constant. */
6359 if (same_p
6360 && (t1 = extract_muldiv (op0, c, code, wide_type,
6361 strict_overflow_p)) != 0)
6362 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6363 fold_convert (ctype, op1));
6364 else if (tcode == MULT_EXPR && code == MULT_EXPR
6365 && (t1 = extract_muldiv (op1, c, code, wide_type,
6366 strict_overflow_p)) != 0)
6367 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6368 fold_convert (ctype, t1));
6369 else if (TREE_CODE (op1) != INTEGER_CST)
6370 return 0;
6372 /* If these are the same operation types, we can associate them
6373 assuming no overflow. */
6374 if (tcode == code)
6376 bool overflow_p = false;
6377 bool overflow_mul_p;
6378 signop sign = TYPE_SIGN (ctype);
6379 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6380 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6381 if (overflow_mul_p
6382 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6383 overflow_p = true;
6384 if (!overflow_p)
6386 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6387 TYPE_SIGN (TREE_TYPE (op1)));
6388 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6389 wide_int_to_tree (ctype, mul));
6393 /* If these operations "cancel" each other, we have the main
6394 optimizations of this pass, which occur when either constant is a
6395 multiple of the other, in which case we replace this with either an
6396 operation or CODE or TCODE.
6398 If we have an unsigned type, we cannot do this since it will change
6399 the result if the original computation overflowed. */
6400 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6401 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6402 || (tcode == MULT_EXPR
6403 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6404 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6405 && code != MULT_EXPR)))
6407 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6409 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6410 *strict_overflow_p = true;
6411 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6412 fold_convert (ctype,
6413 const_binop (TRUNC_DIV_EXPR,
6414 op1, c)));
6416 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6418 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6419 *strict_overflow_p = true;
6420 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6421 fold_convert (ctype,
6422 const_binop (TRUNC_DIV_EXPR,
6423 c, op1)));
6426 break;
6428 default:
6429 break;
6432 return 0;
6435 /* Return a node which has the indicated constant VALUE (either 0 or
6436 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6437 and is of the indicated TYPE. */
6439 tree
6440 constant_boolean_node (bool value, tree type)
6442 if (type == integer_type_node)
6443 return value ? integer_one_node : integer_zero_node;
6444 else if (type == boolean_type_node)
6445 return value ? boolean_true_node : boolean_false_node;
6446 else if (TREE_CODE (type) == VECTOR_TYPE)
6447 return build_vector_from_val (type,
6448 build_int_cst (TREE_TYPE (type),
6449 value ? -1 : 0));
6450 else
6451 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6455 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6456 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6457 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6458 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6459 COND is the first argument to CODE; otherwise (as in the example
6460 given here), it is the second argument. TYPE is the type of the
6461 original expression. Return NULL_TREE if no simplification is
6462 possible. */
6464 static tree
6465 fold_binary_op_with_conditional_arg (location_t loc,
6466 enum tree_code code,
6467 tree type, tree op0, tree op1,
6468 tree cond, tree arg, int cond_first_p)
6470 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6471 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6472 tree test, true_value, false_value;
6473 tree lhs = NULL_TREE;
6474 tree rhs = NULL_TREE;
6475 enum tree_code cond_code = COND_EXPR;
6477 if (TREE_CODE (cond) == COND_EXPR
6478 || TREE_CODE (cond) == VEC_COND_EXPR)
6480 test = TREE_OPERAND (cond, 0);
6481 true_value = TREE_OPERAND (cond, 1);
6482 false_value = TREE_OPERAND (cond, 2);
6483 /* If this operand throws an expression, then it does not make
6484 sense to try to perform a logical or arithmetic operation
6485 involving it. */
6486 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6487 lhs = true_value;
6488 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6489 rhs = false_value;
6491 else if (!(TREE_CODE (type) != VECTOR_TYPE
6492 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6494 tree testtype = TREE_TYPE (cond);
6495 test = cond;
6496 true_value = constant_boolean_node (true, testtype);
6497 false_value = constant_boolean_node (false, testtype);
6499 else
6500 /* Detect the case of mixing vector and scalar types - bail out. */
6501 return NULL_TREE;
6503 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6504 cond_code = VEC_COND_EXPR;
6506 /* This transformation is only worthwhile if we don't have to wrap ARG
6507 in a SAVE_EXPR and the operation can be simplified without recursing
6508 on at least one of the branches once its pushed inside the COND_EXPR. */
6509 if (!TREE_CONSTANT (arg)
6510 && (TREE_SIDE_EFFECTS (arg)
6511 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6512 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6513 return NULL_TREE;
6515 arg = fold_convert_loc (loc, arg_type, arg);
6516 if (lhs == 0)
6518 true_value = fold_convert_loc (loc, cond_type, true_value);
6519 if (cond_first_p)
6520 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6521 else
6522 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6524 if (rhs == 0)
6526 false_value = fold_convert_loc (loc, cond_type, false_value);
6527 if (cond_first_p)
6528 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6529 else
6530 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6533 /* Check that we have simplified at least one of the branches. */
6534 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6535 return NULL_TREE;
6537 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6541 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6543 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6544 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6545 ADDEND is the same as X.
6547 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6548 and finite. The problematic cases are when X is zero, and its mode
6549 has signed zeros. In the case of rounding towards -infinity,
6550 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6551 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6553 bool
6554 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6556 if (!real_zerop (addend))
6557 return false;
6559 /* Don't allow the fold with -fsignaling-nans. */
6560 if (HONOR_SNANS (element_mode (type)))
6561 return false;
6563 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6564 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6565 return true;
6567 /* In a vector or complex, we would need to check the sign of all zeros. */
6568 if (TREE_CODE (addend) != REAL_CST)
6569 return false;
6571 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6572 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6573 negate = !negate;
6575 /* The mode has signed zeros, and we have to honor their sign.
6576 In this situation, there is only one case we can return true for.
6577 X - 0 is the same as X unless rounding towards -infinity is
6578 supported. */
6579 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6582 /* Subroutine of fold() that optimizes comparisons of a division by
6583 a nonzero integer constant against an integer constant, i.e.
6584 X/C1 op C2.
6586 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6587 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6588 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6590 The function returns the constant folded tree if a simplification
6591 can be made, and NULL_TREE otherwise. */
6593 static tree
6594 fold_div_compare (location_t loc,
6595 enum tree_code code, tree type, tree arg0, tree arg1)
6597 tree prod, tmp, hi, lo;
6598 tree arg00 = TREE_OPERAND (arg0, 0);
6599 tree arg01 = TREE_OPERAND (arg0, 1);
6600 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6601 bool neg_overflow = false;
6602 bool overflow;
6604 /* We have to do this the hard way to detect unsigned overflow.
6605 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6606 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6607 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6608 neg_overflow = false;
6610 if (sign == UNSIGNED)
6612 tmp = int_const_binop (MINUS_EXPR, arg01,
6613 build_int_cst (TREE_TYPE (arg01), 1));
6614 lo = prod;
6616 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6617 val = wi::add (prod, tmp, sign, &overflow);
6618 hi = force_fit_type (TREE_TYPE (arg00), val,
6619 -1, overflow | TREE_OVERFLOW (prod));
6621 else if (tree_int_cst_sgn (arg01) >= 0)
6623 tmp = int_const_binop (MINUS_EXPR, arg01,
6624 build_int_cst (TREE_TYPE (arg01), 1));
6625 switch (tree_int_cst_sgn (arg1))
6627 case -1:
6628 neg_overflow = true;
6629 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6630 hi = prod;
6631 break;
6633 case 0:
6634 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6635 hi = tmp;
6636 break;
6638 case 1:
6639 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6640 lo = prod;
6641 break;
6643 default:
6644 gcc_unreachable ();
6647 else
6649 /* A negative divisor reverses the relational operators. */
6650 code = swap_tree_comparison (code);
6652 tmp = int_const_binop (PLUS_EXPR, arg01,
6653 build_int_cst (TREE_TYPE (arg01), 1));
6654 switch (tree_int_cst_sgn (arg1))
6656 case -1:
6657 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6658 lo = prod;
6659 break;
6661 case 0:
6662 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6663 lo = tmp;
6664 break;
6666 case 1:
6667 neg_overflow = true;
6668 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6669 hi = prod;
6670 break;
6672 default:
6673 gcc_unreachable ();
6677 switch (code)
6679 case EQ_EXPR:
6680 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6681 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6682 if (TREE_OVERFLOW (hi))
6683 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6684 if (TREE_OVERFLOW (lo))
6685 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6686 return build_range_check (loc, type, arg00, 1, lo, hi);
6688 case NE_EXPR:
6689 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6690 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6691 if (TREE_OVERFLOW (hi))
6692 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6693 if (TREE_OVERFLOW (lo))
6694 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6695 return build_range_check (loc, type, arg00, 0, lo, hi);
6697 case LT_EXPR:
6698 if (TREE_OVERFLOW (lo))
6700 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6701 return omit_one_operand_loc (loc, type, tmp, arg00);
6703 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6705 case LE_EXPR:
6706 if (TREE_OVERFLOW (hi))
6708 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6709 return omit_one_operand_loc (loc, type, tmp, arg00);
6711 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6713 case GT_EXPR:
6714 if (TREE_OVERFLOW (hi))
6716 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6717 return omit_one_operand_loc (loc, type, tmp, arg00);
6719 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6721 case GE_EXPR:
6722 if (TREE_OVERFLOW (lo))
6724 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6725 return omit_one_operand_loc (loc, type, tmp, arg00);
6727 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6729 default:
6730 break;
6733 return NULL_TREE;
6737 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6738 equality/inequality test, then return a simplified form of the test
6739 using a sign testing. Otherwise return NULL. TYPE is the desired
6740 result type. */
6742 static tree
6743 fold_single_bit_test_into_sign_test (location_t loc,
6744 enum tree_code code, tree arg0, tree arg1,
6745 tree result_type)
6747 /* If this is testing a single bit, we can optimize the test. */
6748 if ((code == NE_EXPR || code == EQ_EXPR)
6749 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6750 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6752 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6753 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6754 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6756 if (arg00 != NULL_TREE
6757 /* This is only a win if casting to a signed type is cheap,
6758 i.e. when arg00's type is not a partial mode. */
6759 && TYPE_PRECISION (TREE_TYPE (arg00))
6760 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6762 tree stype = signed_type_for (TREE_TYPE (arg00));
6763 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6764 result_type,
6765 fold_convert_loc (loc, stype, arg00),
6766 build_int_cst (stype, 0));
6770 return NULL_TREE;
6773 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6774 equality/inequality test, then return a simplified form of
6775 the test using shifts and logical operations. Otherwise return
6776 NULL. TYPE is the desired result type. */
6778 tree
6779 fold_single_bit_test (location_t loc, enum tree_code code,
6780 tree arg0, tree arg1, tree result_type)
6782 /* If this is testing a single bit, we can optimize the test. */
6783 if ((code == NE_EXPR || code == EQ_EXPR)
6784 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6785 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6787 tree inner = TREE_OPERAND (arg0, 0);
6788 tree type = TREE_TYPE (arg0);
6789 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6790 machine_mode operand_mode = TYPE_MODE (type);
6791 int ops_unsigned;
6792 tree signed_type, unsigned_type, intermediate_type;
6793 tree tem, one;
6795 /* First, see if we can fold the single bit test into a sign-bit
6796 test. */
6797 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6798 result_type);
6799 if (tem)
6800 return tem;
6802 /* Otherwise we have (A & C) != 0 where C is a single bit,
6803 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6804 Similarly for (A & C) == 0. */
6806 /* If INNER is a right shift of a constant and it plus BITNUM does
6807 not overflow, adjust BITNUM and INNER. */
6808 if (TREE_CODE (inner) == RSHIFT_EXPR
6809 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6810 && bitnum < TYPE_PRECISION (type)
6811 && wi::ltu_p (TREE_OPERAND (inner, 1),
6812 TYPE_PRECISION (type) - bitnum))
6814 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6815 inner = TREE_OPERAND (inner, 0);
6818 /* If we are going to be able to omit the AND below, we must do our
6819 operations as unsigned. If we must use the AND, we have a choice.
6820 Normally unsigned is faster, but for some machines signed is. */
6821 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6822 && !flag_syntax_only) ? 0 : 1;
6824 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6825 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6826 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6827 inner = fold_convert_loc (loc, intermediate_type, inner);
6829 if (bitnum != 0)
6830 inner = build2 (RSHIFT_EXPR, intermediate_type,
6831 inner, size_int (bitnum));
6833 one = build_int_cst (intermediate_type, 1);
6835 if (code == EQ_EXPR)
6836 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6838 /* Put the AND last so it can combine with more things. */
6839 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6841 /* Make sure to return the proper type. */
6842 inner = fold_convert_loc (loc, result_type, inner);
6844 return inner;
6846 return NULL_TREE;
6849 /* Check whether we are allowed to reorder operands arg0 and arg1,
6850 such that the evaluation of arg1 occurs before arg0. */
6852 static bool
6853 reorder_operands_p (const_tree arg0, const_tree arg1)
6855 if (! flag_evaluation_order)
6856 return true;
6857 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6858 return true;
6859 return ! TREE_SIDE_EFFECTS (arg0)
6860 && ! TREE_SIDE_EFFECTS (arg1);
6863 /* Test whether it is preferable two swap two operands, ARG0 and
6864 ARG1, for example because ARG0 is an integer constant and ARG1
6865 isn't. If REORDER is true, only recommend swapping if we can
6866 evaluate the operands in reverse order. */
6868 bool
6869 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6871 if (CONSTANT_CLASS_P (arg1))
6872 return 0;
6873 if (CONSTANT_CLASS_P (arg0))
6874 return 1;
6876 STRIP_NOPS (arg0);
6877 STRIP_NOPS (arg1);
6879 if (TREE_CONSTANT (arg1))
6880 return 0;
6881 if (TREE_CONSTANT (arg0))
6882 return 1;
6884 if (reorder && flag_evaluation_order
6885 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6886 return 0;
6888 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6889 for commutative and comparison operators. Ensuring a canonical
6890 form allows the optimizers to find additional redundancies without
6891 having to explicitly check for both orderings. */
6892 if (TREE_CODE (arg0) == SSA_NAME
6893 && TREE_CODE (arg1) == SSA_NAME
6894 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6895 return 1;
6897 /* Put SSA_NAMEs last. */
6898 if (TREE_CODE (arg1) == SSA_NAME)
6899 return 0;
6900 if (TREE_CODE (arg0) == SSA_NAME)
6901 return 1;
6903 /* Put variables last. */
6904 if (DECL_P (arg1))
6905 return 0;
6906 if (DECL_P (arg0))
6907 return 1;
6909 return 0;
6913 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6914 means A >= Y && A != MAX, but in this case we know that
6915 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6917 static tree
6918 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6920 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6922 if (TREE_CODE (bound) == LT_EXPR)
6923 a = TREE_OPERAND (bound, 0);
6924 else if (TREE_CODE (bound) == GT_EXPR)
6925 a = TREE_OPERAND (bound, 1);
6926 else
6927 return NULL_TREE;
6929 typea = TREE_TYPE (a);
6930 if (!INTEGRAL_TYPE_P (typea)
6931 && !POINTER_TYPE_P (typea))
6932 return NULL_TREE;
6934 if (TREE_CODE (ineq) == LT_EXPR)
6936 a1 = TREE_OPERAND (ineq, 1);
6937 y = TREE_OPERAND (ineq, 0);
6939 else if (TREE_CODE (ineq) == GT_EXPR)
6941 a1 = TREE_OPERAND (ineq, 0);
6942 y = TREE_OPERAND (ineq, 1);
6944 else
6945 return NULL_TREE;
6947 if (TREE_TYPE (a1) != typea)
6948 return NULL_TREE;
6950 if (POINTER_TYPE_P (typea))
6952 /* Convert the pointer types into integer before taking the difference. */
6953 tree ta = fold_convert_loc (loc, ssizetype, a);
6954 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6955 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6957 else
6958 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6960 if (!diff || !integer_onep (diff))
6961 return NULL_TREE;
6963 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6966 /* Fold a sum or difference of at least one multiplication.
6967 Returns the folded tree or NULL if no simplification could be made. */
6969 static tree
6970 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6971 tree arg0, tree arg1)
6973 tree arg00, arg01, arg10, arg11;
6974 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6976 /* (A * C) +- (B * C) -> (A+-B) * C.
6977 (A * C) +- A -> A * (C+-1).
6978 We are most concerned about the case where C is a constant,
6979 but other combinations show up during loop reduction. Since
6980 it is not difficult, try all four possibilities. */
6982 if (TREE_CODE (arg0) == MULT_EXPR)
6984 arg00 = TREE_OPERAND (arg0, 0);
6985 arg01 = TREE_OPERAND (arg0, 1);
6987 else if (TREE_CODE (arg0) == INTEGER_CST)
6989 arg00 = build_one_cst (type);
6990 arg01 = arg0;
6992 else
6994 /* We cannot generate constant 1 for fract. */
6995 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6996 return NULL_TREE;
6997 arg00 = arg0;
6998 arg01 = build_one_cst (type);
7000 if (TREE_CODE (arg1) == MULT_EXPR)
7002 arg10 = TREE_OPERAND (arg1, 0);
7003 arg11 = TREE_OPERAND (arg1, 1);
7005 else if (TREE_CODE (arg1) == INTEGER_CST)
7007 arg10 = build_one_cst (type);
7008 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7009 the purpose of this canonicalization. */
7010 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7011 && negate_expr_p (arg1)
7012 && code == PLUS_EXPR)
7014 arg11 = negate_expr (arg1);
7015 code = MINUS_EXPR;
7017 else
7018 arg11 = arg1;
7020 else
7022 /* We cannot generate constant 1 for fract. */
7023 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7024 return NULL_TREE;
7025 arg10 = arg1;
7026 arg11 = build_one_cst (type);
7028 same = NULL_TREE;
7030 if (operand_equal_p (arg01, arg11, 0))
7031 same = arg01, alt0 = arg00, alt1 = arg10;
7032 else if (operand_equal_p (arg00, arg10, 0))
7033 same = arg00, alt0 = arg01, alt1 = arg11;
7034 else if (operand_equal_p (arg00, arg11, 0))
7035 same = arg00, alt0 = arg01, alt1 = arg10;
7036 else if (operand_equal_p (arg01, arg10, 0))
7037 same = arg01, alt0 = arg00, alt1 = arg11;
7039 /* No identical multiplicands; see if we can find a common
7040 power-of-two factor in non-power-of-two multiplies. This
7041 can help in multi-dimensional array access. */
7042 else if (tree_fits_shwi_p (arg01)
7043 && tree_fits_shwi_p (arg11))
7045 HOST_WIDE_INT int01, int11, tmp;
7046 bool swap = false;
7047 tree maybe_same;
7048 int01 = tree_to_shwi (arg01);
7049 int11 = tree_to_shwi (arg11);
7051 /* Move min of absolute values to int11. */
7052 if (absu_hwi (int01) < absu_hwi (int11))
7054 tmp = int01, int01 = int11, int11 = tmp;
7055 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7056 maybe_same = arg01;
7057 swap = true;
7059 else
7060 maybe_same = arg11;
7062 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7063 /* The remainder should not be a constant, otherwise we
7064 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7065 increased the number of multiplications necessary. */
7066 && TREE_CODE (arg10) != INTEGER_CST)
7068 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7069 build_int_cst (TREE_TYPE (arg00),
7070 int01 / int11));
7071 alt1 = arg10;
7072 same = maybe_same;
7073 if (swap)
7074 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7078 if (same)
7079 return fold_build2_loc (loc, MULT_EXPR, type,
7080 fold_build2_loc (loc, code, type,
7081 fold_convert_loc (loc, type, alt0),
7082 fold_convert_loc (loc, type, alt1)),
7083 fold_convert_loc (loc, type, same));
7085 return NULL_TREE;
7088 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7089 specified by EXPR into the buffer PTR of length LEN bytes.
7090 Return the number of bytes placed in the buffer, or zero
7091 upon failure. */
7093 static int
7094 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7096 tree type = TREE_TYPE (expr);
7097 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7098 int byte, offset, word, words;
7099 unsigned char value;
7101 if ((off == -1 && total_bytes > len)
7102 || off >= total_bytes)
7103 return 0;
7104 if (off == -1)
7105 off = 0;
7106 words = total_bytes / UNITS_PER_WORD;
7108 for (byte = 0; byte < total_bytes; byte++)
7110 int bitpos = byte * BITS_PER_UNIT;
7111 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7112 number of bytes. */
7113 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7115 if (total_bytes > UNITS_PER_WORD)
7117 word = byte / UNITS_PER_WORD;
7118 if (WORDS_BIG_ENDIAN)
7119 word = (words - 1) - word;
7120 offset = word * UNITS_PER_WORD;
7121 if (BYTES_BIG_ENDIAN)
7122 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7123 else
7124 offset += byte % UNITS_PER_WORD;
7126 else
7127 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7128 if (offset >= off
7129 && offset - off < len)
7130 ptr[offset - off] = value;
7132 return MIN (len, total_bytes - off);
7136 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7137 specified by EXPR into the buffer PTR of length LEN bytes.
7138 Return the number of bytes placed in the buffer, or zero
7139 upon failure. */
7141 static int
7142 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7144 tree type = TREE_TYPE (expr);
7145 machine_mode mode = TYPE_MODE (type);
7146 int total_bytes = GET_MODE_SIZE (mode);
7147 FIXED_VALUE_TYPE value;
7148 tree i_value, i_type;
7150 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7151 return 0;
7153 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7155 if (NULL_TREE == i_type
7156 || TYPE_PRECISION (i_type) != total_bytes)
7157 return 0;
7159 value = TREE_FIXED_CST (expr);
7160 i_value = double_int_to_tree (i_type, value.data);
7162 return native_encode_int (i_value, ptr, len, off);
7166 /* Subroutine of native_encode_expr. Encode the REAL_CST
7167 specified by EXPR into the buffer PTR of length LEN bytes.
7168 Return the number of bytes placed in the buffer, or zero
7169 upon failure. */
7171 static int
7172 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7174 tree type = TREE_TYPE (expr);
7175 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7176 int byte, offset, word, words, bitpos;
7177 unsigned char value;
7179 /* There are always 32 bits in each long, no matter the size of
7180 the hosts long. We handle floating point representations with
7181 up to 192 bits. */
7182 long tmp[6];
7184 if ((off == -1 && total_bytes > len)
7185 || off >= total_bytes)
7186 return 0;
7187 if (off == -1)
7188 off = 0;
7189 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7191 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7193 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7194 bitpos += BITS_PER_UNIT)
7196 byte = (bitpos / BITS_PER_UNIT) & 3;
7197 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7199 if (UNITS_PER_WORD < 4)
7201 word = byte / UNITS_PER_WORD;
7202 if (WORDS_BIG_ENDIAN)
7203 word = (words - 1) - word;
7204 offset = word * UNITS_PER_WORD;
7205 if (BYTES_BIG_ENDIAN)
7206 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7207 else
7208 offset += byte % UNITS_PER_WORD;
7210 else
7211 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7212 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7213 if (offset >= off
7214 && offset - off < len)
7215 ptr[offset - off] = value;
7217 return MIN (len, total_bytes - off);
7220 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7221 specified by EXPR into the buffer PTR of length LEN bytes.
7222 Return the number of bytes placed in the buffer, or zero
7223 upon failure. */
7225 static int
7226 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7228 int rsize, isize;
7229 tree part;
7231 part = TREE_REALPART (expr);
7232 rsize = native_encode_expr (part, ptr, len, off);
7233 if (off == -1
7234 && rsize == 0)
7235 return 0;
7236 part = TREE_IMAGPART (expr);
7237 if (off != -1)
7238 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7239 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7240 if (off == -1
7241 && isize != rsize)
7242 return 0;
7243 return rsize + isize;
7247 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7248 specified by EXPR into the buffer PTR of length LEN bytes.
7249 Return the number of bytes placed in the buffer, or zero
7250 upon failure. */
7252 static int
7253 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7255 unsigned i, count;
7256 int size, offset;
7257 tree itype, elem;
7259 offset = 0;
7260 count = VECTOR_CST_NELTS (expr);
7261 itype = TREE_TYPE (TREE_TYPE (expr));
7262 size = GET_MODE_SIZE (TYPE_MODE (itype));
7263 for (i = 0; i < count; i++)
7265 if (off >= size)
7267 off -= size;
7268 continue;
7270 elem = VECTOR_CST_ELT (expr, i);
7271 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7272 if ((off == -1 && res != size)
7273 || res == 0)
7274 return 0;
7275 offset += res;
7276 if (offset >= len)
7277 return offset;
7278 if (off != -1)
7279 off = 0;
7281 return offset;
7285 /* Subroutine of native_encode_expr. Encode the STRING_CST
7286 specified by EXPR into the buffer PTR of length LEN bytes.
7287 Return the number of bytes placed in the buffer, or zero
7288 upon failure. */
7290 static int
7291 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7293 tree type = TREE_TYPE (expr);
7294 HOST_WIDE_INT total_bytes;
7296 if (TREE_CODE (type) != ARRAY_TYPE
7297 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7298 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7299 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7300 return 0;
7301 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7302 if ((off == -1 && total_bytes > len)
7303 || off >= total_bytes)
7304 return 0;
7305 if (off == -1)
7306 off = 0;
7307 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7309 int written = 0;
7310 if (off < TREE_STRING_LENGTH (expr))
7312 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7313 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7315 memset (ptr + written, 0,
7316 MIN (total_bytes - written, len - written));
7318 else
7319 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7320 return MIN (total_bytes - off, len);
7324 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7325 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7326 buffer PTR of length LEN bytes. If OFF is not -1 then start
7327 the encoding at byte offset OFF and encode at most LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero upon failure. */
7331 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7333 /* We don't support starting at negative offset and -1 is special. */
7334 if (off < -1)
7335 return 0;
7337 switch (TREE_CODE (expr))
7339 case INTEGER_CST:
7340 return native_encode_int (expr, ptr, len, off);
7342 case REAL_CST:
7343 return native_encode_real (expr, ptr, len, off);
7345 case FIXED_CST:
7346 return native_encode_fixed (expr, ptr, len, off);
7348 case COMPLEX_CST:
7349 return native_encode_complex (expr, ptr, len, off);
7351 case VECTOR_CST:
7352 return native_encode_vector (expr, ptr, len, off);
7354 case STRING_CST:
7355 return native_encode_string (expr, ptr, len, off);
7357 default:
7358 return 0;
7363 /* Subroutine of native_interpret_expr. Interpret the contents of
7364 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7365 If the buffer cannot be interpreted, return NULL_TREE. */
7367 static tree
7368 native_interpret_int (tree type, const unsigned char *ptr, int len)
7370 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7372 if (total_bytes > len
7373 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7374 return NULL_TREE;
7376 wide_int result = wi::from_buffer (ptr, total_bytes);
7378 return wide_int_to_tree (type, result);
7382 /* Subroutine of native_interpret_expr. Interpret the contents of
7383 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7384 If the buffer cannot be interpreted, return NULL_TREE. */
7386 static tree
7387 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7389 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7390 double_int result;
7391 FIXED_VALUE_TYPE fixed_value;
7393 if (total_bytes > len
7394 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7395 return NULL_TREE;
7397 result = double_int::from_buffer (ptr, total_bytes);
7398 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7400 return build_fixed (type, fixed_value);
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7408 static tree
7409 native_interpret_real (tree type, const unsigned char *ptr, int len)
7411 machine_mode mode = TYPE_MODE (type);
7412 int total_bytes = GET_MODE_SIZE (mode);
7413 unsigned char value;
7414 /* There are always 32 bits in each long, no matter the size of
7415 the hosts long. We handle floating point representations with
7416 up to 192 bits. */
7417 REAL_VALUE_TYPE r;
7418 long tmp[6];
7420 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7421 if (total_bytes > len || total_bytes > 24)
7422 return NULL_TREE;
7423 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7425 memset (tmp, 0, sizeof (tmp));
7426 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7427 bitpos += BITS_PER_UNIT)
7429 /* Both OFFSET and BYTE index within a long;
7430 bitpos indexes the whole float. */
7431 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7432 if (UNITS_PER_WORD < 4)
7434 int word = byte / UNITS_PER_WORD;
7435 if (WORDS_BIG_ENDIAN)
7436 word = (words - 1) - word;
7437 offset = word * UNITS_PER_WORD;
7438 if (BYTES_BIG_ENDIAN)
7439 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7440 else
7441 offset += byte % UNITS_PER_WORD;
7443 else
7445 offset = byte;
7446 if (BYTES_BIG_ENDIAN)
7448 /* Reverse bytes within each long, or within the entire float
7449 if it's smaller than a long (for HFmode). */
7450 offset = MIN (3, total_bytes - 1) - offset;
7451 gcc_assert (offset >= 0);
7454 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7456 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7459 real_from_target (&r, tmp, mode);
7460 return build_real (type, r);
7464 /* Subroutine of native_interpret_expr. Interpret the contents of
7465 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7466 If the buffer cannot be interpreted, return NULL_TREE. */
7468 static tree
7469 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7471 tree etype, rpart, ipart;
7472 int size;
7474 etype = TREE_TYPE (type);
7475 size = GET_MODE_SIZE (TYPE_MODE (etype));
7476 if (size * 2 > len)
7477 return NULL_TREE;
7478 rpart = native_interpret_expr (etype, ptr, size);
7479 if (!rpart)
7480 return NULL_TREE;
7481 ipart = native_interpret_expr (etype, ptr+size, size);
7482 if (!ipart)
7483 return NULL_TREE;
7484 return build_complex (type, rpart, ipart);
7488 /* Subroutine of native_interpret_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7490 If the buffer cannot be interpreted, return NULL_TREE. */
7492 static tree
7493 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7495 tree etype, elem;
7496 int i, size, count;
7497 tree *elements;
7499 etype = TREE_TYPE (type);
7500 size = GET_MODE_SIZE (TYPE_MODE (etype));
7501 count = TYPE_VECTOR_SUBPARTS (type);
7502 if (size * count > len)
7503 return NULL_TREE;
7505 elements = XALLOCAVEC (tree, count);
7506 for (i = count - 1; i >= 0; i--)
7508 elem = native_interpret_expr (etype, ptr+(i*size), size);
7509 if (!elem)
7510 return NULL_TREE;
7511 elements[i] = elem;
7513 return build_vector (type, elements);
7517 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7518 the buffer PTR of length LEN as a constant of type TYPE. For
7519 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7520 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7521 return NULL_TREE. */
7523 tree
7524 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7526 switch (TREE_CODE (type))
7528 case INTEGER_TYPE:
7529 case ENUMERAL_TYPE:
7530 case BOOLEAN_TYPE:
7531 case POINTER_TYPE:
7532 case REFERENCE_TYPE:
7533 return native_interpret_int (type, ptr, len);
7535 case REAL_TYPE:
7536 return native_interpret_real (type, ptr, len);
7538 case FIXED_POINT_TYPE:
7539 return native_interpret_fixed (type, ptr, len);
7541 case COMPLEX_TYPE:
7542 return native_interpret_complex (type, ptr, len);
7544 case VECTOR_TYPE:
7545 return native_interpret_vector (type, ptr, len);
7547 default:
7548 return NULL_TREE;
7552 /* Returns true if we can interpret the contents of a native encoding
7553 as TYPE. */
7555 static bool
7556 can_native_interpret_type_p (tree type)
7558 switch (TREE_CODE (type))
7560 case INTEGER_TYPE:
7561 case ENUMERAL_TYPE:
7562 case BOOLEAN_TYPE:
7563 case POINTER_TYPE:
7564 case REFERENCE_TYPE:
7565 case FIXED_POINT_TYPE:
7566 case REAL_TYPE:
7567 case COMPLEX_TYPE:
7568 case VECTOR_TYPE:
7569 return true;
7570 default:
7571 return false;
7575 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7576 TYPE at compile-time. If we're unable to perform the conversion
7577 return NULL_TREE. */
7579 static tree
7580 fold_view_convert_expr (tree type, tree expr)
7582 /* We support up to 512-bit values (for V8DFmode). */
7583 unsigned char buffer[64];
7584 int len;
7586 /* Check that the host and target are sane. */
7587 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7588 return NULL_TREE;
7590 len = native_encode_expr (expr, buffer, sizeof (buffer));
7591 if (len == 0)
7592 return NULL_TREE;
7594 return native_interpret_expr (type, buffer, len);
7597 /* Build an expression for the address of T. Folds away INDIRECT_REF
7598 to avoid confusing the gimplify process. */
7600 tree
7601 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7603 /* The size of the object is not relevant when talking about its address. */
7604 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7605 t = TREE_OPERAND (t, 0);
7607 if (TREE_CODE (t) == INDIRECT_REF)
7609 t = TREE_OPERAND (t, 0);
7611 if (TREE_TYPE (t) != ptrtype)
7612 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7614 else if (TREE_CODE (t) == MEM_REF
7615 && integer_zerop (TREE_OPERAND (t, 1)))
7616 return TREE_OPERAND (t, 0);
7617 else if (TREE_CODE (t) == MEM_REF
7618 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7619 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7620 TREE_OPERAND (t, 0),
7621 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7622 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7624 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7626 if (TREE_TYPE (t) != ptrtype)
7627 t = fold_convert_loc (loc, ptrtype, t);
7629 else
7630 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7632 return t;
7635 /* Build an expression for the address of T. */
7637 tree
7638 build_fold_addr_expr_loc (location_t loc, tree t)
7640 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7642 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7645 /* Fold a unary expression of code CODE and type TYPE with operand
7646 OP0. Return the folded expression if folding is successful.
7647 Otherwise, return NULL_TREE. */
7649 tree
7650 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7652 tree tem;
7653 tree arg0;
7654 enum tree_code_class kind = TREE_CODE_CLASS (code);
7656 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7657 && TREE_CODE_LENGTH (code) == 1);
7659 arg0 = op0;
7660 if (arg0)
7662 if (CONVERT_EXPR_CODE_P (code)
7663 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7665 /* Don't use STRIP_NOPS, because signedness of argument type
7666 matters. */
7667 STRIP_SIGN_NOPS (arg0);
7669 else
7671 /* Strip any conversions that don't change the mode. This
7672 is safe for every expression, except for a comparison
7673 expression because its signedness is derived from its
7674 operands.
7676 Note that this is done as an internal manipulation within
7677 the constant folder, in order to find the simplest
7678 representation of the arguments so that their form can be
7679 studied. In any cases, the appropriate type conversions
7680 should be put back in the tree that will get out of the
7681 constant folder. */
7682 STRIP_NOPS (arg0);
7685 if (CONSTANT_CLASS_P (arg0))
7687 tree tem = const_unop (code, type, arg0);
7688 if (tem)
7690 if (TREE_TYPE (tem) != type)
7691 tem = fold_convert_loc (loc, type, tem);
7692 return tem;
7697 tem = generic_simplify (loc, code, type, op0);
7698 if (tem)
7699 return tem;
7701 if (TREE_CODE_CLASS (code) == tcc_unary)
7703 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7704 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7705 fold_build1_loc (loc, code, type,
7706 fold_convert_loc (loc, TREE_TYPE (op0),
7707 TREE_OPERAND (arg0, 1))));
7708 else if (TREE_CODE (arg0) == COND_EXPR)
7710 tree arg01 = TREE_OPERAND (arg0, 1);
7711 tree arg02 = TREE_OPERAND (arg0, 2);
7712 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7713 arg01 = fold_build1_loc (loc, code, type,
7714 fold_convert_loc (loc,
7715 TREE_TYPE (op0), arg01));
7716 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7717 arg02 = fold_build1_loc (loc, code, type,
7718 fold_convert_loc (loc,
7719 TREE_TYPE (op0), arg02));
7720 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7721 arg01, arg02);
7723 /* If this was a conversion, and all we did was to move into
7724 inside the COND_EXPR, bring it back out. But leave it if
7725 it is a conversion from integer to integer and the
7726 result precision is no wider than a word since such a
7727 conversion is cheap and may be optimized away by combine,
7728 while it couldn't if it were outside the COND_EXPR. Then return
7729 so we don't get into an infinite recursion loop taking the
7730 conversion out and then back in. */
7732 if ((CONVERT_EXPR_CODE_P (code)
7733 || code == NON_LVALUE_EXPR)
7734 && TREE_CODE (tem) == COND_EXPR
7735 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7736 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7737 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7738 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7739 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7740 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7741 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7742 && (INTEGRAL_TYPE_P
7743 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7744 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7745 || flag_syntax_only))
7746 tem = build1_loc (loc, code, type,
7747 build3 (COND_EXPR,
7748 TREE_TYPE (TREE_OPERAND
7749 (TREE_OPERAND (tem, 1), 0)),
7750 TREE_OPERAND (tem, 0),
7751 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7752 TREE_OPERAND (TREE_OPERAND (tem, 2),
7753 0)));
7754 return tem;
7758 switch (code)
7760 case NON_LVALUE_EXPR:
7761 if (!maybe_lvalue_p (op0))
7762 return fold_convert_loc (loc, type, op0);
7763 return NULL_TREE;
7765 CASE_CONVERT:
7766 case FLOAT_EXPR:
7767 case FIX_TRUNC_EXPR:
7768 if (COMPARISON_CLASS_P (op0))
7770 /* If we have (type) (a CMP b) and type is an integral type, return
7771 new expression involving the new type. Canonicalize
7772 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7773 non-integral type.
7774 Do not fold the result as that would not simplify further, also
7775 folding again results in recursions. */
7776 if (TREE_CODE (type) == BOOLEAN_TYPE)
7777 return build2_loc (loc, TREE_CODE (op0), type,
7778 TREE_OPERAND (op0, 0),
7779 TREE_OPERAND (op0, 1));
7780 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7781 && TREE_CODE (type) != VECTOR_TYPE)
7782 return build3_loc (loc, COND_EXPR, type, op0,
7783 constant_boolean_node (true, type),
7784 constant_boolean_node (false, type));
7787 /* Handle (T *)&A.B.C for A being of type T and B and C
7788 living at offset zero. This occurs frequently in
7789 C++ upcasting and then accessing the base. */
7790 if (TREE_CODE (op0) == ADDR_EXPR
7791 && POINTER_TYPE_P (type)
7792 && handled_component_p (TREE_OPERAND (op0, 0)))
7794 HOST_WIDE_INT bitsize, bitpos;
7795 tree offset;
7796 machine_mode mode;
7797 int unsignedp, reversep, volatilep;
7798 tree base
7799 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7800 &offset, &mode, &unsignedp, &reversep,
7801 &volatilep, false);
7802 /* If the reference was to a (constant) zero offset, we can use
7803 the address of the base if it has the same base type
7804 as the result type and the pointer type is unqualified. */
7805 if (! offset && bitpos == 0
7806 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7807 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7808 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7809 return fold_convert_loc (loc, type,
7810 build_fold_addr_expr_loc (loc, base));
7813 if (TREE_CODE (op0) == MODIFY_EXPR
7814 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7815 /* Detect assigning a bitfield. */
7816 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7817 && DECL_BIT_FIELD
7818 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7820 /* Don't leave an assignment inside a conversion
7821 unless assigning a bitfield. */
7822 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7823 /* First do the assignment, then return converted constant. */
7824 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7825 TREE_NO_WARNING (tem) = 1;
7826 TREE_USED (tem) = 1;
7827 return tem;
7830 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7831 constants (if x has signed type, the sign bit cannot be set
7832 in c). This folds extension into the BIT_AND_EXPR.
7833 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7834 very likely don't have maximal range for their precision and this
7835 transformation effectively doesn't preserve non-maximal ranges. */
7836 if (TREE_CODE (type) == INTEGER_TYPE
7837 && TREE_CODE (op0) == BIT_AND_EXPR
7838 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7840 tree and_expr = op0;
7841 tree and0 = TREE_OPERAND (and_expr, 0);
7842 tree and1 = TREE_OPERAND (and_expr, 1);
7843 int change = 0;
7845 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7846 || (TYPE_PRECISION (type)
7847 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7848 change = 1;
7849 else if (TYPE_PRECISION (TREE_TYPE (and1))
7850 <= HOST_BITS_PER_WIDE_INT
7851 && tree_fits_uhwi_p (and1))
7853 unsigned HOST_WIDE_INT cst;
7855 cst = tree_to_uhwi (and1);
7856 cst &= HOST_WIDE_INT_M1U
7857 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7858 change = (cst == 0);
7859 if (change
7860 && !flag_syntax_only
7861 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7862 == ZERO_EXTEND))
7864 tree uns = unsigned_type_for (TREE_TYPE (and0));
7865 and0 = fold_convert_loc (loc, uns, and0);
7866 and1 = fold_convert_loc (loc, uns, and1);
7869 if (change)
7871 tem = force_fit_type (type, wi::to_widest (and1), 0,
7872 TREE_OVERFLOW (and1));
7873 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7874 fold_convert_loc (loc, type, and0), tem);
7878 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7879 cast (T1)X will fold away. We assume that this happens when X itself
7880 is a cast. */
7881 if (POINTER_TYPE_P (type)
7882 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7883 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7885 tree arg00 = TREE_OPERAND (arg0, 0);
7886 tree arg01 = TREE_OPERAND (arg0, 1);
7888 return fold_build_pointer_plus_loc
7889 (loc, fold_convert_loc (loc, type, arg00), arg01);
7892 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7893 of the same precision, and X is an integer type not narrower than
7894 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7895 if (INTEGRAL_TYPE_P (type)
7896 && TREE_CODE (op0) == BIT_NOT_EXPR
7897 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7898 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7899 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7901 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7902 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7903 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7904 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7905 fold_convert_loc (loc, type, tem));
7908 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7909 type of X and Y (integer types only). */
7910 if (INTEGRAL_TYPE_P (type)
7911 && TREE_CODE (op0) == MULT_EXPR
7912 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7913 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7915 /* Be careful not to introduce new overflows. */
7916 tree mult_type;
7917 if (TYPE_OVERFLOW_WRAPS (type))
7918 mult_type = type;
7919 else
7920 mult_type = unsigned_type_for (type);
7922 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7924 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7925 fold_convert_loc (loc, mult_type,
7926 TREE_OPERAND (op0, 0)),
7927 fold_convert_loc (loc, mult_type,
7928 TREE_OPERAND (op0, 1)));
7929 return fold_convert_loc (loc, type, tem);
7933 return NULL_TREE;
7935 case VIEW_CONVERT_EXPR:
7936 if (TREE_CODE (op0) == MEM_REF)
7938 tem = fold_build2_loc (loc, MEM_REF, type,
7939 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7940 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7941 return tem;
7944 return NULL_TREE;
7946 case NEGATE_EXPR:
7947 tem = fold_negate_expr (loc, arg0);
7948 if (tem)
7949 return fold_convert_loc (loc, type, tem);
7950 return NULL_TREE;
7952 case ABS_EXPR:
7953 /* Convert fabs((double)float) into (double)fabsf(float). */
7954 if (TREE_CODE (arg0) == NOP_EXPR
7955 && TREE_CODE (type) == REAL_TYPE)
7957 tree targ0 = strip_float_extensions (arg0);
7958 if (targ0 != arg0)
7959 return fold_convert_loc (loc, type,
7960 fold_build1_loc (loc, ABS_EXPR,
7961 TREE_TYPE (targ0),
7962 targ0));
7964 return NULL_TREE;
7966 case BIT_NOT_EXPR:
7967 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7968 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7969 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7970 fold_convert_loc (loc, type,
7971 TREE_OPERAND (arg0, 0)))))
7972 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7973 fold_convert_loc (loc, type,
7974 TREE_OPERAND (arg0, 1)));
7975 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7976 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7977 fold_convert_loc (loc, type,
7978 TREE_OPERAND (arg0, 1)))))
7979 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7980 fold_convert_loc (loc, type,
7981 TREE_OPERAND (arg0, 0)), tem);
7983 return NULL_TREE;
7985 case TRUTH_NOT_EXPR:
7986 /* Note that the operand of this must be an int
7987 and its values must be 0 or 1.
7988 ("true" is a fixed value perhaps depending on the language,
7989 but we don't handle values other than 1 correctly yet.) */
7990 tem = fold_truth_not_expr (loc, arg0);
7991 if (!tem)
7992 return NULL_TREE;
7993 return fold_convert_loc (loc, type, tem);
7995 case INDIRECT_REF:
7996 /* Fold *&X to X if X is an lvalue. */
7997 if (TREE_CODE (op0) == ADDR_EXPR)
7999 tree op00 = TREE_OPERAND (op0, 0);
8000 if ((TREE_CODE (op00) == VAR_DECL
8001 || TREE_CODE (op00) == PARM_DECL
8002 || TREE_CODE (op00) == RESULT_DECL)
8003 && !TREE_READONLY (op00))
8004 return op00;
8006 return NULL_TREE;
8008 default:
8009 return NULL_TREE;
8010 } /* switch (code) */
8014 /* If the operation was a conversion do _not_ mark a resulting constant
8015 with TREE_OVERFLOW if the original constant was not. These conversions
8016 have implementation defined behavior and retaining the TREE_OVERFLOW
8017 flag here would confuse later passes such as VRP. */
8018 tree
8019 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8020 tree type, tree op0)
8022 tree res = fold_unary_loc (loc, code, type, op0);
8023 if (res
8024 && TREE_CODE (res) == INTEGER_CST
8025 && TREE_CODE (op0) == INTEGER_CST
8026 && CONVERT_EXPR_CODE_P (code))
8027 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8029 return res;
8032 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8033 operands OP0 and OP1. LOC is the location of the resulting expression.
8034 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8035 Return the folded expression if folding is successful. Otherwise,
8036 return NULL_TREE. */
8037 static tree
8038 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8039 tree arg0, tree arg1, tree op0, tree op1)
8041 tree tem;
8043 /* We only do these simplifications if we are optimizing. */
8044 if (!optimize)
8045 return NULL_TREE;
8047 /* Check for things like (A || B) && (A || C). We can convert this
8048 to A || (B && C). Note that either operator can be any of the four
8049 truth and/or operations and the transformation will still be
8050 valid. Also note that we only care about order for the
8051 ANDIF and ORIF operators. If B contains side effects, this
8052 might change the truth-value of A. */
8053 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8054 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8055 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8056 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8057 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8058 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8060 tree a00 = TREE_OPERAND (arg0, 0);
8061 tree a01 = TREE_OPERAND (arg0, 1);
8062 tree a10 = TREE_OPERAND (arg1, 0);
8063 tree a11 = TREE_OPERAND (arg1, 1);
8064 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8065 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8066 && (code == TRUTH_AND_EXPR
8067 || code == TRUTH_OR_EXPR));
8069 if (operand_equal_p (a00, a10, 0))
8070 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8071 fold_build2_loc (loc, code, type, a01, a11));
8072 else if (commutative && operand_equal_p (a00, a11, 0))
8073 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8074 fold_build2_loc (loc, code, type, a01, a10));
8075 else if (commutative && operand_equal_p (a01, a10, 0))
8076 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8077 fold_build2_loc (loc, code, type, a00, a11));
8079 /* This case if tricky because we must either have commutative
8080 operators or else A10 must not have side-effects. */
8082 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8083 && operand_equal_p (a01, a11, 0))
8084 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8085 fold_build2_loc (loc, code, type, a00, a10),
8086 a01);
8089 /* See if we can build a range comparison. */
8090 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8091 return tem;
8093 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8094 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8096 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8097 if (tem)
8098 return fold_build2_loc (loc, code, type, tem, arg1);
8101 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8102 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8104 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8105 if (tem)
8106 return fold_build2_loc (loc, code, type, arg0, tem);
8109 /* Check for the possibility of merging component references. If our
8110 lhs is another similar operation, try to merge its rhs with our
8111 rhs. Then try to merge our lhs and rhs. */
8112 if (TREE_CODE (arg0) == code
8113 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8114 TREE_OPERAND (arg0, 1), arg1)))
8115 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8117 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8118 return tem;
8120 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8121 && (code == TRUTH_AND_EXPR
8122 || code == TRUTH_ANDIF_EXPR
8123 || code == TRUTH_OR_EXPR
8124 || code == TRUTH_ORIF_EXPR))
8126 enum tree_code ncode, icode;
8128 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8129 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8130 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8132 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8133 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8134 We don't want to pack more than two leafs to a non-IF AND/OR
8135 expression.
8136 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8137 equal to IF-CODE, then we don't want to add right-hand operand.
8138 If the inner right-hand side of left-hand operand has
8139 side-effects, or isn't simple, then we can't add to it,
8140 as otherwise we might destroy if-sequence. */
8141 if (TREE_CODE (arg0) == icode
8142 && simple_operand_p_2 (arg1)
8143 /* Needed for sequence points to handle trappings, and
8144 side-effects. */
8145 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8147 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8148 arg1);
8149 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8150 tem);
8152 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8153 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8154 else if (TREE_CODE (arg1) == icode
8155 && simple_operand_p_2 (arg0)
8156 /* Needed for sequence points to handle trappings, and
8157 side-effects. */
8158 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8160 tem = fold_build2_loc (loc, ncode, type,
8161 arg0, TREE_OPERAND (arg1, 0));
8162 return fold_build2_loc (loc, icode, type, tem,
8163 TREE_OPERAND (arg1, 1));
8165 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8166 into (A OR B).
8167 For sequence point consistancy, we need to check for trapping,
8168 and side-effects. */
8169 else if (code == icode && simple_operand_p_2 (arg0)
8170 && simple_operand_p_2 (arg1))
8171 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8174 return NULL_TREE;
8177 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8178 by changing CODE to reduce the magnitude of constants involved in
8179 ARG0 of the comparison.
8180 Returns a canonicalized comparison tree if a simplification was
8181 possible, otherwise returns NULL_TREE.
8182 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8183 valid if signed overflow is undefined. */
8185 static tree
8186 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8187 tree arg0, tree arg1,
8188 bool *strict_overflow_p)
8190 enum tree_code code0 = TREE_CODE (arg0);
8191 tree t, cst0 = NULL_TREE;
8192 int sgn0;
8194 /* Match A +- CST code arg1. We can change this only if overflow
8195 is undefined. */
8196 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8197 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8198 /* In principle pointers also have undefined overflow behavior,
8199 but that causes problems elsewhere. */
8200 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8201 && (code0 == MINUS_EXPR
8202 || code0 == PLUS_EXPR)
8203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8204 return NULL_TREE;
8206 /* Identify the constant in arg0 and its sign. */
8207 cst0 = TREE_OPERAND (arg0, 1);
8208 sgn0 = tree_int_cst_sgn (cst0);
8210 /* Overflowed constants and zero will cause problems. */
8211 if (integer_zerop (cst0)
8212 || TREE_OVERFLOW (cst0))
8213 return NULL_TREE;
8215 /* See if we can reduce the magnitude of the constant in
8216 arg0 by changing the comparison code. */
8217 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8218 if (code == LT_EXPR
8219 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8220 code = LE_EXPR;
8221 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8222 else if (code == GT_EXPR
8223 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8224 code = GE_EXPR;
8225 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8226 else if (code == LE_EXPR
8227 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8228 code = LT_EXPR;
8229 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8230 else if (code == GE_EXPR
8231 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8232 code = GT_EXPR;
8233 else
8234 return NULL_TREE;
8235 *strict_overflow_p = true;
8237 /* Now build the constant reduced in magnitude. But not if that
8238 would produce one outside of its types range. */
8239 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8240 && ((sgn0 == 1
8241 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8242 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8243 || (sgn0 == -1
8244 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8245 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8246 return NULL_TREE;
8248 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8249 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8250 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8251 t = fold_convert (TREE_TYPE (arg1), t);
8253 return fold_build2_loc (loc, code, type, t, arg1);
8256 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8257 overflow further. Try to decrease the magnitude of constants involved
8258 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8259 and put sole constants at the second argument position.
8260 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8262 static tree
8263 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8264 tree arg0, tree arg1)
8266 tree t;
8267 bool strict_overflow_p;
8268 const char * const warnmsg = G_("assuming signed overflow does not occur "
8269 "when reducing constant in comparison");
8271 /* Try canonicalization by simplifying arg0. */
8272 strict_overflow_p = false;
8273 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8274 &strict_overflow_p);
8275 if (t)
8277 if (strict_overflow_p)
8278 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8279 return t;
8282 /* Try canonicalization by simplifying arg1 using the swapped
8283 comparison. */
8284 code = swap_tree_comparison (code);
8285 strict_overflow_p = false;
8286 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8287 &strict_overflow_p);
8288 if (t && strict_overflow_p)
8289 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8290 return t;
8293 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8294 space. This is used to avoid issuing overflow warnings for
8295 expressions like &p->x which can not wrap. */
8297 static bool
8298 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8300 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8301 return true;
8303 if (bitpos < 0)
8304 return true;
8306 wide_int wi_offset;
8307 int precision = TYPE_PRECISION (TREE_TYPE (base));
8308 if (offset == NULL_TREE)
8309 wi_offset = wi::zero (precision);
8310 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8311 return true;
8312 else
8313 wi_offset = offset;
8315 bool overflow;
8316 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8317 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8318 if (overflow)
8319 return true;
8321 if (!wi::fits_uhwi_p (total))
8322 return true;
8324 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8325 if (size <= 0)
8326 return true;
8328 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8329 array. */
8330 if (TREE_CODE (base) == ADDR_EXPR)
8332 HOST_WIDE_INT base_size;
8334 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8335 if (base_size > 0 && size < base_size)
8336 size = base_size;
8339 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8342 /* Subroutine of fold_binary. This routine performs all of the
8343 transformations that are common to the equality/inequality
8344 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8345 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8346 fold_binary should call fold_binary. Fold a comparison with
8347 tree code CODE and type TYPE with operands OP0 and OP1. Return
8348 the folded comparison or NULL_TREE. */
8350 static tree
8351 fold_comparison (location_t loc, enum tree_code code, tree type,
8352 tree op0, tree op1)
8354 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8355 tree arg0, arg1, tem;
8357 arg0 = op0;
8358 arg1 = op1;
8360 STRIP_SIGN_NOPS (arg0);
8361 STRIP_SIGN_NOPS (arg1);
8363 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8364 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8365 && (equality_code
8366 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8367 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8369 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8370 && TREE_CODE (arg1) == INTEGER_CST
8371 && !TREE_OVERFLOW (arg1))
8373 const enum tree_code
8374 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8375 tree const1 = TREE_OPERAND (arg0, 1);
8376 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8377 tree variable = TREE_OPERAND (arg0, 0);
8378 tree new_const = int_const_binop (reverse_op, const2, const1);
8380 /* If the constant operation overflowed this can be
8381 simplified as a comparison against INT_MAX/INT_MIN. */
8382 if (TREE_OVERFLOW (new_const)
8383 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8385 int const1_sgn = tree_int_cst_sgn (const1);
8386 enum tree_code code2 = code;
8388 /* Get the sign of the constant on the lhs if the
8389 operation were VARIABLE + CONST1. */
8390 if (TREE_CODE (arg0) == MINUS_EXPR)
8391 const1_sgn = -const1_sgn;
8393 /* The sign of the constant determines if we overflowed
8394 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8395 Canonicalize to the INT_MIN overflow by swapping the comparison
8396 if necessary. */
8397 if (const1_sgn == -1)
8398 code2 = swap_tree_comparison (code);
8400 /* We now can look at the canonicalized case
8401 VARIABLE + 1 CODE2 INT_MIN
8402 and decide on the result. */
8403 switch (code2)
8405 case EQ_EXPR:
8406 case LT_EXPR:
8407 case LE_EXPR:
8408 return
8409 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8411 case NE_EXPR:
8412 case GE_EXPR:
8413 case GT_EXPR:
8414 return
8415 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8417 default:
8418 gcc_unreachable ();
8421 else
8423 if (!equality_code)
8424 fold_overflow_warning ("assuming signed overflow does not occur "
8425 "when changing X +- C1 cmp C2 to "
8426 "X cmp C2 -+ C1",
8427 WARN_STRICT_OVERFLOW_COMPARISON);
8428 return fold_build2_loc (loc, code, type, variable, new_const);
8432 /* For comparisons of pointers we can decompose it to a compile time
8433 comparison of the base objects and the offsets into the object.
8434 This requires at least one operand being an ADDR_EXPR or a
8435 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8436 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8437 && (TREE_CODE (arg0) == ADDR_EXPR
8438 || TREE_CODE (arg1) == ADDR_EXPR
8439 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8440 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8442 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8443 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8444 machine_mode mode;
8445 int volatilep, reversep, unsignedp;
8446 bool indirect_base0 = false, indirect_base1 = false;
8448 /* Get base and offset for the access. Strip ADDR_EXPR for
8449 get_inner_reference, but put it back by stripping INDIRECT_REF
8450 off the base object if possible. indirect_baseN will be true
8451 if baseN is not an address but refers to the object itself. */
8452 base0 = arg0;
8453 if (TREE_CODE (arg0) == ADDR_EXPR)
8455 base0
8456 = get_inner_reference (TREE_OPERAND (arg0, 0),
8457 &bitsize, &bitpos0, &offset0, &mode,
8458 &unsignedp, &reversep, &volatilep, false);
8459 if (TREE_CODE (base0) == INDIRECT_REF)
8460 base0 = TREE_OPERAND (base0, 0);
8461 else
8462 indirect_base0 = true;
8464 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8466 base0 = TREE_OPERAND (arg0, 0);
8467 STRIP_SIGN_NOPS (base0);
8468 if (TREE_CODE (base0) == ADDR_EXPR)
8470 base0
8471 = get_inner_reference (TREE_OPERAND (base0, 0),
8472 &bitsize, &bitpos0, &offset0, &mode,
8473 &unsignedp, &reversep, &volatilep,
8474 false);
8475 if (TREE_CODE (base0) == INDIRECT_REF)
8476 base0 = TREE_OPERAND (base0, 0);
8477 else
8478 indirect_base0 = true;
8480 if (offset0 == NULL_TREE || integer_zerop (offset0))
8481 offset0 = TREE_OPERAND (arg0, 1);
8482 else
8483 offset0 = size_binop (PLUS_EXPR, offset0,
8484 TREE_OPERAND (arg0, 1));
8485 if (TREE_CODE (offset0) == INTEGER_CST)
8487 offset_int tem = wi::sext (wi::to_offset (offset0),
8488 TYPE_PRECISION (sizetype));
8489 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8490 tem += bitpos0;
8491 if (wi::fits_shwi_p (tem))
8493 bitpos0 = tem.to_shwi ();
8494 offset0 = NULL_TREE;
8499 base1 = arg1;
8500 if (TREE_CODE (arg1) == ADDR_EXPR)
8502 base1
8503 = get_inner_reference (TREE_OPERAND (arg1, 0),
8504 &bitsize, &bitpos1, &offset1, &mode,
8505 &unsignedp, &reversep, &volatilep, false);
8506 if (TREE_CODE (base1) == INDIRECT_REF)
8507 base1 = TREE_OPERAND (base1, 0);
8508 else
8509 indirect_base1 = true;
8511 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8513 base1 = TREE_OPERAND (arg1, 0);
8514 STRIP_SIGN_NOPS (base1);
8515 if (TREE_CODE (base1) == ADDR_EXPR)
8517 base1
8518 = get_inner_reference (TREE_OPERAND (base1, 0),
8519 &bitsize, &bitpos1, &offset1, &mode,
8520 &unsignedp, &reversep, &volatilep,
8521 false);
8522 if (TREE_CODE (base1) == INDIRECT_REF)
8523 base1 = TREE_OPERAND (base1, 0);
8524 else
8525 indirect_base1 = true;
8527 if (offset1 == NULL_TREE || integer_zerop (offset1))
8528 offset1 = TREE_OPERAND (arg1, 1);
8529 else
8530 offset1 = size_binop (PLUS_EXPR, offset1,
8531 TREE_OPERAND (arg1, 1));
8532 if (TREE_CODE (offset1) == INTEGER_CST)
8534 offset_int tem = wi::sext (wi::to_offset (offset1),
8535 TYPE_PRECISION (sizetype));
8536 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8537 tem += bitpos1;
8538 if (wi::fits_shwi_p (tem))
8540 bitpos1 = tem.to_shwi ();
8541 offset1 = NULL_TREE;
8546 /* If we have equivalent bases we might be able to simplify. */
8547 if (indirect_base0 == indirect_base1
8548 && operand_equal_p (base0, base1,
8549 indirect_base0 ? OEP_ADDRESS_OF : 0))
8551 /* We can fold this expression to a constant if the non-constant
8552 offset parts are equal. */
8553 if ((offset0 == offset1
8554 || (offset0 && offset1
8555 && operand_equal_p (offset0, offset1, 0)))
8556 && (code == EQ_EXPR
8557 || code == NE_EXPR
8558 || (indirect_base0 && DECL_P (base0))
8559 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8562 if (!equality_code
8563 && bitpos0 != bitpos1
8564 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8565 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8566 fold_overflow_warning (("assuming pointer wraparound does not "
8567 "occur when comparing P +- C1 with "
8568 "P +- C2"),
8569 WARN_STRICT_OVERFLOW_CONDITIONAL);
8571 switch (code)
8573 case EQ_EXPR:
8574 return constant_boolean_node (bitpos0 == bitpos1, type);
8575 case NE_EXPR:
8576 return constant_boolean_node (bitpos0 != bitpos1, type);
8577 case LT_EXPR:
8578 return constant_boolean_node (bitpos0 < bitpos1, type);
8579 case LE_EXPR:
8580 return constant_boolean_node (bitpos0 <= bitpos1, type);
8581 case GE_EXPR:
8582 return constant_boolean_node (bitpos0 >= bitpos1, type);
8583 case GT_EXPR:
8584 return constant_boolean_node (bitpos0 > bitpos1, type);
8585 default:;
8588 /* We can simplify the comparison to a comparison of the variable
8589 offset parts if the constant offset parts are equal.
8590 Be careful to use signed sizetype here because otherwise we
8591 mess with array offsets in the wrong way. This is possible
8592 because pointer arithmetic is restricted to retain within an
8593 object and overflow on pointer differences is undefined as of
8594 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8595 else if (bitpos0 == bitpos1
8596 && (equality_code
8597 || (indirect_base0 && DECL_P (base0))
8598 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8600 /* By converting to signed sizetype we cover middle-end pointer
8601 arithmetic which operates on unsigned pointer types of size
8602 type size and ARRAY_REF offsets which are properly sign or
8603 zero extended from their type in case it is narrower than
8604 sizetype. */
8605 if (offset0 == NULL_TREE)
8606 offset0 = build_int_cst (ssizetype, 0);
8607 else
8608 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8609 if (offset1 == NULL_TREE)
8610 offset1 = build_int_cst (ssizetype, 0);
8611 else
8612 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8614 if (!equality_code
8615 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8616 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8617 fold_overflow_warning (("assuming pointer wraparound does not "
8618 "occur when comparing P +- C1 with "
8619 "P +- C2"),
8620 WARN_STRICT_OVERFLOW_COMPARISON);
8622 return fold_build2_loc (loc, code, type, offset0, offset1);
8625 /* For equal offsets we can simplify to a comparison of the
8626 base addresses. */
8627 else if (bitpos0 == bitpos1
8628 && (indirect_base0
8629 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8630 && (indirect_base1
8631 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8632 && ((offset0 == offset1)
8633 || (offset0 && offset1
8634 && operand_equal_p (offset0, offset1, 0))))
8636 if (indirect_base0)
8637 base0 = build_fold_addr_expr_loc (loc, base0);
8638 if (indirect_base1)
8639 base1 = build_fold_addr_expr_loc (loc, base1);
8640 return fold_build2_loc (loc, code, type, base0, base1);
8644 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8645 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8646 the resulting offset is smaller in absolute value than the
8647 original one and has the same sign. */
8648 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8649 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8650 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8651 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8652 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8653 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8654 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8655 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8657 tree const1 = TREE_OPERAND (arg0, 1);
8658 tree const2 = TREE_OPERAND (arg1, 1);
8659 tree variable1 = TREE_OPERAND (arg0, 0);
8660 tree variable2 = TREE_OPERAND (arg1, 0);
8661 tree cst;
8662 const char * const warnmsg = G_("assuming signed overflow does not "
8663 "occur when combining constants around "
8664 "a comparison");
8666 /* Put the constant on the side where it doesn't overflow and is
8667 of lower absolute value and of same sign than before. */
8668 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8669 ? MINUS_EXPR : PLUS_EXPR,
8670 const2, const1);
8671 if (!TREE_OVERFLOW (cst)
8672 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8673 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8675 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8676 return fold_build2_loc (loc, code, type,
8677 variable1,
8678 fold_build2_loc (loc, TREE_CODE (arg1),
8679 TREE_TYPE (arg1),
8680 variable2, cst));
8683 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8684 ? MINUS_EXPR : PLUS_EXPR,
8685 const1, const2);
8686 if (!TREE_OVERFLOW (cst)
8687 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8688 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8690 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8691 return fold_build2_loc (loc, code, type,
8692 fold_build2_loc (loc, TREE_CODE (arg0),
8693 TREE_TYPE (arg0),
8694 variable1, cst),
8695 variable2);
8699 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8700 if (tem)
8701 return tem;
8703 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8704 constant, we can simplify it. */
8705 if (TREE_CODE (arg1) == INTEGER_CST
8706 && (TREE_CODE (arg0) == MIN_EXPR
8707 || TREE_CODE (arg0) == MAX_EXPR)
8708 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8710 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8711 if (tem)
8712 return tem;
8715 /* If we are comparing an expression that just has comparisons
8716 of two integer values, arithmetic expressions of those comparisons,
8717 and constants, we can simplify it. There are only three cases
8718 to check: the two values can either be equal, the first can be
8719 greater, or the second can be greater. Fold the expression for
8720 those three values. Since each value must be 0 or 1, we have
8721 eight possibilities, each of which corresponds to the constant 0
8722 or 1 or one of the six possible comparisons.
8724 This handles common cases like (a > b) == 0 but also handles
8725 expressions like ((x > y) - (y > x)) > 0, which supposedly
8726 occur in macroized code. */
8728 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8730 tree cval1 = 0, cval2 = 0;
8731 int save_p = 0;
8733 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8734 /* Don't handle degenerate cases here; they should already
8735 have been handled anyway. */
8736 && cval1 != 0 && cval2 != 0
8737 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8738 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8739 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8740 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8741 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8742 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8743 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8745 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8746 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8748 /* We can't just pass T to eval_subst in case cval1 or cval2
8749 was the same as ARG1. */
8751 tree high_result
8752 = fold_build2_loc (loc, code, type,
8753 eval_subst (loc, arg0, cval1, maxval,
8754 cval2, minval),
8755 arg1);
8756 tree equal_result
8757 = fold_build2_loc (loc, code, type,
8758 eval_subst (loc, arg0, cval1, maxval,
8759 cval2, maxval),
8760 arg1);
8761 tree low_result
8762 = fold_build2_loc (loc, code, type,
8763 eval_subst (loc, arg0, cval1, minval,
8764 cval2, maxval),
8765 arg1);
8767 /* All three of these results should be 0 or 1. Confirm they are.
8768 Then use those values to select the proper code to use. */
8770 if (TREE_CODE (high_result) == INTEGER_CST
8771 && TREE_CODE (equal_result) == INTEGER_CST
8772 && TREE_CODE (low_result) == INTEGER_CST)
8774 /* Make a 3-bit mask with the high-order bit being the
8775 value for `>', the next for '=', and the low for '<'. */
8776 switch ((integer_onep (high_result) * 4)
8777 + (integer_onep (equal_result) * 2)
8778 + integer_onep (low_result))
8780 case 0:
8781 /* Always false. */
8782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8783 case 1:
8784 code = LT_EXPR;
8785 break;
8786 case 2:
8787 code = EQ_EXPR;
8788 break;
8789 case 3:
8790 code = LE_EXPR;
8791 break;
8792 case 4:
8793 code = GT_EXPR;
8794 break;
8795 case 5:
8796 code = NE_EXPR;
8797 break;
8798 case 6:
8799 code = GE_EXPR;
8800 break;
8801 case 7:
8802 /* Always true. */
8803 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8806 if (save_p)
8808 tem = save_expr (build2 (code, type, cval1, cval2));
8809 SET_EXPR_LOCATION (tem, loc);
8810 return tem;
8812 return fold_build2_loc (loc, code, type, cval1, cval2);
8817 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8818 into a single range test. */
8819 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8820 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8821 && TREE_CODE (arg1) == INTEGER_CST
8822 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8823 && !integer_zerop (TREE_OPERAND (arg0, 1))
8824 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8825 && !TREE_OVERFLOW (arg1))
8827 tem = fold_div_compare (loc, code, type, arg0, arg1);
8828 if (tem != NULL_TREE)
8829 return tem;
8832 return NULL_TREE;
8836 /* Subroutine of fold_binary. Optimize complex multiplications of the
8837 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8838 argument EXPR represents the expression "z" of type TYPE. */
8840 static tree
8841 fold_mult_zconjz (location_t loc, tree type, tree expr)
8843 tree itype = TREE_TYPE (type);
8844 tree rpart, ipart, tem;
8846 if (TREE_CODE (expr) == COMPLEX_EXPR)
8848 rpart = TREE_OPERAND (expr, 0);
8849 ipart = TREE_OPERAND (expr, 1);
8851 else if (TREE_CODE (expr) == COMPLEX_CST)
8853 rpart = TREE_REALPART (expr);
8854 ipart = TREE_IMAGPART (expr);
8856 else
8858 expr = save_expr (expr);
8859 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8860 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8863 rpart = save_expr (rpart);
8864 ipart = save_expr (ipart);
8865 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8866 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8867 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8868 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8869 build_zero_cst (itype));
8873 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8874 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8876 static bool
8877 vec_cst_ctor_to_array (tree arg, tree *elts)
8879 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8881 if (TREE_CODE (arg) == VECTOR_CST)
8883 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8884 elts[i] = VECTOR_CST_ELT (arg, i);
8886 else if (TREE_CODE (arg) == CONSTRUCTOR)
8888 constructor_elt *elt;
8890 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8891 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8892 return false;
8893 else
8894 elts[i] = elt->value;
8896 else
8897 return false;
8898 for (; i < nelts; i++)
8899 elts[i]
8900 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8901 return true;
8904 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8905 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8906 NULL_TREE otherwise. */
8908 static tree
8909 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8911 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8912 tree *elts;
8913 bool need_ctor = false;
8915 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8916 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8917 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8918 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8919 return NULL_TREE;
8921 elts = XALLOCAVEC (tree, nelts * 3);
8922 if (!vec_cst_ctor_to_array (arg0, elts)
8923 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8924 return NULL_TREE;
8926 for (i = 0; i < nelts; i++)
8928 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8929 need_ctor = true;
8930 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8933 if (need_ctor)
8935 vec<constructor_elt, va_gc> *v;
8936 vec_alloc (v, nelts);
8937 for (i = 0; i < nelts; i++)
8938 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8939 return build_constructor (type, v);
8941 else
8942 return build_vector (type, &elts[2 * nelts]);
8945 /* Try to fold a pointer difference of type TYPE two address expressions of
8946 array references AREF0 and AREF1 using location LOC. Return a
8947 simplified expression for the difference or NULL_TREE. */
8949 static tree
8950 fold_addr_of_array_ref_difference (location_t loc, tree type,
8951 tree aref0, tree aref1)
8953 tree base0 = TREE_OPERAND (aref0, 0);
8954 tree base1 = TREE_OPERAND (aref1, 0);
8955 tree base_offset = build_int_cst (type, 0);
8957 /* If the bases are array references as well, recurse. If the bases
8958 are pointer indirections compute the difference of the pointers.
8959 If the bases are equal, we are set. */
8960 if ((TREE_CODE (base0) == ARRAY_REF
8961 && TREE_CODE (base1) == ARRAY_REF
8962 && (base_offset
8963 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8964 || (INDIRECT_REF_P (base0)
8965 && INDIRECT_REF_P (base1)
8966 && (base_offset
8967 = fold_binary_loc (loc, MINUS_EXPR, type,
8968 fold_convert (type, TREE_OPERAND (base0, 0)),
8969 fold_convert (type,
8970 TREE_OPERAND (base1, 0)))))
8971 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8973 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8974 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8975 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8976 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8977 return fold_build2_loc (loc, PLUS_EXPR, type,
8978 base_offset,
8979 fold_build2_loc (loc, MULT_EXPR, type,
8980 diff, esz));
8982 return NULL_TREE;
8985 /* If the real or vector real constant CST of type TYPE has an exact
8986 inverse, return it, else return NULL. */
8988 tree
8989 exact_inverse (tree type, tree cst)
8991 REAL_VALUE_TYPE r;
8992 tree unit_type, *elts;
8993 machine_mode mode;
8994 unsigned vec_nelts, i;
8996 switch (TREE_CODE (cst))
8998 case REAL_CST:
8999 r = TREE_REAL_CST (cst);
9001 if (exact_real_inverse (TYPE_MODE (type), &r))
9002 return build_real (type, r);
9004 return NULL_TREE;
9006 case VECTOR_CST:
9007 vec_nelts = VECTOR_CST_NELTS (cst);
9008 elts = XALLOCAVEC (tree, vec_nelts);
9009 unit_type = TREE_TYPE (type);
9010 mode = TYPE_MODE (unit_type);
9012 for (i = 0; i < vec_nelts; i++)
9014 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9015 if (!exact_real_inverse (mode, &r))
9016 return NULL_TREE;
9017 elts[i] = build_real (unit_type, r);
9020 return build_vector (type, elts);
9022 default:
9023 return NULL_TREE;
9027 /* Mask out the tz least significant bits of X of type TYPE where
9028 tz is the number of trailing zeroes in Y. */
9029 static wide_int
9030 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9032 int tz = wi::ctz (y);
9033 if (tz > 0)
9034 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9035 return x;
9038 /* Return true when T is an address and is known to be nonzero.
9039 For floating point we further ensure that T is not denormal.
9040 Similar logic is present in nonzero_address in rtlanal.h.
9042 If the return value is based on the assumption that signed overflow
9043 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9044 change *STRICT_OVERFLOW_P. */
9046 static bool
9047 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9049 tree type = TREE_TYPE (t);
9050 enum tree_code code;
9052 /* Doing something useful for floating point would need more work. */
9053 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9054 return false;
9056 code = TREE_CODE (t);
9057 switch (TREE_CODE_CLASS (code))
9059 case tcc_unary:
9060 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9061 strict_overflow_p);
9062 case tcc_binary:
9063 case tcc_comparison:
9064 return tree_binary_nonzero_warnv_p (code, type,
9065 TREE_OPERAND (t, 0),
9066 TREE_OPERAND (t, 1),
9067 strict_overflow_p);
9068 case tcc_constant:
9069 case tcc_declaration:
9070 case tcc_reference:
9071 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9073 default:
9074 break;
9077 switch (code)
9079 case TRUTH_NOT_EXPR:
9080 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9081 strict_overflow_p);
9083 case TRUTH_AND_EXPR:
9084 case TRUTH_OR_EXPR:
9085 case TRUTH_XOR_EXPR:
9086 return tree_binary_nonzero_warnv_p (code, type,
9087 TREE_OPERAND (t, 0),
9088 TREE_OPERAND (t, 1),
9089 strict_overflow_p);
9091 case COND_EXPR:
9092 case CONSTRUCTOR:
9093 case OBJ_TYPE_REF:
9094 case ASSERT_EXPR:
9095 case ADDR_EXPR:
9096 case WITH_SIZE_EXPR:
9097 case SSA_NAME:
9098 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9100 case COMPOUND_EXPR:
9101 case MODIFY_EXPR:
9102 case BIND_EXPR:
9103 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9104 strict_overflow_p);
9106 case SAVE_EXPR:
9107 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9108 strict_overflow_p);
9110 case CALL_EXPR:
9112 tree fndecl = get_callee_fndecl (t);
9113 if (!fndecl) return false;
9114 if (flag_delete_null_pointer_checks && !flag_check_new
9115 && DECL_IS_OPERATOR_NEW (fndecl)
9116 && !TREE_NOTHROW (fndecl))
9117 return true;
9118 if (flag_delete_null_pointer_checks
9119 && lookup_attribute ("returns_nonnull",
9120 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9121 return true;
9122 return alloca_call_p (t);
9125 default:
9126 break;
9128 return false;
9131 /* Return true when T is an address and is known to be nonzero.
9132 Handle warnings about undefined signed overflow. */
9134 static bool
9135 tree_expr_nonzero_p (tree t)
9137 bool ret, strict_overflow_p;
9139 strict_overflow_p = false;
9140 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9141 if (strict_overflow_p)
9142 fold_overflow_warning (("assuming signed overflow does not occur when "
9143 "determining that expression is always "
9144 "non-zero"),
9145 WARN_STRICT_OVERFLOW_MISC);
9146 return ret;
9149 /* Return true if T is known not to be equal to an integer W. */
9151 bool
9152 expr_not_equal_to (tree t, const wide_int &w)
9154 wide_int min, max, nz;
9155 value_range_type rtype;
9156 switch (TREE_CODE (t))
9158 case INTEGER_CST:
9159 return wi::ne_p (t, w);
9161 case SSA_NAME:
9162 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9163 return false;
9164 rtype = get_range_info (t, &min, &max);
9165 if (rtype == VR_RANGE)
9167 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9168 return true;
9169 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9170 return true;
9172 else if (rtype == VR_ANTI_RANGE
9173 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9174 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9175 return true;
9176 /* If T has some known zero bits and W has any of those bits set,
9177 then T is known not to be equal to W. */
9178 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9179 TYPE_PRECISION (TREE_TYPE (t))), 0))
9180 return true;
9181 return false;
9183 default:
9184 return false;
9188 /* Fold a binary expression of code CODE and type TYPE with operands
9189 OP0 and OP1. LOC is the location of the resulting expression.
9190 Return the folded expression if folding is successful. Otherwise,
9191 return NULL_TREE. */
9193 tree
9194 fold_binary_loc (location_t loc,
9195 enum tree_code code, tree type, tree op0, tree op1)
9197 enum tree_code_class kind = TREE_CODE_CLASS (code);
9198 tree arg0, arg1, tem;
9199 tree t1 = NULL_TREE;
9200 bool strict_overflow_p;
9201 unsigned int prec;
9203 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9204 && TREE_CODE_LENGTH (code) == 2
9205 && op0 != NULL_TREE
9206 && op1 != NULL_TREE);
9208 arg0 = op0;
9209 arg1 = op1;
9211 /* Strip any conversions that don't change the mode. This is
9212 safe for every expression, except for a comparison expression
9213 because its signedness is derived from its operands. So, in
9214 the latter case, only strip conversions that don't change the
9215 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9216 preserved.
9218 Note that this is done as an internal manipulation within the
9219 constant folder, in order to find the simplest representation
9220 of the arguments so that their form can be studied. In any
9221 cases, the appropriate type conversions should be put back in
9222 the tree that will get out of the constant folder. */
9224 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9226 STRIP_SIGN_NOPS (arg0);
9227 STRIP_SIGN_NOPS (arg1);
9229 else
9231 STRIP_NOPS (arg0);
9232 STRIP_NOPS (arg1);
9235 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9236 constant but we can't do arithmetic on them. */
9237 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9239 tem = const_binop (code, type, arg0, arg1);
9240 if (tem != NULL_TREE)
9242 if (TREE_TYPE (tem) != type)
9243 tem = fold_convert_loc (loc, type, tem);
9244 return tem;
9248 /* If this is a commutative operation, and ARG0 is a constant, move it
9249 to ARG1 to reduce the number of tests below. */
9250 if (commutative_tree_code (code)
9251 && tree_swap_operands_p (arg0, arg1, true))
9252 return fold_build2_loc (loc, code, type, op1, op0);
9254 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9255 to ARG1 to reduce the number of tests below. */
9256 if (kind == tcc_comparison
9257 && tree_swap_operands_p (arg0, arg1, true))
9258 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9260 tem = generic_simplify (loc, code, type, op0, op1);
9261 if (tem)
9262 return tem;
9264 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9266 First check for cases where an arithmetic operation is applied to a
9267 compound, conditional, or comparison operation. Push the arithmetic
9268 operation inside the compound or conditional to see if any folding
9269 can then be done. Convert comparison to conditional for this purpose.
9270 The also optimizes non-constant cases that used to be done in
9271 expand_expr.
9273 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9274 one of the operands is a comparison and the other is a comparison, a
9275 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9276 code below would make the expression more complex. Change it to a
9277 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9278 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9280 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9281 || code == EQ_EXPR || code == NE_EXPR)
9282 && TREE_CODE (type) != VECTOR_TYPE
9283 && ((truth_value_p (TREE_CODE (arg0))
9284 && (truth_value_p (TREE_CODE (arg1))
9285 || (TREE_CODE (arg1) == BIT_AND_EXPR
9286 && integer_onep (TREE_OPERAND (arg1, 1)))))
9287 || (truth_value_p (TREE_CODE (arg1))
9288 && (truth_value_p (TREE_CODE (arg0))
9289 || (TREE_CODE (arg0) == BIT_AND_EXPR
9290 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9292 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9293 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9294 : TRUTH_XOR_EXPR,
9295 boolean_type_node,
9296 fold_convert_loc (loc, boolean_type_node, arg0),
9297 fold_convert_loc (loc, boolean_type_node, arg1));
9299 if (code == EQ_EXPR)
9300 tem = invert_truthvalue_loc (loc, tem);
9302 return fold_convert_loc (loc, type, tem);
9305 if (TREE_CODE_CLASS (code) == tcc_binary
9306 || TREE_CODE_CLASS (code) == tcc_comparison)
9308 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9310 tem = fold_build2_loc (loc, code, type,
9311 fold_convert_loc (loc, TREE_TYPE (op0),
9312 TREE_OPERAND (arg0, 1)), op1);
9313 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9314 tem);
9316 if (TREE_CODE (arg1) == COMPOUND_EXPR
9317 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9319 tem = fold_build2_loc (loc, code, type, op0,
9320 fold_convert_loc (loc, TREE_TYPE (op1),
9321 TREE_OPERAND (arg1, 1)));
9322 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9323 tem);
9326 if (TREE_CODE (arg0) == COND_EXPR
9327 || TREE_CODE (arg0) == VEC_COND_EXPR
9328 || COMPARISON_CLASS_P (arg0))
9330 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9331 arg0, arg1,
9332 /*cond_first_p=*/1);
9333 if (tem != NULL_TREE)
9334 return tem;
9337 if (TREE_CODE (arg1) == COND_EXPR
9338 || TREE_CODE (arg1) == VEC_COND_EXPR
9339 || COMPARISON_CLASS_P (arg1))
9341 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9342 arg1, arg0,
9343 /*cond_first_p=*/0);
9344 if (tem != NULL_TREE)
9345 return tem;
9349 switch (code)
9351 case MEM_REF:
9352 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9353 if (TREE_CODE (arg0) == ADDR_EXPR
9354 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9356 tree iref = TREE_OPERAND (arg0, 0);
9357 return fold_build2 (MEM_REF, type,
9358 TREE_OPERAND (iref, 0),
9359 int_const_binop (PLUS_EXPR, arg1,
9360 TREE_OPERAND (iref, 1)));
9363 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9364 if (TREE_CODE (arg0) == ADDR_EXPR
9365 && handled_component_p (TREE_OPERAND (arg0, 0)))
9367 tree base;
9368 HOST_WIDE_INT coffset;
9369 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9370 &coffset);
9371 if (!base)
9372 return NULL_TREE;
9373 return fold_build2 (MEM_REF, type,
9374 build_fold_addr_expr (base),
9375 int_const_binop (PLUS_EXPR, arg1,
9376 size_int (coffset)));
9379 return NULL_TREE;
9381 case POINTER_PLUS_EXPR:
9382 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9383 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9384 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9385 return fold_convert_loc (loc, type,
9386 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9387 fold_convert_loc (loc, sizetype,
9388 arg1),
9389 fold_convert_loc (loc, sizetype,
9390 arg0)));
9392 return NULL_TREE;
9394 case PLUS_EXPR:
9395 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9397 /* X + (X / CST) * -CST is X % CST. */
9398 if (TREE_CODE (arg1) == MULT_EXPR
9399 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9400 && operand_equal_p (arg0,
9401 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9403 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9404 tree cst1 = TREE_OPERAND (arg1, 1);
9405 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9406 cst1, cst0);
9407 if (sum && integer_zerop (sum))
9408 return fold_convert_loc (loc, type,
9409 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9410 TREE_TYPE (arg0), arg0,
9411 cst0));
9415 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9416 one. Make sure the type is not saturating and has the signedness of
9417 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9418 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9419 if ((TREE_CODE (arg0) == MULT_EXPR
9420 || TREE_CODE (arg1) == MULT_EXPR)
9421 && !TYPE_SATURATING (type)
9422 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9423 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9424 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9426 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9427 if (tem)
9428 return tem;
9431 if (! FLOAT_TYPE_P (type))
9433 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9434 (plus (plus (mult) (mult)) (foo)) so that we can
9435 take advantage of the factoring cases below. */
9436 if (ANY_INTEGRAL_TYPE_P (type)
9437 && TYPE_OVERFLOW_WRAPS (type)
9438 && (((TREE_CODE (arg0) == PLUS_EXPR
9439 || TREE_CODE (arg0) == MINUS_EXPR)
9440 && TREE_CODE (arg1) == MULT_EXPR)
9441 || ((TREE_CODE (arg1) == PLUS_EXPR
9442 || TREE_CODE (arg1) == MINUS_EXPR)
9443 && TREE_CODE (arg0) == MULT_EXPR)))
9445 tree parg0, parg1, parg, marg;
9446 enum tree_code pcode;
9448 if (TREE_CODE (arg1) == MULT_EXPR)
9449 parg = arg0, marg = arg1;
9450 else
9451 parg = arg1, marg = arg0;
9452 pcode = TREE_CODE (parg);
9453 parg0 = TREE_OPERAND (parg, 0);
9454 parg1 = TREE_OPERAND (parg, 1);
9455 STRIP_NOPS (parg0);
9456 STRIP_NOPS (parg1);
9458 if (TREE_CODE (parg0) == MULT_EXPR
9459 && TREE_CODE (parg1) != MULT_EXPR)
9460 return fold_build2_loc (loc, pcode, type,
9461 fold_build2_loc (loc, PLUS_EXPR, type,
9462 fold_convert_loc (loc, type,
9463 parg0),
9464 fold_convert_loc (loc, type,
9465 marg)),
9466 fold_convert_loc (loc, type, parg1));
9467 if (TREE_CODE (parg0) != MULT_EXPR
9468 && TREE_CODE (parg1) == MULT_EXPR)
9469 return
9470 fold_build2_loc (loc, PLUS_EXPR, type,
9471 fold_convert_loc (loc, type, parg0),
9472 fold_build2_loc (loc, pcode, type,
9473 fold_convert_loc (loc, type, marg),
9474 fold_convert_loc (loc, type,
9475 parg1)));
9478 else
9480 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9481 to __complex__ ( x, y ). This is not the same for SNaNs or
9482 if signed zeros are involved. */
9483 if (!HONOR_SNANS (element_mode (arg0))
9484 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9485 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9487 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9488 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9489 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9490 bool arg0rz = false, arg0iz = false;
9491 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9492 || (arg0i && (arg0iz = real_zerop (arg0i))))
9494 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9495 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9496 if (arg0rz && arg1i && real_zerop (arg1i))
9498 tree rp = arg1r ? arg1r
9499 : build1 (REALPART_EXPR, rtype, arg1);
9500 tree ip = arg0i ? arg0i
9501 : build1 (IMAGPART_EXPR, rtype, arg0);
9502 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9504 else if (arg0iz && arg1r && real_zerop (arg1r))
9506 tree rp = arg0r ? arg0r
9507 : build1 (REALPART_EXPR, rtype, arg0);
9508 tree ip = arg1i ? arg1i
9509 : build1 (IMAGPART_EXPR, rtype, arg1);
9510 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9515 if (flag_unsafe_math_optimizations
9516 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9517 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9518 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9519 return tem;
9521 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9522 We associate floats only if the user has specified
9523 -fassociative-math. */
9524 if (flag_associative_math
9525 && TREE_CODE (arg1) == PLUS_EXPR
9526 && TREE_CODE (arg0) != MULT_EXPR)
9528 tree tree10 = TREE_OPERAND (arg1, 0);
9529 tree tree11 = TREE_OPERAND (arg1, 1);
9530 if (TREE_CODE (tree11) == MULT_EXPR
9531 && TREE_CODE (tree10) == MULT_EXPR)
9533 tree tree0;
9534 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9535 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9538 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9539 We associate floats only if the user has specified
9540 -fassociative-math. */
9541 if (flag_associative_math
9542 && TREE_CODE (arg0) == PLUS_EXPR
9543 && TREE_CODE (arg1) != MULT_EXPR)
9545 tree tree00 = TREE_OPERAND (arg0, 0);
9546 tree tree01 = TREE_OPERAND (arg0, 1);
9547 if (TREE_CODE (tree01) == MULT_EXPR
9548 && TREE_CODE (tree00) == MULT_EXPR)
9550 tree tree0;
9551 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9552 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9557 bit_rotate:
9558 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9559 is a rotate of A by C1 bits. */
9560 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9561 is a rotate of A by B bits. */
9563 enum tree_code code0, code1;
9564 tree rtype;
9565 code0 = TREE_CODE (arg0);
9566 code1 = TREE_CODE (arg1);
9567 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9568 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9569 && operand_equal_p (TREE_OPERAND (arg0, 0),
9570 TREE_OPERAND (arg1, 0), 0)
9571 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9572 TYPE_UNSIGNED (rtype))
9573 /* Only create rotates in complete modes. Other cases are not
9574 expanded properly. */
9575 && (element_precision (rtype)
9576 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9578 tree tree01, tree11;
9579 enum tree_code code01, code11;
9581 tree01 = TREE_OPERAND (arg0, 1);
9582 tree11 = TREE_OPERAND (arg1, 1);
9583 STRIP_NOPS (tree01);
9584 STRIP_NOPS (tree11);
9585 code01 = TREE_CODE (tree01);
9586 code11 = TREE_CODE (tree11);
9587 if (code01 == INTEGER_CST
9588 && code11 == INTEGER_CST
9589 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9590 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9592 tem = build2_loc (loc, LROTATE_EXPR,
9593 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9594 TREE_OPERAND (arg0, 0),
9595 code0 == LSHIFT_EXPR
9596 ? TREE_OPERAND (arg0, 1)
9597 : TREE_OPERAND (arg1, 1));
9598 return fold_convert_loc (loc, type, tem);
9600 else if (code11 == MINUS_EXPR)
9602 tree tree110, tree111;
9603 tree110 = TREE_OPERAND (tree11, 0);
9604 tree111 = TREE_OPERAND (tree11, 1);
9605 STRIP_NOPS (tree110);
9606 STRIP_NOPS (tree111);
9607 if (TREE_CODE (tree110) == INTEGER_CST
9608 && 0 == compare_tree_int (tree110,
9609 element_precision
9610 (TREE_TYPE (TREE_OPERAND
9611 (arg0, 0))))
9612 && operand_equal_p (tree01, tree111, 0))
9613 return
9614 fold_convert_loc (loc, type,
9615 build2 ((code0 == LSHIFT_EXPR
9616 ? LROTATE_EXPR
9617 : RROTATE_EXPR),
9618 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9619 TREE_OPERAND (arg0, 0),
9620 TREE_OPERAND (arg0, 1)));
9622 else if (code01 == MINUS_EXPR)
9624 tree tree010, tree011;
9625 tree010 = TREE_OPERAND (tree01, 0);
9626 tree011 = TREE_OPERAND (tree01, 1);
9627 STRIP_NOPS (tree010);
9628 STRIP_NOPS (tree011);
9629 if (TREE_CODE (tree010) == INTEGER_CST
9630 && 0 == compare_tree_int (tree010,
9631 element_precision
9632 (TREE_TYPE (TREE_OPERAND
9633 (arg0, 0))))
9634 && operand_equal_p (tree11, tree011, 0))
9635 return fold_convert_loc
9636 (loc, type,
9637 build2 ((code0 != LSHIFT_EXPR
9638 ? LROTATE_EXPR
9639 : RROTATE_EXPR),
9640 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9641 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9646 associate:
9647 /* In most languages, can't associate operations on floats through
9648 parentheses. Rather than remember where the parentheses were, we
9649 don't associate floats at all, unless the user has specified
9650 -fassociative-math.
9651 And, we need to make sure type is not saturating. */
9653 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9654 && !TYPE_SATURATING (type))
9656 tree var0, con0, lit0, minus_lit0;
9657 tree var1, con1, lit1, minus_lit1;
9658 tree atype = type;
9659 bool ok = true;
9661 /* Split both trees into variables, constants, and literals. Then
9662 associate each group together, the constants with literals,
9663 then the result with variables. This increases the chances of
9664 literals being recombined later and of generating relocatable
9665 expressions for the sum of a constant and literal. */
9666 var0 = split_tree (loc, arg0, type, code,
9667 &con0, &lit0, &minus_lit0, 0);
9668 var1 = split_tree (loc, arg1, type, code,
9669 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9671 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9672 if (code == MINUS_EXPR)
9673 code = PLUS_EXPR;
9675 /* With undefined overflow prefer doing association in a type
9676 which wraps on overflow, if that is one of the operand types. */
9677 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9678 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9680 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9681 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9682 atype = TREE_TYPE (arg0);
9683 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9684 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9685 atype = TREE_TYPE (arg1);
9686 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9689 /* With undefined overflow we can only associate constants with one
9690 variable, and constants whose association doesn't overflow. */
9691 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9692 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9694 if (var0 && var1)
9696 tree tmp0 = var0;
9697 tree tmp1 = var1;
9698 bool one_neg = false;
9700 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9702 tmp0 = TREE_OPERAND (tmp0, 0);
9703 one_neg = !one_neg;
9705 if (CONVERT_EXPR_P (tmp0)
9706 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9707 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9708 <= TYPE_PRECISION (atype)))
9709 tmp0 = TREE_OPERAND (tmp0, 0);
9710 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9712 tmp1 = TREE_OPERAND (tmp1, 0);
9713 one_neg = !one_neg;
9715 if (CONVERT_EXPR_P (tmp1)
9716 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9717 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9718 <= TYPE_PRECISION (atype)))
9719 tmp1 = TREE_OPERAND (tmp1, 0);
9720 /* The only case we can still associate with two variables
9721 is if they cancel out. */
9722 if (!one_neg
9723 || !operand_equal_p (tmp0, tmp1, 0))
9724 ok = false;
9728 /* Only do something if we found more than two objects. Otherwise,
9729 nothing has changed and we risk infinite recursion. */
9730 if (ok
9731 && (2 < ((var0 != 0) + (var1 != 0)
9732 + (con0 != 0) + (con1 != 0)
9733 + (lit0 != 0) + (lit1 != 0)
9734 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9736 bool any_overflows = false;
9737 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9738 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9739 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9740 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9741 var0 = associate_trees (loc, var0, var1, code, atype);
9742 con0 = associate_trees (loc, con0, con1, code, atype);
9743 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9744 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9745 code, atype);
9747 /* Preserve the MINUS_EXPR if the negative part of the literal is
9748 greater than the positive part. Otherwise, the multiplicative
9749 folding code (i.e extract_muldiv) may be fooled in case
9750 unsigned constants are subtracted, like in the following
9751 example: ((X*2 + 4) - 8U)/2. */
9752 if (minus_lit0 && lit0)
9754 if (TREE_CODE (lit0) == INTEGER_CST
9755 && TREE_CODE (minus_lit0) == INTEGER_CST
9756 && tree_int_cst_lt (lit0, minus_lit0))
9758 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9759 MINUS_EXPR, atype);
9760 lit0 = 0;
9762 else
9764 lit0 = associate_trees (loc, lit0, minus_lit0,
9765 MINUS_EXPR, atype);
9766 minus_lit0 = 0;
9770 /* Don't introduce overflows through reassociation. */
9771 if (!any_overflows
9772 && ((lit0 && TREE_OVERFLOW_P (lit0))
9773 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9774 return NULL_TREE;
9776 if (minus_lit0)
9778 if (con0 == 0)
9779 return
9780 fold_convert_loc (loc, type,
9781 associate_trees (loc, var0, minus_lit0,
9782 MINUS_EXPR, atype));
9783 else
9785 con0 = associate_trees (loc, con0, minus_lit0,
9786 MINUS_EXPR, atype);
9787 return
9788 fold_convert_loc (loc, type,
9789 associate_trees (loc, var0, con0,
9790 PLUS_EXPR, atype));
9794 con0 = associate_trees (loc, con0, lit0, code, atype);
9795 return
9796 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9797 code, atype));
9801 return NULL_TREE;
9803 case MINUS_EXPR:
9804 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9805 if (TREE_CODE (arg0) == NEGATE_EXPR
9806 && negate_expr_p (op1)
9807 && reorder_operands_p (arg0, arg1))
9808 return fold_build2_loc (loc, MINUS_EXPR, type,
9809 negate_expr (op1),
9810 fold_convert_loc (loc, type,
9811 TREE_OPERAND (arg0, 0)));
9813 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9814 __complex__ ( x, -y ). This is not the same for SNaNs or if
9815 signed zeros are involved. */
9816 if (!HONOR_SNANS (element_mode (arg0))
9817 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9818 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9820 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9821 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9822 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9823 bool arg0rz = false, arg0iz = false;
9824 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9825 || (arg0i && (arg0iz = real_zerop (arg0i))))
9827 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9828 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9829 if (arg0rz && arg1i && real_zerop (arg1i))
9831 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9832 arg1r ? arg1r
9833 : build1 (REALPART_EXPR, rtype, arg1));
9834 tree ip = arg0i ? arg0i
9835 : build1 (IMAGPART_EXPR, rtype, arg0);
9836 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9838 else if (arg0iz && arg1r && real_zerop (arg1r))
9840 tree rp = arg0r ? arg0r
9841 : build1 (REALPART_EXPR, rtype, arg0);
9842 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9843 arg1i ? arg1i
9844 : build1 (IMAGPART_EXPR, rtype, arg1));
9845 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9850 /* A - B -> A + (-B) if B is easily negatable. */
9851 if (negate_expr_p (op1)
9852 && ! TYPE_OVERFLOW_SANITIZED (type)
9853 && ((FLOAT_TYPE_P (type)
9854 /* Avoid this transformation if B is a positive REAL_CST. */
9855 && (TREE_CODE (op1) != REAL_CST
9856 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9857 || INTEGRAL_TYPE_P (type)))
9858 return fold_build2_loc (loc, PLUS_EXPR, type,
9859 fold_convert_loc (loc, type, arg0),
9860 negate_expr (op1));
9862 /* Fold &a[i] - &a[j] to i-j. */
9863 if (TREE_CODE (arg0) == ADDR_EXPR
9864 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9865 && TREE_CODE (arg1) == ADDR_EXPR
9866 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9868 tree tem = fold_addr_of_array_ref_difference (loc, type,
9869 TREE_OPERAND (arg0, 0),
9870 TREE_OPERAND (arg1, 0));
9871 if (tem)
9872 return tem;
9875 if (FLOAT_TYPE_P (type)
9876 && flag_unsafe_math_optimizations
9877 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9878 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9879 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9880 return tem;
9882 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9883 one. Make sure the type is not saturating and has the signedness of
9884 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9885 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9886 if ((TREE_CODE (arg0) == MULT_EXPR
9887 || TREE_CODE (arg1) == MULT_EXPR)
9888 && !TYPE_SATURATING (type)
9889 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9890 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9891 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9893 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9894 if (tem)
9895 return tem;
9898 goto associate;
9900 case MULT_EXPR:
9901 if (! FLOAT_TYPE_P (type))
9903 /* Transform x * -C into -x * C if x is easily negatable. */
9904 if (TREE_CODE (op1) == INTEGER_CST
9905 && tree_int_cst_sgn (op1) == -1
9906 && negate_expr_p (op0)
9907 && (tem = negate_expr (op1)) != op1
9908 && ! TREE_OVERFLOW (tem))
9909 return fold_build2_loc (loc, MULT_EXPR, type,
9910 fold_convert_loc (loc, type,
9911 negate_expr (op0)), tem);
9913 /* (A + A) * C -> A * 2 * C */
9914 if (TREE_CODE (arg0) == PLUS_EXPR
9915 && TREE_CODE (arg1) == INTEGER_CST
9916 && operand_equal_p (TREE_OPERAND (arg0, 0),
9917 TREE_OPERAND (arg0, 1), 0))
9918 return fold_build2_loc (loc, MULT_EXPR, type,
9919 omit_one_operand_loc (loc, type,
9920 TREE_OPERAND (arg0, 0),
9921 TREE_OPERAND (arg0, 1)),
9922 fold_build2_loc (loc, MULT_EXPR, type,
9923 build_int_cst (type, 2) , arg1));
9925 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9926 sign-changing only. */
9927 if (TREE_CODE (arg1) == INTEGER_CST
9928 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9929 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9930 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9932 strict_overflow_p = false;
9933 if (TREE_CODE (arg1) == INTEGER_CST
9934 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9935 &strict_overflow_p)))
9937 if (strict_overflow_p)
9938 fold_overflow_warning (("assuming signed overflow does not "
9939 "occur when simplifying "
9940 "multiplication"),
9941 WARN_STRICT_OVERFLOW_MISC);
9942 return fold_convert_loc (loc, type, tem);
9945 /* Optimize z * conj(z) for integer complex numbers. */
9946 if (TREE_CODE (arg0) == CONJ_EXPR
9947 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9948 return fold_mult_zconjz (loc, type, arg1);
9949 if (TREE_CODE (arg1) == CONJ_EXPR
9950 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9951 return fold_mult_zconjz (loc, type, arg0);
9953 else
9955 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9956 This is not the same for NaNs or if signed zeros are
9957 involved. */
9958 if (!HONOR_NANS (arg0)
9959 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9960 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9961 && TREE_CODE (arg1) == COMPLEX_CST
9962 && real_zerop (TREE_REALPART (arg1)))
9964 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9965 if (real_onep (TREE_IMAGPART (arg1)))
9966 return
9967 fold_build2_loc (loc, COMPLEX_EXPR, type,
9968 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9969 rtype, arg0)),
9970 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9971 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9972 return
9973 fold_build2_loc (loc, COMPLEX_EXPR, type,
9974 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9975 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9976 rtype, arg0)));
9979 /* Optimize z * conj(z) for floating point complex numbers.
9980 Guarded by flag_unsafe_math_optimizations as non-finite
9981 imaginary components don't produce scalar results. */
9982 if (flag_unsafe_math_optimizations
9983 && TREE_CODE (arg0) == CONJ_EXPR
9984 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9985 return fold_mult_zconjz (loc, type, arg1);
9986 if (flag_unsafe_math_optimizations
9987 && TREE_CODE (arg1) == CONJ_EXPR
9988 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9989 return fold_mult_zconjz (loc, type, arg0);
9991 if (flag_unsafe_math_optimizations)
9994 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9995 if (!in_gimple_form
9996 && optimize
9997 && operand_equal_p (arg0, arg1, 0))
9999 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10001 if (powfn)
10003 tree arg = build_real (type, dconst2);
10004 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10009 goto associate;
10011 case BIT_IOR_EXPR:
10012 /* Canonicalize (X & C1) | C2. */
10013 if (TREE_CODE (arg0) == BIT_AND_EXPR
10014 && TREE_CODE (arg1) == INTEGER_CST
10015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10017 int width = TYPE_PRECISION (type), w;
10018 wide_int c1 = TREE_OPERAND (arg0, 1);
10019 wide_int c2 = arg1;
10021 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10022 if ((c1 & c2) == c1)
10023 return omit_one_operand_loc (loc, type, arg1,
10024 TREE_OPERAND (arg0, 0));
10026 wide_int msk = wi::mask (width, false,
10027 TYPE_PRECISION (TREE_TYPE (arg1)));
10029 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10030 if (msk.and_not (c1 | c2) == 0)
10031 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10032 TREE_OPERAND (arg0, 0), arg1);
10034 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10035 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10036 mode which allows further optimizations. */
10037 c1 &= msk;
10038 c2 &= msk;
10039 wide_int c3 = c1.and_not (c2);
10040 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10042 wide_int mask = wi::mask (w, false,
10043 TYPE_PRECISION (type));
10044 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10046 c3 = mask;
10047 break;
10051 if (c3 != c1)
10052 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10053 fold_build2_loc (loc, BIT_AND_EXPR, type,
10054 TREE_OPERAND (arg0, 0),
10055 wide_int_to_tree (type,
10056 c3)),
10057 arg1);
10060 /* See if this can be simplified into a rotate first. If that
10061 is unsuccessful continue in the association code. */
10062 goto bit_rotate;
10064 case BIT_XOR_EXPR:
10065 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10066 if (TREE_CODE (arg0) == BIT_AND_EXPR
10067 && INTEGRAL_TYPE_P (type)
10068 && integer_onep (TREE_OPERAND (arg0, 1))
10069 && integer_onep (arg1))
10070 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10071 build_zero_cst (TREE_TYPE (arg0)));
10073 /* See if this can be simplified into a rotate first. If that
10074 is unsuccessful continue in the association code. */
10075 goto bit_rotate;
10077 case BIT_AND_EXPR:
10078 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10079 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10080 && INTEGRAL_TYPE_P (type)
10081 && integer_onep (TREE_OPERAND (arg0, 1))
10082 && integer_onep (arg1))
10084 tree tem2;
10085 tem = TREE_OPERAND (arg0, 0);
10086 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10087 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10088 tem, tem2);
10089 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10090 build_zero_cst (TREE_TYPE (tem)));
10092 /* Fold ~X & 1 as (X & 1) == 0. */
10093 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10094 && INTEGRAL_TYPE_P (type)
10095 && integer_onep (arg1))
10097 tree tem2;
10098 tem = TREE_OPERAND (arg0, 0);
10099 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10100 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10101 tem, tem2);
10102 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10103 build_zero_cst (TREE_TYPE (tem)));
10105 /* Fold !X & 1 as X == 0. */
10106 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10107 && integer_onep (arg1))
10109 tem = TREE_OPERAND (arg0, 0);
10110 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10111 build_zero_cst (TREE_TYPE (tem)));
10114 /* Fold (X ^ Y) & Y as ~X & Y. */
10115 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10116 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10118 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10119 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10120 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10121 fold_convert_loc (loc, type, arg1));
10123 /* Fold (X ^ Y) & X as ~Y & X. */
10124 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10125 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10126 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10128 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10129 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10130 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10131 fold_convert_loc (loc, type, arg1));
10133 /* Fold X & (X ^ Y) as X & ~Y. */
10134 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10135 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10137 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10138 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10139 fold_convert_loc (loc, type, arg0),
10140 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10142 /* Fold X & (Y ^ X) as ~Y & X. */
10143 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10144 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10145 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10147 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10148 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10149 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10150 fold_convert_loc (loc, type, arg0));
10153 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10154 multiple of 1 << CST. */
10155 if (TREE_CODE (arg1) == INTEGER_CST)
10157 wide_int cst1 = arg1;
10158 wide_int ncst1 = -cst1;
10159 if ((cst1 & ncst1) == ncst1
10160 && multiple_of_p (type, arg0,
10161 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10162 return fold_convert_loc (loc, type, arg0);
10165 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10166 bits from CST2. */
10167 if (TREE_CODE (arg1) == INTEGER_CST
10168 && TREE_CODE (arg0) == MULT_EXPR
10169 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10171 wide_int warg1 = arg1;
10172 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10174 if (masked == 0)
10175 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10176 arg0, arg1);
10177 else if (masked != warg1)
10179 /* Avoid the transform if arg1 is a mask of some
10180 mode which allows further optimizations. */
10181 int pop = wi::popcount (warg1);
10182 if (!(pop >= BITS_PER_UNIT
10183 && exact_log2 (pop) != -1
10184 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10185 return fold_build2_loc (loc, code, type, op0,
10186 wide_int_to_tree (type, masked));
10190 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10191 ((A & N) + B) & M -> (A + B) & M
10192 Similarly if (N & M) == 0,
10193 ((A | N) + B) & M -> (A + B) & M
10194 and for - instead of + (or unary - instead of +)
10195 and/or ^ instead of |.
10196 If B is constant and (B & M) == 0, fold into A & M. */
10197 if (TREE_CODE (arg1) == INTEGER_CST)
10199 wide_int cst1 = arg1;
10200 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10201 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10202 && (TREE_CODE (arg0) == PLUS_EXPR
10203 || TREE_CODE (arg0) == MINUS_EXPR
10204 || TREE_CODE (arg0) == NEGATE_EXPR)
10205 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10206 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10208 tree pmop[2];
10209 int which = 0;
10210 wide_int cst0;
10212 /* Now we know that arg0 is (C + D) or (C - D) or
10213 -C and arg1 (M) is == (1LL << cst) - 1.
10214 Store C into PMOP[0] and D into PMOP[1]. */
10215 pmop[0] = TREE_OPERAND (arg0, 0);
10216 pmop[1] = NULL;
10217 if (TREE_CODE (arg0) != NEGATE_EXPR)
10219 pmop[1] = TREE_OPERAND (arg0, 1);
10220 which = 1;
10223 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10224 which = -1;
10226 for (; which >= 0; which--)
10227 switch (TREE_CODE (pmop[which]))
10229 case BIT_AND_EXPR:
10230 case BIT_IOR_EXPR:
10231 case BIT_XOR_EXPR:
10232 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10233 != INTEGER_CST)
10234 break;
10235 cst0 = TREE_OPERAND (pmop[which], 1);
10236 cst0 &= cst1;
10237 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10239 if (cst0 != cst1)
10240 break;
10242 else if (cst0 != 0)
10243 break;
10244 /* If C or D is of the form (A & N) where
10245 (N & M) == M, or of the form (A | N) or
10246 (A ^ N) where (N & M) == 0, replace it with A. */
10247 pmop[which] = TREE_OPERAND (pmop[which], 0);
10248 break;
10249 case INTEGER_CST:
10250 /* If C or D is a N where (N & M) == 0, it can be
10251 omitted (assumed 0). */
10252 if ((TREE_CODE (arg0) == PLUS_EXPR
10253 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10254 && (cst1 & pmop[which]) == 0)
10255 pmop[which] = NULL;
10256 break;
10257 default:
10258 break;
10261 /* Only build anything new if we optimized one or both arguments
10262 above. */
10263 if (pmop[0] != TREE_OPERAND (arg0, 0)
10264 || (TREE_CODE (arg0) != NEGATE_EXPR
10265 && pmop[1] != TREE_OPERAND (arg0, 1)))
10267 tree utype = TREE_TYPE (arg0);
10268 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10270 /* Perform the operations in a type that has defined
10271 overflow behavior. */
10272 utype = unsigned_type_for (TREE_TYPE (arg0));
10273 if (pmop[0] != NULL)
10274 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10275 if (pmop[1] != NULL)
10276 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10279 if (TREE_CODE (arg0) == NEGATE_EXPR)
10280 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10281 else if (TREE_CODE (arg0) == PLUS_EXPR)
10283 if (pmop[0] != NULL && pmop[1] != NULL)
10284 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10285 pmop[0], pmop[1]);
10286 else if (pmop[0] != NULL)
10287 tem = pmop[0];
10288 else if (pmop[1] != NULL)
10289 tem = pmop[1];
10290 else
10291 return build_int_cst (type, 0);
10293 else if (pmop[0] == NULL)
10294 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10295 else
10296 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10297 pmop[0], pmop[1]);
10298 /* TEM is now the new binary +, - or unary - replacement. */
10299 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10300 fold_convert_loc (loc, utype, arg1));
10301 return fold_convert_loc (loc, type, tem);
10306 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10307 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10308 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10310 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10312 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10313 if (mask == -1)
10314 return
10315 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10318 goto associate;
10320 case RDIV_EXPR:
10321 /* Don't touch a floating-point divide by zero unless the mode
10322 of the constant can represent infinity. */
10323 if (TREE_CODE (arg1) == REAL_CST
10324 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10325 && real_zerop (arg1))
10326 return NULL_TREE;
10328 /* (-A) / (-B) -> A / B */
10329 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10330 return fold_build2_loc (loc, RDIV_EXPR, type,
10331 TREE_OPERAND (arg0, 0),
10332 negate_expr (arg1));
10333 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10334 return fold_build2_loc (loc, RDIV_EXPR, type,
10335 negate_expr (arg0),
10336 TREE_OPERAND (arg1, 0));
10337 return NULL_TREE;
10339 case TRUNC_DIV_EXPR:
10340 /* Fall through */
10342 case FLOOR_DIV_EXPR:
10343 /* Simplify A / (B << N) where A and B are positive and B is
10344 a power of 2, to A >> (N + log2(B)). */
10345 strict_overflow_p = false;
10346 if (TREE_CODE (arg1) == LSHIFT_EXPR
10347 && (TYPE_UNSIGNED (type)
10348 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10350 tree sval = TREE_OPERAND (arg1, 0);
10351 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10353 tree sh_cnt = TREE_OPERAND (arg1, 1);
10354 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10355 wi::exact_log2 (sval));
10357 if (strict_overflow_p)
10358 fold_overflow_warning (("assuming signed overflow does not "
10359 "occur when simplifying A / (B << N)"),
10360 WARN_STRICT_OVERFLOW_MISC);
10362 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10363 sh_cnt, pow2);
10364 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10365 fold_convert_loc (loc, type, arg0), sh_cnt);
10369 /* Fall through */
10371 case ROUND_DIV_EXPR:
10372 case CEIL_DIV_EXPR:
10373 case EXACT_DIV_EXPR:
10374 if (integer_zerop (arg1))
10375 return NULL_TREE;
10377 /* Convert -A / -B to A / B when the type is signed and overflow is
10378 undefined. */
10379 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10380 && TREE_CODE (arg0) == NEGATE_EXPR
10381 && negate_expr_p (op1))
10383 if (INTEGRAL_TYPE_P (type))
10384 fold_overflow_warning (("assuming signed overflow does not occur "
10385 "when distributing negation across "
10386 "division"),
10387 WARN_STRICT_OVERFLOW_MISC);
10388 return fold_build2_loc (loc, code, type,
10389 fold_convert_loc (loc, type,
10390 TREE_OPERAND (arg0, 0)),
10391 negate_expr (op1));
10393 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10394 && TREE_CODE (arg1) == NEGATE_EXPR
10395 && negate_expr_p (op0))
10397 if (INTEGRAL_TYPE_P (type))
10398 fold_overflow_warning (("assuming signed overflow does not occur "
10399 "when distributing negation across "
10400 "division"),
10401 WARN_STRICT_OVERFLOW_MISC);
10402 return fold_build2_loc (loc, code, type,
10403 negate_expr (op0),
10404 fold_convert_loc (loc, type,
10405 TREE_OPERAND (arg1, 0)));
10408 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10409 operation, EXACT_DIV_EXPR.
10411 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10412 At one time others generated faster code, it's not clear if they do
10413 after the last round to changes to the DIV code in expmed.c. */
10414 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10415 && multiple_of_p (type, arg0, arg1))
10416 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10417 fold_convert (type, arg0),
10418 fold_convert (type, arg1));
10420 strict_overflow_p = false;
10421 if (TREE_CODE (arg1) == INTEGER_CST
10422 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10423 &strict_overflow_p)))
10425 if (strict_overflow_p)
10426 fold_overflow_warning (("assuming signed overflow does not occur "
10427 "when simplifying division"),
10428 WARN_STRICT_OVERFLOW_MISC);
10429 return fold_convert_loc (loc, type, tem);
10432 return NULL_TREE;
10434 case CEIL_MOD_EXPR:
10435 case FLOOR_MOD_EXPR:
10436 case ROUND_MOD_EXPR:
10437 case TRUNC_MOD_EXPR:
10438 strict_overflow_p = false;
10439 if (TREE_CODE (arg1) == INTEGER_CST
10440 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10441 &strict_overflow_p)))
10443 if (strict_overflow_p)
10444 fold_overflow_warning (("assuming signed overflow does not occur "
10445 "when simplifying modulus"),
10446 WARN_STRICT_OVERFLOW_MISC);
10447 return fold_convert_loc (loc, type, tem);
10450 return NULL_TREE;
10452 case LROTATE_EXPR:
10453 case RROTATE_EXPR:
10454 case RSHIFT_EXPR:
10455 case LSHIFT_EXPR:
10456 /* Since negative shift count is not well-defined,
10457 don't try to compute it in the compiler. */
10458 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10459 return NULL_TREE;
10461 prec = element_precision (type);
10463 /* If we have a rotate of a bit operation with the rotate count and
10464 the second operand of the bit operation both constant,
10465 permute the two operations. */
10466 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10467 && (TREE_CODE (arg0) == BIT_AND_EXPR
10468 || TREE_CODE (arg0) == BIT_IOR_EXPR
10469 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10470 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10471 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10472 fold_build2_loc (loc, code, type,
10473 TREE_OPERAND (arg0, 0), arg1),
10474 fold_build2_loc (loc, code, type,
10475 TREE_OPERAND (arg0, 1), arg1));
10477 /* Two consecutive rotates adding up to the some integer
10478 multiple of the precision of the type can be ignored. */
10479 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10480 && TREE_CODE (arg0) == RROTATE_EXPR
10481 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10482 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10483 prec) == 0)
10484 return TREE_OPERAND (arg0, 0);
10486 return NULL_TREE;
10488 case MIN_EXPR:
10489 case MAX_EXPR:
10490 goto associate;
10492 case TRUTH_ANDIF_EXPR:
10493 /* Note that the operands of this must be ints
10494 and their values must be 0 or 1.
10495 ("true" is a fixed value perhaps depending on the language.) */
10496 /* If first arg is constant zero, return it. */
10497 if (integer_zerop (arg0))
10498 return fold_convert_loc (loc, type, arg0);
10499 case TRUTH_AND_EXPR:
10500 /* If either arg is constant true, drop it. */
10501 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10502 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10503 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10504 /* Preserve sequence points. */
10505 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10506 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10507 /* If second arg is constant zero, result is zero, but first arg
10508 must be evaluated. */
10509 if (integer_zerop (arg1))
10510 return omit_one_operand_loc (loc, type, arg1, arg0);
10511 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10512 case will be handled here. */
10513 if (integer_zerop (arg0))
10514 return omit_one_operand_loc (loc, type, arg0, arg1);
10516 /* !X && X is always false. */
10517 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10518 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10519 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10520 /* X && !X is always false. */
10521 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10522 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10523 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10525 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10526 means A >= Y && A != MAX, but in this case we know that
10527 A < X <= MAX. */
10529 if (!TREE_SIDE_EFFECTS (arg0)
10530 && !TREE_SIDE_EFFECTS (arg1))
10532 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10533 if (tem && !operand_equal_p (tem, arg0, 0))
10534 return fold_build2_loc (loc, code, type, tem, arg1);
10536 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10537 if (tem && !operand_equal_p (tem, arg1, 0))
10538 return fold_build2_loc (loc, code, type, arg0, tem);
10541 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10542 != NULL_TREE)
10543 return tem;
10545 return NULL_TREE;
10547 case TRUTH_ORIF_EXPR:
10548 /* Note that the operands of this must be ints
10549 and their values must be 0 or true.
10550 ("true" is a fixed value perhaps depending on the language.) */
10551 /* If first arg is constant true, return it. */
10552 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10553 return fold_convert_loc (loc, type, arg0);
10554 case TRUTH_OR_EXPR:
10555 /* If either arg is constant zero, drop it. */
10556 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10558 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10559 /* Preserve sequence points. */
10560 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10562 /* If second arg is constant true, result is true, but we must
10563 evaluate first arg. */
10564 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10565 return omit_one_operand_loc (loc, type, arg1, arg0);
10566 /* Likewise for first arg, but note this only occurs here for
10567 TRUTH_OR_EXPR. */
10568 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10569 return omit_one_operand_loc (loc, type, arg0, arg1);
10571 /* !X || X is always true. */
10572 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10573 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10574 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10575 /* X || !X is always true. */
10576 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10577 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10578 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10580 /* (X && !Y) || (!X && Y) is X ^ Y */
10581 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10582 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10584 tree a0, a1, l0, l1, n0, n1;
10586 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10587 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10589 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10590 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10592 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10593 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10595 if ((operand_equal_p (n0, a0, 0)
10596 && operand_equal_p (n1, a1, 0))
10597 || (operand_equal_p (n0, a1, 0)
10598 && operand_equal_p (n1, a0, 0)))
10599 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10602 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10603 != NULL_TREE)
10604 return tem;
10606 return NULL_TREE;
10608 case TRUTH_XOR_EXPR:
10609 /* If the second arg is constant zero, drop it. */
10610 if (integer_zerop (arg1))
10611 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10612 /* If the second arg is constant true, this is a logical inversion. */
10613 if (integer_onep (arg1))
10615 tem = invert_truthvalue_loc (loc, arg0);
10616 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10618 /* Identical arguments cancel to zero. */
10619 if (operand_equal_p (arg0, arg1, 0))
10620 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10622 /* !X ^ X is always true. */
10623 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10624 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10625 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10627 /* X ^ !X is always true. */
10628 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10629 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10630 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10632 return NULL_TREE;
10634 case EQ_EXPR:
10635 case NE_EXPR:
10636 STRIP_NOPS (arg0);
10637 STRIP_NOPS (arg1);
10639 tem = fold_comparison (loc, code, type, op0, op1);
10640 if (tem != NULL_TREE)
10641 return tem;
10643 /* bool_var != 1 becomes !bool_var. */
10644 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10645 && code == NE_EXPR)
10646 return fold_convert_loc (loc, type,
10647 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10648 TREE_TYPE (arg0), arg0));
10650 /* bool_var == 0 becomes !bool_var. */
10651 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10652 && code == EQ_EXPR)
10653 return fold_convert_loc (loc, type,
10654 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10655 TREE_TYPE (arg0), arg0));
10657 /* !exp != 0 becomes !exp */
10658 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10659 && code == NE_EXPR)
10660 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10662 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10663 if ((TREE_CODE (arg0) == PLUS_EXPR
10664 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10665 || TREE_CODE (arg0) == MINUS_EXPR)
10666 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10667 0)),
10668 arg1, 0)
10669 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10670 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10672 tree val = TREE_OPERAND (arg0, 1);
10673 val = fold_build2_loc (loc, code, type, val,
10674 build_int_cst (TREE_TYPE (val), 0));
10675 return omit_two_operands_loc (loc, type, val,
10676 TREE_OPERAND (arg0, 0), arg1);
10679 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10680 if ((TREE_CODE (arg1) == PLUS_EXPR
10681 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10682 || TREE_CODE (arg1) == MINUS_EXPR)
10683 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10684 0)),
10685 arg0, 0)
10686 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10687 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10689 tree val = TREE_OPERAND (arg1, 1);
10690 val = fold_build2_loc (loc, code, type, val,
10691 build_int_cst (TREE_TYPE (val), 0));
10692 return omit_two_operands_loc (loc, type, val,
10693 TREE_OPERAND (arg1, 0), arg0);
10696 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10697 if (TREE_CODE (arg0) == MINUS_EXPR
10698 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10699 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10700 1)),
10701 arg1, 0)
10702 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10703 return omit_two_operands_loc (loc, type,
10704 code == NE_EXPR
10705 ? boolean_true_node : boolean_false_node,
10706 TREE_OPERAND (arg0, 1), arg1);
10708 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10709 if (TREE_CODE (arg1) == MINUS_EXPR
10710 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10711 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10712 1)),
10713 arg0, 0)
10714 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10715 return omit_two_operands_loc (loc, type,
10716 code == NE_EXPR
10717 ? boolean_true_node : boolean_false_node,
10718 TREE_OPERAND (arg1, 1), arg0);
10720 /* If this is an EQ or NE comparison with zero and ARG0 is
10721 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10722 two operations, but the latter can be done in one less insn
10723 on machines that have only two-operand insns or on which a
10724 constant cannot be the first operand. */
10725 if (TREE_CODE (arg0) == BIT_AND_EXPR
10726 && integer_zerop (arg1))
10728 tree arg00 = TREE_OPERAND (arg0, 0);
10729 tree arg01 = TREE_OPERAND (arg0, 1);
10730 if (TREE_CODE (arg00) == LSHIFT_EXPR
10731 && integer_onep (TREE_OPERAND (arg00, 0)))
10733 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10734 arg01, TREE_OPERAND (arg00, 1));
10735 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10736 build_int_cst (TREE_TYPE (arg0), 1));
10737 return fold_build2_loc (loc, code, type,
10738 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10739 arg1);
10741 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10742 && integer_onep (TREE_OPERAND (arg01, 0)))
10744 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10745 arg00, TREE_OPERAND (arg01, 1));
10746 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10747 build_int_cst (TREE_TYPE (arg0), 1));
10748 return fold_build2_loc (loc, code, type,
10749 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10750 arg1);
10754 /* If this is an NE or EQ comparison of zero against the result of a
10755 signed MOD operation whose second operand is a power of 2, make
10756 the MOD operation unsigned since it is simpler and equivalent. */
10757 if (integer_zerop (arg1)
10758 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10759 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10760 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10761 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10762 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10763 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10765 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10766 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10767 fold_convert_loc (loc, newtype,
10768 TREE_OPERAND (arg0, 0)),
10769 fold_convert_loc (loc, newtype,
10770 TREE_OPERAND (arg0, 1)));
10772 return fold_build2_loc (loc, code, type, newmod,
10773 fold_convert_loc (loc, newtype, arg1));
10776 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10777 C1 is a valid shift constant, and C2 is a power of two, i.e.
10778 a single bit. */
10779 if (TREE_CODE (arg0) == BIT_AND_EXPR
10780 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10781 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10782 == INTEGER_CST
10783 && integer_pow2p (TREE_OPERAND (arg0, 1))
10784 && integer_zerop (arg1))
10786 tree itype = TREE_TYPE (arg0);
10787 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10788 prec = TYPE_PRECISION (itype);
10790 /* Check for a valid shift count. */
10791 if (wi::ltu_p (arg001, prec))
10793 tree arg01 = TREE_OPERAND (arg0, 1);
10794 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10795 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10796 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10797 can be rewritten as (X & (C2 << C1)) != 0. */
10798 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10800 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10801 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10802 return fold_build2_loc (loc, code, type, tem,
10803 fold_convert_loc (loc, itype, arg1));
10805 /* Otherwise, for signed (arithmetic) shifts,
10806 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10807 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10808 else if (!TYPE_UNSIGNED (itype))
10809 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10810 arg000, build_int_cst (itype, 0));
10811 /* Otherwise, of unsigned (logical) shifts,
10812 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10813 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10814 else
10815 return omit_one_operand_loc (loc, type,
10816 code == EQ_EXPR ? integer_one_node
10817 : integer_zero_node,
10818 arg000);
10822 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10823 Similarly for NE_EXPR. */
10824 if (TREE_CODE (arg0) == BIT_AND_EXPR
10825 && TREE_CODE (arg1) == INTEGER_CST
10826 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10828 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10829 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10830 TREE_OPERAND (arg0, 1));
10831 tree dandnotc
10832 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10833 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10834 notc);
10835 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10836 if (integer_nonzerop (dandnotc))
10837 return omit_one_operand_loc (loc, type, rslt, arg0);
10840 /* If this is a comparison of a field, we may be able to simplify it. */
10841 if ((TREE_CODE (arg0) == COMPONENT_REF
10842 || TREE_CODE (arg0) == BIT_FIELD_REF)
10843 /* Handle the constant case even without -O
10844 to make sure the warnings are given. */
10845 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10847 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10848 if (t1)
10849 return t1;
10852 /* Optimize comparisons of strlen vs zero to a compare of the
10853 first character of the string vs zero. To wit,
10854 strlen(ptr) == 0 => *ptr == 0
10855 strlen(ptr) != 0 => *ptr != 0
10856 Other cases should reduce to one of these two (or a constant)
10857 due to the return value of strlen being unsigned. */
10858 if (TREE_CODE (arg0) == CALL_EXPR
10859 && integer_zerop (arg1))
10861 tree fndecl = get_callee_fndecl (arg0);
10863 if (fndecl
10864 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10865 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10866 && call_expr_nargs (arg0) == 1
10867 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10869 tree iref = build_fold_indirect_ref_loc (loc,
10870 CALL_EXPR_ARG (arg0, 0));
10871 return fold_build2_loc (loc, code, type, iref,
10872 build_int_cst (TREE_TYPE (iref), 0));
10876 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10877 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10878 if (TREE_CODE (arg0) == RSHIFT_EXPR
10879 && integer_zerop (arg1)
10880 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10882 tree arg00 = TREE_OPERAND (arg0, 0);
10883 tree arg01 = TREE_OPERAND (arg0, 1);
10884 tree itype = TREE_TYPE (arg00);
10885 if (wi::eq_p (arg01, element_precision (itype) - 1))
10887 if (TYPE_UNSIGNED (itype))
10889 itype = signed_type_for (itype);
10890 arg00 = fold_convert_loc (loc, itype, arg00);
10892 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10893 type, arg00, build_zero_cst (itype));
10897 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10898 (X & C) == 0 when C is a single bit. */
10899 if (TREE_CODE (arg0) == BIT_AND_EXPR
10900 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10901 && integer_zerop (arg1)
10902 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10904 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10905 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10906 TREE_OPERAND (arg0, 1));
10907 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10908 type, tem,
10909 fold_convert_loc (loc, TREE_TYPE (arg0),
10910 arg1));
10913 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10914 constant C is a power of two, i.e. a single bit. */
10915 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10916 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10917 && integer_zerop (arg1)
10918 && integer_pow2p (TREE_OPERAND (arg0, 1))
10919 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10920 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10922 tree arg00 = TREE_OPERAND (arg0, 0);
10923 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10924 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10927 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10928 when is C is a power of two, i.e. a single bit. */
10929 if (TREE_CODE (arg0) == BIT_AND_EXPR
10930 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10931 && integer_zerop (arg1)
10932 && integer_pow2p (TREE_OPERAND (arg0, 1))
10933 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10934 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10936 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10937 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10938 arg000, TREE_OPERAND (arg0, 1));
10939 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10940 tem, build_int_cst (TREE_TYPE (tem), 0));
10943 if (integer_zerop (arg1)
10944 && tree_expr_nonzero_p (arg0))
10946 tree res = constant_boolean_node (code==NE_EXPR, type);
10947 return omit_one_operand_loc (loc, type, res, arg0);
10950 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10951 if (TREE_CODE (arg0) == BIT_AND_EXPR
10952 && TREE_CODE (arg1) == BIT_AND_EXPR)
10954 tree arg00 = TREE_OPERAND (arg0, 0);
10955 tree arg01 = TREE_OPERAND (arg0, 1);
10956 tree arg10 = TREE_OPERAND (arg1, 0);
10957 tree arg11 = TREE_OPERAND (arg1, 1);
10958 tree itype = TREE_TYPE (arg0);
10960 if (operand_equal_p (arg01, arg11, 0))
10961 return fold_build2_loc (loc, code, type,
10962 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10963 fold_build2_loc (loc,
10964 BIT_XOR_EXPR, itype,
10965 arg00, arg10),
10966 arg01),
10967 build_zero_cst (itype));
10969 if (operand_equal_p (arg01, arg10, 0))
10970 return fold_build2_loc (loc, code, type,
10971 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10972 fold_build2_loc (loc,
10973 BIT_XOR_EXPR, itype,
10974 arg00, arg11),
10975 arg01),
10976 build_zero_cst (itype));
10978 if (operand_equal_p (arg00, arg11, 0))
10979 return fold_build2_loc (loc, code, type,
10980 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10981 fold_build2_loc (loc,
10982 BIT_XOR_EXPR, itype,
10983 arg01, arg10),
10984 arg00),
10985 build_zero_cst (itype));
10987 if (operand_equal_p (arg00, arg10, 0))
10988 return fold_build2_loc (loc, code, type,
10989 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10990 fold_build2_loc (loc,
10991 BIT_XOR_EXPR, itype,
10992 arg01, arg11),
10993 arg00),
10994 build_zero_cst (itype));
10997 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10998 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11000 tree arg00 = TREE_OPERAND (arg0, 0);
11001 tree arg01 = TREE_OPERAND (arg0, 1);
11002 tree arg10 = TREE_OPERAND (arg1, 0);
11003 tree arg11 = TREE_OPERAND (arg1, 1);
11004 tree itype = TREE_TYPE (arg0);
11006 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11007 operand_equal_p guarantees no side-effects so we don't need
11008 to use omit_one_operand on Z. */
11009 if (operand_equal_p (arg01, arg11, 0))
11010 return fold_build2_loc (loc, code, type, arg00,
11011 fold_convert_loc (loc, TREE_TYPE (arg00),
11012 arg10));
11013 if (operand_equal_p (arg01, arg10, 0))
11014 return fold_build2_loc (loc, code, type, arg00,
11015 fold_convert_loc (loc, TREE_TYPE (arg00),
11016 arg11));
11017 if (operand_equal_p (arg00, arg11, 0))
11018 return fold_build2_loc (loc, code, type, arg01,
11019 fold_convert_loc (loc, TREE_TYPE (arg01),
11020 arg10));
11021 if (operand_equal_p (arg00, arg10, 0))
11022 return fold_build2_loc (loc, code, type, arg01,
11023 fold_convert_loc (loc, TREE_TYPE (arg01),
11024 arg11));
11026 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11027 if (TREE_CODE (arg01) == INTEGER_CST
11028 && TREE_CODE (arg11) == INTEGER_CST)
11030 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11031 fold_convert_loc (loc, itype, arg11));
11032 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11033 return fold_build2_loc (loc, code, type, tem,
11034 fold_convert_loc (loc, itype, arg10));
11038 /* Attempt to simplify equality/inequality comparisons of complex
11039 values. Only lower the comparison if the result is known or
11040 can be simplified to a single scalar comparison. */
11041 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11042 || TREE_CODE (arg0) == COMPLEX_CST)
11043 && (TREE_CODE (arg1) == COMPLEX_EXPR
11044 || TREE_CODE (arg1) == COMPLEX_CST))
11046 tree real0, imag0, real1, imag1;
11047 tree rcond, icond;
11049 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11051 real0 = TREE_OPERAND (arg0, 0);
11052 imag0 = TREE_OPERAND (arg0, 1);
11054 else
11056 real0 = TREE_REALPART (arg0);
11057 imag0 = TREE_IMAGPART (arg0);
11060 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11062 real1 = TREE_OPERAND (arg1, 0);
11063 imag1 = TREE_OPERAND (arg1, 1);
11065 else
11067 real1 = TREE_REALPART (arg1);
11068 imag1 = TREE_IMAGPART (arg1);
11071 rcond = fold_binary_loc (loc, code, type, real0, real1);
11072 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11074 if (integer_zerop (rcond))
11076 if (code == EQ_EXPR)
11077 return omit_two_operands_loc (loc, type, boolean_false_node,
11078 imag0, imag1);
11079 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11081 else
11083 if (code == NE_EXPR)
11084 return omit_two_operands_loc (loc, type, boolean_true_node,
11085 imag0, imag1);
11086 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11090 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11091 if (icond && TREE_CODE (icond) == INTEGER_CST)
11093 if (integer_zerop (icond))
11095 if (code == EQ_EXPR)
11096 return omit_two_operands_loc (loc, type, boolean_false_node,
11097 real0, real1);
11098 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11100 else
11102 if (code == NE_EXPR)
11103 return omit_two_operands_loc (loc, type, boolean_true_node,
11104 real0, real1);
11105 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11110 return NULL_TREE;
11112 case LT_EXPR:
11113 case GT_EXPR:
11114 case LE_EXPR:
11115 case GE_EXPR:
11116 tem = fold_comparison (loc, code, type, op0, op1);
11117 if (tem != NULL_TREE)
11118 return tem;
11120 /* Transform comparisons of the form X +- C CMP X. */
11121 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11123 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11124 && !HONOR_SNANS (arg0))
11125 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11126 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11128 tree arg01 = TREE_OPERAND (arg0, 1);
11129 enum tree_code code0 = TREE_CODE (arg0);
11130 int is_positive;
11132 if (TREE_CODE (arg01) == REAL_CST)
11133 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11134 else
11135 is_positive = tree_int_cst_sgn (arg01);
11137 /* (X - c) > X becomes false. */
11138 if (code == GT_EXPR
11139 && ((code0 == MINUS_EXPR && is_positive >= 0)
11140 || (code0 == PLUS_EXPR && is_positive <= 0)))
11142 if (TREE_CODE (arg01) == INTEGER_CST
11143 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11144 fold_overflow_warning (("assuming signed overflow does not "
11145 "occur when assuming that (X - c) > X "
11146 "is always false"),
11147 WARN_STRICT_OVERFLOW_ALL);
11148 return constant_boolean_node (0, type);
11151 /* Likewise (X + c) < X becomes false. */
11152 if (code == LT_EXPR
11153 && ((code0 == PLUS_EXPR && is_positive >= 0)
11154 || (code0 == MINUS_EXPR && is_positive <= 0)))
11156 if (TREE_CODE (arg01) == INTEGER_CST
11157 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11158 fold_overflow_warning (("assuming signed overflow does not "
11159 "occur when assuming that "
11160 "(X + c) < X is always false"),
11161 WARN_STRICT_OVERFLOW_ALL);
11162 return constant_boolean_node (0, type);
11165 /* Convert (X - c) <= X to true. */
11166 if (!HONOR_NANS (arg1)
11167 && code == LE_EXPR
11168 && ((code0 == MINUS_EXPR && is_positive >= 0)
11169 || (code0 == PLUS_EXPR && is_positive <= 0)))
11171 if (TREE_CODE (arg01) == INTEGER_CST
11172 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11173 fold_overflow_warning (("assuming signed overflow does not "
11174 "occur when assuming that "
11175 "(X - c) <= X is always true"),
11176 WARN_STRICT_OVERFLOW_ALL);
11177 return constant_boolean_node (1, type);
11180 /* Convert (X + c) >= X to true. */
11181 if (!HONOR_NANS (arg1)
11182 && code == GE_EXPR
11183 && ((code0 == PLUS_EXPR && is_positive >= 0)
11184 || (code0 == MINUS_EXPR && is_positive <= 0)))
11186 if (TREE_CODE (arg01) == INTEGER_CST
11187 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11188 fold_overflow_warning (("assuming signed overflow does not "
11189 "occur when assuming that "
11190 "(X + c) >= X is always true"),
11191 WARN_STRICT_OVERFLOW_ALL);
11192 return constant_boolean_node (1, type);
11195 if (TREE_CODE (arg01) == INTEGER_CST)
11197 /* Convert X + c > X and X - c < X to true for integers. */
11198 if (code == GT_EXPR
11199 && ((code0 == PLUS_EXPR && is_positive > 0)
11200 || (code0 == MINUS_EXPR && is_positive < 0)))
11202 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11203 fold_overflow_warning (("assuming signed overflow does "
11204 "not occur when assuming that "
11205 "(X + c) > X is always true"),
11206 WARN_STRICT_OVERFLOW_ALL);
11207 return constant_boolean_node (1, type);
11210 if (code == LT_EXPR
11211 && ((code0 == MINUS_EXPR && is_positive > 0)
11212 || (code0 == PLUS_EXPR && is_positive < 0)))
11214 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11215 fold_overflow_warning (("assuming signed overflow does "
11216 "not occur when assuming that "
11217 "(X - c) < X is always true"),
11218 WARN_STRICT_OVERFLOW_ALL);
11219 return constant_boolean_node (1, type);
11222 /* Convert X + c <= X and X - c >= X to false for integers. */
11223 if (code == LE_EXPR
11224 && ((code0 == PLUS_EXPR && is_positive > 0)
11225 || (code0 == MINUS_EXPR && is_positive < 0)))
11227 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11228 fold_overflow_warning (("assuming signed overflow does "
11229 "not occur when assuming that "
11230 "(X + c) <= X is always false"),
11231 WARN_STRICT_OVERFLOW_ALL);
11232 return constant_boolean_node (0, type);
11235 if (code == GE_EXPR
11236 && ((code0 == MINUS_EXPR && is_positive > 0)
11237 || (code0 == PLUS_EXPR && is_positive < 0)))
11239 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11240 fold_overflow_warning (("assuming signed overflow does "
11241 "not occur when assuming that "
11242 "(X - c) >= X is always false"),
11243 WARN_STRICT_OVERFLOW_ALL);
11244 return constant_boolean_node (0, type);
11249 /* If we are comparing an ABS_EXPR with a constant, we can
11250 convert all the cases into explicit comparisons, but they may
11251 well not be faster than doing the ABS and one comparison.
11252 But ABS (X) <= C is a range comparison, which becomes a subtraction
11253 and a comparison, and is probably faster. */
11254 if (code == LE_EXPR
11255 && TREE_CODE (arg1) == INTEGER_CST
11256 && TREE_CODE (arg0) == ABS_EXPR
11257 && ! TREE_SIDE_EFFECTS (arg0)
11258 && (0 != (tem = negate_expr (arg1)))
11259 && TREE_CODE (tem) == INTEGER_CST
11260 && !TREE_OVERFLOW (tem))
11261 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11262 build2 (GE_EXPR, type,
11263 TREE_OPERAND (arg0, 0), tem),
11264 build2 (LE_EXPR, type,
11265 TREE_OPERAND (arg0, 0), arg1));
11267 /* Convert ABS_EXPR<x> >= 0 to true. */
11268 strict_overflow_p = false;
11269 if (code == GE_EXPR
11270 && (integer_zerop (arg1)
11271 || (! HONOR_NANS (arg0)
11272 && real_zerop (arg1)))
11273 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11275 if (strict_overflow_p)
11276 fold_overflow_warning (("assuming signed overflow does not occur "
11277 "when simplifying comparison of "
11278 "absolute value and zero"),
11279 WARN_STRICT_OVERFLOW_CONDITIONAL);
11280 return omit_one_operand_loc (loc, type,
11281 constant_boolean_node (true, type),
11282 arg0);
11285 /* Convert ABS_EXPR<x> < 0 to false. */
11286 strict_overflow_p = false;
11287 if (code == LT_EXPR
11288 && (integer_zerop (arg1) || real_zerop (arg1))
11289 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11291 if (strict_overflow_p)
11292 fold_overflow_warning (("assuming signed overflow does not occur "
11293 "when simplifying comparison of "
11294 "absolute value and zero"),
11295 WARN_STRICT_OVERFLOW_CONDITIONAL);
11296 return omit_one_operand_loc (loc, type,
11297 constant_boolean_node (false, type),
11298 arg0);
11301 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11302 and similarly for >= into !=. */
11303 if ((code == LT_EXPR || code == GE_EXPR)
11304 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11305 && TREE_CODE (arg1) == LSHIFT_EXPR
11306 && integer_onep (TREE_OPERAND (arg1, 0)))
11307 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11308 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11309 TREE_OPERAND (arg1, 1)),
11310 build_zero_cst (TREE_TYPE (arg0)));
11312 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11313 otherwise Y might be >= # of bits in X's type and thus e.g.
11314 (unsigned char) (1 << Y) for Y 15 might be 0.
11315 If the cast is widening, then 1 << Y should have unsigned type,
11316 otherwise if Y is number of bits in the signed shift type minus 1,
11317 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11318 31 might be 0xffffffff80000000. */
11319 if ((code == LT_EXPR || code == GE_EXPR)
11320 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11321 && CONVERT_EXPR_P (arg1)
11322 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11323 && (element_precision (TREE_TYPE (arg1))
11324 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11325 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11326 || (element_precision (TREE_TYPE (arg1))
11327 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11328 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11330 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11331 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11332 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11333 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11334 build_zero_cst (TREE_TYPE (arg0)));
11337 return NULL_TREE;
11339 case UNORDERED_EXPR:
11340 case ORDERED_EXPR:
11341 case UNLT_EXPR:
11342 case UNLE_EXPR:
11343 case UNGT_EXPR:
11344 case UNGE_EXPR:
11345 case UNEQ_EXPR:
11346 case LTGT_EXPR:
11347 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11349 tree targ0 = strip_float_extensions (arg0);
11350 tree targ1 = strip_float_extensions (arg1);
11351 tree newtype = TREE_TYPE (targ0);
11353 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11354 newtype = TREE_TYPE (targ1);
11356 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11357 return fold_build2_loc (loc, code, type,
11358 fold_convert_loc (loc, newtype, targ0),
11359 fold_convert_loc (loc, newtype, targ1));
11362 return NULL_TREE;
11364 case COMPOUND_EXPR:
11365 /* When pedantic, a compound expression can be neither an lvalue
11366 nor an integer constant expression. */
11367 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11368 return NULL_TREE;
11369 /* Don't let (0, 0) be null pointer constant. */
11370 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11371 : fold_convert_loc (loc, type, arg1);
11372 return pedantic_non_lvalue_loc (loc, tem);
11374 case ASSERT_EXPR:
11375 /* An ASSERT_EXPR should never be passed to fold_binary. */
11376 gcc_unreachable ();
11378 default:
11379 return NULL_TREE;
11380 } /* switch (code) */
11383 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11384 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11385 of GOTO_EXPR. */
11387 static tree
11388 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11390 switch (TREE_CODE (*tp))
11392 case LABEL_EXPR:
11393 return *tp;
11395 case GOTO_EXPR:
11396 *walk_subtrees = 0;
11398 /* ... fall through ... */
11400 default:
11401 return NULL_TREE;
11405 /* Return whether the sub-tree ST contains a label which is accessible from
11406 outside the sub-tree. */
11408 static bool
11409 contains_label_p (tree st)
11411 return
11412 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11415 /* Fold a ternary expression of code CODE and type TYPE with operands
11416 OP0, OP1, and OP2. Return the folded expression if folding is
11417 successful. Otherwise, return NULL_TREE. */
11419 tree
11420 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11421 tree op0, tree op1, tree op2)
11423 tree tem;
11424 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11425 enum tree_code_class kind = TREE_CODE_CLASS (code);
11427 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11428 && TREE_CODE_LENGTH (code) == 3);
11430 /* If this is a commutative operation, and OP0 is a constant, move it
11431 to OP1 to reduce the number of tests below. */
11432 if (commutative_ternary_tree_code (code)
11433 && tree_swap_operands_p (op0, op1, true))
11434 return fold_build3_loc (loc, code, type, op1, op0, op2);
11436 tem = generic_simplify (loc, code, type, op0, op1, op2);
11437 if (tem)
11438 return tem;
11440 /* Strip any conversions that don't change the mode. This is safe
11441 for every expression, except for a comparison expression because
11442 its signedness is derived from its operands. So, in the latter
11443 case, only strip conversions that don't change the signedness.
11445 Note that this is done as an internal manipulation within the
11446 constant folder, in order to find the simplest representation of
11447 the arguments so that their form can be studied. In any cases,
11448 the appropriate type conversions should be put back in the tree
11449 that will get out of the constant folder. */
11450 if (op0)
11452 arg0 = op0;
11453 STRIP_NOPS (arg0);
11456 if (op1)
11458 arg1 = op1;
11459 STRIP_NOPS (arg1);
11462 if (op2)
11464 arg2 = op2;
11465 STRIP_NOPS (arg2);
11468 switch (code)
11470 case COMPONENT_REF:
11471 if (TREE_CODE (arg0) == CONSTRUCTOR
11472 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11474 unsigned HOST_WIDE_INT idx;
11475 tree field, value;
11476 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11477 if (field == arg1)
11478 return value;
11480 return NULL_TREE;
11482 case COND_EXPR:
11483 case VEC_COND_EXPR:
11484 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11485 so all simple results must be passed through pedantic_non_lvalue. */
11486 if (TREE_CODE (arg0) == INTEGER_CST)
11488 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11489 tem = integer_zerop (arg0) ? op2 : op1;
11490 /* Only optimize constant conditions when the selected branch
11491 has the same type as the COND_EXPR. This avoids optimizing
11492 away "c ? x : throw", where the throw has a void type.
11493 Avoid throwing away that operand which contains label. */
11494 if ((!TREE_SIDE_EFFECTS (unused_op)
11495 || !contains_label_p (unused_op))
11496 && (! VOID_TYPE_P (TREE_TYPE (tem))
11497 || VOID_TYPE_P (type)))
11498 return pedantic_non_lvalue_loc (loc, tem);
11499 return NULL_TREE;
11501 else if (TREE_CODE (arg0) == VECTOR_CST)
11503 if ((TREE_CODE (arg1) == VECTOR_CST
11504 || TREE_CODE (arg1) == CONSTRUCTOR)
11505 && (TREE_CODE (arg2) == VECTOR_CST
11506 || TREE_CODE (arg2) == CONSTRUCTOR))
11508 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11509 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11510 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11511 for (i = 0; i < nelts; i++)
11513 tree val = VECTOR_CST_ELT (arg0, i);
11514 if (integer_all_onesp (val))
11515 sel[i] = i;
11516 else if (integer_zerop (val))
11517 sel[i] = nelts + i;
11518 else /* Currently unreachable. */
11519 return NULL_TREE;
11521 tree t = fold_vec_perm (type, arg1, arg2, sel);
11522 if (t != NULL_TREE)
11523 return t;
11527 /* If we have A op B ? A : C, we may be able to convert this to a
11528 simpler expression, depending on the operation and the values
11529 of B and C. Signed zeros prevent all of these transformations,
11530 for reasons given above each one.
11532 Also try swapping the arguments and inverting the conditional. */
11533 if (COMPARISON_CLASS_P (arg0)
11534 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11535 arg1, TREE_OPERAND (arg0, 1))
11536 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11538 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11539 if (tem)
11540 return tem;
11543 if (COMPARISON_CLASS_P (arg0)
11544 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11545 op2,
11546 TREE_OPERAND (arg0, 1))
11547 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11549 location_t loc0 = expr_location_or (arg0, loc);
11550 tem = fold_invert_truthvalue (loc0, arg0);
11551 if (tem && COMPARISON_CLASS_P (tem))
11553 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11554 if (tem)
11555 return tem;
11559 /* If the second operand is simpler than the third, swap them
11560 since that produces better jump optimization results. */
11561 if (truth_value_p (TREE_CODE (arg0))
11562 && tree_swap_operands_p (op1, op2, false))
11564 location_t loc0 = expr_location_or (arg0, loc);
11565 /* See if this can be inverted. If it can't, possibly because
11566 it was a floating-point inequality comparison, don't do
11567 anything. */
11568 tem = fold_invert_truthvalue (loc0, arg0);
11569 if (tem)
11570 return fold_build3_loc (loc, code, type, tem, op2, op1);
11573 /* Convert A ? 1 : 0 to simply A. */
11574 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11575 : (integer_onep (op1)
11576 && !VECTOR_TYPE_P (type)))
11577 && integer_zerop (op2)
11578 /* If we try to convert OP0 to our type, the
11579 call to fold will try to move the conversion inside
11580 a COND, which will recurse. In that case, the COND_EXPR
11581 is probably the best choice, so leave it alone. */
11582 && type == TREE_TYPE (arg0))
11583 return pedantic_non_lvalue_loc (loc, arg0);
11585 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11586 over COND_EXPR in cases such as floating point comparisons. */
11587 if (integer_zerop (op1)
11588 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11589 : (integer_onep (op2)
11590 && !VECTOR_TYPE_P (type)))
11591 && truth_value_p (TREE_CODE (arg0)))
11592 return pedantic_non_lvalue_loc (loc,
11593 fold_convert_loc (loc, type,
11594 invert_truthvalue_loc (loc,
11595 arg0)));
11597 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11598 if (TREE_CODE (arg0) == LT_EXPR
11599 && integer_zerop (TREE_OPERAND (arg0, 1))
11600 && integer_zerop (op2)
11601 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11603 /* sign_bit_p looks through both zero and sign extensions,
11604 but for this optimization only sign extensions are
11605 usable. */
11606 tree tem2 = TREE_OPERAND (arg0, 0);
11607 while (tem != tem2)
11609 if (TREE_CODE (tem2) != NOP_EXPR
11610 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11612 tem = NULL_TREE;
11613 break;
11615 tem2 = TREE_OPERAND (tem2, 0);
11617 /* sign_bit_p only checks ARG1 bits within A's precision.
11618 If <sign bit of A> has wider type than A, bits outside
11619 of A's precision in <sign bit of A> need to be checked.
11620 If they are all 0, this optimization needs to be done
11621 in unsigned A's type, if they are all 1 in signed A's type,
11622 otherwise this can't be done. */
11623 if (tem
11624 && TYPE_PRECISION (TREE_TYPE (tem))
11625 < TYPE_PRECISION (TREE_TYPE (arg1))
11626 && TYPE_PRECISION (TREE_TYPE (tem))
11627 < TYPE_PRECISION (type))
11629 int inner_width, outer_width;
11630 tree tem_type;
11632 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11633 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11634 if (outer_width > TYPE_PRECISION (type))
11635 outer_width = TYPE_PRECISION (type);
11637 wide_int mask = wi::shifted_mask
11638 (inner_width, outer_width - inner_width, false,
11639 TYPE_PRECISION (TREE_TYPE (arg1)));
11641 wide_int common = mask & arg1;
11642 if (common == mask)
11644 tem_type = signed_type_for (TREE_TYPE (tem));
11645 tem = fold_convert_loc (loc, tem_type, tem);
11647 else if (common == 0)
11649 tem_type = unsigned_type_for (TREE_TYPE (tem));
11650 tem = fold_convert_loc (loc, tem_type, tem);
11652 else
11653 tem = NULL;
11656 if (tem)
11657 return
11658 fold_convert_loc (loc, type,
11659 fold_build2_loc (loc, BIT_AND_EXPR,
11660 TREE_TYPE (tem), tem,
11661 fold_convert_loc (loc,
11662 TREE_TYPE (tem),
11663 arg1)));
11666 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11667 already handled above. */
11668 if (TREE_CODE (arg0) == BIT_AND_EXPR
11669 && integer_onep (TREE_OPERAND (arg0, 1))
11670 && integer_zerop (op2)
11671 && integer_pow2p (arg1))
11673 tree tem = TREE_OPERAND (arg0, 0);
11674 STRIP_NOPS (tem);
11675 if (TREE_CODE (tem) == RSHIFT_EXPR
11676 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11677 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11678 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11679 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11680 TREE_OPERAND (tem, 0), arg1);
11683 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11684 is probably obsolete because the first operand should be a
11685 truth value (that's why we have the two cases above), but let's
11686 leave it in until we can confirm this for all front-ends. */
11687 if (integer_zerop (op2)
11688 && TREE_CODE (arg0) == NE_EXPR
11689 && integer_zerop (TREE_OPERAND (arg0, 1))
11690 && integer_pow2p (arg1)
11691 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11692 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11693 arg1, OEP_ONLY_CONST))
11694 return pedantic_non_lvalue_loc (loc,
11695 fold_convert_loc (loc, type,
11696 TREE_OPERAND (arg0, 0)));
11698 /* Disable the transformations below for vectors, since
11699 fold_binary_op_with_conditional_arg may undo them immediately,
11700 yielding an infinite loop. */
11701 if (code == VEC_COND_EXPR)
11702 return NULL_TREE;
11704 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11705 if (integer_zerop (op2)
11706 && truth_value_p (TREE_CODE (arg0))
11707 && truth_value_p (TREE_CODE (arg1))
11708 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11709 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11710 : TRUTH_ANDIF_EXPR,
11711 type, fold_convert_loc (loc, type, arg0), arg1);
11713 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11714 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11715 && truth_value_p (TREE_CODE (arg0))
11716 && truth_value_p (TREE_CODE (arg1))
11717 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11719 location_t loc0 = expr_location_or (arg0, loc);
11720 /* Only perform transformation if ARG0 is easily inverted. */
11721 tem = fold_invert_truthvalue (loc0, arg0);
11722 if (tem)
11723 return fold_build2_loc (loc, code == VEC_COND_EXPR
11724 ? BIT_IOR_EXPR
11725 : TRUTH_ORIF_EXPR,
11726 type, fold_convert_loc (loc, type, tem),
11727 arg1);
11730 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11731 if (integer_zerop (arg1)
11732 && truth_value_p (TREE_CODE (arg0))
11733 && truth_value_p (TREE_CODE (op2))
11734 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11736 location_t loc0 = expr_location_or (arg0, loc);
11737 /* Only perform transformation if ARG0 is easily inverted. */
11738 tem = fold_invert_truthvalue (loc0, arg0);
11739 if (tem)
11740 return fold_build2_loc (loc, code == VEC_COND_EXPR
11741 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11742 type, fold_convert_loc (loc, type, tem),
11743 op2);
11746 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11747 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11748 && truth_value_p (TREE_CODE (arg0))
11749 && truth_value_p (TREE_CODE (op2))
11750 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11751 return fold_build2_loc (loc, code == VEC_COND_EXPR
11752 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11753 type, fold_convert_loc (loc, type, arg0), op2);
11755 return NULL_TREE;
11757 case CALL_EXPR:
11758 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11759 of fold_ternary on them. */
11760 gcc_unreachable ();
11762 case BIT_FIELD_REF:
11763 if ((TREE_CODE (arg0) == VECTOR_CST
11764 || (TREE_CODE (arg0) == CONSTRUCTOR
11765 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11766 && (type == TREE_TYPE (TREE_TYPE (arg0))
11767 || (TREE_CODE (type) == VECTOR_TYPE
11768 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11770 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11771 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11772 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11773 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11775 if (n != 0
11776 && (idx % width) == 0
11777 && (n % width) == 0
11778 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11780 idx = idx / width;
11781 n = n / width;
11783 if (TREE_CODE (arg0) == VECTOR_CST)
11785 if (n == 1)
11786 return VECTOR_CST_ELT (arg0, idx);
11788 tree *vals = XALLOCAVEC (tree, n);
11789 for (unsigned i = 0; i < n; ++i)
11790 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11791 return build_vector (type, vals);
11794 /* Constructor elements can be subvectors. */
11795 unsigned HOST_WIDE_INT k = 1;
11796 if (CONSTRUCTOR_NELTS (arg0) != 0)
11798 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11799 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11800 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11803 /* We keep an exact subset of the constructor elements. */
11804 if ((idx % k) == 0 && (n % k) == 0)
11806 if (CONSTRUCTOR_NELTS (arg0) == 0)
11807 return build_constructor (type, NULL);
11808 idx /= k;
11809 n /= k;
11810 if (n == 1)
11812 if (idx < CONSTRUCTOR_NELTS (arg0))
11813 return CONSTRUCTOR_ELT (arg0, idx)->value;
11814 return build_zero_cst (type);
11817 vec<constructor_elt, va_gc> *vals;
11818 vec_alloc (vals, n);
11819 for (unsigned i = 0;
11820 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11821 ++i)
11822 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11823 CONSTRUCTOR_ELT
11824 (arg0, idx + i)->value);
11825 return build_constructor (type, vals);
11827 /* The bitfield references a single constructor element. */
11828 else if (idx + n <= (idx / k + 1) * k)
11830 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11831 return build_zero_cst (type);
11832 else if (n == k)
11833 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11834 else
11835 return fold_build3_loc (loc, code, type,
11836 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11837 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11842 /* A bit-field-ref that referenced the full argument can be stripped. */
11843 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11844 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11845 && integer_zerop (op2))
11846 return fold_convert_loc (loc, type, arg0);
11848 /* On constants we can use native encode/interpret to constant
11849 fold (nearly) all BIT_FIELD_REFs. */
11850 if (CONSTANT_CLASS_P (arg0)
11851 && can_native_interpret_type_p (type)
11852 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11853 /* This limitation should not be necessary, we just need to
11854 round this up to mode size. */
11855 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11856 /* Need bit-shifting of the buffer to relax the following. */
11857 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11859 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11860 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11861 unsigned HOST_WIDE_INT clen;
11862 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11863 /* ??? We cannot tell native_encode_expr to start at
11864 some random byte only. So limit us to a reasonable amount
11865 of work. */
11866 if (clen <= 4096)
11868 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11869 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11870 if (len > 0
11871 && len * BITS_PER_UNIT >= bitpos + bitsize)
11873 tree v = native_interpret_expr (type,
11874 b + bitpos / BITS_PER_UNIT,
11875 bitsize / BITS_PER_UNIT);
11876 if (v)
11877 return v;
11882 return NULL_TREE;
11884 case FMA_EXPR:
11885 /* For integers we can decompose the FMA if possible. */
11886 if (TREE_CODE (arg0) == INTEGER_CST
11887 && TREE_CODE (arg1) == INTEGER_CST)
11888 return fold_build2_loc (loc, PLUS_EXPR, type,
11889 const_binop (MULT_EXPR, arg0, arg1), arg2);
11890 if (integer_zerop (arg2))
11891 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11893 return fold_fma (loc, type, arg0, arg1, arg2);
11895 case VEC_PERM_EXPR:
11896 if (TREE_CODE (arg2) == VECTOR_CST)
11898 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11899 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11900 unsigned char *sel2 = sel + nelts;
11901 bool need_mask_canon = false;
11902 bool need_mask_canon2 = false;
11903 bool all_in_vec0 = true;
11904 bool all_in_vec1 = true;
11905 bool maybe_identity = true;
11906 bool single_arg = (op0 == op1);
11907 bool changed = false;
11909 mask2 = 2 * nelts - 1;
11910 mask = single_arg ? (nelts - 1) : mask2;
11911 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11912 for (i = 0; i < nelts; i++)
11914 tree val = VECTOR_CST_ELT (arg2, i);
11915 if (TREE_CODE (val) != INTEGER_CST)
11916 return NULL_TREE;
11918 /* Make sure that the perm value is in an acceptable
11919 range. */
11920 wide_int t = val;
11921 need_mask_canon |= wi::gtu_p (t, mask);
11922 need_mask_canon2 |= wi::gtu_p (t, mask2);
11923 sel[i] = t.to_uhwi () & mask;
11924 sel2[i] = t.to_uhwi () & mask2;
11926 if (sel[i] < nelts)
11927 all_in_vec1 = false;
11928 else
11929 all_in_vec0 = false;
11931 if ((sel[i] & (nelts-1)) != i)
11932 maybe_identity = false;
11935 if (maybe_identity)
11937 if (all_in_vec0)
11938 return op0;
11939 if (all_in_vec1)
11940 return op1;
11943 if (all_in_vec0)
11944 op1 = op0;
11945 else if (all_in_vec1)
11947 op0 = op1;
11948 for (i = 0; i < nelts; i++)
11949 sel[i] -= nelts;
11950 need_mask_canon = true;
11953 if ((TREE_CODE (op0) == VECTOR_CST
11954 || TREE_CODE (op0) == CONSTRUCTOR)
11955 && (TREE_CODE (op1) == VECTOR_CST
11956 || TREE_CODE (op1) == CONSTRUCTOR))
11958 tree t = fold_vec_perm (type, op0, op1, sel);
11959 if (t != NULL_TREE)
11960 return t;
11963 if (op0 == op1 && !single_arg)
11964 changed = true;
11966 /* Some targets are deficient and fail to expand a single
11967 argument permutation while still allowing an equivalent
11968 2-argument version. */
11969 if (need_mask_canon && arg2 == op2
11970 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11971 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11973 need_mask_canon = need_mask_canon2;
11974 sel = sel2;
11977 if (need_mask_canon && arg2 == op2)
11979 tree *tsel = XALLOCAVEC (tree, nelts);
11980 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11981 for (i = 0; i < nelts; i++)
11982 tsel[i] = build_int_cst (eltype, sel[i]);
11983 op2 = build_vector (TREE_TYPE (arg2), tsel);
11984 changed = true;
11987 if (changed)
11988 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11990 return NULL_TREE;
11992 default:
11993 return NULL_TREE;
11994 } /* switch (code) */
11997 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11998 of an array (or vector). */
12000 tree
12001 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
12003 tree index_type = NULL_TREE;
12004 offset_int low_bound = 0;
12006 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12008 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12009 if (domain_type && TYPE_MIN_VALUE (domain_type))
12011 /* Static constructors for variably sized objects makes no sense. */
12012 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12013 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12014 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12018 if (index_type)
12019 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12020 TYPE_SIGN (index_type));
12022 offset_int index = low_bound - 1;
12023 if (index_type)
12024 index = wi::ext (index, TYPE_PRECISION (index_type),
12025 TYPE_SIGN (index_type));
12027 offset_int max_index;
12028 unsigned HOST_WIDE_INT cnt;
12029 tree cfield, cval;
12031 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12033 /* Array constructor might explicitly set index, or specify a range,
12034 or leave index NULL meaning that it is next index after previous
12035 one. */
12036 if (cfield)
12038 if (TREE_CODE (cfield) == INTEGER_CST)
12039 max_index = index = wi::to_offset (cfield);
12040 else
12042 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12043 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12044 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12047 else
12049 index += 1;
12050 if (index_type)
12051 index = wi::ext (index, TYPE_PRECISION (index_type),
12052 TYPE_SIGN (index_type));
12053 max_index = index;
12056 /* Do we have match? */
12057 if (wi::cmpu (access_index, index) >= 0
12058 && wi::cmpu (access_index, max_index) <= 0)
12059 return cval;
12061 return NULL_TREE;
12064 /* Perform constant folding and related simplification of EXPR.
12065 The related simplifications include x*1 => x, x*0 => 0, etc.,
12066 and application of the associative law.
12067 NOP_EXPR conversions may be removed freely (as long as we
12068 are careful not to change the type of the overall expression).
12069 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12070 but we can constant-fold them if they have constant operands. */
12072 #ifdef ENABLE_FOLD_CHECKING
12073 # define fold(x) fold_1 (x)
12074 static tree fold_1 (tree);
12075 static
12076 #endif
12077 tree
12078 fold (tree expr)
12080 const tree t = expr;
12081 enum tree_code code = TREE_CODE (t);
12082 enum tree_code_class kind = TREE_CODE_CLASS (code);
12083 tree tem;
12084 location_t loc = EXPR_LOCATION (expr);
12086 /* Return right away if a constant. */
12087 if (kind == tcc_constant)
12088 return t;
12090 /* CALL_EXPR-like objects with variable numbers of operands are
12091 treated specially. */
12092 if (kind == tcc_vl_exp)
12094 if (code == CALL_EXPR)
12096 tem = fold_call_expr (loc, expr, false);
12097 return tem ? tem : expr;
12099 return expr;
12102 if (IS_EXPR_CODE_CLASS (kind))
12104 tree type = TREE_TYPE (t);
12105 tree op0, op1, op2;
12107 switch (TREE_CODE_LENGTH (code))
12109 case 1:
12110 op0 = TREE_OPERAND (t, 0);
12111 tem = fold_unary_loc (loc, code, type, op0);
12112 return tem ? tem : expr;
12113 case 2:
12114 op0 = TREE_OPERAND (t, 0);
12115 op1 = TREE_OPERAND (t, 1);
12116 tem = fold_binary_loc (loc, code, type, op0, op1);
12117 return tem ? tem : expr;
12118 case 3:
12119 op0 = TREE_OPERAND (t, 0);
12120 op1 = TREE_OPERAND (t, 1);
12121 op2 = TREE_OPERAND (t, 2);
12122 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12123 return tem ? tem : expr;
12124 default:
12125 break;
12129 switch (code)
12131 case ARRAY_REF:
12133 tree op0 = TREE_OPERAND (t, 0);
12134 tree op1 = TREE_OPERAND (t, 1);
12136 if (TREE_CODE (op1) == INTEGER_CST
12137 && TREE_CODE (op0) == CONSTRUCTOR
12138 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12140 tree val = get_array_ctor_element_at_index (op0,
12141 wi::to_offset (op1));
12142 if (val)
12143 return val;
12146 return t;
12149 /* Return a VECTOR_CST if possible. */
12150 case CONSTRUCTOR:
12152 tree type = TREE_TYPE (t);
12153 if (TREE_CODE (type) != VECTOR_TYPE)
12154 return t;
12156 unsigned i;
12157 tree val;
12158 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12159 if (! CONSTANT_CLASS_P (val))
12160 return t;
12162 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12165 case CONST_DECL:
12166 return fold (DECL_INITIAL (t));
12168 default:
12169 return t;
12170 } /* switch (code) */
12173 #ifdef ENABLE_FOLD_CHECKING
12174 #undef fold
12176 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12177 hash_table<nofree_ptr_hash<const tree_node> > *);
12178 static void fold_check_failed (const_tree, const_tree);
12179 void print_fold_checksum (const_tree);
12181 /* When --enable-checking=fold, compute a digest of expr before
12182 and after actual fold call to see if fold did not accidentally
12183 change original expr. */
12185 tree
12186 fold (tree expr)
12188 tree ret;
12189 struct md5_ctx ctx;
12190 unsigned char checksum_before[16], checksum_after[16];
12191 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12193 md5_init_ctx (&ctx);
12194 fold_checksum_tree (expr, &ctx, &ht);
12195 md5_finish_ctx (&ctx, checksum_before);
12196 ht.empty ();
12198 ret = fold_1 (expr);
12200 md5_init_ctx (&ctx);
12201 fold_checksum_tree (expr, &ctx, &ht);
12202 md5_finish_ctx (&ctx, checksum_after);
12204 if (memcmp (checksum_before, checksum_after, 16))
12205 fold_check_failed (expr, ret);
12207 return ret;
12210 void
12211 print_fold_checksum (const_tree expr)
12213 struct md5_ctx ctx;
12214 unsigned char checksum[16], cnt;
12215 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12217 md5_init_ctx (&ctx);
12218 fold_checksum_tree (expr, &ctx, &ht);
12219 md5_finish_ctx (&ctx, checksum);
12220 for (cnt = 0; cnt < 16; ++cnt)
12221 fprintf (stderr, "%02x", checksum[cnt]);
12222 putc ('\n', stderr);
12225 static void
12226 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12228 internal_error ("fold check: original tree changed by fold");
12231 static void
12232 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12233 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12235 const tree_node **slot;
12236 enum tree_code code;
12237 union tree_node buf;
12238 int i, len;
12240 recursive_label:
12241 if (expr == NULL)
12242 return;
12243 slot = ht->find_slot (expr, INSERT);
12244 if (*slot != NULL)
12245 return;
12246 *slot = expr;
12247 code = TREE_CODE (expr);
12248 if (TREE_CODE_CLASS (code) == tcc_declaration
12249 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12251 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12252 memcpy ((char *) &buf, expr, tree_size (expr));
12253 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12254 buf.decl_with_vis.symtab_node = NULL;
12255 expr = (tree) &buf;
12257 else if (TREE_CODE_CLASS (code) == tcc_type
12258 && (TYPE_POINTER_TO (expr)
12259 || TYPE_REFERENCE_TO (expr)
12260 || TYPE_CACHED_VALUES_P (expr)
12261 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12262 || TYPE_NEXT_VARIANT (expr)))
12264 /* Allow these fields to be modified. */
12265 tree tmp;
12266 memcpy ((char *) &buf, expr, tree_size (expr));
12267 expr = tmp = (tree) &buf;
12268 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12269 TYPE_POINTER_TO (tmp) = NULL;
12270 TYPE_REFERENCE_TO (tmp) = NULL;
12271 TYPE_NEXT_VARIANT (tmp) = NULL;
12272 if (TYPE_CACHED_VALUES_P (tmp))
12274 TYPE_CACHED_VALUES_P (tmp) = 0;
12275 TYPE_CACHED_VALUES (tmp) = NULL;
12278 md5_process_bytes (expr, tree_size (expr), ctx);
12279 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12280 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12281 if (TREE_CODE_CLASS (code) != tcc_type
12282 && TREE_CODE_CLASS (code) != tcc_declaration
12283 && code != TREE_LIST
12284 && code != SSA_NAME
12285 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12286 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12287 switch (TREE_CODE_CLASS (code))
12289 case tcc_constant:
12290 switch (code)
12292 case STRING_CST:
12293 md5_process_bytes (TREE_STRING_POINTER (expr),
12294 TREE_STRING_LENGTH (expr), ctx);
12295 break;
12296 case COMPLEX_CST:
12297 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12298 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12299 break;
12300 case VECTOR_CST:
12301 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12302 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12303 break;
12304 default:
12305 break;
12307 break;
12308 case tcc_exceptional:
12309 switch (code)
12311 case TREE_LIST:
12312 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12313 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12314 expr = TREE_CHAIN (expr);
12315 goto recursive_label;
12316 break;
12317 case TREE_VEC:
12318 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12319 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12320 break;
12321 default:
12322 break;
12324 break;
12325 case tcc_expression:
12326 case tcc_reference:
12327 case tcc_comparison:
12328 case tcc_unary:
12329 case tcc_binary:
12330 case tcc_statement:
12331 case tcc_vl_exp:
12332 len = TREE_OPERAND_LENGTH (expr);
12333 for (i = 0; i < len; ++i)
12334 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12335 break;
12336 case tcc_declaration:
12337 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12338 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12339 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12341 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12342 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12343 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12344 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12345 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12348 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12350 if (TREE_CODE (expr) == FUNCTION_DECL)
12352 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12353 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12355 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12357 break;
12358 case tcc_type:
12359 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12360 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12361 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12362 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12363 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12364 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12365 if (INTEGRAL_TYPE_P (expr)
12366 || SCALAR_FLOAT_TYPE_P (expr))
12368 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12369 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12371 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12372 if (TREE_CODE (expr) == RECORD_TYPE
12373 || TREE_CODE (expr) == UNION_TYPE
12374 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12375 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12376 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12377 break;
12378 default:
12379 break;
12383 /* Helper function for outputting the checksum of a tree T. When
12384 debugging with gdb, you can "define mynext" to be "next" followed
12385 by "call debug_fold_checksum (op0)", then just trace down till the
12386 outputs differ. */
12388 DEBUG_FUNCTION void
12389 debug_fold_checksum (const_tree t)
12391 int i;
12392 unsigned char checksum[16];
12393 struct md5_ctx ctx;
12394 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12396 md5_init_ctx (&ctx);
12397 fold_checksum_tree (t, &ctx, &ht);
12398 md5_finish_ctx (&ctx, checksum);
12399 ht.empty ();
12401 for (i = 0; i < 16; i++)
12402 fprintf (stderr, "%d ", checksum[i]);
12404 fprintf (stderr, "\n");
12407 #endif
12409 /* Fold a unary tree expression with code CODE of type TYPE with an
12410 operand OP0. LOC is the location of the resulting expression.
12411 Return a folded expression if successful. Otherwise, return a tree
12412 expression with code CODE of type TYPE with an operand OP0. */
12414 tree
12415 fold_build1_stat_loc (location_t loc,
12416 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12418 tree tem;
12419 #ifdef ENABLE_FOLD_CHECKING
12420 unsigned char checksum_before[16], checksum_after[16];
12421 struct md5_ctx ctx;
12422 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12424 md5_init_ctx (&ctx);
12425 fold_checksum_tree (op0, &ctx, &ht);
12426 md5_finish_ctx (&ctx, checksum_before);
12427 ht.empty ();
12428 #endif
12430 tem = fold_unary_loc (loc, code, type, op0);
12431 if (!tem)
12432 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12434 #ifdef ENABLE_FOLD_CHECKING
12435 md5_init_ctx (&ctx);
12436 fold_checksum_tree (op0, &ctx, &ht);
12437 md5_finish_ctx (&ctx, checksum_after);
12439 if (memcmp (checksum_before, checksum_after, 16))
12440 fold_check_failed (op0, tem);
12441 #endif
12442 return tem;
12445 /* Fold a binary tree expression with code CODE of type TYPE with
12446 operands OP0 and OP1. LOC is the location of the resulting
12447 expression. Return a folded expression if successful. Otherwise,
12448 return a tree expression with code CODE of type TYPE with operands
12449 OP0 and OP1. */
12451 tree
12452 fold_build2_stat_loc (location_t loc,
12453 enum tree_code code, tree type, tree op0, tree op1
12454 MEM_STAT_DECL)
12456 tree tem;
12457 #ifdef ENABLE_FOLD_CHECKING
12458 unsigned char checksum_before_op0[16],
12459 checksum_before_op1[16],
12460 checksum_after_op0[16],
12461 checksum_after_op1[16];
12462 struct md5_ctx ctx;
12463 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12465 md5_init_ctx (&ctx);
12466 fold_checksum_tree (op0, &ctx, &ht);
12467 md5_finish_ctx (&ctx, checksum_before_op0);
12468 ht.empty ();
12470 md5_init_ctx (&ctx);
12471 fold_checksum_tree (op1, &ctx, &ht);
12472 md5_finish_ctx (&ctx, checksum_before_op1);
12473 ht.empty ();
12474 #endif
12476 tem = fold_binary_loc (loc, code, type, op0, op1);
12477 if (!tem)
12478 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12480 #ifdef ENABLE_FOLD_CHECKING
12481 md5_init_ctx (&ctx);
12482 fold_checksum_tree (op0, &ctx, &ht);
12483 md5_finish_ctx (&ctx, checksum_after_op0);
12484 ht.empty ();
12486 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12487 fold_check_failed (op0, tem);
12489 md5_init_ctx (&ctx);
12490 fold_checksum_tree (op1, &ctx, &ht);
12491 md5_finish_ctx (&ctx, checksum_after_op1);
12493 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12494 fold_check_failed (op1, tem);
12495 #endif
12496 return tem;
12499 /* Fold a ternary tree expression with code CODE of type TYPE with
12500 operands OP0, OP1, and OP2. Return a folded expression if
12501 successful. Otherwise, return a tree expression with code CODE of
12502 type TYPE with operands OP0, OP1, and OP2. */
12504 tree
12505 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12506 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12508 tree tem;
12509 #ifdef ENABLE_FOLD_CHECKING
12510 unsigned char checksum_before_op0[16],
12511 checksum_before_op1[16],
12512 checksum_before_op2[16],
12513 checksum_after_op0[16],
12514 checksum_after_op1[16],
12515 checksum_after_op2[16];
12516 struct md5_ctx ctx;
12517 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12519 md5_init_ctx (&ctx);
12520 fold_checksum_tree (op0, &ctx, &ht);
12521 md5_finish_ctx (&ctx, checksum_before_op0);
12522 ht.empty ();
12524 md5_init_ctx (&ctx);
12525 fold_checksum_tree (op1, &ctx, &ht);
12526 md5_finish_ctx (&ctx, checksum_before_op1);
12527 ht.empty ();
12529 md5_init_ctx (&ctx);
12530 fold_checksum_tree (op2, &ctx, &ht);
12531 md5_finish_ctx (&ctx, checksum_before_op2);
12532 ht.empty ();
12533 #endif
12535 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12536 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12537 if (!tem)
12538 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12540 #ifdef ENABLE_FOLD_CHECKING
12541 md5_init_ctx (&ctx);
12542 fold_checksum_tree (op0, &ctx, &ht);
12543 md5_finish_ctx (&ctx, checksum_after_op0);
12544 ht.empty ();
12546 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12547 fold_check_failed (op0, tem);
12549 md5_init_ctx (&ctx);
12550 fold_checksum_tree (op1, &ctx, &ht);
12551 md5_finish_ctx (&ctx, checksum_after_op1);
12552 ht.empty ();
12554 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12555 fold_check_failed (op1, tem);
12557 md5_init_ctx (&ctx);
12558 fold_checksum_tree (op2, &ctx, &ht);
12559 md5_finish_ctx (&ctx, checksum_after_op2);
12561 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12562 fold_check_failed (op2, tem);
12563 #endif
12564 return tem;
12567 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12568 arguments in ARGARRAY, and a null static chain.
12569 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12570 of type TYPE from the given operands as constructed by build_call_array. */
12572 tree
12573 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12574 int nargs, tree *argarray)
12576 tree tem;
12577 #ifdef ENABLE_FOLD_CHECKING
12578 unsigned char checksum_before_fn[16],
12579 checksum_before_arglist[16],
12580 checksum_after_fn[16],
12581 checksum_after_arglist[16];
12582 struct md5_ctx ctx;
12583 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12584 int i;
12586 md5_init_ctx (&ctx);
12587 fold_checksum_tree (fn, &ctx, &ht);
12588 md5_finish_ctx (&ctx, checksum_before_fn);
12589 ht.empty ();
12591 md5_init_ctx (&ctx);
12592 for (i = 0; i < nargs; i++)
12593 fold_checksum_tree (argarray[i], &ctx, &ht);
12594 md5_finish_ctx (&ctx, checksum_before_arglist);
12595 ht.empty ();
12596 #endif
12598 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12599 if (!tem)
12600 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12602 #ifdef ENABLE_FOLD_CHECKING
12603 md5_init_ctx (&ctx);
12604 fold_checksum_tree (fn, &ctx, &ht);
12605 md5_finish_ctx (&ctx, checksum_after_fn);
12606 ht.empty ();
12608 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12609 fold_check_failed (fn, tem);
12611 md5_init_ctx (&ctx);
12612 for (i = 0; i < nargs; i++)
12613 fold_checksum_tree (argarray[i], &ctx, &ht);
12614 md5_finish_ctx (&ctx, checksum_after_arglist);
12616 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12617 fold_check_failed (NULL_TREE, tem);
12618 #endif
12619 return tem;
12622 /* Perform constant folding and related simplification of initializer
12623 expression EXPR. These behave identically to "fold_buildN" but ignore
12624 potential run-time traps and exceptions that fold must preserve. */
12626 #define START_FOLD_INIT \
12627 int saved_signaling_nans = flag_signaling_nans;\
12628 int saved_trapping_math = flag_trapping_math;\
12629 int saved_rounding_math = flag_rounding_math;\
12630 int saved_trapv = flag_trapv;\
12631 int saved_folding_initializer = folding_initializer;\
12632 flag_signaling_nans = 0;\
12633 flag_trapping_math = 0;\
12634 flag_rounding_math = 0;\
12635 flag_trapv = 0;\
12636 folding_initializer = 1;
12638 #define END_FOLD_INIT \
12639 flag_signaling_nans = saved_signaling_nans;\
12640 flag_trapping_math = saved_trapping_math;\
12641 flag_rounding_math = saved_rounding_math;\
12642 flag_trapv = saved_trapv;\
12643 folding_initializer = saved_folding_initializer;
12645 tree
12646 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12647 tree type, tree op)
12649 tree result;
12650 START_FOLD_INIT;
12652 result = fold_build1_loc (loc, code, type, op);
12654 END_FOLD_INIT;
12655 return result;
12658 tree
12659 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12660 tree type, tree op0, tree op1)
12662 tree result;
12663 START_FOLD_INIT;
12665 result = fold_build2_loc (loc, code, type, op0, op1);
12667 END_FOLD_INIT;
12668 return result;
12671 tree
12672 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12673 int nargs, tree *argarray)
12675 tree result;
12676 START_FOLD_INIT;
12678 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12680 END_FOLD_INIT;
12681 return result;
12684 #undef START_FOLD_INIT
12685 #undef END_FOLD_INIT
12687 /* Determine if first argument is a multiple of second argument. Return 0 if
12688 it is not, or we cannot easily determined it to be.
12690 An example of the sort of thing we care about (at this point; this routine
12691 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12692 fold cases do now) is discovering that
12694 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12696 is a multiple of
12698 SAVE_EXPR (J * 8)
12700 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12702 This code also handles discovering that
12704 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12706 is a multiple of 8 so we don't have to worry about dealing with a
12707 possible remainder.
12709 Note that we *look* inside a SAVE_EXPR only to determine how it was
12710 calculated; it is not safe for fold to do much of anything else with the
12711 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12712 at run time. For example, the latter example above *cannot* be implemented
12713 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12714 evaluation time of the original SAVE_EXPR is not necessarily the same at
12715 the time the new expression is evaluated. The only optimization of this
12716 sort that would be valid is changing
12718 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12720 divided by 8 to
12722 SAVE_EXPR (I) * SAVE_EXPR (J)
12724 (where the same SAVE_EXPR (J) is used in the original and the
12725 transformed version). */
12728 multiple_of_p (tree type, const_tree top, const_tree bottom)
12730 if (operand_equal_p (top, bottom, 0))
12731 return 1;
12733 if (TREE_CODE (type) != INTEGER_TYPE)
12734 return 0;
12736 switch (TREE_CODE (top))
12738 case BIT_AND_EXPR:
12739 /* Bitwise and provides a power of two multiple. If the mask is
12740 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12741 if (!integer_pow2p (bottom))
12742 return 0;
12743 /* FALLTHRU */
12745 case MULT_EXPR:
12746 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12747 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12749 case PLUS_EXPR:
12750 case MINUS_EXPR:
12751 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12752 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12754 case LSHIFT_EXPR:
12755 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12757 tree op1, t1;
12759 op1 = TREE_OPERAND (top, 1);
12760 /* const_binop may not detect overflow correctly,
12761 so check for it explicitly here. */
12762 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12763 && 0 != (t1 = fold_convert (type,
12764 const_binop (LSHIFT_EXPR,
12765 size_one_node,
12766 op1)))
12767 && !TREE_OVERFLOW (t1))
12768 return multiple_of_p (type, t1, bottom);
12770 return 0;
12772 case NOP_EXPR:
12773 /* Can't handle conversions from non-integral or wider integral type. */
12774 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12775 || (TYPE_PRECISION (type)
12776 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12777 return 0;
12779 /* .. fall through ... */
12781 case SAVE_EXPR:
12782 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12784 case COND_EXPR:
12785 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12786 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12788 case INTEGER_CST:
12789 if (TREE_CODE (bottom) != INTEGER_CST
12790 || integer_zerop (bottom)
12791 || (TYPE_UNSIGNED (type)
12792 && (tree_int_cst_sgn (top) < 0
12793 || tree_int_cst_sgn (bottom) < 0)))
12794 return 0;
12795 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12796 SIGNED);
12798 default:
12799 return 0;
12803 #define tree_expr_nonnegative_warnv_p(X, Y) \
12804 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12806 #define RECURSE(X) \
12807 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12809 /* Return true if CODE or TYPE is known to be non-negative. */
12811 static bool
12812 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12814 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12815 && truth_value_p (code))
12816 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12817 have a signed:1 type (where the value is -1 and 0). */
12818 return true;
12819 return false;
12822 /* Return true if (CODE OP0) is known to be non-negative. If the return
12823 value is based on the assumption that signed overflow is undefined,
12824 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12825 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12827 bool
12828 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12829 bool *strict_overflow_p, int depth)
12831 if (TYPE_UNSIGNED (type))
12832 return true;
12834 switch (code)
12836 case ABS_EXPR:
12837 /* We can't return 1 if flag_wrapv is set because
12838 ABS_EXPR<INT_MIN> = INT_MIN. */
12839 if (!ANY_INTEGRAL_TYPE_P (type))
12840 return true;
12841 if (TYPE_OVERFLOW_UNDEFINED (type))
12843 *strict_overflow_p = true;
12844 return true;
12846 break;
12848 case NON_LVALUE_EXPR:
12849 case FLOAT_EXPR:
12850 case FIX_TRUNC_EXPR:
12851 return RECURSE (op0);
12853 CASE_CONVERT:
12855 tree inner_type = TREE_TYPE (op0);
12856 tree outer_type = type;
12858 if (TREE_CODE (outer_type) == REAL_TYPE)
12860 if (TREE_CODE (inner_type) == REAL_TYPE)
12861 return RECURSE (op0);
12862 if (INTEGRAL_TYPE_P (inner_type))
12864 if (TYPE_UNSIGNED (inner_type))
12865 return true;
12866 return RECURSE (op0);
12869 else if (INTEGRAL_TYPE_P (outer_type))
12871 if (TREE_CODE (inner_type) == REAL_TYPE)
12872 return RECURSE (op0);
12873 if (INTEGRAL_TYPE_P (inner_type))
12874 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12875 && TYPE_UNSIGNED (inner_type);
12878 break;
12880 default:
12881 return tree_simple_nonnegative_warnv_p (code, type);
12884 /* We don't know sign of `t', so be conservative and return false. */
12885 return false;
12888 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12889 value is based on the assumption that signed overflow is undefined,
12890 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12891 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12893 bool
12894 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12895 tree op1, bool *strict_overflow_p,
12896 int depth)
12898 if (TYPE_UNSIGNED (type))
12899 return true;
12901 switch (code)
12903 case POINTER_PLUS_EXPR:
12904 case PLUS_EXPR:
12905 if (FLOAT_TYPE_P (type))
12906 return RECURSE (op0) && RECURSE (op1);
12908 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12909 both unsigned and at least 2 bits shorter than the result. */
12910 if (TREE_CODE (type) == INTEGER_TYPE
12911 && TREE_CODE (op0) == NOP_EXPR
12912 && TREE_CODE (op1) == NOP_EXPR)
12914 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12915 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12916 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12917 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12919 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12920 TYPE_PRECISION (inner2)) + 1;
12921 return prec < TYPE_PRECISION (type);
12924 break;
12926 case MULT_EXPR:
12927 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12929 /* x * x is always non-negative for floating point x
12930 or without overflow. */
12931 if (operand_equal_p (op0, op1, 0)
12932 || (RECURSE (op0) && RECURSE (op1)))
12934 if (ANY_INTEGRAL_TYPE_P (type)
12935 && TYPE_OVERFLOW_UNDEFINED (type))
12936 *strict_overflow_p = true;
12937 return true;
12941 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12942 both unsigned and their total bits is shorter than the result. */
12943 if (TREE_CODE (type) == INTEGER_TYPE
12944 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12945 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12947 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12948 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12949 : TREE_TYPE (op0);
12950 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12951 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12952 : TREE_TYPE (op1);
12954 bool unsigned0 = TYPE_UNSIGNED (inner0);
12955 bool unsigned1 = TYPE_UNSIGNED (inner1);
12957 if (TREE_CODE (op0) == INTEGER_CST)
12958 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12960 if (TREE_CODE (op1) == INTEGER_CST)
12961 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12963 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12964 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12966 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12967 ? tree_int_cst_min_precision (op0, UNSIGNED)
12968 : TYPE_PRECISION (inner0);
12970 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12971 ? tree_int_cst_min_precision (op1, UNSIGNED)
12972 : TYPE_PRECISION (inner1);
12974 return precision0 + precision1 < TYPE_PRECISION (type);
12977 return false;
12979 case BIT_AND_EXPR:
12980 case MAX_EXPR:
12981 return RECURSE (op0) || RECURSE (op1);
12983 case BIT_IOR_EXPR:
12984 case BIT_XOR_EXPR:
12985 case MIN_EXPR:
12986 case RDIV_EXPR:
12987 case TRUNC_DIV_EXPR:
12988 case CEIL_DIV_EXPR:
12989 case FLOOR_DIV_EXPR:
12990 case ROUND_DIV_EXPR:
12991 return RECURSE (op0) && RECURSE (op1);
12993 case TRUNC_MOD_EXPR:
12994 return RECURSE (op0);
12996 case FLOOR_MOD_EXPR:
12997 return RECURSE (op1);
12999 case CEIL_MOD_EXPR:
13000 case ROUND_MOD_EXPR:
13001 default:
13002 return tree_simple_nonnegative_warnv_p (code, type);
13005 /* We don't know sign of `t', so be conservative and return false. */
13006 return false;
13009 /* Return true if T is known to be non-negative. If the return
13010 value is based on the assumption that signed overflow is undefined,
13011 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13012 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13014 bool
13015 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13017 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13018 return true;
13020 switch (TREE_CODE (t))
13022 case INTEGER_CST:
13023 return tree_int_cst_sgn (t) >= 0;
13025 case REAL_CST:
13026 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13028 case FIXED_CST:
13029 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13031 case COND_EXPR:
13032 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13034 case SSA_NAME:
13035 /* Limit the depth of recursion to avoid quadratic behavior.
13036 This is expected to catch almost all occurrences in practice.
13037 If this code misses important cases that unbounded recursion
13038 would not, passes that need this information could be revised
13039 to provide it through dataflow propagation. */
13040 return (!name_registered_for_update_p (t)
13041 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13042 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13043 strict_overflow_p, depth));
13045 default:
13046 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13050 /* Return true if T is known to be non-negative. If the return
13051 value is based on the assumption that signed overflow is undefined,
13052 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13053 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13055 bool
13056 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13057 bool *strict_overflow_p, int depth)
13059 switch (fn)
13061 CASE_CFN_ACOS:
13062 CASE_CFN_ACOSH:
13063 CASE_CFN_CABS:
13064 CASE_CFN_COSH:
13065 CASE_CFN_ERFC:
13066 CASE_CFN_EXP:
13067 CASE_CFN_EXP10:
13068 CASE_CFN_EXP2:
13069 CASE_CFN_FABS:
13070 CASE_CFN_FDIM:
13071 CASE_CFN_HYPOT:
13072 CASE_CFN_POW10:
13073 CASE_CFN_FFS:
13074 CASE_CFN_PARITY:
13075 CASE_CFN_POPCOUNT:
13076 CASE_CFN_CLZ:
13077 CASE_CFN_CLRSB:
13078 case CFN_BUILT_IN_BSWAP32:
13079 case CFN_BUILT_IN_BSWAP64:
13080 /* Always true. */
13081 return true;
13083 CASE_CFN_SQRT:
13084 /* sqrt(-0.0) is -0.0. */
13085 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13086 return true;
13087 return RECURSE (arg0);
13089 CASE_CFN_ASINH:
13090 CASE_CFN_ATAN:
13091 CASE_CFN_ATANH:
13092 CASE_CFN_CBRT:
13093 CASE_CFN_CEIL:
13094 CASE_CFN_ERF:
13095 CASE_CFN_EXPM1:
13096 CASE_CFN_FLOOR:
13097 CASE_CFN_FMOD:
13098 CASE_CFN_FREXP:
13099 CASE_CFN_ICEIL:
13100 CASE_CFN_IFLOOR:
13101 CASE_CFN_IRINT:
13102 CASE_CFN_IROUND:
13103 CASE_CFN_LCEIL:
13104 CASE_CFN_LDEXP:
13105 CASE_CFN_LFLOOR:
13106 CASE_CFN_LLCEIL:
13107 CASE_CFN_LLFLOOR:
13108 CASE_CFN_LLRINT:
13109 CASE_CFN_LLROUND:
13110 CASE_CFN_LRINT:
13111 CASE_CFN_LROUND:
13112 CASE_CFN_MODF:
13113 CASE_CFN_NEARBYINT:
13114 CASE_CFN_RINT:
13115 CASE_CFN_ROUND:
13116 CASE_CFN_SCALB:
13117 CASE_CFN_SCALBLN:
13118 CASE_CFN_SCALBN:
13119 CASE_CFN_SIGNBIT:
13120 CASE_CFN_SIGNIFICAND:
13121 CASE_CFN_SINH:
13122 CASE_CFN_TANH:
13123 CASE_CFN_TRUNC:
13124 /* True if the 1st argument is nonnegative. */
13125 return RECURSE (arg0);
13127 CASE_CFN_FMAX:
13128 /* True if the 1st OR 2nd arguments are nonnegative. */
13129 return RECURSE (arg0) || RECURSE (arg1);
13131 CASE_CFN_FMIN:
13132 /* True if the 1st AND 2nd arguments are nonnegative. */
13133 return RECURSE (arg0) && RECURSE (arg1);
13135 CASE_CFN_COPYSIGN:
13136 /* True if the 2nd argument is nonnegative. */
13137 return RECURSE (arg1);
13139 CASE_CFN_POWI:
13140 /* True if the 1st argument is nonnegative or the second
13141 argument is an even integer. */
13142 if (TREE_CODE (arg1) == INTEGER_CST
13143 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13144 return true;
13145 return RECURSE (arg0);
13147 CASE_CFN_POW:
13148 /* True if the 1st argument is nonnegative or the second
13149 argument is an even integer valued real. */
13150 if (TREE_CODE (arg1) == REAL_CST)
13152 REAL_VALUE_TYPE c;
13153 HOST_WIDE_INT n;
13155 c = TREE_REAL_CST (arg1);
13156 n = real_to_integer (&c);
13157 if ((n & 1) == 0)
13159 REAL_VALUE_TYPE cint;
13160 real_from_integer (&cint, VOIDmode, n, SIGNED);
13161 if (real_identical (&c, &cint))
13162 return true;
13165 return RECURSE (arg0);
13167 default:
13168 break;
13170 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13173 /* Return true if T is known to be non-negative. If the return
13174 value is based on the assumption that signed overflow is undefined,
13175 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13176 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13178 static bool
13179 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13181 enum tree_code code = TREE_CODE (t);
13182 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13183 return true;
13185 switch (code)
13187 case TARGET_EXPR:
13189 tree temp = TARGET_EXPR_SLOT (t);
13190 t = TARGET_EXPR_INITIAL (t);
13192 /* If the initializer is non-void, then it's a normal expression
13193 that will be assigned to the slot. */
13194 if (!VOID_TYPE_P (t))
13195 return RECURSE (t);
13197 /* Otherwise, the initializer sets the slot in some way. One common
13198 way is an assignment statement at the end of the initializer. */
13199 while (1)
13201 if (TREE_CODE (t) == BIND_EXPR)
13202 t = expr_last (BIND_EXPR_BODY (t));
13203 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13204 || TREE_CODE (t) == TRY_CATCH_EXPR)
13205 t = expr_last (TREE_OPERAND (t, 0));
13206 else if (TREE_CODE (t) == STATEMENT_LIST)
13207 t = expr_last (t);
13208 else
13209 break;
13211 if (TREE_CODE (t) == MODIFY_EXPR
13212 && TREE_OPERAND (t, 0) == temp)
13213 return RECURSE (TREE_OPERAND (t, 1));
13215 return false;
13218 case CALL_EXPR:
13220 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13221 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13223 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13224 get_call_combined_fn (t),
13225 arg0,
13226 arg1,
13227 strict_overflow_p, depth);
13229 case COMPOUND_EXPR:
13230 case MODIFY_EXPR:
13231 return RECURSE (TREE_OPERAND (t, 1));
13233 case BIND_EXPR:
13234 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13236 case SAVE_EXPR:
13237 return RECURSE (TREE_OPERAND (t, 0));
13239 default:
13240 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13244 #undef RECURSE
13245 #undef tree_expr_nonnegative_warnv_p
13247 /* Return true if T is known to be non-negative. If the return
13248 value is based on the assumption that signed overflow is undefined,
13249 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13250 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13252 bool
13253 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13255 enum tree_code code;
13256 if (t == error_mark_node)
13257 return false;
13259 code = TREE_CODE (t);
13260 switch (TREE_CODE_CLASS (code))
13262 case tcc_binary:
13263 case tcc_comparison:
13264 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13265 TREE_TYPE (t),
13266 TREE_OPERAND (t, 0),
13267 TREE_OPERAND (t, 1),
13268 strict_overflow_p, depth);
13270 case tcc_unary:
13271 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13272 TREE_TYPE (t),
13273 TREE_OPERAND (t, 0),
13274 strict_overflow_p, depth);
13276 case tcc_constant:
13277 case tcc_declaration:
13278 case tcc_reference:
13279 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13281 default:
13282 break;
13285 switch (code)
13287 case TRUTH_AND_EXPR:
13288 case TRUTH_OR_EXPR:
13289 case TRUTH_XOR_EXPR:
13290 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13291 TREE_TYPE (t),
13292 TREE_OPERAND (t, 0),
13293 TREE_OPERAND (t, 1),
13294 strict_overflow_p, depth);
13295 case TRUTH_NOT_EXPR:
13296 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13297 TREE_TYPE (t),
13298 TREE_OPERAND (t, 0),
13299 strict_overflow_p, depth);
13301 case COND_EXPR:
13302 case CONSTRUCTOR:
13303 case OBJ_TYPE_REF:
13304 case ASSERT_EXPR:
13305 case ADDR_EXPR:
13306 case WITH_SIZE_EXPR:
13307 case SSA_NAME:
13308 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13310 default:
13311 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13315 /* Return true if `t' is known to be non-negative. Handle warnings
13316 about undefined signed overflow. */
13318 bool
13319 tree_expr_nonnegative_p (tree t)
13321 bool ret, strict_overflow_p;
13323 strict_overflow_p = false;
13324 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13325 if (strict_overflow_p)
13326 fold_overflow_warning (("assuming signed overflow does not occur when "
13327 "determining that expression is always "
13328 "non-negative"),
13329 WARN_STRICT_OVERFLOW_MISC);
13330 return ret;
13334 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13335 For floating point we further ensure that T is not denormal.
13336 Similar logic is present in nonzero_address in rtlanal.h.
13338 If the return value is based on the assumption that signed overflow
13339 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13340 change *STRICT_OVERFLOW_P. */
13342 bool
13343 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13344 bool *strict_overflow_p)
13346 switch (code)
13348 case ABS_EXPR:
13349 return tree_expr_nonzero_warnv_p (op0,
13350 strict_overflow_p);
13352 case NOP_EXPR:
13354 tree inner_type = TREE_TYPE (op0);
13355 tree outer_type = type;
13357 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13358 && tree_expr_nonzero_warnv_p (op0,
13359 strict_overflow_p));
13361 break;
13363 case NON_LVALUE_EXPR:
13364 return tree_expr_nonzero_warnv_p (op0,
13365 strict_overflow_p);
13367 default:
13368 break;
13371 return false;
13374 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13375 For floating point we further ensure that T is not denormal.
13376 Similar logic is present in nonzero_address in rtlanal.h.
13378 If the return value is based on the assumption that signed overflow
13379 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13380 change *STRICT_OVERFLOW_P. */
13382 bool
13383 tree_binary_nonzero_warnv_p (enum tree_code code,
13384 tree type,
13385 tree op0,
13386 tree op1, bool *strict_overflow_p)
13388 bool sub_strict_overflow_p;
13389 switch (code)
13391 case POINTER_PLUS_EXPR:
13392 case PLUS_EXPR:
13393 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13395 /* With the presence of negative values it is hard
13396 to say something. */
13397 sub_strict_overflow_p = false;
13398 if (!tree_expr_nonnegative_warnv_p (op0,
13399 &sub_strict_overflow_p)
13400 || !tree_expr_nonnegative_warnv_p (op1,
13401 &sub_strict_overflow_p))
13402 return false;
13403 /* One of operands must be positive and the other non-negative. */
13404 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13405 overflows, on a twos-complement machine the sum of two
13406 nonnegative numbers can never be zero. */
13407 return (tree_expr_nonzero_warnv_p (op0,
13408 strict_overflow_p)
13409 || tree_expr_nonzero_warnv_p (op1,
13410 strict_overflow_p));
13412 break;
13414 case MULT_EXPR:
13415 if (TYPE_OVERFLOW_UNDEFINED (type))
13417 if (tree_expr_nonzero_warnv_p (op0,
13418 strict_overflow_p)
13419 && tree_expr_nonzero_warnv_p (op1,
13420 strict_overflow_p))
13422 *strict_overflow_p = true;
13423 return true;
13426 break;
13428 case MIN_EXPR:
13429 sub_strict_overflow_p = false;
13430 if (tree_expr_nonzero_warnv_p (op0,
13431 &sub_strict_overflow_p)
13432 && tree_expr_nonzero_warnv_p (op1,
13433 &sub_strict_overflow_p))
13435 if (sub_strict_overflow_p)
13436 *strict_overflow_p = true;
13438 break;
13440 case MAX_EXPR:
13441 sub_strict_overflow_p = false;
13442 if (tree_expr_nonzero_warnv_p (op0,
13443 &sub_strict_overflow_p))
13445 if (sub_strict_overflow_p)
13446 *strict_overflow_p = true;
13448 /* When both operands are nonzero, then MAX must be too. */
13449 if (tree_expr_nonzero_warnv_p (op1,
13450 strict_overflow_p))
13451 return true;
13453 /* MAX where operand 0 is positive is positive. */
13454 return tree_expr_nonnegative_warnv_p (op0,
13455 strict_overflow_p);
13457 /* MAX where operand 1 is positive is positive. */
13458 else if (tree_expr_nonzero_warnv_p (op1,
13459 &sub_strict_overflow_p)
13460 && tree_expr_nonnegative_warnv_p (op1,
13461 &sub_strict_overflow_p))
13463 if (sub_strict_overflow_p)
13464 *strict_overflow_p = true;
13465 return true;
13467 break;
13469 case BIT_IOR_EXPR:
13470 return (tree_expr_nonzero_warnv_p (op1,
13471 strict_overflow_p)
13472 || tree_expr_nonzero_warnv_p (op0,
13473 strict_overflow_p));
13475 default:
13476 break;
13479 return false;
13482 /* Return true when T is an address and is known to be nonzero.
13483 For floating point we further ensure that T is not denormal.
13484 Similar logic is present in nonzero_address in rtlanal.h.
13486 If the return value is based on the assumption that signed overflow
13487 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13488 change *STRICT_OVERFLOW_P. */
13490 bool
13491 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13493 bool sub_strict_overflow_p;
13494 switch (TREE_CODE (t))
13496 case INTEGER_CST:
13497 return !integer_zerop (t);
13499 case ADDR_EXPR:
13501 tree base = TREE_OPERAND (t, 0);
13503 if (!DECL_P (base))
13504 base = get_base_address (base);
13506 if (!base)
13507 return false;
13509 /* For objects in symbol table check if we know they are non-zero.
13510 Don't do anything for variables and functions before symtab is built;
13511 it is quite possible that they will be declared weak later. */
13512 if (DECL_P (base) && decl_in_symtab_p (base))
13514 struct symtab_node *symbol;
13516 symbol = symtab_node::get_create (base);
13517 if (symbol)
13518 return symbol->nonzero_address ();
13519 else
13520 return false;
13523 /* Function local objects are never NULL. */
13524 if (DECL_P (base)
13525 && (DECL_CONTEXT (base)
13526 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13527 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13528 return true;
13530 /* Constants are never weak. */
13531 if (CONSTANT_CLASS_P (base))
13532 return true;
13534 return false;
13537 case COND_EXPR:
13538 sub_strict_overflow_p = false;
13539 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13540 &sub_strict_overflow_p)
13541 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13542 &sub_strict_overflow_p))
13544 if (sub_strict_overflow_p)
13545 *strict_overflow_p = true;
13546 return true;
13548 break;
13550 default:
13551 break;
13553 return false;
13556 #define integer_valued_real_p(X) \
13557 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13559 #define RECURSE(X) \
13560 ((integer_valued_real_p) (X, depth + 1))
13562 /* Return true if the floating point result of (CODE OP0) has an
13563 integer value. We also allow +Inf, -Inf and NaN to be considered
13564 integer values. Return false for signaling NaN.
13566 DEPTH is the current nesting depth of the query. */
13568 bool
13569 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13571 switch (code)
13573 case FLOAT_EXPR:
13574 return true;
13576 case ABS_EXPR:
13577 return RECURSE (op0);
13579 CASE_CONVERT:
13581 tree type = TREE_TYPE (op0);
13582 if (TREE_CODE (type) == INTEGER_TYPE)
13583 return true;
13584 if (TREE_CODE (type) == REAL_TYPE)
13585 return RECURSE (op0);
13586 break;
13589 default:
13590 break;
13592 return false;
13595 /* Return true if the floating point result of (CODE OP0 OP1) has an
13596 integer value. We also allow +Inf, -Inf and NaN to be considered
13597 integer values. Return false for signaling NaN.
13599 DEPTH is the current nesting depth of the query. */
13601 bool
13602 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13604 switch (code)
13606 case PLUS_EXPR:
13607 case MINUS_EXPR:
13608 case MULT_EXPR:
13609 case MIN_EXPR:
13610 case MAX_EXPR:
13611 return RECURSE (op0) && RECURSE (op1);
13613 default:
13614 break;
13616 return false;
13619 /* Return true if the floating point result of calling FNDECL with arguments
13620 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13621 considered integer values. Return false for signaling NaN. If FNDECL
13622 takes fewer than 2 arguments, the remaining ARGn are null.
13624 DEPTH is the current nesting depth of the query. */
13626 bool
13627 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13629 switch (fn)
13631 CASE_CFN_CEIL:
13632 CASE_CFN_FLOOR:
13633 CASE_CFN_NEARBYINT:
13634 CASE_CFN_RINT:
13635 CASE_CFN_ROUND:
13636 CASE_CFN_TRUNC:
13637 return true;
13639 CASE_CFN_FMIN:
13640 CASE_CFN_FMAX:
13641 return RECURSE (arg0) && RECURSE (arg1);
13643 default:
13644 break;
13646 return false;
13649 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13650 has an integer value. We also allow +Inf, -Inf and NaN to be
13651 considered integer values. Return false for signaling NaN.
13653 DEPTH is the current nesting depth of the query. */
13655 bool
13656 integer_valued_real_single_p (tree t, int depth)
13658 switch (TREE_CODE (t))
13660 case REAL_CST:
13661 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13663 case COND_EXPR:
13664 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13666 case SSA_NAME:
13667 /* Limit the depth of recursion to avoid quadratic behavior.
13668 This is expected to catch almost all occurrences in practice.
13669 If this code misses important cases that unbounded recursion
13670 would not, passes that need this information could be revised
13671 to provide it through dataflow propagation. */
13672 return (!name_registered_for_update_p (t)
13673 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13674 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13675 depth));
13677 default:
13678 break;
13680 return false;
13683 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13684 has an integer value. We also allow +Inf, -Inf and NaN to be
13685 considered integer values. Return false for signaling NaN.
13687 DEPTH is the current nesting depth of the query. */
13689 static bool
13690 integer_valued_real_invalid_p (tree t, int depth)
13692 switch (TREE_CODE (t))
13694 case COMPOUND_EXPR:
13695 case MODIFY_EXPR:
13696 case BIND_EXPR:
13697 return RECURSE (TREE_OPERAND (t, 1));
13699 case SAVE_EXPR:
13700 return RECURSE (TREE_OPERAND (t, 0));
13702 default:
13703 break;
13705 return false;
13708 #undef RECURSE
13709 #undef integer_valued_real_p
13711 /* Return true if the floating point expression T has an integer value.
13712 We also allow +Inf, -Inf and NaN to be considered integer values.
13713 Return false for signaling NaN.
13715 DEPTH is the current nesting depth of the query. */
13717 bool
13718 integer_valued_real_p (tree t, int depth)
13720 if (t == error_mark_node)
13721 return false;
13723 tree_code code = TREE_CODE (t);
13724 switch (TREE_CODE_CLASS (code))
13726 case tcc_binary:
13727 case tcc_comparison:
13728 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13729 TREE_OPERAND (t, 1), depth);
13731 case tcc_unary:
13732 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13734 case tcc_constant:
13735 case tcc_declaration:
13736 case tcc_reference:
13737 return integer_valued_real_single_p (t, depth);
13739 default:
13740 break;
13743 switch (code)
13745 case COND_EXPR:
13746 case SSA_NAME:
13747 return integer_valued_real_single_p (t, depth);
13749 case CALL_EXPR:
13751 tree arg0 = (call_expr_nargs (t) > 0
13752 ? CALL_EXPR_ARG (t, 0)
13753 : NULL_TREE);
13754 tree arg1 = (call_expr_nargs (t) > 1
13755 ? CALL_EXPR_ARG (t, 1)
13756 : NULL_TREE);
13757 return integer_valued_real_call_p (get_call_combined_fn (t),
13758 arg0, arg1, depth);
13761 default:
13762 return integer_valued_real_invalid_p (t, depth);
13766 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13767 attempt to fold the expression to a constant without modifying TYPE,
13768 OP0 or OP1.
13770 If the expression could be simplified to a constant, then return
13771 the constant. If the expression would not be simplified to a
13772 constant, then return NULL_TREE. */
13774 tree
13775 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13777 tree tem = fold_binary (code, type, op0, op1);
13778 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13781 /* Given the components of a unary expression CODE, TYPE and OP0,
13782 attempt to fold the expression to a constant without modifying
13783 TYPE or OP0.
13785 If the expression could be simplified to a constant, then return
13786 the constant. If the expression would not be simplified to a
13787 constant, then return NULL_TREE. */
13789 tree
13790 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13792 tree tem = fold_unary (code, type, op0);
13793 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13796 /* If EXP represents referencing an element in a constant string
13797 (either via pointer arithmetic or array indexing), return the
13798 tree representing the value accessed, otherwise return NULL. */
13800 tree
13801 fold_read_from_constant_string (tree exp)
13803 if ((TREE_CODE (exp) == INDIRECT_REF
13804 || TREE_CODE (exp) == ARRAY_REF)
13805 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13807 tree exp1 = TREE_OPERAND (exp, 0);
13808 tree index;
13809 tree string;
13810 location_t loc = EXPR_LOCATION (exp);
13812 if (TREE_CODE (exp) == INDIRECT_REF)
13813 string = string_constant (exp1, &index);
13814 else
13816 tree low_bound = array_ref_low_bound (exp);
13817 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13819 /* Optimize the special-case of a zero lower bound.
13821 We convert the low_bound to sizetype to avoid some problems
13822 with constant folding. (E.g. suppose the lower bound is 1,
13823 and its mode is QI. Without the conversion,l (ARRAY
13824 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13825 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13826 if (! integer_zerop (low_bound))
13827 index = size_diffop_loc (loc, index,
13828 fold_convert_loc (loc, sizetype, low_bound));
13830 string = exp1;
13833 if (string
13834 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13835 && TREE_CODE (string) == STRING_CST
13836 && TREE_CODE (index) == INTEGER_CST
13837 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13838 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13839 == MODE_INT)
13840 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13841 return build_int_cst_type (TREE_TYPE (exp),
13842 (TREE_STRING_POINTER (string)
13843 [TREE_INT_CST_LOW (index)]));
13845 return NULL;
13848 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13849 an integer constant, real, or fixed-point constant.
13851 TYPE is the type of the result. */
13853 static tree
13854 fold_negate_const (tree arg0, tree type)
13856 tree t = NULL_TREE;
13858 switch (TREE_CODE (arg0))
13860 case INTEGER_CST:
13862 bool overflow;
13863 wide_int val = wi::neg (arg0, &overflow);
13864 t = force_fit_type (type, val, 1,
13865 (overflow | TREE_OVERFLOW (arg0))
13866 && !TYPE_UNSIGNED (type));
13867 break;
13870 case REAL_CST:
13871 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13872 break;
13874 case FIXED_CST:
13876 FIXED_VALUE_TYPE f;
13877 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13878 &(TREE_FIXED_CST (arg0)), NULL,
13879 TYPE_SATURATING (type));
13880 t = build_fixed (type, f);
13881 /* Propagate overflow flags. */
13882 if (overflow_p | TREE_OVERFLOW (arg0))
13883 TREE_OVERFLOW (t) = 1;
13884 break;
13887 default:
13888 gcc_unreachable ();
13891 return t;
13894 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13895 an integer constant or real constant.
13897 TYPE is the type of the result. */
13899 tree
13900 fold_abs_const (tree arg0, tree type)
13902 tree t = NULL_TREE;
13904 switch (TREE_CODE (arg0))
13906 case INTEGER_CST:
13908 /* If the value is unsigned or non-negative, then the absolute value
13909 is the same as the ordinary value. */
13910 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13911 t = arg0;
13913 /* If the value is negative, then the absolute value is
13914 its negation. */
13915 else
13917 bool overflow;
13918 wide_int val = wi::neg (arg0, &overflow);
13919 t = force_fit_type (type, val, -1,
13920 overflow | TREE_OVERFLOW (arg0));
13923 break;
13925 case REAL_CST:
13926 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13927 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13928 else
13929 t = arg0;
13930 break;
13932 default:
13933 gcc_unreachable ();
13936 return t;
13939 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13940 constant. TYPE is the type of the result. */
13942 static tree
13943 fold_not_const (const_tree arg0, tree type)
13945 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13947 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13950 /* Given CODE, a relational operator, the target type, TYPE and two
13951 constant operands OP0 and OP1, return the result of the
13952 relational operation. If the result is not a compile time
13953 constant, then return NULL_TREE. */
13955 static tree
13956 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13958 int result, invert;
13960 /* From here on, the only cases we handle are when the result is
13961 known to be a constant. */
13963 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13965 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13966 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13968 /* Handle the cases where either operand is a NaN. */
13969 if (real_isnan (c0) || real_isnan (c1))
13971 switch (code)
13973 case EQ_EXPR:
13974 case ORDERED_EXPR:
13975 result = 0;
13976 break;
13978 case NE_EXPR:
13979 case UNORDERED_EXPR:
13980 case UNLT_EXPR:
13981 case UNLE_EXPR:
13982 case UNGT_EXPR:
13983 case UNGE_EXPR:
13984 case UNEQ_EXPR:
13985 result = 1;
13986 break;
13988 case LT_EXPR:
13989 case LE_EXPR:
13990 case GT_EXPR:
13991 case GE_EXPR:
13992 case LTGT_EXPR:
13993 if (flag_trapping_math)
13994 return NULL_TREE;
13995 result = 0;
13996 break;
13998 default:
13999 gcc_unreachable ();
14002 return constant_boolean_node (result, type);
14005 return constant_boolean_node (real_compare (code, c0, c1), type);
14008 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14010 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14011 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14012 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14015 /* Handle equality/inequality of complex constants. */
14016 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14018 tree rcond = fold_relational_const (code, type,
14019 TREE_REALPART (op0),
14020 TREE_REALPART (op1));
14021 tree icond = fold_relational_const (code, type,
14022 TREE_IMAGPART (op0),
14023 TREE_IMAGPART (op1));
14024 if (code == EQ_EXPR)
14025 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14026 else if (code == NE_EXPR)
14027 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14028 else
14029 return NULL_TREE;
14032 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14034 if (!VECTOR_TYPE_P (type))
14036 /* Have vector comparison with scalar boolean result. */
14037 bool result = true;
14038 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14039 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14040 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14042 tree elem0 = VECTOR_CST_ELT (op0, i);
14043 tree elem1 = VECTOR_CST_ELT (op1, i);
14044 tree tmp = fold_relational_const (code, type, elem0, elem1);
14045 result &= integer_onep (tmp);
14047 if (code == NE_EXPR)
14048 result = !result;
14049 return constant_boolean_node (result, type);
14051 unsigned count = VECTOR_CST_NELTS (op0);
14052 tree *elts = XALLOCAVEC (tree, count);
14053 gcc_assert (VECTOR_CST_NELTS (op1) == count
14054 && TYPE_VECTOR_SUBPARTS (type) == count);
14056 for (unsigned i = 0; i < count; i++)
14058 tree elem_type = TREE_TYPE (type);
14059 tree elem0 = VECTOR_CST_ELT (op0, i);
14060 tree elem1 = VECTOR_CST_ELT (op1, i);
14062 tree tem = fold_relational_const (code, elem_type,
14063 elem0, elem1);
14065 if (tem == NULL_TREE)
14066 return NULL_TREE;
14068 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14071 return build_vector (type, elts);
14074 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14076 To compute GT, swap the arguments and do LT.
14077 To compute GE, do LT and invert the result.
14078 To compute LE, swap the arguments, do LT and invert the result.
14079 To compute NE, do EQ and invert the result.
14081 Therefore, the code below must handle only EQ and LT. */
14083 if (code == LE_EXPR || code == GT_EXPR)
14085 std::swap (op0, op1);
14086 code = swap_tree_comparison (code);
14089 /* Note that it is safe to invert for real values here because we
14090 have already handled the one case that it matters. */
14092 invert = 0;
14093 if (code == NE_EXPR || code == GE_EXPR)
14095 invert = 1;
14096 code = invert_tree_comparison (code, false);
14099 /* Compute a result for LT or EQ if args permit;
14100 Otherwise return T. */
14101 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14103 if (code == EQ_EXPR)
14104 result = tree_int_cst_equal (op0, op1);
14105 else
14106 result = tree_int_cst_lt (op0, op1);
14108 else
14109 return NULL_TREE;
14111 if (invert)
14112 result ^= 1;
14113 return constant_boolean_node (result, type);
14116 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14117 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14118 itself. */
14120 tree
14121 fold_build_cleanup_point_expr (tree type, tree expr)
14123 /* If the expression does not have side effects then we don't have to wrap
14124 it with a cleanup point expression. */
14125 if (!TREE_SIDE_EFFECTS (expr))
14126 return expr;
14128 /* If the expression is a return, check to see if the expression inside the
14129 return has no side effects or the right hand side of the modify expression
14130 inside the return. If either don't have side effects set we don't need to
14131 wrap the expression in a cleanup point expression. Note we don't check the
14132 left hand side of the modify because it should always be a return decl. */
14133 if (TREE_CODE (expr) == RETURN_EXPR)
14135 tree op = TREE_OPERAND (expr, 0);
14136 if (!op || !TREE_SIDE_EFFECTS (op))
14137 return expr;
14138 op = TREE_OPERAND (op, 1);
14139 if (!TREE_SIDE_EFFECTS (op))
14140 return expr;
14143 return build1 (CLEANUP_POINT_EXPR, type, expr);
14146 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14147 of an indirection through OP0, or NULL_TREE if no simplification is
14148 possible. */
14150 tree
14151 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14153 tree sub = op0;
14154 tree subtype;
14156 STRIP_NOPS (sub);
14157 subtype = TREE_TYPE (sub);
14158 if (!POINTER_TYPE_P (subtype))
14159 return NULL_TREE;
14161 if (TREE_CODE (sub) == ADDR_EXPR)
14163 tree op = TREE_OPERAND (sub, 0);
14164 tree optype = TREE_TYPE (op);
14165 /* *&CONST_DECL -> to the value of the const decl. */
14166 if (TREE_CODE (op) == CONST_DECL)
14167 return DECL_INITIAL (op);
14168 /* *&p => p; make sure to handle *&"str"[cst] here. */
14169 if (type == optype)
14171 tree fop = fold_read_from_constant_string (op);
14172 if (fop)
14173 return fop;
14174 else
14175 return op;
14177 /* *(foo *)&fooarray => fooarray[0] */
14178 else if (TREE_CODE (optype) == ARRAY_TYPE
14179 && type == TREE_TYPE (optype)
14180 && (!in_gimple_form
14181 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14183 tree type_domain = TYPE_DOMAIN (optype);
14184 tree min_val = size_zero_node;
14185 if (type_domain && TYPE_MIN_VALUE (type_domain))
14186 min_val = TYPE_MIN_VALUE (type_domain);
14187 if (in_gimple_form
14188 && TREE_CODE (min_val) != INTEGER_CST)
14189 return NULL_TREE;
14190 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14191 NULL_TREE, NULL_TREE);
14193 /* *(foo *)&complexfoo => __real__ complexfoo */
14194 else if (TREE_CODE (optype) == COMPLEX_TYPE
14195 && type == TREE_TYPE (optype))
14196 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14197 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14198 else if (TREE_CODE (optype) == VECTOR_TYPE
14199 && type == TREE_TYPE (optype))
14201 tree part_width = TYPE_SIZE (type);
14202 tree index = bitsize_int (0);
14203 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14207 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14208 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14210 tree op00 = TREE_OPERAND (sub, 0);
14211 tree op01 = TREE_OPERAND (sub, 1);
14213 STRIP_NOPS (op00);
14214 if (TREE_CODE (op00) == ADDR_EXPR)
14216 tree op00type;
14217 op00 = TREE_OPERAND (op00, 0);
14218 op00type = TREE_TYPE (op00);
14220 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14221 if (TREE_CODE (op00type) == VECTOR_TYPE
14222 && type == TREE_TYPE (op00type))
14224 tree part_width = TYPE_SIZE (type);
14225 unsigned HOST_WIDE_INT max_offset
14226 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14227 * TYPE_VECTOR_SUBPARTS (op00type));
14228 if (tree_int_cst_sign_bit (op01) == 0
14229 && compare_tree_int (op01, max_offset) == -1)
14231 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14232 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14233 tree index = bitsize_int (indexi);
14234 return fold_build3_loc (loc,
14235 BIT_FIELD_REF, type, op00,
14236 part_width, index);
14239 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14240 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14241 && type == TREE_TYPE (op00type))
14243 tree size = TYPE_SIZE_UNIT (type);
14244 if (tree_int_cst_equal (size, op01))
14245 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14247 /* ((foo *)&fooarray)[1] => fooarray[1] */
14248 else if (TREE_CODE (op00type) == ARRAY_TYPE
14249 && type == TREE_TYPE (op00type))
14251 tree type_domain = TYPE_DOMAIN (op00type);
14252 tree min_val = size_zero_node;
14253 if (type_domain && TYPE_MIN_VALUE (type_domain))
14254 min_val = TYPE_MIN_VALUE (type_domain);
14255 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14256 TYPE_SIZE_UNIT (type));
14257 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14258 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14259 NULL_TREE, NULL_TREE);
14264 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14265 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14266 && type == TREE_TYPE (TREE_TYPE (subtype))
14267 && (!in_gimple_form
14268 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14270 tree type_domain;
14271 tree min_val = size_zero_node;
14272 sub = build_fold_indirect_ref_loc (loc, sub);
14273 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14274 if (type_domain && TYPE_MIN_VALUE (type_domain))
14275 min_val = TYPE_MIN_VALUE (type_domain);
14276 if (in_gimple_form
14277 && TREE_CODE (min_val) != INTEGER_CST)
14278 return NULL_TREE;
14279 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14280 NULL_TREE);
14283 return NULL_TREE;
14286 /* Builds an expression for an indirection through T, simplifying some
14287 cases. */
14289 tree
14290 build_fold_indirect_ref_loc (location_t loc, tree t)
14292 tree type = TREE_TYPE (TREE_TYPE (t));
14293 tree sub = fold_indirect_ref_1 (loc, type, t);
14295 if (sub)
14296 return sub;
14298 return build1_loc (loc, INDIRECT_REF, type, t);
14301 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14303 tree
14304 fold_indirect_ref_loc (location_t loc, tree t)
14306 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14308 if (sub)
14309 return sub;
14310 else
14311 return t;
14314 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14315 whose result is ignored. The type of the returned tree need not be
14316 the same as the original expression. */
14318 tree
14319 fold_ignored_result (tree t)
14321 if (!TREE_SIDE_EFFECTS (t))
14322 return integer_zero_node;
14324 for (;;)
14325 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14327 case tcc_unary:
14328 t = TREE_OPERAND (t, 0);
14329 break;
14331 case tcc_binary:
14332 case tcc_comparison:
14333 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14334 t = TREE_OPERAND (t, 0);
14335 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14336 t = TREE_OPERAND (t, 1);
14337 else
14338 return t;
14339 break;
14341 case tcc_expression:
14342 switch (TREE_CODE (t))
14344 case COMPOUND_EXPR:
14345 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14346 return t;
14347 t = TREE_OPERAND (t, 0);
14348 break;
14350 case COND_EXPR:
14351 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14352 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14353 return t;
14354 t = TREE_OPERAND (t, 0);
14355 break;
14357 default:
14358 return t;
14360 break;
14362 default:
14363 return t;
14367 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14369 tree
14370 round_up_loc (location_t loc, tree value, unsigned int divisor)
14372 tree div = NULL_TREE;
14374 if (divisor == 1)
14375 return value;
14377 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14378 have to do anything. Only do this when we are not given a const,
14379 because in that case, this check is more expensive than just
14380 doing it. */
14381 if (TREE_CODE (value) != INTEGER_CST)
14383 div = build_int_cst (TREE_TYPE (value), divisor);
14385 if (multiple_of_p (TREE_TYPE (value), value, div))
14386 return value;
14389 /* If divisor is a power of two, simplify this to bit manipulation. */
14390 if (divisor == (divisor & -divisor))
14392 if (TREE_CODE (value) == INTEGER_CST)
14394 wide_int val = value;
14395 bool overflow_p;
14397 if ((val & (divisor - 1)) == 0)
14398 return value;
14400 overflow_p = TREE_OVERFLOW (value);
14401 val += divisor - 1;
14402 val &= - (int) divisor;
14403 if (val == 0)
14404 overflow_p = true;
14406 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14408 else
14410 tree t;
14412 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14413 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14414 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14415 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14418 else
14420 if (!div)
14421 div = build_int_cst (TREE_TYPE (value), divisor);
14422 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14423 value = size_binop_loc (loc, MULT_EXPR, value, div);
14426 return value;
14429 /* Likewise, but round down. */
14431 tree
14432 round_down_loc (location_t loc, tree value, int divisor)
14434 tree div = NULL_TREE;
14436 gcc_assert (divisor > 0);
14437 if (divisor == 1)
14438 return value;
14440 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14441 have to do anything. Only do this when we are not given a const,
14442 because in that case, this check is more expensive than just
14443 doing it. */
14444 if (TREE_CODE (value) != INTEGER_CST)
14446 div = build_int_cst (TREE_TYPE (value), divisor);
14448 if (multiple_of_p (TREE_TYPE (value), value, div))
14449 return value;
14452 /* If divisor is a power of two, simplify this to bit manipulation. */
14453 if (divisor == (divisor & -divisor))
14455 tree t;
14457 t = build_int_cst (TREE_TYPE (value), -divisor);
14458 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14460 else
14462 if (!div)
14463 div = build_int_cst (TREE_TYPE (value), divisor);
14464 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14465 value = size_binop_loc (loc, MULT_EXPR, value, div);
14468 return value;
14471 /* Returns the pointer to the base of the object addressed by EXP and
14472 extracts the information about the offset of the access, storing it
14473 to PBITPOS and POFFSET. */
14475 static tree
14476 split_address_to_core_and_offset (tree exp,
14477 HOST_WIDE_INT *pbitpos, tree *poffset)
14479 tree core;
14480 machine_mode mode;
14481 int unsignedp, reversep, volatilep;
14482 HOST_WIDE_INT bitsize;
14483 location_t loc = EXPR_LOCATION (exp);
14485 if (TREE_CODE (exp) == ADDR_EXPR)
14487 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14488 poffset, &mode, &unsignedp, &reversep,
14489 &volatilep, false);
14490 core = build_fold_addr_expr_loc (loc, core);
14492 else
14494 core = exp;
14495 *pbitpos = 0;
14496 *poffset = NULL_TREE;
14499 return core;
14502 /* Returns true if addresses of E1 and E2 differ by a constant, false
14503 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14505 bool
14506 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14508 tree core1, core2;
14509 HOST_WIDE_INT bitpos1, bitpos2;
14510 tree toffset1, toffset2, tdiff, type;
14512 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14513 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14515 if (bitpos1 % BITS_PER_UNIT != 0
14516 || bitpos2 % BITS_PER_UNIT != 0
14517 || !operand_equal_p (core1, core2, 0))
14518 return false;
14520 if (toffset1 && toffset2)
14522 type = TREE_TYPE (toffset1);
14523 if (type != TREE_TYPE (toffset2))
14524 toffset2 = fold_convert (type, toffset2);
14526 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14527 if (!cst_and_fits_in_hwi (tdiff))
14528 return false;
14530 *diff = int_cst_value (tdiff);
14532 else if (toffset1 || toffset2)
14534 /* If only one of the offsets is non-constant, the difference cannot
14535 be a constant. */
14536 return false;
14538 else
14539 *diff = 0;
14541 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14542 return true;
14545 /* Return OFF converted to a pointer offset type suitable as offset for
14546 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14547 tree
14548 convert_to_ptrofftype_loc (location_t loc, tree off)
14550 return fold_convert_loc (loc, sizetype, off);
14553 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14554 tree
14555 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14557 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14558 ptr, convert_to_ptrofftype_loc (loc, off));
14561 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14562 tree
14563 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14565 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14566 ptr, size_int (off));
14569 /* Return a char pointer for a C string if it is a string constant
14570 or sum of string constant and integer constant. */
14572 const char *
14573 c_getstr (tree src)
14575 tree offset_node;
14577 src = string_constant (src, &offset_node);
14578 if (src == 0)
14579 return 0;
14581 if (offset_node == 0)
14582 return TREE_STRING_POINTER (src);
14583 else if (!tree_fits_uhwi_p (offset_node)
14584 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14585 return 0;
14587 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);