/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
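/* [Editor's illustration, not part of the original source.]  The low
   three bits encode LT, EQ and GT as 1, 2 and 4, and bit 3 encodes
   UNORD, so each composite predicate is the bitwise OR of its
   primitive outcomes, e.g.

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)                  == 3
     COMPCODE_ORD  == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)    == 7
     COMPCODE_UNLE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_EQ) == 11

   and logically inverting a comparison is XOR with COMPCODE_TRUE:
   COMPCODE_NE == (COMPCODE_TRUE ^ COMPCODE_EQ) == 13.  */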
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
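/* [Editor's illustration, not part of the original source.]  For
   INTEGER_CSTs of the same type, div_if_zero_remainder (15, 5) folds
   to the constant 3, while div_if_zero_remainder (15, 4) returns
   NULL_TREE because the division is inexact.  */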
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
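/* [Editor's illustration, not part of the original source.]  A minimal
   usage sketch of the deferral API above; result_is_actually_used_p is
   a hypothetical predicate standing in for whatever test a caller
   applies to the folded result:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = result_is_actually_used_p (folded);
     fold_undefer_overflow_warnings (used, stmt, 0);

   Any warning recorded by fold_overflow_warning while folding is then
   emitted only if USED is true, at STMT's location.  */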
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
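/* [Editor's example, not part of the original source.]  sin is odd, so
   negate_mathfn_p lets -sin(x) be rewritten as sin(-x).  rint is only
   treated as odd when -frounding-math is off, since under a directed
   rounding mode rint(-x) need not equal -rint(x).  */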
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
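/* [Editor's example, not part of the original source.]  For a 32-bit
   signed type the only rejected constant is INT_MIN (0x80000000),
   the one value with just the sign bit set, whose negation overflows
   back to INT_MIN; unsigned types are rejected up front.  */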
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
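/* [Editor's worked example, not part of the original source.]  With
   CODE == PLUS_EXPR and IN == x - 4 (a MINUS_EXPR), op1 is the
   literal, and because it was subtracted it is stored through
   *MINUS_LITP: the result is VAR == x, *LITP == NULL, *CONP == NULL
   and *MINUS_LITP == 4, i.e. IN == VAR - *MINUS_LITP.  */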
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
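/* [Editor's note, an inference not stated in the original source.]
   Shifts and rotates are exempted above presumably because their
   second operand is a shift count, whose type may legitimately differ
   from the shifted operand's type, so requiring matching signedness,
   precision and mode would be too strict for those codes.  */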
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
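/* [Editor's example, not part of the original source.]  Folding
   INT_MAX + 1 in a 32-bit signed type goes through wi::add, which
   reports the wrap; with OVERFLOWABLE == 1, force_fit_type then
   returns the wrapped constant INT_MIN with TREE_OVERFLOW set rather
   than failing.  */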
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform the operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
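/* [Editor's example, not part of the original source.]  For V4SI
   vectors, const_binop (PLUS_EXPR, {1,2,3,4}, {10,20,30,40}) recurses
   element-wise through the INTEGER_CST path above and rebuilds
   {11,22,33,44} with build_vector; if any element fails to fold, the
   whole fold is abandoned and NULL_TREE is returned.  */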
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
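/* [Editor's note, not part of the original source.]  The -1 passed to
   int_const_binop_1 above is its OVERFLOWABLE argument; in that
   function's flag computation, -1 makes the overflow bit stick even
   for unsigned (sizetype) arithmetic, so a size calculation that
   wraps is flagged with TREE_OVERFLOW instead of wrapping silently.  */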
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
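/* [Editor's example, not part of the original source.]  On the
   sizetype constants 4 and 7 the constant path is taken: 7 > 4, so
   the code computes 7 - 4 == 3 in the unsigned type (which cannot
   overflow), converts that to ssizetype, and subtracts it from zero
   to return the ssizetype constant -3.  */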
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
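/* [Editor's example, not part of the original source.]  Converting the
   REAL_CST 1e10 to a 32-bit signed type saturates per the Java-style
   rules described above: 1e10 exceeds TYPE_MAX_VALUE, so VAL is set to
   INT_MAX (2147483647) and the result carries TREE_OVERFLOW; a NaN
   input instead produces 0 with TREE_OVERFLOW set.  */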
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1988 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1989 to another floating point type. */
1991 static tree
1992 fold_convert_const_real_from_real (tree type, const_tree arg1)
1994 REAL_VALUE_TYPE value;
1995 tree t;
1997 /* Don't perform the operation if flag_signaling_nans is on
1998 and the operand is a signaling NaN. */
1999 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2000 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2001 return NULL_TREE;
2003 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2004 t = build_real (type, value);
2006 /* If converting an infinity or NAN to a representation that doesn't
2007 have one, set the overflow bit so that we can produce some kind of
2008 error message at the appropriate point if necessary. It's not the
2009 most user-friendly message, but it's better than nothing. */
2010 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2011 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2012 TREE_OVERFLOW (t) = 1;
2013 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2014 && !MODE_HAS_NANS (TYPE_MODE (type)))
2015 TREE_OVERFLOW (t) = 1;
2016 /* Regular overflow: the conversion produced an infinity in a mode that
2017 can't represent infinities. */
2018 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2019 && REAL_VALUE_ISINF (value)
2020 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2021 TREE_OVERFLOW (t) = 1;
2022 else
2023 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2024 return t;
2027 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2028 to a floating point type. */
2030 static tree
2031 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2033 REAL_VALUE_TYPE value;
2034 tree t;
2036 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2037 t = build_real (type, value);
2039 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2040 return t;
2043 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2044 to another fixed-point type. */
2046 static tree
2047 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2049 FIXED_VALUE_TYPE value;
2050 tree t;
2051 bool overflow_p;
2053 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2054 TYPE_SATURATING (type));
2055 t = build_fixed (type, value);
2057 /* Propagate overflow flags. */
2058 if (overflow_p | TREE_OVERFLOW (arg1))
2059 TREE_OVERFLOW (t) = 1;
2060 return t;
2063 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2064 to a fixed-point type. */
2066 static tree
2067 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2069 FIXED_VALUE_TYPE value;
2070 tree t;
2071 bool overflow_p;
2072 double_int di;
2074 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2076 di.low = TREE_INT_CST_ELT (arg1, 0);
2077 if (TREE_INT_CST_NUNITS (arg1) == 1)
2078 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2079 else
2080 di.high = TREE_INT_CST_ELT (arg1, 1);
2082 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2083 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2084 TYPE_SATURATING (type));
2085 t = build_fixed (type, value);
2087 /* Propagate overflow flags. */
2088 if (overflow_p | TREE_OVERFLOW (arg1))
2089 TREE_OVERFLOW (t) = 1;
2090 return t;
2093 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2094 to a fixed-point type. */
2096 static tree
2097 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2099 FIXED_VALUE_TYPE value;
2100 tree t;
2101 bool overflow_p;
2103 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2104 &TREE_REAL_CST (arg1),
2105 TYPE_SATURATING (type));
2106 t = build_fixed (type, value);
2108 /* Propagate overflow flags. */
2109 if (overflow_p | TREE_OVERFLOW (arg1))
2110 TREE_OVERFLOW (t) = 1;
2111 return t;
2114 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2115 type TYPE. If no simplification can be done return NULL_TREE. */
2117 static tree
2118 fold_convert_const (enum tree_code code, tree type, tree arg1)
2120 if (TREE_TYPE (arg1) == type)
2121 return arg1;
2123 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2124 || TREE_CODE (type) == OFFSET_TYPE)
2126 if (TREE_CODE (arg1) == INTEGER_CST)
2127 return fold_convert_const_int_from_int (type, arg1);
2128 else if (TREE_CODE (arg1) == REAL_CST)
2129 return fold_convert_const_int_from_real (code, type, arg1);
2130 else if (TREE_CODE (arg1) == FIXED_CST)
2131 return fold_convert_const_int_from_fixed (type, arg1);
2133 else if (TREE_CODE (type) == REAL_TYPE)
2135 if (TREE_CODE (arg1) == INTEGER_CST)
2136 return build_real_from_int_cst (type, arg1);
2137 else if (TREE_CODE (arg1) == REAL_CST)
2138 return fold_convert_const_real_from_real (type, arg1);
2139 else if (TREE_CODE (arg1) == FIXED_CST)
2140 return fold_convert_const_real_from_fixed (type, arg1);
2142 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2144 if (TREE_CODE (arg1) == FIXED_CST)
2145 return fold_convert_const_fixed_from_fixed (type, arg1);
2146 else if (TREE_CODE (arg1) == INTEGER_CST)
2147 return fold_convert_const_fixed_from_int (type, arg1);
2148 else if (TREE_CODE (arg1) == REAL_CST)
2149 return fold_convert_const_fixed_from_real (type, arg1);
2151 else if (TREE_CODE (type) == VECTOR_TYPE)
2153 if (TREE_CODE (arg1) == VECTOR_CST
2154 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2156 int len = TYPE_VECTOR_SUBPARTS (type);
2157 tree elttype = TREE_TYPE (type);
2158 tree *v = XALLOCAVEC (tree, len);
2159 for (int i = 0; i < len; ++i)
2161 tree elt = VECTOR_CST_ELT (arg1, i);
2162 tree cvt = fold_convert_const (code, elttype, elt);
2163 if (cvt == NULL_TREE)
2164 return NULL_TREE;
2165 v[i] = cvt;
2167 return build_vector (type, v);
2170 return NULL_TREE;
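/* For example, folding (int) 2.5 dispatches above to
   fold_convert_const_int_from_real and yields the INTEGER_CST 2, while
   (double) 2 goes through build_real_from_int_cst and yields the
   REAL_CST 2.0.  */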
2173 /* Construct a vector of zero elements of vector type TYPE. */
2175 static tree
2176 build_zero_vector (tree type)
2178 tree t;
2180 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2181 return build_vector_from_val (type, t);
2184 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2186 bool
2187 fold_convertible_p (const_tree type, const_tree arg)
2189 tree orig = TREE_TYPE (arg);
2191 if (type == orig)
2192 return true;
2194 if (TREE_CODE (arg) == ERROR_MARK
2195 || TREE_CODE (type) == ERROR_MARK
2196 || TREE_CODE (orig) == ERROR_MARK)
2197 return false;
2199 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2200 return true;
2202 switch (TREE_CODE (type))
2204 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2205 case POINTER_TYPE: case REFERENCE_TYPE:
2206 case OFFSET_TYPE:
2207 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE);
2210 case REAL_TYPE:
2211 case FIXED_POINT_TYPE:
2212 case VECTOR_TYPE:
2213 case VOID_TYPE:
2214 return TREE_CODE (type) == TREE_CODE (orig);
2216 default:
2217 return false;
2221 /* Convert expression ARG to type TYPE. Used by the middle-end for
2222 simple conversions in preference to calling the front-end's convert. */
2224 tree
2225 fold_convert_loc (location_t loc, tree type, tree arg)
2227 tree orig = TREE_TYPE (arg);
2228 tree tem;
2230 if (type == orig)
2231 return arg;
2233 if (TREE_CODE (arg) == ERROR_MARK
2234 || TREE_CODE (type) == ERROR_MARK
2235 || TREE_CODE (orig) == ERROR_MARK)
2236 return error_mark_node;
2238 switch (TREE_CODE (type))
2240 case POINTER_TYPE:
2241 case REFERENCE_TYPE:
2242 /* Handle conversions between pointers to different address spaces. */
2243 if (POINTER_TYPE_P (orig)
2244 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2245 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2246 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2247 /* fall through */
2249 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2250 case OFFSET_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2253 tem = fold_convert_const (NOP_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2257 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2258 || TREE_CODE (orig) == OFFSET_TYPE)
2259 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2260 if (TREE_CODE (orig) == COMPLEX_TYPE)
2261 return fold_convert_loc (loc, type,
2262 fold_build1_loc (loc, REALPART_EXPR,
2263 TREE_TYPE (orig), arg));
2264 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2265 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2266 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2268 case REAL_TYPE:
2269 if (TREE_CODE (arg) == INTEGER_CST)
2271 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2272 if (tem != NULL_TREE)
2273 return tem;
2275 else if (TREE_CODE (arg) == REAL_CST)
2277 tem = fold_convert_const (NOP_EXPR, type, arg);
2278 if (tem != NULL_TREE)
2279 return tem;
2281 else if (TREE_CODE (arg) == FIXED_CST)
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 return tem;
2288 switch (TREE_CODE (orig))
2290 case INTEGER_TYPE:
2291 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2292 case POINTER_TYPE: case REFERENCE_TYPE:
2293 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2295 case REAL_TYPE:
2296 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2298 case FIXED_POINT_TYPE:
2299 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2301 case COMPLEX_TYPE:
2302 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2303 return fold_convert_loc (loc, type, tem);
2305 default:
2306 gcc_unreachable ();
2309 case FIXED_POINT_TYPE:
2310 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2311 || TREE_CODE (arg) == REAL_CST)
2313 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2314 if (tem != NULL_TREE)
2315 goto fold_convert_exit;
2318 switch (TREE_CODE (orig))
2320 case FIXED_POINT_TYPE:
2321 case INTEGER_TYPE:
2322 case ENUMERAL_TYPE:
2323 case BOOLEAN_TYPE:
2324 case REAL_TYPE:
2325 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2327 case COMPLEX_TYPE:
2328 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2329 return fold_convert_loc (loc, type, tem);
2331 default:
2332 gcc_unreachable ();
2335 case COMPLEX_TYPE:
2336 switch (TREE_CODE (orig))
2338 case INTEGER_TYPE:
2339 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2340 case POINTER_TYPE: case REFERENCE_TYPE:
2341 case REAL_TYPE:
2342 case FIXED_POINT_TYPE:
2343 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2344 fold_convert_loc (loc, TREE_TYPE (type), arg),
2345 fold_convert_loc (loc, TREE_TYPE (type),
2346 integer_zero_node));
2347 case COMPLEX_TYPE:
2349 tree rpart, ipart;
2351 if (TREE_CODE (arg) == COMPLEX_EXPR)
2353 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2354 TREE_OPERAND (arg, 0));
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2356 TREE_OPERAND (arg, 1));
2357 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2360 arg = save_expr (arg);
2361 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2362 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2363 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2364 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2365 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2368 default:
2369 gcc_unreachable ();
2372 case VECTOR_TYPE:
2373 if (integer_zerop (arg))
2374 return build_zero_vector (type);
2375 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2376 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2377 || TREE_CODE (orig) == VECTOR_TYPE);
2378 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2380 case VOID_TYPE:
2381 tem = fold_ignored_result (arg);
2382 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2384 default:
2385 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2386 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2387 gcc_unreachable ();
2389 fold_convert_exit:
2390 protected_set_expr_location_unshare (tem, loc);
2391 return tem;
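/* For example, converting a COMPLEX_TYPE value to a REAL_TYPE above goes
   through REALPART_EXPR, so (double) (1.0 + 2.0i) folds to 1.0,
   discarding the imaginary part as C's complex-to-real conversion
   requires.  */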
2394 /* Return false if expr can be assumed not to be an lvalue, true
2395 otherwise. */
2397 static bool
2398 maybe_lvalue_p (const_tree x)
2400 /* We only need to wrap lvalue tree codes. */
2401 switch (TREE_CODE (x))
2403 case VAR_DECL:
2404 case PARM_DECL:
2405 case RESULT_DECL:
2406 case LABEL_DECL:
2407 case FUNCTION_DECL:
2408 case SSA_NAME:
2410 case COMPONENT_REF:
2411 case MEM_REF:
2412 case INDIRECT_REF:
2413 case ARRAY_REF:
2414 case ARRAY_RANGE_REF:
2415 case BIT_FIELD_REF:
2416 case OBJ_TYPE_REF:
2418 case REALPART_EXPR:
2419 case IMAGPART_EXPR:
2420 case PREINCREMENT_EXPR:
2421 case PREDECREMENT_EXPR:
2422 case SAVE_EXPR:
2423 case TRY_CATCH_EXPR:
2424 case WITH_CLEANUP_EXPR:
2425 case COMPOUND_EXPR:
2426 case MODIFY_EXPR:
2427 case TARGET_EXPR:
2428 case COND_EXPR:
2429 case BIND_EXPR:
2430 break;
2432 default:
2433 /* Assume the worst for front-end tree codes. */
2434 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2435 break;
2436 return false;
2439 return true;
2442 /* Return an expr equal to X but certainly not valid as an lvalue. */
2444 tree
2445 non_lvalue_loc (location_t loc, tree x)
2447 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2448 us. */
2449 if (in_gimple_form)
2450 return x;
2452 if (! maybe_lvalue_p (x))
2453 return x;
2454 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2457 /* When pedantic, return an expr equal to X but certainly not valid as a
2458 pedantic lvalue. Otherwise, return X. */
2460 static tree
2461 pedantic_non_lvalue_loc (location_t loc, tree x)
2463 return protected_set_expr_location_unshare (x, loc);
2466 /* Given a tree comparison code, return the code that is the logical inverse.
2467 It is generally not safe to do this for floating-point comparisons, except
2468 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2469 ERROR_MARK in this case. */
2471 enum tree_code
2472 invert_tree_comparison (enum tree_code code, bool honor_nans)
2474 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2475 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2476 return ERROR_MARK;
2478 switch (code)
2480 case EQ_EXPR:
2481 return NE_EXPR;
2482 case NE_EXPR:
2483 return EQ_EXPR;
2484 case GT_EXPR:
2485 return honor_nans ? UNLE_EXPR : LE_EXPR;
2486 case GE_EXPR:
2487 return honor_nans ? UNLT_EXPR : LT_EXPR;
2488 case LT_EXPR:
2489 return honor_nans ? UNGE_EXPR : GE_EXPR;
2490 case LE_EXPR:
2491 return honor_nans ? UNGT_EXPR : GT_EXPR;
2492 case LTGT_EXPR:
2493 return UNEQ_EXPR;
2494 case UNEQ_EXPR:
2495 return LTGT_EXPR;
2496 case UNGT_EXPR:
2497 return LE_EXPR;
2498 case UNGE_EXPR:
2499 return LT_EXPR;
2500 case UNLT_EXPR:
2501 return GE_EXPR;
2502 case UNLE_EXPR:
2503 return GT_EXPR;
2504 case ORDERED_EXPR:
2505 return UNORDERED_EXPR;
2506 case UNORDERED_EXPR:
2507 return ORDERED_EXPR;
2508 default:
2509 gcc_unreachable ();
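/* Worked example of why HONOR_NANS matters above: with IEEE NaNs,
   !(a < b) is true not only when a >= b but also when either operand is
   NaN, so the inverse of LT_EXPR must be UNGE_EXPR:

     !(a < b)  <==>  isnan (a) || isnan (b) || a >= b

   Only when NaNs cannot occur is plain GE_EXPR a correct inverse.  */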
2513 /* Similar, but return the comparison that results if the operands are
2514 swapped. This is safe for floating-point. */
2516 enum tree_code
2517 swap_tree_comparison (enum tree_code code)
2519 switch (code)
2521 case EQ_EXPR:
2522 case NE_EXPR:
2523 case ORDERED_EXPR:
2524 case UNORDERED_EXPR:
2525 case LTGT_EXPR:
2526 case UNEQ_EXPR:
2527 return code;
2528 case GT_EXPR:
2529 return LT_EXPR;
2530 case GE_EXPR:
2531 return LE_EXPR;
2532 case LT_EXPR:
2533 return GT_EXPR;
2534 case LE_EXPR:
2535 return GE_EXPR;
2536 case UNGT_EXPR:
2537 return UNLT_EXPR;
2538 case UNGE_EXPR:
2539 return UNLE_EXPR;
2540 case UNLT_EXPR:
2541 return UNGT_EXPR;
2542 case UNLE_EXPR:
2543 return UNGE_EXPR;
2544 default:
2545 gcc_unreachable ();
2550 /* Convert a comparison tree code from an enum tree_code representation
2551 into a compcode bit-based encoding. This function is the inverse of
2552 compcode_to_comparison. */
2554 static enum comparison_code
2555 comparison_to_compcode (enum tree_code code)
2557 switch (code)
2559 case LT_EXPR:
2560 return COMPCODE_LT;
2561 case EQ_EXPR:
2562 return COMPCODE_EQ;
2563 case LE_EXPR:
2564 return COMPCODE_LE;
2565 case GT_EXPR:
2566 return COMPCODE_GT;
2567 case NE_EXPR:
2568 return COMPCODE_NE;
2569 case GE_EXPR:
2570 return COMPCODE_GE;
2571 case ORDERED_EXPR:
2572 return COMPCODE_ORD;
2573 case UNORDERED_EXPR:
2574 return COMPCODE_UNORD;
2575 case UNLT_EXPR:
2576 return COMPCODE_UNLT;
2577 case UNEQ_EXPR:
2578 return COMPCODE_UNEQ;
2579 case UNLE_EXPR:
2580 return COMPCODE_UNLE;
2581 case UNGT_EXPR:
2582 return COMPCODE_UNGT;
2583 case LTGT_EXPR:
2584 return COMPCODE_LTGT;
2585 case UNGE_EXPR:
2586 return COMPCODE_UNGE;
2587 default:
2588 gcc_unreachable ();
2592 /* Convert a compcode bit-based encoding of a comparison operator back
2593 to GCC's enum tree_code representation. This function is the
2594 inverse of comparison_to_compcode. */
2596 static enum tree_code
2597 compcode_to_comparison (enum comparison_code code)
2599 switch (code)
2601 case COMPCODE_LT:
2602 return LT_EXPR;
2603 case COMPCODE_EQ:
2604 return EQ_EXPR;
2605 case COMPCODE_LE:
2606 return LE_EXPR;
2607 case COMPCODE_GT:
2608 return GT_EXPR;
2609 case COMPCODE_NE:
2610 return NE_EXPR;
2611 case COMPCODE_GE:
2612 return GE_EXPR;
2613 case COMPCODE_ORD:
2614 return ORDERED_EXPR;
2615 case COMPCODE_UNORD:
2616 return UNORDERED_EXPR;
2617 case COMPCODE_UNLT:
2618 return UNLT_EXPR;
2619 case COMPCODE_UNEQ:
2620 return UNEQ_EXPR;
2621 case COMPCODE_UNLE:
2622 return UNLE_EXPR;
2623 case COMPCODE_UNGT:
2624 return UNGT_EXPR;
2625 case COMPCODE_LTGT:
2626 return LTGT_EXPR;
2627 case COMPCODE_UNGE:
2628 return UNGE_EXPR;
2629 default:
2630 gcc_unreachable ();
2634 /* Return a tree for the comparison which is the combination of
2635 doing the AND or OR (depending on CODE) of the two operations LCODE
2636 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2637 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2638 if this makes the transformation invalid. */
2640 tree
2641 combine_comparisons (location_t loc,
2642 enum tree_code code, enum tree_code lcode,
2643 enum tree_code rcode, tree truth_type,
2644 tree ll_arg, tree lr_arg)
2646 bool honor_nans = HONOR_NANS (ll_arg);
2647 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2648 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2649 int compcode;
2651 switch (code)
2653 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2654 compcode = lcompcode & rcompcode;
2655 break;
2657 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2658 compcode = lcompcode | rcompcode;
2659 break;
2661 default:
2662 return NULL_TREE;
2665 if (!honor_nans)
2667 /* Eliminate unordered comparisons, as well as LTGT and ORD
2668 which are not used unless the mode has NaNs. */
2669 compcode &= ~COMPCODE_UNORD;
2670 if (compcode == COMPCODE_LTGT)
2671 compcode = COMPCODE_NE;
2672 else if (compcode == COMPCODE_ORD)
2673 compcode = COMPCODE_TRUE;
2675 else if (flag_trapping_math)
2677 /* Check that the original operation and the optimized ones will trap
2678 under the same condition. */
2679 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2680 && (lcompcode != COMPCODE_EQ)
2681 && (lcompcode != COMPCODE_ORD);
2682 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2683 && (rcompcode != COMPCODE_EQ)
2684 && (rcompcode != COMPCODE_ORD);
2685 bool trap = (compcode & COMPCODE_UNORD) == 0
2686 && (compcode != COMPCODE_EQ)
2687 && (compcode != COMPCODE_ORD);
2689 /* In a short-circuited boolean expression the LHS might be
2690 such that the RHS, if evaluated, will never trap. For
2691 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2692 if neither x nor y is NaN. (This is a mixed blessing: for
2693 example, the expression above will never trap, hence
2694 optimizing it to x < y would be invalid). */
2695 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2696 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2697 rtrap = false;
2699 /* If the comparison was short-circuited, and only the RHS
2700 trapped, we may now generate a spurious trap. */
2701 if (rtrap && !ltrap
2702 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2703 return NULL_TREE;
2705 /* If we changed the conditions that cause a trap, we lose. */
2706 if ((ltrap || rtrap) != trap)
2707 return NULL_TREE;
2710 if (compcode == COMPCODE_TRUE)
2711 return constant_boolean_node (true, truth_type);
2712 else if (compcode == COMPCODE_FALSE)
2713 return constant_boolean_node (false, truth_type);
2714 else
2716 enum tree_code tcode;
2718 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2719 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
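/* Worked example of the compcode arithmetic used above: with the bit
   encoding LT = 1, EQ = 2, GT = 4 and UNORD = 8, each comparison code is
   the OR of the outcomes for which it is true, e.g. LE = LT|EQ = 3 and
   GE = GT|EQ = 6.  ANDing two predicates on identical operands is then a
   bitwise AND of their codes:

     (a <= b) && (a >= b)  -->  3 & 6 = 2  -->  EQ, i.e. a == b

   and ORing them is a bitwise OR:

     (a < b) || (a == b)   -->  1 | 2 = 3  -->  LE, i.e. a <= b  */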
2723 /* Return nonzero if two operands (typically of the same tree node)
2724 are necessarily equal. FLAGS modifies behavior as follows:
2726 If OEP_ONLY_CONST is set, only return nonzero for constants.
2727 This function tests whether the operands are indistinguishable;
2728 it does not test whether they are equal using C's == operation.
2729 The distinction is important for IEEE floating point, because
2730 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2731 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2733 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2734 even though it may hold multiple values during a function.
2735 This is because a GCC tree node guarantees that nothing else is
2736 executed between the evaluation of its "operands" (which may often
2737 be evaluated in arbitrary order). Hence if the operands themselves
2738 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2739 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2740 unset means assuming isochronic (or instantaneous) tree equivalence.
2741 Unless comparing arbitrary expression trees, such as from different
2742 statements, this flag can usually be left unset.
2744 If OEP_PURE_SAME is set, then pure functions with identical arguments
2745 are considered the same. It is used when the caller has other ways
2746 to ensure that global memory is unchanged in between.
2748 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2749 not values of expressions.
2751 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2752 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2754 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2755 any operand with side effects. This is unnecessarily conservative in
2756 the case where we know that arg0 and arg1 are in disjoint code paths
2757 (such as the two arms of a ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2758 addresses with TREE_CONSTANT flag set so we know that &var == &var
2759 even if var is volatile. */
2761 int
2762 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2764 /* When checking, verify at the outermost operand_equal_p call that
2765 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2766 hash value. */
2767 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2769 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2771 if (arg0 != arg1)
2773 inchash::hash hstate0 (0), hstate1 (0);
2774 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2775 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2776 hashval_t h0 = hstate0.end ();
2777 hashval_t h1 = hstate1.end ();
2778 gcc_assert (h0 == h1);
2780 return 1;
2782 else
2783 return 0;
2786 /* If either is ERROR_MARK, they aren't equal. */
2787 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2788 || TREE_TYPE (arg0) == error_mark_node
2789 || TREE_TYPE (arg1) == error_mark_node)
2790 return 0;
2792 /* Similar, if either does not have a type (like a released SSA name),
2793 they aren't equal. */
2794 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2795 return 0;
2797 /* We cannot consider pointers to different address spaces equal. */
2798 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2799 && POINTER_TYPE_P (TREE_TYPE (arg1))
2800 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2801 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2802 return 0;
2804 /* Check equality of integer constants before bailing out due to
2805 precision differences. */
2806 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2808 /* Address of INTEGER_CST is not defined; check that we did not forget
2809 to drop the OEP_ADDRESS_OF flags. */
2810 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2811 return tree_int_cst_equal (arg0, arg1);
2814 if (!(flags & OEP_ADDRESS_OF))
2816 /* If the two types don't have the same signedness, then we can't consider
2817 them equal. We must check this before the STRIP_NOPS calls
2818 because they may change the signedness of the arguments. As pointers
2819 strictly don't have a signedness, require either two pointers or
2820 two non-pointers as well. */
2821 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2822 || POINTER_TYPE_P (TREE_TYPE (arg0))
2823 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2824 return 0;
2826 /* If the two types don't have the same precision, then it is not safe
2827 to strip NOPs. */
2828 if (element_precision (TREE_TYPE (arg0))
2829 != element_precision (TREE_TYPE (arg1)))
2830 return 0;
2832 STRIP_NOPS (arg0);
2833 STRIP_NOPS (arg1);
2835 #if 0
2836 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2837 sanity check once the issue is solved. */
2838 else
2839 /* Addresses of conversions and SSA_NAMEs (and many other things)
2840 are not defined. Check that we did not forget to drop the
2841 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2842 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2843 && TREE_CODE (arg0) != SSA_NAME);
2844 #endif
2846 /* In case both args are comparisons but with different comparison
2847 code, try to swap the comparison operands of one arg to produce
2848 a match and compare that variant. */
2849 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2850 && COMPARISON_CLASS_P (arg0)
2851 && COMPARISON_CLASS_P (arg1))
2853 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2855 if (TREE_CODE (arg0) == swap_code)
2856 return operand_equal_p (TREE_OPERAND (arg0, 0),
2857 TREE_OPERAND (arg1, 1), flags)
2858 && operand_equal_p (TREE_OPERAND (arg0, 1),
2859 TREE_OPERAND (arg1, 0), flags);
2862 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2864 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2865 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2867 else if (flags & OEP_ADDRESS_OF)
2869 /* If we are interested in comparing addresses ignore
2870 MEM_REF wrappings of the base that can appear just for
2871 TBAA reasons. */
2872 if (TREE_CODE (arg0) == MEM_REF
2873 && DECL_P (arg1)
2874 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2875 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2876 && integer_zerop (TREE_OPERAND (arg0, 1)))
2877 return 1;
2878 else if (TREE_CODE (arg1) == MEM_REF
2879 && DECL_P (arg0)
2880 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2881 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2882 && integer_zerop (TREE_OPERAND (arg1, 1)))
2883 return 1;
2884 return 0;
2886 else
2887 return 0;
2890 /* When not checking addresses, this is needed for conversions and for
2891 COMPONENT_REF. Might as well play it safe and always test this. */
2892 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2893 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2894 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2895 && !(flags & OEP_ADDRESS_OF)))
2896 return 0;
2898 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2899 We don't care about side effects in that case because the SAVE_EXPR
2900 takes care of that for us. In all other cases, two expressions are
2901 equal if they have no side effects. If we have two identical
2902 expressions with side effects that should be treated the same due
2903 to the only side effects being identical SAVE_EXPR's, that will
2904 be detected in the recursive calls below.
2905 If we are taking an invariant address of two identical objects
2906 they are necessarily equal as well. */
2907 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2908 && (TREE_CODE (arg0) == SAVE_EXPR
2909 || (flags & OEP_MATCH_SIDE_EFFECTS)
2910 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2911 return 1;
2913 /* Next handle constant cases, those for which we can return 1 even
2914 if ONLY_CONST is set. */
2915 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2916 switch (TREE_CODE (arg0))
2918 case INTEGER_CST:
2919 return tree_int_cst_equal (arg0, arg1);
2921 case FIXED_CST:
2922 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2923 TREE_FIXED_CST (arg1));
2925 case REAL_CST:
2926 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2927 return 1;
2930 if (!HONOR_SIGNED_ZEROS (arg0))
2932 /* If we do not distinguish between signed and unsigned zero,
2933 consider them equal. */
2934 if (real_zerop (arg0) && real_zerop (arg1))
2935 return 1;
2937 return 0;
2939 case VECTOR_CST:
2941 unsigned i;
2943 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2944 return 0;
2946 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2948 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2949 VECTOR_CST_ELT (arg1, i), flags))
2950 return 0;
2952 return 1;
2955 case COMPLEX_CST:
2956 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2957 flags)
2958 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2959 flags));
2961 case STRING_CST:
2962 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2963 && ! memcmp (TREE_STRING_POINTER (arg0),
2964 TREE_STRING_POINTER (arg1),
2965 TREE_STRING_LENGTH (arg0)));
2967 case ADDR_EXPR:
2968 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2969 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2970 flags | OEP_ADDRESS_OF
2971 | OEP_MATCH_SIDE_EFFECTS);
2972 case CONSTRUCTOR:
2973 /* In GIMPLE empty constructors are allowed in initializers of
2974 aggregates. */
2975 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2976 default:
2977 break;
2980 if (flags & OEP_ONLY_CONST)
2981 return 0;
2983 /* Define macros to test an operand from arg0 and arg1 for equality and a
2984 variant that allows null and views null as being different from any
2985 non-null value. In the latter case, if either is null, they both
2986 must be; otherwise, do the normal comparison. */
2987 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2988 TREE_OPERAND (arg1, N), flags)
2990 #define OP_SAME_WITH_NULL(N) \
2991 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2992 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2994 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2996 case tcc_unary:
2997 /* Two conversions are equal only if signedness and modes match. */
2998 switch (TREE_CODE (arg0))
3000 CASE_CONVERT:
3001 case FIX_TRUNC_EXPR:
3002 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3003 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3004 return 0;
3005 break;
3006 default:
3007 break;
3010 return OP_SAME (0);
3013 case tcc_comparison:
3014 case tcc_binary:
3015 if (OP_SAME (0) && OP_SAME (1))
3016 return 1;
3018 /* For commutative ops, allow the other order. */
3019 return (commutative_tree_code (TREE_CODE (arg0))
3020 && operand_equal_p (TREE_OPERAND (arg0, 0),
3021 TREE_OPERAND (arg1, 1), flags)
3022 && operand_equal_p (TREE_OPERAND (arg0, 1),
3023 TREE_OPERAND (arg1, 0), flags));
3025 case tcc_reference:
3026 /* If either of the pointer (or reference) expressions we are
3027 dereferencing contain a side effect, these cannot be equal,
3028 but their addresses can be. */
3029 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3030 && (TREE_SIDE_EFFECTS (arg0)
3031 || TREE_SIDE_EFFECTS (arg1)))
3032 return 0;
3034 switch (TREE_CODE (arg0))
3036 case INDIRECT_REF:
3037 if (!(flags & OEP_ADDRESS_OF)
3038 && (TYPE_ALIGN (TREE_TYPE (arg0))
3039 != TYPE_ALIGN (TREE_TYPE (arg1))))
3040 return 0;
3041 flags &= ~OEP_ADDRESS_OF;
3042 return OP_SAME (0);
3044 case IMAGPART_EXPR:
3045 /* Require the same offset. */
3046 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3047 TYPE_SIZE (TREE_TYPE (arg1)),
3048 flags & ~OEP_ADDRESS_OF))
3049 return 0;
3051 /* Fallthru. */
3052 case REALPART_EXPR:
3053 case VIEW_CONVERT_EXPR:
3054 return OP_SAME (0);
3056 case TARGET_MEM_REF:
3057 case MEM_REF:
3058 if (!(flags & OEP_ADDRESS_OF))
3060 /* Require equal access sizes. */
3061 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3062 && (!TYPE_SIZE (TREE_TYPE (arg0))
3063 || !TYPE_SIZE (TREE_TYPE (arg1))
3064 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3065 TYPE_SIZE (TREE_TYPE (arg1)),
3066 flags)))
3067 return 0;
3068 /* Verify that access happens in similar types. */
3069 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3070 return 0;
3071 /* Verify that accesses are TBAA compatible. */
3072 if (!alias_ptr_types_compatible_p
3073 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3074 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3075 || (MR_DEPENDENCE_CLIQUE (arg0)
3076 != MR_DEPENDENCE_CLIQUE (arg1))
3077 || (MR_DEPENDENCE_BASE (arg0)
3078 != MR_DEPENDENCE_BASE (arg1)))
3079 return 0;
3080 /* Verify that alignment is compatible. */
3081 if (TYPE_ALIGN (TREE_TYPE (arg0))
3082 != TYPE_ALIGN (TREE_TYPE (arg1)))
3083 return 0;
3085 flags &= ~OEP_ADDRESS_OF;
3086 return (OP_SAME (0) && OP_SAME (1)
3087 /* TARGET_MEM_REFs require equal extra operands. */
3088 && (TREE_CODE (arg0) != TARGET_MEM_REF
3089 || (OP_SAME_WITH_NULL (2)
3090 && OP_SAME_WITH_NULL (3)
3091 && OP_SAME_WITH_NULL (4))));
3093 case ARRAY_REF:
3094 case ARRAY_RANGE_REF:
3095 if (!OP_SAME (0))
3096 return 0;
3097 flags &= ~OEP_ADDRESS_OF;
3098 /* First compare the array indexes by value, since constant indexes
3099 may have different types but the same value here. */
3100 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3101 TREE_OPERAND (arg1, 1))
3102 || OP_SAME (1))
3103 && OP_SAME_WITH_NULL (2)
3104 && OP_SAME_WITH_NULL (3)
3105 /* Compare low bound and element size as with OEP_ADDRESS_OF
3106 we have to account for the offset of the ref. */
3107 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3108 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3109 || (operand_equal_p (array_ref_low_bound
3110 (CONST_CAST_TREE (arg0)),
3111 array_ref_low_bound
3112 (CONST_CAST_TREE (arg1)), flags)
3113 && operand_equal_p (array_ref_element_size
3114 (CONST_CAST_TREE (arg0)),
3115 array_ref_element_size
3116 (CONST_CAST_TREE (arg1)),
3117 flags))));
3119 case COMPONENT_REF:
3120 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3121 may be NULL when we're called to compare MEM_EXPRs. */
3122 if (!OP_SAME_WITH_NULL (0)
3123 || !OP_SAME (1))
3124 return 0;
3125 flags &= ~OEP_ADDRESS_OF;
3126 return OP_SAME_WITH_NULL (2);
3128 case BIT_FIELD_REF:
3129 if (!OP_SAME (0))
3130 return 0;
3131 flags &= ~OEP_ADDRESS_OF;
3132 return OP_SAME (1) && OP_SAME (2);
3134 default:
3135 return 0;
3138 case tcc_expression:
3139 switch (TREE_CODE (arg0))
3141 case ADDR_EXPR:
3142 /* Be sure we pass right ADDRESS_OF flag. */
3143 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3144 return operand_equal_p (TREE_OPERAND (arg0, 0),
3145 TREE_OPERAND (arg1, 0),
3146 flags | OEP_ADDRESS_OF);
3148 case TRUTH_NOT_EXPR:
3149 return OP_SAME (0);
3151 case TRUTH_ANDIF_EXPR:
3152 case TRUTH_ORIF_EXPR:
3153 return OP_SAME (0) && OP_SAME (1);
3155 case FMA_EXPR:
3156 case WIDEN_MULT_PLUS_EXPR:
3157 case WIDEN_MULT_MINUS_EXPR:
3158 if (!OP_SAME (2))
3159 return 0;
3160 /* The multiplication operands are commutative. */
3161 /* FALLTHRU */
3163 case TRUTH_AND_EXPR:
3164 case TRUTH_OR_EXPR:
3165 case TRUTH_XOR_EXPR:
3166 if (OP_SAME (0) && OP_SAME (1))
3167 return 1;
3169 /* Otherwise take into account this is a commutative operation. */
3170 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3171 TREE_OPERAND (arg1, 1), flags)
3172 && operand_equal_p (TREE_OPERAND (arg0, 1),
3173 TREE_OPERAND (arg1, 0), flags));
3175 case COND_EXPR:
3176 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3177 return 0;
3178 flags &= ~OEP_ADDRESS_OF;
3179 return OP_SAME (0);
3181 case VEC_COND_EXPR:
3182 case DOT_PROD_EXPR:
3183 case BIT_INSERT_EXPR:
3184 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3186 case MODIFY_EXPR:
3187 case INIT_EXPR:
3188 case COMPOUND_EXPR:
3189 case PREDECREMENT_EXPR:
3190 case PREINCREMENT_EXPR:
3191 case POSTDECREMENT_EXPR:
3192 case POSTINCREMENT_EXPR:
3193 if (flags & OEP_LEXICOGRAPHIC)
3194 return OP_SAME (0) && OP_SAME (1);
3195 return 0;
3197 case CLEANUP_POINT_EXPR:
3198 case EXPR_STMT:
3199 if (flags & OEP_LEXICOGRAPHIC)
3200 return OP_SAME (0);
3201 return 0;
3203 default:
3204 return 0;
3207 case tcc_vl_exp:
3208 switch (TREE_CODE (arg0))
3210 case CALL_EXPR:
3211 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3212 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3213 /* If the two CALL_EXPRs are not both internal or both normal
3214 function calls, then they are not equal. */
3215 return 0;
3216 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3218 /* If the CALL_EXPRs call different internal functions, then they
3219 are not equal. */
3220 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3221 return 0;
3223 else
3225 /* If the CALL_EXPRs call different functions, then they are not
3226 equal. */
3227 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3228 flags))
3229 return 0;
3232 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3234 unsigned int cef = call_expr_flags (arg0);
3235 if (flags & OEP_PURE_SAME)
3236 cef &= ECF_CONST | ECF_PURE;
3237 else
3238 cef &= ECF_CONST;
3239 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3240 return 0;
3243 /* Now see if all the arguments are the same. */
3245 const_call_expr_arg_iterator iter0, iter1;
3246 const_tree a0, a1;
3247 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3248 a1 = first_const_call_expr_arg (arg1, &iter1);
3249 a0 && a1;
3250 a0 = next_const_call_expr_arg (&iter0),
3251 a1 = next_const_call_expr_arg (&iter1))
3252 if (! operand_equal_p (a0, a1, flags))
3253 return 0;
3255 /* If we get here and both argument lists are exhausted
3256 then the CALL_EXPRs are equal. */
3257 return ! (a0 || a1);
3259 default:
3260 return 0;
3263 case tcc_declaration:
3264 /* Consider __builtin_sqrt equal to sqrt. */
3265 return (TREE_CODE (arg0) == FUNCTION_DECL
3266 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3267 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3268 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3270 case tcc_exceptional:
3271 if (TREE_CODE (arg0) == CONSTRUCTOR)
3273 /* In GIMPLE constructors are used only to build vectors from
3274 elements. Individual elements in the constructor must be
3275 indexed in increasing order and form an initial sequence.
3277 We make no effort to compare constructors in generic.
3278 (see sem_variable::equals in ipa-icf which can do so for
3279 constants). */
3280 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3281 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3282 return 0;
3284 /* Be sure that the vectors constructed have the same representation.
3285 So far we have only tested that element precisions and modes match.
3286 Vectors may be BLKmode, so also check that the numbers of
3287 parts match. */
3288 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3289 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3290 return 0;
3292 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3293 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3294 unsigned int len = vec_safe_length (v0);
3296 if (len != vec_safe_length (v1))
3297 return 0;
3299 for (unsigned int i = 0; i < len; i++)
3301 constructor_elt *c0 = &(*v0)[i];
3302 constructor_elt *c1 = &(*v1)[i];
3304 if (!operand_equal_p (c0->value, c1->value, flags)
3305 /* In GIMPLE the indexes can be either NULL or matching i.
3306 Double check this so we won't get false
3307 positives for GENERIC. */
3308 || (c0->index
3309 && (TREE_CODE (c0->index) != INTEGER_CST
3310 || !compare_tree_int (c0->index, i)))
3311 || (c1->index
3312 && (TREE_CODE (c1->index) != INTEGER_CST
3313 || !compare_tree_int (c1->index, i))))
3314 return 0;
3316 return 1;
3318 else if (TREE_CODE (arg0) == STATEMENT_LIST
3319 && (flags & OEP_LEXICOGRAPHIC))
3321 /* Compare the STATEMENT_LISTs. */
3322 tree_stmt_iterator tsi1, tsi2;
3323 tree body1 = CONST_CAST_TREE (arg0);
3324 tree body2 = CONST_CAST_TREE (arg1);
3325 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3326 tsi_next (&tsi1), tsi_next (&tsi2))
3328 /* The lists don't have the same number of statements. */
3329 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3330 return 0;
3331 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3332 return 1;
3333 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3334 OEP_LEXICOGRAPHIC))
3335 return 0;
3338 return 0;
3340 case tcc_statement:
3341 switch (TREE_CODE (arg0))
3343 case RETURN_EXPR:
3344 if (flags & OEP_LEXICOGRAPHIC)
3345 return OP_SAME_WITH_NULL (0);
3346 return 0;
3347 default:
3348 return 0;
3351 default:
3352 return 0;
3355 #undef OP_SAME
3356 #undef OP_SAME_WITH_NULL
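/* For example, with the commutative handling above, a + b compares equal
   to b + a, and x < y compares equal to y > x via swap_tree_comparison,
   while the non-commutative a - b and b - a remain distinct.  */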
3359 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3360 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3362 When in doubt, return 0. */
3364 static int
3365 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3367 int unsignedp1, unsignedpo;
3368 tree primarg0, primarg1, primother;
3369 unsigned int correct_width;
3371 if (operand_equal_p (arg0, arg1, 0))
3372 return 1;
3374 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3375 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3376 return 0;
3378 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3379 and see if the inner values are the same. This removes any
3380 signedness comparison, which doesn't matter here. */
3381 primarg0 = arg0, primarg1 = arg1;
3382 STRIP_NOPS (primarg0);
3383 STRIP_NOPS (primarg1);
3384 if (operand_equal_p (primarg0, primarg1, 0))
3385 return 1;
3387 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3388 actual comparison operand, ARG0.
3390 First throw away any conversions to wider types
3391 already present in the operands. */
3393 primarg1 = get_narrower (arg1, &unsignedp1);
3394 primother = get_narrower (other, &unsignedpo);
3396 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3397 if (unsignedp1 == unsignedpo
3398 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3399 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3401 tree type = TREE_TYPE (arg0);
3403 /* Make sure shorter operand is extended the right way
3404 to match the longer operand. */
3405 primarg1 = fold_convert (signed_or_unsigned_type_for
3406 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3408 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3409 return 1;
3412 return 0;
3415 /* See if ARG is an expression that is either a comparison or is performing
3416 arithmetic on comparisons. The comparisons must only be comparing
3417 two different values, which will be stored in *CVAL1 and *CVAL2; if
3418 they are nonzero it means that some operands have already been found.
3419 No variables may be used anywhere else in the expression except in the
3420 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3421 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3423 If this is true, return 1. Otherwise, return zero. */
3425 static int
3426 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3428 enum tree_code code = TREE_CODE (arg);
3429 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3431 /* We can handle some of the tcc_expression cases here. */
3432 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3433 tclass = tcc_unary;
3434 else if (tclass == tcc_expression
3435 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3436 || code == COMPOUND_EXPR))
3437 tclass = tcc_binary;
3439 else if (tclass == tcc_expression && code == SAVE_EXPR
3440 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3442 /* If we've already found a CVAL1 or CVAL2, this expression is
3443 too complex to handle. */
3444 if (*cval1 || *cval2)
3445 return 0;
3447 tclass = tcc_unary;
3448 *save_p = 1;
3451 switch (tclass)
3453 case tcc_unary:
3454 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3456 case tcc_binary:
3457 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3458 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3459 cval1, cval2, save_p));
3461 case tcc_constant:
3462 return 1;
3464 case tcc_expression:
3465 if (code == COND_EXPR)
3466 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3467 cval1, cval2, save_p)
3468 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3469 cval1, cval2, save_p)
3470 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3471 cval1, cval2, save_p));
3472 return 0;
3474 case tcc_comparison:
3475 /* First see if we can handle the first operand, then the second. For
3476 the second operand, we know *CVAL1 can't be zero. Each side of the
3477 comparison must be one of the two values; test for the
3478 case where this isn't true by failing if the two operands
3479 are the same. */
3481 if (operand_equal_p (TREE_OPERAND (arg, 0),
3482 TREE_OPERAND (arg, 1), 0))
3483 return 0;
3485 if (*cval1 == 0)
3486 *cval1 = TREE_OPERAND (arg, 0);
3487 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3489 else if (*cval2 == 0)
3490 *cval2 = TREE_OPERAND (arg, 0);
3491 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3493 else
3494 return 0;
3496 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3498 else if (*cval2 == 0)
3499 *cval2 = TREE_OPERAND (arg, 1);
3500 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3502 else
3503 return 0;
3505 return 1;
3507 default:
3508 return 0;
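/* For example, (a < b) || (a == b) qualifies: its comparisons use only
   the two values a and b, which end up in *CVAL1 and *CVAL2.  An
   expression such as (a < b) || (c == d) involves four distinct values
   and therefore returns zero.  */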
3512 /* ARG is a tree that is known to contain just arithmetic operations and
3513 comparisons. Evaluate the operations in the tree substituting NEW0 for
3514 any occurrence of OLD0 as an operand of a comparison and likewise for
3515 NEW1 and OLD1. */
3517 static tree
3518 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3519 tree old1, tree new1)
3521 tree type = TREE_TYPE (arg);
3522 enum tree_code code = TREE_CODE (arg);
3523 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3525 /* We can handle some of the tcc_expression cases here. */
3526 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3527 tclass = tcc_unary;
3528 else if (tclass == tcc_expression
3529 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3530 tclass = tcc_binary;
3532 switch (tclass)
3534 case tcc_unary:
3535 return fold_build1_loc (loc, code, type,
3536 eval_subst (loc, TREE_OPERAND (arg, 0),
3537 old0, new0, old1, new1));
3539 case tcc_binary:
3540 return fold_build2_loc (loc, code, type,
3541 eval_subst (loc, TREE_OPERAND (arg, 0),
3542 old0, new0, old1, new1),
3543 eval_subst (loc, TREE_OPERAND (arg, 1),
3544 old0, new0, old1, new1));
3546 case tcc_expression:
3547 switch (code)
3549 case SAVE_EXPR:
3550 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3551 old1, new1);
3553 case COMPOUND_EXPR:
3554 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3555 old1, new1);
3557 case COND_EXPR:
3558 return fold_build3_loc (loc, code, type,
3559 eval_subst (loc, TREE_OPERAND (arg, 0),
3560 old0, new0, old1, new1),
3561 eval_subst (loc, TREE_OPERAND (arg, 1),
3562 old0, new0, old1, new1),
3563 eval_subst (loc, TREE_OPERAND (arg, 2),
3564 old0, new0, old1, new1));
3565 default:
3566 break;
3568 /* Fall through - ??? */
3570 case tcc_comparison:
3572 tree arg0 = TREE_OPERAND (arg, 0);
3573 tree arg1 = TREE_OPERAND (arg, 1);
3575 /* We need to check both for exact equality and tree equality. The
3576 former will be true if the operand has a side-effect. In that
3577 case, we know the operand occurred exactly once. */
3579 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3580 arg0 = new0;
3581 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3582 arg0 = new1;
3584 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3585 arg1 = new0;
3586 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3587 arg1 = new1;
3589 return fold_build2_loc (loc, code, type, arg0, arg1);
3592 default:
3593 return arg;
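/* For example, substituting OLD0 = a -> NEW0 = x and OLD1 = b ->
   NEW1 = y in the tree for (a < b) || (a == b) rebuilds it as
   (x < y) || (x == y), folding each rebuilt comparison as it goes.  */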
3597 /* Return a tree for the case when the result of an expression is RESULT
3598 converted to TYPE and OMITTED was previously an operand of the expression
3599 but is now not needed (e.g., we folded OMITTED * 0).
3601 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3602 the conversion of RESULT to TYPE. */
3604 tree
3605 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3607 tree t = fold_convert_loc (loc, type, result);
3609 /* If the resulting operand is an empty statement, just return the omitted
3610 statement cast to void. */
3611 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3612 return build1_loc (loc, NOP_EXPR, void_type_node,
3613 fold_ignored_result (omitted));
3615 if (TREE_SIDE_EFFECTS (omitted))
3616 return build2_loc (loc, COMPOUND_EXPR, type,
3617 fold_ignored_result (omitted), t);
3619 return non_lvalue_loc (loc, t);
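/* For example, when f () * 0 is folded, the call cannot simply be
   dropped: the result is built as (f (), 0), a COMPOUND_EXPR that
   evaluates the omitted operand for its side effects and then yields
   the constant.  */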
3622 /* Return a tree for the case when the result of an expression is RESULT
3623 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3624 of the expression but are now not needed.
3626 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3627 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3628 evaluated before OMITTED2. Otherwise, if neither has side effects,
3629 just do the conversion of RESULT to TYPE. */
3631 tree
3632 omit_two_operands_loc (location_t loc, tree type, tree result,
3633 tree omitted1, tree omitted2)
3635 tree t = fold_convert_loc (loc, type, result);
3637 if (TREE_SIDE_EFFECTS (omitted2))
3638 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3639 if (TREE_SIDE_EFFECTS (omitted1))
3640 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3642 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3646 /* Return a simplified tree node for the truth-negation of ARG. This
3647 never alters ARG itself. We assume that ARG is an operation that
3648 returns a truth value (0 or 1).
3650 FIXME: one would think we would fold the result, but it causes
3651 problems with the dominator optimizer. */
3653 static tree
3654 fold_truth_not_expr (location_t loc, tree arg)
3656 tree type = TREE_TYPE (arg);
3657 enum tree_code code = TREE_CODE (arg);
3658 location_t loc1, loc2;
3660 /* If this is a comparison, we can simply invert it, except for
3661 floating-point non-equality comparisons, in which case we just
3662 enclose a TRUTH_NOT_EXPR around what we have. */
3664 if (TREE_CODE_CLASS (code) == tcc_comparison)
3666 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3667 if (FLOAT_TYPE_P (op_type)
3668 && flag_trapping_math
3669 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3670 && code != NE_EXPR && code != EQ_EXPR)
3671 return NULL_TREE;
3673 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3674 if (code == ERROR_MARK)
3675 return NULL_TREE;
3677 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3678 TREE_OPERAND (arg, 1));
3679 if (TREE_NO_WARNING (arg))
3680 TREE_NO_WARNING (ret) = 1;
3681 return ret;
3684 switch (code)
3686 case INTEGER_CST:
3687 return constant_boolean_node (integer_zerop (arg), type);
3689 case TRUTH_AND_EXPR:
3690 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3691 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3692 return build2_loc (loc, TRUTH_OR_EXPR, type,
3693 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3694 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3696 case TRUTH_OR_EXPR:
3697 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3698 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3699 return build2_loc (loc, TRUTH_AND_EXPR, type,
3700 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3701 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3703 case TRUTH_XOR_EXPR:
3704 /* Here we can invert either operand. We invert the first operand
3705 unless the second operand is a TRUTH_NOT_EXPR in which case our
3706 result is the XOR of the first operand with the inside of the
3707 negation of the second operand. */
3709 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3710 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3711 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3712 else
3713 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3714 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3715 TREE_OPERAND (arg, 1));
3717 case TRUTH_ANDIF_EXPR:
3718 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3719 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3720 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3721 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3722 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3724 case TRUTH_ORIF_EXPR:
3725 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3726 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3727 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3728 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3729 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3731 case TRUTH_NOT_EXPR:
3732 return TREE_OPERAND (arg, 0);
3734 case COND_EXPR:
3736 tree arg1 = TREE_OPERAND (arg, 1);
3737 tree arg2 = TREE_OPERAND (arg, 2);
3739 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3740 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3742 /* A COND_EXPR may have a throw as one operand, which
3743 then has void type. Just leave void operands
3744 as they are. */
3745 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3746 VOID_TYPE_P (TREE_TYPE (arg1))
3747 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3748 VOID_TYPE_P (TREE_TYPE (arg2))
3749 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3752 case COMPOUND_EXPR:
3753 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3754 return build2_loc (loc, COMPOUND_EXPR, type,
3755 TREE_OPERAND (arg, 0),
3756 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3758 case NON_LVALUE_EXPR:
3759 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3760 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3762 CASE_CONVERT:
3763 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3764 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3766 /* fall through */
3768 case FLOAT_EXPR:
3769 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3770 return build1_loc (loc, TREE_CODE (arg), type,
3771 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3773 case BIT_AND_EXPR:
3774 if (!integer_onep (TREE_OPERAND (arg, 1)))
3775 return NULL_TREE;
3776 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3778 case SAVE_EXPR:
3779 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3781 case CLEANUP_POINT_EXPR:
3782 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3783 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3784 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3786 default:
3787 return NULL_TREE;
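/* Worked examples of the negations above:

     !(a && b)     -->  !a || !b        (De Morgan)
     !(a || b)     -->  !a && !b
     !(a ? b : c)  -->  a ? !b : !c
     !(x & 1)      -->  (x & 1) == 0  */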
3791 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3792 assume that ARG is an operation that returns a truth value (0 or 1
3793 for scalars, 0 or -1 for vectors). Return the folded expression if
3794 folding is successful. Otherwise, return NULL_TREE. */
3796 static tree
3797 fold_invert_truthvalue (location_t loc, tree arg)
3799 tree type = TREE_TYPE (arg);
3800 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3801 ? BIT_NOT_EXPR
3802 : TRUTH_NOT_EXPR,
3803 type, arg);
3806 /* Return a simplified tree node for the truth-negation of ARG. This
3807 never alters ARG itself. We assume that ARG is an operation that
3808 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3810 tree
3811 invert_truthvalue_loc (location_t loc, tree arg)
3813 if (TREE_CODE (arg) == ERROR_MARK)
3814 return arg;
3816 tree type = TREE_TYPE (arg);
3817 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3818 ? BIT_NOT_EXPR
3819 : TRUTH_NOT_EXPR,
3820 type, arg);
3823 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3824 with code CODE. This optimization is unsafe. */
3825 static tree
3826 distribute_real_division (location_t loc, enum tree_code code, tree type,
3827 tree arg0, tree arg1)
3829 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3830 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3832 /* (A / C) +- (B / C) -> (A +- B) / C. */
3833 if (mul0 == mul1
3834 && operand_equal_p (TREE_OPERAND (arg0, 1),
3835 TREE_OPERAND (arg1, 1), 0))
3836 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3837 fold_build2_loc (loc, code, type,
3838 TREE_OPERAND (arg0, 0),
3839 TREE_OPERAND (arg1, 0)),
3840 TREE_OPERAND (arg0, 1));
3842 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3843 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3844 TREE_OPERAND (arg1, 0), 0)
3845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3846 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3848 REAL_VALUE_TYPE r0, r1;
3849 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3850 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3851 if (!mul0)
3852 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3853 if (!mul1)
3854 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3855 real_arithmetic (&r0, code, &r0, &r1);
3856 return fold_build2_loc (loc, MULT_EXPR, type,
3857 TREE_OPERAND (arg0, 0),
3858 build_real (type, r0));
3861 return NULL_TREE;
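/* Worked examples of the two patterns above:

     x/c + y/c    -->  (x + y)/c        e.g. x/3.0 + y/3.0 --> (x + y)/3.0
     x/c1 - x/c2  -->  x*(1/c1 - 1/c2)  e.g. x/2.0 - x/4.0 --> x*0.25

   Both can change rounding and overflow behavior, which is why the
   transformation is labeled unsafe.  */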
3864 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3865 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3866 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3867 is the original memory reference used to preserve the alias set of
3868 the access. */
3870 static tree
3871 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3872 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3873 int unsignedp, int reversep)
3875 tree result, bftype;
3877 /* Attempt not to lose the access path if possible. */
3878 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3880 tree ninner = TREE_OPERAND (orig_inner, 0);
3881 machine_mode nmode;
3882 HOST_WIDE_INT nbitsize, nbitpos;
3883 tree noffset;
3884 int nunsignedp, nreversep, nvolatilep = 0;
3885 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3886 &noffset, &nmode, &nunsignedp,
3887 &nreversep, &nvolatilep);
3888 if (base == inner
3889 && noffset == NULL_TREE
3890 && nbitsize >= bitsize
3891 && nbitpos <= bitpos
3892 && bitpos + bitsize <= nbitpos + nbitsize
3893 && !reversep
3894 && !nreversep
3895 && !nvolatilep)
3897 inner = ninner;
3898 bitpos -= nbitpos;
3902 alias_set_type iset = get_alias_set (orig_inner);
3903 if (iset == 0 && get_alias_set (inner) != iset)
3904 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3905 build_fold_addr_expr (inner),
3906 build_int_cst (ptr_type_node, 0));
3908 if (bitpos == 0 && !reversep)
3910 tree size = TYPE_SIZE (TREE_TYPE (inner));
3911 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3912 || POINTER_TYPE_P (TREE_TYPE (inner)))
3913 && tree_fits_shwi_p (size)
3914 && tree_to_shwi (size) == bitsize)
3915 return fold_convert_loc (loc, type, inner);
3918 bftype = type;
3919 if (TYPE_PRECISION (bftype) != bitsize
3920 || TYPE_UNSIGNED (bftype) == !unsignedp)
3921 bftype = build_nonstandard_integer_type (bitsize, 0);
3923 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3924 size_int (bitsize), bitsize_int (bitpos));
3925 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3927 if (bftype != type)
3928 result = fold_convert_loc (loc, type, result);
3930 return result;
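/* For example (a sketch): extracting 8 bits at bit position 16 from a
   32-bit word W as an unsigned value yields, roughly,

     BIT_FIELD_REF <W, 8, 16>

   built in an 8-bit unsigned type and then converted to TYPE if the
   two differ.  */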
3933 /* Optimize a bit-field compare.
3935 There are two cases: First is a compare against a constant and the
3936 second is a comparison of two items where the fields are at the same
3937 bit position relative to the start of a chunk (byte, halfword, word)
3938 large enough to contain it. In these cases we can avoid the shift
3939 implicit in bitfield extractions.
3941 For constants, we emit a compare of the shifted constant with the
3942 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3943 compared. For two fields at the same position, we do the ANDs with the
3944 similar mask and compare the result of the ANDs.
3946 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3947 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3948 are the left and right operands of the comparison, respectively.
3950 If the optimization described above can be done, we return the resulting
3951 tree. Otherwise we return zero. */
3953 static tree
3954 optimize_bit_field_compare (location_t loc, enum tree_code code,
3955 tree compare_type, tree lhs, tree rhs)
3957 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3958 tree type = TREE_TYPE (lhs);
3959 tree unsigned_type;
3960 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3961 machine_mode lmode, rmode, nmode;
3962 int lunsignedp, runsignedp;
3963 int lreversep, rreversep;
3964 int lvolatilep = 0, rvolatilep = 0;
3965 tree linner, rinner = NULL_TREE;
3966 tree mask;
3967 tree offset;
3969 /* Get all the information about the extractions being done. If the bit size
3970 is the same as the size of the underlying object, we aren't doing an
3971 extraction at all and so can do nothing. We also don't want to
3972 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3973 then will no longer be able to replace it. */
3974 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3975 &lunsignedp, &lreversep, &lvolatilep);
3976 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3977 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3978 return 0;
3980 if (const_p)
3981 rreversep = lreversep;
3982 else
3984 /* If this is not a constant, we can only do something if bit positions,
3985 sizes, signedness and storage order are the same. */
3986 rinner
3987 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3988 &runsignedp, &rreversep, &rvolatilep);
3990 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3991 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3992 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3993 return 0;
3996 /* Honor the C++ memory model and mimic what RTL expansion does. */
3997 unsigned HOST_WIDE_INT bitstart = 0;
3998 unsigned HOST_WIDE_INT bitend = 0;
3999 if (TREE_CODE (lhs) == COMPONENT_REF)
4001 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4002 if (offset != NULL_TREE)
4003 return 0;
4006 /* See if we can find a mode to refer to this field. We should be able to,
4007 but fail if we can't. */
4008 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4009 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4010 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4011 TYPE_ALIGN (TREE_TYPE (rinner))),
4012 word_mode, false);
4013 if (nmode == VOIDmode)
4014 return 0;
4016 /* Set the unsigned type of the precision of this mode for the
4017 shifts below. */
4018 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4020 /* Compute the bit position and size for the new reference and our offset
4021 within it. If the new reference is the same size as the original, we
4022 won't optimize anything, so return zero. */
4023 nbitsize = GET_MODE_BITSIZE (nmode);
4024 nbitpos = lbitpos & ~ (nbitsize - 1);
4025 lbitpos -= nbitpos;
4026 if (nbitsize == lbitsize)
4027 return 0;
4029 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4030 lbitpos = nbitsize - lbitsize - lbitpos;
4032 /* Make the mask to be used against the extracted field. */
4033 mask = build_int_cst_type (unsigned_type, -1);
4034 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4035 mask = const_binop (RSHIFT_EXPR, mask,
4036 size_int (nbitsize - lbitsize - lbitpos));
4038 if (! const_p)
4039 /* If not comparing with constant, just rework the comparison
4040 and return. */
4041 return fold_build2_loc (loc, code, compare_type,
4042 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4043 make_bit_field_ref (loc, linner, lhs,
4044 unsigned_type,
4045 nbitsize, nbitpos,
4046 1, lreversep),
4047 mask),
4048 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4049 make_bit_field_ref (loc, rinner, rhs,
4050 unsigned_type,
4051 nbitsize, nbitpos,
4052 1, rreversep),
4053 mask));
4055 /* Otherwise, we are handling the constant case. See if the constant is too
4056 big for the field. Warn and fold to a constant truth value if so. We do
4057 this not only for its own sake, but to avoid having to test for this
4058 error case below. If we didn't, we might generate wrong code.
4060 For unsigned fields, the constant shifted right by the field length should
4061 be all zero. For signed fields, the high-order bits should agree with
4062 the sign bit. */
4064 if (lunsignedp)
4066 if (wi::lrshift (rhs, lbitsize) != 0)
4068 warning (0, "comparison is always %d due to width of bit-field",
4069 code == NE_EXPR);
4070 return constant_boolean_node (code == NE_EXPR, compare_type);
4073 else
4075 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4076 if (tem != 0 && tem != -1)
4078 warning (0, "comparison is always %d due to width of bit-field",
4079 code == NE_EXPR);
4080 return constant_boolean_node (code == NE_EXPR, compare_type);
4084 /* Single-bit compares should always be against zero. */
4085 if (lbitsize == 1 && ! integer_zerop (rhs))
4087 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4088 rhs = build_int_cst (type, 0);
4091 /* Make a new bitfield reference, shift the constant over the
4092 appropriate number of bits and mask it with the computed mask
4093 (in case this was a signed field). If we changed it, make a new one. */
4094 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4095 nbitsize, nbitpos, 1, lreversep);
4097 rhs = const_binop (BIT_AND_EXPR,
4098 const_binop (LSHIFT_EXPR,
4099 fold_convert_loc (loc, unsigned_type, rhs),
4100 size_int (lbitpos)),
4101 mask);
4103 lhs = build2_loc (loc, code, compare_type,
4104 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4105 return lhs;
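/* Hypothetical example of the constant case above, on a little-endian
   target where the field lives in one byte:

     struct { unsigned a : 3; unsigned b : 5; } s;
     s.b == 5

   becomes, roughly,

     (BIT_FIELD_REF <s, 8, 0> & 0xf8) == (5 << 3)

   i.e. the containing byte is masked and compared against the shifted
   constant, avoiding the extraction shift.  */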
4108 /* Subroutine for fold_truth_andor_1: decode a field reference.
4110 If EXP is a comparison reference, we return the innermost reference.
4112 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4113 set to the starting bit number.
4115 If the innermost field can be completely contained in a mode-sized
4116 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4118 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4119 otherwise it is not changed.
4121 *PUNSIGNEDP is set to the signedness of the field.
4123 *PREVERSEP is set to the storage order of the field.
4125 *PMASK is set to the mask used. This is either contained in a
4126 BIT_AND_EXPR or derived from the width of the field.
4128 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4130 Return 0 if this is not a component reference or is one that we can't
4131 do anything with. */
4133 static tree
4134 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4135 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4136 int *punsignedp, int *preversep, int *pvolatilep,
4137 tree *pmask, tree *pand_mask)
4139 tree exp = *exp_;
4140 tree outer_type = 0;
4141 tree and_mask = 0;
4142 tree mask, inner, offset;
4143 tree unsigned_type;
4144 unsigned int precision;
4146 /* All the optimizations using this function assume integer fields.
4147 There are problems with FP fields since the type_for_size call
4148 below can fail for, e.g., XFmode. */
4149 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4150 return 0;
4152 /* We are interested in the bare arrangement of bits, so strip everything
4153 that doesn't affect the machine mode. However, record the type of the
4154 outermost expression if it may matter below. */
4155 if (CONVERT_EXPR_P (exp)
4156 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4157 outer_type = TREE_TYPE (exp);
4158 STRIP_NOPS (exp);
4160 if (TREE_CODE (exp) == BIT_AND_EXPR)
4162 and_mask = TREE_OPERAND (exp, 1);
4163 exp = TREE_OPERAND (exp, 0);
4164 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4165 if (TREE_CODE (and_mask) != INTEGER_CST)
4166 return 0;
4169 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4170 punsignedp, preversep, pvolatilep);
4171 if ((inner == exp && and_mask == 0)
4172 || *pbitsize < 0 || offset != 0
4173 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4174 /* Reject out-of-bound accesses (PR79731). */
4175 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4176 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4177 *pbitpos + *pbitsize) < 0))
4178 return 0;
4180 *exp_ = exp;
4182 /* If the number of bits in the reference is the same as the bitsize of
4183 the outer type, then the outer type gives the signedness. Otherwise
4184 (in case of a small bitfield) the signedness is unchanged. */
4185 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4186 *punsignedp = TYPE_UNSIGNED (outer_type);
4188 /* Compute the mask to access the bitfield. */
4189 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4190 precision = TYPE_PRECISION (unsigned_type);
4192 mask = build_int_cst_type (unsigned_type, -1);
4194 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4195 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4197 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4198 if (and_mask != 0)
4199 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4200 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4202 *pmask = mask;
4203 *pand_mask = and_mask;
4204 return inner;
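/* Illustration of the mask computation above: with *pbitsize == 3,
   type_for_size gives an 8-bit unsigned type, so the all-ones constant
   0xff shifted left and then right by 8 - 3 == 5 leaves 0x07, a mask
   covering exactly the field; any mask found in a wrapping
   BIT_AND_EXPR is then ANDed in.  */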
4207 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4208 bit positions and MASK is SIGNED. */
4210 static int
4211 all_ones_mask_p (const_tree mask, unsigned int size)
4213 tree type = TREE_TYPE (mask);
4214 unsigned int precision = TYPE_PRECISION (type);
4216 /* If this function returns true when the type of the mask is
4217 UNSIGNED, then there will be errors. In particular see
4218 gcc.c-torture/execute/990326-1.c. There does not appear to be
4219 any documentation paper trail as to why this is so. But the
4220 pre-wide-int code worked with that restriction and it has been preserved
4221 here. */
4222 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4223 return false;
4225 return wi::mask (size, false, precision) == mask;
4228 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4229 represents the sign bit of EXP's type. If EXP represents a sign
4230 or zero extension, also test VAL against the unextended type.
4231 The return value is the (sub)expression whose sign bit is VAL,
4232 or NULL_TREE otherwise. */
4234 tree
4235 sign_bit_p (tree exp, const_tree val)
4237 int width;
4238 tree t;
4240 /* Tree EXP must have an integral type. */
4241 t = TREE_TYPE (exp);
4242 if (! INTEGRAL_TYPE_P (t))
4243 return NULL_TREE;
4245 /* Tree VAL must be an integer constant. */
4246 if (TREE_CODE (val) != INTEGER_CST
4247 || TREE_OVERFLOW (val))
4248 return NULL_TREE;
4250 width = TYPE_PRECISION (t);
4251 if (wi::only_sign_bit_p (val, width))
4252 return exp;
4254 /* Handle extension from a narrower type. */
4255 if (TREE_CODE (exp) == NOP_EXPR
4256 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4257 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4259 return NULL_TREE;
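/* For instance (a sketch): with a 32-bit int X, sign_bit_p (X,
   0x80000000) returns X; and for EXP = (int) C with C an 8-bit signed
   char, the recursion also recognizes 0x80, the sign bit of the
   narrower type.  */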
4262 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4263 to be evaluated unconditionally. */
4265 static int
4266 simple_operand_p (const_tree exp)
4268 /* Strip any conversions that don't change the machine mode. */
4269 STRIP_NOPS (exp);
4271 return (CONSTANT_CLASS_P (exp)
4272 || TREE_CODE (exp) == SSA_NAME
4273 || (DECL_P (exp)
4274 && ! TREE_ADDRESSABLE (exp)
4275 && ! TREE_THIS_VOLATILE (exp)
4276 && ! DECL_NONLOCAL (exp)
4277 /* Don't regard global variables as simple. They may be
4278 allocated in ways unknown to the compiler (shared memory,
4279 #pragma weak, etc). */
4280 && ! TREE_PUBLIC (exp)
4281 && ! DECL_EXTERNAL (exp)
4282 /* Weakrefs are not safe to be read, since they can be NULL.
4283 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4284 have DECL_WEAK flag set. */
4285 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4286 /* Loading a static variable is unduly expensive, but global
4287 registers aren't expensive. */
4288 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4291 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4292 to be evaluated unconditionally.
4293 In addition to simple_operand_p, we assume that comparisons, conversions,
4294 and logic-not operations are simple if their operands are simple, too. */
4296 static bool
4297 simple_operand_p_2 (tree exp)
4299 enum tree_code code;
4301 if (TREE_SIDE_EFFECTS (exp)
4302 || tree_could_trap_p (exp))
4303 return false;
4305 while (CONVERT_EXPR_P (exp))
4306 exp = TREE_OPERAND (exp, 0);
4308 code = TREE_CODE (exp);
4310 if (TREE_CODE_CLASS (code) == tcc_comparison)
4311 return (simple_operand_p (TREE_OPERAND (exp, 0))
4312 && simple_operand_p (TREE_OPERAND (exp, 1)));
4314 if (code == TRUTH_NOT_EXPR)
4315 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4317 return simple_operand_p (exp);
4321 /* The following functions are subroutines to fold_range_test and allow it to
4322 try to change a logical combination of comparisons into a range test.
4324 For example, both
4325 X == 2 || X == 3 || X == 4 || X == 5
4326 and
4327 X >= 2 && X <= 5
4328 are converted to
4329 (unsigned) (X - 2) <= 3
4331 We describe each set of comparisons as being either inside or outside
4332 a range, using a variable named like IN_P, and then describe the
4333 range with a lower and upper bound. If one of the bounds is omitted,
4334 it represents either the highest or lowest value of the type.
4336 In the comments below, we represent a range by two numbers in brackets
4337 preceded by a "+" to designate being inside that range, or a "-" to
4338 designate being outside that range, so the condition can be inverted by
4339 flipping the prefix. An omitted bound is represented by a "-". For
4340 example, "- [-, 10]" means being outside the range starting at the lowest
4341 possible value and ending at 10, in other words, being greater than 10.
4342 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4343 always false.
4345 We set up things so that the missing bounds are handled in a consistent
4346 manner so neither a missing bound nor "true" and "false" need to be
4347 handled using a special case. */
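/* To see why the conversion above is valid: subtracting 2 maps the
   values 2..5 onto 0..3, while in unsigned arithmetic any X < 2 wraps
   around to a value near the type's maximum, so the single comparison
   (unsigned) (X - 2) <= 3 enforces both bounds at once.  */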
4349 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4350 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4351 and UPPER1_P are nonzero if the respective argument is an upper bound
4352 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4353 must be specified for a comparison. ARG1 will be converted to ARG0's
4354 type if both are specified. */
4356 static tree
4357 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4358 tree arg1, int upper1_p)
4360 tree tem;
4361 int result;
4362 int sgn0, sgn1;
4364 /* If neither arg represents infinity, do the normal operation.
4365 Else, if not a comparison, return infinity. Else handle the special
4366 comparison rules. Note that most of the cases below won't occur, but
4367 are handled for consistency. */
4369 if (arg0 != 0 && arg1 != 0)
4371 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4372 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4373 STRIP_NOPS (tem);
4374 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4377 if (TREE_CODE_CLASS (code) != tcc_comparison)
4378 return 0;
4380 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4381 for neither. In real maths, we cannot assume open ended ranges are
4382 the same. But, this is computer arithmetic, where numbers are finite.
4383 We can therefore model any missing bound with
4384 the value Z, Z being greater than any representable number. This permits
4385 us to treat unbounded ranges as equal. */
4386 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4387 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4388 switch (code)
4390 case EQ_EXPR:
4391 result = sgn0 == sgn1;
4392 break;
4393 case NE_EXPR:
4394 result = sgn0 != sgn1;
4395 break;
4396 case LT_EXPR:
4397 result = sgn0 < sgn1;
4398 break;
4399 case LE_EXPR:
4400 result = sgn0 <= sgn1;
4401 break;
4402 case GT_EXPR:
4403 result = sgn0 > sgn1;
4404 break;
4405 case GE_EXPR:
4406 result = sgn0 >= sgn1;
4407 break;
4408 default:
4409 gcc_unreachable ();
4412 return constant_boolean_node (result, type);
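/* Example of the infinity handling above: a missing upper bound gets
   sgn 1 and a missing lower bound sgn -1, so an omitted upper bound
   compares GT_EXPR-greater than an omitted lower bound, and two
   omitted upper bounds compare EQ_EXPR-equal -- exactly as if each
   stood for the same beyond-the-type value Z.  */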
4415 /* Helper routine for make_range. Perform one step for it, return
4416 new expression if the loop should continue or NULL_TREE if it should
4417 stop. */
4419 tree
4420 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4421 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4422 bool *strict_overflow_p)
4424 tree arg0_type = TREE_TYPE (arg0);
4425 tree n_low, n_high, low = *p_low, high = *p_high;
4426 int in_p = *p_in_p, n_in_p;
4428 switch (code)
4430 case TRUTH_NOT_EXPR:
4431 /* We can only do something if the range is testing for zero. */
4432 if (low == NULL_TREE || high == NULL_TREE
4433 || ! integer_zerop (low) || ! integer_zerop (high))
4434 return NULL_TREE;
4435 *p_in_p = ! in_p;
4436 return arg0;
4438 case EQ_EXPR: case NE_EXPR:
4439 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4440 /* We can only do something if the range is testing for zero
4441 and if the second operand is an integer constant. Note that
4442 saying something is "in" the range we make is done by
4443 complementing IN_P since it will be set in the initial case of
4444 being not equal to zero; "out" is leaving it alone. */
4445 if (low == NULL_TREE || high == NULL_TREE
4446 || ! integer_zerop (low) || ! integer_zerop (high)
4447 || TREE_CODE (arg1) != INTEGER_CST)
4448 return NULL_TREE;
4450 switch (code)
4452 case NE_EXPR: /* - [c, c] */
4453 low = high = arg1;
4454 break;
4455 case EQ_EXPR: /* + [c, c] */
4456 in_p = ! in_p, low = high = arg1;
4457 break;
4458 case GT_EXPR: /* - [-, c] */
4459 low = 0, high = arg1;
4460 break;
4461 case GE_EXPR: /* + [c, -] */
4462 in_p = ! in_p, low = arg1, high = 0;
4463 break;
4464 case LT_EXPR: /* - [c, -] */
4465 low = arg1, high = 0;
4466 break;
4467 case LE_EXPR: /* + [-, c] */
4468 in_p = ! in_p, low = 0, high = arg1;
4469 break;
4470 default:
4471 gcc_unreachable ();
4474 /* If this is an unsigned comparison, we also know that EXP is
4475 greater than or equal to zero. We base the range tests we make
4476 on that fact, so we record it here so we can parse existing
4477 range tests. We test arg0_type since often the return type
4478 of, e.g. EQ_EXPR, is boolean. */
4479 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4481 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4482 in_p, low, high, 1,
4483 build_int_cst (arg0_type, 0),
4484 NULL_TREE))
4485 return NULL_TREE;
4487 in_p = n_in_p, low = n_low, high = n_high;
4489 /* If the high bound is missing, but we have a nonzero low
4490 bound, reverse the range so it goes from zero to the low bound
4491 minus 1. */
4492 if (high == 0 && low && ! integer_zerop (low))
4494 in_p = ! in_p;
4495 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4496 build_int_cst (TREE_TYPE (low), 1), 0);
4497 low = build_int_cst (arg0_type, 0);
4501 *p_low = low;
4502 *p_high = high;
4503 *p_in_p = in_p;
4504 return arg0;
4506 case NEGATE_EXPR:
4507 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4508 low and high are non-NULL, then normalize will DTRT. */
4509 if (!TYPE_UNSIGNED (arg0_type)
4510 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4512 if (low == NULL_TREE)
4513 low = TYPE_MIN_VALUE (arg0_type);
4514 if (high == NULL_TREE)
4515 high = TYPE_MAX_VALUE (arg0_type);
4518 /* (-x) IN [a,b] -> x in [-b, -a] */
4519 n_low = range_binop (MINUS_EXPR, exp_type,
4520 build_int_cst (exp_type, 0),
4521 0, high, 1);
4522 n_high = range_binop (MINUS_EXPR, exp_type,
4523 build_int_cst (exp_type, 0),
4524 0, low, 0);
4525 if (n_high != 0 && TREE_OVERFLOW (n_high))
4526 return NULL_TREE;
4527 goto normalize;
4529 case BIT_NOT_EXPR:
4530 /* ~ X -> -X - 1 */
4531 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4532 build_int_cst (exp_type, 1));
4534 case PLUS_EXPR:
4535 case MINUS_EXPR:
4536 if (TREE_CODE (arg1) != INTEGER_CST)
4537 return NULL_TREE;
4539 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4540 move a constant to the other side. */
4541 if (!TYPE_UNSIGNED (arg0_type)
4542 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4543 return NULL_TREE;
4545 /* If EXP is signed, any overflow in the computation is undefined,
4546 so we don't worry about it so long as our computations on
4547 the bounds don't overflow. For unsigned, overflow is defined
4548 and this is exactly the right thing. */
4549 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4550 arg0_type, low, 0, arg1, 0);
4551 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4552 arg0_type, high, 1, arg1, 0);
4553 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4554 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4555 return NULL_TREE;
4557 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4558 *strict_overflow_p = true;
4560 normalize:
4561 /* Check for an unsigned range which has wrapped around the maximum
4562 value thus making n_high < n_low, and normalize it. */
4563 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4565 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4566 build_int_cst (TREE_TYPE (n_high), 1), 0);
4567 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4568 build_int_cst (TREE_TYPE (n_low), 1), 0);
4570 /* If the range is of the form +/- [ x+1, x ], we won't
4571 be able to normalize it. But then, it represents the
4572 whole range or the empty set, so make it
4573 +/- [ -, - ]. */
4574 if (tree_int_cst_equal (n_low, low)
4575 && tree_int_cst_equal (n_high, high))
4576 low = high = 0;
4577 else
4578 in_p = ! in_p;
4580 else
4581 low = n_low, high = n_high;
4583 *p_low = low;
4584 *p_high = high;
4585 *p_in_p = in_p;
4586 return arg0;
4588 CASE_CONVERT:
4589 case NON_LVALUE_EXPR:
4590 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4591 return NULL_TREE;
4593 if (! INTEGRAL_TYPE_P (arg0_type)
4594 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4595 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4596 return NULL_TREE;
4598 n_low = low, n_high = high;
4600 if (n_low != 0)
4601 n_low = fold_convert_loc (loc, arg0_type, n_low);
4603 if (n_high != 0)
4604 n_high = fold_convert_loc (loc, arg0_type, n_high);
4606 /* If we're converting arg0 from an unsigned type to exp's
4607 signed type, we will be doing the comparison as unsigned.
4608 The tests above have already verified that LOW and HIGH
4609 are both positive.
4611 So we have to ensure that we will handle large unsigned
4612 values the same way that the current signed bounds treat
4613 negative values. */
4615 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4617 tree high_positive;
4618 tree equiv_type;
4619 /* For fixed-point modes, we need to pass the saturating flag
4620 as the 2nd parameter. */
4621 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4622 equiv_type
4623 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4624 TYPE_SATURATING (arg0_type));
4625 else
4626 equiv_type
4627 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4629 /* A range without an upper bound is, naturally, unbounded.
4630 Since convert would have cropped a very large value, use
4631 the max value for the destination type. */
4632 high_positive
4633 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4634 : TYPE_MAX_VALUE (arg0_type);
4636 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4637 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4638 fold_convert_loc (loc, arg0_type,
4639 high_positive),
4640 build_int_cst (arg0_type, 1));
4642 /* If the low bound is specified, "and" the range with the
4643 range for which the original unsigned value will be
4644 positive. */
4645 if (low != 0)
4647 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4648 1, fold_convert_loc (loc, arg0_type,
4649 integer_zero_node),
4650 high_positive))
4651 return NULL_TREE;
4653 in_p = (n_in_p == in_p);
4655 else
4657 /* Otherwise, "or" the range with the range of the input
4658 that will be interpreted as negative. */
4659 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4660 1, fold_convert_loc (loc, arg0_type,
4661 integer_zero_node),
4662 high_positive))
4663 return NULL_TREE;
4665 in_p = (in_p != n_in_p);
4669 *p_low = n_low;
4670 *p_high = n_high;
4671 *p_in_p = in_p;
4672 return arg0;
4674 default:
4675 return NULL_TREE;
4679 /* Given EXP, a logical expression, set the range it is testing into
4680 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4681 actually being tested. *PLOW and *PHIGH will be made of the same
4682 type as the returned expression. If EXP is not a comparison, we
4683 will most likely not be returning a useful value and range. Set
4684 *STRICT_OVERFLOW_P to true if the return value is only valid
4685 because signed overflow is undefined; otherwise, do not change
4686 *STRICT_OVERFLOW_P. */
4688 tree
4689 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4690 bool *strict_overflow_p)
4692 enum tree_code code;
4693 tree arg0, arg1 = NULL_TREE;
4694 tree exp_type, nexp;
4695 int in_p;
4696 tree low, high;
4697 location_t loc = EXPR_LOCATION (exp);
4699 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4700 and see if we can refine the range. Some of the cases below may not
4701 happen, but it doesn't seem worth worrying about this. We "continue"
4702 the outer loop when we've changed something; otherwise we "break"
4703 out of the while loop.
4705 in_p = 0;
4706 low = high = build_int_cst (TREE_TYPE (exp), 0);
4708 while (1)
4710 code = TREE_CODE (exp);
4711 exp_type = TREE_TYPE (exp);
4712 arg0 = NULL_TREE;
4714 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4716 if (TREE_OPERAND_LENGTH (exp) > 0)
4717 arg0 = TREE_OPERAND (exp, 0);
4718 if (TREE_CODE_CLASS (code) == tcc_binary
4719 || TREE_CODE_CLASS (code) == tcc_comparison
4720 || (TREE_CODE_CLASS (code) == tcc_expression
4721 && TREE_OPERAND_LENGTH (exp) > 1))
4722 arg1 = TREE_OPERAND (exp, 1);
4724 if (arg0 == NULL_TREE)
4725 break;
4727 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4728 &high, &in_p, strict_overflow_p);
4729 if (nexp == NULL_TREE)
4730 break;
4731 exp = nexp;
4734 /* If EXP is a constant, we can evaluate whether this is true or false. */
4735 if (TREE_CODE (exp) == INTEGER_CST)
4737 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4738 exp, 0, low, 0))
4739 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4740 exp, 1, high, 1)));
4741 low = high = 0;
4742 exp = 0;
4745 *pin_p = in_p, *plow = low, *phigh = high;
4746 return exp;
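/* A sketch of one pass through the loop above: for EXP = (x > 5) with
   x signed, the GT_EXPR step in make_range_step produces the range
   "- [-, 5]", so make_range returns x with *pin_p == 0, *plow == 0
   (no lower bound) and *phigh == 5 -- x lies outside [min, 5], i.e.
   x > 5.  */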
4749 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4750 type, TYPE, return an expression to test if EXP is in (or out of, depending
4751 on IN_P) the range. Return 0 if the test couldn't be created. */
4753 tree
4754 build_range_check (location_t loc, tree type, tree exp, int in_p,
4755 tree low, tree high)
4757 tree etype = TREE_TYPE (exp), value;
4759 /* Disable this optimization for function pointer expressions
4760 on targets that require function pointer canonicalization. */
4761 if (targetm.have_canonicalize_funcptr_for_compare ()
4762 && TREE_CODE (etype) == POINTER_TYPE
4763 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4764 return NULL_TREE;
4766 if (! in_p)
4768 value = build_range_check (loc, type, exp, 1, low, high);
4769 if (value != 0)
4770 return invert_truthvalue_loc (loc, value);
4772 return 0;
4775 if (low == 0 && high == 0)
4776 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4778 if (low == 0)
4779 return fold_build2_loc (loc, LE_EXPR, type, exp,
4780 fold_convert_loc (loc, etype, high));
4782 if (high == 0)
4783 return fold_build2_loc (loc, GE_EXPR, type, exp,
4784 fold_convert_loc (loc, etype, low));
4786 if (operand_equal_p (low, high, 0))
4787 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4788 fold_convert_loc (loc, etype, low));
4790 if (integer_zerop (low))
4792 if (! TYPE_UNSIGNED (etype))
4794 etype = unsigned_type_for (etype);
4795 high = fold_convert_loc (loc, etype, high);
4796 exp = fold_convert_loc (loc, etype, exp);
4798 return build_range_check (loc, type, exp, 1, 0, high);
4801 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4802 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4804 int prec = TYPE_PRECISION (etype);
4806 if (wi::mask (prec - 1, false, prec) == high)
4808 if (TYPE_UNSIGNED (etype))
4810 tree signed_etype = signed_type_for (etype);
4811 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4812 etype
4813 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4814 else
4815 etype = signed_etype;
4816 exp = fold_convert_loc (loc, etype, exp);
4818 return fold_build2_loc (loc, GT_EXPR, type, exp,
4819 build_int_cst (etype, 0));
4823 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4824 This requires wrap-around arithmetic for the type of the expression.
4825 First make sure that arithmetic in this type is valid, then make sure
4826 that it wraps around. */
4827 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4828 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4829 TYPE_UNSIGNED (etype));
4831 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4833 tree utype, minv, maxv;
4835 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4836 for the type in question, as we rely on this here. */
4837 utype = unsigned_type_for (etype);
4838 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4839 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4840 build_int_cst (TREE_TYPE (maxv), 1), 1);
4841 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4843 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4844 minv, 1, maxv, 1)))
4845 etype = utype;
4846 else
4847 return 0;
4850 high = fold_convert_loc (loc, etype, high);
4851 low = fold_convert_loc (loc, etype, low);
4852 exp = fold_convert_loc (loc, etype, exp);
4854 value = const_binop (MINUS_EXPR, high, low);
4857 if (POINTER_TYPE_P (etype))
4859 if (value != 0 && !TREE_OVERFLOW (value))
4861 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4862 return build_range_check (loc, type,
4863 fold_build_pointer_plus_loc (loc, exp, low),
4864 1, build_int_cst (etype, 0), value);
4866 return 0;
4869 if (value != 0 && !TREE_OVERFLOW (value))
4870 return build_range_check (loc, type,
4871 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4872 1, build_int_cst (etype, 0), value);
4874 return 0;
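/* A sketch of the common outcome: for an integer x whose type does
   not wrap, build_range_check (loc, type, x, 1, 2, 5) converts to the
   unsigned variant utype and reduces, through the recursive calls
   above, to roughly

     (utype) x - 2 <= 3  */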
4877 /* Return the predecessor of VAL in its type, handling the infinite case. */
4879 static tree
4880 range_predecessor (tree val)
4882 tree type = TREE_TYPE (val);
4884 if (INTEGRAL_TYPE_P (type)
4885 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4886 return 0;
4887 else
4888 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4889 build_int_cst (TREE_TYPE (val), 1), 0);
4892 /* Return the successor of VAL in its type, handling the infinite case. */
4894 static tree
4895 range_successor (tree val)
4897 tree type = TREE_TYPE (val);
4899 if (INTEGRAL_TYPE_P (type)
4900 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4901 return 0;
4902 else
4903 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4904 build_int_cst (TREE_TYPE (val), 1), 0);
4907 /* Given two ranges, see if we can merge them into one. Return 1 if we
4908 can, 0 if we can't. Set the output range into the specified parameters. */
4910 bool
4911 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4912 tree high0, int in1_p, tree low1, tree high1)
4914 int no_overlap;
4915 int subset;
4916 int temp;
4917 tree tem;
4918 int in_p;
4919 tree low, high;
4920 int lowequal = ((low0 == 0 && low1 == 0)
4921 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4922 low0, 0, low1, 0)));
4923 int highequal = ((high0 == 0 && high1 == 0)
4924 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4925 high0, 1, high1, 1)));
4927 /* Make range 0 be the range that starts first, or ends last if they
4928 start at the same value. Swap them if it isn't. */
4929 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4930 low0, 0, low1, 0))
4931 || (lowequal
4932 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4933 high1, 1, high0, 1))))
4935 temp = in0_p, in0_p = in1_p, in1_p = temp;
4936 tem = low0, low0 = low1, low1 = tem;
4937 tem = high0, high0 = high1, high1 = tem;
4940 /* Now flag two cases, whether the ranges are disjoint or whether the
4941 second range is totally subsumed in the first. Note that the tests
4942 below are simplified by the ones above. */
4943 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4944 high0, 1, low1, 0));
4945 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4946 high1, 1, high0, 1));
4948 /* We now have four cases, depending on whether we are including or
4949 excluding the two ranges. */
4950 if (in0_p && in1_p)
4952 /* If they don't overlap, the result is false. If the second range
4953 is a subset it is the result. Otherwise, the range is from the start
4954 of the second to the end of the first. */
4955 if (no_overlap)
4956 in_p = 0, low = high = 0;
4957 else if (subset)
4958 in_p = 1, low = low1, high = high1;
4959 else
4960 in_p = 1, low = low1, high = high0;
4963 else if (in0_p && ! in1_p)
4965 /* If they don't overlap, the result is the first range. If they are
4966 equal, the result is false. If the second range is a subset of the
4967 first, and the ranges begin at the same place, we go from just after
4968 the end of the second range to the end of the first. If the second
4969 range is not a subset of the first, or if it is a subset and both
4970 ranges end at the same place, the range starts at the start of the
4971 first range and ends just before the second range.
4972 Otherwise, we can't describe this as a single range. */
4973 if (no_overlap)
4974 in_p = 1, low = low0, high = high0;
4975 else if (lowequal && highequal)
4976 in_p = 0, low = high = 0;
4977 else if (subset && lowequal)
4979 low = range_successor (high1);
4980 high = high0;
4981 in_p = 1;
4982 if (low == 0)
4984 /* We are in the weird situation where high0 > high1 but
4985 high1 has no successor. Punt. */
4986 return 0;
4989 else if (! subset || highequal)
4991 low = low0;
4992 high = range_predecessor (low1);
4993 in_p = 1;
4994 if (high == 0)
4996 /* low0 < low1 but low1 has no predecessor. Punt. */
4997 return 0;
5000 else
5001 return 0;
5004 else if (! in0_p && in1_p)
5006 /* If they don't overlap, the result is the second range. If the second
5007 is a subset of the first, the result is false. Otherwise,
5008 the range starts just after the first range and ends at the
5009 end of the second. */
5010 if (no_overlap)
5011 in_p = 1, low = low1, high = high1;
5012 else if (subset || highequal)
5013 in_p = 0, low = high = 0;
5014 else
5016 low = range_successor (high0);
5017 high = high1;
5018 in_p = 1;
5019 if (low == 0)
5021 /* high1 > high0 but high0 has no successor. Punt. */
5022 return 0;
5027 else
5029 /* The case where we are excluding both ranges. Here the complex case
5030 is if they don't overlap. In that case, the only time we have a
5031 range is if they are adjacent. If the second is a subset of the
5032 first, the result is the first. Otherwise, the range to exclude
5033 starts at the beginning of the first range and ends at the end of the
5034 second. */
5035 if (no_overlap)
5037 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5038 range_successor (high0),
5039 1, low1, 0)))
5040 in_p = 0, low = low0, high = high1;
5041 else
5043 /* Canonicalize - [min, x] into - [-, x]. */
5044 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5045 switch (TREE_CODE (TREE_TYPE (low0)))
5047 case ENUMERAL_TYPE:
5048 if (TYPE_PRECISION (TREE_TYPE (low0))
5049 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5050 break;
5051 /* FALLTHROUGH */
5052 case INTEGER_TYPE:
5053 if (tree_int_cst_equal (low0,
5054 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5055 low0 = 0;
5056 break;
5057 case POINTER_TYPE:
5058 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5059 && integer_zerop (low0))
5060 low0 = 0;
5061 break;
5062 default:
5063 break;
5066 /* Canonicalize - [x, max] into - [x, -]. */
5067 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5068 switch (TREE_CODE (TREE_TYPE (high1)))
5070 case ENUMERAL_TYPE:
5071 if (TYPE_PRECISION (TREE_TYPE (high1))
5072 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5073 break;
5074 /* FALLTHROUGH */
5075 case INTEGER_TYPE:
5076 if (tree_int_cst_equal (high1,
5077 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5078 high1 = 0;
5079 break;
5080 case POINTER_TYPE:
5081 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5082 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5083 high1, 1,
5084 build_int_cst (TREE_TYPE (high1), 1),
5085 1)))
5086 high1 = 0;
5087 break;
5088 default:
5089 break;
5092 /* The ranges might be also adjacent between the maximum and
5093 minimum values of the given type. For
5094 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5095 return + [x + 1, y - 1]. */
5096 if (low0 == 0 && high1 == 0)
5098 low = range_successor (high0);
5099 high = range_predecessor (low1);
5100 if (low == 0 || high == 0)
5101 return 0;
5103 in_p = 1;
5105 else
5106 return 0;
5109 else if (subset)
5110 in_p = 0, low = low0, high = high0;
5111 else
5112 in_p = 0, low = low0, high = high1;
5115 *pin_p = in_p, *plow = low, *phigh = high;
5116 return 1;
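/* Worked examples of the cases above (hypothetical ranges):

     + [2, 5] and + [4, 9]  ->  + [4, 5]   (both included, overlapping)
     + [2, 9] and - [2, 5]  ->  + [6, 9]   (subset with equal low bounds)
     - [2, 5] and - [6, 9]  ->  - [2, 9]   (adjacent exclusions merge)  */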
5120 /* Subroutine of fold, looking inside expressions of the form
5121 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5122 of the COND_EXPR. This function is being used also to optimize
5123 A op B ? C : A, by reversing the comparison first.
5125 Return a folded expression whose code is not a COND_EXPR
5126 anymore, or NULL_TREE if no folding opportunity is found. */
5128 static tree
5129 fold_cond_expr_with_comparison (location_t loc, tree type,
5130 tree arg0, tree arg1, tree arg2)
5132 enum tree_code comp_code = TREE_CODE (arg0);
5133 tree arg00 = TREE_OPERAND (arg0, 0);
5134 tree arg01 = TREE_OPERAND (arg0, 1);
5135 tree arg1_type = TREE_TYPE (arg1);
5136 tree tem;
5138 STRIP_NOPS (arg1);
5139 STRIP_NOPS (arg2);
5141 /* If we have A op 0 ? A : -A, consider applying the following
5142 transformations:
5144 A == 0? A : -A same as -A
5145 A != 0? A : -A same as A
5146 A >= 0? A : -A same as abs (A)
5147 A > 0? A : -A same as abs (A)
5148 A <= 0? A : -A same as -abs (A)
5149 A < 0? A : -A same as -abs (A)
5151 None of these transformations work for modes with signed
5152 zeros. If A is +/-0, the first two transformations will
5153 change the sign of the result (from +0 to -0, or vice
5154 versa). The last four will fix the sign of the result,
5155 even though the original expressions could be positive or
5156 negative, depending on the sign of A.
5158 Note that all these transformations are correct if A is
5159 NaN, since the two alternatives (A and -A) are also NaNs. */
5160 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5161 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5162 ? real_zerop (arg01)
5163 : integer_zerop (arg01))
5164 && ((TREE_CODE (arg2) == NEGATE_EXPR
5165 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5166 /* In the case that A is of the form X-Y, '-A' (arg2) may
5167 have already been folded to Y-X, check for that. */
5168 || (TREE_CODE (arg1) == MINUS_EXPR
5169 && TREE_CODE (arg2) == MINUS_EXPR
5170 && operand_equal_p (TREE_OPERAND (arg1, 0),
5171 TREE_OPERAND (arg2, 1), 0)
5172 && operand_equal_p (TREE_OPERAND (arg1, 1),
5173 TREE_OPERAND (arg2, 0), 0))))
5174 switch (comp_code)
5176 case EQ_EXPR:
5177 case UNEQ_EXPR:
5178 tem = fold_convert_loc (loc, arg1_type, arg1);
5179 return fold_convert_loc (loc, type, negate_expr (tem));
5180 case NE_EXPR:
5181 case LTGT_EXPR:
5182 return fold_convert_loc (loc, type, arg1);
5183 case UNGE_EXPR:
5184 case UNGT_EXPR:
5185 if (flag_trapping_math)
5186 break;
5187 /* Fall through. */
5188 case GE_EXPR:
5189 case GT_EXPR:
5190 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5191 break;
5192 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5193 return fold_convert_loc (loc, type, tem);
5194 case UNLE_EXPR:
5195 case UNLT_EXPR:
5196 if (flag_trapping_math)
5197 break;
5198 /* FALLTHRU */
5199 case LE_EXPR:
5200 case LT_EXPR:
5201 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5202 break;
5203 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5204 return negate_expr (fold_convert_loc (loc, type, tem));
5205 default:
5206 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5207 break;
5210 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5211 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5212 both transformations are correct when A is NaN: A != 0
5213 is then true, and A == 0 is false. */
5215 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5216 && integer_zerop (arg01) && integer_zerop (arg2))
5218 if (comp_code == NE_EXPR)
5219 return fold_convert_loc (loc, type, arg1);
5220 else if (comp_code == EQ_EXPR)
5221 return build_zero_cst (type);
5224 /* Try some transformations of A op B ? A : B.
5226 A == B? A : B same as B
5227 A != B? A : B same as A
5228 A >= B? A : B same as max (A, B)
5229 A > B? A : B same as max (B, A)
5230 A <= B? A : B same as min (A, B)
5231 A < B? A : B same as min (B, A)
5233 As above, these transformations don't work in the presence
5234 of signed zeros. For example, if A and B are zeros of
5235 opposite sign, the first two transformations will change
5236 the sign of the result. In the last four, the original
5237 expressions give different results for (A=+0, B=-0) and
5238 (A=-0, B=+0), but the transformed expressions do not.
5240 The first two transformations are correct if either A or B
5241 is a NaN. In the first transformation, the condition will
5242 be false, and B will indeed be chosen. In the case of the
5243 second transformation, the condition A != B will be true,
5244 and A will be chosen.
5246 The conversions to max() and min() are not correct if B is
5247 a number and A is not. The conditions in the original
5248 expressions will be false, so all four give B. The min()
5249 and max() versions would give a NaN instead. */
5250 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5251 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5252 /* Avoid these transformations if the COND_EXPR may be used
5253 as an lvalue in the C++ front-end. PR c++/19199. */
5254 && (in_gimple_form
5255 || VECTOR_TYPE_P (type)
5256 || (! lang_GNU_CXX ()
5257 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5258 || ! maybe_lvalue_p (arg1)
5259 || ! maybe_lvalue_p (arg2)))
5261 tree comp_op0 = arg00;
5262 tree comp_op1 = arg01;
5263 tree comp_type = TREE_TYPE (comp_op0);
5265 switch (comp_code)
5267 case EQ_EXPR:
5268 return fold_convert_loc (loc, type, arg2);
5269 case NE_EXPR:
5270 return fold_convert_loc (loc, type, arg1);
5271 case LE_EXPR:
5272 case LT_EXPR:
5273 case UNLE_EXPR:
5274 case UNLT_EXPR:
5275 /* In C++ a ?: expression can be an lvalue, so put the
5276 operand which will be used if they are equal first
5277 so that we can convert this back to the
5278 corresponding COND_EXPR. */
5279 if (!HONOR_NANS (arg1))
5281 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5282 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5283 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5284 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5285 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5286 comp_op1, comp_op0);
5287 return fold_convert_loc (loc, type, tem);
5289 break;
5290 case GE_EXPR:
5291 case GT_EXPR:
5292 case UNGE_EXPR:
5293 case UNGT_EXPR:
5294 if (!HONOR_NANS (arg1))
5296 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5297 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5298 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5299 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5300 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5301 comp_op1, comp_op0);
5302 return fold_convert_loc (loc, type, tem);
5304 break;
5305 case UNEQ_EXPR:
5306 if (!HONOR_NANS (arg1))
5307 return fold_convert_loc (loc, type, arg2);
5308 break;
5309 case LTGT_EXPR:
5310 if (!HONOR_NANS (arg1))
5311 return fold_convert_loc (loc, type, arg1);
5312 break;
5313 default:
5314 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5315 break;
5319 return NULL_TREE;
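/* E.g. (a sketch, valid only when neither NaNs nor signed zeros need
   to be honored):

     a < b ? a : b    ->  MIN_EXPR <b, a>
     a >= 0 ? a : -a  ->  ABS_EXPR <a>  */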
5324 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5325 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5326 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5327 false) >= 2)
5328 #endif
5330 /* EXP is some logical combination of boolean tests. See if we can
5331 merge it into some range test. Return the new tree if so. */
5333 static tree
5334 fold_range_test (location_t loc, enum tree_code code, tree type,
5335 tree op0, tree op1)
5337 int or_op = (code == TRUTH_ORIF_EXPR
5338 || code == TRUTH_OR_EXPR);
5339 int in0_p, in1_p, in_p;
5340 tree low0, low1, low, high0, high1, high;
5341 bool strict_overflow_p = false;
5342 tree tem, lhs, rhs;
5343 const char * const warnmsg = G_("assuming signed overflow does not occur "
5344 "when simplifying range test");
5346 if (!INTEGRAL_TYPE_P (type))
5347 return 0;
5349 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5350 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5352 /* If this is an OR operation, invert both sides; we will invert
5353 again at the end. */
5354 if (or_op)
5355 in0_p = ! in0_p, in1_p = ! in1_p;
5357 /* If both expressions are the same, if we can merge the ranges, and we
5358 can build the range test, return it or it inverted. If one of the
5359 ranges is always true or always false, consider it to be the same
5360 expression as the other. */
5361 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5362 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5363 in1_p, low1, high1)
5364 && 0 != (tem = (build_range_check (loc, type,
5365 lhs != 0 ? lhs
5366 : rhs != 0 ? rhs : integer_zero_node,
5367 in_p, low, high))))
5369 if (strict_overflow_p)
5370 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5371 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5374 /* On machines where the branch cost is expensive, if this is a
5375 short-circuited branch and the underlying object on both sides
5376 is the same, make a non-short-circuit operation. */
5377 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5378 && lhs != 0 && rhs != 0
5379 && (code == TRUTH_ANDIF_EXPR
5380 || code == TRUTH_ORIF_EXPR)
5381 && operand_equal_p (lhs, rhs, 0))
5383 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5384 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5385 which cases we can't do this. */
5386 if (simple_operand_p (lhs))
5387 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5388 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5389 type, op0, op1);
5391 else if (!lang_hooks.decls.global_bindings_p ()
5392 && !CONTAINS_PLACEHOLDER_P (lhs))
5394 tree common = save_expr (lhs);
5396 if (0 != (lhs = build_range_check (loc, type, common,
5397 or_op ? ! in0_p : in0_p,
5398 low0, high0))
5399 && (0 != (rhs = build_range_check (loc, type, common,
5400 or_op ? ! in1_p : in1_p,
5401 low1, high1))))
5403 if (strict_overflow_p)
5404 fold_overflow_warning (warnmsg,
5405 WARN_STRICT_OVERFLOW_COMPARISON);
5406 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5407 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5408 type, lhs, rhs);
5413 return 0;
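/* For instance (a sketch): ch >= '0' && ch <= '9' with both sides
   testing the same CH merges into one range check, roughly
   (unsigned) (ch - '0') <= 9, while on targets with
   LOGICAL_OP_NON_SHORT_CIRCUIT a short-circuit ANDIF over simple
   operands is rewritten as a plain TRUTH_AND_EXPR.  */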
5416 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5417 bit value. Arrange things so the extra bits will be set to zero if and
5418 only if C is sign-extended to its full width. If MASK is nonzero,
5419 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5421 static tree
5422 unextend (tree c, int p, int unsignedp, tree mask)
5424 tree type = TREE_TYPE (c);
5425 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5426 tree temp;
5428 if (p == modesize || unsignedp)
5429 return c;
5431 /* We work by getting just the sign bit into the low-order bit, then
5432 into the high-order bit, then sign-extend. We then XOR that value
5433 with C. */
5434 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5436 /* We must use a signed type in order to get an arithmetic right shift.
5437 However, we must also avoid introducing accidental overflows, so that
5438 a subsequent call to integer_zerop will work. Hence we must
5439 do the type conversion here. At this point, the constant is either
5440 zero or one, and the conversion to a signed type can never overflow.
5441 We could get an overflow if this conversion is done anywhere else. */
5442 if (TYPE_UNSIGNED (type))
5443 temp = fold_convert (signed_type_for (type), temp);
5445 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5446 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5447 if (mask != 0)
5448 temp = const_binop (BIT_AND_EXPR, temp,
5449 fold_convert (TREE_TYPE (c), mask));
5450 /* If necessary, convert the type back to match the type of C. */
5451 if (TYPE_UNSIGNED (type))
5452 temp = fold_convert (type, temp);
5454 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
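/* Worked example (assuming P == 4, an 8-bit C and no MASK): for
   C = 0b00001010, bit 3 is set, so temp becomes 0b10000000 after the
   left shift and 0b11110000 after the arithmetic right shift; then
   C ^ temp == 0b11111010, exactly C sign-extended from 4 to 8 bits.
   A C with bit 3 clear passes through unchanged.  */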
5457 /* For an expression that has the form
5458 (A && B) || ~B
5460 (A || B) && ~B,
5461 we can drop one of the inner expressions and simplify to
5462 A || ~B
5464 A && ~B
5465 LOC is the location of the resulting expression. OP is the inner
5466 logical operation; the left-hand side in the examples above, while CMPOP
5467 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5468 removing a condition that guards another, as in
5469 (A != NULL && A->...) || A == NULL
5470 which we must not transform. If RHS_ONLY is true, only eliminate the
5471 right-most operand of the inner logical operation. */
5473 static tree
5474 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5475 bool rhs_only)
5477 tree type = TREE_TYPE (cmpop);
5478 enum tree_code code = TREE_CODE (cmpop);
5479 enum tree_code truthop_code = TREE_CODE (op);
5480 tree lhs = TREE_OPERAND (op, 0);
5481 tree rhs = TREE_OPERAND (op, 1);
5482 tree orig_lhs = lhs, orig_rhs = rhs;
5483 enum tree_code rhs_code = TREE_CODE (rhs);
5484 enum tree_code lhs_code = TREE_CODE (lhs);
5485 enum tree_code inv_code;
5487 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5488 return NULL_TREE;
5490 if (TREE_CODE_CLASS (code) != tcc_comparison)
5491 return NULL_TREE;
5493 if (rhs_code == truthop_code)
5495 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5496 if (newrhs != NULL_TREE)
5498 rhs = newrhs;
5499 rhs_code = TREE_CODE (rhs);
5502 if (lhs_code == truthop_code && !rhs_only)
5504 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5505 if (newlhs != NULL_TREE)
5507 lhs = newlhs;
5508 lhs_code = TREE_CODE (lhs);
5512 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5513 if (inv_code == rhs_code
5514 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5515 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5516 return lhs;
5517 if (!rhs_only && inv_code == lhs_code
5518 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5519 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5520 return rhs;
5521 if (rhs != orig_rhs || lhs != orig_lhs)
5522 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5523 lhs, rhs);
5524 return NULL_TREE;
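/* Illustration (hypothetical trees): with OP = (x < y && z != 0) and
   CMPOP = (x >= y), the inverse of x >= y matches the left arm, so
   (x < y && z != 0) || x >= y simplifies to (z != 0) || x >= y,
   provided RHS_ONLY is false.  */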
5527 /* Find ways of folding logical expressions of LHS and RHS:
5528 Try to merge two comparisons to the same innermost item.
5529 Look for range tests like "ch >= '0' && ch <= '9'".
5530 Look for combinations of simple terms on machines with expensive branches
5531 and evaluate the RHS unconditionally.
5533 For example, if we have p->a == 2 && p->b == 4 and we can make an
5534 object large enough to span both A and B, we can do this with a comparison
5535 against the object ANDed with a mask.
5537 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5538 operations to do this with one comparison.
5540 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5541 function and the one above.
5543 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5544 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5546 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5547 two operands.
5549 We return the simplified tree or 0 if no optimization is possible. */
5551 static tree
5552 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5553 tree lhs, tree rhs)
5555 /* If this is the "or" of two comparisons, we can do something if
5556 the comparisons are NE_EXPR. If this is the "and", we can do something
5557 if the comparisons are EQ_EXPR. I.e.,
5558 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5560 WANTED_CODE is this operation code. For single bit fields, we can
5561 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5562 comparison for one-bit fields. */
5564 enum tree_code wanted_code;
5565 enum tree_code lcode, rcode;
5566 tree ll_arg, lr_arg, rl_arg, rr_arg;
5567 tree ll_inner, lr_inner, rl_inner, rr_inner;
5568 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5569 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5570 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5571 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5572 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5573 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5574 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5575 machine_mode lnmode, rnmode;
5576 tree ll_mask, lr_mask, rl_mask, rr_mask;
5577 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5578 tree l_const, r_const;
5579 tree lntype, rntype, result;
5580 HOST_WIDE_INT first_bit, end_bit;
5581 int volatilep;
5583 /* Start by getting the comparison codes. Fail if anything is volatile.
5584 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5585 it were surrounded with a NE_EXPR. */
5587 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5588 return 0;
5590 lcode = TREE_CODE (lhs);
5591 rcode = TREE_CODE (rhs);
5593 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5595 lhs = build2 (NE_EXPR, truth_type, lhs,
5596 build_int_cst (TREE_TYPE (lhs), 0));
5597 lcode = NE_EXPR;
5600 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5602 rhs = build2 (NE_EXPR, truth_type, rhs,
5603 build_int_cst (TREE_TYPE (rhs), 0));
5604 rcode = NE_EXPR;
5607 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5608 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5609 return 0;
5611 ll_arg = TREE_OPERAND (lhs, 0);
5612 lr_arg = TREE_OPERAND (lhs, 1);
5613 rl_arg = TREE_OPERAND (rhs, 0);
5614 rr_arg = TREE_OPERAND (rhs, 1);
5616 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5617 if (simple_operand_p (ll_arg)
5618 && simple_operand_p (lr_arg))
5620 if (operand_equal_p (ll_arg, rl_arg, 0)
5621 && operand_equal_p (lr_arg, rr_arg, 0))
5623 result = combine_comparisons (loc, code, lcode, rcode,
5624 truth_type, ll_arg, lr_arg);
5625 if (result)
5626 return result;
5628 else if (operand_equal_p (ll_arg, rr_arg, 0)
5629 && operand_equal_p (lr_arg, rl_arg, 0))
5631 result = combine_comparisons (loc, code, lcode,
5632 swap_tree_comparison (rcode),
5633 truth_type, ll_arg, lr_arg);
5634 if (result)
5635 return result;
5639 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5640 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5642 /* If the RHS can be evaluated unconditionally and its operands are
5643 simple, it wins to evaluate the RHS unconditionally on machines
5644 with expensive branches. In this case, this isn't a comparison
5645 that can be merged. */
5647 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5648 false) >= 2
5649 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5650 && simple_operand_p (rl_arg)
5651 && simple_operand_p (rr_arg))
5653 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5654 if (code == TRUTH_OR_EXPR
5655 && lcode == NE_EXPR && integer_zerop (lr_arg)
5656 && rcode == NE_EXPR && integer_zerop (rr_arg)
5657 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5658 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5659 return build2_loc (loc, NE_EXPR, truth_type,
5660 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5661 ll_arg, rl_arg),
5662 build_int_cst (TREE_TYPE (ll_arg), 0));
5664 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5665 if (code == TRUTH_AND_EXPR
5666 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5667 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5668 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5669 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5670 return build2_loc (loc, EQ_EXPR, truth_type,
5671 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5672 ll_arg, rl_arg),
5673 build_int_cst (TREE_TYPE (ll_arg), 0));
5676 /* See if the comparisons can be merged. Then get all the parameters for
5677 each side. */
5679 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5680 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5681 return 0;
5683 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5684 volatilep = 0;
5685 ll_inner = decode_field_reference (loc, &ll_arg,
5686 &ll_bitsize, &ll_bitpos, &ll_mode,
5687 &ll_unsignedp, &ll_reversep, &volatilep,
5688 &ll_mask, &ll_and_mask);
5689 lr_inner = decode_field_reference (loc, &lr_arg,
5690 &lr_bitsize, &lr_bitpos, &lr_mode,
5691 &lr_unsignedp, &lr_reversep, &volatilep,
5692 &lr_mask, &lr_and_mask);
5693 rl_inner = decode_field_reference (loc, &rl_arg,
5694 &rl_bitsize, &rl_bitpos, &rl_mode,
5695 &rl_unsignedp, &rl_reversep, &volatilep,
5696 &rl_mask, &rl_and_mask);
5697 rr_inner = decode_field_reference (loc, &rr_arg,
5698 &rr_bitsize, &rr_bitpos, &rr_mode,
5699 &rr_unsignedp, &rr_reversep, &volatilep,
5700 &rr_mask, &rr_and_mask);
5702 /* The inner operation on the lhs of each comparison must be the same
5703 if we are to be able to do anything.
5704 Then see if we have constants. If not, the same must be true for
5705 the rhs's. */
5706 if (volatilep
5707 || ll_reversep != rl_reversep
5708 || ll_inner == 0 || rl_inner == 0
5709 || ! operand_equal_p (ll_inner, rl_inner, 0))
5710 return 0;
5712 if (TREE_CODE (lr_arg) == INTEGER_CST
5713 && TREE_CODE (rr_arg) == INTEGER_CST)
5715 l_const = lr_arg, r_const = rr_arg;
5716 lr_reversep = ll_reversep;
5718 else if (lr_reversep != rr_reversep
5719 || lr_inner == 0 || rr_inner == 0
5720 || ! operand_equal_p (lr_inner, rr_inner, 0))
5721 return 0;
5722 else
5723 l_const = r_const = 0;
5725 /* If either comparison code is not correct for our logical operation,
5726 fail. However, we can convert a one-bit comparison against zero into
5727 the opposite comparison against that bit being set in the field. */
5729 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5730 if (lcode != wanted_code)
5732 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5734 /* Make the left operand unsigned, since we are only interested
5735 in the value of one bit. Otherwise we are doing the wrong
5736 thing below. */
5737 ll_unsignedp = 1;
5738 l_const = ll_mask;
5740 else
5741 return 0;
5744 /* This is analogous to the code for l_const above. */
5745 if (rcode != wanted_code)
5747 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5749 rl_unsignedp = 1;
5750 r_const = rl_mask;
5752 else
5753 return 0;
5756 /* See if we can find a mode that contains both fields being compared on
5757 the left. If we can't, fail. Otherwise, update all constants and masks
5758 to be relative to a field of that size. */
5759 first_bit = MIN (ll_bitpos, rl_bitpos);
5760 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5761 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5762 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5763 volatilep);
5764 if (lnmode == VOIDmode)
5765 return 0;
5767 lnbitsize = GET_MODE_BITSIZE (lnmode);
5768 lnbitpos = first_bit & ~ (lnbitsize - 1);
5769 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5770 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5772 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5774 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5775 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5778 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5779 size_int (xll_bitpos));
5780 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5781 size_int (xrl_bitpos));
5783 if (l_const)
5785 l_const = fold_convert_loc (loc, lntype, l_const);
5786 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5787 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5788 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5789 fold_build1_loc (loc, BIT_NOT_EXPR,
5790 lntype, ll_mask))))
5792 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5794 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5797 if (r_const)
5799 r_const = fold_convert_loc (loc, lntype, r_const);
5800 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5801 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5802 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5803 fold_build1_loc (loc, BIT_NOT_EXPR,
5804 lntype, rl_mask))))
5806 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5808 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5812 /* If the right sides are not constant, do the same for them. Also,
5813 disallow this optimization if a size or signedness mismatch occurs
5814 between the left and right sides. */
5815 if (l_const == 0)
5817 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5818 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5819 /* Make sure the two fields on the right
5820 correspond to the left without being swapped. */
5821 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5822 return 0;
5824 first_bit = MIN (lr_bitpos, rr_bitpos);
5825 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5826 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5827 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5828 volatilep);
5829 if (rnmode == VOIDmode)
5830 return 0;
5832 rnbitsize = GET_MODE_BITSIZE (rnmode);
5833 rnbitpos = first_bit & ~ (rnbitsize - 1);
5834 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5835 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5837 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5839 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5840 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5843 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5844 rntype, lr_mask),
5845 size_int (xlr_bitpos));
5846 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5847 rntype, rr_mask),
5848 size_int (xrr_bitpos));
5850 /* Make a mask that corresponds to both fields being compared.
5851 Do this for both items being compared. If the operands are the
5852 same size and the bits being compared are in the same position
5853 then we can do this by masking both and comparing the masked
5854 results. */
5855 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5856 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5857 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5859 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5860 lntype, lnbitsize, lnbitpos,
5861 ll_unsignedp || rl_unsignedp, ll_reversep);
5862 if (! all_ones_mask_p (ll_mask, lnbitsize))
5863 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5865 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5866 rntype, rnbitsize, rnbitpos,
5867 lr_unsignedp || rr_unsignedp, lr_reversep);
5868 if (! all_ones_mask_p (lr_mask, rnbitsize))
5869 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5871 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5874 /* There is still another way we can do something: If both pairs of
5875 fields being compared are adjacent, we may be able to make a wider
5876 field containing them both.
5878 Note that we still must mask the lhs/rhs expressions. Furthermore,
5879 the mask must be shifted to account for the shift done by
5880 make_bit_field_ref. */
5881 if ((ll_bitsize + ll_bitpos == rl_bitpos
5882 && lr_bitsize + lr_bitpos == rr_bitpos)
5883 || (ll_bitpos == rl_bitpos + rl_bitsize
5884 && lr_bitpos == rr_bitpos + rr_bitsize))
5886 tree type;
5888 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5889 ll_bitsize + rl_bitsize,
5890 MIN (ll_bitpos, rl_bitpos),
5891 ll_unsignedp, ll_reversep);
5892 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5893 lr_bitsize + rr_bitsize,
5894 MIN (lr_bitpos, rr_bitpos),
5895 lr_unsignedp, lr_reversep);
5897 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5898 size_int (MIN (xll_bitpos, xrl_bitpos)));
5899 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5900 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5902 /* Convert to the smaller type before masking out unwanted bits. */
5903 type = lntype;
5904 if (lntype != rntype)
5906 if (lnbitsize > rnbitsize)
5908 lhs = fold_convert_loc (loc, rntype, lhs);
5909 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5910 type = rntype;
5912 else if (lnbitsize < rnbitsize)
5914 rhs = fold_convert_loc (loc, lntype, rhs);
5915 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5916 type = lntype;
5920 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5921 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5923 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5924 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5926 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5929 return 0;
5932 /* Handle the case of comparisons with constants. If there is something in
5933 common between the masks, those bits of the constants must be the same.
5934 If not, the condition is known at compile time (always false for the AND
5935 case, always true for OR). Test for this to avoid generating incorrect code below. */
5936 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5937 if (! integer_zerop (result)
5938 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5939 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5941 if (wanted_code == NE_EXPR)
5943 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5944 return constant_boolean_node (true, truth_type);
5946 else
5948 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5949 return constant_boolean_node (false, truth_type);
5953 /* Construct the expression we will return. First get the component
5954 reference we will make. Unless the mask is all ones the width of
5955 that field, perform the mask operation. Then compare with the
5956 merged constant. */
5957 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5958 lntype, lnbitsize, lnbitpos,
5959 ll_unsignedp || rl_unsignedp, ll_reversep);
5961 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5962 if (! all_ones_mask_p (ll_mask, lnbitsize))
5963 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5965 return build2_loc (loc, wanted_code, truth_type, result,
5966 const_binop (BIT_IOR_EXPR, l_const, r_const));
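/* A sketch of the whole transformation, assuming a little-endian target
   and the hypothetical declaration "struct s { unsigned a : 4;
   unsigned b : 4; } x":  the test "x.a == 2 && x.b == 3" decodes into
   two field references over the same byte, with masks 0x0f and 0xf0.
   The shifted constants and masks are IORed together, and the result is
   a single comparison of the containing byte against the merged
   constant:

     *(unsigned char *) &x == 0x32

   i.e. one load and one compare instead of two of each; the masking
   step is skipped here because the merged mask 0xff covers the whole
   byte.  */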
5969 /* T is an integer expression that is being multiplied by, divided by, or
5970 taken modulo a constant C (CODE says which operation and what kind of
5971 division or modulus). See if we can eliminate that operation by folding it with
5972 other operations already in T. WIDE_TYPE, if non-null, is a type that
5973 should be used for the computation if wider than our type.
5975 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5976 (X * 2) + (Y * 4). We must, however, be assured that either the original
5977 expression would not overflow or that overflow is undefined for the type
5978 in the language in question.
5980 If we return a non-null expression, it is an equivalent form of the
5981 original computation, but need not be in the original type.
5983 We set *STRICT_OVERFLOW_P to true if the return value depends on
5984 signed overflow being undefined. Otherwise we do not change
5985 *STRICT_OVERFLOW_P. */
5987 static tree
5988 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5989 bool *strict_overflow_p)
5991 /* To avoid exponential search depth, refuse to allow recursion past
5992 three levels. Beyond that (1) it's highly unlikely that we'll find
5993 something interesting and (2) we've probably processed it before
5994 when we built the inner expression. */
5996 static int depth;
5997 tree ret;
5999 if (depth > 3)
6000 return NULL;
6002 depth++;
6003 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6004 depth--;
6006 return ret;
6009 static tree
6010 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6011 bool *strict_overflow_p)
6013 tree type = TREE_TYPE (t);
6014 enum tree_code tcode = TREE_CODE (t);
6015 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6016 > GET_MODE_SIZE (TYPE_MODE (type)))
6017 ? wide_type : type);
6018 tree t1, t2;
6019 int same_p = tcode == code;
6020 tree op0 = NULL_TREE, op1 = NULL_TREE;
6021 bool sub_strict_overflow_p;
6023 /* Don't deal with constants of zero here; they confuse the code below. */
6024 if (integer_zerop (c))
6025 return NULL_TREE;
6027 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6028 op0 = TREE_OPERAND (t, 0);
6030 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6031 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6033 /* Note that we need not handle conditional operations here since fold
6034 already handles those cases. So just do arithmetic here. */
6035 switch (tcode)
6037 case INTEGER_CST:
6038 /* For a constant, we can always simplify if we are a multiply
6039 or (for divide and modulus) if it is a multiple of our constant. */
6040 if (code == MULT_EXPR
6041 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6043 tree tem = const_binop (code, fold_convert (ctype, t),
6044 fold_convert (ctype, c));
6045 /* If the multiplication overflowed, we lost information on it.
6046 See PR68142 and PR69845. */
6047 if (TREE_OVERFLOW (tem))
6048 return NULL_TREE;
6049 return tem;
6051 break;
6053 CASE_CONVERT: case NON_LVALUE_EXPR:
6054 /* If op0 is an expression ... */
6055 if ((COMPARISON_CLASS_P (op0)
6056 || UNARY_CLASS_P (op0)
6057 || BINARY_CLASS_P (op0)
6058 || VL_EXP_CLASS_P (op0)
6059 || EXPRESSION_CLASS_P (op0))
6060 /* ... and has wrapping overflow, and its type is smaller
6061 than ctype, then we cannot pass through as widening. */
6062 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6063 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6064 && (TYPE_PRECISION (ctype)
6065 > TYPE_PRECISION (TREE_TYPE (op0))))
6066 /* ... or this is a truncation (t is narrower than op0),
6067 then we cannot pass through this narrowing. */
6068 || (TYPE_PRECISION (type)
6069 < TYPE_PRECISION (TREE_TYPE (op0)))
6070 /* ... or signedness changes for division or modulus,
6071 then we cannot pass through this conversion. */
6072 || (code != MULT_EXPR
6073 && (TYPE_UNSIGNED (ctype)
6074 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6075 /* ... or has undefined overflow while the converted to
6076 type has not, we cannot do the operation in the inner type
6077 as that would introduce undefined overflow. */
6078 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6079 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6080 && !TYPE_OVERFLOW_UNDEFINED (type))))
6081 break;
6083 /* Pass the constant down and see if we can make a simplification. If
6084 we can, replace this expression with the inner simplification for
6085 possible later conversion to our or some other type. */
6086 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6087 && TREE_CODE (t2) == INTEGER_CST
6088 && !TREE_OVERFLOW (t2)
6089 && (0 != (t1 = extract_muldiv (op0, t2, code,
6090 code == MULT_EXPR
6091 ? ctype : NULL_TREE,
6092 strict_overflow_p))))
6093 return t1;
6094 break;
6096 case ABS_EXPR:
6097 /* If widening the type changes it from signed to unsigned, then we
6098 must avoid building ABS_EXPR itself as unsigned. */
6099 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6101 tree cstype = (*signed_type_for) (ctype);
6102 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6103 != 0)
6105 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6106 return fold_convert (ctype, t1);
6108 break;
6110 /* If the constant is negative, we cannot simplify this. */
6111 if (tree_int_cst_sgn (c) == -1)
6112 break;
6113 /* FALLTHROUGH */
6114 case NEGATE_EXPR:
6115 /* For division and modulus, type can't be unsigned, as e.g.
6116 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6117 For signed types, even with wrapping overflow, this is fine. */
6118 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6119 break;
6120 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6121 != 0)
6122 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6123 break;
6125 case MIN_EXPR: case MAX_EXPR:
6126 /* If widening the type changes the signedness, then we can't perform
6127 this optimization as that changes the result. */
6128 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6129 break;
6131 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6132 sub_strict_overflow_p = false;
6133 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6134 &sub_strict_overflow_p)) != 0
6135 && (t2 = extract_muldiv (op1, c, code, wide_type,
6136 &sub_strict_overflow_p)) != 0)
6138 if (tree_int_cst_sgn (c) < 0)
6139 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6140 if (sub_strict_overflow_p)
6141 *strict_overflow_p = true;
6142 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6143 fold_convert (ctype, t2));
6145 break;
6147 case LSHIFT_EXPR: case RSHIFT_EXPR:
6148 /* If the second operand is constant, this is a multiplication
6149 or floor division, by a power of two, so we can treat it that
6150 way unless the multiplier or divisor overflows. Signed
6151 left-shift overflow is implementation-defined rather than
6152 undefined in C90, so do not convert signed left shift into
6153 multiplication. */
6154 if (TREE_CODE (op1) == INTEGER_CST
6155 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6156 /* const_binop may not detect overflow correctly,
6157 so check for it explicitly here. */
6158 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6159 && 0 != (t1 = fold_convert (ctype,
6160 const_binop (LSHIFT_EXPR,
6161 size_one_node,
6162 op1)))
6163 && !TREE_OVERFLOW (t1))
6164 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6165 ? MULT_EXPR : FLOOR_DIV_EXPR,
6166 ctype,
6167 fold_convert (ctype, op0),
6168 t1),
6169 c, code, wide_type, strict_overflow_p);
6170 break;
6172 case PLUS_EXPR: case MINUS_EXPR:
6173 /* See if we can eliminate the operation on both sides. If we can, we
6174 can return a new PLUS or MINUS. If we can't, the only remaining
6175 cases where we can do anything are if the second operand is a
6176 constant. */
6177 sub_strict_overflow_p = false;
6178 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6179 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6180 if (t1 != 0 && t2 != 0
6181 && (code == MULT_EXPR
6182 /* If not multiplication, we can only do this if both operands
6183 are divisible by c. */
6184 || (multiple_of_p (ctype, op0, c)
6185 && multiple_of_p (ctype, op1, c))))
6187 if (sub_strict_overflow_p)
6188 *strict_overflow_p = true;
6189 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6190 fold_convert (ctype, t2));
6193 /* If this was a subtraction, negate OP1 and set it to be an addition.
6194 This simplifies the logic below. */
6195 if (tcode == MINUS_EXPR)
6197 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6198 /* If OP1 was not easily negatable, the constant may be OP0. */
6199 if (TREE_CODE (op0) == INTEGER_CST)
6201 std::swap (op0, op1);
6202 std::swap (t1, t2);
6206 if (TREE_CODE (op1) != INTEGER_CST)
6207 break;
6209 /* If either OP1 or C is negative, this optimization is not safe for
6210 some of the division and remainder types while for others we need
6211 to change the code. */
6212 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6214 if (code == CEIL_DIV_EXPR)
6215 code = FLOOR_DIV_EXPR;
6216 else if (code == FLOOR_DIV_EXPR)
6217 code = CEIL_DIV_EXPR;
6218 else if (code != MULT_EXPR
6219 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6220 break;
6223 /* If it's a multiply or a division/modulus operation of a multiple
6224 of our constant, do the operation and verify it doesn't overflow. */
6225 if (code == MULT_EXPR
6226 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6228 op1 = const_binop (code, fold_convert (ctype, op1),
6229 fold_convert (ctype, c));
6230 /* We allow the constant to overflow with wrapping semantics. */
6231 if (op1 == 0
6232 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6233 break;
6235 else
6236 break;
6238 /* If we have an unsigned type, we cannot widen the operation since it
6239 will change the result if the original computation overflowed. */
6240 if (TYPE_UNSIGNED (ctype) && ctype != type)
6241 break;
6243 /* If we were able to eliminate our operation from the first side,
6244 apply our operation to the second side and reform the PLUS. */
6245 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6246 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6248 /* The last case is if we are a multiply. In that case, we can
6249 apply the distributive law to commute the multiply and addition
6250 if the multiplication of the constants doesn't overflow
6251 and overflow is defined. With undefined overflow
6252 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6253 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6254 return fold_build2 (tcode, ctype,
6255 fold_build2 (code, ctype,
6256 fold_convert (ctype, op0),
6257 fold_convert (ctype, c)),
6258 op1);
6260 break;
6262 case MULT_EXPR:
6263 /* We have a special case here if we are doing something like
6264 (C * 8) % 4 since we know that's zero. */
6265 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6266 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6267 /* If the multiplication can overflow we cannot optimize this. */
6268 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6269 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6270 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6272 *strict_overflow_p = true;
6273 return omit_one_operand (type, integer_zero_node, op0);
6276 /* ... fall through ... */
6278 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6279 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6280 /* If we can extract our operation from the LHS, do so and return a
6281 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6282 do something only if the second operand is a constant. */
6283 if (same_p
6284 && TYPE_OVERFLOW_WRAPS (ctype)
6285 && (t1 = extract_muldiv (op0, c, code, wide_type,
6286 strict_overflow_p)) != 0)
6287 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6288 fold_convert (ctype, op1));
6289 else if (tcode == MULT_EXPR && code == MULT_EXPR
6290 && TYPE_OVERFLOW_WRAPS (ctype)
6291 && (t1 = extract_muldiv (op1, c, code, wide_type,
6292 strict_overflow_p)) != 0)
6293 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6294 fold_convert (ctype, t1));
6295 else if (TREE_CODE (op1) != INTEGER_CST)
6296 return 0;
6298 /* If these are the same operation types, we can associate them
6299 assuming no overflow. */
6300 if (tcode == code)
6302 bool overflow_p = false;
6303 bool overflow_mul_p;
6304 signop sign = TYPE_SIGN (ctype);
6305 unsigned prec = TYPE_PRECISION (ctype);
6306 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6307 wi::to_wide (c, prec),
6308 sign, &overflow_mul_p);
6309 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6310 if (overflow_mul_p
6311 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6312 overflow_p = true;
6313 if (!overflow_p)
6314 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6315 wide_int_to_tree (ctype, mul));
6318 /* If these operations "cancel" each other, we have the main
6319 optimizations of this pass, which occur when either constant is a
6320 multiple of the other, in which case we replace this with either an
6321 operation of CODE or TCODE.
6323 If we have an unsigned type, we cannot do this since it will change
6324 the result if the original computation overflowed. */
6325 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6326 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6327 || (tcode == MULT_EXPR
6328 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6329 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6330 && code != MULT_EXPR)))
6332 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6334 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6335 *strict_overflow_p = true;
6336 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6337 fold_convert (ctype,
6338 const_binop (TRUNC_DIV_EXPR,
6339 op1, c)));
6341 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6343 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6344 *strict_overflow_p = true;
6345 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6346 fold_convert (ctype,
6347 const_binop (TRUNC_DIV_EXPR,
6348 c, op1)));
6351 break;
6353 default:
6354 break;
6357 return 0;
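/* Two sketches of what extract_muldiv_1 accomplishes, using
   hypothetical operands:  for T = (X * 12) + 8, C = 4 and
   CODE = TRUNC_DIV_EXPR, the PLUS_EXPR case sees that both 12 and 8
   are multiples of 4 and rebuilds (X * 3) + 2.  For T = X * 4 and
   C = 8, the cancellation case instead yields X / 2, which is only
   valid when signed overflow is undefined; that is why
   *STRICT_OVERFLOW_P is set there.  */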
6360 /* Return a node which has the indicated constant VALUE (either 0 or
6361 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6362 and is of the indicated TYPE. */
6364 tree
6365 constant_boolean_node (bool value, tree type)
6367 if (type == integer_type_node)
6368 return value ? integer_one_node : integer_zero_node;
6369 else if (type == boolean_type_node)
6370 return value ? boolean_true_node : boolean_false_node;
6371 else if (TREE_CODE (type) == VECTOR_TYPE)
6372 return build_vector_from_val (type,
6373 build_int_cst (TREE_TYPE (type),
6374 value ? -1 : 0));
6375 else
6376 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6380 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6381 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6382 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6383 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6384 COND is the first argument to CODE; otherwise (as in the example
6385 given here), it is the second argument. TYPE is the type of the
6386 original expression. Return NULL_TREE if no simplification is
6387 possible. */
6389 static tree
6390 fold_binary_op_with_conditional_arg (location_t loc,
6391 enum tree_code code,
6392 tree type, tree op0, tree op1,
6393 tree cond, tree arg, int cond_first_p)
6395 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6396 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6397 tree test, true_value, false_value;
6398 tree lhs = NULL_TREE;
6399 tree rhs = NULL_TREE;
6400 enum tree_code cond_code = COND_EXPR;
6402 if (TREE_CODE (cond) == COND_EXPR
6403 || TREE_CODE (cond) == VEC_COND_EXPR)
6405 test = TREE_OPERAND (cond, 0);
6406 true_value = TREE_OPERAND (cond, 1);
6407 false_value = TREE_OPERAND (cond, 2);
6408 /* If this operand is a void-typed expression, such as a throw, then
6409 it does not make sense to try to perform a logical or arithmetic
6410 operation involving it. */
6411 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6412 lhs = true_value;
6413 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6414 rhs = false_value;
6416 else if (!(TREE_CODE (type) != VECTOR_TYPE
6417 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6419 tree testtype = TREE_TYPE (cond);
6420 test = cond;
6421 true_value = constant_boolean_node (true, testtype);
6422 false_value = constant_boolean_node (false, testtype);
6424 else
6425 /* Detect the case of mixing vector and scalar types - bail out. */
6426 return NULL_TREE;
6428 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6429 cond_code = VEC_COND_EXPR;
6431 /* This transformation is only worthwhile if we don't have to wrap ARG
6432 in a SAVE_EXPR and the operation can be simplified without recursing
6433 on at least one of the branches once it's pushed inside the COND_EXPR. */
6434 if (!TREE_CONSTANT (arg)
6435 && (TREE_SIDE_EFFECTS (arg)
6436 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6437 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6438 return NULL_TREE;
6440 arg = fold_convert_loc (loc, arg_type, arg);
6441 if (lhs == 0)
6443 true_value = fold_convert_loc (loc, cond_type, true_value);
6444 if (cond_first_p)
6445 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6446 else
6447 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6449 if (rhs == 0)
6451 false_value = fold_convert_loc (loc, cond_type, false_value);
6452 if (cond_first_p)
6453 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6454 else
6455 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6458 /* Check that we have simplified at least one of the branches. */
6459 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6460 return NULL_TREE;
6462 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6466 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6468 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6469 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6470 ADDEND is the same as X.
6472 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6473 and finite. The problematic cases are when X is zero, and its mode
6474 has signed zeros. In the case of rounding towards -infinity,
6475 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6476 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6478 bool
6479 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6481 if (!real_zerop (addend))
6482 return false;
6484 /* Don't allow the fold with -fsignaling-nans. */
6485 if (HONOR_SNANS (element_mode (type)))
6486 return false;
6488 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6489 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6490 return true;
6492 /* In a vector or complex, we would need to check the sign of all zeros. */
6493 if (TREE_CODE (addend) != REAL_CST)
6494 return false;
6496 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6497 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6498 negate = !negate;
6500 /* The mode has signed zeros, and we have to honor their sign.
6501 In this situation, there is only one case we can return true for.
6502 X - 0 is the same as X unless rounding towards -infinity is
6503 supported. */
6504 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
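/* Example of the asymmetry above:  "x - 0.0" may fold to "x" even when
   signed zeros are honored, since -0.0 - 0.0 is still -0.0 under the
   default rounding mode; "x + 0.0" may not, since -0.0 + 0.0 is +0.0
   and the sign of a zero X would be lost.  */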
6507 /* Subroutine of fold() that optimizes comparisons of a division by
6508 a nonzero integer constant against an integer constant, i.e.
6509 X/C1 op C2.
6511 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6512 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6513 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6515 The function returns the constant folded tree if a simplification
6516 can be made, and NULL_TREE otherwise. */
6518 static tree
6519 fold_div_compare (location_t loc,
6520 enum tree_code code, tree type, tree arg0, tree arg1)
6522 tree prod, tmp, hi, lo;
6523 tree arg00 = TREE_OPERAND (arg0, 0);
6524 tree arg01 = TREE_OPERAND (arg0, 1);
6525 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6526 bool neg_overflow = false;
6527 bool overflow;
6529 /* We have to do this the hard way to detect unsigned overflow.
6530 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6531 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6532 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6535 if (sign == UNSIGNED)
6537 tmp = int_const_binop (MINUS_EXPR, arg01,
6538 build_int_cst (TREE_TYPE (arg01), 1));
6539 lo = prod;
6541 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6542 val = wi::add (prod, tmp, sign, &overflow);
6543 hi = force_fit_type (TREE_TYPE (arg00), val,
6544 -1, overflow | TREE_OVERFLOW (prod));
6546 else if (tree_int_cst_sgn (arg01) >= 0)
6548 tmp = int_const_binop (MINUS_EXPR, arg01,
6549 build_int_cst (TREE_TYPE (arg01), 1));
6550 switch (tree_int_cst_sgn (arg1))
6552 case -1:
6553 neg_overflow = true;
6554 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6555 hi = prod;
6556 break;
6558 case 0:
6559 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6560 hi = tmp;
6561 break;
6563 case 1:
6564 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6565 lo = prod;
6566 break;
6568 default:
6569 gcc_unreachable ();
6572 else
6574 /* A negative divisor reverses the relational operators. */
6575 code = swap_tree_comparison (code);
6577 tmp = int_const_binop (PLUS_EXPR, arg01,
6578 build_int_cst (TREE_TYPE (arg01), 1));
6579 switch (tree_int_cst_sgn (arg1))
6581 case -1:
6582 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6583 lo = prod;
6584 break;
6586 case 0:
6587 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6588 lo = tmp;
6589 break;
6591 case 1:
6592 neg_overflow = true;
6593 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6594 hi = prod;
6595 break;
6597 default:
6598 gcc_unreachable ();
6602 switch (code)
6604 case EQ_EXPR:
6605 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6606 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6607 if (TREE_OVERFLOW (hi))
6608 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6609 if (TREE_OVERFLOW (lo))
6610 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6611 return build_range_check (loc, type, arg00, 1, lo, hi);
6613 case NE_EXPR:
6614 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6615 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6616 if (TREE_OVERFLOW (hi))
6617 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6618 if (TREE_OVERFLOW (lo))
6619 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6620 return build_range_check (loc, type, arg00, 0, lo, hi);
6622 case LT_EXPR:
6623 if (TREE_OVERFLOW (lo))
6625 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6626 return omit_one_operand_loc (loc, type, tmp, arg00);
6628 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6630 case LE_EXPR:
6631 if (TREE_OVERFLOW (hi))
6633 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6634 return omit_one_operand_loc (loc, type, tmp, arg00);
6636 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6638 case GT_EXPR:
6639 if (TREE_OVERFLOW (hi))
6641 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6642 return omit_one_operand_loc (loc, type, tmp, arg00);
6644 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6646 case GE_EXPR:
6647 if (TREE_OVERFLOW (lo))
6649 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6650 return omit_one_operand_loc (loc, type, tmp, arg00);
6652 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6654 default:
6655 break;
6658 return NULL_TREE;
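/* Worked instance with hypothetical operands:  for "x / 3 == 2" with
   signed x, PROD is 6 and TMP is 2, so LO = 6 and HI = 8; the EQ_EXPR
   case then builds the range check x >= 6 && x <= 8 (via
   build_range_check, typically emitted as "(unsigned) x - 6 <= 2").
   A negative divisor first swaps the comparison code.  */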
6662 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6663 equality/inequality test, then return a simplified form of the test
6664 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6665 result type. */
6667 static tree
6668 fold_single_bit_test_into_sign_test (location_t loc,
6669 enum tree_code code, tree arg0, tree arg1,
6670 tree result_type)
6672 /* If this is testing a single bit, we can optimize the test. */
6673 if ((code == NE_EXPR || code == EQ_EXPR)
6674 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6675 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6677 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6678 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6679 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6681 if (arg00 != NULL_TREE
6682 /* This is only a win if casting to a signed type is cheap,
6683 i.e. when arg00's type is not a partial mode. */
6684 && TYPE_PRECISION (TREE_TYPE (arg00))
6685 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6687 tree stype = signed_type_for (TREE_TYPE (arg00));
6688 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6689 result_type,
6690 fold_convert_loc (loc, stype, arg00),
6691 build_int_cst (stype, 0));
6695 return NULL_TREE;
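/* For example, with a 32-bit int A the test "(A & 0x80000000) != 0"
   inspects exactly the sign bit, so it becomes the cheaper "A < 0",
   and "(A & 0x80000000) == 0" becomes "A >= 0".  */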
6698 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6699 equality/inequality test, then return a simplified form of
6700 the test using shifts and logical operations. Otherwise return
6701 NULL. RESULT_TYPE is the desired result type. */
6703 tree
6704 fold_single_bit_test (location_t loc, enum tree_code code,
6705 tree arg0, tree arg1, tree result_type)
6707 /* If this is testing a single bit, we can optimize the test. */
6708 if ((code == NE_EXPR || code == EQ_EXPR)
6709 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6710 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6712 tree inner = TREE_OPERAND (arg0, 0);
6713 tree type = TREE_TYPE (arg0);
6714 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6715 machine_mode operand_mode = TYPE_MODE (type);
6716 int ops_unsigned;
6717 tree signed_type, unsigned_type, intermediate_type;
6718 tree tem, one;
6720 /* First, see if we can fold the single bit test into a sign-bit
6721 test. */
6722 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6723 result_type);
6724 if (tem)
6725 return tem;
6727 /* Otherwise we have (A & C) != 0 where C is a single bit,
6728 convert that into ((A >> C2) & 1), where C2 = log2(C).
6729 Similarly for (A & C) == 0. */
6731 /* If INNER is a right shift of a constant and it plus BITNUM does
6732 not overflow, adjust BITNUM and INNER. */
6733 if (TREE_CODE (inner) == RSHIFT_EXPR
6734 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6735 && bitnum < TYPE_PRECISION (type)
6736 && wi::ltu_p (TREE_OPERAND (inner, 1),
6737 TYPE_PRECISION (type) - bitnum))
6739 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6740 inner = TREE_OPERAND (inner, 0);
6743 /* If we are going to be able to omit the AND below, we must do our
6744 operations as unsigned. If we must use the AND, we have a choice.
6745 Normally unsigned is faster, but for some machines signed is. */
6746 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6747 && !flag_syntax_only) ? 0 : 1;
6749 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6750 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6751 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6752 inner = fold_convert_loc (loc, intermediate_type, inner);
6754 if (bitnum != 0)
6755 inner = build2 (RSHIFT_EXPR, intermediate_type,
6756 inner, size_int (bitnum));
6758 one = build_int_cst (intermediate_type, 1);
6760 if (code == EQ_EXPR)
6761 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6763 /* Put the AND last so it can combine with more things. */
6764 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6766 /* Make sure to return the proper type. */
6767 inner = fold_convert_loc (loc, result_type, inner);
6769 return inner;
6771 return NULL_TREE;
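/* For example, "(A & 8) != 0" has no sign-bit shortcut, so it becomes
   "(A >> 3) & 1" in the chosen intermediate type; for "(A & 8) == 0"
   an XOR with 1 is inserted before the final AND, giving
   "((A >> 3) ^ 1) & 1".  */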
6774 /* Test whether it is preferable to swap two operands, ARG0 and
6775 ARG1, for example because ARG0 is an integer constant and ARG1
6776 isn't. */
6778 bool
6779 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6781 if (CONSTANT_CLASS_P (arg1))
6782 return 0;
6783 if (CONSTANT_CLASS_P (arg0))
6784 return 1;
6786 STRIP_NOPS (arg0);
6787 STRIP_NOPS (arg1);
6789 if (TREE_CONSTANT (arg1))
6790 return 0;
6791 if (TREE_CONSTANT (arg0))
6792 return 1;
6794 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6795 for commutative and comparison operators. Ensuring a canonical
6796 form allows the optimizers to find additional redundancies without
6797 having to explicitly check for both orderings. */
6798 if (TREE_CODE (arg0) == SSA_NAME
6799 && TREE_CODE (arg1) == SSA_NAME
6800 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6801 return 1;
6803 /* Put SSA_NAMEs last. */
6804 if (TREE_CODE (arg1) == SSA_NAME)
6805 return 0;
6806 if (TREE_CODE (arg0) == SSA_NAME)
6807 return 1;
6809 /* Put variables last. */
6810 if (DECL_P (arg1))
6811 return 0;
6812 if (DECL_P (arg0))
6813 return 1;
6815 return 0;
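/* For instance, for "1 + x" this predicate returns true (the constant
   should move to the second operand, giving the canonical "x + 1"),
   while for "x + 1" it returns false; two SSA_NAMEs are ordered by
   SSA_NAME_VERSION so that commutative operands always appear in one
   fixed order.  */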
6819 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6820 means A >= Y && A != MAX, but in this case we know that
6821 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6823 static tree
6824 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6826 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6828 if (TREE_CODE (bound) == LT_EXPR)
6829 a = TREE_OPERAND (bound, 0);
6830 else if (TREE_CODE (bound) == GT_EXPR)
6831 a = TREE_OPERAND (bound, 1);
6832 else
6833 return NULL_TREE;
6835 typea = TREE_TYPE (a);
6836 if (!INTEGRAL_TYPE_P (typea)
6837 && !POINTER_TYPE_P (typea))
6838 return NULL_TREE;
6840 if (TREE_CODE (ineq) == LT_EXPR)
6842 a1 = TREE_OPERAND (ineq, 1);
6843 y = TREE_OPERAND (ineq, 0);
6845 else if (TREE_CODE (ineq) == GT_EXPR)
6847 a1 = TREE_OPERAND (ineq, 0);
6848 y = TREE_OPERAND (ineq, 1);
6850 else
6851 return NULL_TREE;
6853 if (TREE_TYPE (a1) != typea)
6854 return NULL_TREE;
6856 if (POINTER_TYPE_P (typea))
6858 /* Convert the pointers to integers before taking the difference. */
6859 tree ta = fold_convert_loc (loc, ssizetype, a);
6860 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6861 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6863 else
6864 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6866 if (!diff || !integer_onep (diff))
6867 return NULL_TREE;
6869 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6872 /* Fold a sum or difference of at least one multiplication.
6873 Returns the folded tree or NULL if no simplification could be made. */
6875 static tree
6876 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6877 tree arg0, tree arg1)
6879 tree arg00, arg01, arg10, arg11;
6880 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6882 /* (A * C) +- (B * C) -> (A+-B) * C.
6883 (A * C) +- A -> A * (C+-1).
6884 We are most concerned about the case where C is a constant,
6885 but other combinations show up during loop reduction. Since
6886 it is not difficult, try all four possibilities. */
6888 if (TREE_CODE (arg0) == MULT_EXPR)
6890 arg00 = TREE_OPERAND (arg0, 0);
6891 arg01 = TREE_OPERAND (arg0, 1);
6893 else if (TREE_CODE (arg0) == INTEGER_CST)
6895 arg00 = build_one_cst (type);
6896 arg01 = arg0;
6898 else
6900 /* We cannot generate constant 1 for fract. */
6901 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6902 return NULL_TREE;
6903 arg00 = arg0;
6904 arg01 = build_one_cst (type);
6906 if (TREE_CODE (arg1) == MULT_EXPR)
6908 arg10 = TREE_OPERAND (arg1, 0);
6909 arg11 = TREE_OPERAND (arg1, 1);
6911 else if (TREE_CODE (arg1) == INTEGER_CST)
6913 arg10 = build_one_cst (type);
6914 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6915 the purpose of this canonicalization. */
6916 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6917 && negate_expr_p (arg1)
6918 && code == PLUS_EXPR)
6920 arg11 = negate_expr (arg1);
6921 code = MINUS_EXPR;
6923 else
6924 arg11 = arg1;
6926 else
6928 /* We cannot generate constant 1 for fract. */
6929 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6930 return NULL_TREE;
6931 arg10 = arg1;
6932 arg11 = build_one_cst (type);
6934 same = NULL_TREE;
6936 if (operand_equal_p (arg01, arg11, 0))
6937 same = arg01, alt0 = arg00, alt1 = arg10;
6938 else if (operand_equal_p (arg00, arg10, 0))
6939 same = arg00, alt0 = arg01, alt1 = arg11;
6940 else if (operand_equal_p (arg00, arg11, 0))
6941 same = arg00, alt0 = arg01, alt1 = arg10;
6942 else if (operand_equal_p (arg01, arg10, 0))
6943 same = arg01, alt0 = arg00, alt1 = arg11;
6945 /* No identical multiplicands; see if we can find a common
6946 power-of-two factor in non-power-of-two multiplies. This
6947 can help in multi-dimensional array access. */
6948 else if (tree_fits_shwi_p (arg01)
6949 && tree_fits_shwi_p (arg11))
6951 HOST_WIDE_INT int01, int11, tmp;
6952 bool swap = false;
6953 tree maybe_same;
6954 int01 = tree_to_shwi (arg01);
6955 int11 = tree_to_shwi (arg11);
6957 /* Move min of absolute values to int11. */
6958 if (absu_hwi (int01) < absu_hwi (int11))
6960 tmp = int01, int01 = int11, int11 = tmp;
6961 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6962 maybe_same = arg01;
6963 swap = true;
6965 else
6966 maybe_same = arg11;
6968 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6969 /* The remainder should not be a constant, otherwise we
6970 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6971 increase the number of multiplications necessary. */
6972 && TREE_CODE (arg10) != INTEGER_CST)
6974 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6975 build_int_cst (TREE_TYPE (arg00),
6976 int01 / int11));
6977 alt1 = arg10;
6978 same = maybe_same;
6979 if (swap)
6980 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6984 if (same)
6985 return fold_build2_loc (loc, MULT_EXPR, type,
6986 fold_build2_loc (loc, code, type,
6987 fold_convert_loc (loc, type, alt0),
6988 fold_convert_loc (loc, type, alt1)),
6989 fold_convert_loc (loc, type, same));
6991 return NULL_TREE;
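/* Factoring sketches with hypothetical operands:  "i * 4 + i * 12" has
   the identical multiplicand i and folds to "(4 + 12) * i"; with no
   identical multiplicand, "x * 12 + y * 4" still factors through the
   power-of-two path as "(x * 3 + y) * 4", which helps fold
   multi-dimensional array indexing.  */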
6994 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6995 specified by EXPR into the buffer PTR of length LEN bytes.
6996 Return the number of bytes placed in the buffer, or zero
6997 upon failure. */
6999 static int
7000 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7002 tree type = TREE_TYPE (expr);
7003 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7004 int byte, offset, word, words;
7005 unsigned char value;
7007 if ((off == -1 && total_bytes > len)
7008 || off >= total_bytes)
7009 return 0;
7010 if (off == -1)
7011 off = 0;
7012 words = total_bytes / UNITS_PER_WORD;
7014 for (byte = 0; byte < total_bytes; byte++)
7016 int bitpos = byte * BITS_PER_UNIT;
7017 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7018 number of bytes. */
7019 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7021 if (total_bytes > UNITS_PER_WORD)
7023 word = byte / UNITS_PER_WORD;
7024 if (WORDS_BIG_ENDIAN)
7025 word = (words - 1) - word;
7026 offset = word * UNITS_PER_WORD;
7027 if (BYTES_BIG_ENDIAN)
7028 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7029 else
7030 offset += byte % UNITS_PER_WORD;
7032 else
7033 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7034 if (offset >= off
7035 && offset - off < len)
7036 ptr[offset - off] = value;
7038 return MIN (len, total_bytes - off);
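/* Byte-order sketch:  encoding the 32-bit constant 0x01020304 with
   OFF == -1 fills the buffer with { 0x04, 0x03, 0x02, 0x01 } on a
   little-endian target and { 0x01, 0x02, 0x03, 0x04 } on a big-endian
   one; the word shuffle above additionally handles hosts where the
   value spans several words.  */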
7042 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7043 specified by EXPR into the buffer PTR of length LEN bytes.
7044 Return the number of bytes placed in the buffer, or zero
7045 upon failure. */
7047 static int
7048 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7050 tree type = TREE_TYPE (expr);
7051 machine_mode mode = TYPE_MODE (type);
7052 int total_bytes = GET_MODE_SIZE (mode);
7053 FIXED_VALUE_TYPE value;
7054 tree i_value, i_type;
7056 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7057 return 0;
7059 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7061 if (NULL_TREE == i_type
7062 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7063 return 0;
7065 value = TREE_FIXED_CST (expr);
7066 i_value = double_int_to_tree (i_type, value.data);
7068 return native_encode_int (i_value, ptr, len, off);
7072 /* Subroutine of native_encode_expr. Encode the REAL_CST
7073 specified by EXPR into the buffer PTR of length LEN bytes.
7074 Return the number of bytes placed in the buffer, or zero
7075 upon failure. */
7077 static int
7078 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7080 tree type = TREE_TYPE (expr);
7081 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7082 int byte, offset, word, words, bitpos;
7083 unsigned char value;
7085 /* There are always 32 bits in each long, no matter the size of
7086 the host's long. We handle floating point representations with
7087 up to 192 bits. */
7088 long tmp[6];
7090 if ((off == -1 && total_bytes > len)
7091 || off >= total_bytes)
7092 return 0;
7093 if (off == -1)
7094 off = 0;
7095 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7097 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7099 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7100 bitpos += BITS_PER_UNIT)
7102 byte = (bitpos / BITS_PER_UNIT) & 3;
7103 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7105 if (UNITS_PER_WORD < 4)
7107 word = byte / UNITS_PER_WORD;
7108 if (WORDS_BIG_ENDIAN)
7109 word = (words - 1) - word;
7110 offset = word * UNITS_PER_WORD;
7111 if (BYTES_BIG_ENDIAN)
7112 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7113 else
7114 offset += byte % UNITS_PER_WORD;
7116 else
7118 offset = byte;
7119 if (BYTES_BIG_ENDIAN)
7121 /* Reverse bytes within each long, or within the entire float
7122 if it's smaller than a long (for HFmode). */
7123 offset = MIN (3, total_bytes - 1) - offset;
7124 gcc_assert (offset >= 0);
7127 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7128 if (offset >= off
7129 && offset - off < len)
7130 ptr[offset - off] = value;
7132 return MIN (len, total_bytes - off);
7135 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7136 specified by EXPR into the buffer PTR of length LEN bytes.
7137 Return the number of bytes placed in the buffer, or zero
7138 upon failure. */
7140 static int
7141 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7143 int rsize, isize;
7144 tree part;
7146 part = TREE_REALPART (expr);
7147 rsize = native_encode_expr (part, ptr, len, off);
7148 if (off == -1
7149 && rsize == 0)
7150 return 0;
7151 part = TREE_IMAGPART (expr);
7152 if (off != -1)
7153 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7154 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7155 if (off == -1
7156 && isize != rsize)
7157 return 0;
7158 return rsize + isize;
7162 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7163 specified by EXPR into the buffer PTR of length LEN bytes.
7164 Return the number of bytes placed in the buffer, or zero
7165 upon failure. */
7167 static int
7168 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7170 unsigned i, count;
7171 int size, offset;
7172 tree itype, elem;
7174 offset = 0;
7175 count = VECTOR_CST_NELTS (expr);
7176 itype = TREE_TYPE (TREE_TYPE (expr));
7177 size = GET_MODE_SIZE (TYPE_MODE (itype));
7178 for (i = 0; i < count; i++)
7180 if (off >= size)
7182 off -= size;
7183 continue;
7185 elem = VECTOR_CST_ELT (expr, i);
7186 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7187 if ((off == -1 && res != size)
7188 || res == 0)
7189 return 0;
7190 offset += res;
7191 if (offset >= len)
7192 return offset;
7193 if (off != -1)
7194 off = 0;
7196 return offset;
7200 /* Subroutine of native_encode_expr. Encode the STRING_CST
7201 specified by EXPR into the buffer PTR of length LEN bytes.
7202 Return the number of bytes placed in the buffer, or zero
7203 upon failure. */
7205 static int
7206 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7208 tree type = TREE_TYPE (expr);
7209 HOST_WIDE_INT total_bytes;
7211 if (TREE_CODE (type) != ARRAY_TYPE
7212 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7213 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7214 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7215 return 0;
7216 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7217 if ((off == -1 && total_bytes > len)
7218 || off >= total_bytes)
7219 return 0;
7220 if (off == -1)
7221 off = 0;
7222 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7224 int written = 0;
7225 if (off < TREE_STRING_LENGTH (expr))
7227 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7228 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7230 memset (ptr + written, 0,
7231 MIN (total_bytes - written, len - written));
7233 else
7234 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7235 return MIN (total_bytes - off, len);
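/* Padding sketch:  a STRING_CST "ab" stored in a char[5] array has
   TREE_STRING_LENGTH 3 (including the terminating NUL) but a total
   size of 5 bytes, so the last two bytes of the buffer are zero-filled
   by the memset path above.  */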
7239 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7240 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7241 by EXPR into the buffer PTR of length LEN bytes. If OFF is not -1 then start
7242 the encoding at byte offset OFF and encode at most LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero upon failure. */
7245 int
7246 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7248 /* We don't support starting at a negative offset, and -1 is special. */
7249 if (off < -1)
7250 return 0;
7252 switch (TREE_CODE (expr))
7254 case INTEGER_CST:
7255 return native_encode_int (expr, ptr, len, off);
7257 case REAL_CST:
7258 return native_encode_real (expr, ptr, len, off);
7260 case FIXED_CST:
7261 return native_encode_fixed (expr, ptr, len, off);
7263 case COMPLEX_CST:
7264 return native_encode_complex (expr, ptr, len, off);
7266 case VECTOR_CST:
7267 return native_encode_vector (expr, ptr, len, off);
7269 case STRING_CST:
7270 return native_encode_string (expr, ptr, len, off);
7272 default:
7273 return 0;
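/* Usage sketch, assuming both sides encode and interpret fully:

     unsigned char buf[16];
     int len = native_encode_expr (expr, buf, sizeof buf, -1);
     tree folded = len ? native_interpret_expr (type, buf, len)
                       : NULL_TREE;

   which is essentially how fold_view_convert_expr pairs this routine
   with native_interpret_expr below.  */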
7278 /* Subroutine of native_interpret_expr. Interpret the contents of
7279 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7280 If the buffer cannot be interpreted, return NULL_TREE. */
7282 static tree
7283 native_interpret_int (tree type, const unsigned char *ptr, int len)
7285 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7287 if (total_bytes > len
7288 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7289 return NULL_TREE;
7291 wide_int result = wi::from_buffer (ptr, total_bytes);
7293 return wide_int_to_tree (type, result);
7297 /* Subroutine of native_interpret_expr. Interpret the contents of
7298 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7299 If the buffer cannot be interpreted, return NULL_TREE. */
7301 static tree
7302 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 double_int result;
7306 FIXED_VALUE_TYPE fixed_value;
7308 if (total_bytes > len
7309 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7310 return NULL_TREE;
7312 result = double_int::from_buffer (ptr, total_bytes);
7313 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7315 return build_fixed (type, fixed_value);
7319 /* Subroutine of native_interpret_expr. Interpret the contents of
7320 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7321 If the buffer cannot be interpreted, return NULL_TREE. */
7323 static tree
7324 native_interpret_real (tree type, const unsigned char *ptr, int len)
7326 machine_mode mode = TYPE_MODE (type);
7327 int total_bytes = GET_MODE_SIZE (mode);
7328 unsigned char value;
7329 /* There are always 32 bits in each long, no matter the size of
7330 the host's long. We handle floating-point representations with
7331 up to 192 bits. */
7332 REAL_VALUE_TYPE r;
7333 long tmp[6];
7335 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7336 if (total_bytes > len || total_bytes > 24)
7337 return NULL_TREE;
7338 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7340 memset (tmp, 0, sizeof (tmp));
7341 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7342 bitpos += BITS_PER_UNIT)
7344 /* Both OFFSET and BYTE index within a long;
7345 bitpos indexes the whole float. */
7346 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7347 if (UNITS_PER_WORD < 4)
7349 int word = byte / UNITS_PER_WORD;
7350 if (WORDS_BIG_ENDIAN)
7351 word = (words - 1) - word;
7352 offset = word * UNITS_PER_WORD;
7353 if (BYTES_BIG_ENDIAN)
7354 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7355 else
7356 offset += byte % UNITS_PER_WORD;
7358 else
7360 offset = byte;
7361 if (BYTES_BIG_ENDIAN)
7363 /* Reverse bytes within each long, or within the entire float
7364 if it's smaller than a long (for HFmode). */
7365 offset = MIN (3, total_bytes - 1) - offset;
7366 gcc_assert (offset >= 0);
7369 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7371 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7374 real_from_target (&r, tmp, mode);
7375 return build_real (type, r);
7379 /* Subroutine of native_interpret_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7381 If the buffer cannot be interpreted, return NULL_TREE. */
7383 static tree
7384 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7386 tree etype, rpart, ipart;
7387 int size;
7389 etype = TREE_TYPE (type);
7390 size = GET_MODE_SIZE (TYPE_MODE (etype));
7391 if (size * 2 > len)
7392 return NULL_TREE;
7393 rpart = native_interpret_expr (etype, ptr, size);
7394 if (!rpart)
7395 return NULL_TREE;
7396 ipart = native_interpret_expr (etype, ptr+size, size);
7397 if (!ipart)
7398 return NULL_TREE;
7399 return build_complex (type, rpart, ipart);
7403 /* Subroutine of native_interpret_expr. Interpret the contents of
7404 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7405 If the buffer cannot be interpreted, return NULL_TREE. */
7407 static tree
7408 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7410 tree etype, elem;
7411 int i, size, count;
7412 tree *elements;
7414 etype = TREE_TYPE (type);
7415 size = GET_MODE_SIZE (TYPE_MODE (etype));
7416 count = TYPE_VECTOR_SUBPARTS (type);
7417 if (size * count > len)
7418 return NULL_TREE;
7420 elements = XALLOCAVEC (tree, count);
7421 for (i = count - 1; i >= 0; i--)
7423 elem = native_interpret_expr (etype, ptr+(i*size), size);
7424 if (!elem)
7425 return NULL_TREE;
7426 elements[i] = elem;
7428 return build_vector (type, elements);
7432 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a constant of type TYPE. For
7434 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7435 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7436 return NULL_TREE. */
7438 tree
7439 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7441 switch (TREE_CODE (type))
7443 case INTEGER_TYPE:
7444 case ENUMERAL_TYPE:
7445 case BOOLEAN_TYPE:
7446 case POINTER_TYPE:
7447 case REFERENCE_TYPE:
7448 return native_interpret_int (type, ptr, len);
7450 case REAL_TYPE:
7451 return native_interpret_real (type, ptr, len);
7453 case FIXED_POINT_TYPE:
7454 return native_interpret_fixed (type, ptr, len);
7456 case COMPLEX_TYPE:
7457 return native_interpret_complex (type, ptr, len);
7459 case VECTOR_TYPE:
7460 return native_interpret_vector (type, ptr, len);
7462 default:
7463 return NULL_TREE;
7467 /* Returns true if we can interpret the contents of a native encoding
7468 as TYPE. */
7470 static bool
7471 can_native_interpret_type_p (tree type)
7473 switch (TREE_CODE (type))
7475 case INTEGER_TYPE:
7476 case ENUMERAL_TYPE:
7477 case BOOLEAN_TYPE:
7478 case POINTER_TYPE:
7479 case REFERENCE_TYPE:
7480 case FIXED_POINT_TYPE:
7481 case REAL_TYPE:
7482 case COMPLEX_TYPE:
7483 case VECTOR_TYPE:
7484 return true;
7485 default:
7486 return false;
7490 /* Return true iff a constant of type TYPE is accepted by
7491 native_encode_expr. */
7493 bool
7494 can_native_encode_type_p (tree type)
7496 switch (TREE_CODE (type))
7498 case INTEGER_TYPE:
7499 case REAL_TYPE:
7500 case FIXED_POINT_TYPE:
7501 case COMPLEX_TYPE:
7502 case VECTOR_TYPE:
7503 case POINTER_TYPE:
7504 return true;
7505 default:
7506 return false;
7510 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7511 TYPE at compile-time. If we're unable to perform the conversion
7512 return NULL_TREE. */
7514 static tree
7515 fold_view_convert_expr (tree type, tree expr)
7517 /* We support up to 512-bit values (for V8DFmode). */
7518 unsigned char buffer[64];
7519 int len;
7521 /* Check that the host and target are sane. */
7522 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7523 return NULL_TREE;
7525 len = native_encode_expr (expr, buffer, sizeof (buffer));
7526 if (len == 0)
7527 return NULL_TREE;
7529 return native_interpret_expr (type, buffer, len);
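/* Illustrative sketch (editorial addition, not in the upstream file):
   fold_view_convert_expr reinterprets bits, it does not convert
   values.  Assumes IEEE-754 single-precision float and a 32-bit int
   on the target.  */

static tree
view_convert_example (void)
{
  tree f = build_real (float_type_node, dconst1);	/* 1.0f */
  /* Yields the INTEGER_CST 0x3f800000, the bit pattern of 1.0f,
     rather than the integer 1.  */
  return fold_view_convert_expr (integer_type_node, f);
}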
7532 /* Build an expression for the address of T. Folds away INDIRECT_REF
7533 to avoid confusing the gimplify process. */
7535 tree
7536 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7538 /* The size of the object is not relevant when talking about its address. */
7539 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7540 t = TREE_OPERAND (t, 0);
7542 if (TREE_CODE (t) == INDIRECT_REF)
7544 t = TREE_OPERAND (t, 0);
7546 if (TREE_TYPE (t) != ptrtype)
7547 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7549 else if (TREE_CODE (t) == MEM_REF
7550 && integer_zerop (TREE_OPERAND (t, 1)))
7551 return TREE_OPERAND (t, 0);
7552 else if (TREE_CODE (t) == MEM_REF
7553 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7554 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7555 TREE_OPERAND (t, 0),
7556 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7557 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7559 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7561 if (TREE_TYPE (t) != ptrtype)
7562 t = fold_convert_loc (loc, ptrtype, t);
7564 else
7565 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7567 return t;
7570 /* Build an expression for the address of T. */
7572 tree
7573 build_fold_addr_expr_loc (location_t loc, tree t)
7575 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7577 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7580 /* Fold a unary expression of code CODE and type TYPE with operand
7581 OP0. Return the folded expression if folding is successful.
7582 Otherwise, return NULL_TREE. */
7584 tree
7585 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7587 tree tem;
7588 tree arg0;
7589 enum tree_code_class kind = TREE_CODE_CLASS (code);
7591 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7592 && TREE_CODE_LENGTH (code) == 1);
7594 arg0 = op0;
7595 if (arg0)
7597 if (CONVERT_EXPR_CODE_P (code)
7598 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7600 /* Don't use STRIP_NOPS, because signedness of argument type
7601 matters. */
7602 STRIP_SIGN_NOPS (arg0);
7604 else
7606 /* Strip any conversions that don't change the mode. This
7607 is safe for every expression, except for a comparison
7608 expression because its signedness is derived from its
7609 operands.
7611 Note that this is done as an internal manipulation within
7612 the constant folder, in order to find the simplest
7613 representation of the arguments so that their form can be
7614 studied. In any case, the appropriate type conversions
7615 should be put back in the tree that will get out of the
7616 constant folder. */
7617 STRIP_NOPS (arg0);
7620 if (CONSTANT_CLASS_P (arg0))
7622 tree tem = const_unop (code, type, arg0);
7623 if (tem)
7625 if (TREE_TYPE (tem) != type)
7626 tem = fold_convert_loc (loc, type, tem);
7627 return tem;
7632 tem = generic_simplify (loc, code, type, op0);
7633 if (tem)
7634 return tem;
7636 if (TREE_CODE_CLASS (code) == tcc_unary)
7638 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7639 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7640 fold_build1_loc (loc, code, type,
7641 fold_convert_loc (loc, TREE_TYPE (op0),
7642 TREE_OPERAND (arg0, 1))));
7643 else if (TREE_CODE (arg0) == COND_EXPR)
7645 tree arg01 = TREE_OPERAND (arg0, 1);
7646 tree arg02 = TREE_OPERAND (arg0, 2);
7647 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7648 arg01 = fold_build1_loc (loc, code, type,
7649 fold_convert_loc (loc,
7650 TREE_TYPE (op0), arg01));
7651 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7652 arg02 = fold_build1_loc (loc, code, type,
7653 fold_convert_loc (loc,
7654 TREE_TYPE (op0), arg02));
7655 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7656 arg01, arg02);
7658 /* If this was a conversion, and all we did was to move it
7659 inside the COND_EXPR, bring it back out. But leave it if
7660 it is a conversion from integer to integer and the
7661 result precision is no wider than a word since such a
7662 conversion is cheap and may be optimized away by combine,
7663 while it couldn't if it were outside the COND_EXPR. Then return
7664 so we don't get into an infinite recursion loop taking the
7665 conversion out and then back in. */
7667 if ((CONVERT_EXPR_CODE_P (code)
7668 || code == NON_LVALUE_EXPR)
7669 && TREE_CODE (tem) == COND_EXPR
7670 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7671 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7672 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7673 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7674 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7675 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7676 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7677 && (INTEGRAL_TYPE_P
7678 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7679 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7680 || flag_syntax_only))
7681 tem = build1_loc (loc, code, type,
7682 build3 (COND_EXPR,
7683 TREE_TYPE (TREE_OPERAND
7684 (TREE_OPERAND (tem, 1), 0)),
7685 TREE_OPERAND (tem, 0),
7686 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7687 TREE_OPERAND (TREE_OPERAND (tem, 2),
7688 0)));
7689 return tem;
7693 switch (code)
7695 case NON_LVALUE_EXPR:
7696 if (!maybe_lvalue_p (op0))
7697 return fold_convert_loc (loc, type, op0);
7698 return NULL_TREE;
7700 CASE_CONVERT:
7701 case FLOAT_EXPR:
7702 case FIX_TRUNC_EXPR:
7703 if (COMPARISON_CLASS_P (op0))
7705 /* If we have (type) (a CMP b) and type is an integral type, return
7706 new expression involving the new type. Canonicalize
7707 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7708 non-integral type.
7709 Do not fold the result as that would not simplify further; also,
7710 folding again results in recursion. */
7711 if (TREE_CODE (type) == BOOLEAN_TYPE)
7712 return build2_loc (loc, TREE_CODE (op0), type,
7713 TREE_OPERAND (op0, 0),
7714 TREE_OPERAND (op0, 1));
7715 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7716 && TREE_CODE (type) != VECTOR_TYPE)
7717 return build3_loc (loc, COND_EXPR, type, op0,
7718 constant_boolean_node (true, type),
7719 constant_boolean_node (false, type));
7722 /* Handle (T *)&A.B.C for A being of type T and B and C
7723 living at offset zero. This occurs frequently in
7724 C++ upcasting and then accessing the base. */
7725 if (TREE_CODE (op0) == ADDR_EXPR
7726 && POINTER_TYPE_P (type)
7727 && handled_component_p (TREE_OPERAND (op0, 0)))
7729 HOST_WIDE_INT bitsize, bitpos;
7730 tree offset;
7731 machine_mode mode;
7732 int unsignedp, reversep, volatilep;
7733 tree base
7734 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7735 &offset, &mode, &unsignedp, &reversep,
7736 &volatilep);
7737 /* If the reference was to a (constant) zero offset, we can use
7738 the address of the base if it has the same base type
7739 as the result type and the pointer type is unqualified. */
7740 if (! offset && bitpos == 0
7741 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7742 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7743 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7744 return fold_convert_loc (loc, type,
7745 build_fold_addr_expr_loc (loc, base));
7748 if (TREE_CODE (op0) == MODIFY_EXPR
7749 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7750 /* Detect assigning a bitfield. */
7751 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7752 && DECL_BIT_FIELD
7753 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7755 /* Don't leave an assignment inside a conversion
7756 unless assigning a bitfield. */
7757 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7758 /* First do the assignment, then return converted constant. */
7759 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7760 TREE_NO_WARNING (tem) = 1;
7761 TREE_USED (tem) = 1;
7762 return tem;
7765 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7766 constant (if x has signed type, the sign bit cannot be set
7767 in c). This folds extension into the BIT_AND_EXPR.
7768 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7769 very likely don't have maximal range for their precision and this
7770 transformation effectively doesn't preserve non-maximal ranges. */
7771 if (TREE_CODE (type) == INTEGER_TYPE
7772 && TREE_CODE (op0) == BIT_AND_EXPR
7773 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7775 tree and_expr = op0;
7776 tree and0 = TREE_OPERAND (and_expr, 0);
7777 tree and1 = TREE_OPERAND (and_expr, 1);
7778 int change = 0;
7780 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7781 || (TYPE_PRECISION (type)
7782 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7783 change = 1;
7784 else if (TYPE_PRECISION (TREE_TYPE (and1))
7785 <= HOST_BITS_PER_WIDE_INT
7786 && tree_fits_uhwi_p (and1))
7788 unsigned HOST_WIDE_INT cst;
7790 cst = tree_to_uhwi (and1);
7791 cst &= HOST_WIDE_INT_M1U
7792 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7793 change = (cst == 0);
7794 if (change
7795 && !flag_syntax_only
7796 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7797 == ZERO_EXTEND))
7799 tree uns = unsigned_type_for (TREE_TYPE (and0));
7800 and0 = fold_convert_loc (loc, uns, and0);
7801 and1 = fold_convert_loc (loc, uns, and1);
7804 if (change)
7806 tem = force_fit_type (type, wi::to_widest (and1), 0,
7807 TREE_OVERFLOW (and1));
7808 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7809 fold_convert_loc (loc, type, and0), tem);
7813 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7814 cast (T1)X will fold away. We assume that this happens when X itself
7815 is a cast. */
7816 if (POINTER_TYPE_P (type)
7817 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7818 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7820 tree arg00 = TREE_OPERAND (arg0, 0);
7821 tree arg01 = TREE_OPERAND (arg0, 1);
7823 return fold_build_pointer_plus_loc
7824 (loc, fold_convert_loc (loc, type, arg00), arg01);
7827 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7828 of the same precision, and X is an integer type not narrower than
7829 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7830 if (INTEGRAL_TYPE_P (type)
7831 && TREE_CODE (op0) == BIT_NOT_EXPR
7832 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7833 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7834 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7836 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7837 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7838 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7839 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7840 fold_convert_loc (loc, type, tem));
7843 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7844 type of X and Y (integer types only). */
7845 if (INTEGRAL_TYPE_P (type)
7846 && TREE_CODE (op0) == MULT_EXPR
7847 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7848 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7850 /* Be careful not to introduce new overflows. */
7851 tree mult_type;
7852 if (TYPE_OVERFLOW_WRAPS (type))
7853 mult_type = type;
7854 else
7855 mult_type = unsigned_type_for (type);
7857 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7859 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7860 fold_convert_loc (loc, mult_type,
7861 TREE_OPERAND (op0, 0)),
7862 fold_convert_loc (loc, mult_type,
7863 TREE_OPERAND (op0, 1)));
7864 return fold_convert_loc (loc, type, tem);
7868 return NULL_TREE;
7870 case VIEW_CONVERT_EXPR:
7871 if (TREE_CODE (op0) == MEM_REF)
7873 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7874 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7875 tem = fold_build2_loc (loc, MEM_REF, type,
7876 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7877 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7878 return tem;
7881 return NULL_TREE;
7883 case NEGATE_EXPR:
7884 tem = fold_negate_expr (loc, arg0);
7885 if (tem)
7886 return fold_convert_loc (loc, type, tem);
7887 return NULL_TREE;
7889 case ABS_EXPR:
7890 /* Convert fabs((double)float) into (double)fabsf(float). */
7891 if (TREE_CODE (arg0) == NOP_EXPR
7892 && TREE_CODE (type) == REAL_TYPE)
7894 tree targ0 = strip_float_extensions (arg0);
7895 if (targ0 != arg0)
7896 return fold_convert_loc (loc, type,
7897 fold_build1_loc (loc, ABS_EXPR,
7898 TREE_TYPE (targ0),
7899 targ0));
7901 return NULL_TREE;
7903 case BIT_NOT_EXPR:
7904 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7905 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7906 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7907 fold_convert_loc (loc, type,
7908 TREE_OPERAND (arg0, 0)))))
7909 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7910 fold_convert_loc (loc, type,
7911 TREE_OPERAND (arg0, 1)));
7912 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7913 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7914 fold_convert_loc (loc, type,
7915 TREE_OPERAND (arg0, 1)))))
7916 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7917 fold_convert_loc (loc, type,
7918 TREE_OPERAND (arg0, 0)), tem);
7920 return NULL_TREE;
7922 case TRUTH_NOT_EXPR:
7923 /* Note that the operand of this must be an int
7924 and its values must be 0 or 1.
7925 ("true" is a fixed value perhaps depending on the language,
7926 but we don't handle values other than 1 correctly yet.) */
7927 tem = fold_truth_not_expr (loc, arg0);
7928 if (!tem)
7929 return NULL_TREE;
7930 return fold_convert_loc (loc, type, tem);
7932 case INDIRECT_REF:
7933 /* Fold *&X to X if X is an lvalue. */
7934 if (TREE_CODE (op0) == ADDR_EXPR)
7936 tree op00 = TREE_OPERAND (op0, 0);
7937 if ((VAR_P (op00)
7938 || TREE_CODE (op00) == PARM_DECL
7939 || TREE_CODE (op00) == RESULT_DECL)
7940 && !TREE_READONLY (op00))
7941 return op00;
7943 return NULL_TREE;
7945 default:
7946 return NULL_TREE;
7947 } /* switch (code) */
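/* Editorial note (illustrative, not from the upstream sources): typical
   folds performed above include NEGATE_EXPR of the INTEGER_CST 5 giving
   -5 via const_unop, and BIT_NOT_EXPR of x ^ 7 giving x ^ ~7 via the
   BIT_XOR_EXPR case; whenever no rule applies the function returns
   NULL_TREE and the caller builds the tree unsimplified.  */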
7951 /* If the operation was a conversion, do _not_ mark a resulting constant
7952 with TREE_OVERFLOW if the original constant was not. These conversions
7953 have implementation defined behavior and retaining the TREE_OVERFLOW
7954 flag here would confuse later passes such as VRP. */
7955 tree
7956 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7957 tree type, tree op0)
7959 tree res = fold_unary_loc (loc, code, type, op0);
7960 if (res
7961 && TREE_CODE (res) == INTEGER_CST
7962 && TREE_CODE (op0) == INTEGER_CST
7963 && CONVERT_EXPR_CODE_P (code))
7964 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7966 return res;
7969 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7970 operands OP0 and OP1. LOC is the location of the resulting expression.
7971 ARG0 and ARG1 are the STRIP_NOPS results of OP0 and OP1.
7972 Return the folded expression if folding is successful. Otherwise,
7973 return NULL_TREE. */
7974 static tree
7975 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7976 tree arg0, tree arg1, tree op0, tree op1)
7978 tree tem;
7980 /* We only do these simplifications if we are optimizing. */
7981 if (!optimize)
7982 return NULL_TREE;
7984 /* Check for things like (A || B) && (A || C). We can convert this
7985 to A || (B && C). Note that either operator can be any of the four
7986 truth and/or operations and the transformation will still be
7987 valid. Also note that we only care about order for the
7988 ANDIF and ORIF operators. If B contains side effects, this
7989 might change the truth-value of A. */
7990 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7991 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7992 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7993 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7994 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7995 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7997 tree a00 = TREE_OPERAND (arg0, 0);
7998 tree a01 = TREE_OPERAND (arg0, 1);
7999 tree a10 = TREE_OPERAND (arg1, 0);
8000 tree a11 = TREE_OPERAND (arg1, 1);
8001 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8002 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8003 && (code == TRUTH_AND_EXPR
8004 || code == TRUTH_OR_EXPR));
8006 if (operand_equal_p (a00, a10, 0))
8007 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8008 fold_build2_loc (loc, code, type, a01, a11));
8009 else if (commutative && operand_equal_p (a00, a11, 0))
8010 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8011 fold_build2_loc (loc, code, type, a01, a10));
8012 else if (commutative && operand_equal_p (a01, a10, 0))
8013 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8014 fold_build2_loc (loc, code, type, a00, a11));
8016 /* This case is tricky because we must either have commutative
8017 operators or else A10 must not have side-effects. */
8019 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8020 && operand_equal_p (a01, a11, 0))
8021 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8022 fold_build2_loc (loc, code, type, a00, a10),
8023 a01);
8026 /* See if we can build a range comparison. */
8027 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8028 return tem;
8030 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8031 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8033 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8034 if (tem)
8035 return fold_build2_loc (loc, code, type, tem, arg1);
8038 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8039 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8041 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8042 if (tem)
8043 return fold_build2_loc (loc, code, type, arg0, tem);
8046 /* Check for the possibility of merging component references. If our
8047 lhs is another similar operation, try to merge its rhs with our
8048 rhs. Then try to merge our lhs and rhs. */
8049 if (TREE_CODE (arg0) == code
8050 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8051 TREE_OPERAND (arg0, 1), arg1)))
8052 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8054 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8055 return tem;
8057 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8058 && (code == TRUTH_AND_EXPR
8059 || code == TRUTH_ANDIF_EXPR
8060 || code == TRUTH_OR_EXPR
8061 || code == TRUTH_ORIF_EXPR))
8063 enum tree_code ncode, icode;
8065 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8066 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8067 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8069 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8070 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8071 We don't want to pack more than two leaves into a non-IF AND/OR
8072 expression.
8073 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8074 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8075 If the inner right-hand side of the left-hand operand has side effects,
8076 or isn't simple, then we can't add to it, as otherwise we might destroy
8077 the if-sequence. */
8078 if (TREE_CODE (arg0) == icode
8079 && simple_operand_p_2 (arg1)
8080 /* Needed for sequence points to handle trapping and
8081 side effects. */
8082 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8084 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8085 arg1);
8086 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8087 tem);
8089 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8090 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8091 else if (TREE_CODE (arg1) == icode
8092 && simple_operand_p_2 (arg0)
8093 /* Needed for sequence points to handle trapping and
8094 side effects. */
8095 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8097 tem = fold_build2_loc (loc, ncode, type,
8098 arg0, TREE_OPERAND (arg1, 0));
8099 return fold_build2_loc (loc, icode, type, tem,
8100 TREE_OPERAND (arg1, 1));
8102 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8103 into (A OR B).
8104 For sequence-point consistency, we need to check for trapping
8105 and side effects. */
8106 else if (code == icode && simple_operand_p_2 (arg0)
8107 && simple_operand_p_2 (arg1))
8108 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8111 return NULL_TREE;
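/* Illustrative sketch (editorial addition, not in the upstream file):
   the distribution rule at the top of fold_truth_andor in action.
   When optimizing, and when B has no side effects,
   (a || b) && (a || c) folds to a || (b && c), saving one test of A.  */

static tree
truth_andor_example (location_t loc, tree a, tree b, tree c)
{
  tree btype = boolean_type_node;
  tree lhs = fold_build2_loc (loc, TRUTH_ORIF_EXPR, btype, a, b);
  tree rhs = fold_build2_loc (loc, TRUTH_ORIF_EXPR, btype, a, c);
  /* Reaches fold_truth_andor through fold_binary_loc.  */
  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, btype, lhs, rhs);
}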
8114 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8115 by changing CODE to reduce the magnitude of constants involved in
8116 ARG0 of the comparison.
8117 Returns a canonicalized comparison tree if a simplification was
8118 possible, otherwise returns NULL_TREE.
8119 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8120 valid if signed overflow is undefined. */
8122 static tree
8123 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8124 tree arg0, tree arg1,
8125 bool *strict_overflow_p)
8127 enum tree_code code0 = TREE_CODE (arg0);
8128 tree t, cst0 = NULL_TREE;
8129 int sgn0;
8131 /* Match A +- CST code arg1. We can change this only if overflow
8132 is undefined. */
8133 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8134 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8135 /* In principle pointers also have undefined overflow behavior,
8136 but that causes problems elsewhere. */
8137 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8138 && (code0 == MINUS_EXPR
8139 || code0 == PLUS_EXPR)
8140 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8141 return NULL_TREE;
8143 /* Identify the constant in arg0 and its sign. */
8144 cst0 = TREE_OPERAND (arg0, 1);
8145 sgn0 = tree_int_cst_sgn (cst0);
8147 /* Overflowed constants and zero will cause problems. */
8148 if (integer_zerop (cst0)
8149 || TREE_OVERFLOW (cst0))
8150 return NULL_TREE;
8152 /* See if we can reduce the magnitude of the constant in
8153 arg0 by changing the comparison code. */
8154 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8155 if (code == LT_EXPR
8156 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8157 code = LE_EXPR;
8158 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8159 else if (code == GT_EXPR
8160 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8161 code = GE_EXPR;
8162 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8163 else if (code == LE_EXPR
8164 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8165 code = LT_EXPR;
8166 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8167 else if (code == GE_EXPR
8168 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8169 code = GT_EXPR;
8170 else
8171 return NULL_TREE;
8172 *strict_overflow_p = true;
8174 /* Now build the constant reduced in magnitude. But not if that
8175 would produce one outside of its type's range. */
8176 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8177 && ((sgn0 == 1
8178 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8179 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8180 || (sgn0 == -1
8181 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8182 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8183 return NULL_TREE;
8185 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8186 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8187 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8188 t = fold_convert (TREE_TYPE (arg1), t);
8190 return fold_build2_loc (loc, code, type, t, arg1);
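/* Editorial note (illustrative, not from the upstream sources): one
   step of the reduction above turns X - 5 < Y into X - 4 <= Y, and
   X + 5 > Y into X + 4 >= Y, moving the constant one closer to zero.
   The rewrite relies on signed overflow being undefined, which is why
   *STRICT_OVERFLOW_P is set.  */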
8193 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8194 overflow further. Try to decrease the magnitude of constants involved
8195 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8196 and putting sole constants at the second argument position.
8197 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8199 static tree
8200 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8201 tree arg0, tree arg1)
8203 tree t;
8204 bool strict_overflow_p;
8205 const char * const warnmsg = G_("assuming signed overflow does not occur "
8206 "when reducing constant in comparison");
8208 /* Try canonicalization by simplifying arg0. */
8209 strict_overflow_p = false;
8210 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8211 &strict_overflow_p);
8212 if (t)
8214 if (strict_overflow_p)
8215 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8216 return t;
8219 /* Try canonicalization by simplifying arg1 using the swapped
8220 comparison. */
8221 code = swap_tree_comparison (code);
8222 strict_overflow_p = false;
8223 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8224 &strict_overflow_p);
8225 if (t && strict_overflow_p)
8226 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8227 return t;
8230 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8231 space. This is used to avoid issuing overflow warnings for
8232 expressions like &p->x which cannot wrap.
8234 static bool
8235 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8237 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8238 return true;
8240 if (bitpos < 0)
8241 return true;
8243 wide_int wi_offset;
8244 int precision = TYPE_PRECISION (TREE_TYPE (base));
8245 if (offset == NULL_TREE)
8246 wi_offset = wi::zero (precision);
8247 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8248 return true;
8249 else
8250 wi_offset = offset;
8252 bool overflow;
8253 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8254 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8255 if (overflow)
8256 return true;
8258 if (!wi::fits_uhwi_p (total))
8259 return true;
8261 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8262 if (size <= 0)
8263 return true;
8265 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8266 array. */
8267 if (TREE_CODE (base) == ADDR_EXPR)
8269 HOST_WIDE_INT base_size;
8271 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8272 if (base_size > 0 && size < base_size)
8273 size = base_size;
8276 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8279 /* Return a positive integer when the symbol DECL is known to have
8280 a nonzero address, zero when it's known not to (e.g., it's a weak
8281 symbol), and a negative integer when the symbol is not yet in the
8282 symbol table and so whether or not its address is zero is unknown.
8283 For function-local objects, always return a positive integer. */
8284 static int
8285 maybe_nonzero_address (tree decl)
8287 if (DECL_P (decl) && decl_in_symtab_p (decl))
8288 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8289 return symbol->nonzero_address ();
8291 /* Function local objects are never NULL. */
8292 if (DECL_P (decl)
8293 && (DECL_CONTEXT (decl)
8294 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8295 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8296 return 1;
8298 return -1;
8301 /* Subroutine of fold_binary. This routine performs all of the
8302 transformations that are common to the equality/inequality
8303 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8304 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8305 fold_binary itself should call fold_binary, not this function. Fold a comparison with
8306 tree code CODE and type TYPE with operands OP0 and OP1. Return
8307 the folded comparison or NULL_TREE. */
8309 static tree
8310 fold_comparison (location_t loc, enum tree_code code, tree type,
8311 tree op0, tree op1)
8313 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8314 tree arg0, arg1, tem;
8316 arg0 = op0;
8317 arg1 = op1;
8319 STRIP_SIGN_NOPS (arg0);
8320 STRIP_SIGN_NOPS (arg1);
8322 /* For comparisons of pointers we can decompose them into a compile-time
8323 comparison of the base objects and the offsets into the objects.
8324 This requires at least one operand being an ADDR_EXPR or a
8325 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8326 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8327 && (TREE_CODE (arg0) == ADDR_EXPR
8328 || TREE_CODE (arg1) == ADDR_EXPR
8329 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8330 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8332 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8333 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8334 machine_mode mode;
8335 int volatilep, reversep, unsignedp;
8336 bool indirect_base0 = false, indirect_base1 = false;
8338 /* Get base and offset for the access. Strip ADDR_EXPR for
8339 get_inner_reference, but put it back by stripping INDIRECT_REF
8340 off the base object if possible. indirect_baseN will be true
8341 if baseN is not an address but refers to the object itself. */
8342 base0 = arg0;
8343 if (TREE_CODE (arg0) == ADDR_EXPR)
8345 base0
8346 = get_inner_reference (TREE_OPERAND (arg0, 0),
8347 &bitsize, &bitpos0, &offset0, &mode,
8348 &unsignedp, &reversep, &volatilep);
8349 if (TREE_CODE (base0) == INDIRECT_REF)
8350 base0 = TREE_OPERAND (base0, 0);
8351 else
8352 indirect_base0 = true;
8354 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8356 base0 = TREE_OPERAND (arg0, 0);
8357 STRIP_SIGN_NOPS (base0);
8358 if (TREE_CODE (base0) == ADDR_EXPR)
8360 base0
8361 = get_inner_reference (TREE_OPERAND (base0, 0),
8362 &bitsize, &bitpos0, &offset0, &mode,
8363 &unsignedp, &reversep, &volatilep);
8364 if (TREE_CODE (base0) == INDIRECT_REF)
8365 base0 = TREE_OPERAND (base0, 0);
8366 else
8367 indirect_base0 = true;
8369 if (offset0 == NULL_TREE || integer_zerop (offset0))
8370 offset0 = TREE_OPERAND (arg0, 1);
8371 else
8372 offset0 = size_binop (PLUS_EXPR, offset0,
8373 TREE_OPERAND (arg0, 1));
8374 if (TREE_CODE (offset0) == INTEGER_CST)
8376 offset_int tem = wi::sext (wi::to_offset (offset0),
8377 TYPE_PRECISION (sizetype));
8378 tem <<= LOG2_BITS_PER_UNIT;
8379 tem += bitpos0;
8380 if (wi::fits_shwi_p (tem))
8382 bitpos0 = tem.to_shwi ();
8383 offset0 = NULL_TREE;
8388 base1 = arg1;
8389 if (TREE_CODE (arg1) == ADDR_EXPR)
8391 base1
8392 = get_inner_reference (TREE_OPERAND (arg1, 0),
8393 &bitsize, &bitpos1, &offset1, &mode,
8394 &unsignedp, &reversep, &volatilep);
8395 if (TREE_CODE (base1) == INDIRECT_REF)
8396 base1 = TREE_OPERAND (base1, 0);
8397 else
8398 indirect_base1 = true;
8400 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8402 base1 = TREE_OPERAND (arg1, 0);
8403 STRIP_SIGN_NOPS (base1);
8404 if (TREE_CODE (base1) == ADDR_EXPR)
8406 base1
8407 = get_inner_reference (TREE_OPERAND (base1, 0),
8408 &bitsize, &bitpos1, &offset1, &mode,
8409 &unsignedp, &reversep, &volatilep);
8410 if (TREE_CODE (base1) == INDIRECT_REF)
8411 base1 = TREE_OPERAND (base1, 0);
8412 else
8413 indirect_base1 = true;
8415 if (offset1 == NULL_TREE || integer_zerop (offset1))
8416 offset1 = TREE_OPERAND (arg1, 1);
8417 else
8418 offset1 = size_binop (PLUS_EXPR, offset1,
8419 TREE_OPERAND (arg1, 1));
8420 if (TREE_CODE (offset1) == INTEGER_CST)
8422 offset_int tem = wi::sext (wi::to_offset (offset1),
8423 TYPE_PRECISION (sizetype));
8424 tem <<= LOG2_BITS_PER_UNIT;
8425 tem += bitpos1;
8426 if (wi::fits_shwi_p (tem))
8428 bitpos1 = tem.to_shwi ();
8429 offset1 = NULL_TREE;
8434 /* If we have equivalent bases we might be able to simplify. */
8435 if (indirect_base0 == indirect_base1
8436 && operand_equal_p (base0, base1,
8437 indirect_base0 ? OEP_ADDRESS_OF : 0))
8439 /* We can fold this expression to a constant if the non-constant
8440 offset parts are equal. */
8441 if ((offset0 == offset1
8442 || (offset0 && offset1
8443 && operand_equal_p (offset0, offset1, 0)))
8444 && (equality_code
8445 || (indirect_base0
8446 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8447 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8450 if (!equality_code
8451 && bitpos0 != bitpos1
8452 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8453 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8454 fold_overflow_warning (("assuming pointer wraparound does not "
8455 "occur when comparing P +- C1 with "
8456 "P +- C2"),
8457 WARN_STRICT_OVERFLOW_CONDITIONAL);
8459 switch (code)
8461 case EQ_EXPR:
8462 return constant_boolean_node (bitpos0 == bitpos1, type);
8463 case NE_EXPR:
8464 return constant_boolean_node (bitpos0 != bitpos1, type);
8465 case LT_EXPR:
8466 return constant_boolean_node (bitpos0 < bitpos1, type);
8467 case LE_EXPR:
8468 return constant_boolean_node (bitpos0 <= bitpos1, type);
8469 case GE_EXPR:
8470 return constant_boolean_node (bitpos0 >= bitpos1, type);
8471 case GT_EXPR:
8472 return constant_boolean_node (bitpos0 > bitpos1, type);
8473 default:;
8476 /* We can simplify the comparison to a comparison of the variable
8477 offset parts if the constant offset parts are equal.
8478 Be careful to use signed sizetype here because otherwise we
8479 mess with array offsets in the wrong way. This is possible
8480 because pointer arithmetic is restricted to remain within an
8481 object and overflow on pointer differences is undefined as of
8482 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8483 else if (bitpos0 == bitpos1
8484 && (equality_code
8485 || (indirect_base0
8486 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8487 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8489 /* By converting to signed sizetype we cover middle-end pointer
8490 arithmetic which operates on unsigned pointer types of size
8491 type size and ARRAY_REF offsets which are properly sign or
8492 zero extended from their type in case it is narrower than
8493 sizetype. */
8494 if (offset0 == NULL_TREE)
8495 offset0 = build_int_cst (ssizetype, 0);
8496 else
8497 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8498 if (offset1 == NULL_TREE)
8499 offset1 = build_int_cst (ssizetype, 0);
8500 else
8501 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8503 if (!equality_code
8504 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8505 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8506 fold_overflow_warning (("assuming pointer wraparound does not "
8507 "occur when comparing P +- C1 with "
8508 "P +- C2"),
8509 WARN_STRICT_OVERFLOW_COMPARISON);
8511 return fold_build2_loc (loc, code, type, offset0, offset1);
8514 /* For equal offsets we can simplify to a comparison of the
8515 base addresses. */
8516 else if (bitpos0 == bitpos1
8517 && (indirect_base0
8518 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8519 && (indirect_base1
8520 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8521 && ((offset0 == offset1)
8522 || (offset0 && offset1
8523 && operand_equal_p (offset0, offset1, 0))))
8525 if (indirect_base0)
8526 base0 = build_fold_addr_expr_loc (loc, base0);
8527 if (indirect_base1)
8528 base1 = build_fold_addr_expr_loc (loc, base1);
8529 return fold_build2_loc (loc, code, type, base0, base1);
8531 /* Comparison between an ordinary (non-weak) symbol and a null
8532 pointer can be eliminated since such symbols must have a
8533 non-null address. In C, relational expressions between pointers
8534 to objects and null pointers are undefined. The results
8535 below follow the C++ rules with the additional property that
8536 every object pointer compares greater than a null pointer.
8538 else if (((DECL_P (base0)
8539 && maybe_nonzero_address (base0) > 0
8540 /* Avoid folding references to struct members at offset 0 to
8541 prevent tests like '&ptr->firstmember == 0' from getting
8542 eliminated. When ptr is null, although the -> expression
8543 is strictly speaking invalid, GCC retains it as a matter
8544 of QoI. See PR c/44555. */
8545 && (offset0 == NULL_TREE && bitpos0 != 0))
8546 || CONSTANT_CLASS_P (base0))
8547 && indirect_base0
8548 /* The caller guarantees that when one of the arguments is
8549 constant (i.e., null in this case) it is second. */
8550 && integer_zerop (arg1))
8552 switch (code)
8554 case EQ_EXPR:
8555 case LE_EXPR:
8556 case LT_EXPR:
8557 return constant_boolean_node (false, type);
8558 case GE_EXPR:
8559 case GT_EXPR:
8560 case NE_EXPR:
8561 return constant_boolean_node (true, type);
8562 default:
8563 gcc_unreachable ();
8568 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8569 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8570 the resulting offset is smaller in absolute value than the
8571 original one and has the same sign. */
8572 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8573 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8574 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8575 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8576 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8577 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8578 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8579 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8581 tree const1 = TREE_OPERAND (arg0, 1);
8582 tree const2 = TREE_OPERAND (arg1, 1);
8583 tree variable1 = TREE_OPERAND (arg0, 0);
8584 tree variable2 = TREE_OPERAND (arg1, 0);
8585 tree cst;
8586 const char * const warnmsg = G_("assuming signed overflow does not "
8587 "occur when combining constants around "
8588 "a comparison");
8590 /* Put the constant on the side where it doesn't overflow and is
8591 of lower absolute value and of the same sign as before. */
8592 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8593 ? MINUS_EXPR : PLUS_EXPR,
8594 const2, const1);
8595 if (!TREE_OVERFLOW (cst)
8596 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8597 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8599 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8600 return fold_build2_loc (loc, code, type,
8601 variable1,
8602 fold_build2_loc (loc, TREE_CODE (arg1),
8603 TREE_TYPE (arg1),
8604 variable2, cst));
8607 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8608 ? MINUS_EXPR : PLUS_EXPR,
8609 const1, const2);
8610 if (!TREE_OVERFLOW (cst)
8611 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8612 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8614 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8615 return fold_build2_loc (loc, code, type,
8616 fold_build2_loc (loc, TREE_CODE (arg0),
8617 TREE_TYPE (arg0),
8618 variable1, cst),
8619 variable2);
8623 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8624 if (tem)
8625 return tem;
8627 /* If we are comparing an expression that just has comparisons
8628 of two integer values, arithmetic expressions of those comparisons,
8629 and constants, we can simplify it. There are only three cases
8630 to check: the two values can either be equal, the first can be
8631 greater, or the second can be greater. Fold the expression for
8632 those three values. Since each value must be 0 or 1, we have
8633 eight possibilities, each of which corresponds to the constant 0
8634 or 1 or one of the six possible comparisons.
8636 This handles common cases like (a > b) == 0 but also handles
8637 expressions like ((x > y) - (y > x)) > 0, which supposedly
8638 occur in macroized code. */
8640 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8642 tree cval1 = 0, cval2 = 0;
8643 int save_p = 0;
8645 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8646 /* Don't handle degenerate cases here; they should already
8647 have been handled anyway. */
8648 && cval1 != 0 && cval2 != 0
8649 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8650 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8651 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8652 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8653 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8654 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8655 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8657 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8658 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8660 /* We can't just pass T to eval_subst in case cval1 or cval2
8661 was the same as ARG1. */
8663 tree high_result
8664 = fold_build2_loc (loc, code, type,
8665 eval_subst (loc, arg0, cval1, maxval,
8666 cval2, minval),
8667 arg1);
8668 tree equal_result
8669 = fold_build2_loc (loc, code, type,
8670 eval_subst (loc, arg0, cval1, maxval,
8671 cval2, maxval),
8672 arg1);
8673 tree low_result
8674 = fold_build2_loc (loc, code, type,
8675 eval_subst (loc, arg0, cval1, minval,
8676 cval2, maxval),
8677 arg1);
8679 /* All three of these results should be 0 or 1. Confirm they are.
8680 Then use those values to select the proper code to use. */
8682 if (TREE_CODE (high_result) == INTEGER_CST
8683 && TREE_CODE (equal_result) == INTEGER_CST
8684 && TREE_CODE (low_result) == INTEGER_CST)
8686 /* Make a 3-bit mask with the high-order bit being the
8687 value for `>', the next for '=', and the low for '<'. */
8688 switch ((integer_onep (high_result) * 4)
8689 + (integer_onep (equal_result) * 2)
8690 + integer_onep (low_result))
8692 case 0:
8693 /* Always false. */
8694 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8695 case 1:
8696 code = LT_EXPR;
8697 break;
8698 case 2:
8699 code = EQ_EXPR;
8700 break;
8701 case 3:
8702 code = LE_EXPR;
8703 break;
8704 case 4:
8705 code = GT_EXPR;
8706 break;
8707 case 5:
8708 code = NE_EXPR;
8709 break;
8710 case 6:
8711 code = GE_EXPR;
8712 break;
8713 case 7:
8714 /* Always true. */
8715 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8718 if (save_p)
8720 tem = save_expr (build2 (code, type, cval1, cval2));
8721 protected_set_expr_location (tem, loc);
8722 return tem;
8724 return fold_build2_loc (loc, code, type, cval1, cval2);
8729 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8730 into a single range test. */
8731 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8732 && TREE_CODE (arg1) == INTEGER_CST
8733 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8734 && !integer_zerop (TREE_OPERAND (arg0, 1))
8735 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8736 && !TREE_OVERFLOW (arg1))
8738 tem = fold_div_compare (loc, code, type, arg0, arg1);
8739 if (tem != NULL_TREE)
8740 return tem;
8743 return NULL_TREE;
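/* Illustrative sketch (editorial addition, not in the upstream file):
   address comparisons with a common base fold to constants.  ARRAY is
   assumed to be a VAR_DECL of array type; both operands decompose to
   the same base with constant bit positions, so the NE_EXPR case above
   yields boolean true.  */

static tree
compare_addresses_example (location_t loc, tree array)
{
  tree elt_type = TREE_TYPE (TREE_TYPE (array));
  tree ref1 = build4 (ARRAY_REF, elt_type, array, size_int (1),
		      NULL_TREE, NULL_TREE);
  tree ref3 = build4 (ARRAY_REF, elt_type, array, size_int (3),
		      NULL_TREE, NULL_TREE);
  /* Folds &array[1] != &array[3] to constant true.  */
  return fold_build2_loc (loc, NE_EXPR, boolean_type_node,
			  build_fold_addr_expr_loc (loc, ref1),
			  build_fold_addr_expr_loc (loc, ref3));
}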
8747 /* Subroutine of fold_binary. Optimize complex multiplications of the
8748 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8749 argument EXPR represents the expression "z" of type TYPE. */
8751 static tree
8752 fold_mult_zconjz (location_t loc, tree type, tree expr)
8754 tree itype = TREE_TYPE (type);
8755 tree rpart, ipart, tem;
8757 if (TREE_CODE (expr) == COMPLEX_EXPR)
8759 rpart = TREE_OPERAND (expr, 0);
8760 ipart = TREE_OPERAND (expr, 1);
8762 else if (TREE_CODE (expr) == COMPLEX_CST)
8764 rpart = TREE_REALPART (expr);
8765 ipart = TREE_IMAGPART (expr);
8767 else
8769 expr = save_expr (expr);
8770 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8771 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8774 rpart = save_expr (rpart);
8775 ipart = save_expr (ipart);
8776 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8777 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8778 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8779 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8780 build_zero_cst (itype));
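/* Editorial note (illustrative, not from the upstream sources): the
   identity used above is z * conj(z) = (a + bi)(a - bi) = a*a + b*b,
   with an imaginary part of exactly zero -- e.g.
   (3 + 4i) * conj(3 + 4i) = 25 + 0i -- which is why the result pairs
   the sum of squares with build_zero_cst.  */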
8784 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8785 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8787 static bool
8788 vec_cst_ctor_to_array (tree arg, tree *elts)
8790 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8792 if (TREE_CODE (arg) == VECTOR_CST)
8794 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8795 elts[i] = VECTOR_CST_ELT (arg, i);
8797 else if (TREE_CODE (arg) == CONSTRUCTOR)
8799 constructor_elt *elt;
8801 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8802 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8803 return false;
8804 else
8805 elts[i] = elt->value;
8807 else
8808 return false;
8809 for (; i < nelts; i++)
8810 elts[i]
8811 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8812 return true;
8815 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8816 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8817 NULL_TREE otherwise. */
8819 static tree
8820 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8822 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8823 tree *elts;
8824 bool need_ctor = false;
8826 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8827 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8828 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8829 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8830 return NULL_TREE;
8832 elts = XALLOCAVEC (tree, nelts * 3);
8833 if (!vec_cst_ctor_to_array (arg0, elts)
8834 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8835 return NULL_TREE;
8837 for (i = 0; i < nelts; i++)
8839 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8840 need_ctor = true;
8841 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8844 if (need_ctor)
8846 vec<constructor_elt, va_gc> *v;
8847 vec_alloc (v, nelts);
8848 for (i = 0; i < nelts; i++)
8849 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8850 return build_constructor (type, v);
8852 else
8853 return build_vector (type, &elts[2 * nelts]);
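/* Editorial note (illustrative, not from the upstream sources): with
   ARG0 = {1,2,3,4}, ARG1 = {5,6,7,8} and SEL = {0,4,1,5}, the loop
   above selects from the concatenation ARG0|ARG1 and produces
   {1,5,2,6}; since every selected element is constant, a VECTOR_CST
   is built rather than a CONSTRUCTOR.  */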
8856 /* Try to fold a pointer difference of type TYPE between two address
8857 expressions of array references AREF0 and AREF1 using location LOC. Return a
8858 simplified expression for the difference or NULL_TREE. */
8860 static tree
8861 fold_addr_of_array_ref_difference (location_t loc, tree type,
8862 tree aref0, tree aref1)
8864 tree base0 = TREE_OPERAND (aref0, 0);
8865 tree base1 = TREE_OPERAND (aref1, 0);
8866 tree base_offset = build_int_cst (type, 0);
8868 /* If the bases are array references as well, recurse. If the bases
8869 are pointer indirections compute the difference of the pointers.
8870 If the bases are equal, we are set. */
8871 if ((TREE_CODE (base0) == ARRAY_REF
8872 && TREE_CODE (base1) == ARRAY_REF
8873 && (base_offset
8874 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8875 || (INDIRECT_REF_P (base0)
8876 && INDIRECT_REF_P (base1)
8877 && (base_offset
8878 = fold_binary_loc (loc, MINUS_EXPR, type,
8879 fold_convert (type, TREE_OPERAND (base0, 0)),
8880 fold_convert (type,
8881 TREE_OPERAND (base1, 0)))))
8882 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8884 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8885 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8886 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8887 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8888 return fold_build2_loc (loc, PLUS_EXPR, type,
8889 base_offset,
8890 fold_build2_loc (loc, MULT_EXPR, type,
8891 diff, esz));
8893 return NULL_TREE;
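/* Editorial note (illustrative, not from the upstream sources): for
   'int a[10]' with a 4-byte int, the difference &a[i] - &a[j] folds
   via the routine above to (i - j) * 4 -- a zero base offset plus the
   index difference scaled by the element size -- with the recursion
   handling nested references like &a[i][k] for multi-dimensional
   arrays.  */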
8896 /* If the real or vector real constant CST of type TYPE has an exact
8897 inverse, return it, else return NULL. */
8899 tree
8900 exact_inverse (tree type, tree cst)
8902 REAL_VALUE_TYPE r;
8903 tree unit_type, *elts;
8904 machine_mode mode;
8905 unsigned vec_nelts, i;
8907 switch (TREE_CODE (cst))
8909 case REAL_CST:
8910 r = TREE_REAL_CST (cst);
8912 if (exact_real_inverse (TYPE_MODE (type), &r))
8913 return build_real (type, r);
8915 return NULL_TREE;
8917 case VECTOR_CST:
8918 vec_nelts = VECTOR_CST_NELTS (cst);
8919 elts = XALLOCAVEC (tree, vec_nelts);
8920 unit_type = TREE_TYPE (type);
8921 mode = TYPE_MODE (unit_type);
8923 for (i = 0; i < vec_nelts; i++)
8925 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8926 if (!exact_real_inverse (mode, &r))
8927 return NULL_TREE;
8928 elts[i] = build_real (unit_type, r);
8931 return build_vector (type, elts);
8933 default:
8934 return NULL_TREE;
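/* Editorial note (illustrative, not from the upstream sources):
   exact_inverse of the REAL_CST 4.0 yields 0.25, which is exactly
   representable in binary; for 3.0 it yields NULL_TREE since 1/3 has
   no exact binary representation.  The VECTOR_CST case requires every
   element to have an exact inverse.  */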
8938 /* Mask out the tz least significant bits of X of type TYPE where
8939 tz is the number of trailing zeroes in Y. */
8940 static wide_int
8941 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8943 int tz = wi::ctz (y);
8944 if (tz > 0)
8945 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8946 return x;
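/* Editorial note (illustrative, not from the upstream sources): with
   Y = 24 (binary 11000, three trailing zeros), mask_with_tz returns X
   with its three low bits cleared, e.g. X = 23 (binary 10111) becomes
   16 (binary 10000), reflecting that any multiple of Y has those bits
   zero.  */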
8949 /* Return true when T is an address and is known to be nonzero.
8950 For floating point we further ensure that T is not denormal.
8951 Similar logic is present in nonzero_address in rtlanal.c.
8953 If the return value is based on the assumption that signed overflow
8954 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8955 change *STRICT_OVERFLOW_P. */
8957 static bool
8958 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8960 tree type = TREE_TYPE (t);
8961 enum tree_code code;
8963 /* Doing something useful for floating point would need more work. */
8964 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8965 return false;
8967 code = TREE_CODE (t);
8968 switch (TREE_CODE_CLASS (code))
8970 case tcc_unary:
8971 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8972 strict_overflow_p);
8973 case tcc_binary:
8974 case tcc_comparison:
8975 return tree_binary_nonzero_warnv_p (code, type,
8976 TREE_OPERAND (t, 0),
8977 TREE_OPERAND (t, 1),
8978 strict_overflow_p);
8979 case tcc_constant:
8980 case tcc_declaration:
8981 case tcc_reference:
8982 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8984 default:
8985 break;
8988 switch (code)
8990 case TRUTH_NOT_EXPR:
8991 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8992 strict_overflow_p);
8994 case TRUTH_AND_EXPR:
8995 case TRUTH_OR_EXPR:
8996 case TRUTH_XOR_EXPR:
8997 return tree_binary_nonzero_warnv_p (code, type,
8998 TREE_OPERAND (t, 0),
8999 TREE_OPERAND (t, 1),
9000 strict_overflow_p);
9002 case COND_EXPR:
9003 case CONSTRUCTOR:
9004 case OBJ_TYPE_REF:
9005 case ASSERT_EXPR:
9006 case ADDR_EXPR:
9007 case WITH_SIZE_EXPR:
9008 case SSA_NAME:
9009 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9011 case COMPOUND_EXPR:
9012 case MODIFY_EXPR:
9013 case BIND_EXPR:
9014 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9015 strict_overflow_p);
9017 case SAVE_EXPR:
9018 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9019 strict_overflow_p);
9021 case CALL_EXPR:
9023 tree fndecl = get_callee_fndecl (t);
9024 if (!fndecl) return false;
9025 if (flag_delete_null_pointer_checks && !flag_check_new
9026 && DECL_IS_OPERATOR_NEW (fndecl)
9027 && !TREE_NOTHROW (fndecl))
9028 return true;
9029 if (flag_delete_null_pointer_checks
9030 && lookup_attribute ("returns_nonnull",
9031 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9032 return true;
9033 return alloca_call_p (t);
9036 default:
9037 break;
9039 return false;
9042 /* Return true when T is an address and is known to be nonzero.
9043 Handle warnings about undefined signed overflow. */
9045 bool
9046 tree_expr_nonzero_p (tree t)
9048 bool ret, strict_overflow_p;
9050 strict_overflow_p = false;
9051 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9052 if (strict_overflow_p)
9053 fold_overflow_warning (("assuming signed overflow does not occur when "
9054 "determining that expression is always "
9055 "non-zero"),
9056 WARN_STRICT_OVERFLOW_MISC);
9057 return ret;
9060 /* Return true if T is known not to be equal to an integer W. */
9062 bool
9063 expr_not_equal_to (tree t, const wide_int &w)
9065 wide_int min, max, nz;
9066 value_range_type rtype;
9067 switch (TREE_CODE (t))
9069 case INTEGER_CST:
9070 return wi::ne_p (t, w);
9072 case SSA_NAME:
9073 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9074 return false;
9075 rtype = get_range_info (t, &min, &max);
9076 if (rtype == VR_RANGE)
9078 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9079 return true;
9080 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9081 return true;
9083 else if (rtype == VR_ANTI_RANGE
9084 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9085 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9086 return true;
9087 /* If T has some known zero bits and W has any of those bits set,
9088 then T is known not to be equal to W. */
9089 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9090 TYPE_PRECISION (TREE_TYPE (t))), 0))
9091 return true;
9092 return false;
9094 default:
9095 return false;
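/* Illustrative values for the range logic above (hypothetical): if SSA
   name X carries the recorded range [10, 20], then

     expr_not_equal_to (X, 5)   => true   (5 lies below the range)
     expr_not_equal_to (X, 15)  => false  (15 may be a value of X)

   and if get_nonzero_bits (X) is 0xfe (bit 0 known to be clear),
   expr_not_equal_to (X, 3) is true because 3 has bit 0 set.  */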
9099 /* Fold a binary expression of code CODE and type TYPE with operands
9100 OP0 and OP1. LOC is the location of the resulting expression.
9101 Return the folded expression if folding is successful. Otherwise,
9102 return NULL_TREE. */
9104 tree
9105 fold_binary_loc (location_t loc,
9106 enum tree_code code, tree type, tree op0, tree op1)
9108 enum tree_code_class kind = TREE_CODE_CLASS (code);
9109 tree arg0, arg1, tem;
9110 tree t1 = NULL_TREE;
9111 bool strict_overflow_p;
9112 unsigned int prec;
9114 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9115 && TREE_CODE_LENGTH (code) == 2
9116 && op0 != NULL_TREE
9117 && op1 != NULL_TREE);
9119 arg0 = op0;
9120 arg1 = op1;
9122 /* Strip any conversions that don't change the mode. This is
9123 safe for every expression, except for a comparison expression
9124 because its signedness is derived from its operands. So, in
9125 the latter case, only strip conversions that don't change the
9126 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9127 preserved.
9129 Note that this is done as an internal manipulation within the
9130 constant folder, in order to find the simplest representation
9131 of the arguments so that their form can be studied. In any
9132 case, the appropriate type conversions should be put back in
9133 the tree that will get out of the constant folder. */
9135 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9137 STRIP_SIGN_NOPS (arg0);
9138 STRIP_SIGN_NOPS (arg1);
9140 else
9142 STRIP_NOPS (arg0);
9143 STRIP_NOPS (arg1);
9146 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9147 constant but we can't do arithmetic on them. */
9148 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9150 tem = const_binop (code, type, arg0, arg1);
9151 if (tem != NULL_TREE)
9153 if (TREE_TYPE (tem) != type)
9154 tem = fold_convert_loc (loc, type, tem);
9155 return tem;
9159 /* If this is a commutative operation, and ARG0 is a constant, move it
9160 to ARG1 to reduce the number of tests below. */
9161 if (commutative_tree_code (code)
9162 && tree_swap_operands_p (arg0, arg1))
9163 return fold_build2_loc (loc, code, type, op1, op0);
9165 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9166 to ARG1 to reduce the number of tests below. */
9167 if (kind == tcc_comparison
9168 && tree_swap_operands_p (arg0, arg1))
9169 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9171 tem = generic_simplify (loc, code, type, op0, op1);
9172 if (tem)
9173 return tem;
9175 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9177 First check for cases where an arithmetic operation is applied to a
9178 compound, conditional, or comparison operation. Push the arithmetic
9179 operation inside the compound or conditional to see if any folding
9180 can then be done. Convert comparison to conditional for this purpose.
9181 This also optimizes non-constant cases that used to be done in
9182 expand_expr.
9184 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9185 one of the operands is a comparison and the other is a comparison, a
9186 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9187 code below would make the expression more complex. Change it to a
9188 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9189 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9191 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9192 || code == EQ_EXPR || code == NE_EXPR)
9193 && TREE_CODE (type) != VECTOR_TYPE
9194 && ((truth_value_p (TREE_CODE (arg0))
9195 && (truth_value_p (TREE_CODE (arg1))
9196 || (TREE_CODE (arg1) == BIT_AND_EXPR
9197 && integer_onep (TREE_OPERAND (arg1, 1)))))
9198 || (truth_value_p (TREE_CODE (arg1))
9199 && (truth_value_p (TREE_CODE (arg0))
9200 || (TREE_CODE (arg0) == BIT_AND_EXPR
9201 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9203 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9204 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9205 : TRUTH_XOR_EXPR,
9206 boolean_type_node,
9207 fold_convert_loc (loc, boolean_type_node, arg0),
9208 fold_convert_loc (loc, boolean_type_node, arg1));
9210 if (code == EQ_EXPR)
9211 tem = invert_truthvalue_loc (loc, tem);
9213 return fold_convert_loc (loc, type, tem);
9216 if (TREE_CODE_CLASS (code) == tcc_binary
9217 || TREE_CODE_CLASS (code) == tcc_comparison)
9219 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9221 tem = fold_build2_loc (loc, code, type,
9222 fold_convert_loc (loc, TREE_TYPE (op0),
9223 TREE_OPERAND (arg0, 1)), op1);
9224 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9225 tem);
9227 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9229 tem = fold_build2_loc (loc, code, type, op0,
9230 fold_convert_loc (loc, TREE_TYPE (op1),
9231 TREE_OPERAND (arg1, 1)));
9232 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9233 tem);
9236 if (TREE_CODE (arg0) == COND_EXPR
9237 || TREE_CODE (arg0) == VEC_COND_EXPR
9238 || COMPARISON_CLASS_P (arg0))
9240 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9241 arg0, arg1,
9242 /*cond_first_p=*/1);
9243 if (tem != NULL_TREE)
9244 return tem;
9247 if (TREE_CODE (arg1) == COND_EXPR
9248 || TREE_CODE (arg1) == VEC_COND_EXPR
9249 || COMPARISON_CLASS_P (arg1))
9251 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9252 arg1, arg0,
9253 /*cond_first_p=*/0);
9254 if (tem != NULL_TREE)
9255 return tem;
9259 switch (code)
9261 case MEM_REF:
9262 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9263 if (TREE_CODE (arg0) == ADDR_EXPR
9264 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9266 tree iref = TREE_OPERAND (arg0, 0);
9267 return fold_build2 (MEM_REF, type,
9268 TREE_OPERAND (iref, 0),
9269 int_const_binop (PLUS_EXPR, arg1,
9270 TREE_OPERAND (iref, 1)));
9273 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9274 if (TREE_CODE (arg0) == ADDR_EXPR
9275 && handled_component_p (TREE_OPERAND (arg0, 0)))
9277 tree base;
9278 HOST_WIDE_INT coffset;
9279 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9280 &coffset);
9281 if (!base)
9282 return NULL_TREE;
9283 return fold_build2 (MEM_REF, type,
9284 build_fold_addr_expr (base),
9285 int_const_binop (PLUS_EXPR, arg1,
9286 size_int (coffset)));
9289 return NULL_TREE;
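/* Source-level sketch of the two MEM_REF folds above (the GIMPLE shown
   is simplified and the struct is hypothetical):

     struct S { int a; int b; } s;

     MEM[&MEM[&s, 4], 8]  =>  MEM[&s, 12]
     MEM[&s.b, 4]         =>  MEM[&s, offsetof (struct S, b) + 4]  */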
9291 case POINTER_PLUS_EXPR:
9292 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9293 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9294 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9295 return fold_convert_loc (loc, type,
9296 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9297 fold_convert_loc (loc, sizetype,
9298 arg1),
9299 fold_convert_loc (loc, sizetype,
9300 arg0)));
9302 return NULL_TREE;
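/* Sketch: when stripping conversions exposes integral operands on both
   sides of the POINTER_PLUS_EXPR, the addition is done in sizetype and
   only the result is converted back to the pointer type, e.g.

     (char *) (a +p b)  =>  (char *) ((sizetype) a + (sizetype) b)

   for integral a and b.  */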
9304 case PLUS_EXPR:
9305 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9307 /* X + (X / CST) * -CST is X % CST. */
9308 if (TREE_CODE (arg1) == MULT_EXPR
9309 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9310 && operand_equal_p (arg0,
9311 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9313 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9314 tree cst1 = TREE_OPERAND (arg1, 1);
9315 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9316 cst1, cst0);
9317 if (sum && integer_zerop (sum))
9318 return fold_convert_loc (loc, type,
9319 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9320 TREE_TYPE (arg0), arg0,
9321 cst0));
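/* Worked instance of the fold above (an illustrative sketch):

     int f (int x)
     {
       return x + (x / 16) * -16;    => folds to x % 16
     }

   It applies because the MULT_EXPR constant (-16) and the divisor (16)
   sum to zero, and (x / 16) * 16 is exactly x - x % 16 under C's
   truncating division.  */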
9325 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9326 one. Make sure the type is not saturating and has the signedness of
9327 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9328 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9329 if ((TREE_CODE (arg0) == MULT_EXPR
9330 || TREE_CODE (arg1) == MULT_EXPR)
9331 && !TYPE_SATURATING (type)
9332 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9333 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9334 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9336 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9337 if (tem)
9338 return tem;
9341 if (! FLOAT_TYPE_P (type))
9343 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9344 (plus (plus (mult) (mult)) (foo)) so that we can
9345 take advantage of the factoring cases below. */
9346 if (ANY_INTEGRAL_TYPE_P (type)
9347 && TYPE_OVERFLOW_WRAPS (type)
9348 && (((TREE_CODE (arg0) == PLUS_EXPR
9349 || TREE_CODE (arg0) == MINUS_EXPR)
9350 && TREE_CODE (arg1) == MULT_EXPR)
9351 || ((TREE_CODE (arg1) == PLUS_EXPR
9352 || TREE_CODE (arg1) == MINUS_EXPR)
9353 && TREE_CODE (arg0) == MULT_EXPR)))
9355 tree parg0, parg1, parg, marg;
9356 enum tree_code pcode;
9358 if (TREE_CODE (arg1) == MULT_EXPR)
9359 parg = arg0, marg = arg1;
9360 else
9361 parg = arg1, marg = arg0;
9362 pcode = TREE_CODE (parg);
9363 parg0 = TREE_OPERAND (parg, 0);
9364 parg1 = TREE_OPERAND (parg, 1);
9365 STRIP_NOPS (parg0);
9366 STRIP_NOPS (parg1);
9368 if (TREE_CODE (parg0) == MULT_EXPR
9369 && TREE_CODE (parg1) != MULT_EXPR)
9370 return fold_build2_loc (loc, pcode, type,
9371 fold_build2_loc (loc, PLUS_EXPR, type,
9372 fold_convert_loc (loc, type,
9373 parg0),
9374 fold_convert_loc (loc, type,
9375 marg)),
9376 fold_convert_loc (loc, type, parg1));
9377 if (TREE_CODE (parg0) != MULT_EXPR
9378 && TREE_CODE (parg1) == MULT_EXPR)
9379 return
9380 fold_build2_loc (loc, PLUS_EXPR, type,
9381 fold_convert_loc (loc, type, parg0),
9382 fold_build2_loc (loc, pcode, type,
9383 fold_convert_loc (loc, type, marg),
9384 fold_convert_loc (loc, type,
9385 parg1)));
9388 else
9390 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9391 to __complex__ ( x, y ). This is not the same for SNaNs or
9392 if signed zeros are involved. */
9393 if (!HONOR_SNANS (element_mode (arg0))
9394 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9395 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9397 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9398 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9399 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9400 bool arg0rz = false, arg0iz = false;
9401 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9402 || (arg0i && (arg0iz = real_zerop (arg0i))))
9404 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9405 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9406 if (arg0rz && arg1i && real_zerop (arg1i))
9408 tree rp = arg1r ? arg1r
9409 : build1 (REALPART_EXPR, rtype, arg1);
9410 tree ip = arg0i ? arg0i
9411 : build1 (IMAGPART_EXPR, rtype, arg0);
9412 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9414 else if (arg0iz && arg1r && real_zerop (arg1r))
9416 tree rp = arg0r ? arg0r
9417 : build1 (REALPART_EXPR, rtype, arg0);
9418 tree ip = arg1i ? arg1i
9419 : build1 (IMAGPART_EXPR, rtype, arg1);
9420 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9425 if (flag_unsafe_math_optimizations
9426 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9427 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9428 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9429 return tem;
9431 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9432 We associate floats only if the user has specified
9433 -fassociative-math. */
9434 if (flag_associative_math
9435 && TREE_CODE (arg1) == PLUS_EXPR
9436 && TREE_CODE (arg0) != MULT_EXPR)
9438 tree tree10 = TREE_OPERAND (arg1, 0);
9439 tree tree11 = TREE_OPERAND (arg1, 1);
9440 if (TREE_CODE (tree11) == MULT_EXPR
9441 && TREE_CODE (tree10) == MULT_EXPR)
9443 tree tree0;
9444 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9445 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9448 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9449 We associate floats only if the user has specified
9450 -fassociative-math. */
9451 if (flag_associative_math
9452 && TREE_CODE (arg0) == PLUS_EXPR
9453 && TREE_CODE (arg1) != MULT_EXPR)
9455 tree tree00 = TREE_OPERAND (arg0, 0);
9456 tree tree01 = TREE_OPERAND (arg0, 1);
9457 if (TREE_CODE (tree01) == MULT_EXPR
9458 && TREE_CODE (tree00) == MULT_EXPR)
9460 tree tree0;
9461 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9462 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9467 bit_rotate:
9468 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9469 is a rotate of A by C1 bits. */
9470 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9471 is a rotate of A by B bits. */
9473 enum tree_code code0, code1;
9474 tree rtype;
9475 code0 = TREE_CODE (arg0);
9476 code1 = TREE_CODE (arg1);
9477 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9478 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9479 && operand_equal_p (TREE_OPERAND (arg0, 0),
9480 TREE_OPERAND (arg1, 0), 0)
9481 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9482 TYPE_UNSIGNED (rtype))
9483 /* Only create rotates in complete modes. Other cases are not
9484 expanded properly. */
9485 && (element_precision (rtype)
9486 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9488 tree tree01, tree11;
9489 enum tree_code code01, code11;
9491 tree01 = TREE_OPERAND (arg0, 1);
9492 tree11 = TREE_OPERAND (arg1, 1);
9493 STRIP_NOPS (tree01);
9494 STRIP_NOPS (tree11);
9495 code01 = TREE_CODE (tree01);
9496 code11 = TREE_CODE (tree11);
9497 if (code01 == INTEGER_CST
9498 && code11 == INTEGER_CST
9499 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9500 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9502 tem = build2_loc (loc, LROTATE_EXPR,
9503 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9504 TREE_OPERAND (arg0, 0),
9505 code0 == LSHIFT_EXPR
9506 ? TREE_OPERAND (arg0, 1)
9507 : TREE_OPERAND (arg1, 1));
9508 return fold_convert_loc (loc, type, tem);
9510 else if (code11 == MINUS_EXPR)
9512 tree tree110, tree111;
9513 tree110 = TREE_OPERAND (tree11, 0);
9514 tree111 = TREE_OPERAND (tree11, 1);
9515 STRIP_NOPS (tree110);
9516 STRIP_NOPS (tree111);
9517 if (TREE_CODE (tree110) == INTEGER_CST
9518 && 0 == compare_tree_int (tree110,
9519 element_precision
9520 (TREE_TYPE (TREE_OPERAND
9521 (arg0, 0))))
9522 && operand_equal_p (tree01, tree111, 0))
9523 return
9524 fold_convert_loc (loc, type,
9525 build2 ((code0 == LSHIFT_EXPR
9526 ? LROTATE_EXPR
9527 : RROTATE_EXPR),
9528 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9529 TREE_OPERAND (arg0, 0),
9530 TREE_OPERAND (arg0, 1)));
9532 else if (code01 == MINUS_EXPR)
9534 tree tree010, tree011;
9535 tree010 = TREE_OPERAND (tree01, 0);
9536 tree011 = TREE_OPERAND (tree01, 1);
9537 STRIP_NOPS (tree010);
9538 STRIP_NOPS (tree011);
9539 if (TREE_CODE (tree010) == INTEGER_CST
9540 && 0 == compare_tree_int (tree010,
9541 element_precision
9542 (TREE_TYPE (TREE_OPERAND
9543 (arg0, 0))))
9544 && operand_equal_p (tree11, tree011, 0))
9545 return fold_convert_loc
9546 (loc, type,
9547 build2 ((code0 != LSHIFT_EXPR
9548 ? LROTATE_EXPR
9549 : RROTATE_EXPR),
9550 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9551 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
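/* The classic source pattern recognized here (a sketch assuming a
   32-bit unsigned int, so the shift counts sum to the precision):

     unsigned rotl (unsigned x, unsigned n)
     {
       return (x << n) | (x >> (32 - n));    => LROTATE_EXPR <x, n>
     }

   The same recognition runs for '+' and '^' via the bit_rotate label.  */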
9556 associate:
9557 /* In most languages, we can't associate operations on floats through
9558 parentheses. Rather than remember where the parentheses were, we
9559 don't associate floats at all, unless the user has specified
9560 -fassociative-math.
9561 And, we need to make sure type is not saturating. */
9563 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9564 && !TYPE_SATURATING (type))
9566 tree var0, con0, lit0, minus_lit0;
9567 tree var1, con1, lit1, minus_lit1;
9568 tree atype = type;
9569 bool ok = true;
9571 /* Split both trees into variables, constants, and literals. Then
9572 associate each group together, the constants with literals,
9573 then the result with variables. This increases the chances of
9574 literals being recombined later and of generating relocatable
9575 expressions for the sum of a constant and literal. */
9576 var0 = split_tree (loc, arg0, type, code,
9577 &con0, &lit0, &minus_lit0, 0);
9578 var1 = split_tree (loc, arg1, type, code,
9579 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9581 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9582 if (code == MINUS_EXPR)
9583 code = PLUS_EXPR;
9585 /* With undefined overflow prefer doing association in a type
9586 which wraps on overflow, if that is one of the operand types. */
9587 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9588 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9590 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9591 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9592 atype = TREE_TYPE (arg0);
9593 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9594 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9595 atype = TREE_TYPE (arg1);
9596 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9599 /* With undefined overflow we can only associate constants with one
9600 variable, and constants whose association doesn't overflow. */
9601 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9602 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9604 if (var0 && var1)
9606 tree tmp0 = var0;
9607 tree tmp1 = var1;
9608 bool one_neg = false;
9610 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9612 tmp0 = TREE_OPERAND (tmp0, 0);
9613 one_neg = !one_neg;
9615 if (CONVERT_EXPR_P (tmp0)
9616 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9617 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9618 <= TYPE_PRECISION (atype)))
9619 tmp0 = TREE_OPERAND (tmp0, 0);
9620 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9622 tmp1 = TREE_OPERAND (tmp1, 0);
9623 one_neg = !one_neg;
9625 if (CONVERT_EXPR_P (tmp1)
9626 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9627 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9628 <= TYPE_PRECISION (atype)))
9629 tmp1 = TREE_OPERAND (tmp1, 0);
9630 /* The only case we can still associate with two variables
9631 is if they cancel out. */
9632 if (!one_neg
9633 || !operand_equal_p (tmp0, tmp1, 0))
9634 ok = false;
9638 /* Only do something if we found more than two objects. Otherwise,
9639 nothing has changed and we risk infinite recursion. */
9640 if (ok
9641 && (2 < ((var0 != 0) + (var1 != 0)
9642 + (con0 != 0) + (con1 != 0)
9643 + (lit0 != 0) + (lit1 != 0)
9644 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9646 bool any_overflows = false;
9647 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9648 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9649 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9650 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9651 var0 = associate_trees (loc, var0, var1, code, atype);
9652 con0 = associate_trees (loc, con0, con1, code, atype);
9653 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9654 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9655 code, atype);
9657 /* Preserve the MINUS_EXPR if the negative part of the literal is
9658 greater than the positive part. Otherwise, the multiplicative
9659 folding code (i.e. extract_muldiv) may be fooled in case
9660 unsigned constants are subtracted, like in the following
9661 example: ((X*2 + 4) - 8U)/2. */
9662 if (minus_lit0 && lit0)
9664 if (TREE_CODE (lit0) == INTEGER_CST
9665 && TREE_CODE (minus_lit0) == INTEGER_CST
9666 && tree_int_cst_lt (lit0, minus_lit0))
9668 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9669 MINUS_EXPR, atype);
9670 lit0 = 0;
9672 else
9674 lit0 = associate_trees (loc, lit0, minus_lit0,
9675 MINUS_EXPR, atype);
9676 minus_lit0 = 0;
9680 /* Don't introduce overflows through reassociation. */
9681 if (!any_overflows
9682 && ((lit0 && TREE_OVERFLOW_P (lit0))
9683 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9684 return NULL_TREE;
9686 if (minus_lit0)
9688 if (con0 == 0)
9689 return
9690 fold_convert_loc (loc, type,
9691 associate_trees (loc, var0, minus_lit0,
9692 MINUS_EXPR, atype));
9693 else
9695 con0 = associate_trees (loc, con0, minus_lit0,
9696 MINUS_EXPR, atype);
9697 return
9698 fold_convert_loc (loc, type,
9699 associate_trees (loc, var0, con0,
9700 PLUS_EXPR, atype));
9704 con0 = associate_trees (loc, con0, lit0, code, atype);
9705 return
9706 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9707 code, atype));
9711 return NULL_TREE;
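/* Association example (a sketch): with wrapping arithmetic (e.g.
   unsigned int), the split/associate machinery above rewrites

     (i + 4) + (j + 8)  =>  (i + j) + 12

   grouping variables with variables and literals with literals so the
   constants fold together.  */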
9713 case MINUS_EXPR:
9714 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9715 if (TREE_CODE (arg0) == NEGATE_EXPR
9716 && negate_expr_p (op1))
9717 return fold_build2_loc (loc, MINUS_EXPR, type,
9718 negate_expr (op1),
9719 fold_convert_loc (loc, type,
9720 TREE_OPERAND (arg0, 0)));
9722 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9723 __complex__ ( x, -y ). This is not the same for SNaNs or if
9724 signed zeros are involved. */
9725 if (!HONOR_SNANS (element_mode (arg0))
9726 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9727 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9729 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9730 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9731 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9732 bool arg0rz = false, arg0iz = false;
9733 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9734 || (arg0i && (arg0iz = real_zerop (arg0i))))
9736 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9737 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9738 if (arg0rz && arg1i && real_zerop (arg1i))
9740 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9741 arg1r ? arg1r
9742 : build1 (REALPART_EXPR, rtype, arg1));
9743 tree ip = arg0i ? arg0i
9744 : build1 (IMAGPART_EXPR, rtype, arg0);
9745 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9747 else if (arg0iz && arg1r && real_zerop (arg1r))
9749 tree rp = arg0r ? arg0r
9750 : build1 (REALPART_EXPR, rtype, arg0);
9751 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9752 arg1i ? arg1i
9753 : build1 (IMAGPART_EXPR, rtype, arg1));
9754 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9759 /* A - B -> A + (-B) if B is easily negatable. */
9760 if (negate_expr_p (op1)
9761 && ! TYPE_OVERFLOW_SANITIZED (type)
9762 && ((FLOAT_TYPE_P (type)
9763 /* Avoid this transformation if B is a positive REAL_CST. */
9764 && (TREE_CODE (op1) != REAL_CST
9765 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9766 || INTEGRAL_TYPE_P (type)))
9767 return fold_build2_loc (loc, PLUS_EXPR, type,
9768 fold_convert_loc (loc, type, arg0),
9769 negate_expr (op1));
9771 /* Fold &a[i] - &a[j] to i-j. */
9772 if (TREE_CODE (arg0) == ADDR_EXPR
9773 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9774 && TREE_CODE (arg1) == ADDR_EXPR
9775 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9777 tree tem = fold_addr_of_array_ref_difference (loc, type,
9778 TREE_OPERAND (arg0, 0),
9779 TREE_OPERAND (arg1, 0));
9780 if (tem)
9781 return tem;
9784 if (FLOAT_TYPE_P (type)
9785 && flag_unsafe_math_optimizations
9786 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9787 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9788 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9789 return tem;
9791 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9792 one. Make sure the type is not saturating and has the signedness of
9793 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9794 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9795 if ((TREE_CODE (arg0) == MULT_EXPR
9796 || TREE_CODE (arg1) == MULT_EXPR)
9797 && !TYPE_SATURATING (type)
9798 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9799 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9800 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9802 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9803 if (tem)
9804 return tem;
9807 goto associate;
9809 case MULT_EXPR:
9810 if (! FLOAT_TYPE_P (type))
9812 /* Transform x * -C into -x * C if x is easily negatable. */
9813 if (TREE_CODE (op1) == INTEGER_CST
9814 && tree_int_cst_sgn (op1) == -1
9815 && negate_expr_p (op0)
9816 && (tem = negate_expr (op1)) != op1
9817 && ! TREE_OVERFLOW (tem))
9818 return fold_build2_loc (loc, MULT_EXPR, type,
9819 fold_convert_loc (loc, type,
9820 negate_expr (op0)), tem);
9822 strict_overflow_p = false;
9823 if (TREE_CODE (arg1) == INTEGER_CST
9824 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9825 &strict_overflow_p)))
9827 if (strict_overflow_p)
9828 fold_overflow_warning (("assuming signed overflow does not "
9829 "occur when simplifying "
9830 "multiplication"),
9831 WARN_STRICT_OVERFLOW_MISC);
9832 return fold_convert_loc (loc, type, tem);
9835 /* Optimize z * conj(z) for integer complex numbers. */
9836 if (TREE_CODE (arg0) == CONJ_EXPR
9837 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9838 return fold_mult_zconjz (loc, type, arg1);
9839 if (TREE_CODE (arg1) == CONJ_EXPR
9840 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9841 return fold_mult_zconjz (loc, type, arg0);
9843 else
9845 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9846 This is not the same for NaNs or if signed zeros are
9847 involved. */
9848 if (!HONOR_NANS (arg0)
9849 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9850 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9851 && TREE_CODE (arg1) == COMPLEX_CST
9852 && real_zerop (TREE_REALPART (arg1)))
9854 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9855 if (real_onep (TREE_IMAGPART (arg1)))
9856 return
9857 fold_build2_loc (loc, COMPLEX_EXPR, type,
9858 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9859 rtype, arg0)),
9860 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9861 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9862 return
9863 fold_build2_loc (loc, COMPLEX_EXPR, type,
9864 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9865 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9866 rtype, arg0)));
9869 /* Optimize z * conj(z) for floating point complex numbers.
9870 Guarded by flag_unsafe_math_optimizations as non-finite
9871 imaginary components don't produce scalar results. */
9872 if (flag_unsafe_math_optimizations
9873 && TREE_CODE (arg0) == CONJ_EXPR
9874 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9875 return fold_mult_zconjz (loc, type, arg1);
9876 if (flag_unsafe_math_optimizations
9877 && TREE_CODE (arg1) == CONJ_EXPR
9878 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9879 return fold_mult_zconjz (loc, type, arg0);
9881 goto associate;
9883 case BIT_IOR_EXPR:
9884 /* Canonicalize (X & C1) | C2. */
9885 if (TREE_CODE (arg0) == BIT_AND_EXPR
9886 && TREE_CODE (arg1) == INTEGER_CST
9887 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9889 int width = TYPE_PRECISION (type), w;
9890 wide_int c1 = TREE_OPERAND (arg0, 1);
9891 wide_int c2 = arg1;
9893 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9894 if ((c1 & c2) == c1)
9895 return omit_one_operand_loc (loc, type, arg1,
9896 TREE_OPERAND (arg0, 0));
9898 wide_int msk = wi::mask (width, false,
9899 TYPE_PRECISION (TREE_TYPE (arg1)));
9901 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9902 if (msk.and_not (c1 | c2) == 0)
9904 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9905 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9908 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9909 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9910 mode which allows further optimizations. */
9911 c1 &= msk;
9912 c2 &= msk;
9913 wide_int c3 = c1.and_not (c2);
9914 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9916 wide_int mask = wi::mask (w, false,
9917 TYPE_PRECISION (type));
9918 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9920 c3 = mask;
9921 break;
9925 if (c3 != c1)
9927 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9928 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9929 wide_int_to_tree (type, c3));
9930 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9934 /* See if this can be simplified into a rotate first. If that
9935 is unsuccessful continue in the association code. */
9936 goto bit_rotate;
9938 case BIT_XOR_EXPR:
9939 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9940 if (TREE_CODE (arg0) == BIT_AND_EXPR
9941 && INTEGRAL_TYPE_P (type)
9942 && integer_onep (TREE_OPERAND (arg0, 1))
9943 && integer_onep (arg1))
9944 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9945 build_zero_cst (TREE_TYPE (arg0)));
9947 /* See if this can be simplified into a rotate first. If that
9948 is unsuccessful continue in the association code. */
9949 goto bit_rotate;
9951 case BIT_AND_EXPR:
9952 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9954 && INTEGRAL_TYPE_P (type)
9955 && integer_onep (TREE_OPERAND (arg0, 1))
9956 && integer_onep (arg1))
9958 tree tem2;
9959 tem = TREE_OPERAND (arg0, 0);
9960 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9961 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9962 tem, tem2);
9963 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9964 build_zero_cst (TREE_TYPE (tem)));
9966 /* Fold ~X & 1 as (X & 1) == 0. */
9967 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9968 && INTEGRAL_TYPE_P (type)
9969 && integer_onep (arg1))
9971 tree tem2;
9972 tem = TREE_OPERAND (arg0, 0);
9973 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9974 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9975 tem, tem2);
9976 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9977 build_zero_cst (TREE_TYPE (tem)));
9979 /* Fold !X & 1 as X == 0. */
9980 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9981 && integer_onep (arg1))
9983 tem = TREE_OPERAND (arg0, 0);
9984 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9985 build_zero_cst (TREE_TYPE (tem)));
9988 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9989 multiple of 1 << CST. */
9990 if (TREE_CODE (arg1) == INTEGER_CST)
9992 wide_int cst1 = arg1;
9993 wide_int ncst1 = -cst1;
9994 if ((cst1 & ncst1) == ncst1
9995 && multiple_of_p (type, arg0,
9996 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9997 return fold_convert_loc (loc, type, arg0);
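/* Instance of the fold above (hypothetical operands): with CST == 4,
   -(1 << 4) is a mask clearing the low four bits, so

     (x * 48) & -16  =>  x * 48

   because x * 48 is always a multiple of 16.  */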
10000 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10001 bits from CST2. */
10002 if (TREE_CODE (arg1) == INTEGER_CST
10003 && TREE_CODE (arg0) == MULT_EXPR
10004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10006 wide_int warg1 = arg1;
10007 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10009 if (masked == 0)
10010 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10011 arg0, arg1);
10012 else if (masked != warg1)
10014 /* Avoid the transform if arg1 is a mask of some
10015 mode which allows further optimizations. */
10016 int pop = wi::popcount (warg1);
10017 if (!(pop >= BITS_PER_UNIT
10018 && pow2p_hwi (pop)
10019 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10020 return fold_build2_loc (loc, code, type, op0,
10021 wide_int_to_tree (type, masked));
10025 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10026 ((A & N) + B) & M -> (A + B) & M
10027 Similarly if (N & M) == 0,
10028 ((A | N) + B) & M -> (A + B) & M
10029 and for - instead of + (or unary - instead of +)
10030 and/or ^ instead of |.
10031 If B is constant and (B & M) == 0, fold into A & M. */
10032 if (TREE_CODE (arg1) == INTEGER_CST)
10034 wide_int cst1 = arg1;
10035 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10036 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10037 && (TREE_CODE (arg0) == PLUS_EXPR
10038 || TREE_CODE (arg0) == MINUS_EXPR
10039 || TREE_CODE (arg0) == NEGATE_EXPR)
10040 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10041 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10043 tree pmop[2];
10044 int which = 0;
10045 wide_int cst0;
10047 /* Now we know that arg0 is (C + D) or (C - D) or
10048 -C and arg1 (M) is == (1LL << cst) - 1.
10049 Store C into PMOP[0] and D into PMOP[1]. */
10050 pmop[0] = TREE_OPERAND (arg0, 0);
10051 pmop[1] = NULL;
10052 if (TREE_CODE (arg0) != NEGATE_EXPR)
10054 pmop[1] = TREE_OPERAND (arg0, 1);
10055 which = 1;
10058 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10059 which = -1;
10061 for (; which >= 0; which--)
10062 switch (TREE_CODE (pmop[which]))
10064 case BIT_AND_EXPR:
10065 case BIT_IOR_EXPR:
10066 case BIT_XOR_EXPR:
10067 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10068 != INTEGER_CST)
10069 break;
10070 cst0 = TREE_OPERAND (pmop[which], 1);
10071 cst0 &= cst1;
10072 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10074 if (cst0 != cst1)
10075 break;
10077 else if (cst0 != 0)
10078 break;
10079 /* If C or D is of the form (A & N) where
10080 (N & M) == M, or of the form (A | N) or
10081 (A ^ N) where (N & M) == 0, replace it with A. */
10082 pmop[which] = TREE_OPERAND (pmop[which], 0);
10083 break;
10084 case INTEGER_CST:
10085 /* If C or D is a N where (N & M) == 0, it can be
10086 omitted (assumed 0). */
10087 if ((TREE_CODE (arg0) == PLUS_EXPR
10088 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10089 && (cst1 & pmop[which]) == 0)
10090 pmop[which] = NULL;
10091 break;
10092 default:
10093 break;
10096 /* Only build anything new if we optimized one or both arguments
10097 above. */
10098 if (pmop[0] != TREE_OPERAND (arg0, 0)
10099 || (TREE_CODE (arg0) != NEGATE_EXPR
10100 && pmop[1] != TREE_OPERAND (arg0, 1)))
10102 tree utype = TREE_TYPE (arg0);
10103 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10105 /* Perform the operations in a type that has defined
10106 overflow behavior. */
10107 utype = unsigned_type_for (TREE_TYPE (arg0));
10108 if (pmop[0] != NULL)
10109 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10110 if (pmop[1] != NULL)
10111 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10114 if (TREE_CODE (arg0) == NEGATE_EXPR)
10115 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10116 else if (TREE_CODE (arg0) == PLUS_EXPR)
10118 if (pmop[0] != NULL && pmop[1] != NULL)
10119 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10120 pmop[0], pmop[1]);
10121 else if (pmop[0] != NULL)
10122 tem = pmop[0];
10123 else if (pmop[1] != NULL)
10124 tem = pmop[1];
10125 else
10126 return build_int_cst (type, 0);
10128 else if (pmop[0] == NULL)
10129 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10130 else
10131 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10132 pmop[0], pmop[1]);
10133 /* TEM is now the new binary +, - or unary - replacement. */
10134 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10135 fold_convert_loc (loc, utype, arg1));
10136 return fold_convert_loc (loc, type, tem);
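/* Worked instance of the comment above (illustrative): with M == 0xff,

     ((a | 0x100) + b) & 0xff  =>  (a + b) & 0xff

   since 0x100 & 0xff == 0, the IOR cannot influence the low eight bits
   that survive the mask.  */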
10141 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10142 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10143 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10145 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10147 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10148 if (mask == -1)
10149 return
10150 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
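/* Example: for unsigned char c, the widening conversion already zero
   extends, so

     (int) c & 0xff  =>  (int) c

   as the mask covers every bit the narrower type can set.  */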
10153 goto associate;
10155 case RDIV_EXPR:
10156 /* Don't touch a floating-point divide by zero unless the mode
10157 of the constant can represent infinity. */
10158 if (TREE_CODE (arg1) == REAL_CST
10159 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10160 && real_zerop (arg1))
10161 return NULL_TREE;
10163 /* (-A) / (-B) -> A / B */
10164 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10165 return fold_build2_loc (loc, RDIV_EXPR, type,
10166 TREE_OPERAND (arg0, 0),
10167 negate_expr (arg1));
10168 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10169 return fold_build2_loc (loc, RDIV_EXPR, type,
10170 negate_expr (arg0),
10171 TREE_OPERAND (arg1, 0));
10172 return NULL_TREE;
10174 case TRUNC_DIV_EXPR:
10175 /* Fall through */
10177 case FLOOR_DIV_EXPR:
10178 /* Simplify A / (B << N) where A and B are positive and B is
10179 a power of 2, to A >> (N + log2(B)). */
10180 strict_overflow_p = false;
10181 if (TREE_CODE (arg1) == LSHIFT_EXPR
10182 && (TYPE_UNSIGNED (type)
10183 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10185 tree sval = TREE_OPERAND (arg1, 0);
10186 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10188 tree sh_cnt = TREE_OPERAND (arg1, 1);
10189 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10190 wi::exact_log2 (sval));
10192 if (strict_overflow_p)
10193 fold_overflow_warning (("assuming signed overflow does not "
10194 "occur when simplifying A / (B << N)"),
10195 WARN_STRICT_OVERFLOW_MISC);
10197 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10198 sh_cnt, pow2);
10199 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10200 fold_convert_loc (loc, type, arg0), sh_cnt);
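/* Sketch of the division rewrite above (unsigned, so the
   nonnegativity requirement holds trivially):

     unsigned f (unsigned a, unsigned n)
     {
       return a / (4 << n);    => a >> (n + 2)
     }

   since log2 (4) == 2.  */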
10204 /* Fall through */
10206 case ROUND_DIV_EXPR:
10207 case CEIL_DIV_EXPR:
10208 case EXACT_DIV_EXPR:
10209 if (integer_zerop (arg1))
10210 return NULL_TREE;
10212 /* Convert -A / -B to A / B when the type is signed and overflow is
10213 undefined. */
10214 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10215 && TREE_CODE (op0) == NEGATE_EXPR
10216 && negate_expr_p (op1))
10218 if (INTEGRAL_TYPE_P (type))
10219 fold_overflow_warning (("assuming signed overflow does not occur "
10220 "when distributing negation across "
10221 "division"),
10222 WARN_STRICT_OVERFLOW_MISC);
10223 return fold_build2_loc (loc, code, type,
10224 fold_convert_loc (loc, type,
10225 TREE_OPERAND (arg0, 0)),
10226 negate_expr (op1));
10228 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10229 && TREE_CODE (arg1) == NEGATE_EXPR
10230 && negate_expr_p (op0))
10232 if (INTEGRAL_TYPE_P (type))
10233 fold_overflow_warning (("assuming signed overflow does not occur "
10234 "when distributing negation across "
10235 "division"),
10236 WARN_STRICT_OVERFLOW_MISC);
10237 return fold_build2_loc (loc, code, type,
10238 negate_expr (op0),
10239 fold_convert_loc (loc, type,
10240 TREE_OPERAND (arg1, 0)));
10243 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10244 operation, EXACT_DIV_EXPR.
10246 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10247 At one time others generated faster code, it's not clear if they do
10248 after the last round of changes to the DIV code in expmed.c. */
10249 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10250 && multiple_of_p (type, arg0, arg1))
10251 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10252 fold_convert (type, arg0),
10253 fold_convert (type, arg1));
10255 strict_overflow_p = false;
10256 if (TREE_CODE (arg1) == INTEGER_CST
10257 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10258 &strict_overflow_p)))
10260 if (strict_overflow_p)
10261 fold_overflow_warning (("assuming signed overflow does not occur "
10262 "when simplifying division"),
10263 WARN_STRICT_OVERFLOW_MISC);
10264 return fold_convert_loc (loc, type, tem);
10267 return NULL_TREE;
10269 case CEIL_MOD_EXPR:
10270 case FLOOR_MOD_EXPR:
10271 case ROUND_MOD_EXPR:
10272 case TRUNC_MOD_EXPR:
10273 strict_overflow_p = false;
10274 if (TREE_CODE (arg1) == INTEGER_CST
10275 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10276 &strict_overflow_p)))
10278 if (strict_overflow_p)
10279 fold_overflow_warning (("assuming signed overflow does not occur "
10280 "when simplifying modulus"),
10281 WARN_STRICT_OVERFLOW_MISC);
10282 return fold_convert_loc (loc, type, tem);
10285 return NULL_TREE;
10287 case LROTATE_EXPR:
10288 case RROTATE_EXPR:
10289 case RSHIFT_EXPR:
10290 case LSHIFT_EXPR:
10291 /* Since negative shift count is not well-defined,
10292 don't try to compute it in the compiler. */
10293 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10294 return NULL_TREE;
10296 prec = element_precision (type);
10298 /* If we have a rotate of a bit operation with the rotate count and
10299 the second operand of the bit operation both constant,
10300 permute the two operations. */
10301 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10302 && (TREE_CODE (arg0) == BIT_AND_EXPR
10303 || TREE_CODE (arg0) == BIT_IOR_EXPR
10304 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10307 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10308 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10309 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10310 fold_build2_loc (loc, code, type,
10311 arg00, arg1),
10312 fold_build2_loc (loc, code, type,
10313 arg01, arg1));
10316 /* Two consecutive rotates adding up to some integer
10317 multiple of the precision of the type can be ignored. */
10318 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10319 && TREE_CODE (arg0) == RROTATE_EXPR
10320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10321 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10322 prec) == 0)
10323 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
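/* Example of the consecutive-rotate fold above, assuming 32-bit
   precision:

     RROTATE_EXPR <RROTATE_EXPR <x, 12>, 20>  =>  x

   because 12 + 20 == 32, a whole multiple of the precision.  */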
10325 return NULL_TREE;
10327 case MIN_EXPR:
10328 case MAX_EXPR:
10329 goto associate;
10331 case TRUTH_ANDIF_EXPR:
10332 /* Note that the operands of this must be ints
10333 and their values must be 0 or 1.
10334 ("true" is a fixed value perhaps depending on the language.) */
10335 /* If first arg is constant zero, return it. */
10336 if (integer_zerop (arg0))
10337 return fold_convert_loc (loc, type, arg0);
10338 /* FALLTHRU */
10339 case TRUTH_AND_EXPR:
10340 /* If either arg is constant true, drop it. */
10341 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10342 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10343 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10344 /* Preserve sequence points. */
10345 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10346 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10347 /* If second arg is constant zero, result is zero, but first arg
10348 must be evaluated. */
10349 if (integer_zerop (arg1))
10350 return omit_one_operand_loc (loc, type, arg1, arg0);
10351 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10352 case will be handled here. */
10353 if (integer_zerop (arg0))
10354 return omit_one_operand_loc (loc, type, arg0, arg1);
10356 /* !X && X is always false. */
10357 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10358 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10359 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10360 /* X && !X is always false. */
10361 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10362 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10363 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10365 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10366 means A >= Y && A != MAX, but in this case we know that
10367 A < X <= MAX. */
10369 if (!TREE_SIDE_EFFECTS (arg0)
10370 && !TREE_SIDE_EFFECTS (arg1))
10372 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10373 if (tem && !operand_equal_p (tem, arg0, 0))
10374 return fold_build2_loc (loc, code, type, tem, arg1);
10376 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10377 if (tem && !operand_equal_p (tem, arg1, 0))
10378 return fold_build2_loc (loc, code, type, arg0, tem);
10381 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10382 != NULL_TREE)
10383 return tem;
10385 return NULL_TREE;
10387 case TRUTH_ORIF_EXPR:
10388 /* Note that the operands of this must be ints
10389 and their values must be 0 or true.
10390 ("true" is a fixed value perhaps depending on the language.) */
10391 /* If first arg is constant true, return it. */
10392 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10393 return fold_convert_loc (loc, type, arg0);
10394 /* FALLTHRU */
10395 case TRUTH_OR_EXPR:
10396 /* If either arg is constant zero, drop it. */
10397 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10398 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10399 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10400 /* Preserve sequence points. */
10401 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10403 /* If second arg is constant true, result is true, but we must
10404 evaluate first arg. */
10405 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10406 return omit_one_operand_loc (loc, type, arg1, arg0);
10407 /* Likewise for first arg, but note this only occurs here for
10408 TRUTH_OR_EXPR. */
10409 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10410 return omit_one_operand_loc (loc, type, arg0, arg1);
10412 /* !X || X is always true. */
10413 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10414 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10415 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10416 /* X || !X is always true. */
10417 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10418 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10419 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10421 /* (X && !Y) || (!X && Y) is X ^ Y */
10422 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10423 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10425 tree a0, a1, l0, l1, n0, n1;
10427 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10428 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10430 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10431 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10433 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10434 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10436 if ((operand_equal_p (n0, a0, 0)
10437 && operand_equal_p (n1, a1, 0))
10438 || (operand_equal_p (n0, a1, 0)
10439 && operand_equal_p (n1, a0, 0)))
10440 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10443 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10444 != NULL_TREE)
10445 return tem;
10447 return NULL_TREE;
10449 case TRUTH_XOR_EXPR:
10450 /* If the second arg is constant zero, drop it. */
10451 if (integer_zerop (arg1))
10452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10453 /* If the second arg is constant true, this is a logical inversion. */
10454 if (integer_onep (arg1))
10456 tem = invert_truthvalue_loc (loc, arg0);
10457 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10459 /* Identical arguments cancel to zero. */
10460 if (operand_equal_p (arg0, arg1, 0))
10461 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10463 /* !X ^ X is always true. */
10464 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10465 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10466 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10468 /* X ^ !X is always true. */
10469 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10470 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10471 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10473 return NULL_TREE;
10475 case EQ_EXPR:
10476 case NE_EXPR:
10477 STRIP_NOPS (arg0);
10478 STRIP_NOPS (arg1);
10480 tem = fold_comparison (loc, code, type, op0, op1);
10481 if (tem != NULL_TREE)
10482 return tem;
10484 /* bool_var != 1 becomes !bool_var. */
10485 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10486 && code == NE_EXPR)
10487 return fold_convert_loc (loc, type,
10488 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10489 TREE_TYPE (arg0), arg0));
10491 /* bool_var == 0 becomes !bool_var. */
10492 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10493 && code == EQ_EXPR)
10494 return fold_convert_loc (loc, type,
10495 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10496 TREE_TYPE (arg0), arg0));
10498 /* !exp != 0 becomes !exp */
10499 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10500 && code == NE_EXPR)
10501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10503 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10504 if ((TREE_CODE (arg0) == PLUS_EXPR
10505 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10506 || TREE_CODE (arg0) == MINUS_EXPR)
10507 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10508 0)),
10509 arg1, 0)
10510 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10511 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10513 tree val = TREE_OPERAND (arg0, 1);
10514 val = fold_build2_loc (loc, code, type, val,
10515 build_int_cst (TREE_TYPE (val), 0));
10516 return omit_two_operands_loc (loc, type, val,
10517 TREE_OPERAND (arg0, 0), arg1);
10520 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10521 if ((TREE_CODE (arg1) == PLUS_EXPR
10522 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10523 || TREE_CODE (arg1) == MINUS_EXPR)
10524 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10525 0)),
10526 arg0, 0)
10527 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10528 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10530 tree val = TREE_OPERAND (arg1, 1);
10531 val = fold_build2_loc (loc, code, type, val,
10532 build_int_cst (TREE_TYPE (val), 0));
10533 return omit_two_operands_loc (loc, type, val,
10534 TREE_OPERAND (arg1, 0), arg0);
10537 /* If this is an EQ or NE comparison with zero and ARG0 is
10538 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10539 two operations, but the latter can be done in one less insn
10540 on machines that have only two-operand insns or on which a
10541 constant cannot be the first operand. */
10542 if (TREE_CODE (arg0) == BIT_AND_EXPR
10543 && integer_zerop (arg1))
10545 tree arg00 = TREE_OPERAND (arg0, 0);
10546 tree arg01 = TREE_OPERAND (arg0, 1);
10547 if (TREE_CODE (arg00) == LSHIFT_EXPR
10548 && integer_onep (TREE_OPERAND (arg00, 0)))
10550 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10551 arg01, TREE_OPERAND (arg00, 1));
10552 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10553 build_int_cst (TREE_TYPE (arg0), 1));
10554 return fold_build2_loc (loc, code, type,
10555 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10556 arg1);
10558 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10559 && integer_onep (TREE_OPERAND (arg01, 0)))
10561 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10562 arg00, TREE_OPERAND (arg01, 1));
10563 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10564 build_int_cst (TREE_TYPE (arg0), 1));
10565 return fold_build2_loc (loc, code, type,
10566 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10567 arg1);
10571 /* If this is an NE or EQ comparison of zero against the result of a
10572 signed MOD operation whose second operand is a power of 2, make
10573 the MOD operation unsigned since it is simpler and equivalent. */
10574 if (integer_zerop (arg1)
10575 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10576 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10577 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10578 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10579 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10580 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10582 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10583 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10584 fold_convert_loc (loc, newtype,
10585 TREE_OPERAND (arg0, 0)),
10586 fold_convert_loc (loc, newtype,
10587 TREE_OPERAND (arg0, 1)));
10589 return fold_build2_loc (loc, code, type, newmod,
10590 fold_convert_loc (loc, newtype, arg1));
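/* Source-level sketch of the signed-to-unsigned MOD change above:

     (x % 8) == 0   with int x
   becomes
     ((unsigned) x % 8) == 0

   which is equivalent for a power-of-two modulus tested against zero,
   and cheaper to expand.  */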
10593 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10594 C1 is a valid shift constant, and C2 is a power of two, i.e.
10595 a single bit. */
10596 if (TREE_CODE (arg0) == BIT_AND_EXPR
10597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10598 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10599 == INTEGER_CST
10600 && integer_pow2p (TREE_OPERAND (arg0, 1))
10601 && integer_zerop (arg1))
10603 tree itype = TREE_TYPE (arg0);
10604 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10605 prec = TYPE_PRECISION (itype);
10607 /* Check for a valid shift count. */
10608 if (wi::ltu_p (arg001, prec))
10610 tree arg01 = TREE_OPERAND (arg0, 1);
10611 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10612 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10613 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10614 can be rewritten as (X & (C2 << C1)) != 0. */
10615 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10617 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10618 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10619 return fold_build2_loc (loc, code, type, tem,
10620 fold_convert_loc (loc, itype, arg1));
10622 /* Otherwise, for signed (arithmetic) shifts,
10623 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10624 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10625 else if (!TYPE_UNSIGNED (itype))
10626 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10627 arg000, build_int_cst (itype, 0));
10628 /* Otherwise, for unsigned (logical) shifts,
10629 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10630 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10631 else
10632 return omit_one_operand_loc (loc, type,
10633 code == EQ_EXPR ? integer_one_node
10634 : integer_zero_node,
10635 arg000);
10639 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10640 Similarly for NE_EXPR. */
10641 if (TREE_CODE (arg0) == BIT_AND_EXPR
10642 && TREE_CODE (arg1) == INTEGER_CST
10643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10645 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10646 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10647 TREE_OPERAND (arg0, 1));
10648 tree dandnotc
10649 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10650 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10651 notc);
10652 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10653 if (integer_nonzerop (dandnotc))
10654 return omit_one_operand_loc (loc, type, rslt, arg0);
10657 /* If this is a comparison of a field, we may be able to simplify it. */
10658 if ((TREE_CODE (arg0) == COMPONENT_REF
10659 || TREE_CODE (arg0) == BIT_FIELD_REF)
10660 /* Handle the constant case even without -O
10661 to make sure the warnings are given. */
10662 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10664 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10665 if (t1)
10666 return t1;
10669 /* Optimize comparisons of strlen vs zero to a compare of the
10670 first character of the string vs zero. To wit,
10671 strlen(ptr) == 0 => *ptr == 0
10672 strlen(ptr) != 0 => *ptr != 0
10673 Other cases should reduce to one of these two (or a constant)
10674 due to the return value of strlen being unsigned. */
10675 if (TREE_CODE (arg0) == CALL_EXPR
10676 && integer_zerop (arg1))
10678 tree fndecl = get_callee_fndecl (arg0);
10680 if (fndecl
10681 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10682 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10683 && call_expr_nargs (arg0) == 1
10684 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10686 tree iref = build_fold_indirect_ref_loc (loc,
10687 CALL_EXPR_ARG (arg0, 0));
10688 return fold_build2_loc (loc, code, type, iref,
10689 build_int_cst (TREE_TYPE (iref), 0));
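/* The strlen shortcut above at the source level (a sketch):

     strlen (s) == 0  =>  *s == 0
     strlen (s) != 0  =>  *s != 0

   Only the first character matters for a zero/nonzero test, so the
   call disappears.  */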
10693 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10694 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10695 if (TREE_CODE (arg0) == RSHIFT_EXPR
10696 && integer_zerop (arg1)
10697 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10699 tree arg00 = TREE_OPERAND (arg0, 0);
10700 tree arg01 = TREE_OPERAND (arg0, 1);
10701 tree itype = TREE_TYPE (arg00);
10702 if (wi::eq_p (arg01, element_precision (itype) - 1))
10704 if (TYPE_UNSIGNED (itype))
10706 itype = signed_type_for (itype);
10707 arg00 = fold_convert_loc (loc, itype, arg00);
10709 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10710 type, arg00, build_zero_cst (itype));
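/* Example of the sign-bit rewrite above, for 32-bit int x:

     (x >> 31) != 0  =>  x < 0
     (x >> 31) == 0  =>  x >= 0

   For an unsigned X the operand is first converted to the signed
   type so that the comparison against zero tests the sign bit.  */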
10714 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10715 (X & C) == 0 when C is a single bit. */
10716 if (TREE_CODE (arg0) == BIT_AND_EXPR
10717 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10718 && integer_zerop (arg1)
10719 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10721 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10722 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10723 TREE_OPERAND (arg0, 1));
10724 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10725 type, tem,
10726 fold_convert_loc (loc, TREE_TYPE (arg0),
10727 arg1));
10730 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10731 constant C is a power of two, i.e. a single bit. */
10732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10733 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10734 && integer_zerop (arg1)
10735 && integer_pow2p (TREE_OPERAND (arg0, 1))
10736 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10737 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10739 tree arg00 = TREE_OPERAND (arg0, 0);
10740 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10741 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10744 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10745 when C is a power of two, i.e. a single bit. */
10746 if (TREE_CODE (arg0) == BIT_AND_EXPR
10747 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10748 && integer_zerop (arg1)
10749 && integer_pow2p (TREE_OPERAND (arg0, 1))
10750 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10751 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10753 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10754 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10755 arg000, TREE_OPERAND (arg0, 1));
10756 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10757 tem, build_int_cst (TREE_TYPE (tem), 0));
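	  /* Worked example (illustrative only): with C == 0x4,
	     ((x ^ 0x4) & 0x4) == 0 holds iff bit 2 of x is set, so this
	     and the ((x & 0x4) ^ 0x4) form above both fold to
	     (x & 0x4) != 0.  */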
10760 if (integer_zerop (arg1)
10761 && tree_expr_nonzero_p (arg0))
10763 tree res = constant_boolean_node (code == NE_EXPR, type);
10764 return omit_one_operand_loc (loc, type, res, arg0);
10767 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10768 if (TREE_CODE (arg0) == BIT_AND_EXPR
10769 && TREE_CODE (arg1) == BIT_AND_EXPR)
10771 tree arg00 = TREE_OPERAND (arg0, 0);
10772 tree arg01 = TREE_OPERAND (arg0, 1);
10773 tree arg10 = TREE_OPERAND (arg1, 0);
10774 tree arg11 = TREE_OPERAND (arg1, 1);
10775 tree itype = TREE_TYPE (arg0);
10777 if (operand_equal_p (arg01, arg11, 0))
10779 tem = fold_convert_loc (loc, itype, arg10);
10780 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10781 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10782 return fold_build2_loc (loc, code, type, tem,
10783 build_zero_cst (itype));
10785 if (operand_equal_p (arg01, arg10, 0))
10787 tem = fold_convert_loc (loc, itype, arg11);
10788 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10789 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10790 return fold_build2_loc (loc, code, type, tem,
10791 build_zero_cst (itype));
10793 if (operand_equal_p (arg00, arg11, 0))
10795 tem = fold_convert_loc (loc, itype, arg10);
10796 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10797 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10798 return fold_build2_loc (loc, code, type, tem,
10799 build_zero_cst (itype));
10801 if (operand_equal_p (arg00, arg10, 0))
10803 tem = fold_convert_loc (loc, itype, arg11);
10804 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10805 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10806 return fold_build2_loc (loc, code, type, tem,
10807 build_zero_cst (itype));
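	  /* Worked example (illustrative only): (x & 0xf0) == (y & 0xf0)
	     holds iff x and y agree on the masked bits, i.e.
	     ((x ^ y) & 0xf0) == 0; the four cases above merely locate
	     which operand carries the shared mask C.  */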
10811 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10812 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10814 tree arg00 = TREE_OPERAND (arg0, 0);
10815 tree arg01 = TREE_OPERAND (arg0, 1);
10816 tree arg10 = TREE_OPERAND (arg1, 0);
10817 tree arg11 = TREE_OPERAND (arg1, 1);
10818 tree itype = TREE_TYPE (arg0);
10820 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10821 operand_equal_p guarantees no side-effects so we don't need
10822 to use omit_one_operand on Z. */
10823 if (operand_equal_p (arg01, arg11, 0))
10824 return fold_build2_loc (loc, code, type, arg00,
10825 fold_convert_loc (loc, TREE_TYPE (arg00),
10826 arg10));
10827 if (operand_equal_p (arg01, arg10, 0))
10828 return fold_build2_loc (loc, code, type, arg00,
10829 fold_convert_loc (loc, TREE_TYPE (arg00),
10830 arg11));
10831 if (operand_equal_p (arg00, arg11, 0))
10832 return fold_build2_loc (loc, code, type, arg01,
10833 fold_convert_loc (loc, TREE_TYPE (arg01),
10834 arg10));
10835 if (operand_equal_p (arg00, arg10, 0))
10836 return fold_build2_loc (loc, code, type, arg01,
10837 fold_convert_loc (loc, TREE_TYPE (arg01),
10838 arg11));
10840 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10841 if (TREE_CODE (arg01) == INTEGER_CST
10842 && TREE_CODE (arg11) == INTEGER_CST)
10844 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10845 fold_convert_loc (loc, itype, arg11));
10846 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10847 return fold_build2_loc (loc, code, type, tem,
10848 fold_convert_loc (loc, itype, arg10));
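	  /* Worked example (illustrative only): (x ^ 5) == (y ^ 3) holds
	     iff x ^ 5 ^ 3 == y, i.e. (x ^ 6) == y, matching the
	     (X ^ (C1 ^ C2)) op Y form built above.  */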
10852 /* Attempt to simplify equality/inequality comparisons of complex
10853 values. Only lower the comparison if the result is known or
10854 can be simplified to a single scalar comparison. */
10855 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10856 || TREE_CODE (arg0) == COMPLEX_CST)
10857 && (TREE_CODE (arg1) == COMPLEX_EXPR
10858 || TREE_CODE (arg1) == COMPLEX_CST))
10860 tree real0, imag0, real1, imag1;
10861 tree rcond, icond;
10863 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10865 real0 = TREE_OPERAND (arg0, 0);
10866 imag0 = TREE_OPERAND (arg0, 1);
10868 else
10870 real0 = TREE_REALPART (arg0);
10871 imag0 = TREE_IMAGPART (arg0);
10874 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10876 real1 = TREE_OPERAND (arg1, 0);
10877 imag1 = TREE_OPERAND (arg1, 1);
10879 else
10881 real1 = TREE_REALPART (arg1);
10882 imag1 = TREE_IMAGPART (arg1);
10885 rcond = fold_binary_loc (loc, code, type, real0, real1);
10886 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10888 if (integer_zerop (rcond))
10890 if (code == EQ_EXPR)
10891 return omit_two_operands_loc (loc, type, boolean_false_node,
10892 imag0, imag1);
10893 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10895 else
10897 if (code == NE_EXPR)
10898 return omit_two_operands_loc (loc, type, boolean_true_node,
10899 imag0, imag1);
10900 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10904 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10905 if (icond && TREE_CODE (icond) == INTEGER_CST)
10907 if (integer_zerop (icond))
10909 if (code == EQ_EXPR)
10910 return omit_two_operands_loc (loc, type, boolean_false_node,
10911 real0, real1);
10912 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10914 else
10916 if (code == NE_EXPR)
10917 return omit_two_operands_loc (loc, type, boolean_true_node,
10918 real0, real1);
10919 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
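	  /* Illustrative summary (not part of the folder): complex
	     a == b means REALPART (a) == REALPART (b)
	     && IMAGPART (a) == IMAGPART (b), so once either part's
	     comparison folds to a constant the whole test reduces to the
	     other part's comparison, or to a constant outright.  */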
10924 return NULL_TREE;
10926 case LT_EXPR:
10927 case GT_EXPR:
10928 case LE_EXPR:
10929 case GE_EXPR:
10930 tem = fold_comparison (loc, code, type, op0, op1);
10931 if (tem != NULL_TREE)
10932 return tem;
10934 /* Transform comparisons of the form X +- C CMP X. */
10935 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10937 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10938 && !HONOR_SNANS (arg0))
10939 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10942 tree arg01 = TREE_OPERAND (arg0, 1);
10943 enum tree_code code0 = TREE_CODE (arg0);
10944 int is_positive;
10946 if (TREE_CODE (arg01) == REAL_CST)
10947 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10948 else
10949 is_positive = tree_int_cst_sgn (arg01);
10951 /* (X - c) > X becomes false. */
10952 if (code == GT_EXPR
10953 && ((code0 == MINUS_EXPR && is_positive >= 0)
10954 || (code0 == PLUS_EXPR && is_positive <= 0)))
10956 if (TREE_CODE (arg01) == INTEGER_CST
10957 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10958 fold_overflow_warning (("assuming signed overflow does not "
10959 "occur when assuming that (X - c) > X "
10960 "is always false"),
10961 WARN_STRICT_OVERFLOW_ALL);
10962 return constant_boolean_node (0, type);
10965 /* Likewise (X + c) < X becomes false. */
10966 if (code == LT_EXPR
10967 && ((code0 == PLUS_EXPR && is_positive >= 0)
10968 || (code0 == MINUS_EXPR && is_positive <= 0)))
10970 if (TREE_CODE (arg01) == INTEGER_CST
10971 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10972 fold_overflow_warning (("assuming signed overflow does not "
10973 "occur when assuming that "
10974 "(X + c) < X is always false"),
10975 WARN_STRICT_OVERFLOW_ALL);
10976 return constant_boolean_node (0, type);
10979 /* Convert (X - c) <= X to true. */
10980 if (!HONOR_NANS (arg1)
10981 && code == LE_EXPR
10982 && ((code0 == MINUS_EXPR && is_positive >= 0)
10983 || (code0 == PLUS_EXPR && is_positive <= 0)))
10985 if (TREE_CODE (arg01) == INTEGER_CST
10986 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10987 fold_overflow_warning (("assuming signed overflow does not "
10988 "occur when assuming that "
10989 "(X - c) <= X is always true"),
10990 WARN_STRICT_OVERFLOW_ALL);
10991 return constant_boolean_node (1, type);
10994 /* Convert (X + c) >= X to true. */
10995 if (!HONOR_NANS (arg1)
10996 && code == GE_EXPR
10997 && ((code0 == PLUS_EXPR && is_positive >= 0)
10998 || (code0 == MINUS_EXPR && is_positive <= 0)))
11000 if (TREE_CODE (arg01) == INTEGER_CST
11001 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11002 fold_overflow_warning (("assuming signed overflow does not "
11003 "occur when assuming that "
11004 "(X + c) >= X is always true"),
11005 WARN_STRICT_OVERFLOW_ALL);
11006 return constant_boolean_node (1, type);
11009 if (TREE_CODE (arg01) == INTEGER_CST)
11011 /* Convert X + c > X and X - c < X to true for integers. */
11012 if (code == GT_EXPR
11013 && ((code0 == PLUS_EXPR && is_positive > 0)
11014 || (code0 == MINUS_EXPR && is_positive < 0)))
11016 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11017 fold_overflow_warning (("assuming signed overflow does "
11018 "not occur when assuming that "
11019 "(X + c) > X is always true"),
11020 WARN_STRICT_OVERFLOW_ALL);
11021 return constant_boolean_node (1, type);
11024 if (code == LT_EXPR
11025 && ((code0 == MINUS_EXPR && is_positive > 0)
11026 || (code0 == PLUS_EXPR && is_positive < 0)))
11028 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 fold_overflow_warning (("assuming signed overflow does "
11030 "not occur when assuming that "
11031 "(X - c) < X is always true"),
11032 WARN_STRICT_OVERFLOW_ALL);
11033 return constant_boolean_node (1, type);
11036 /* Convert X + c <= X and X - c >= X to false for integers. */
11037 if (code == LE_EXPR
11038 && ((code0 == PLUS_EXPR && is_positive > 0)
11039 || (code0 == MINUS_EXPR && is_positive < 0)))
11041 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11042 fold_overflow_warning (("assuming signed overflow does "
11043 "not occur when assuming that "
11044 "(X + c) <= X is always false"),
11045 WARN_STRICT_OVERFLOW_ALL);
11046 return constant_boolean_node (0, type);
11049 if (code == GE_EXPR
11050 && ((code0 == MINUS_EXPR && is_positive > 0)
11051 || (code0 == PLUS_EXPR && is_positive < 0)))
11053 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11054 fold_overflow_warning (("assuming signed overflow does "
11055 "not occur when assuming that "
11056 "(X - c) >= X is always false"),
11057 WARN_STRICT_OVERFLOW_ALL);
11058 return constant_boolean_node (0, type);
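	      /* Worked example (illustrative only): for signed int x,
		 x + 1 > x can only be false if x + 1 overflows; with
		 signed overflow undefined the folder assumes that never
		 happens, so the comparison folds to the constant 1 after
		 the strict-overflow warning above.  */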
11063 /* If we are comparing an ABS_EXPR with a constant, we can
11064 convert all the cases into explicit comparisons, but they may
11065 well not be faster than doing the ABS and one comparison.
11066 But ABS (X) <= C is a range comparison, which becomes a subtraction
11067 and a comparison, and is probably faster. */
11068 if (code == LE_EXPR
11069 && TREE_CODE (arg1) == INTEGER_CST
11070 && TREE_CODE (arg0) == ABS_EXPR
11071 && ! TREE_SIDE_EFFECTS (arg0)
11072 && (0 != (tem = negate_expr (arg1)))
11073 && TREE_CODE (tem) == INTEGER_CST
11074 && !TREE_OVERFLOW (tem))
11075 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11076 build2 (GE_EXPR, type,
11077 TREE_OPERAND (arg0, 0), tem),
11078 build2 (LE_EXPR, type,
11079 TREE_OPERAND (arg0, 0), arg1));
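      /* Worked example (illustrative only): abs (x) <= 5 becomes the
	 range test x >= -5 && x <= 5, built as the TRUTH_ANDIF_EXPR
	 above.  */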
11081 /* Convert ABS_EXPR<x> >= 0 to true. */
11082 strict_overflow_p = false;
11083 if (code == GE_EXPR
11084 && (integer_zerop (arg1)
11085 || (! HONOR_NANS (arg0)
11086 && real_zerop (arg1)))
11087 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11089 if (strict_overflow_p)
11090 fold_overflow_warning (("assuming signed overflow does not occur "
11091 "when simplifying comparison of "
11092 "absolute value and zero"),
11093 WARN_STRICT_OVERFLOW_CONDITIONAL);
11094 return omit_one_operand_loc (loc, type,
11095 constant_boolean_node (true, type),
11096 arg0);
11099 /* Convert ABS_EXPR<x> < 0 to false. */
11100 strict_overflow_p = false;
11101 if (code == LT_EXPR
11102 && (integer_zerop (arg1) || real_zerop (arg1))
11103 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11105 if (strict_overflow_p)
11106 fold_overflow_warning (("assuming signed overflow does not occur "
11107 "when simplifying comparison of "
11108 "absolute value and zero"),
11109 WARN_STRICT_OVERFLOW_CONDITIONAL);
11110 return omit_one_operand_loc (loc, type,
11111 constant_boolean_node (false, type),
11112 arg0);
11115 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11116 and similarly for >= into !=. */
11117 if ((code == LT_EXPR || code == GE_EXPR)
11118 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11119 && TREE_CODE (arg1) == LSHIFT_EXPR
11120 && integer_onep (TREE_OPERAND (arg1, 0)))
11121 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11122 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11123 TREE_OPERAND (arg1, 1)),
11124 build_zero_cst (TREE_TYPE (arg0)));
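      /* Worked example (illustrative only): for unsigned x,
	 x < (1 << y) holds iff x has no bits set at position y or
	 above, i.e. (x >> y) == 0; likewise x >= (1 << y) becomes
	 (x >> y) != 0.  */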
11126 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11127 otherwise Y might be >= # of bits in X's type and thus e.g.
11128 (unsigned char) (1 << Y) for Y == 15 might be 0.
11129 If the cast is widening, then 1 << Y should have unsigned type,
11130 otherwise if Y is the number of bits in the signed shift type minus 1,
11131 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
11132 might be 0xffffffff80000000. */
11133 if ((code == LT_EXPR || code == GE_EXPR)
11134 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11135 && CONVERT_EXPR_P (arg1)
11136 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11137 && (element_precision (TREE_TYPE (arg1))
11138 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11139 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11140 || (element_precision (TREE_TYPE (arg1))
11141 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11142 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11144 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11145 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11146 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11147 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11148 build_zero_cst (TREE_TYPE (arg0)));
11151 return NULL_TREE;
11153 case UNORDERED_EXPR:
11154 case ORDERED_EXPR:
11155 case UNLT_EXPR:
11156 case UNLE_EXPR:
11157 case UNGT_EXPR:
11158 case UNGE_EXPR:
11159 case UNEQ_EXPR:
11160 case LTGT_EXPR:
11161 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11163 tree targ0 = strip_float_extensions (arg0);
11164 tree targ1 = strip_float_extensions (arg1);
11165 tree newtype = TREE_TYPE (targ0);
11167 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11168 newtype = TREE_TYPE (targ1);
11170 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11171 return fold_build2_loc (loc, code, type,
11172 fold_convert_loc (loc, newtype, targ0),
11173 fold_convert_loc (loc, newtype, targ1));
11176 return NULL_TREE;
11178 case COMPOUND_EXPR:
11179 /* When pedantic, a compound expression can be neither an lvalue
11180 nor an integer constant expression. */
11181 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11182 return NULL_TREE;
11183 /* Don't let (0, 0) be a null pointer constant. */
11184 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11185 : fold_convert_loc (loc, type, arg1);
11186 return pedantic_non_lvalue_loc (loc, tem);
11188 case ASSERT_EXPR:
11189 /* An ASSERT_EXPR should never be passed to fold_binary. */
11190 gcc_unreachable ();
11192 default:
11193 return NULL_TREE;
11194 } /* switch (code) */
11197 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11198 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11199 of GOTO_EXPR. */
11201 static tree
11202 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11204 switch (TREE_CODE (*tp))
11206 case LABEL_EXPR:
11207 return *tp;
11209 case GOTO_EXPR:
11210 *walk_subtrees = 0;
11212 /* fall through */
11214 default:
11215 return NULL_TREE;
11219 /* Return whether the sub-tree ST contains a label which is accessible from
11220 outside the sub-tree. */
11222 static bool
11223 contains_label_p (tree st)
11225 return
11226 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11229 /* Fold a ternary expression of code CODE and type TYPE with operands
11230 OP0, OP1, and OP2. Return the folded expression if folding is
11231 successful. Otherwise, return NULL_TREE. */
11233 tree
11234 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11235 tree op0, tree op1, tree op2)
11237 tree tem;
11238 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11239 enum tree_code_class kind = TREE_CODE_CLASS (code);
11241 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11242 && TREE_CODE_LENGTH (code) == 3);
11244 /* If this is a commutative operation, and OP0 is a constant, move it
11245 to OP1 to reduce the number of tests below. */
11246 if (commutative_ternary_tree_code (code)
11247 && tree_swap_operands_p (op0, op1))
11248 return fold_build3_loc (loc, code, type, op1, op0, op2);
11250 tem = generic_simplify (loc, code, type, op0, op1, op2);
11251 if (tem)
11252 return tem;
11254 /* Strip any conversions that don't change the mode. This is safe
11255 for every expression, except for a comparison expression because
11256 its signedness is derived from its operands. So, in the latter
11257 case, only strip conversions that don't change the signedness.
11259 Note that this is done as an internal manipulation within the
11260 constant folder, in order to find the simplest representation of
11261 the arguments so that their form can be studied. In any case,
11262 the appropriate type conversions should be put back in the tree
11263 that will get out of the constant folder. */
11264 if (op0)
11266 arg0 = op0;
11267 STRIP_NOPS (arg0);
11270 if (op1)
11272 arg1 = op1;
11273 STRIP_NOPS (arg1);
11276 if (op2)
11278 arg2 = op2;
11279 STRIP_NOPS (arg2);
11282 switch (code)
11284 case COMPONENT_REF:
11285 if (TREE_CODE (arg0) == CONSTRUCTOR
11286 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11288 unsigned HOST_WIDE_INT idx;
11289 tree field, value;
11290 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11291 if (field == arg1)
11292 return value;
11294 return NULL_TREE;
11296 case COND_EXPR:
11297 case VEC_COND_EXPR:
11298 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11299 so all simple results must be passed through pedantic_non_lvalue. */
11300 if (TREE_CODE (arg0) == INTEGER_CST)
11302 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11303 tem = integer_zerop (arg0) ? op2 : op1;
11304 /* Only optimize constant conditions when the selected branch
11305 has the same type as the COND_EXPR. This avoids optimizing
11306 away "c ? x : throw", where the throw has a void type.
11307 Also avoid throwing away an operand that contains a label. */
11308 if ((!TREE_SIDE_EFFECTS (unused_op)
11309 || !contains_label_p (unused_op))
11310 && (! VOID_TYPE_P (TREE_TYPE (tem))
11311 || VOID_TYPE_P (type)))
11312 return pedantic_non_lvalue_loc (loc, tem);
11313 return NULL_TREE;
11315 else if (TREE_CODE (arg0) == VECTOR_CST)
11317 if ((TREE_CODE (arg1) == VECTOR_CST
11318 || TREE_CODE (arg1) == CONSTRUCTOR)
11319 && (TREE_CODE (arg2) == VECTOR_CST
11320 || TREE_CODE (arg2) == CONSTRUCTOR))
11322 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11323 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11324 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11325 for (i = 0; i < nelts; i++)
11327 tree val = VECTOR_CST_ELT (arg0, i);
11328 if (integer_all_onesp (val))
11329 sel[i] = i;
11330 else if (integer_zerop (val))
11331 sel[i] = nelts + i;
11332 else /* Currently unreachable. */
11333 return NULL_TREE;
11335 tree t = fold_vec_perm (type, arg1, arg2, sel);
11336 if (t != NULL_TREE)
11337 return t;
11341 /* If we have A op B ? A : C, we may be able to convert this to a
11342 simpler expression, depending on the operation and the values
11343 of B and C. Signed zeros prevent all of these transformations,
11344 for reasons given above each one.
11346 Also try swapping the arguments and inverting the conditional. */
11347 if (COMPARISON_CLASS_P (arg0)
11348 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11349 arg1, TREE_OPERAND (arg0, 1))
11350 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11352 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11353 if (tem)
11354 return tem;
11357 if (COMPARISON_CLASS_P (arg0)
11358 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11359 op2,
11360 TREE_OPERAND (arg0, 1))
11361 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11363 location_t loc0 = expr_location_or (arg0, loc);
11364 tem = fold_invert_truthvalue (loc0, arg0);
11365 if (tem && COMPARISON_CLASS_P (tem))
11367 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11368 if (tem)
11369 return tem;
11373 /* If the second operand is simpler than the third, swap them
11374 since that produces better jump optimization results. */
11375 if (truth_value_p (TREE_CODE (arg0))
11376 && tree_swap_operands_p (op1, op2))
11378 location_t loc0 = expr_location_or (arg0, loc);
11379 /* See if this can be inverted. If it can't, possibly because
11380 it was a floating-point inequality comparison, don't do
11381 anything. */
11382 tem = fold_invert_truthvalue (loc0, arg0);
11383 if (tem)
11384 return fold_build3_loc (loc, code, type, tem, op2, op1);
11387 /* Convert A ? 1 : 0 to simply A. */
11388 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11389 : (integer_onep (op1)
11390 && !VECTOR_TYPE_P (type)))
11391 && integer_zerop (op2)
11392 /* If we try to convert OP0 to our type, the
11393 call to fold will try to move the conversion inside
11394 a COND, which will recurse. In that case, the COND_EXPR
11395 is probably the best choice, so leave it alone. */
11396 && type == TREE_TYPE (arg0))
11397 return pedantic_non_lvalue_loc (loc, arg0);
11399 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11400 over COND_EXPR in cases such as floating point comparisons. */
11401 if (integer_zerop (op1)
11402 && code == COND_EXPR
11403 && integer_onep (op2)
11404 && !VECTOR_TYPE_P (type)
11405 && truth_value_p (TREE_CODE (arg0)))
11406 return pedantic_non_lvalue_loc (loc,
11407 fold_convert_loc (loc, type,
11408 invert_truthvalue_loc (loc,
11409 arg0)));
11411 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11412 if (TREE_CODE (arg0) == LT_EXPR
11413 && integer_zerop (TREE_OPERAND (arg0, 1))
11414 && integer_zerop (op2)
11415 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11417 /* sign_bit_p looks through both zero and sign extensions,
11418 but for this optimization only sign extensions are
11419 usable. */
11420 tree tem2 = TREE_OPERAND (arg0, 0);
11421 while (tem != tem2)
11423 if (TREE_CODE (tem2) != NOP_EXPR
11424 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11426 tem = NULL_TREE;
11427 break;
11429 tem2 = TREE_OPERAND (tem2, 0);
11431 /* sign_bit_p only checks ARG1 bits within A's precision.
11432 If <sign bit of A> has wider type than A, bits outside
11433 of A's precision in <sign bit of A> need to be checked.
11434 If they are all 0, this optimization needs to be done
11435 in unsigned A's type; if they are all 1, in signed A's type;
11436 otherwise this can't be done. */
11437 if (tem
11438 && TYPE_PRECISION (TREE_TYPE (tem))
11439 < TYPE_PRECISION (TREE_TYPE (arg1))
11440 && TYPE_PRECISION (TREE_TYPE (tem))
11441 < TYPE_PRECISION (type))
11443 int inner_width, outer_width;
11444 tree tem_type;
11446 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11447 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11448 if (outer_width > TYPE_PRECISION (type))
11449 outer_width = TYPE_PRECISION (type);
11451 wide_int mask = wi::shifted_mask
11452 (inner_width, outer_width - inner_width, false,
11453 TYPE_PRECISION (TREE_TYPE (arg1)));
11455 wide_int common = mask & arg1;
11456 if (common == mask)
11458 tem_type = signed_type_for (TREE_TYPE (tem));
11459 tem = fold_convert_loc (loc, tem_type, tem);
11461 else if (common == 0)
11463 tem_type = unsigned_type_for (TREE_TYPE (tem));
11464 tem = fold_convert_loc (loc, tem_type, tem);
11466 else
11467 tem = NULL;
11470 if (tem)
11471 return
11472 fold_convert_loc (loc, type,
11473 fold_build2_loc (loc, BIT_AND_EXPR,
11474 TREE_TYPE (tem), tem,
11475 fold_convert_loc (loc,
11476 TREE_TYPE (tem),
11477 arg1)));
11480 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11481 already handled above. */
11482 if (TREE_CODE (arg0) == BIT_AND_EXPR
11483 && integer_onep (TREE_OPERAND (arg0, 1))
11484 && integer_zerop (op2)
11485 && integer_pow2p (arg1))
11487 tree tem = TREE_OPERAND (arg0, 0);
11488 STRIP_NOPS (tem);
11489 if (TREE_CODE (tem) == RSHIFT_EXPR
11490 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11491 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11492 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11493 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11494 fold_convert_loc (loc, type,
11495 TREE_OPERAND (tem, 0)),
11496 op1);
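	  /* Worked example (illustrative only): with N == 3,
	     ((x >> 3) & 1) ? 8 : 0 yields 8 exactly when bit 3 of x is
	     set, which is just x & 8.  */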
11499 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11500 is probably obsolete because the first operand should be a
11501 truth value (that's why we have the two cases above), but let's
11502 leave it in until we can confirm this for all front-ends. */
11503 if (integer_zerop (op2)
11504 && TREE_CODE (arg0) == NE_EXPR
11505 && integer_zerop (TREE_OPERAND (arg0, 1))
11506 && integer_pow2p (arg1)
11507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11508 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11509 arg1, OEP_ONLY_CONST))
11510 return pedantic_non_lvalue_loc (loc,
11511 fold_convert_loc (loc, type,
11512 TREE_OPERAND (arg0, 0)));
11514 /* Disable the transformations below for vectors, since
11515 fold_binary_op_with_conditional_arg may undo them immediately,
11516 yielding an infinite loop. */
11517 if (code == VEC_COND_EXPR)
11518 return NULL_TREE;
11520 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11521 if (integer_zerop (op2)
11522 && truth_value_p (TREE_CODE (arg0))
11523 && truth_value_p (TREE_CODE (arg1))
11524 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11525 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11526 : TRUTH_ANDIF_EXPR,
11527 type, fold_convert_loc (loc, type, arg0), op1);
11529 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11530 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11531 && truth_value_p (TREE_CODE (arg0))
11532 && truth_value_p (TREE_CODE (arg1))
11533 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11535 location_t loc0 = expr_location_or (arg0, loc);
11536 /* Only perform transformation if ARG0 is easily inverted. */
11537 tem = fold_invert_truthvalue (loc0, arg0);
11538 if (tem)
11539 return fold_build2_loc (loc, code == VEC_COND_EXPR
11540 ? BIT_IOR_EXPR
11541 : TRUTH_ORIF_EXPR,
11542 type, fold_convert_loc (loc, type, tem),
11543 op1);
11546 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11547 if (integer_zerop (arg1)
11548 && truth_value_p (TREE_CODE (arg0))
11549 && truth_value_p (TREE_CODE (op2))
11550 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11552 location_t loc0 = expr_location_or (arg0, loc);
11553 /* Only perform transformation if ARG0 is easily inverted. */
11554 tem = fold_invert_truthvalue (loc0, arg0);
11555 if (tem)
11556 return fold_build2_loc (loc, code == VEC_COND_EXPR
11557 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11558 type, fold_convert_loc (loc, type, tem),
11559 op2);
11562 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11563 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11564 && truth_value_p (TREE_CODE (arg0))
11565 && truth_value_p (TREE_CODE (op2))
11566 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11567 return fold_build2_loc (loc, code == VEC_COND_EXPR
11568 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11569 type, fold_convert_loc (loc, type, arg0), op2);
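      /* Illustrative summary (not part of the folder): for truth values
	 a and b the four rewrites above are the identities
	   a ? b : 0 == a && b        a ? b : 1 == !a || b
	   a ? 0 : b == !a && b       a ? 1 : b == a || b
	 with the bitwise forms used for VEC_COND_EXPR.  */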
11571 return NULL_TREE;
11573 case CALL_EXPR:
11574 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11575 of fold_ternary on them. */
11576 gcc_unreachable ();
11578 case BIT_FIELD_REF:
11579 if (TREE_CODE (arg0) == VECTOR_CST
11580 && (type == TREE_TYPE (TREE_TYPE (arg0))
11581 || (TREE_CODE (type) == VECTOR_TYPE
11582 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11584 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11585 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11586 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11587 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11589 if (n != 0
11590 && (idx % width) == 0
11591 && (n % width) == 0
11592 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11594 idx = idx / width;
11595 n = n / width;
11597 if (TREE_CODE (arg0) == VECTOR_CST)
11599 if (n == 1)
11600 return VECTOR_CST_ELT (arg0, idx);
11602 tree *vals = XALLOCAVEC (tree, n);
11603 for (unsigned i = 0; i < n; ++i)
11604 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11605 return build_vector (type, vals);
11610 /* On constants we can use native encode/interpret to constant
11611 fold (nearly) all BIT_FIELD_REFs. */
11612 if (CONSTANT_CLASS_P (arg0)
11613 && can_native_interpret_type_p (type)
11614 && BITS_PER_UNIT == 8)
11616 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11617 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11618 /* Limit us to a reasonable amount of work. To relax the
11619 other limitations we need bit-shifting of the buffer
11620 and rounding up the size. */
11621 if (bitpos % BITS_PER_UNIT == 0
11622 && bitsize % BITS_PER_UNIT == 0
11623 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11625 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11626 unsigned HOST_WIDE_INT len
11627 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11628 bitpos / BITS_PER_UNIT);
11629 if (len > 0
11630 && len * BITS_PER_UNIT >= bitsize)
11632 tree v = native_interpret_expr (type, b,
11633 bitsize / BITS_PER_UNIT);
11634 if (v)
11635 return v;
11640 return NULL_TREE;
11642 case FMA_EXPR:
11643 /* For integers we can decompose the FMA if possible. */
11644 if (TREE_CODE (arg0) == INTEGER_CST
11645 && TREE_CODE (arg1) == INTEGER_CST)
11646 return fold_build2_loc (loc, PLUS_EXPR, type,
11647 const_binop (MULT_EXPR, arg0, arg1), arg2);
11648 if (integer_zerop (arg2))
11649 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11651 return fold_fma (loc, type, arg0, arg1, arg2);
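      /* Worked example (illustrative only): an integer FMA with
	 constant multiplicands, e.g. FMA (3, 4, x), decomposes to
	 12 + x above, and FMA (a, b, 0) to the plain product a * b.  */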
11653 case VEC_PERM_EXPR:
11654 if (TREE_CODE (arg2) == VECTOR_CST)
11656 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11657 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11658 unsigned char *sel2 = sel + nelts;
11659 bool need_mask_canon = false;
11660 bool need_mask_canon2 = false;
11661 bool all_in_vec0 = true;
11662 bool all_in_vec1 = true;
11663 bool maybe_identity = true;
11664 bool single_arg = (op0 == op1);
11665 bool changed = false;
11667 mask2 = 2 * nelts - 1;
11668 mask = single_arg ? (nelts - 1) : mask2;
11669 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11670 for (i = 0; i < nelts; i++)
11672 tree val = VECTOR_CST_ELT (arg2, i);
11673 if (TREE_CODE (val) != INTEGER_CST)
11674 return NULL_TREE;
11676 /* Make sure that the perm value is in an acceptable
11677 range. */
11678 wide_int t = val;
11679 need_mask_canon |= wi::gtu_p (t, mask);
11680 need_mask_canon2 |= wi::gtu_p (t, mask2);
11681 sel[i] = t.to_uhwi () & mask;
11682 sel2[i] = t.to_uhwi () & mask2;
11684 if (sel[i] < nelts)
11685 all_in_vec1 = false;
11686 else
11687 all_in_vec0 = false;
11689 if ((sel[i] & (nelts-1)) != i)
11690 maybe_identity = false;
11693 if (maybe_identity)
11695 if (all_in_vec0)
11696 return op0;
11697 if (all_in_vec1)
11698 return op1;
11701 if (all_in_vec0)
11702 op1 = op0;
11703 else if (all_in_vec1)
11705 op0 = op1;
11706 for (i = 0; i < nelts; i++)
11707 sel[i] -= nelts;
11708 need_mask_canon = true;
11711 if ((TREE_CODE (op0) == VECTOR_CST
11712 || TREE_CODE (op0) == CONSTRUCTOR)
11713 && (TREE_CODE (op1) == VECTOR_CST
11714 || TREE_CODE (op1) == CONSTRUCTOR))
11716 tree t = fold_vec_perm (type, op0, op1, sel);
11717 if (t != NULL_TREE)
11718 return t;
11721 if (op0 == op1 && !single_arg)
11722 changed = true;
11724 /* Some targets are deficient and fail to expand a single
11725 argument permutation while still allowing an equivalent
11726 2-argument version. */
11727 if (need_mask_canon && arg2 == op2
11728 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11729 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11731 need_mask_canon = need_mask_canon2;
11732 sel = sel2;
11735 if (need_mask_canon && arg2 == op2)
11737 tree *tsel = XALLOCAVEC (tree, nelts);
11738 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11739 for (i = 0; i < nelts; i++)
11740 tsel[i] = build_int_cst (eltype, sel[i]);
11741 op2 = build_vector (TREE_TYPE (arg2), tsel);
11742 changed = true;
11745 if (changed)
11746 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11748 return NULL_TREE;
11750 case BIT_INSERT_EXPR:
11751 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11752 if (TREE_CODE (arg0) == INTEGER_CST
11753 && TREE_CODE (arg1) == INTEGER_CST)
11755 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11756 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11757 wide_int tem = wi::bit_and (arg0,
11758 wi::shifted_mask (bitpos, bitsize, true,
11759 TYPE_PRECISION (type)));
11760 wide_int tem2
11761 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11762 bitsize), bitpos);
11763 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
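	  /* Worked example (illustrative only): inserting the 8-bit
	     constant 0xab at bit position 8 of 0x12345678 first clears
	     bits 8..15 with the inverted shifted mask, giving
	     0x12340078, then ORs in 0xab << 8, producing 0x1234ab78.  */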
11765 else if (TREE_CODE (arg0) == VECTOR_CST
11766 && CONSTANT_CLASS_P (arg1)
11767 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11768 TREE_TYPE (arg1)))
11770 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11771 unsigned HOST_WIDE_INT elsize
11772 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11773 if (bitpos % elsize == 0)
11775 unsigned k = bitpos / elsize;
11776 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11777 return arg0;
11778 else
11780 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11781 memcpy (elts, VECTOR_CST_ELTS (arg0),
11782 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11783 elts[k] = arg1;
11784 return build_vector (type, elts);
11788 return NULL_TREE;
11790 default:
11791 return NULL_TREE;
11792 } /* switch (code) */
11795 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11796 of an array (or vector). */
11798 tree
11799 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11801 tree index_type = NULL_TREE;
11802 offset_int low_bound = 0;
11804 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11806 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11807 if (domain_type && TYPE_MIN_VALUE (domain_type))
11809 /* Static constructors for variably sized objects make no sense. */
11810 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11811 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11812 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11816 if (index_type)
11817 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11818 TYPE_SIGN (index_type));
11820 offset_int index = low_bound - 1;
11821 if (index_type)
11822 index = wi::ext (index, TYPE_PRECISION (index_type),
11823 TYPE_SIGN (index_type));
11825 offset_int max_index;
11826 unsigned HOST_WIDE_INT cnt;
11827 tree cfield, cval;
11829 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11831 /* An array constructor might explicitly set the index, or specify a range,
11832 or leave the index NULL, meaning that it is the next index after the
11833 previous one. */
11834 if (cfield)
11836 if (TREE_CODE (cfield) == INTEGER_CST)
11837 max_index = index = wi::to_offset (cfield);
11838 else
11840 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11841 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11842 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11845 else
11847 index += 1;
11848 if (index_type)
11849 index = wi::ext (index, TYPE_PRECISION (index_type),
11850 TYPE_SIGN (index_type));
11851 max_index = index;
11854 /* Do we have a match? */
11855 if (wi::cmpu (access_index, index) >= 0
11856 && wi::cmpu (access_index, max_index) <= 0)
11857 return cval;
11859 return NULL_TREE;
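/* Usage sketch (hypothetical): for the constructor of
   int a[] = { [0] = 1, [2 ... 4] = 7 }, an ACCESS_INDEX of 3 matches
   the RANGE_EXPR entry and returns the constant 7, while an
   ACCESS_INDEX of 1 matches nothing and returns NULL_TREE.  */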
11862 /* Perform constant folding and related simplification of EXPR.
11863 The related simplifications include x*1 => x, x*0 => 0, etc.,
11864 and application of the associative law.
11865 NOP_EXPR conversions may be removed freely (as long as we
11866 are careful not to change the type of the overall expression).
11867 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11868 but we can constant-fold them if they have constant operands. */
11870 #ifdef ENABLE_FOLD_CHECKING
11871 # define fold(x) fold_1 (x)
11872 static tree fold_1 (tree);
11873 static
11874 #endif
11875 tree
11876 fold (tree expr)
11878 const tree t = expr;
11879 enum tree_code code = TREE_CODE (t);
11880 enum tree_code_class kind = TREE_CODE_CLASS (code);
11881 tree tem;
11882 location_t loc = EXPR_LOCATION (expr);
11884 /* Return right away if a constant. */
11885 if (kind == tcc_constant)
11886 return t;
11888 /* CALL_EXPR-like objects with variable numbers of operands are
11889 treated specially. */
11890 if (kind == tcc_vl_exp)
11892 if (code == CALL_EXPR)
11894 tem = fold_call_expr (loc, expr, false);
11895 return tem ? tem : expr;
11897 return expr;
11900 if (IS_EXPR_CODE_CLASS (kind))
11902 tree type = TREE_TYPE (t);
11903 tree op0, op1, op2;
11905 switch (TREE_CODE_LENGTH (code))
11907 case 1:
11908 op0 = TREE_OPERAND (t, 0);
11909 tem = fold_unary_loc (loc, code, type, op0);
11910 return tem ? tem : expr;
11911 case 2:
11912 op0 = TREE_OPERAND (t, 0);
11913 op1 = TREE_OPERAND (t, 1);
11914 tem = fold_binary_loc (loc, code, type, op0, op1);
11915 return tem ? tem : expr;
11916 case 3:
11917 op0 = TREE_OPERAND (t, 0);
11918 op1 = TREE_OPERAND (t, 1);
11919 op2 = TREE_OPERAND (t, 2);
11920 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11921 return tem ? tem : expr;
11922 default:
11923 break;
11927 switch (code)
11929 case ARRAY_REF:
11931 tree op0 = TREE_OPERAND (t, 0);
11932 tree op1 = TREE_OPERAND (t, 1);
11934 if (TREE_CODE (op1) == INTEGER_CST
11935 && TREE_CODE (op0) == CONSTRUCTOR
11936 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11938 tree val = get_array_ctor_element_at_index (op0,
11939 wi::to_offset (op1));
11940 if (val)
11941 return val;
11944 return t;
11947 /* Return a VECTOR_CST if possible. */
11948 case CONSTRUCTOR:
11950 tree type = TREE_TYPE (t);
11951 if (TREE_CODE (type) != VECTOR_TYPE)
11952 return t;
11954 unsigned i;
11955 tree val;
11956 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11957 if (! CONSTANT_CLASS_P (val))
11958 return t;
11960 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11963 case CONST_DECL:
11964 return fold (DECL_INITIAL (t));
11966 default:
11967 return t;
11968 } /* switch (code) */
11971 #ifdef ENABLE_FOLD_CHECKING
11972 #undef fold
11974 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11975 hash_table<nofree_ptr_hash<const tree_node> > *);
11976 static void fold_check_failed (const_tree, const_tree);
11977 void print_fold_checksum (const_tree);
11979 /* When --enable-checking=fold is in effect, compute a digest of expr
11980 before and after the actual fold call, to verify that fold did not
11981 accidentally change the original expr. */
11983 tree
11984 fold (tree expr)
11986 tree ret;
11987 struct md5_ctx ctx;
11988 unsigned char checksum_before[16], checksum_after[16];
11989 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11991 md5_init_ctx (&ctx);
11992 fold_checksum_tree (expr, &ctx, &ht);
11993 md5_finish_ctx (&ctx, checksum_before);
11994 ht.empty ();
11996 ret = fold_1 (expr);
11998 md5_init_ctx (&ctx);
11999 fold_checksum_tree (expr, &ctx, &ht);
12000 md5_finish_ctx (&ctx, checksum_after);
12002 if (memcmp (checksum_before, checksum_after, 16))
12003 fold_check_failed (expr, ret);
12005 return ret;
12008 void
12009 print_fold_checksum (const_tree expr)
12011 struct md5_ctx ctx;
12012 unsigned char checksum[16], cnt;
12013 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12015 md5_init_ctx (&ctx);
12016 fold_checksum_tree (expr, &ctx, &ht);
12017 md5_finish_ctx (&ctx, checksum);
12018 for (cnt = 0; cnt < 16; ++cnt)
12019 fprintf (stderr, "%02x", checksum[cnt]);
12020 putc ('\n', stderr);
12023 static void
12024 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12026 internal_error ("fold check: original tree changed by fold");
12029 static void
12030 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12031 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12033 const tree_node **slot;
12034 enum tree_code code;
12035 union tree_node buf;
12036 int i, len;
12038 recursive_label:
12039 if (expr == NULL)
12040 return;
12041 slot = ht->find_slot (expr, INSERT);
12042 if (*slot != NULL)
12043 return;
12044 *slot = expr;
12045 code = TREE_CODE (expr);
12046 if (TREE_CODE_CLASS (code) == tcc_declaration
12047 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12049 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12050 memcpy ((char *) &buf, expr, tree_size (expr));
12051 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12052 buf.decl_with_vis.symtab_node = NULL;
12053 expr = (tree) &buf;
12055 else if (TREE_CODE_CLASS (code) == tcc_type
12056 && (TYPE_POINTER_TO (expr)
12057 || TYPE_REFERENCE_TO (expr)
12058 || TYPE_CACHED_VALUES_P (expr)
12059 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12060 || TYPE_NEXT_VARIANT (expr)
12061 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12063 /* Allow these fields to be modified. */
12064 tree tmp;
12065 memcpy ((char *) &buf, expr, tree_size (expr));
12066 expr = tmp = (tree) &buf;
12067 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12068 TYPE_POINTER_TO (tmp) = NULL;
12069 TYPE_REFERENCE_TO (tmp) = NULL;
12070 TYPE_NEXT_VARIANT (tmp) = NULL;
12071 TYPE_ALIAS_SET (tmp) = -1;
12072 if (TYPE_CACHED_VALUES_P (tmp))
12074 TYPE_CACHED_VALUES_P (tmp) = 0;
12075 TYPE_CACHED_VALUES (tmp) = NULL;
12078 md5_process_bytes (expr, tree_size (expr), ctx);
12079 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12080 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12081 if (TREE_CODE_CLASS (code) != tcc_type
12082 && TREE_CODE_CLASS (code) != tcc_declaration
12083 && code != TREE_LIST
12084 && code != SSA_NAME
12085 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12086 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12087 switch (TREE_CODE_CLASS (code))
12089 case tcc_constant:
12090 switch (code)
12092 case STRING_CST:
12093 md5_process_bytes (TREE_STRING_POINTER (expr),
12094 TREE_STRING_LENGTH (expr), ctx);
12095 break;
12096 case COMPLEX_CST:
12097 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12098 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12099 break;
12100 case VECTOR_CST:
12101 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12102 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12103 break;
12104 default:
12105 break;
12107 break;
12108 case tcc_exceptional:
12109 switch (code)
12111 case TREE_LIST:
12112 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12113 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12114 expr = TREE_CHAIN (expr);
12115 goto recursive_label;
12116 break;
12117 case TREE_VEC:
12118 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12119 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12120 break;
12121 default:
12122 break;
12124 break;
12125 case tcc_expression:
12126 case tcc_reference:
12127 case tcc_comparison:
12128 case tcc_unary:
12129 case tcc_binary:
12130 case tcc_statement:
12131 case tcc_vl_exp:
12132 len = TREE_OPERAND_LENGTH (expr);
12133 for (i = 0; i < len; ++i)
12134 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12135 break;
12136 case tcc_declaration:
12137 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12138 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12139 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12141 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12142 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12143 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12144 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12145 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12148 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12150 if (TREE_CODE (expr) == FUNCTION_DECL)
12152 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12153 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12155 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12157 break;
12158 case tcc_type:
12159 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12160 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12161 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12162 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12163 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12164 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12165 if (INTEGRAL_TYPE_P (expr)
12166 || SCALAR_FLOAT_TYPE_P (expr))
12168 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12169 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12171 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12172 if (TREE_CODE (expr) == RECORD_TYPE
12173 || TREE_CODE (expr) == UNION_TYPE
12174 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12175 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12176 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12177 break;
12178 default:
12179 break;
12183 /* Helper function for outputting the checksum of a tree T. When
12184 debugging with gdb, you can "define mynext" to be "next" followed
12185 by "call debug_fold_checksum (op0)", then just trace down till the
12186 outputs differ. */
12188 DEBUG_FUNCTION void
12189 debug_fold_checksum (const_tree t)
12191 int i;
12192 unsigned char checksum[16];
12193 struct md5_ctx ctx;
12194 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12196 md5_init_ctx (&ctx);
12197 fold_checksum_tree (t, &ctx, &ht);
12198 md5_finish_ctx (&ctx, checksum);
12199 ht.empty ();
12201 for (i = 0; i < 16; i++)
12202 fprintf (stderr, "%d ", checksum[i]);
12204 fprintf (stderr, "\n");
12207 #endif
12209 /* Fold a unary tree expression with code CODE of type TYPE with an
12210 operand OP0. LOC is the location of the resulting expression.
12211 Return a folded expression if successful. Otherwise, return a tree
12212 expression with code CODE of type TYPE with an operand OP0. */
12214 tree
12215 fold_build1_stat_loc (location_t loc,
12216 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12218 tree tem;
12219 #ifdef ENABLE_FOLD_CHECKING
12220 unsigned char checksum_before[16], checksum_after[16];
12221 struct md5_ctx ctx;
12222 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12224 md5_init_ctx (&ctx);
12225 fold_checksum_tree (op0, &ctx, &ht);
12226 md5_finish_ctx (&ctx, checksum_before);
12227 ht.empty ();
12228 #endif
12230 tem = fold_unary_loc (loc, code, type, op0);
12231 if (!tem)
12232 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12234 #ifdef ENABLE_FOLD_CHECKING
12235 md5_init_ctx (&ctx);
12236 fold_checksum_tree (op0, &ctx, &ht);
12237 md5_finish_ctx (&ctx, checksum_after);
12239 if (memcmp (checksum_before, checksum_after, 16))
12240 fold_check_failed (op0, tem);
12241 #endif
12242 return tem;
12245 /* Fold a binary tree expression with code CODE of type TYPE with
12246 operands OP0 and OP1. LOC is the location of the resulting
12247 expression. Return a folded expression if successful. Otherwise,
12248 return a tree expression with code CODE of type TYPE with operands
12249 OP0 and OP1. */
12251 tree
12252 fold_build2_stat_loc (location_t loc,
12253 enum tree_code code, tree type, tree op0, tree op1
12254 MEM_STAT_DECL)
12256 tree tem;
12257 #ifdef ENABLE_FOLD_CHECKING
12258 unsigned char checksum_before_op0[16],
12259 checksum_before_op1[16],
12260 checksum_after_op0[16],
12261 checksum_after_op1[16];
12262 struct md5_ctx ctx;
12263 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12265 md5_init_ctx (&ctx);
12266 fold_checksum_tree (op0, &ctx, &ht);
12267 md5_finish_ctx (&ctx, checksum_before_op0);
12268 ht.empty ();
12270 md5_init_ctx (&ctx);
12271 fold_checksum_tree (op1, &ctx, &ht);
12272 md5_finish_ctx (&ctx, checksum_before_op1);
12273 ht.empty ();
12274 #endif
12276 tem = fold_binary_loc (loc, code, type, op0, op1);
12277 if (!tem)
12278 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12280 #ifdef ENABLE_FOLD_CHECKING
12281 md5_init_ctx (&ctx);
12282 fold_checksum_tree (op0, &ctx, &ht);
12283 md5_finish_ctx (&ctx, checksum_after_op0);
12284 ht.empty ();
12286 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12287 fold_check_failed (op0, tem);
12289 md5_init_ctx (&ctx);
12290 fold_checksum_tree (op1, &ctx, &ht);
12291 md5_finish_ctx (&ctx, checksum_after_op1);
12293 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12294 fold_check_failed (op1, tem);
12295 #endif
12296 return tem;
12299 /* Fold a ternary tree expression with code CODE of type TYPE with
12300 operands OP0, OP1, and OP2. Return a folded expression if
12301 successful. Otherwise, return a tree expression with code CODE of
12302 type TYPE with operands OP0, OP1, and OP2. */
12304 tree
12305 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12306 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12308 tree tem;
12309 #ifdef ENABLE_FOLD_CHECKING
12310 unsigned char checksum_before_op0[16],
12311 checksum_before_op1[16],
12312 checksum_before_op2[16],
12313 checksum_after_op0[16],
12314 checksum_after_op1[16],
12315 checksum_after_op2[16];
12316 struct md5_ctx ctx;
12317 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12319 md5_init_ctx (&ctx);
12320 fold_checksum_tree (op0, &ctx, &ht);
12321 md5_finish_ctx (&ctx, checksum_before_op0);
12322 ht.empty ();
12324 md5_init_ctx (&ctx);
12325 fold_checksum_tree (op1, &ctx, &ht);
12326 md5_finish_ctx (&ctx, checksum_before_op1);
12327 ht.empty ();
12329 md5_init_ctx (&ctx);
12330 fold_checksum_tree (op2, &ctx, &ht);
12331 md5_finish_ctx (&ctx, checksum_before_op2);
12332 ht.empty ();
12333 #endif
12335 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12336 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12337 if (!tem)
12338 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12340 #ifdef ENABLE_FOLD_CHECKING
12341 md5_init_ctx (&ctx);
12342 fold_checksum_tree (op0, &ctx, &ht);
12343 md5_finish_ctx (&ctx, checksum_after_op0);
12344 ht.empty ();
12346 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12347 fold_check_failed (op0, tem);
12349 md5_init_ctx (&ctx);
12350 fold_checksum_tree (op1, &ctx, &ht);
12351 md5_finish_ctx (&ctx, checksum_after_op1);
12352 ht.empty ();
12354 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12355 fold_check_failed (op1, tem);
12357 md5_init_ctx (&ctx);
12358 fold_checksum_tree (op2, &ctx, &ht);
12359 md5_finish_ctx (&ctx, checksum_after_op2);
12361 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12362 fold_check_failed (op2, tem);
12363 #endif
12364 return tem;
12367 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12368 arguments in ARGARRAY, and a null static chain.
12369 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12370 of type TYPE from the given operands as constructed by build_call_array. */
12372 tree
12373 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12374 int nargs, tree *argarray)
12376 tree tem;
12377 #ifdef ENABLE_FOLD_CHECKING
12378 unsigned char checksum_before_fn[16],
12379 checksum_before_arglist[16],
12380 checksum_after_fn[16],
12381 checksum_after_arglist[16];
12382 struct md5_ctx ctx;
12383 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12384 int i;
12386 md5_init_ctx (&ctx);
12387 fold_checksum_tree (fn, &ctx, &ht);
12388 md5_finish_ctx (&ctx, checksum_before_fn);
12389 ht.empty ();
12391 md5_init_ctx (&ctx);
12392 for (i = 0; i < nargs; i++)
12393 fold_checksum_tree (argarray[i], &ctx, &ht);
12394 md5_finish_ctx (&ctx, checksum_before_arglist);
12395 ht.empty ();
12396 #endif
12398 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12399 if (!tem)
12400 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12402 #ifdef ENABLE_FOLD_CHECKING
12403 md5_init_ctx (&ctx);
12404 fold_checksum_tree (fn, &ctx, &ht);
12405 md5_finish_ctx (&ctx, checksum_after_fn);
12406 ht.empty ();
12408 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12409 fold_check_failed (fn, tem);
12411 md5_init_ctx (&ctx);
12412 for (i = 0; i < nargs; i++)
12413 fold_checksum_tree (argarray[i], &ctx, &ht);
12414 md5_finish_ctx (&ctx, checksum_after_arglist);
12416 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12417 fold_check_failed (NULL_TREE, tem);
12418 #endif
12419 return tem;
12422 /* Perform constant folding and related simplification of initializer
12423 expression EXPR. These behave identically to "fold_buildN" but ignore
12424 potential run-time traps and exceptions that fold must preserve. */
12426 #define START_FOLD_INIT \
12427 int saved_signaling_nans = flag_signaling_nans;\
12428 int saved_trapping_math = flag_trapping_math;\
12429 int saved_rounding_math = flag_rounding_math;\
12430 int saved_trapv = flag_trapv;\
12431 int saved_folding_initializer = folding_initializer;\
12432 flag_signaling_nans = 0;\
12433 flag_trapping_math = 0;\
12434 flag_rounding_math = 0;\
12435 flag_trapv = 0;\
12436 folding_initializer = 1;
12438 #define END_FOLD_INIT \
12439 flag_signaling_nans = saved_signaling_nans;\
12440 flag_trapping_math = saved_trapping_math;\
12441 flag_rounding_math = saved_rounding_math;\
12442 flag_trapv = saved_trapv;\
12443 folding_initializer = saved_folding_initializer;
12445 tree
12446 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12447 tree type, tree op)
12449 tree result;
12450 START_FOLD_INIT;
12452 result = fold_build1_loc (loc, code, type, op);
12454 END_FOLD_INIT;
12455 return result;
12458 tree
12459 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12460 tree type, tree op0, tree op1)
12462 tree result;
12463 START_FOLD_INIT;
12465 result = fold_build2_loc (loc, code, type, op0, op1);
12467 END_FOLD_INIT;
12468 return result;
12471 tree
12472 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12473 int nargs, tree *argarray)
12475 tree result;
12476 START_FOLD_INIT;
12478 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12480 END_FOLD_INIT;
12481 return result;
12484 #undef START_FOLD_INIT
12485 #undef END_FOLD_INIT
12487 /* Determine whether the first argument is a multiple of the second argument.
12488 Return 0 if it is not, or if we cannot easily determine that it is.
12490 An example of the sort of thing we care about (at this point; this routine
12491 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12492 fold cases do now) is discovering that
12494 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12496 is a multiple of
12498 SAVE_EXPR (J * 8)
12500 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12502 This code also handles discovering that
12504 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12506 is a multiple of 8 so we don't have to worry about dealing with a
12507 possible remainder.
12509 Note that we *look* inside a SAVE_EXPR only to determine how it was
12510 calculated; it is not safe for fold to do much of anything else with the
12511 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12512 at run time. For example, the latter example above *cannot* be implemented
12513 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12514 evaluation time of the original SAVE_EXPR is not necessarily the same at
12515 the time the new expression is evaluated. The only optimization of this
12516 sort that would be valid is changing
12518 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12520 divided by 8 to
12522 SAVE_EXPR (I) * SAVE_EXPR (J)
12524 (where the same SAVE_EXPR (J) is used in the original and the
12525 transformed version). */
12527 int
12528 multiple_of_p (tree type, const_tree top, const_tree bottom)
12530 gimple *stmt;
12531 tree t1, op1, op2;
12533 if (operand_equal_p (top, bottom, 0))
12534 return 1;
12536 if (TREE_CODE (type) != INTEGER_TYPE)
12537 return 0;
12539 switch (TREE_CODE (top))
12541 case BIT_AND_EXPR:
12542 /* Bitwise and provides a power of two multiple. If the mask is
12543 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12544 if (!integer_pow2p (bottom))
12545 return 0;
12546 /* FALLTHRU */
12548 case MULT_EXPR:
12549 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12550 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12552 case MINUS_EXPR:
12553 /* It is impossible to prove if op0 - op1 is multiple of bottom
12554 precisely, so be conservative here checking if both op0 and op1
12555 are multiple of bottom. Note we check the second operand first
12556 since it's usually simpler. */
12557 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12558 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12560 case PLUS_EXPR:
12561 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12562 as op0 - 3 if the expression has unsigned type. For example,
12563 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
12564 op1 = TREE_OPERAND (top, 1);
12565 if (TYPE_UNSIGNED (type)
12566 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12567 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12568 return (multiple_of_p (type, op1, bottom)
12569 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12571 case LSHIFT_EXPR:
12572 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12574 op1 = TREE_OPERAND (top, 1);
12575 /* const_binop may not detect overflow correctly,
12576 so check for it explicitly here. */
12577 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12578 && 0 != (t1 = fold_convert (type,
12579 const_binop (LSHIFT_EXPR,
12580 size_one_node,
12581 op1)))
12582 && !TREE_OVERFLOW (t1))
12583 return multiple_of_p (type, t1, bottom);
12585 return 0;
12587 case NOP_EXPR:
12588 /* Can't handle conversions from non-integral or wider integral type. */
12589 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12590 || (TYPE_PRECISION (type)
12591 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12592 return 0;
12594 /* fall through */
12596 case SAVE_EXPR:
12597 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12599 case COND_EXPR:
12600 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12601 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12603 case INTEGER_CST:
12604 if (TREE_CODE (bottom) != INTEGER_CST
12605 || integer_zerop (bottom)
12606 || (TYPE_UNSIGNED (type)
12607 && (tree_int_cst_sgn (top) < 0
12608 || tree_int_cst_sgn (bottom) < 0)))
12609 return 0;
12610 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12611 SIGNED);
12613 case SSA_NAME:
12614 if (TREE_CODE (bottom) == INTEGER_CST
12615 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12616 && gimple_code (stmt) == GIMPLE_ASSIGN)
12618 enum tree_code code = gimple_assign_rhs_code (stmt);
12620 /* Check for special cases to see if top is defined as multiple
12621 of bottom:
12623 top = (X & ~(bottom - 1)) ; bottom is a power of 2, or
12627 Y = X % bottom
12628 top = X - Y. */
12629 if (code == BIT_AND_EXPR
12630 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12631 && TREE_CODE (op2) == INTEGER_CST
12632 && integer_pow2p (bottom)
12633 && wi::multiple_of_p (wi::to_widest (op2),
12634 wi::to_widest (bottom), UNSIGNED))
12635 return 1;
12637 op1 = gimple_assign_rhs1 (stmt);
12638 if (code == MINUS_EXPR
12639 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12640 && TREE_CODE (op2) == SSA_NAME
12641 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12642 && gimple_code (stmt) == GIMPLE_ASSIGN
12643 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12644 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12645 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12646 return 1;
12649 /* fall through */
12651 default:
12652 return 0;
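/* Illustrative sketch (not part of the build): the arithmetic facts
   multiple_of_p relies on, checked in plain C for bottom == 8.  */
#if 0
#include <assert.h>

static void
multiple_of_8_examples (unsigned x)
{
  assert ((x & ~7u) % 8 == 0);		/* BIT_AND with a multiple-of-8 mask */
  assert ((x - x % 8) % 8 == 0);	/* X - (X % bottom), the SSA_NAME case */
  assert (((x / 8) * 8) % 8 == 0);	/* MULT_EXPR with a multiple operand */
}
#endif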
12656 #define tree_expr_nonnegative_warnv_p(X, Y) \
12657 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12659 #define RECURSE(X) \
12660 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
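/* The _Pragma trick above turns any direct call to
   tree_expr_nonnegative_warnv_p in the remainder of this file into a
   hard compile error, so every recursive query is forced through
   RECURSE and therefore threads DEPTH + 1.  The parenthesized name in
   RECURSE, (tree_expr_nonnegative_warnv_p) (...), suppresses macro
   expansion so the real function can still be reached.  */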
12662 /* Return true if CODE or TYPE is known to be non-negative. */
12664 static bool
12665 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12667 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12668 && truth_value_p (code))
12669 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12670 have a signed:1 type (where the values are -1 and 0). */
12671 return true;
12672 return false;
12675 /* Return true if (CODE OP0) is known to be non-negative. If the return
12676 value is based on the assumption that signed overflow is undefined,
12677 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12678 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12680 bool
12681 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12682 bool *strict_overflow_p, int depth)
12684 if (TYPE_UNSIGNED (type))
12685 return true;
12687 switch (code)
12689 case ABS_EXPR:
12690 /* We can't return 1 if flag_wrapv is set because
12691 ABS_EXPR<INT_MIN> = INT_MIN. */
12692 if (!ANY_INTEGRAL_TYPE_P (type))
12693 return true;
12694 if (TYPE_OVERFLOW_UNDEFINED (type))
12696 *strict_overflow_p = true;
12697 return true;
12699 break;
12701 case NON_LVALUE_EXPR:
12702 case FLOAT_EXPR:
12703 case FIX_TRUNC_EXPR:
12704 return RECURSE (op0);
12706 CASE_CONVERT:
12708 tree inner_type = TREE_TYPE (op0);
12709 tree outer_type = type;
12711 if (TREE_CODE (outer_type) == REAL_TYPE)
12713 if (TREE_CODE (inner_type) == REAL_TYPE)
12714 return RECURSE (op0);
12715 if (INTEGRAL_TYPE_P (inner_type))
12717 if (TYPE_UNSIGNED (inner_type))
12718 return true;
12719 return RECURSE (op0);
12722 else if (INTEGRAL_TYPE_P (outer_type))
12724 if (TREE_CODE (inner_type) == REAL_TYPE)
12725 return RECURSE (op0);
12726 if (INTEGRAL_TYPE_P (inner_type))
12727 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12728 && TYPE_UNSIGNED (inner_type);
12731 break;
12733 default:
12734 return tree_simple_nonnegative_warnv_p (code, type);
12737 /* We don't know sign of `t', so be conservative and return false. */
12738 return false;
12741 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12742 value is based on the assumption that signed overflow is undefined,
12743 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12744 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12746 bool
12747 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12748 tree op1, bool *strict_overflow_p,
12749 int depth)
12751 if (TYPE_UNSIGNED (type))
12752 return true;
12754 switch (code)
12756 case POINTER_PLUS_EXPR:
12757 case PLUS_EXPR:
12758 if (FLOAT_TYPE_P (type))
12759 return RECURSE (op0) && RECURSE (op1);
12761 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12762 both unsigned and at least 2 bits shorter than the result. */
12763 if (TREE_CODE (type) == INTEGER_TYPE
12764 && TREE_CODE (op0) == NOP_EXPR
12765 && TREE_CODE (op1) == NOP_EXPR)
12767 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12768 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12769 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12770 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12772 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12773 TYPE_PRECISION (inner2)) + 1;
12774 return prec < TYPE_PRECISION (type);
12777 break;
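/* Illustrative sketch (not part of the build): why one extra bit of
   headroom makes the sum's sign bit provably clear.  Two zero-extended
   8-bit values sum to at most 510, which needs only 9 bits, comfortably
   inside a 32-bit int, so the result is non-negative.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
zext_plus_example (uint8_t a, uint8_t b)
{
  int sum = (int) a + (int) b;	/* at most 255 + 255 == 510 */
  assert (sum >= 0);
}
#endif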
12779 case MULT_EXPR:
12780 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12782 /* x * x is always non-negative for floating point x
12783 or without overflow. */
12784 if (operand_equal_p (op0, op1, 0)
12785 || (RECURSE (op0) && RECURSE (op1)))
12787 if (ANY_INTEGRAL_TYPE_P (type)
12788 && TYPE_OVERFLOW_UNDEFINED (type))
12789 *strict_overflow_p = true;
12790 return true;
12794 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12795 both unsigned and the sum of their precisions is less than that of the result. */
12796 if (TREE_CODE (type) == INTEGER_TYPE
12797 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12798 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12800 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12801 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12802 : TREE_TYPE (op0);
12803 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12804 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12805 : TREE_TYPE (op1);
12807 bool unsigned0 = TYPE_UNSIGNED (inner0);
12808 bool unsigned1 = TYPE_UNSIGNED (inner1);
12810 if (TREE_CODE (op0) == INTEGER_CST)
12811 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12813 if (TREE_CODE (op1) == INTEGER_CST)
12814 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12816 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12817 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12819 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12820 ? tree_int_cst_min_precision (op0, UNSIGNED)
12821 : TYPE_PRECISION (inner0);
12823 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12824 ? tree_int_cst_min_precision (op1, UNSIGNED)
12825 : TYPE_PRECISION (inner1);
12827 return precision0 + precision1 < TYPE_PRECISION (type);
12830 return false;
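/* Illustrative sketch (not part of the build): the precision bound for
   MULT_EXPR.  An 8-bit value times an 8-bit value needs at most 16
   bits, so in a 32-bit int the product cannot reach the sign bit.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
zext_mult_example (uint8_t a, uint8_t b)
{
  int prod = (int) a * (int) b;	/* at most 255 * 255 == 65025 */
  assert (prod >= 0);
}
#endif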
12832 case BIT_AND_EXPR:
12833 case MAX_EXPR:
12834 return RECURSE (op0) || RECURSE (op1);
12836 case BIT_IOR_EXPR:
12837 case BIT_XOR_EXPR:
12838 case MIN_EXPR:
12839 case RDIV_EXPR:
12840 case TRUNC_DIV_EXPR:
12841 case CEIL_DIV_EXPR:
12842 case FLOOR_DIV_EXPR:
12843 case ROUND_DIV_EXPR:
12844 return RECURSE (op0) && RECURSE (op1);
12846 case TRUNC_MOD_EXPR:
12847 return RECURSE (op0);
12849 case FLOOR_MOD_EXPR:
12850 return RECURSE (op1);
12852 case CEIL_MOD_EXPR:
12853 case ROUND_MOD_EXPR:
12854 default:
12855 return tree_simple_nonnegative_warnv_p (code, type);
12858 /* We don't know sign of `t', so be conservative and return false. */
12859 return false;
12862 /* Return true if T is known to be non-negative. If the return
12863 value is based on the assumption that signed overflow is undefined,
12864 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12865 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12867 bool
12868 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12870 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12871 return true;
12873 switch (TREE_CODE (t))
12875 case INTEGER_CST:
12876 return tree_int_cst_sgn (t) >= 0;
12878 case REAL_CST:
12879 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12881 case FIXED_CST:
12882 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12884 case COND_EXPR:
12885 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12887 case SSA_NAME:
12888 /* Limit the depth of recursion to avoid quadratic behavior.
12889 This is expected to catch almost all occurrences in practice.
12890 If this code misses important cases that unbounded recursion
12891 would not, passes that need this information could be revised
12892 to provide it through dataflow propagation. */
12893 return (!name_registered_for_update_p (t)
12894 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12895 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12896 strict_overflow_p, depth));
12898 default:
12899 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12903 /* Return true if T is known to be non-negative. If the return
12904 value is based on the assumption that signed overflow is undefined,
12905 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12906 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12908 bool
12909 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12910 bool *strict_overflow_p, int depth)
12912 switch (fn)
12914 CASE_CFN_ACOS:
12915 CASE_CFN_ACOSH:
12916 CASE_CFN_CABS:
12917 CASE_CFN_COSH:
12918 CASE_CFN_ERFC:
12919 CASE_CFN_EXP:
12920 CASE_CFN_EXP10:
12921 CASE_CFN_EXP2:
12922 CASE_CFN_FABS:
12923 CASE_CFN_FDIM:
12924 CASE_CFN_HYPOT:
12925 CASE_CFN_POW10:
12926 CASE_CFN_FFS:
12927 CASE_CFN_PARITY:
12928 CASE_CFN_POPCOUNT:
12929 CASE_CFN_CLZ:
12930 CASE_CFN_CLRSB:
12931 case CFN_BUILT_IN_BSWAP32:
12932 case CFN_BUILT_IN_BSWAP64:
12933 /* Always true. */
12934 return true;
12936 CASE_CFN_SQRT:
12937 /* sqrt(-0.0) is -0.0. */
12938 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12939 return true;
12940 return RECURSE (arg0);
12942 CASE_CFN_ASINH:
12943 CASE_CFN_ATAN:
12944 CASE_CFN_ATANH:
12945 CASE_CFN_CBRT:
12946 CASE_CFN_CEIL:
12947 CASE_CFN_ERF:
12948 CASE_CFN_EXPM1:
12949 CASE_CFN_FLOOR:
12950 CASE_CFN_FMOD:
12951 CASE_CFN_FREXP:
12952 CASE_CFN_ICEIL:
12953 CASE_CFN_IFLOOR:
12954 CASE_CFN_IRINT:
12955 CASE_CFN_IROUND:
12956 CASE_CFN_LCEIL:
12957 CASE_CFN_LDEXP:
12958 CASE_CFN_LFLOOR:
12959 CASE_CFN_LLCEIL:
12960 CASE_CFN_LLFLOOR:
12961 CASE_CFN_LLRINT:
12962 CASE_CFN_LLROUND:
12963 CASE_CFN_LRINT:
12964 CASE_CFN_LROUND:
12965 CASE_CFN_MODF:
12966 CASE_CFN_NEARBYINT:
12967 CASE_CFN_RINT:
12968 CASE_CFN_ROUND:
12969 CASE_CFN_SCALB:
12970 CASE_CFN_SCALBLN:
12971 CASE_CFN_SCALBN:
12972 CASE_CFN_SIGNBIT:
12973 CASE_CFN_SIGNIFICAND:
12974 CASE_CFN_SINH:
12975 CASE_CFN_TANH:
12976 CASE_CFN_TRUNC:
12977 /* True if the 1st argument is nonnegative. */
12978 return RECURSE (arg0);
12980 CASE_CFN_FMAX:
12981 /* True if the 1st OR 2nd arguments are nonnegative. */
12982 return RECURSE (arg0) || RECURSE (arg1);
12984 CASE_CFN_FMIN:
12985 /* True if the 1st AND 2nd arguments are nonnegative. */
12986 return RECURSE (arg0) && RECURSE (arg1);
12988 CASE_CFN_COPYSIGN:
12989 /* True if the 2nd argument is nonnegative. */
12990 return RECURSE (arg1);
12992 CASE_CFN_POWI:
12993 /* True if the 1st argument is nonnegative or the second
12994 argument is an even integer. */
12995 if (TREE_CODE (arg1) == INTEGER_CST
12996 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12997 return true;
12998 return RECURSE (arg0);
13000 CASE_CFN_POW:
13001 /* True if the 1st argument is nonnegative or the second
13002 argument is an even integer valued real. */
13003 if (TREE_CODE (arg1) == REAL_CST)
13005 REAL_VALUE_TYPE c;
13006 HOST_WIDE_INT n;
13008 c = TREE_REAL_CST (arg1);
13009 n = real_to_integer (&c);
13010 if ((n & 1) == 0)
13012 REAL_VALUE_TYPE cint;
13013 real_from_integer (&cint, VOIDmode, n, SIGNED);
13014 if (real_identical (&c, &cint))
13015 return true;
13018 return RECURSE (arg0);
13020 default:
13021 break;
13023 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
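/* Illustrative sketch (not part of the build): the CASE_CFN_POW rule in
   action -- a negative base raised to an even integral exponent yields
   a non-negative result, while an odd exponent preserves the sign.  */
#if 0
#include <assert.h>
#include <math.h>

static void
pow_even_example (void)
{
  assert (pow (-2.0, 4.0) == 16.0);	/* even exponent: non-negative */
  assert (pow (-2.0, 3.0) == -8.0);	/* odd exponent: sign preserved */
}
#endif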
13026 /* Return true if T is known to be non-negative. If the return
13027 value is based on the assumption that signed overflow is undefined,
13028 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13029 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13031 static bool
13032 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13034 enum tree_code code = TREE_CODE (t);
13035 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13036 return true;
13038 switch (code)
13040 case TARGET_EXPR:
13042 tree temp = TARGET_EXPR_SLOT (t);
13043 t = TARGET_EXPR_INITIAL (t);
13045 /* If the initializer is non-void, then it's a normal expression
13046 that will be assigned to the slot. */
13047 if (!VOID_TYPE_P (t))
13048 return RECURSE (t);
13050 /* Otherwise, the initializer sets the slot in some way. One common
13051 way is an assignment statement at the end of the initializer. */
13052 while (1)
13054 if (TREE_CODE (t) == BIND_EXPR)
13055 t = expr_last (BIND_EXPR_BODY (t));
13056 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13057 || TREE_CODE (t) == TRY_CATCH_EXPR)
13058 t = expr_last (TREE_OPERAND (t, 0));
13059 else if (TREE_CODE (t) == STATEMENT_LIST)
13060 t = expr_last (t);
13061 else
13062 break;
13064 if (TREE_CODE (t) == MODIFY_EXPR
13065 && TREE_OPERAND (t, 0) == temp)
13066 return RECURSE (TREE_OPERAND (t, 1));
13068 return false;
13071 case CALL_EXPR:
13073 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13074 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13076 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13077 get_call_combined_fn (t),
13078 arg0,
13079 arg1,
13080 strict_overflow_p, depth);
13082 case COMPOUND_EXPR:
13083 case MODIFY_EXPR:
13084 return RECURSE (TREE_OPERAND (t, 1));
13086 case BIND_EXPR:
13087 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13089 case SAVE_EXPR:
13090 return RECURSE (TREE_OPERAND (t, 0));
13092 default:
13093 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13097 #undef RECURSE
13098 #undef tree_expr_nonnegative_warnv_p
13100 /* Return true if T is known to be non-negative. If the return
13101 value is based on the assumption that signed overflow is undefined,
13102 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13103 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13105 bool
13106 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13108 enum tree_code code;
13109 if (t == error_mark_node)
13110 return false;
13112 code = TREE_CODE (t);
13113 switch (TREE_CODE_CLASS (code))
13115 case tcc_binary:
13116 case tcc_comparison:
13117 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13118 TREE_TYPE (t),
13119 TREE_OPERAND (t, 0),
13120 TREE_OPERAND (t, 1),
13121 strict_overflow_p, depth);
13123 case tcc_unary:
13124 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13125 TREE_TYPE (t),
13126 TREE_OPERAND (t, 0),
13127 strict_overflow_p, depth);
13129 case tcc_constant:
13130 case tcc_declaration:
13131 case tcc_reference:
13132 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13134 default:
13135 break;
13138 switch (code)
13140 case TRUTH_AND_EXPR:
13141 case TRUTH_OR_EXPR:
13142 case TRUTH_XOR_EXPR:
13143 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13144 TREE_TYPE (t),
13145 TREE_OPERAND (t, 0),
13146 TREE_OPERAND (t, 1),
13147 strict_overflow_p, depth);
13148 case TRUTH_NOT_EXPR:
13149 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13150 TREE_TYPE (t),
13151 TREE_OPERAND (t, 0),
13152 strict_overflow_p, depth);
13154 case COND_EXPR:
13155 case CONSTRUCTOR:
13156 case OBJ_TYPE_REF:
13157 case ASSERT_EXPR:
13158 case ADDR_EXPR:
13159 case WITH_SIZE_EXPR:
13160 case SSA_NAME:
13161 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13163 default:
13164 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13168 /* Return true if `t' is known to be non-negative. Handle warnings
13169 about undefined signed overflow. */
13171 bool
13172 tree_expr_nonnegative_p (tree t)
13174 bool ret, strict_overflow_p;
13176 strict_overflow_p = false;
13177 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13178 if (strict_overflow_p)
13179 fold_overflow_warning (("assuming signed overflow does not occur when "
13180 "determining that expression is always "
13181 "non-negative"),
13182 WARN_STRICT_OVERFLOW_MISC);
13183 return ret;
13187 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13188 For floating point we further ensure that T is not denormal.
13189 Similar logic is present in nonzero_address in rtlanal.h.
13191 If the return value is based on the assumption that signed overflow
13192 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13193 change *STRICT_OVERFLOW_P. */
13195 bool
13196 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13197 bool *strict_overflow_p)
13199 switch (code)
13201 case ABS_EXPR:
13202 return tree_expr_nonzero_warnv_p (op0,
13203 strict_overflow_p);
13205 case NOP_EXPR:
13207 tree inner_type = TREE_TYPE (op0);
13208 tree outer_type = type;
13210 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13211 && tree_expr_nonzero_warnv_p (op0,
13212 strict_overflow_p));
13214 break;
13216 case NON_LVALUE_EXPR:
13217 return tree_expr_nonzero_warnv_p (op0,
13218 strict_overflow_p);
13220 default:
13221 break;
13224 return false;
13227 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13228 For floating point we further ensure that T is not denormal.
13229 Similar logic is present in nonzero_address in rtlanal.h.
13231 If the return value is based on the assumption that signed overflow
13232 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13233 change *STRICT_OVERFLOW_P. */
13235 bool
13236 tree_binary_nonzero_warnv_p (enum tree_code code,
13237 tree type,
13238 tree op0,
13239 tree op1, bool *strict_overflow_p)
13241 bool sub_strict_overflow_p;
13242 switch (code)
13244 case POINTER_PLUS_EXPR:
13245 case PLUS_EXPR:
13246 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13248 /* In the presence of negative values it is hard
13249 to say anything definite. */
13250 sub_strict_overflow_p = false;
13251 if (!tree_expr_nonnegative_warnv_p (op0,
13252 &sub_strict_overflow_p)
13253 || !tree_expr_nonnegative_warnv_p (op1,
13254 &sub_strict_overflow_p))
13255 return false;
13256 /* One of operands must be positive and the other non-negative. */
13257 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13258 overflows, on a twos-complement machine the sum of two
13259 nonnegative numbers can never be zero. */
13260 return (tree_expr_nonzero_warnv_p (op0,
13261 strict_overflow_p)
13262 || tree_expr_nonzero_warnv_p (op1,
13263 strict_overflow_p));
13265 break;
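/* Illustrative sketch (not part of the build): why the PLUS_EXPR case
   need not set *STRICT_OVERFLOW_P.  Two non-negative 32-bit values are
   each below 2^31, so their true sum is below 2^32 and is congruent to
   zero mod 2^32 only when both are zero; if one is known nonzero, the
   wrapped sum is nonzero too.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
nonzero_sum_example (int32_t a, int32_t b)
{
  if (a >= 0 && b >= 0 && a != 0)
    assert ((uint32_t) a + (uint32_t) b != 0);	/* holds even on wraparound */
}
#endif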
13267 case MULT_EXPR:
13268 if (TYPE_OVERFLOW_UNDEFINED (type))
13270 if (tree_expr_nonzero_warnv_p (op0,
13271 strict_overflow_p)
13272 && tree_expr_nonzero_warnv_p (op1,
13273 strict_overflow_p))
13275 *strict_overflow_p = true;
13276 return true;
13279 break;
13281 case MIN_EXPR:
13282 sub_strict_overflow_p = false;
13283 if (tree_expr_nonzero_warnv_p (op0,
13284 &sub_strict_overflow_p)
13285 && tree_expr_nonzero_warnv_p (op1,
13286 &sub_strict_overflow_p))
13288 if (sub_strict_overflow_p)
13289 *strict_overflow_p = true;
13291 break;
13293 case MAX_EXPR:
13294 sub_strict_overflow_p = false;
13295 if (tree_expr_nonzero_warnv_p (op0,
13296 &sub_strict_overflow_p))
13298 if (sub_strict_overflow_p)
13299 *strict_overflow_p = true;
13301 /* When both operands are nonzero, then MAX must be too. */
13302 if (tree_expr_nonzero_warnv_p (op1,
13303 strict_overflow_p))
13304 return true;
13306 /* MAX where operand 0 is positive is positive. */
13307 return tree_expr_nonnegative_warnv_p (op0,
13308 strict_overflow_p);
13310 /* MAX where operand 1 is positive is positive. */
13311 else if (tree_expr_nonzero_warnv_p (op1,
13312 &sub_strict_overflow_p)
13313 && tree_expr_nonnegative_warnv_p (op1,
13314 &sub_strict_overflow_p))
13316 if (sub_strict_overflow_p)
13317 *strict_overflow_p = true;
13318 return true;
13320 break;
13322 case BIT_IOR_EXPR:
13323 return (tree_expr_nonzero_warnv_p (op1,
13324 strict_overflow_p)
13325 || tree_expr_nonzero_warnv_p (op0,
13326 strict_overflow_p));
13328 default:
13329 break;
13332 return false;
13335 /* Return true when T is an address and is known to be nonzero.
13336 For floating point we further ensure that T is not denormal.
13337 Similar logic is present in nonzero_address in rtlanal.h.
13339 If the return value is based on the assumption that signed overflow
13340 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13341 change *STRICT_OVERFLOW_P. */
13343 bool
13344 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13346 bool sub_strict_overflow_p;
13347 switch (TREE_CODE (t))
13349 case INTEGER_CST:
13350 return !integer_zerop (t);
13352 case ADDR_EXPR:
13354 tree base = TREE_OPERAND (t, 0);
13356 if (!DECL_P (base))
13357 base = get_base_address (base);
13359 if (base && TREE_CODE (base) == TARGET_EXPR)
13360 base = TARGET_EXPR_SLOT (base);
13362 if (!base)
13363 return false;
13365 /* For objects in symbol table check if we know they are non-zero.
13366 Don't do anything for variables and functions before symtab is built;
13367 it is quite possible that they will be declared weak later. */
13368 int nonzero_addr = maybe_nonzero_address (base);
13369 if (nonzero_addr >= 0)
13370 return nonzero_addr;
13372 /* Constants are never weak. */
13373 if (CONSTANT_CLASS_P (base))
13374 return true;
13376 return false;
13379 case COND_EXPR:
13380 sub_strict_overflow_p = false;
13381 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13382 &sub_strict_overflow_p)
13383 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13384 &sub_strict_overflow_p))
13386 if (sub_strict_overflow_p)
13387 *strict_overflow_p = true;
13388 return true;
13390 break;
13392 case SSA_NAME:
13393 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13394 break;
13395 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13397 default:
13398 break;
13400 return false;
13403 #define integer_valued_real_p(X) \
13404 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13406 #define RECURSE(X) \
13407 ((integer_valued_real_p) (X, depth + 1))
13409 /* Return true if the floating point result of (CODE OP0) has an
13410 integer value. We also allow +Inf, -Inf and NaN to be considered
13411 integer values. Return false for signaling NaN.
13413 DEPTH is the current nesting depth of the query. */
13415 bool
13416 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13418 switch (code)
13420 case FLOAT_EXPR:
13421 return true;
13423 case ABS_EXPR:
13424 return RECURSE (op0);
13426 CASE_CONVERT:
13428 tree type = TREE_TYPE (op0);
13429 if (TREE_CODE (type) == INTEGER_TYPE)
13430 return true;
13431 if (TREE_CODE (type) == REAL_TYPE)
13432 return RECURSE (op0);
13433 break;
13436 default:
13437 break;
13439 return false;
13442 /* Return true if the floating point result of (CODE OP0 OP1) has an
13443 integer value. We also allow +Inf, -Inf and NaN to be considered
13444 integer values. Return false for signaling NaN.
13446 DEPTH is the current nesting depth of the query. */
13448 bool
13449 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13451 switch (code)
13453 case PLUS_EXPR:
13454 case MINUS_EXPR:
13455 case MULT_EXPR:
13456 case MIN_EXPR:
13457 case MAX_EXPR:
13458 return RECURSE (op0) && RECURSE (op1);
13460 default:
13461 break;
13463 return false;
13466 /* Return true if the floating point result of calling FN with arguments
13467 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13468 considered integer values. Return false for signaling NaN. If FN
13469 takes fewer than 2 arguments, the remaining ARGn are null.
13471 DEPTH is the current nesting depth of the query. */
13473 bool
13474 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13476 switch (fn)
13478 CASE_CFN_CEIL:
13479 CASE_CFN_FLOOR:
13480 CASE_CFN_NEARBYINT:
13481 CASE_CFN_RINT:
13482 CASE_CFN_ROUND:
13483 CASE_CFN_TRUNC:
13484 return true;
13486 CASE_CFN_FMIN:
13487 CASE_CFN_FMAX:
13488 return RECURSE (arg0) && RECURSE (arg1);
13490 default:
13491 break;
13493 return false;
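/* Illustrative sketch (not part of the build): the rounding functions
   accepted above always produce values that are already integral
   (ignoring NaN and infinities), which is what real_isinteger would
   report for the folded constant.  */
#if 0
#include <assert.h>
#include <math.h>

static void
integer_valued_example (double x)	/* assume x is finite */
{
  assert (floor (x) == trunc (floor (x)));
  assert (ceil (x) == trunc (ceil (x)));
}
#endif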
13496 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13497 has an integer value. We also allow +Inf, -Inf and NaN to be
13498 considered integer values. Return false for signaling NaN.
13500 DEPTH is the current nesting depth of the query. */
13502 bool
13503 integer_valued_real_single_p (tree t, int depth)
13505 switch (TREE_CODE (t))
13507 case REAL_CST:
13508 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13510 case COND_EXPR:
13511 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13513 case SSA_NAME:
13514 /* Limit the depth of recursion to avoid quadratic behavior.
13515 This is expected to catch almost all occurrences in practice.
13516 If this code misses important cases that unbounded recursion
13517 would not, passes that need this information could be revised
13518 to provide it through dataflow propagation. */
13519 return (!name_registered_for_update_p (t)
13520 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13521 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13522 depth));
13524 default:
13525 break;
13527 return false;
13530 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13531 has an integer value. We also allow +Inf, -Inf and NaN to be
13532 considered integer values. Return false for signaling NaN.
13534 DEPTH is the current nesting depth of the query. */
13536 static bool
13537 integer_valued_real_invalid_p (tree t, int depth)
13539 switch (TREE_CODE (t))
13541 case COMPOUND_EXPR:
13542 case MODIFY_EXPR:
13543 case BIND_EXPR:
13544 return RECURSE (TREE_OPERAND (t, 1));
13546 case SAVE_EXPR:
13547 return RECURSE (TREE_OPERAND (t, 0));
13549 default:
13550 break;
13552 return false;
13555 #undef RECURSE
13556 #undef integer_valued_real_p
13558 /* Return true if the floating point expression T has an integer value.
13559 We also allow +Inf, -Inf and NaN to be considered integer values.
13560 Return false for signaling NaN.
13562 DEPTH is the current nesting depth of the query. */
13564 bool
13565 integer_valued_real_p (tree t, int depth)
13567 if (t == error_mark_node)
13568 return false;
13570 tree_code code = TREE_CODE (t);
13571 switch (TREE_CODE_CLASS (code))
13573 case tcc_binary:
13574 case tcc_comparison:
13575 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13576 TREE_OPERAND (t, 1), depth);
13578 case tcc_unary:
13579 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13581 case tcc_constant:
13582 case tcc_declaration:
13583 case tcc_reference:
13584 return integer_valued_real_single_p (t, depth);
13586 default:
13587 break;
13590 switch (code)
13592 case COND_EXPR:
13593 case SSA_NAME:
13594 return integer_valued_real_single_p (t, depth);
13596 case CALL_EXPR:
13598 tree arg0 = (call_expr_nargs (t) > 0
13599 ? CALL_EXPR_ARG (t, 0)
13600 : NULL_TREE);
13601 tree arg1 = (call_expr_nargs (t) > 1
13602 ? CALL_EXPR_ARG (t, 1)
13603 : NULL_TREE);
13604 return integer_valued_real_call_p (get_call_combined_fn (t),
13605 arg0, arg1, depth);
13608 default:
13609 return integer_valued_real_invalid_p (t, depth);
13613 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13614 attempt to fold the expression to a constant without modifying TYPE,
13615 OP0 or OP1.
13617 If the expression could be simplified to a constant, then return
13618 the constant. If the expression would not be simplified to a
13619 constant, then return NULL_TREE. */
13621 tree
13622 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13624 tree tem = fold_binary (code, type, op0, op1);
13625 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13628 /* Given the components of a unary expression CODE, TYPE and OP0,
13629 attempt to fold the expression to a constant without modifying
13630 TYPE or OP0.
13632 If the expression could be simplified to a constant, then return
13633 the constant. If the expression would not be simplified to a
13634 constant, then return NULL_TREE. */
13636 tree
13637 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13639 tree tem = fold_unary (code, type, op0);
13640 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13643 /* If EXP represents referencing an element in a constant string
13644 (either via pointer arithmetic or array indexing), return the
13645 tree representing the value accessed, otherwise return NULL. */
13647 tree
13648 fold_read_from_constant_string (tree exp)
13650 if ((TREE_CODE (exp) == INDIRECT_REF
13651 || TREE_CODE (exp) == ARRAY_REF)
13652 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13654 tree exp1 = TREE_OPERAND (exp, 0);
13655 tree index;
13656 tree string;
13657 location_t loc = EXPR_LOCATION (exp);
13659 if (TREE_CODE (exp) == INDIRECT_REF)
13660 string = string_constant (exp1, &index);
13661 else
13663 tree low_bound = array_ref_low_bound (exp);
13664 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13666 /* Optimize the special-case of a zero lower bound.
13668 We convert the low_bound to sizetype to avoid some problems
13669 with constant folding. (E.g. suppose the lower bound is 1,
13670 and its mode is QI. Without the conversion, (ARRAY
13671 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13672 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13673 if (! integer_zerop (low_bound))
13674 index = size_diffop_loc (loc, index,
13675 fold_convert_loc (loc, sizetype, low_bound));
13677 string = exp1;
13680 if (string
13681 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13682 && TREE_CODE (string) == STRING_CST
13683 && TREE_CODE (index) == INTEGER_CST
13684 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13685 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13686 == MODE_INT)
13687 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13688 return build_int_cst_type (TREE_TYPE (exp),
13689 (TREE_STRING_POINTER (string)
13690 [TREE_INT_CST_LOW (index)]));
13692 return NULL;
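/* Illustrative sketch (not part of the build): the source-level shapes
   this routine folds.  Both accesses read a known byte out of a
   STRING_CST and can be replaced by the character constant 'e'.  */
#if 0
static char
read_via_index (void)
{
  return "hello"[1];		/* array indexing: folds to 'e' */
}

static char
read_via_pointer (void)
{
  return *("hello" + 1);	/* pointer arithmetic: also 'e' */
}
#endif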
13695 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13696 an integer constant, real, or fixed-point constant.
13698 TYPE is the type of the result. */
13700 static tree
13701 fold_negate_const (tree arg0, tree type)
13703 tree t = NULL_TREE;
13705 switch (TREE_CODE (arg0))
13707 case INTEGER_CST:
13709 bool overflow;
13710 wide_int val = wi::neg (arg0, &overflow);
13711 t = force_fit_type (type, val, 1,
13712 (overflow | TREE_OVERFLOW (arg0))
13713 && !TYPE_UNSIGNED (type));
13714 break;
13717 case REAL_CST:
13718 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13719 break;
13721 case FIXED_CST:
13723 FIXED_VALUE_TYPE f;
13724 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13725 &(TREE_FIXED_CST (arg0)), NULL,
13726 TYPE_SATURATING (type));
13727 t = build_fixed (type, f);
13728 /* Propagate overflow flags. */
13729 if (overflow_p | TREE_OVERFLOW (arg0))
13730 TREE_OVERFLOW (t) = 1;
13731 break;
13734 default:
13735 gcc_unreachable ();
13738 return t;
13741 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13742 an integer constant or real constant.
13744 TYPE is the type of the result. */
13746 tree
13747 fold_abs_const (tree arg0, tree type)
13749 tree t = NULL_TREE;
13751 switch (TREE_CODE (arg0))
13753 case INTEGER_CST:
13755 /* If the value is unsigned or non-negative, then the absolute value
13756 is the same as the ordinary value. */
13757 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13758 t = arg0;
13760 /* If the value is negative, then the absolute value is
13761 its negation. */
13762 else
13764 bool overflow;
13765 wide_int val = wi::neg (arg0, &overflow);
13766 t = force_fit_type (type, val, -1,
13767 overflow | TREE_OVERFLOW (arg0));
13770 break;
13772 case REAL_CST:
13773 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13774 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13775 else
13776 t = arg0;
13777 break;
13779 default:
13780 gcc_unreachable ();
13783 return t;
13786 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13787 constant. TYPE is the type of the result. */
13789 static tree
13790 fold_not_const (const_tree arg0, tree type)
13792 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13794 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13797 /* Given CODE, a relational operator, the target type, TYPE and two
13798 constant operands OP0 and OP1, return the result of the
13799 relational operation. If the result is not a compile time
13800 constant, then return NULL_TREE. */
13802 static tree
13803 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13805 int result, invert;
13807 /* From here on, the only cases we handle are when the result is
13808 known to be a constant. */
13810 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13812 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13813 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13815 /* Handle the cases where either operand is a NaN. */
13816 if (real_isnan (c0) || real_isnan (c1))
13818 switch (code)
13820 case EQ_EXPR:
13821 case ORDERED_EXPR:
13822 result = 0;
13823 break;
13825 case NE_EXPR:
13826 case UNORDERED_EXPR:
13827 case UNLT_EXPR:
13828 case UNLE_EXPR:
13829 case UNGT_EXPR:
13830 case UNGE_EXPR:
13831 case UNEQ_EXPR:
13832 result = 1;
13833 break;
13835 case LT_EXPR:
13836 case LE_EXPR:
13837 case GT_EXPR:
13838 case GE_EXPR:
13839 case LTGT_EXPR:
13840 if (flag_trapping_math)
13841 return NULL_TREE;
13842 result = 0;
13843 break;
13845 default:
13846 gcc_unreachable ();
13849 return constant_boolean_node (result, type);
13852 return constant_boolean_node (real_compare (code, c0, c1), type);
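/* Illustrative sketch (not part of the build): the NaN results encoded
   above match C semantics -- ==, <, <=, >, >= are false when either
   operand is a NaN, and != is true.  The ordered comparisons may also
   raise an invalid-operand exception, which is why they are left
   unfolded when -ftrapping-math is in effect.  */
#if 0
#include <assert.h>
#include <math.h>

static void
nan_compare_example (void)
{
  double n = nan ("");
  assert (!(n == n) && !(n < 1.0) && !(n >= 1.0));
  assert (n != n);
}
#endif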
13855 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13857 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13858 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13859 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13862 /* Handle equality/inequality of complex constants. */
13863 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13865 tree rcond = fold_relational_const (code, type,
13866 TREE_REALPART (op0),
13867 TREE_REALPART (op1));
13868 tree icond = fold_relational_const (code, type,
13869 TREE_IMAGPART (op0),
13870 TREE_IMAGPART (op1));
13871 if (code == EQ_EXPR)
13872 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13873 else if (code == NE_EXPR)
13874 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13875 else
13876 return NULL_TREE;
13879 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13881 if (!VECTOR_TYPE_P (type))
13883 /* Have vector comparison with scalar boolean result. */
13884 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13885 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13886 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13888 tree elem0 = VECTOR_CST_ELT (op0, i);
13889 tree elem1 = VECTOR_CST_ELT (op1, i);
13890 tree tmp = fold_relational_const (code, type, elem0, elem1);
13891 if (tmp == NULL_TREE)
13892 return NULL_TREE;
13893 if (integer_zerop (tmp))
13894 return constant_boolean_node (false, type);
13896 return constant_boolean_node (true, type);
13898 unsigned count = VECTOR_CST_NELTS (op0);
13899 tree *elts = XALLOCAVEC (tree, count);
13900 gcc_assert (VECTOR_CST_NELTS (op1) == count
13901 && TYPE_VECTOR_SUBPARTS (type) == count);
13903 for (unsigned i = 0; i < count; i++)
13905 tree elem_type = TREE_TYPE (type);
13906 tree elem0 = VECTOR_CST_ELT (op0, i);
13907 tree elem1 = VECTOR_CST_ELT (op1, i);
13909 tree tem = fold_relational_const (code, elem_type,
13910 elem0, elem1);
13912 if (tem == NULL_TREE)
13913 return NULL_TREE;
13915 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13918 return build_vector (type, elts);
13921 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13923 To compute GT, swap the arguments and do LT.
13924 To compute GE, do LT and invert the result.
13925 To compute LE, swap the arguments, do LT and invert the result.
13926 To compute NE, do EQ and invert the result.
13928 Therefore, the code below must handle only EQ and LT. */
13930 if (code == LE_EXPR || code == GT_EXPR)
13932 std::swap (op0, op1);
13933 code = swap_tree_comparison (code);
13936 /* Note that it is safe to invert for real values here because we
13937 have already handled the one case where it matters. */
13939 invert = 0;
13940 if (code == NE_EXPR || code == GE_EXPR)
13942 invert = 1;
13943 code = invert_tree_comparison (code, false);
13946 /* Compute a result for LT or EQ if args permit;
13947 otherwise return NULL_TREE. */
13948 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13950 if (code == EQ_EXPR)
13951 result = tree_int_cst_equal (op0, op1);
13952 else
13953 result = tree_int_cst_lt (op0, op1);
13955 else
13956 return NULL_TREE;
13958 if (invert)
13959 result ^= 1;
13960 return constant_boolean_node (result, type);
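/* Illustrative sketch (not part of the build): deriving all six integer
   comparisons from LT and EQ with the swap/invert scheme described
   above.  */
#if 0
static int lt (long a, long b) { return a < b; }
static int eq (long a, long b) { return a == b; }

static int gt (long a, long b) { return lt (b, a); }	/* swap */
static int ge (long a, long b) { return !lt (a, b); }	/* invert */
static int le (long a, long b) { return !lt (b, a); }	/* swap + invert */
static int ne (long a, long b) { return !eq (a, b); }	/* invert */
#endif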
13963 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13964 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13965 itself. */
13967 tree
13968 fold_build_cleanup_point_expr (tree type, tree expr)
13970 /* If the expression does not have side effects then we don't have to wrap
13971 it with a cleanup point expression. */
13972 if (!TREE_SIDE_EFFECTS (expr))
13973 return expr;
13975 /* If the expression is a return, check whether the expression inside the
13976 return has side effects, and likewise for the right hand side of a modify
13977 expression inside the return. If either has no side effects, we don't need
13978 to wrap the expression in a cleanup point expression. Note we don't check
13979 the left hand side of the modify because it should always be a return decl. */
13980 if (TREE_CODE (expr) == RETURN_EXPR)
13982 tree op = TREE_OPERAND (expr, 0);
13983 if (!op || !TREE_SIDE_EFFECTS (op))
13984 return expr;
13985 op = TREE_OPERAND (op, 1);
13986 if (!TREE_SIDE_EFFECTS (op))
13987 return expr;
13990 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13993 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13994 of an indirection through OP0, or NULL_TREE if no simplification is
13995 possible. */
13997 tree
13998 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14000 tree sub = op0;
14001 tree subtype;
14003 STRIP_NOPS (sub);
14004 subtype = TREE_TYPE (sub);
14005 if (!POINTER_TYPE_P (subtype)
14006 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14007 return NULL_TREE;
14009 if (TREE_CODE (sub) == ADDR_EXPR)
14011 tree op = TREE_OPERAND (sub, 0);
14012 tree optype = TREE_TYPE (op);
14013 /* *&CONST_DECL -> to the value of the const decl. */
14014 if (TREE_CODE (op) == CONST_DECL)
14015 return DECL_INITIAL (op);
14016 /* *&p => p; make sure to handle *&"str"[cst] here. */
14017 if (type == optype)
14019 tree fop = fold_read_from_constant_string (op);
14020 if (fop)
14021 return fop;
14022 else
14023 return op;
14025 /* *(foo *)&fooarray => fooarray[0] */
14026 else if (TREE_CODE (optype) == ARRAY_TYPE
14027 && type == TREE_TYPE (optype)
14028 && (!in_gimple_form
14029 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14031 tree type_domain = TYPE_DOMAIN (optype);
14032 tree min_val = size_zero_node;
14033 if (type_domain && TYPE_MIN_VALUE (type_domain))
14034 min_val = TYPE_MIN_VALUE (type_domain);
14035 if (in_gimple_form
14036 && TREE_CODE (min_val) != INTEGER_CST)
14037 return NULL_TREE;
14038 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14039 NULL_TREE, NULL_TREE);
14041 /* *(foo *)&complexfoo => __real__ complexfoo */
14042 else if (TREE_CODE (optype) == COMPLEX_TYPE
14043 && type == TREE_TYPE (optype))
14044 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14045 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14046 else if (TREE_CODE (optype) == VECTOR_TYPE
14047 && type == TREE_TYPE (optype))
14049 tree part_width = TYPE_SIZE (type);
14050 tree index = bitsize_int (0);
14051 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14055 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14056 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14058 tree op00 = TREE_OPERAND (sub, 0);
14059 tree op01 = TREE_OPERAND (sub, 1);
14061 STRIP_NOPS (op00);
14062 if (TREE_CODE (op00) == ADDR_EXPR)
14064 tree op00type;
14065 op00 = TREE_OPERAND (op00, 0);
14066 op00type = TREE_TYPE (op00);
14068 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14069 if (TREE_CODE (op00type) == VECTOR_TYPE
14070 && type == TREE_TYPE (op00type))
14072 tree part_width = TYPE_SIZE (type);
14073 unsigned HOST_WIDE_INT max_offset
14074 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14075 * TYPE_VECTOR_SUBPARTS (op00type));
14076 if (tree_int_cst_sign_bit (op01) == 0
14077 && compare_tree_int (op01, max_offset) == -1)
14079 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14080 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14081 tree index = bitsize_int (indexi);
14082 return fold_build3_loc (loc,
14083 BIT_FIELD_REF, type, op00,
14084 part_width, index);
14087 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14088 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14089 && type == TREE_TYPE (op00type))
14091 tree size = TYPE_SIZE_UNIT (type);
14092 if (tree_int_cst_equal (size, op01))
14093 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14095 /* ((foo *)&fooarray)[1] => fooarray[1] */
14096 else if (TREE_CODE (op00type) == ARRAY_TYPE
14097 && type == TREE_TYPE (op00type))
14099 tree type_domain = TYPE_DOMAIN (op00type);
14100 tree min_val = size_zero_node;
14101 if (type_domain && TYPE_MIN_VALUE (type_domain))
14102 min_val = TYPE_MIN_VALUE (type_domain);
14103 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14104 TYPE_SIZE_UNIT (type));
14105 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14106 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14107 NULL_TREE, NULL_TREE);
14112 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14113 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14114 && type == TREE_TYPE (TREE_TYPE (subtype))
14115 && (!in_gimple_form
14116 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14118 tree type_domain;
14119 tree min_val = size_zero_node;
14120 sub = build_fold_indirect_ref_loc (loc, sub);
14121 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14122 if (type_domain && TYPE_MIN_VALUE (type_domain))
14123 min_val = TYPE_MIN_VALUE (type_domain);
14124 if (in_gimple_form
14125 && TREE_CODE (min_val) != INTEGER_CST)
14126 return NULL_TREE;
14127 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14128 NULL_TREE);
14131 return NULL_TREE;
14134 /* Builds an expression for an indirection through T, simplifying some
14135 cases. */
14137 tree
14138 build_fold_indirect_ref_loc (location_t loc, tree t)
14140 tree type = TREE_TYPE (TREE_TYPE (t));
14141 tree sub = fold_indirect_ref_1 (loc, type, t);
14143 if (sub)
14144 return sub;
14146 return build1_loc (loc, INDIRECT_REF, type, t);
14149 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14151 tree
14152 fold_indirect_ref_loc (location_t loc, tree t)
14154 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14156 if (sub)
14157 return sub;
14158 else
14159 return t;
14162 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14163 whose result is ignored. The type of the returned tree need not be
14164 the same as the original expression. */
14166 tree
14167 fold_ignored_result (tree t)
14169 if (!TREE_SIDE_EFFECTS (t))
14170 return integer_zero_node;
14172 for (;;)
14173 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14175 case tcc_unary:
14176 t = TREE_OPERAND (t, 0);
14177 break;
14179 case tcc_binary:
14180 case tcc_comparison:
14181 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14182 t = TREE_OPERAND (t, 0);
14183 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14184 t = TREE_OPERAND (t, 1);
14185 else
14186 return t;
14187 break;
14189 case tcc_expression:
14190 switch (TREE_CODE (t))
14192 case COMPOUND_EXPR:
14193 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14194 return t;
14195 t = TREE_OPERAND (t, 0);
14196 break;
14198 case COND_EXPR:
14199 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14200 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14201 return t;
14202 t = TREE_OPERAND (t, 0);
14203 break;
14205 default:
14206 return t;
14208 break;
14210 default:
14211 return t;
14215 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14217 tree
14218 round_up_loc (location_t loc, tree value, unsigned int divisor)
14220 tree div = NULL_TREE;
14222 if (divisor == 1)
14223 return value;
14225 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14226 have to do anything. Only do this when we are not given a const,
14227 because in that case, this check is more expensive than just
14228 doing it. */
14229 if (TREE_CODE (value) != INTEGER_CST)
14231 div = build_int_cst (TREE_TYPE (value), divisor);
14233 if (multiple_of_p (TREE_TYPE (value), value, div))
14234 return value;
14237 /* If divisor is a power of two, simplify this to bit manipulation. */
14238 if (pow2_or_zerop (divisor))
14240 if (TREE_CODE (value) == INTEGER_CST)
14242 wide_int val = value;
14243 bool overflow_p;
14245 if ((val & (divisor - 1)) == 0)
14246 return value;
14248 overflow_p = TREE_OVERFLOW (value);
14249 val += divisor - 1;
14250 val &= (int) -divisor;
14251 if (val == 0)
14252 overflow_p = true;
14254 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14256 else
14258 tree t;
14260 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14261 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14262 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14263 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14266 else
14268 if (!div)
14269 div = build_int_cst (TREE_TYPE (value), divisor);
14270 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14271 value = size_binop_loc (loc, MULT_EXPR, value, div);
14274 return value;
14277 /* Likewise, but round down. */
14279 tree
14280 round_down_loc (location_t loc, tree value, int divisor)
14282 tree div = NULL_TREE;
14284 gcc_assert (divisor > 0);
14285 if (divisor == 1)
14286 return value;
14288 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14289 have to do anything. Only do this when we are not given a const,
14290 because in that case, this check is more expensive than just
14291 doing it. */
14292 if (TREE_CODE (value) != INTEGER_CST)
14294 div = build_int_cst (TREE_TYPE (value), divisor);
14296 if (multiple_of_p (TREE_TYPE (value), value, div))
14297 return value;
14300 /* If divisor is a power of two, simplify this to bit manipulation. */
14301 if (pow2_or_zerop (divisor))
14303 tree t;
14305 t = build_int_cst (TREE_TYPE (value), -divisor);
14306 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14308 else
14310 if (!div)
14311 div = build_int_cst (TREE_TYPE (value), divisor);
14312 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14313 value = size_binop_loc (loc, MULT_EXPR, value, div);
14316 return value;
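/* Illustrative sketch (not part of the build): the power-of-two fast
   paths used by round_up_loc and round_down_loc, in plain C.  In two's
   complement, -DIVISOR is exactly the mask that clears the low
   log2(DIVISOR) bits.  */
#if 0
static unsigned
round_up_pow2 (unsigned value, unsigned divisor)	/* divisor a power of 2 */
{
  return (value + divisor - 1) & -divisor;
}

static unsigned
round_down_pow2 (unsigned value, unsigned divisor)
{
  return value & -divisor;
}

/* round_up_pow2 (13, 8) == 16; round_down_pow2 (13, 8) == 8.  */
#endif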
14319 /* Returns the pointer to the base of the object addressed by EXP and
14320 extracts the information about the offset of the access, storing it
14321 to PBITPOS and POFFSET. */
14323 static tree
14324 split_address_to_core_and_offset (tree exp,
14325 HOST_WIDE_INT *pbitpos, tree *poffset)
14327 tree core;
14328 machine_mode mode;
14329 int unsignedp, reversep, volatilep;
14330 HOST_WIDE_INT bitsize;
14331 location_t loc = EXPR_LOCATION (exp);
14333 if (TREE_CODE (exp) == ADDR_EXPR)
14335 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14336 poffset, &mode, &unsignedp, &reversep,
14337 &volatilep);
14338 core = build_fold_addr_expr_loc (loc, core);
14340 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14342 core = TREE_OPERAND (exp, 0);
14343 STRIP_NOPS (core);
14344 *pbitpos = 0;
14345 *poffset = TREE_OPERAND (exp, 1);
14346 if (TREE_CODE (*poffset) == INTEGER_CST)
14348 offset_int tem = wi::sext (wi::to_offset (*poffset),
14349 TYPE_PRECISION (TREE_TYPE (*poffset)));
14350 tem <<= LOG2_BITS_PER_UNIT;
14351 if (wi::fits_shwi_p (tem))
14353 *pbitpos = tem.to_shwi ();
14354 *poffset = NULL_TREE;
14358 else
14360 core = exp;
14361 *pbitpos = 0;
14362 *poffset = NULL_TREE;
14365 return core;
14368 /* Returns true if addresses of E1 and E2 differ by a constant, false
14369 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14371 bool
14372 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14374 tree core1, core2;
14375 HOST_WIDE_INT bitpos1, bitpos2;
14376 tree toffset1, toffset2, tdiff, type;
14378 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14379 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14381 if (bitpos1 % BITS_PER_UNIT != 0
14382 || bitpos2 % BITS_PER_UNIT != 0
14383 || !operand_equal_p (core1, core2, 0))
14384 return false;
14386 if (toffset1 && toffset2)
14388 type = TREE_TYPE (toffset1);
14389 if (type != TREE_TYPE (toffset2))
14390 toffset2 = fold_convert (type, toffset2);
14392 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14393 if (!cst_and_fits_in_hwi (tdiff))
14394 return false;
14396 *diff = int_cst_value (tdiff);
14398 else if (toffset1 || toffset2)
14400 /* If only one of the offsets is non-constant, the difference cannot
14401 be a constant. */
14402 return false;
14404 else
14405 *diff = 0;
14407 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14408 return true;
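/* Illustrative sketch (not part of the build): the kind of fact
   ptr_difference_const recovers.  Both addresses share the core &a, so
   their difference is a compile-time constant byte offset.  */
#if 0
#include <assert.h>

static void
ptr_diff_example (void)
{
  int a[10];
  char *p1 = (char *) &a[5];
  char *p2 = (char *) &a[2];
  assert (p1 - p2 == 3 * (long) sizeof (int));	/* constant difference */
}
#endif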
14411 /* Return OFF converted to a pointer offset type suitable as offset for
14412 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14413 tree
14414 convert_to_ptrofftype_loc (location_t loc, tree off)
14416 return fold_convert_loc (loc, sizetype, off);
14419 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14420 tree
14421 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14423 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14424 ptr, convert_to_ptrofftype_loc (loc, off));
14427 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14428 tree
14429 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14431 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14432 ptr, size_int (off));
14435 /* Return a char pointer for a C string if it is a string constant
14436 or the sum of a string constant and an integer constant. We only
14437 support string constants properly terminated with a '\0' character.
14438 If STRLEN is a valid pointer, the length of the returned string
14439 (including the terminating character) is stored to the argument. */
14441 const char *
14442 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14444 tree offset_node;
14446 if (strlen)
14447 *strlen = 0;
14449 src = string_constant (src, &offset_node);
14450 if (src == 0)
14451 return NULL;
14453 unsigned HOST_WIDE_INT offset = 0;
14454 if (offset_node != NULL_TREE)
14456 if (!tree_fits_uhwi_p (offset_node))
14457 return NULL;
14458 else
14459 offset = tree_to_uhwi (offset_node);
14462 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14463 const char *string = TREE_STRING_POINTER (src);
14465 /* Support only properly null-terminated strings. */
14466 if (string_length == 0
14467 || string[string_length - 1] != '\0'
14468 || offset >= string_length)
14469 return NULL;
14471 if (strlen)
14472 *strlen = string_length - offset;
14473 return string + offset;
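/* Illustrative sketch (not part of the build): what c_getstr computes
   for a string constant plus a constant offset.  */
#if 0
#include <assert.h>
#include <string.h>

static void
getstr_example (void)
{
  const char *full = "hello";		/* STRING_CST, length 6 with '\0' */
  const char *sub = full + 1;		/* constant offset 1 */
  assert (strcmp (sub, "ello") == 0);
  assert (strlen (sub) + 1 == 5);	/* length including the terminator */
}
#endif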
14476 #if CHECKING_P
14478 namespace selftest {
14480 /* Helper functions for writing tests of folding trees. */
14482 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14484 static void
14485 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14486 tree constant)
14488 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14491 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
14492 wrapping WRAPPED_EXPR. */
14494 static void
14495 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14496 tree wrapped_expr)
14498 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14499 ASSERT_NE (wrapped_expr, result);
14500 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14501 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14504 /* Verify that various arithmetic binary operations are folded
14505 correctly. */
14507 static void
14508 test_arithmetic_folding ()
14510 tree type = integer_type_node;
14511 tree x = create_tmp_var_raw (type, "x");
14512 tree zero = build_zero_cst (type);
14513 tree one = build_int_cst (type, 1);
14515 /* Addition. */
14516 /* 1 <-- (0 + 1) */
14517 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14518 one);
14519 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14520 one);
14522 /* (nonlvalue)x <-- (x + 0) */
14523 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);
14526 /* Subtraction. */
14527 /* 0 <-- (x - x) */
14528 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14529 zero);
14530 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);
14533 /* Multiplication. */
14534 /* 0 <-- (x * 0) */
14535 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14536 zero);
14538 /* (nonlvalue)x <-- (x * 1) */
14539 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
14543 /* Verify that various binary operations on vectors are folded
14544 correctly. */
14546 static void
14547 test_vector_folding ()
14549 tree inner_type = integer_type_node;
14550 tree type = build_vector_type (inner_type, 4);
14551 tree zero = build_zero_cst (type);
14552 tree one = build_one_cst (type);
14554 /* Verify equality tests that return a scalar boolean result. */
14555 tree res_type = boolean_type_node;
14556 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14557 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14558 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14559 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14562 /* Run all of the selftests within this file. */
14564 void
14565 fold_const_c_tests ()
14567 test_arithmetic_folding ();
14568 test_vector_folding ();
14571 } // namespace selftest
14573 #endif /* CHECKING_P */