/* gcc/fold-const.c */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
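/* Illustration (added note, not in the original source): the four bits
   encode LT, EQ, GT and UNORD respectively, so every compound code is a
   bitwise OR of primitive ones, e.g.

     COMPCODE_LE  == (COMPCODE_LT | COMPCODE_EQ)                   -- 1|2 == 3
     COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)     -- 7
     COMPCODE_NE  == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)  -- 13

   ANDing or ORing two comparisons of the same operands then reduces to
   ANDing or ORing their compcodes.  */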
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
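/* Example (added illustration, not in the original source): for
   INTEGER_CST operands standing for 12, 4 and 5,

     div_if_zero_remainder (12, 4)  => 3          (remainder is zero)
     div_if_zero_remainder (12, 5)  => NULL_TREE  (remainder is nonzero)

   so callers can fold exact divisions and leave the rest alone.  */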
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
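/* Usage sketch (added illustration, not in the original source): a caller
   that only wants the warning when it actually uses the folded result
   brackets the fold like so, with EXPR, RES, RES_IS_USED and STMT being
   hypothetical locals of that caller:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ...decide whether RES is actually used...
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);
*/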
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
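/* For instance (added note): sin is odd, so -sin(x) may be rewritten as
   sin(-x); rint is only treated as odd when -frounding-math is off,
   because under a directed rounding mode rint(-x) need not equal
   -rint(x).  */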
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
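/* Example (added illustration): for 32-bit signed int, only INT_MIN has
   just the sign bit set, and -INT_MIN would overflow, so the function
   returns false for INT_MIN and true for every other signed constant;
   unsigned constants always return false.  */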
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its operands
	 does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
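/* Worked example (added illustration): with wrapping overflow (unsigned
   arithmetic or -fwrapv), negate_expr_p (a + b) is true whenever either
   operand is cheaply negatable, because -(A + B) can be rewritten as
   (-B) - A or (-A) - B without introducing an extra NEGATE_EXPR.  */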
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
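/* Example (added illustration): for operand trees X and Y,

     negate_expr (x - y)  => y - x   (when signed zeros are not honored)
     negate_expr (~x)     => x + 1   (for integral types)
     negate_expr (x)      => -x      (falls back to a fresh NEGATE_EXPR)

   unlike fold_negate_expr, negate_expr never returns NULL_TREE for a
   non-null argument.  */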
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
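/* Worked example (added illustration): splitting IN = a + 4 with
   CODE == PLUS_EXPR yields

     return value (variable part)   a
     *litp                          4
     *conp, *minus_litp             NULL

   while IN = a - 4 split with CODE == PLUS_EXPR puts the 4 in
   *MINUS_LITP instead, recording that the literal was subtracted.  */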
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
/* Public entry point: combine ARG1 and ARG2 as int_const_binop_1 does,
   with its default overflow handling.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
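/* Example (added illustration, not in the original source): folding the
   constant product 6 * 7 down to the single INTEGER_CST 42:

     tree t = int_const_binop (MULT_EXPR,
			       build_int_cst (integer_type_node, 6),
			       build_int_cst (integer_type_node, 7));

   Division and modulo by a zero constant return NULL_TREE instead of
   folding.  */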
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform the operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
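/* Example (added illustration): constant complex multiplication uses the
   textbook expansion above, so (1 + 2i) * (3 + 4i) folds to -5 + 10i via
   real = 1*3 - 2*4 and imag = 1*4 + 2*3.  */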
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
1745 indicates which particular sizetype to create. */
1747 tree
1748 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1750 return build_int_cst (sizetype_tab[(int) kind], number);
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
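/* Example (added illustration): size_binop (a wrapper macro around
   size_binop_loc) is the usual way to do sizetype arithmetic during
   layout, e.g.

     tree bytes = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   folds directly to size_int (12), and overflow is always recorded even
   though sizetype is unsigned.  */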
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
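/* Example (added illustration): for sizetype constants,

     size_diffop (size_int (4), size_int (12))

   computes 12 - 4 = 8 in the unsigned type first (which cannot
   overflow), converts to ssizetype and negates, yielding ssizetype
   -8.  */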
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
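/* Example (added illustration): with these saturating semantics,
   (int) 1.0e30 folds to INT_MAX and (int) -1.0e30 to INT_MIN, each with
   TREE_OVERFLOW set on the result; (int) NaN folds to 0, also marked as
   overflowed.  */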
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any of the fractional bits are nonzero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
2024 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2025 to a floating point type. */
2027 static tree
2028 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2030 REAL_VALUE_TYPE value;
2031 tree t;
2033 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2034 t = build_real (type, value);
2036 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2037 return t;
2040 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2041 to another fixed-point type. */
2043 static tree
2044 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2046 FIXED_VALUE_TYPE value;
2047 tree t;
2048 bool overflow_p;
2050 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2051 TYPE_SATURATING (type));
2052 t = build_fixed (type, value);
2054 /* Propagate overflow flags. */
2055 if (overflow_p | TREE_OVERFLOW (arg1))
2056 TREE_OVERFLOW (t) = 1;
2057 return t;
2060 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2061 to a fixed-point type. */
2063 static tree
2064 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2066 FIXED_VALUE_TYPE value;
2067 tree t;
2068 bool overflow_p;
2069 double_int di;
2071 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2073 di.low = TREE_INT_CST_ELT (arg1, 0);
2074 if (TREE_INT_CST_NUNITS (arg1) == 1)
2075 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2076 else
2077 di.high = TREE_INT_CST_ELT (arg1, 1);
2079 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2080 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2081 TYPE_SATURATING (type));
2082 t = build_fixed (type, value);
2084 /* Propagate overflow flags. */
2085 if (overflow_p | TREE_OVERFLOW (arg1))
2086 TREE_OVERFLOW (t) = 1;
2087 return t;
2090 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2091 to a fixed-point type. */
2093 static tree
2094 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2096 FIXED_VALUE_TYPE value;
2097 tree t;
2098 bool overflow_p;
2100 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2101 &TREE_REAL_CST (arg1),
2102 TYPE_SATURATING (type));
2103 t = build_fixed (type, value);
2105 /* Propagate overflow flags. */
2106 if (overflow_p | TREE_OVERFLOW (arg1))
2107 TREE_OVERFLOW (t) = 1;
2108 return t;
2111 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2112 type TYPE. If no simplification can be done return NULL_TREE. */
2114 static tree
2115 fold_convert_const (enum tree_code code, tree type, tree arg1)
2117 if (TREE_TYPE (arg1) == type)
2118 return arg1;
2120 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2121 || TREE_CODE (type) == OFFSET_TYPE)
2123 if (TREE_CODE (arg1) == INTEGER_CST)
2124 return fold_convert_const_int_from_int (type, arg1);
2125 else if (TREE_CODE (arg1) == REAL_CST)
2126 return fold_convert_const_int_from_real (code, type, arg1);
2127 else if (TREE_CODE (arg1) == FIXED_CST)
2128 return fold_convert_const_int_from_fixed (type, arg1);
2130 else if (TREE_CODE (type) == REAL_TYPE)
2132 if (TREE_CODE (arg1) == INTEGER_CST)
2133 return build_real_from_int_cst (type, arg1);
2134 else if (TREE_CODE (arg1) == REAL_CST)
2135 return fold_convert_const_real_from_real (type, arg1);
2136 else if (TREE_CODE (arg1) == FIXED_CST)
2137 return fold_convert_const_real_from_fixed (type, arg1);
2139 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2141 if (TREE_CODE (arg1) == FIXED_CST)
2142 return fold_convert_const_fixed_from_fixed (type, arg1);
2143 else if (TREE_CODE (arg1) == INTEGER_CST)
2144 return fold_convert_const_fixed_from_int (type, arg1);
2145 else if (TREE_CODE (arg1) == REAL_CST)
2146 return fold_convert_const_fixed_from_real (type, arg1);
2148 else if (TREE_CODE (type) == VECTOR_TYPE)
2150 if (TREE_CODE (arg1) == VECTOR_CST
2151 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2153 int len = TYPE_VECTOR_SUBPARTS (type);
2154 tree elttype = TREE_TYPE (type);
2155 tree *v = XALLOCAVEC (tree, len);
2156 for (int i = 0; i < len; ++i)
2158 tree elt = VECTOR_CST_ELT (arg1, i);
2159 tree cvt = fold_convert_const (code, elttype, elt);
2160 if (cvt == NULL_TREE)
2161 return NULL_TREE;
2162 v[i] = cvt;
2164 return build_vector (type, v);
2167 return NULL_TREE;
2170 /* Construct a vector of zero elements of vector type TYPE. */
2172 static tree
2173 build_zero_vector (tree type)
2175 tree t;
2177 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2178 return build_vector_from_val (type, t);
2181 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2183 bool
2184 fold_convertible_p (const_tree type, const_tree arg)
2186 tree orig = TREE_TYPE (arg);
2188 if (type == orig)
2189 return true;
2191 if (TREE_CODE (arg) == ERROR_MARK
2192 || TREE_CODE (type) == ERROR_MARK
2193 || TREE_CODE (orig) == ERROR_MARK)
2194 return false;
2196 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2197 return true;
2199 switch (TREE_CODE (type))
2201 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2202 case POINTER_TYPE: case REFERENCE_TYPE:
2203 case OFFSET_TYPE:
2204 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2205 || TREE_CODE (orig) == OFFSET_TYPE);
2207 case REAL_TYPE:
2208 case FIXED_POINT_TYPE:
2209 case VECTOR_TYPE:
2210 case VOID_TYPE:
2211 return TREE_CODE (type) == TREE_CODE (orig);
2213 default:
2214 return false;
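/* For example, an integer expression is NOP-convertible to a pointer,
   enum or offset type, but not to a REAL_TYPE: that conversion needs a
   FLOAT_EXPR, not a NOP_EXPR, so fold_convertible_p returns false.  */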
2218 /* Convert expression ARG to type TYPE. Used by the middle-end for
2219 simple conversions in preference to calling the front-end's convert. */
2221 tree
2222 fold_convert_loc (location_t loc, tree type, tree arg)
2224 tree orig = TREE_TYPE (arg);
2225 tree tem;
2227 if (type == orig)
2228 return arg;
2230 if (TREE_CODE (arg) == ERROR_MARK
2231 || TREE_CODE (type) == ERROR_MARK
2232 || TREE_CODE (orig) == ERROR_MARK)
2233 return error_mark_node;
2235 switch (TREE_CODE (type))
2237 case POINTER_TYPE:
2238 case REFERENCE_TYPE:
2239 /* Handle conversions between pointers to different address spaces. */
2240 if (POINTER_TYPE_P (orig)
2241 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2242 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2243 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2244 /* fall through */
2246 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2247 case OFFSET_TYPE:
2248 if (TREE_CODE (arg) == INTEGER_CST)
2250 tem = fold_convert_const (NOP_EXPR, type, arg);
2251 if (tem != NULL_TREE)
2252 return tem;
2254 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2255 || TREE_CODE (orig) == OFFSET_TYPE)
2256 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2257 if (TREE_CODE (orig) == COMPLEX_TYPE)
2258 return fold_convert_loc (loc, type,
2259 fold_build1_loc (loc, REALPART_EXPR,
2260 TREE_TYPE (orig), arg));
2261 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2262 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2263 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2265 case REAL_TYPE:
2266 if (TREE_CODE (arg) == INTEGER_CST)
2268 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2272 else if (TREE_CODE (arg) == REAL_CST)
2274 tem = fold_convert_const (NOP_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2278 else if (TREE_CODE (arg) == FIXED_CST)
2280 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2281 if (tem != NULL_TREE)
2282 return tem;
2285 switch (TREE_CODE (orig))
2287 case INTEGER_TYPE:
2288 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2289 case POINTER_TYPE: case REFERENCE_TYPE:
2290 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2292 case REAL_TYPE:
2293 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2295 case FIXED_POINT_TYPE:
2296 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2298 case COMPLEX_TYPE:
2299 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2300 return fold_convert_loc (loc, type, tem);
2302 default:
2303 gcc_unreachable ();
2306 case FIXED_POINT_TYPE:
2307 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2308 || TREE_CODE (arg) == REAL_CST)
2310 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2311 if (tem != NULL_TREE)
2312 goto fold_convert_exit;
2315 switch (TREE_CODE (orig))
2317 case FIXED_POINT_TYPE:
2318 case INTEGER_TYPE:
2319 case ENUMERAL_TYPE:
2320 case BOOLEAN_TYPE:
2321 case REAL_TYPE:
2322 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2324 case COMPLEX_TYPE:
2325 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2326 return fold_convert_loc (loc, type, tem);
2328 default:
2329 gcc_unreachable ();
2332 case COMPLEX_TYPE:
2333 switch (TREE_CODE (orig))
2335 case INTEGER_TYPE:
2336 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2337 case POINTER_TYPE: case REFERENCE_TYPE:
2338 case REAL_TYPE:
2339 case FIXED_POINT_TYPE:
2340 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2341 fold_convert_loc (loc, TREE_TYPE (type), arg),
2342 fold_convert_loc (loc, TREE_TYPE (type),
2343 integer_zero_node));
2344 case COMPLEX_TYPE:
2346 tree rpart, ipart;
2348 if (TREE_CODE (arg) == COMPLEX_EXPR)
2350 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2351 TREE_OPERAND (arg, 0));
2352 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2353 TREE_OPERAND (arg, 1));
2354 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2357 arg = save_expr (arg);
2358 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2359 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2360 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2361 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2362 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2365 default:
2366 gcc_unreachable ();
2369 case VECTOR_TYPE:
2370 if (integer_zerop (arg))
2371 return build_zero_vector (type);
2372 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2373 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2374 || TREE_CODE (orig) == VECTOR_TYPE);
2375 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2377 case VOID_TYPE:
2378 tem = fold_ignored_result (arg);
2379 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2381 default:
2382 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2383 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2384 gcc_unreachable ();
2386 fold_convert_exit:
2387 protected_set_expr_location_unshare (tem, loc);
2388 return tem;
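/* An illustrative, non-built sketch of the constant path above:
   converting an INTEGER_CST to a REAL_TYPE folds outright, leaving no
   FLOAT_EXPR behind.  */
#if 0
static void
sketch_fold_convert (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree d = fold_convert_loc (UNKNOWN_LOCATION, double_type_node, two);
  /* D is the REAL_CST 2.0.  */
  gcc_assert (TREE_CODE (d) == REAL_CST);
}
#endif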
2391 /* Return false if expr can be assumed not to be an lvalue, true
2392 otherwise. */
2394 static bool
2395 maybe_lvalue_p (const_tree x)
2397 /* We only need to wrap lvalue tree codes. */
2398 switch (TREE_CODE (x))
2400 case VAR_DECL:
2401 case PARM_DECL:
2402 case RESULT_DECL:
2403 case LABEL_DECL:
2404 case FUNCTION_DECL:
2405 case SSA_NAME:
2407 case COMPONENT_REF:
2408 case MEM_REF:
2409 case INDIRECT_REF:
2410 case ARRAY_REF:
2411 case ARRAY_RANGE_REF:
2412 case BIT_FIELD_REF:
2413 case OBJ_TYPE_REF:
2415 case REALPART_EXPR:
2416 case IMAGPART_EXPR:
2417 case PREINCREMENT_EXPR:
2418 case PREDECREMENT_EXPR:
2419 case SAVE_EXPR:
2420 case TRY_CATCH_EXPR:
2421 case WITH_CLEANUP_EXPR:
2422 case COMPOUND_EXPR:
2423 case MODIFY_EXPR:
2424 case TARGET_EXPR:
2425 case COND_EXPR:
2426 case BIND_EXPR:
2427 break;
2429 default:
2430 /* Assume the worst for front-end tree codes. */
2431 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2432 break;
2433 return false;
2436 return true;
2439 /* Return an expr equal to X but certainly not valid as an lvalue. */
2441 tree
2442 non_lvalue_loc (location_t loc, tree x)
2444 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2445 us. */
2446 if (in_gimple_form)
2447 return x;
2449 if (! maybe_lvalue_p (x))
2450 return x;
2451 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2454 /* When pedantic, return an expr equal to X but certainly not valid as a
2455 pedantic lvalue. Otherwise, return X. */
2457 static tree
2458 pedantic_non_lvalue_loc (location_t loc, tree x)
2460 return protected_set_expr_location_unshare (x, loc);
2463 /* Given a tree comparison code, return the code that is the logical inverse.
2464 It is generally not safe to do this for floating-point comparisons, except
2465 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2466 ERROR_MARK in this case. */
2468 enum tree_code
2469 invert_tree_comparison (enum tree_code code, bool honor_nans)
2471 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2472 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2473 return ERROR_MARK;
2475 switch (code)
2477 case EQ_EXPR:
2478 return NE_EXPR;
2479 case NE_EXPR:
2480 return EQ_EXPR;
2481 case GT_EXPR:
2482 return honor_nans ? UNLE_EXPR : LE_EXPR;
2483 case GE_EXPR:
2484 return honor_nans ? UNLT_EXPR : LT_EXPR;
2485 case LT_EXPR:
2486 return honor_nans ? UNGE_EXPR : GE_EXPR;
2487 case LE_EXPR:
2488 return honor_nans ? UNGT_EXPR : GT_EXPR;
2489 case LTGT_EXPR:
2490 return UNEQ_EXPR;
2491 case UNEQ_EXPR:
2492 return LTGT_EXPR;
2493 case UNGT_EXPR:
2494 return LE_EXPR;
2495 case UNGE_EXPR:
2496 return LT_EXPR;
2497 case UNLT_EXPR:
2498 return GE_EXPR;
2499 case UNLE_EXPR:
2500 return GT_EXPR;
2501 case ORDERED_EXPR:
2502 return UNORDERED_EXPR;
2503 case UNORDERED_EXPR:
2504 return ORDERED_EXPR;
2505 default:
2506 gcc_unreachable ();
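/* For instance, invert_tree_comparison (LT_EXPR, false) is GE_EXPR,
   while invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR ("unordered
   or >="), since !(a < b) must be true when either operand is a NaN.
   With flag_trapping_math set it is ERROR_MARK instead, because the
   unordered form would lose the trap on unordered operands.  */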
2510 /* Similar, but return the comparison that results if the operands are
2511 swapped. This is safe for floating-point. */
2513 enum tree_code
2514 swap_tree_comparison (enum tree_code code)
2516 switch (code)
2518 case EQ_EXPR:
2519 case NE_EXPR:
2520 case ORDERED_EXPR:
2521 case UNORDERED_EXPR:
2522 case LTGT_EXPR:
2523 case UNEQ_EXPR:
2524 return code;
2525 case GT_EXPR:
2526 return LT_EXPR;
2527 case GE_EXPR:
2528 return LE_EXPR;
2529 case LT_EXPR:
2530 return GT_EXPR;
2531 case LE_EXPR:
2532 return GE_EXPR;
2533 case UNGT_EXPR:
2534 return UNLT_EXPR;
2535 case UNGE_EXPR:
2536 return UNLE_EXPR;
2537 case UNLT_EXPR:
2538 return UNGT_EXPR;
2539 case UNLE_EXPR:
2540 return UNGE_EXPR;
2541 default:
2542 gcc_unreachable ();
2547 /* Convert a comparison tree code from an enum tree_code representation
2548 into a compcode bit-based encoding. This function is the inverse of
2549 compcode_to_comparison. */
2551 static enum comparison_code
2552 comparison_to_compcode (enum tree_code code)
2554 switch (code)
2556 case LT_EXPR:
2557 return COMPCODE_LT;
2558 case EQ_EXPR:
2559 return COMPCODE_EQ;
2560 case LE_EXPR:
2561 return COMPCODE_LE;
2562 case GT_EXPR:
2563 return COMPCODE_GT;
2564 case NE_EXPR:
2565 return COMPCODE_NE;
2566 case GE_EXPR:
2567 return COMPCODE_GE;
2568 case ORDERED_EXPR:
2569 return COMPCODE_ORD;
2570 case UNORDERED_EXPR:
2571 return COMPCODE_UNORD;
2572 case UNLT_EXPR:
2573 return COMPCODE_UNLT;
2574 case UNEQ_EXPR:
2575 return COMPCODE_UNEQ;
2576 case UNLE_EXPR:
2577 return COMPCODE_UNLE;
2578 case UNGT_EXPR:
2579 return COMPCODE_UNGT;
2580 case LTGT_EXPR:
2581 return COMPCODE_LTGT;
2582 case UNGE_EXPR:
2583 return COMPCODE_UNGE;
2584 default:
2585 gcc_unreachable ();
2589 /* Convert a compcode bit-based encoding of a comparison operator back
2590 to GCC's enum tree_code representation. This function is the
2591 inverse of comparison_to_compcode. */
2593 static enum tree_code
2594 compcode_to_comparison (enum comparison_code code)
2596 switch (code)
2598 case COMPCODE_LT:
2599 return LT_EXPR;
2600 case COMPCODE_EQ:
2601 return EQ_EXPR;
2602 case COMPCODE_LE:
2603 return LE_EXPR;
2604 case COMPCODE_GT:
2605 return GT_EXPR;
2606 case COMPCODE_NE:
2607 return NE_EXPR;
2608 case COMPCODE_GE:
2609 return GE_EXPR;
2610 case COMPCODE_ORD:
2611 return ORDERED_EXPR;
2612 case COMPCODE_UNORD:
2613 return UNORDERED_EXPR;
2614 case COMPCODE_UNLT:
2615 return UNLT_EXPR;
2616 case COMPCODE_UNEQ:
2617 return UNEQ_EXPR;
2618 case COMPCODE_UNLE:
2619 return UNLE_EXPR;
2620 case COMPCODE_UNGT:
2621 return UNGT_EXPR;
2622 case COMPCODE_LTGT:
2623 return LTGT_EXPR;
2624 case COMPCODE_UNGE:
2625 return UNGE_EXPR;
2626 default:
2627 gcc_unreachable ();
2631 /* Return a tree for the comparison which is the combination of
2632 doing the AND or OR (depending on CODE) of the two operations LCODE
2633 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2634 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2635 if this makes the transformation invalid. */
2637 tree
2638 combine_comparisons (location_t loc,
2639 enum tree_code code, enum tree_code lcode,
2640 enum tree_code rcode, tree truth_type,
2641 tree ll_arg, tree lr_arg)
2643 bool honor_nans = HONOR_NANS (ll_arg);
2644 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2645 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2646 int compcode;
2648 switch (code)
2650 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2651 compcode = lcompcode & rcompcode;
2652 break;
2654 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2655 compcode = lcompcode | rcompcode;
2656 break;
2658 default:
2659 return NULL_TREE;
2662 if (!honor_nans)
2664 /* Eliminate unordered comparisons, as well as LTGT and ORD
2665 which are not used unless the mode has NaNs. */
2666 compcode &= ~COMPCODE_UNORD;
2667 if (compcode == COMPCODE_LTGT)
2668 compcode = COMPCODE_NE;
2669 else if (compcode == COMPCODE_ORD)
2670 compcode = COMPCODE_TRUE;
2672 else if (flag_trapping_math)
2674 /* Check that the original operation and the optimized ones will trap
2675 under the same condition. */
2676 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2677 && (lcompcode != COMPCODE_EQ)
2678 && (lcompcode != COMPCODE_ORD);
2679 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2680 && (rcompcode != COMPCODE_EQ)
2681 && (rcompcode != COMPCODE_ORD);
2682 bool trap = (compcode & COMPCODE_UNORD) == 0
2683 && (compcode != COMPCODE_EQ)
2684 && (compcode != COMPCODE_ORD);
2686 /* In a short-circuited boolean expression the LHS might be
2687 such that the RHS, if evaluated, will never trap. For
2688 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2689 if neither x nor y is NaN. (This is a mixed blessing: for
2690 example, the expression above will never trap, hence
2691 optimizing it to x < y would be invalid). */
2692 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2693 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2694 rtrap = false;
2696 /* If the comparison was short-circuited, and only the RHS
2697 trapped, we may now generate a spurious trap. */
2698 if (rtrap && !ltrap
2699 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2700 return NULL_TREE;
2702 /* If we changed the conditions that cause a trap, we lose. */
2703 if ((ltrap || rtrap) != trap)
2704 return NULL_TREE;
2707 if (compcode == COMPCODE_TRUE)
2708 return constant_boolean_node (true, truth_type);
2709 else if (compcode == COMPCODE_FALSE)
2710 return constant_boolean_node (false, truth_type);
2711 else
2713 enum tree_code tcode;
2715 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2716 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
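/* Worked example: for (x < y) || (x == y) the bitwise OR of the LT and
   EQ encodings yields the LE encoding, so the result is x <= y; for
   (x < y) && (x == y) the AND yields COMPCODE_FALSE and the result is
   constant false.  When NaNs are honored the trap checks above may
   force NULL_TREE instead.  */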
2720 /* Return nonzero if two operands (typically of the same tree node)
2721 are necessarily equal. FLAGS modifies behavior as follows:
2723 If OEP_ONLY_CONST is set, only return nonzero for constants.
2724 This function tests whether the operands are indistinguishable;
2725 it does not test whether they are equal using C's == operation.
2726 The distinction is important for IEEE floating point, because
2727 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2728 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2730 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2731 even though it may hold multiple values during a function.
2732 This is because a GCC tree node guarantees that nothing else is
2733 executed between the evaluation of its "operands" (which may often
2734 be evaluated in arbitrary order). Hence if the operands themselves
2735 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2736 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2737 unset means assuming isochronic (or instantaneous) tree equivalence.
2738 Unless comparing arbitrary expression trees, such as from different
2739 statements, this flag can usually be left unset.
2741 If OEP_PURE_SAME is set, then pure functions with identical arguments
2742 are considered the same. It is used when the caller has other ways
2743 to ensure that global memory is unchanged in between.
2745 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2746 not values of expressions.
2748 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2749 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2751 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2752 any operand with side effects. This is unnecessarily conservative when
2753 we know that arg0 and arg1 are in disjoint code paths (such as the two
2754 arms of a ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2755 addresses with TREE_CONSTANT flag set so we know that &var == &var
2756 even if var is volatile. */
2758 int
2759 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2761 /* When checking, verify at the outermost operand_equal_p call that
2762 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2763 hash value. */
2764 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2766 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2768 if (arg0 != arg1)
2770 inchash::hash hstate0 (0), hstate1 (0);
2771 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2772 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2773 hashval_t h0 = hstate0.end ();
2774 hashval_t h1 = hstate1.end ();
2775 gcc_assert (h0 == h1);
2777 return 1;
2779 else
2780 return 0;
2783 /* If either is ERROR_MARK, they aren't equal. */
2784 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2785 || TREE_TYPE (arg0) == error_mark_node
2786 || TREE_TYPE (arg1) == error_mark_node)
2787 return 0;
2789 /* Similar, if either does not have a type (like a released SSA name),
2790 they aren't equal. */
2791 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2792 return 0;
2794 /* We cannot consider pointers to different address space equal. */
2795 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2796 && POINTER_TYPE_P (TREE_TYPE (arg1))
2797 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2798 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2799 return 0;
2801 /* Check equality of integer constants before bailing out due to
2802 precision differences. */
2803 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2805 /* Address of INTEGER_CST is not defined; check that we did not forget
2806 to drop the OEP_ADDRESS_OF flags. */
2807 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2808 return tree_int_cst_equal (arg0, arg1);
2811 if (!(flags & OEP_ADDRESS_OF))
2813 /* If both types don't have the same signedness, then we can't consider
2814 them equal. We must check this before the STRIP_NOPS calls
2815 because they may change the signedness of the arguments. As pointers
2816 strictly don't have a signedness, require either two pointers or
2817 two non-pointers as well. */
2818 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2819 || POINTER_TYPE_P (TREE_TYPE (arg0))
2820 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2821 return 0;
2823 /* If both types don't have the same precision, then it is not safe
2824 to strip NOPs. */
2825 if (element_precision (TREE_TYPE (arg0))
2826 != element_precision (TREE_TYPE (arg1)))
2827 return 0;
2829 STRIP_NOPS (arg0);
2830 STRIP_NOPS (arg1);
2832 #if 0
2833 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2834 sanity check once the issue is solved. */
2835 else
2836 /* Addresses of conversions and SSA_NAMEs (and many other things)
2837 are not defined. Check that we did not forget to drop the
2838 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2839 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2840 && TREE_CODE (arg0) != SSA_NAME);
2841 #endif
2843 /* In case both args are comparisons but with different comparison
2844 code, try to swap the comparison operands of one arg to produce
2845 a match and compare that variant. */
2846 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2847 && COMPARISON_CLASS_P (arg0)
2848 && COMPARISON_CLASS_P (arg1))
2850 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2852 if (TREE_CODE (arg0) == swap_code)
2853 return operand_equal_p (TREE_OPERAND (arg0, 0),
2854 TREE_OPERAND (arg1, 1), flags)
2855 && operand_equal_p (TREE_OPERAND (arg0, 1),
2856 TREE_OPERAND (arg1, 0), flags);
2859 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2861 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2862 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2864 else if (flags & OEP_ADDRESS_OF)
2866 /* If we are interested in comparing addresses ignore
2867 MEM_REF wrappings of the base that can appear just for
2868 TBAA reasons. */
2869 if (TREE_CODE (arg0) == MEM_REF
2870 && DECL_P (arg1)
2871 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2872 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2873 && integer_zerop (TREE_OPERAND (arg0, 1)))
2874 return 1;
2875 else if (TREE_CODE (arg1) == MEM_REF
2876 && DECL_P (arg0)
2877 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2878 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2879 && integer_zerop (TREE_OPERAND (arg1, 1)))
2880 return 1;
2881 return 0;
2883 else
2884 return 0;
2887 /* When not checking addresses, this is needed for conversions and for
2888 COMPONENT_REF. Might as well play it safe and always test this. */
2889 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2890 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2891 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2892 && !(flags & OEP_ADDRESS_OF)))
2893 return 0;
2895 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2896 We don't care about side effects in that case because the SAVE_EXPR
2897 takes care of that for us. In all other cases, two expressions are
2898 equal if they have no side effects. If we have two identical
2899 expressions with side effects that should be treated the same due
2900 to the only side effects being identical SAVE_EXPR's, that will
2901 be detected in the recursive calls below.
2902 If we are taking an invariant address of two identical objects
2903 they are necessarily equal as well. */
2904 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2905 && (TREE_CODE (arg0) == SAVE_EXPR
2906 || (flags & OEP_MATCH_SIDE_EFFECTS)
2907 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2908 return 1;
2910 /* Next handle constant cases, those for which we can return 1 even
2911 if ONLY_CONST is set. */
2912 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2913 switch (TREE_CODE (arg0))
2915 case INTEGER_CST:
2916 return tree_int_cst_equal (arg0, arg1);
2918 case FIXED_CST:
2919 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2920 TREE_FIXED_CST (arg1));
2922 case REAL_CST:
2923 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2924 return 1;
2927 if (!HONOR_SIGNED_ZEROS (arg0))
2929 /* If we do not distinguish between signed and unsigned zero,
2930 consider them equal. */
2931 if (real_zerop (arg0) && real_zerop (arg1))
2932 return 1;
2934 return 0;
2936 case VECTOR_CST:
2938 unsigned i;
2940 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2941 return 0;
2943 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2945 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2946 VECTOR_CST_ELT (arg1, i), flags))
2947 return 0;
2949 return 1;
2952 case COMPLEX_CST:
2953 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2954 flags)
2955 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2956 flags));
2958 case STRING_CST:
2959 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2960 && ! memcmp (TREE_STRING_POINTER (arg0),
2961 TREE_STRING_POINTER (arg1),
2962 TREE_STRING_LENGTH (arg0)));
2964 case ADDR_EXPR:
2965 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2966 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2967 flags | OEP_ADDRESS_OF
2968 | OEP_MATCH_SIDE_EFFECTS);
2969 case CONSTRUCTOR:
2970 /* In GIMPLE empty constructors are allowed in initializers of
2971 aggregates. */
2972 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2973 default:
2974 break;
2977 if (flags & OEP_ONLY_CONST)
2978 return 0;
2980 /* Define macros to test an operand from arg0 and arg1 for equality and a
2981 variant that allows null and views null as being different from any
2982 non-null value. In the latter case, if either is null, both
2983 must be; otherwise, do the normal comparison. */
2984 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2985 TREE_OPERAND (arg1, N), flags)
2987 #define OP_SAME_WITH_NULL(N) \
2988 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2989 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2991 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2993 case tcc_unary:
2994 /* Two conversions are equal only if signedness and modes match. */
2995 switch (TREE_CODE (arg0))
2997 CASE_CONVERT:
2998 case FIX_TRUNC_EXPR:
2999 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3000 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3001 return 0;
3002 break;
3003 default:
3004 break;
3007 return OP_SAME (0);
3010 case tcc_comparison:
3011 case tcc_binary:
3012 if (OP_SAME (0) && OP_SAME (1))
3013 return 1;
3015 /* For commutative ops, allow the other order. */
3016 return (commutative_tree_code (TREE_CODE (arg0))
3017 && operand_equal_p (TREE_OPERAND (arg0, 0),
3018 TREE_OPERAND (arg1, 1), flags)
3019 && operand_equal_p (TREE_OPERAND (arg0, 1),
3020 TREE_OPERAND (arg1, 0), flags));
3022 case tcc_reference:
3023 /* If either of the pointer (or reference) expressions we are
3024 dereferencing contain a side effect, these cannot be equal,
3025 but their addresses can be. */
3026 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3027 && (TREE_SIDE_EFFECTS (arg0)
3028 || TREE_SIDE_EFFECTS (arg1)))
3029 return 0;
3031 switch (TREE_CODE (arg0))
3033 case INDIRECT_REF:
3034 if (!(flags & OEP_ADDRESS_OF)
3035 && (TYPE_ALIGN (TREE_TYPE (arg0))
3036 != TYPE_ALIGN (TREE_TYPE (arg1))))
3037 return 0;
3038 flags &= ~OEP_ADDRESS_OF;
3039 return OP_SAME (0);
3041 case IMAGPART_EXPR:
3042 /* Require the same offset. */
3043 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3044 TYPE_SIZE (TREE_TYPE (arg1)),
3045 flags & ~OEP_ADDRESS_OF))
3046 return 0;
3048 /* Fallthru. */
3049 case REALPART_EXPR:
3050 case VIEW_CONVERT_EXPR:
3051 return OP_SAME (0);
3053 case TARGET_MEM_REF:
3054 case MEM_REF:
3055 if (!(flags & OEP_ADDRESS_OF))
3057 /* Require equal access sizes. */
3058 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3059 && (!TYPE_SIZE (TREE_TYPE (arg0))
3060 || !TYPE_SIZE (TREE_TYPE (arg1))
3061 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3062 TYPE_SIZE (TREE_TYPE (arg1)),
3063 flags)))
3064 return 0;
3065 /* Verify that access happens in similar types. */
3066 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3067 return 0;
3068 /* Verify that accesses are TBAA compatible. */
3069 if (!alias_ptr_types_compatible_p
3070 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3071 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3072 || (MR_DEPENDENCE_CLIQUE (arg0)
3073 != MR_DEPENDENCE_CLIQUE (arg1))
3074 || (MR_DEPENDENCE_BASE (arg0)
3075 != MR_DEPENDENCE_BASE (arg1)))
3076 return 0;
3077 /* Verify that alignment is compatible. */
3078 if (TYPE_ALIGN (TREE_TYPE (arg0))
3079 != TYPE_ALIGN (TREE_TYPE (arg1)))
3080 return 0;
3082 flags &= ~OEP_ADDRESS_OF;
3083 return (OP_SAME (0) && OP_SAME (1)
3084 /* TARGET_MEM_REFs require equal extra operands. */
3085 && (TREE_CODE (arg0) != TARGET_MEM_REF
3086 || (OP_SAME_WITH_NULL (2)
3087 && OP_SAME_WITH_NULL (3)
3088 && OP_SAME_WITH_NULL (4))));
3090 case ARRAY_REF:
3091 case ARRAY_RANGE_REF:
3092 if (!OP_SAME (0))
3093 return 0;
3094 flags &= ~OEP_ADDRESS_OF;
3095 /* If the array index is constant, compare it by value first, since the
3096 indexes may have different types but the same value here. */
3097 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3098 TREE_OPERAND (arg1, 1))
3099 || OP_SAME (1))
3100 && OP_SAME_WITH_NULL (2)
3101 && OP_SAME_WITH_NULL (3)
3102 /* Compare low bound and element size as with OEP_ADDRESS_OF
3103 we have to account for the offset of the ref. */
3104 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3105 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3106 || (operand_equal_p (array_ref_low_bound
3107 (CONST_CAST_TREE (arg0)),
3108 array_ref_low_bound
3109 (CONST_CAST_TREE (arg1)), flags)
3110 && operand_equal_p (array_ref_element_size
3111 (CONST_CAST_TREE (arg0)),
3112 array_ref_element_size
3113 (CONST_CAST_TREE (arg1)),
3114 flags))));
3116 case COMPONENT_REF:
3117 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3118 may be NULL when we're called to compare MEM_EXPRs. */
3119 if (!OP_SAME_WITH_NULL (0)
3120 || !OP_SAME (1))
3121 return 0;
3122 flags &= ~OEP_ADDRESS_OF;
3123 return OP_SAME_WITH_NULL (2);
3125 case BIT_FIELD_REF:
3126 if (!OP_SAME (0))
3127 return 0;
3128 flags &= ~OEP_ADDRESS_OF;
3129 return OP_SAME (1) && OP_SAME (2);
3131 default:
3132 return 0;
3135 case tcc_expression:
3136 switch (TREE_CODE (arg0))
3138 case ADDR_EXPR:
3139 /* Be sure we pass right ADDRESS_OF flag. */
3140 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3141 return operand_equal_p (TREE_OPERAND (arg0, 0),
3142 TREE_OPERAND (arg1, 0),
3143 flags | OEP_ADDRESS_OF);
3145 case TRUTH_NOT_EXPR:
3146 return OP_SAME (0);
3148 case TRUTH_ANDIF_EXPR:
3149 case TRUTH_ORIF_EXPR:
3150 return OP_SAME (0) && OP_SAME (1);
3152 case FMA_EXPR:
3153 case WIDEN_MULT_PLUS_EXPR:
3154 case WIDEN_MULT_MINUS_EXPR:
3155 if (!OP_SAME (2))
3156 return 0;
3157 /* The multiplication operands are commutative. */
3158 /* FALLTHRU */
3160 case TRUTH_AND_EXPR:
3161 case TRUTH_OR_EXPR:
3162 case TRUTH_XOR_EXPR:
3163 if (OP_SAME (0) && OP_SAME (1))
3164 return 1;
3166 /* Otherwise take into account this is a commutative operation. */
3167 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3168 TREE_OPERAND (arg1, 1), flags)
3169 && operand_equal_p (TREE_OPERAND (arg0, 1),
3170 TREE_OPERAND (arg1, 0), flags));
3172 case COND_EXPR:
3173 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3174 return 0;
3175 flags &= ~OEP_ADDRESS_OF;
3176 return OP_SAME (0);
3178 case VEC_COND_EXPR:
3179 case DOT_PROD_EXPR:
3180 case BIT_INSERT_EXPR:
3181 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3183 case MODIFY_EXPR:
3184 case INIT_EXPR:
3185 case COMPOUND_EXPR:
3186 case PREDECREMENT_EXPR:
3187 case PREINCREMENT_EXPR:
3188 case POSTDECREMENT_EXPR:
3189 case POSTINCREMENT_EXPR:
3190 if (flags & OEP_LEXICOGRAPHIC)
3191 return OP_SAME (0) && OP_SAME (1);
3192 return 0;
3194 case CLEANUP_POINT_EXPR:
3195 case EXPR_STMT:
3196 if (flags & OEP_LEXICOGRAPHIC)
3197 return OP_SAME (0);
3198 return 0;
3200 default:
3201 return 0;
3204 case tcc_vl_exp:
3205 switch (TREE_CODE (arg0))
3207 case CALL_EXPR:
3208 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3209 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3210 /* If the two CALL_EXPRs are not both internal or both normal
3211 function calls, then they are not equal. */
3212 return 0;
3213 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3215 /* If the CALL_EXPRs call different internal functions, then they
3216 are not equal. */
3217 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3218 return 0;
3220 else
3222 /* If the CALL_EXPRs call different functions, then they are not
3223 equal. */
3224 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3225 flags))
3226 return 0;
3229 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3231 unsigned int cef = call_expr_flags (arg0);
3232 if (flags & OEP_PURE_SAME)
3233 cef &= ECF_CONST | ECF_PURE;
3234 else
3235 cef &= ECF_CONST;
3236 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3237 return 0;
3240 /* Now see if all the arguments are the same. */
3242 const_call_expr_arg_iterator iter0, iter1;
3243 const_tree a0, a1;
3244 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3245 a1 = first_const_call_expr_arg (arg1, &iter1);
3246 a0 && a1;
3247 a0 = next_const_call_expr_arg (&iter0),
3248 a1 = next_const_call_expr_arg (&iter1))
3249 if (! operand_equal_p (a0, a1, flags))
3250 return 0;
3252 /* If we get here and both argument lists are exhausted
3253 then the CALL_EXPRs are equal. */
3254 return ! (a0 || a1);
3256 default:
3257 return 0;
3260 case tcc_declaration:
3261 /* Consider __builtin_sqrt equal to sqrt. */
3262 return (TREE_CODE (arg0) == FUNCTION_DECL
3263 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3264 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3265 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3267 case tcc_exceptional:
3268 if (TREE_CODE (arg0) == CONSTRUCTOR)
3270 /* In GIMPLE constructors are used only to build vectors from
3271 elements. Individual elements in the constructor must be
3272 indexed in increasing order and form an initial sequence.
3274 We make no effort to compare constructors in GENERIC.
3275 (see sem_variable::equals in ipa-icf which can do so for
3276 constants). */
3277 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3278 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3279 return 0;
3281 /* Be sure that vectors constructed have the same representation.
3282 We have only tested that the element precision and modes match.
3283 Vectors may be BLKmode, so also check that the number of
3284 parts matches. */
3285 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3286 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3287 return 0;
3289 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3290 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3291 unsigned int len = vec_safe_length (v0);
3293 if (len != vec_safe_length (v1))
3294 return 0;
3296 for (unsigned int i = 0; i < len; i++)
3298 constructor_elt *c0 = &(*v0)[i];
3299 constructor_elt *c1 = &(*v1)[i];
3301 if (!operand_equal_p (c0->value, c1->value, flags)
3302 /* In GIMPLE the indexes can be either NULL or matching i.
3303 Double check this so we won't get false
3304 positives for GENERIC. */
3305 || (c0->index
3306 && (TREE_CODE (c0->index) != INTEGER_CST
3307 || !compare_tree_int (c0->index, i)))
3308 || (c1->index
3309 && (TREE_CODE (c1->index) != INTEGER_CST
3310 || !compare_tree_int (c1->index, i))))
3311 return 0;
3313 return 1;
3315 else if (TREE_CODE (arg0) == STATEMENT_LIST
3316 && (flags & OEP_LEXICOGRAPHIC))
3318 /* Compare the STATEMENT_LISTs. */
3319 tree_stmt_iterator tsi1, tsi2;
3320 tree body1 = CONST_CAST_TREE (arg0);
3321 tree body2 = CONST_CAST_TREE (arg1);
3322 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3323 tsi_next (&tsi1), tsi_next (&tsi2))
3325 /* The lists don't have the same number of statements. */
3326 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3327 return 0;
3328 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3329 return 1;
3330 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3331 OEP_LEXICOGRAPHIC))
3332 return 0;
3335 return 0;
3337 case tcc_statement:
3338 switch (TREE_CODE (arg0))
3340 case RETURN_EXPR:
3341 if (flags & OEP_LEXICOGRAPHIC)
3342 return OP_SAME_WITH_NULL (0);
3343 return 0;
3344 default:
3345 return 0;
3348 default:
3349 return 0;
3352 #undef OP_SAME
3353 #undef OP_SAME_WITH_NULL
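/* An illustrative, non-built sketch of the REAL_CST case above: +0.0
   and -0.0 are distinguishable operands whenever signed zeros are
   honored, even though 0.0 == -0.0 as values.  */
#if 0
static void
sketch_signed_zero (void)
{
  tree pz = build_real (double_type_node, dconst0);
  tree nz = build_real (double_type_node, real_value_negate (&dconst0));
  /* Indistinguishable only when signed zeros need not be preserved.  */
  gcc_assert (operand_equal_p (pz, nz, 0) == !HONOR_SIGNED_ZEROS (pz));
}
#endif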
3356 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3357 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3359 When in doubt, return 0. */
3361 static int
3362 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3364 int unsignedp1, unsignedpo;
3365 tree primarg0, primarg1, primother;
3366 unsigned int correct_width;
3368 if (operand_equal_p (arg0, arg1, 0))
3369 return 1;
3371 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3372 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3373 return 0;
3375 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3376 and see if the inner values are the same. This removes any
3377 signedness comparison, which doesn't matter here. */
3378 primarg0 = arg0, primarg1 = arg1;
3379 STRIP_NOPS (primarg0);
3380 STRIP_NOPS (primarg1);
3381 if (operand_equal_p (primarg0, primarg1, 0))
3382 return 1;
3384 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3385 actual comparison operand, ARG0.
3387 First throw away any conversions to wider types
3388 already present in the operands. */
3390 primarg1 = get_narrower (arg1, &unsignedp1);
3391 primother = get_narrower (other, &unsignedpo);
3393 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3394 if (unsignedp1 == unsignedpo
3395 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3396 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3398 tree type = TREE_TYPE (arg0);
3400 /* Make sure shorter operand is extended the right way
3401 to match the longer operand. */
3402 primarg1 = fold_convert (signed_or_unsigned_type_for
3403 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3405 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3406 return 1;
3409 return 0;
3412 /* See if ARG is an expression that is either a comparison or is performing
3413 arithmetic on comparisons. The comparisons must only be comparing
3414 two different values, which will be stored in *CVAL1 and *CVAL2; if
3415 they are nonzero it means that some operands have already been found.
3416 No variables may be used anywhere else in the expression except in the
3417 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3418 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3420 If this is true, return 1. Otherwise, return zero. */
3422 static int
3423 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3425 enum tree_code code = TREE_CODE (arg);
3426 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3428 /* We can handle some of the tcc_expression cases here. */
3429 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3430 tclass = tcc_unary;
3431 else if (tclass == tcc_expression
3432 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3433 || code == COMPOUND_EXPR))
3434 tclass = tcc_binary;
3436 else if (tclass == tcc_expression && code == SAVE_EXPR
3437 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3439 /* If we've already found a CVAL1 or CVAL2, this expression is
3440 too complex to handle. */
3441 if (*cval1 || *cval2)
3442 return 0;
3444 tclass = tcc_unary;
3445 *save_p = 1;
3448 switch (tclass)
3450 case tcc_unary:
3451 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3453 case tcc_binary:
3454 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3455 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3456 cval1, cval2, save_p));
3458 case tcc_constant:
3459 return 1;
3461 case tcc_expression:
3462 if (code == COND_EXPR)
3463 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3464 cval1, cval2, save_p)
3465 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3466 cval1, cval2, save_p)
3467 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3468 cval1, cval2, save_p));
3469 return 0;
3471 case tcc_comparison:
3472 /* First see if we can handle the first operand, then the second. For
3473 the second operand, we know *CVAL1 can't be zero. It must be that
3474 one side of the comparison is each of the values; test for the
3475 case where this isn't true by failing if the two operands
3476 are the same. */
3478 if (operand_equal_p (TREE_OPERAND (arg, 0),
3479 TREE_OPERAND (arg, 1), 0))
3480 return 0;
3482 if (*cval1 == 0)
3483 *cval1 = TREE_OPERAND (arg, 0);
3484 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3486 else if (*cval2 == 0)
3487 *cval2 = TREE_OPERAND (arg, 0);
3488 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3490 else
3491 return 0;
3493 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3495 else if (*cval2 == 0)
3496 *cval2 = TREE_OPERAND (arg, 1);
3497 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3499 else
3500 return 0;
3502 return 1;
3504 default:
3505 return 0;
3509 /* ARG is a tree that is known to contain just arithmetic operations and
3510 comparisons. Evaluate the operations in the tree substituting NEW0 for
3511 any occurrence of OLD0 as an operand of a comparison and likewise for
3512 NEW1 and OLD1. */
3514 static tree
3515 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3516 tree old1, tree new1)
3518 tree type = TREE_TYPE (arg);
3519 enum tree_code code = TREE_CODE (arg);
3520 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3522 /* We can handle some of the tcc_expression cases here. */
3523 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3524 tclass = tcc_unary;
3525 else if (tclass == tcc_expression
3526 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3527 tclass = tcc_binary;
3529 switch (tclass)
3531 case tcc_unary:
3532 return fold_build1_loc (loc, code, type,
3533 eval_subst (loc, TREE_OPERAND (arg, 0),
3534 old0, new0, old1, new1));
3536 case tcc_binary:
3537 return fold_build2_loc (loc, code, type,
3538 eval_subst (loc, TREE_OPERAND (arg, 0),
3539 old0, new0, old1, new1),
3540 eval_subst (loc, TREE_OPERAND (arg, 1),
3541 old0, new0, old1, new1));
3543 case tcc_expression:
3544 switch (code)
3546 case SAVE_EXPR:
3547 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3548 old1, new1);
3550 case COMPOUND_EXPR:
3551 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3552 old1, new1);
3554 case COND_EXPR:
3555 return fold_build3_loc (loc, code, type,
3556 eval_subst (loc, TREE_OPERAND (arg, 0),
3557 old0, new0, old1, new1),
3558 eval_subst (loc, TREE_OPERAND (arg, 1),
3559 old0, new0, old1, new1),
3560 eval_subst (loc, TREE_OPERAND (arg, 2),
3561 old0, new0, old1, new1));
3562 default:
3563 break;
3565 /* Fall through - ??? */
3567 case tcc_comparison:
3569 tree arg0 = TREE_OPERAND (arg, 0);
3570 tree arg1 = TREE_OPERAND (arg, 1);
3572 /* We need to check both for exact equality and tree equality. The
3573 former will be true if the operand has a side-effect. In that
3574 case, we know the operand occurred exactly once. */
3576 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3577 arg0 = new0;
3578 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3579 arg0 = new1;
3581 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3582 arg1 = new0;
3583 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3584 arg1 = new1;
3586 return fold_build2_loc (loc, code, type, arg0, arg1);
3589 default:
3590 return arg;
3594 /* Return a tree for the case when the result of an expression is RESULT
3595 converted to TYPE and OMITTED was previously an operand of the expression
3596 but is now not needed (e.g., we folded OMITTED * 0).
3598 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3599 the conversion of RESULT to TYPE. */
3601 tree
3602 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3604 tree t = fold_convert_loc (loc, type, result);
3606 /* If the resulting operand is an empty statement, just return the omitted
3607 statement casted to void. */
3608 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3609 return build1_loc (loc, NOP_EXPR, void_type_node,
3610 fold_ignored_result (omitted));
3612 if (TREE_SIDE_EFFECTS (omitted))
3613 return build2_loc (loc, COMPOUND_EXPR, type,
3614 fold_ignored_result (omitted), t);
3616 return non_lvalue_loc (loc, t);
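/* For example, when f () * 0 is folded, the call cannot simply be
   dropped.  A non-built sketch, where LOC and CALL are placeholders for
   a location and a CALL_EXPR with side effects: the result is the
   COMPOUND_EXPR (CALL, 0), so the side effects are still evaluated.  */
#if 0
tree t = omit_one_operand_loc (loc, integer_type_node,
                               integer_zero_node, call);
#endif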
3619 /* Return a tree for the case when the result of an expression is RESULT
3620 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3621 of the expression but are now not needed.
3623 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3624 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3625 evaluated before OMITTED2. Otherwise, if neither has side effects,
3626 just do the conversion of RESULT to TYPE. */
3628 tree
3629 omit_two_operands_loc (location_t loc, tree type, tree result,
3630 tree omitted1, tree omitted2)
3632 tree t = fold_convert_loc (loc, type, result);
3634 if (TREE_SIDE_EFFECTS (omitted2))
3635 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3636 if (TREE_SIDE_EFFECTS (omitted1))
3637 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3639 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3643 /* Return a simplified tree node for the truth-negation of ARG. This
3644 never alters ARG itself. We assume that ARG is an operation that
3645 returns a truth value (0 or 1).
3647 FIXME: one would think we would fold the result, but it causes
3648 problems with the dominator optimizer. */
3650 static tree
3651 fold_truth_not_expr (location_t loc, tree arg)
3653 tree type = TREE_TYPE (arg);
3654 enum tree_code code = TREE_CODE (arg);
3655 location_t loc1, loc2;
3657 /* If this is a comparison, we can simply invert it, except for
3658 floating-point non-equality comparisons, in which case we just
3659 enclose a TRUTH_NOT_EXPR around what we have. */
3661 if (TREE_CODE_CLASS (code) == tcc_comparison)
3663 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3664 if (FLOAT_TYPE_P (op_type)
3665 && flag_trapping_math
3666 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3667 && code != NE_EXPR && code != EQ_EXPR)
3668 return NULL_TREE;
3670 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3671 if (code == ERROR_MARK)
3672 return NULL_TREE;
3674 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3675 TREE_OPERAND (arg, 1));
3676 if (TREE_NO_WARNING (arg))
3677 TREE_NO_WARNING (ret) = 1;
3678 return ret;
3681 switch (code)
3683 case INTEGER_CST:
3684 return constant_boolean_node (integer_zerop (arg), type);
3686 case TRUTH_AND_EXPR:
3687 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3688 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3689 return build2_loc (loc, TRUTH_OR_EXPR, type,
3690 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3691 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3693 case TRUTH_OR_EXPR:
3694 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3695 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3696 return build2_loc (loc, TRUTH_AND_EXPR, type,
3697 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3698 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3700 case TRUTH_XOR_EXPR:
3701 /* Here we can invert either operand. We invert the first operand
3702 unless the second operand is a TRUTH_NOT_EXPR in which case our
3703 result is the XOR of the first operand with the inside of the
3704 negation of the second operand. */
3706 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3707 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3708 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3709 else
3710 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3711 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3712 TREE_OPERAND (arg, 1));
3714 case TRUTH_ANDIF_EXPR:
3715 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3716 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3717 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3718 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3719 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3721 case TRUTH_ORIF_EXPR:
3722 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3723 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3724 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3725 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3726 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3728 case TRUTH_NOT_EXPR:
3729 return TREE_OPERAND (arg, 0);
3731 case COND_EXPR:
3733 tree arg1 = TREE_OPERAND (arg, 1);
3734 tree arg2 = TREE_OPERAND (arg, 2);
3736 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3737 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3739 /* A COND_EXPR may have a throw as one operand, which
3740 then has void type. Just leave void operands
3741 as they are. */
3742 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3743 VOID_TYPE_P (TREE_TYPE (arg1))
3744 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3745 VOID_TYPE_P (TREE_TYPE (arg2))
3746 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3749 case COMPOUND_EXPR:
3750 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3751 return build2_loc (loc, COMPOUND_EXPR, type,
3752 TREE_OPERAND (arg, 0),
3753 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3755 case NON_LVALUE_EXPR:
3756 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3757 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3759 CASE_CONVERT:
3760 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3761 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3763 /* fall through */
3765 case FLOAT_EXPR:
3766 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3767 return build1_loc (loc, TREE_CODE (arg), type,
3768 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3770 case BIT_AND_EXPR:
3771 if (!integer_onep (TREE_OPERAND (arg, 1)))
3772 return NULL_TREE;
3773 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3775 case SAVE_EXPR:
3776 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3778 case CLEANUP_POINT_EXPR:
3779 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3780 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3781 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3783 default:
3784 return NULL_TREE;
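/* For example, !(a && b) rewrites by De Morgan to !a || !b, and
   !(c ? a : b) pushes the negation into both arms of the COND_EXPR.
   A floating-point a < b under flag_trapping_math is not handled here
   (NULL_TREE), since the inverted unordered form would not trap on
   unordered operands.  */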
3788 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3789 assume that ARG is an operation that returns a truth value (0 or 1
3790 for scalars, 0 or -1 for vectors). Return the folded expression if
3791 folding is successful. Otherwise, return NULL_TREE. */
3793 static tree
3794 fold_invert_truthvalue (location_t loc, tree arg)
3796 tree type = TREE_TYPE (arg);
3797 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3798 ? BIT_NOT_EXPR
3799 : TRUTH_NOT_EXPR,
3800 type, arg);
3803 /* Return a simplified tree node for the truth-negation of ARG. This
3804 never alters ARG itself. We assume that ARG is an operation that
3805 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3807 tree
3808 invert_truthvalue_loc (location_t loc, tree arg)
3810 if (TREE_CODE (arg) == ERROR_MARK)
3811 return arg;
3813 tree type = TREE_TYPE (arg);
3814 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3815 ? BIT_NOT_EXPR
3816 : TRUTH_NOT_EXPR,
3817 type, arg);
3820 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3821 with code CODE. This optimization is unsafe as it can change rounding. */
3822 static tree
3823 distribute_real_division (location_t loc, enum tree_code code, tree type,
3824 tree arg0, tree arg1)
3826 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3827 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3829 /* (A / C) +- (B / C) -> (A +- B) / C. */
3830 if (mul0 == mul1
3831 && operand_equal_p (TREE_OPERAND (arg0, 1),
3832 TREE_OPERAND (arg1, 1), 0))
3833 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3834 fold_build2_loc (loc, code, type,
3835 TREE_OPERAND (arg0, 0),
3836 TREE_OPERAND (arg1, 0)),
3837 TREE_OPERAND (arg0, 1));
3839 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3840 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3841 TREE_OPERAND (arg1, 0), 0)
3842 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3843 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3845 REAL_VALUE_TYPE r0, r1;
3846 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3847 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3848 if (!mul0)
3849 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3850 if (!mul1)
3851 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3852 real_arithmetic (&r0, code, &r0, &r1);
3853 return fold_build2_loc (loc, MULT_EXPR, type,
3854 TREE_OPERAND (arg0, 0),
3855 build_real (type, r0));
3858 return NULL_TREE;
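/* Worked example of the second transformation: x/2.0 + x/4.0 becomes
   x * 0.75, with 1/2.0 + 1/4.0 evaluated at compile time.  This is why
   the optimization is unsafe: the compile-time sum can round
   differently from the two divisions performed at run time.  */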
3861 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3862 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3863 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3864 is the original memory reference used to preserve the alias set of
3865 the access. */
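/* Illustration with an assumed layout (not from the original sources):
   for

     struct { unsigned a : 3, b : 5; } s;

   a BIT_FIELD_REF <s, 8, 0> built here refers to the 8 bits holding
   both fields, and the caller masks/shifts out the bits of interest;
   ORIG_INNER preserves the alias set of the original s.b access.  */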
3867 static tree
3868 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3869 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3870 int unsignedp, int reversep)
3872 tree result, bftype;
3874 /* Preserve the access path if possible. */
3875 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3877 tree ninner = TREE_OPERAND (orig_inner, 0);
3878 machine_mode nmode;
3879 HOST_WIDE_INT nbitsize, nbitpos;
3880 tree noffset;
3881 int nunsignedp, nreversep, nvolatilep = 0;
3882 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3883 &noffset, &nmode, &nunsignedp,
3884 &nreversep, &nvolatilep);
3885 if (base == inner
3886 && noffset == NULL_TREE
3887 && nbitsize >= bitsize
3888 && nbitpos <= bitpos
3889 && bitpos + bitsize <= nbitpos + nbitsize
3890 && !reversep
3891 && !nreversep
3892 && !nvolatilep)
3894 inner = ninner;
3895 bitpos -= nbitpos;
3899 alias_set_type iset = get_alias_set (orig_inner);
3900 if (iset == 0 && get_alias_set (inner) != iset)
3901 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3902 build_fold_addr_expr (inner),
3903 build_int_cst (ptr_type_node, 0));
3905 if (bitpos == 0 && !reversep)
3907 tree size = TYPE_SIZE (TREE_TYPE (inner));
3908 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3909 || POINTER_TYPE_P (TREE_TYPE (inner)))
3910 && tree_fits_shwi_p (size)
3911 && tree_to_shwi (size) == bitsize)
3912 return fold_convert_loc (loc, type, inner);
3915 bftype = type;
3916 if (TYPE_PRECISION (bftype) != bitsize
3917 || TYPE_UNSIGNED (bftype) == !unsignedp)
3918 bftype = build_nonstandard_integer_type (bitsize, 0);
3920 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3921 size_int (bitsize), bitsize_int (bitpos));
3922 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3924 if (bftype != type)
3925 result = fold_convert_loc (loc, type, result);
3927 return result;
3930 /* Optimize a bit-field compare.
3932 There are two cases: the first is a compare against a constant and the
3933 second is a comparison of two items where the fields are at the same
3934 bit position relative to the start of a chunk (byte, halfword, word)
3935 large enough to contain it. In these cases we can avoid the shift
3936 implicit in bitfield extractions.
3938 For constants, we emit a compare of the shifted constant with the
3939 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3940 compared. For two fields at the same position, we do the ANDs with the
3941 similar mask and compare the result of the ANDs.
3943 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3944 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3945 are the left and right operands of the comparison, respectively.
3947 If the optimization described above can be done, we return the resulting
3948 tree. Otherwise we return zero. */
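/* A worked instance of the constant case, with an assumed layout (this
   sketch is not part of the original sources):

     struct { int f : 3; } x;          x.f == 2

   rather than extracting F (shift plus sign-extension) and comparing,
   we load the byte B containing F once and test

     (B & mask) == ((2 << bitpos) & mask)

   where MASK covers the three bits of F within B, avoiding the shift
   implicit in the bit-field extraction.  */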
3950 static tree
3951 optimize_bit_field_compare (location_t loc, enum tree_code code,
3952 tree compare_type, tree lhs, tree rhs)
3954 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3955 tree type = TREE_TYPE (lhs);
3956 tree unsigned_type;
3957 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3958 machine_mode lmode, rmode, nmode;
3959 int lunsignedp, runsignedp;
3960 int lreversep, rreversep;
3961 int lvolatilep = 0, rvolatilep = 0;
3962 tree linner, rinner = NULL_TREE;
3963 tree mask;
3964 tree offset;
3966 /* Get all the information about the extractions being done. If the bit size
3967 is the same as the size of the underlying object, we aren't doing an
3968 extraction at all and so can do nothing. We also don't want to
3969 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3970 then will no longer be able to replace it. */
3971 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3972 &lunsignedp, &lreversep, &lvolatilep);
3973 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3974 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3975 return 0;
3977 if (const_p)
3978 rreversep = lreversep;
3979 else
3981 /* If this is not a constant, we can only do something if bit positions,
3982 sizes, signedness and storage order are the same. */
3983 rinner
3984 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3985 &runsignedp, &rreversep, &rvolatilep);
3987 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3988 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3989 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3990 return 0;
3993 /* Honor the C++ memory model and mimic what RTL expansion does. */
3994 unsigned HOST_WIDE_INT bitstart = 0;
3995 unsigned HOST_WIDE_INT bitend = 0;
3996 if (TREE_CODE (lhs) == COMPONENT_REF)
3998 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3999 if (offset != NULL_TREE)
4000 return 0;
4003 /* See if we can find a mode to refer to this field. We should be able to,
4004 but fail if we can't. */
4005 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4006 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4007 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4008 TYPE_ALIGN (TREE_TYPE (rinner))),
4009 word_mode, false);
4010 if (nmode == VOIDmode)
4011 return 0;
4013 /* Set the unsigned type of the precision of this mode for the
4014 shifts below. */
4015 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4017 /* Compute the bit position and size for the new reference and our offset
4018 within it. If the new reference is the same size as the original, we
4019 won't optimize anything, so return zero. */
4020 nbitsize = GET_MODE_BITSIZE (nmode);
4021 nbitpos = lbitpos & ~ (nbitsize - 1);
4022 lbitpos -= nbitpos;
4023 if (nbitsize == lbitsize)
4024 return 0;
4026 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4027 lbitpos = nbitsize - lbitsize - lbitpos;
4029 /* Make the mask to be used against the extracted field. */
4030 mask = build_int_cst_type (unsigned_type, -1);
4031 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4032 mask = const_binop (RSHIFT_EXPR, mask,
4033 size_int (nbitsize - lbitsize - lbitpos));
4035 if (! const_p)
4036 /* If not comparing with a constant, just rework the comparison
4037 and return. */
4038 return fold_build2_loc (loc, code, compare_type,
4039 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4040 make_bit_field_ref (loc, linner, lhs,
4041 unsigned_type,
4042 nbitsize, nbitpos,
4043 1, lreversep),
4044 mask),
4045 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4046 make_bit_field_ref (loc, rinner, rhs,
4047 unsigned_type,
4048 nbitsize, nbitpos,
4049 1, rreversep),
4050 mask));
4052 /* Otherwise, we are handling the constant case. See if the constant is too
4053 big for the field. Warn and return a tree for 0 (false) if so. We do
4054 this not only for its own sake, but to avoid having to test for this
4055 error case below. If we didn't, we might generate wrong code.
4057 For unsigned fields, the constant shifted right by the field length should
4058 be all zero. For signed fields, the high-order bits should agree with
4059 the sign bit. */
4061 if (lunsignedp)
4063 if (wi::lrshift (rhs, lbitsize) != 0)
4065 warning (0, "comparison is always %d due to width of bit-field",
4066 code == NE_EXPR);
4067 return constant_boolean_node (code == NE_EXPR, compare_type);
4070 else
4072 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4073 if (tem != 0 && tem != -1)
4075 warning (0, "comparison is always %d due to width of bit-field",
4076 code == NE_EXPR);
4077 return constant_boolean_node (code == NE_EXPR, compare_type);
4081 /* Single-bit compares should always be against zero. */
4082 if (lbitsize == 1 && ! integer_zerop (rhs))
4084 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4085 rhs = build_int_cst (type, 0);
4088 /* Make a new bitfield reference, shift the constant over the
4089 appropriate number of bits and mask it with the computed mask
4090 (in case this was a signed field). If we changed it, make a new one. */
4091 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4092 nbitsize, nbitpos, 1, lreversep);
4094 rhs = const_binop (BIT_AND_EXPR,
4095 const_binop (LSHIFT_EXPR,
4096 fold_convert_loc (loc, unsigned_type, rhs),
4097 size_int (lbitpos)),
4098 mask);
4100 lhs = build2_loc (loc, code, compare_type,
4101 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4102 return lhs;
4105 /* Subroutine for fold_truth_andor_1: decode a field reference.
4107 If EXP is a comparison reference, we return the innermost reference.
4109 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4110 set to the starting bit number.
4112 If the innermost field can be completely contained in a mode-sized
4113 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4115 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4116 otherwise it is not changed.
4118 *PUNSIGNEDP is set to the signedness of the field.
4120 *PREVERSEP is set to the storage order of the field.
4122 *PMASK is set to the mask used. This is either contained in a
4123 BIT_AND_EXPR or derived from the width of the field.
4125 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4127 Return 0 if this is not a component reference or is one that we can't
4128 do anything with. */
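/* A typical decoding, under an assumed layout (illustration only, not
   from the original sources): for EXP = (unsigned) x.f & 7, where F is
   an 8-bit field at bit 16 of X, the function returns X and sets
   *PBITSIZE = 8, *PBITPOS = 16, *PAND_MASK = 7, and *PMASK = 7 (the
   BIT_AND_EXPR mask merged with the 0xff field-width mask).  */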
4130 static tree
4131 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4132 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4133 int *punsignedp, int *preversep, int *pvolatilep,
4134 tree *pmask, tree *pand_mask)
4136 tree exp = *exp_;
4137 tree outer_type = 0;
4138 tree and_mask = 0;
4139 tree mask, inner, offset;
4140 tree unsigned_type;
4141 unsigned int precision;
4143 /* All the optimizations using this function assume integer fields.
4144 There are problems with FP fields since the type_for_size call
4145 below can fail for, e.g., XFmode. */
4146 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4147 return 0;
4149 /* We are interested in the bare arrangement of bits, so strip everything
4150 that doesn't affect the machine mode. However, record the type of the
4151 outermost expression if it may matter below. */
4152 if (CONVERT_EXPR_P (exp)
4153 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4154 outer_type = TREE_TYPE (exp);
4155 STRIP_NOPS (exp);
4157 if (TREE_CODE (exp) == BIT_AND_EXPR)
4159 and_mask = TREE_OPERAND (exp, 1);
4160 exp = TREE_OPERAND (exp, 0);
4161 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4162 if (TREE_CODE (and_mask) != INTEGER_CST)
4163 return 0;
4166 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4167 punsignedp, preversep, pvolatilep);
4168 if ((inner == exp && and_mask == 0)
4169 || *pbitsize < 0 || offset != 0
4170 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4171 /* Reject out-of-bound accesses (PR79731). */
4172 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4173 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4174 *pbitpos + *pbitsize) < 0))
4175 return 0;
4177 *exp_ = exp;
4179 /* If the number of bits in the reference is the same as the bitsize of
4180 the outer type, then the outer type gives the signedness. Otherwise
4181 (in case of a small bitfield) the signedness is unchanged. */
4182 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4183 *punsignedp = TYPE_UNSIGNED (outer_type);
4185 /* Compute the mask to access the bitfield. */
4186 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4187 precision = TYPE_PRECISION (unsigned_type);
4189 mask = build_int_cst_type (unsigned_type, -1);
4191 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4192 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4194 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4195 if (and_mask != 0)
4196 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4197 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4199 *pmask = mask;
4200 *pand_mask = and_mask;
4201 return inner;
4204 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4205 bit positions and the type of MASK is signed. */
4207 static int
4208 all_ones_mask_p (const_tree mask, unsigned int size)
4210 tree type = TREE_TYPE (mask);
4211 unsigned int precision = TYPE_PRECISION (type);
4213 /* If this function returns true when the type of the mask is
4214 UNSIGNED, then there will be errors. In particular see
4215 gcc.c-torture/execute/990326-1.c. There does not appear to be
4216 any documentation paper trail as to why this is so. But the pre
4217 wide-int worked with that restriction and it has been preserved
4218 here. */
4219 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4220 return false;
4222 return wi::mask (size, false, precision) == mask;
4225 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4226 represents the sign bit of EXP's type. If EXP represents a sign
4227 or zero extension, also test VAL against the unextended type.
4228 The return value is the (sub)expression whose sign bit is VAL,
4229 or NULL_TREE otherwise. */
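/* For instance (illustration only, not from the original sources):
   with 32-bit int, sign_bit_p (x, 0x80000000) returns X.  For
   EXP = (int) c with C a signed char, VAL = 0x80 also matches: the
   recursion strips the extension and tests VAL against the sign bit
   of the unextended 8-bit type, returning C.  */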
4231 tree
4232 sign_bit_p (tree exp, const_tree val)
4234 int width;
4235 tree t;
4237 /* Tree EXP must have an integral type. */
4238 t = TREE_TYPE (exp);
4239 if (! INTEGRAL_TYPE_P (t))
4240 return NULL_TREE;
4242 /* Tree VAL must be an integer constant. */
4243 if (TREE_CODE (val) != INTEGER_CST
4244 || TREE_OVERFLOW (val))
4245 return NULL_TREE;
4247 width = TYPE_PRECISION (t);
4248 if (wi::only_sign_bit_p (val, width))
4249 return exp;
4251 /* Handle extension from a narrower type. */
4252 if (TREE_CODE (exp) == NOP_EXPR
4253 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4254 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4256 return NULL_TREE;
4259 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4260 to be evaluated unconditionally. */
4262 static int
4263 simple_operand_p (const_tree exp)
4265 /* Strip any conversions that don't change the machine mode. */
4266 STRIP_NOPS (exp);
4268 return (CONSTANT_CLASS_P (exp)
4269 || TREE_CODE (exp) == SSA_NAME
4270 || (DECL_P (exp)
4271 && ! TREE_ADDRESSABLE (exp)
4272 && ! TREE_THIS_VOLATILE (exp)
4273 && ! DECL_NONLOCAL (exp)
4274 /* Don't regard global variables as simple. They may be
4275 allocated in ways unknown to the compiler (shared memory,
4276 #pragma weak, etc). */
4277 && ! TREE_PUBLIC (exp)
4278 && ! DECL_EXTERNAL (exp)
4279 /* Weakrefs are not safe to be read, since they can be NULL.
4280 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4281 have DECL_WEAK flag set. */
4282 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4283 /* Loading a static variable is unduly expensive, but global
4284 registers aren't expensive. */
4285 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4288 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4289 to be evaluated unconditionally.
4290 In addition to simple_operand_p, we assume that comparisons, conversions,
4291 and logic-not operations are simple, if their operands are simple, too. */
4293 static bool
4294 simple_operand_p_2 (tree exp)
4296 enum tree_code code;
4298 if (TREE_SIDE_EFFECTS (exp)
4299 || tree_could_trap_p (exp))
4300 return false;
4302 while (CONVERT_EXPR_P (exp))
4303 exp = TREE_OPERAND (exp, 0);
4305 code = TREE_CODE (exp);
4307 if (TREE_CODE_CLASS (code) == tcc_comparison)
4308 return (simple_operand_p (TREE_OPERAND (exp, 0))
4309 && simple_operand_p (TREE_OPERAND (exp, 1)));
4311 if (code == TRUTH_NOT_EXPR)
4312 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4314 return simple_operand_p (exp);
4318 /* The following functions are subroutines to fold_range_test and allow it to
4319 try to change a logical combination of comparisons into a range test.
4321 For example, both
4322 X == 2 || X == 3 || X == 4 || X == 5
4323 and
4324 X >= 2 && X <= 5
4325 are converted to
4326 (unsigned) (X - 2) <= 3
4328 We describe each set of comparisons as being either inside or outside
4329 a range, using a variable named like IN_P, and then describe the
4330 range with a lower and upper bound. If one of the bounds is omitted,
4331 it represents either the highest or lowest value of the type.
4333 In the comments below, we represent a range by two numbers in brackets
4334 preceded by a "+" to designate being inside that range, or a "-" to
4335 designate being outside that range, so the condition can be inverted by
4336 flipping the prefix. An omitted bound is represented by a "-". For
4337 example, "- [-, 10]" means being outside the range starting at the lowest
4338 possible value and ending at 10, in other words, being greater than 10.
4339 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4340 always false.
4342 We set up things so that the missing bounds are handled in a consistent
4343 manner so neither a missing bound nor "true" and "false" need to be
4344 handled using a special case. */
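/* The arithmetic behind the example above, as an illustrative aside
   (not part of the original sources): subtracting the low bound slides
   the range down to start at zero, and the unsigned comparison then
   rejects values below the old low bound as huge wrapped-around values:

     X >= 2 && X <= 5          + [2, 5]
     (unsigned) (X - 2) <= 3   + [0, 3] after sliding

   e.g. X == 1 yields (unsigned) -1 == 0xffffffff > 3, correctly
   classified as outside the range on a 32-bit target.  */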
4346 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4347 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4348 and UPPER1_P are nonzero if the respective argument is an upper bound
4349 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4350 must be specified for a comparison. ARG1 will be converted to ARG0's
4351 type if both are specified. */
4353 static tree
4354 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4355 tree arg1, int upper1_p)
4357 tree tem;
4358 int result;
4359 int sgn0, sgn1;
4361 /* If neither arg represents infinity, do the normal operation.
4362 Else, if not a comparison, return infinity. Else handle the special
4363 comparison rules. Note that most of the cases below won't occur, but
4364 are handled for consistency. */
4366 if (arg0 != 0 && arg1 != 0)
4368 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4369 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4370 STRIP_NOPS (tem);
4371 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4374 if (TREE_CODE_CLASS (code) != tcc_comparison)
4375 return 0;
4377 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4378 for neither. In real maths, we cannot assume open ended ranges are
4379 the same. But, this is computer arithmetic, where numbers are finite.
4380 We can therefore make the transformation of any unbounded range with
4381 the value Z, Z being greater than any representable number. This permits
4382 us to treat unbounded ranges as equal. */
4383 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4384 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4385 switch (code)
4387 case EQ_EXPR:
4388 result = sgn0 == sgn1;
4389 break;
4390 case NE_EXPR:
4391 result = sgn0 != sgn1;
4392 break;
4393 case LT_EXPR:
4394 result = sgn0 < sgn1;
4395 break;
4396 case LE_EXPR:
4397 result = sgn0 <= sgn1;
4398 break;
4399 case GT_EXPR:
4400 result = sgn0 > sgn1;
4401 break;
4402 case GE_EXPR:
4403 result = sgn0 >= sgn1;
4404 break;
4405 default:
4406 gcc_unreachable ();
4409 return constant_boolean_node (result, type);
4412 /* Helper routine for make_range. Perform one step for it, return
4413 new expression if the loop should continue or NULL_TREE if it should
4414 stop. */
4416 tree
4417 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4418 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4419 bool *strict_overflow_p)
4421 tree arg0_type = TREE_TYPE (arg0);
4422 tree n_low, n_high, low = *p_low, high = *p_high;
4423 int in_p = *p_in_p, n_in_p;
4425 switch (code)
4427 case TRUTH_NOT_EXPR:
4428 /* We can only do something if the range is testing for zero. */
4429 if (low == NULL_TREE || high == NULL_TREE
4430 || ! integer_zerop (low) || ! integer_zerop (high))
4431 return NULL_TREE;
4432 *p_in_p = ! in_p;
4433 return arg0;
4435 case EQ_EXPR: case NE_EXPR:
4436 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4437 /* We can only do something if the range is testing for zero
4438 and if the second operand is an integer constant. Note that
4439 saying something is "in" the range we make is done by
4440 complementing IN_P since it will be set in the initial case of
4441 being not equal to zero; "out" is leaving it alone. */
4442 if (low == NULL_TREE || high == NULL_TREE
4443 || ! integer_zerop (low) || ! integer_zerop (high)
4444 || TREE_CODE (arg1) != INTEGER_CST)
4445 return NULL_TREE;
4447 switch (code)
4449 case NE_EXPR: /* - [c, c] */
4450 low = high = arg1;
4451 break;
4452 case EQ_EXPR: /* + [c, c] */
4453 in_p = ! in_p, low = high = arg1;
4454 break;
4455 case GT_EXPR: /* - [-, c] */
4456 low = 0, high = arg1;
4457 break;
4458 case GE_EXPR: /* + [c, -] */
4459 in_p = ! in_p, low = arg1, high = 0;
4460 break;
4461 case LT_EXPR: /* - [c, -] */
4462 low = arg1, high = 0;
4463 break;
4464 case LE_EXPR: /* + [-, c] */
4465 in_p = ! in_p, low = 0, high = arg1;
4466 break;
4467 default:
4468 gcc_unreachable ();
4471 /* If this is an unsigned comparison, we also know that EXP is
4472 greater than or equal to zero. We base the range tests we make
4473 on that fact, so we record it here so we can parse existing
4474 range tests. We test arg0_type since often the return type
4475 of, e.g. EQ_EXPR, is boolean. */
4476 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4478 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4479 in_p, low, high, 1,
4480 build_int_cst (arg0_type, 0),
4481 NULL_TREE))
4482 return NULL_TREE;
4484 in_p = n_in_p, low = n_low, high = n_high;
4486 /* If the high bound is missing, but we have a nonzero low
4487 bound, reverse the range so it goes from zero to the low bound
4488 minus 1. */
4489 if (high == 0 && low && ! integer_zerop (low))
4491 in_p = ! in_p;
4492 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4493 build_int_cst (TREE_TYPE (low), 1), 0);
4494 low = build_int_cst (arg0_type, 0);
4498 *p_low = low;
4499 *p_high = high;
4500 *p_in_p = in_p;
4501 return arg0;
4503 case NEGATE_EXPR:
4504 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4505 low and high are non-NULL, then normalize will DTRT. */
4506 if (!TYPE_UNSIGNED (arg0_type)
4507 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4509 if (low == NULL_TREE)
4510 low = TYPE_MIN_VALUE (arg0_type);
4511 if (high == NULL_TREE)
4512 high = TYPE_MAX_VALUE (arg0_type);
4515 /* (-x) IN [a,b] -> x in [-b, -a] */
4516 n_low = range_binop (MINUS_EXPR, exp_type,
4517 build_int_cst (exp_type, 0),
4518 0, high, 1);
4519 n_high = range_binop (MINUS_EXPR, exp_type,
4520 build_int_cst (exp_type, 0),
4521 0, low, 0);
4522 if (n_high != 0 && TREE_OVERFLOW (n_high))
4523 return NULL_TREE;
4524 goto normalize;
4526 case BIT_NOT_EXPR:
4527 /* ~ X -> -X - 1 */
4528 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4529 build_int_cst (exp_type, 1));
4531 case PLUS_EXPR:
4532 case MINUS_EXPR:
4533 if (TREE_CODE (arg1) != INTEGER_CST)
4534 return NULL_TREE;
4536 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4537 move a constant to the other side. */
4538 if (!TYPE_UNSIGNED (arg0_type)
4539 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4540 return NULL_TREE;
4542 /* If EXP is signed, any overflow in the computation is undefined,
4543 so we don't worry about it so long as our computations on
4544 the bounds don't overflow. For unsigned, overflow is defined
4545 and this is exactly the right thing. */
4546 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4547 arg0_type, low, 0, arg1, 0);
4548 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4549 arg0_type, high, 1, arg1, 0);
4550 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4551 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4552 return NULL_TREE;
4554 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4555 *strict_overflow_p = true;
4557 normalize:
4558 /* Check for an unsigned range which has wrapped around the maximum
4559 value thus making n_high < n_low, and normalize it. */
4560 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4562 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4563 build_int_cst (TREE_TYPE (n_high), 1), 0);
4564 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4565 build_int_cst (TREE_TYPE (n_low), 1), 0);
4567 /* If the range is of the form +/- [ x+1, x ], we won't
4568 be able to normalize it. But then, it represents the
4569 whole range or the empty set, so make it
4570 +/- [ -, - ]. */
4571 if (tree_int_cst_equal (n_low, low)
4572 && tree_int_cst_equal (n_high, high))
4573 low = high = 0;
4574 else
4575 in_p = ! in_p;
4577 else
4578 low = n_low, high = n_high;
4580 *p_low = low;
4581 *p_high = high;
4582 *p_in_p = in_p;
4583 return arg0;
4585 CASE_CONVERT:
4586 case NON_LVALUE_EXPR:
4587 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4588 return NULL_TREE;
4590 if (! INTEGRAL_TYPE_P (arg0_type)
4591 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4592 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4593 return NULL_TREE;
4595 n_low = low, n_high = high;
4597 if (n_low != 0)
4598 n_low = fold_convert_loc (loc, arg0_type, n_low);
4600 if (n_high != 0)
4601 n_high = fold_convert_loc (loc, arg0_type, n_high);
4603 /* If we're converting arg0 from an unsigned type to exp,
4604 a signed type, we will be doing the comparison as unsigned.
4605 The tests above have already verified that LOW and HIGH
4606 are both positive.
4608 So we have to ensure that we will handle large unsigned
4609 values the same way that the current signed bounds treat
4610 negative values. */
4612 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4614 tree high_positive;
4615 tree equiv_type;
4616 /* For fixed-point modes, we need to pass the saturating flag
4617 as the 2nd parameter. */
4618 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4619 equiv_type
4620 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4621 TYPE_SATURATING (arg0_type));
4622 else
4623 equiv_type
4624 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4626 /* A range without an upper bound is, naturally, unbounded.
4627 Since convert would have cropped a very large value, use
4628 the max value for the destination type. */
4629 high_positive
4630 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4631 : TYPE_MAX_VALUE (arg0_type);
4633 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4634 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4635 fold_convert_loc (loc, arg0_type,
4636 high_positive),
4637 build_int_cst (arg0_type, 1));
4639 /* If the low bound is specified, "and" the range with the
4640 range for which the original unsigned value will be
4641 positive. */
4642 if (low != 0)
4644 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4645 1, fold_convert_loc (loc, arg0_type,
4646 integer_zero_node),
4647 high_positive))
4648 return NULL_TREE;
4650 in_p = (n_in_p == in_p);
4652 else
4654 /* Otherwise, "or" the range with the range of the input
4655 that will be interpreted as negative. */
4656 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4657 1, fold_convert_loc (loc, arg0_type,
4658 integer_zero_node),
4659 high_positive))
4660 return NULL_TREE;
4662 in_p = (in_p != n_in_p);
4666 *p_low = n_low;
4667 *p_high = n_high;
4668 *p_in_p = in_p;
4669 return arg0;
4671 default:
4672 return NULL_TREE;
4676 /* Given EXP, a logical expression, set the range it is testing into
4677 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4678 actually being tested. *PLOW and *PHIGH will be made of the same
4679 type as the returned expression. If EXP is not a comparison, we
4680 will most likely not be returning a useful value and range. Set
4681 *STRICT_OVERFLOW_P to true if the return value is only valid
4682 because signed overflow is undefined; otherwise, do not change
4683 *STRICT_OVERFLOW_P. */
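/* A small illustrative walk (not from the original sources), for
   EXP = x + 10 < 20 with X a signed int whose overflow is undefined:

     step 1: LT_EXPR    ->  - [20, -] of x + 10   (x + 10 < 20)
     step 2: PLUS_EXPR  ->  - [10, -] of x        (constant moved over)

   so X is returned with *PIN_P = 0, *PLOW = 10, *PHIGH = NULL_TREE,
   and *STRICT_OVERFLOW_P is set because step 2 relied on signed
   overflow being undefined.  */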
4685 tree
4686 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4687 bool *strict_overflow_p)
4689 enum tree_code code;
4690 tree arg0, arg1 = NULL_TREE;
4691 tree exp_type, nexp;
4692 int in_p;
4693 tree low, high;
4694 location_t loc = EXPR_LOCATION (exp);
4696 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4697 and see if we can refine the range. Some of the cases below may not
4698 happen, but it doesn't seem worth worrying about this. We "continue"
4699 the outer loop when we've changed something; otherwise we "break"
4700 the switch, which will "break" the while. */
4702 in_p = 0;
4703 low = high = build_int_cst (TREE_TYPE (exp), 0);
4705 while (1)
4707 code = TREE_CODE (exp);
4708 exp_type = TREE_TYPE (exp);
4709 arg0 = NULL_TREE;
4711 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4713 if (TREE_OPERAND_LENGTH (exp) > 0)
4714 arg0 = TREE_OPERAND (exp, 0);
4715 if (TREE_CODE_CLASS (code) == tcc_binary
4716 || TREE_CODE_CLASS (code) == tcc_comparison
4717 || (TREE_CODE_CLASS (code) == tcc_expression
4718 && TREE_OPERAND_LENGTH (exp) > 1))
4719 arg1 = TREE_OPERAND (exp, 1);
4721 if (arg0 == NULL_TREE)
4722 break;
4724 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4725 &high, &in_p, strict_overflow_p);
4726 if (nexp == NULL_TREE)
4727 break;
4728 exp = nexp;
4731 /* If EXP is a constant, we can evaluate whether this is true or false. */
4732 if (TREE_CODE (exp) == INTEGER_CST)
4734 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4735 exp, 0, low, 0))
4736 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4737 exp, 1, high, 1)));
4738 low = high = 0;
4739 exp = 0;
4742 *pin_p = in_p, *plow = low, *phigh = high;
4743 return exp;
4746 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4747 type, TYPE, return an expression to test if EXP is in (or out of, depending
4748 on IN_P) the range. Return 0 if the test couldn't be created. */
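/* Illustrative results (not from the original sources), with EXP of
   unsigned 32-bit type:

     in_p = 1, low = 0, high = H  ->  exp <= H
     in_p = 1, low = 2, high = 5  ->  exp - 2 <= 3
     in_p = 0, low = 2, high = 5  ->  the in_p = 1 check, inverted via
                                      invert_truthvalue_loc  */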
4750 tree
4751 build_range_check (location_t loc, tree type, tree exp, int in_p,
4752 tree low, tree high)
4754 tree etype = TREE_TYPE (exp), value;
4756 /* Disable this optimization for function pointer expressions
4757 on targets that require function pointer canonicalization. */
4758 if (targetm.have_canonicalize_funcptr_for_compare ()
4759 && TREE_CODE (etype) == POINTER_TYPE
4760 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4761 return NULL_TREE;
4763 if (! in_p)
4765 value = build_range_check (loc, type, exp, 1, low, high);
4766 if (value != 0)
4767 return invert_truthvalue_loc (loc, value);
4769 return 0;
4772 if (low == 0 && high == 0)
4773 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4775 if (low == 0)
4776 return fold_build2_loc (loc, LE_EXPR, type, exp,
4777 fold_convert_loc (loc, etype, high));
4779 if (high == 0)
4780 return fold_build2_loc (loc, GE_EXPR, type, exp,
4781 fold_convert_loc (loc, etype, low));
4783 if (operand_equal_p (low, high, 0))
4784 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4785 fold_convert_loc (loc, etype, low));
4787 if (integer_zerop (low))
4789 if (! TYPE_UNSIGNED (etype))
4791 etype = unsigned_type_for (etype);
4792 high = fold_convert_loc (loc, etype, high);
4793 exp = fold_convert_loc (loc, etype, exp);
4795 return build_range_check (loc, type, exp, 1, 0, high);
4798 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4799 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4801 int prec = TYPE_PRECISION (etype);
4803 if (wi::mask (prec - 1, false, prec) == high)
4805 if (TYPE_UNSIGNED (etype))
4807 tree signed_etype = signed_type_for (etype);
4808 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4809 etype
4810 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4811 else
4812 etype = signed_etype;
4813 exp = fold_convert_loc (loc, etype, exp);
4815 return fold_build2_loc (loc, GT_EXPR, type, exp,
4816 build_int_cst (etype, 0));
4820 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4821 This requires wrap-around arithmetic for the type of the expression.
4822 First make sure that arithmetic in this type is valid, then make sure
4823 that it wraps around. */
4824 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4825 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4826 TYPE_UNSIGNED (etype));
4828 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4830 tree utype, minv, maxv;
4832 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4833 for the type in question, as we rely on this here. */
4834 utype = unsigned_type_for (etype);
4835 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4836 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4837 build_int_cst (TREE_TYPE (maxv), 1), 1);
4838 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4840 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4841 minv, 1, maxv, 1)))
4842 etype = utype;
4843 else
4844 return 0;
4847 high = fold_convert_loc (loc, etype, high);
4848 low = fold_convert_loc (loc, etype, low);
4849 exp = fold_convert_loc (loc, etype, exp);
4851 value = const_binop (MINUS_EXPR, high, low);
4854 if (POINTER_TYPE_P (etype))
4856 if (value != 0 && !TREE_OVERFLOW (value))
4858 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4859 return build_range_check (loc, type,
4860 fold_build_pointer_plus_loc (loc, exp, low),
4861 1, build_int_cst (etype, 0), value);
4863 return 0;
4866 if (value != 0 && !TREE_OVERFLOW (value))
4867 return build_range_check (loc, type,
4868 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4869 1, build_int_cst (etype, 0), value);
4871 return 0;
4874 /* Return the predecessor of VAL in its type, handling the infinite case. */
4876 static tree
4877 range_predecessor (tree val)
4879 tree type = TREE_TYPE (val);
4881 if (INTEGRAL_TYPE_P (type)
4882 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4883 return 0;
4884 else
4885 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4886 build_int_cst (TREE_TYPE (val), 1), 0);
4889 /* Return the successor of VAL in its type, handling the infinite case. */
4891 static tree
4892 range_successor (tree val)
4894 tree type = TREE_TYPE (val);
4896 if (INTEGRAL_TYPE_P (type)
4897 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4898 return 0;
4899 else
4900 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4901 build_int_cst (TREE_TYPE (val), 1), 0);
4904 /* Given two ranges, see if we can merge them into one. Return 1 if we
4905 can, 0 if we can't. Set the output range into the specified parameters. */
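/* Two illustrative merges (not from the original sources), in the
   +/- notation introduced above:

     + [2, 5]  and  + [4, 9]  ->  + [4, 5]   (the overlap)
     - [2, 5]  and  - [6, 9]  ->  - [2, 9]   (adjacent: 5 + 1 == 6)

   The second form is how callers compute an "or" of ranges: hand in
   both ranges inverted and invert the result, as fold_range_test
   does below.  */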
4907 bool
4908 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4909 tree high0, int in1_p, tree low1, tree high1)
4911 int no_overlap;
4912 int subset;
4913 int temp;
4914 tree tem;
4915 int in_p;
4916 tree low, high;
4917 int lowequal = ((low0 == 0 && low1 == 0)
4918 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4919 low0, 0, low1, 0)));
4920 int highequal = ((high0 == 0 && high1 == 0)
4921 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4922 high0, 1, high1, 1)));
4924 /* Make range 0 be the range that starts first, or ends last if they
4925 start at the same value. Swap them if it isn't. */
4926 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4927 low0, 0, low1, 0))
4928 || (lowequal
4929 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4930 high1, 1, high0, 1))))
4932 temp = in0_p, in0_p = in1_p, in1_p = temp;
4933 tem = low0, low0 = low1, low1 = tem;
4934 tem = high0, high0 = high1, high1 = tem;
4937 /* Now flag two cases, whether the ranges are disjoint or whether the
4938 second range is totally subsumed in the first. Note that the tests
4939 below are simplified by the ones above. */
4940 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4941 high0, 1, low1, 0));
4942 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4943 high1, 1, high0, 1));
4945 /* We now have four cases, depending on whether we are including or
4946 excluding the two ranges. */
4947 if (in0_p && in1_p)
4949 /* If they don't overlap, the result is false. If the second range
4950 is a subset it is the result. Otherwise, the range is from the start
4951 of the second to the end of the first. */
4952 if (no_overlap)
4953 in_p = 0, low = high = 0;
4954 else if (subset)
4955 in_p = 1, low = low1, high = high1;
4956 else
4957 in_p = 1, low = low1, high = high0;
4960 else if (in0_p && ! in1_p)
4962 /* If they don't overlap, the result is the first range. If they are
4963 equal, the result is false. If the second range is a subset of the
4964 first, and the ranges begin at the same place, we go from just after
4965 the end of the second range to the end of the first. If the second
4966 range is not a subset of the first, or if it is a subset and both
4967 ranges end at the same place, the range starts at the start of the
4968 first range and ends just before the second range.
4969 Otherwise, we can't describe this as a single range. */
4970 if (no_overlap)
4971 in_p = 1, low = low0, high = high0;
4972 else if (lowequal && highequal)
4973 in_p = 0, low = high = 0;
4974 else if (subset && lowequal)
4976 low = range_successor (high1);
4977 high = high0;
4978 in_p = 1;
4979 if (low == 0)
4981 /* We are in the weird situation where high0 > high1 but
4982 high1 has no successor. Punt. */
4983 return 0;
4986 else if (! subset || highequal)
4988 low = low0;
4989 high = range_predecessor (low1);
4990 in_p = 1;
4991 if (high == 0)
4993 /* low0 < low1 but low1 has no predecessor. Punt. */
4994 return 0;
4997 else
4998 return 0;
5001 else if (! in0_p && in1_p)
5003 /* If they don't overlap, the result is the second range. If the second
5004 is a subset of the first, the result is false. Otherwise,
5005 the range starts just after the first range and ends at the
5006 end of the second. */
5007 if (no_overlap)
5008 in_p = 1, low = low1, high = high1;
5009 else if (subset || highequal)
5010 in_p = 0, low = high = 0;
5011 else
5013 low = range_successor (high0);
5014 high = high1;
5015 in_p = 1;
5016 if (low == 0)
5018 /* high1 > high0 but high0 has no successor. Punt. */
5019 return 0;
5024 else
5026 /* The case where we are excluding both ranges. Here the complex case
5027 is if they don't overlap. In that case, the only time we have a
5028 range is if they are adjacent. If the second is a subset of the
5029 first, the result is the first. Otherwise, the range to exclude
5030 starts at the beginning of the first range and ends at the end of the
5031 second. */
5032 if (no_overlap)
5034 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5035 range_successor (high0),
5036 1, low1, 0)))
5037 in_p = 0, low = low0, high = high1;
5038 else
5040 /* Canonicalize - [min, x] into - [-, x]. */
5041 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5042 switch (TREE_CODE (TREE_TYPE (low0)))
5044 case ENUMERAL_TYPE:
5045 if (TYPE_PRECISION (TREE_TYPE (low0))
5046 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5047 break;
5048 /* FALLTHROUGH */
5049 case INTEGER_TYPE:
5050 if (tree_int_cst_equal (low0,
5051 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5052 low0 = 0;
5053 break;
5054 case POINTER_TYPE:
5055 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5056 && integer_zerop (low0))
5057 low0 = 0;
5058 break;
5059 default:
5060 break;
5063 /* Canonicalize - [x, max] into - [x, -]. */
5064 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5065 switch (TREE_CODE (TREE_TYPE (high1)))
5067 case ENUMERAL_TYPE:
5068 if (TYPE_PRECISION (TREE_TYPE (high1))
5069 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5070 break;
5071 /* FALLTHROUGH */
5072 case INTEGER_TYPE:
5073 if (tree_int_cst_equal (high1,
5074 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5075 high1 = 0;
5076 break;
5077 case POINTER_TYPE:
5078 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5079 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5080 high1, 1,
5081 build_int_cst (TREE_TYPE (high1), 1),
5082 1)))
5083 high1 = 0;
5084 break;
5085 default:
5086 break;
5089 /* The ranges might also be adjacent between the maximum and
5090 minimum values of the given type. For
5091 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5092 return + [x + 1, y - 1]. */
5093 if (low0 == 0 && high1 == 0)
5095 low = range_successor (high0);
5096 high = range_predecessor (low1);
5097 if (low == 0 || high == 0)
5098 return 0;
5100 in_p = 1;
5102 else
5103 return 0;
5106 else if (subset)
5107 in_p = 0, low = low0, high = high0;
5108 else
5109 in_p = 0, low = low0, high = high1;
5112 *pin_p = in_p, *plow = low, *phigh = high;
5113 return 1;
5117 /* Subroutine of fold, looking inside expressions of the form
5118 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5119 of the COND_EXPR. This function is also used to optimize
5120 A op B ? C : A, by reversing the comparison first.
5122 Return a folded expression whose code is not a COND_EXPR
5123 anymore, or NULL_TREE if no folding opportunity is found. */
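/* Representative folds performed here (illustration, not from the
   original sources), for types where signed zeros and NaNs need not
   be honored:

     A > 0 ? A : -A   ->  ABS_EXPR <A>
     A < B ? A : B    ->  MIN_EXPR <A, B>
     A != B ? A : B   ->  A

   The body below spells out exactly when each case is safe (signed
   zeros, NaNs, traps, and possible lvalue uses of the COND_EXPR).  */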
5125 static tree
5126 fold_cond_expr_with_comparison (location_t loc, tree type,
5127 tree arg0, tree arg1, tree arg2)
5129 enum tree_code comp_code = TREE_CODE (arg0);
5130 tree arg00 = TREE_OPERAND (arg0, 0);
5131 tree arg01 = TREE_OPERAND (arg0, 1);
5132 tree arg1_type = TREE_TYPE (arg1);
5133 tree tem;
5135 STRIP_NOPS (arg1);
5136 STRIP_NOPS (arg2);
5138 /* If we have A op 0 ? A : -A, consider applying the following
5139 transformations:
5141 A == 0? A : -A same as -A
5142 A != 0? A : -A same as A
5143 A >= 0? A : -A same as abs (A)
5144 A > 0? A : -A same as abs (A)
5145 A <= 0? A : -A same as -abs (A)
5146 A < 0? A : -A same as -abs (A)
5148 None of these transformations work for modes with signed
5149 zeros. If A is +/-0, the first two transformations will
5150 change the sign of the result (from +0 to -0, or vice
5151 versa). The last four will fix the sign of the result,
5152 even though the original expressions could be positive or
5153 negative, depending on the sign of A.
5155 Note that all these transformations are correct if A is
5156 NaN, since the two alternatives (A and -A) are also NaNs. */
5157 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5158 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5159 ? real_zerop (arg01)
5160 : integer_zerop (arg01))
5161 && ((TREE_CODE (arg2) == NEGATE_EXPR
5162 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5163 /* In the case that A is of the form X-Y, '-A' (arg2) may
5164 have already been folded to Y-X; check for that. */
5165 || (TREE_CODE (arg1) == MINUS_EXPR
5166 && TREE_CODE (arg2) == MINUS_EXPR
5167 && operand_equal_p (TREE_OPERAND (arg1, 0),
5168 TREE_OPERAND (arg2, 1), 0)
5169 && operand_equal_p (TREE_OPERAND (arg1, 1),
5170 TREE_OPERAND (arg2, 0), 0))))
5171 switch (comp_code)
5173 case EQ_EXPR:
5174 case UNEQ_EXPR:
5175 tem = fold_convert_loc (loc, arg1_type, arg1);
5176 return fold_convert_loc (loc, type, negate_expr (tem));
5177 case NE_EXPR:
5178 case LTGT_EXPR:
5179 return fold_convert_loc (loc, type, arg1);
5180 case UNGE_EXPR:
5181 case UNGT_EXPR:
5182 if (flag_trapping_math)
5183 break;
5184 /* Fall through. */
5185 case GE_EXPR:
5186 case GT_EXPR:
5187 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5188 break;
5189 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5190 return fold_convert_loc (loc, type, tem);
5191 case UNLE_EXPR:
5192 case UNLT_EXPR:
5193 if (flag_trapping_math)
5194 break;
5195 /* FALLTHRU */
5196 case LE_EXPR:
5197 case LT_EXPR:
5198 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5199 break;
5200 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5201 return negate_expr (fold_convert_loc (loc, type, tem));
5202 default:
5203 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5204 break;
5207 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5208 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5209 both transformations are correct when A is NaN: A != 0
5210 is then true, and A == 0 is false. */
5212 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5213 && integer_zerop (arg01) && integer_zerop (arg2))
5215 if (comp_code == NE_EXPR)
5216 return fold_convert_loc (loc, type, arg1);
5217 else if (comp_code == EQ_EXPR)
5218 return build_zero_cst (type);
5221 /* Try some transformations of A op B ? A : B.
5223 A == B? A : B same as B
5224 A != B? A : B same as A
5225 A >= B? A : B same as max (A, B)
5226 A > B? A : B same as max (B, A)
5227 A <= B? A : B same as min (A, B)
5228 A < B? A : B same as min (B, A)
5230 As above, these transformations don't work in the presence
5231 of signed zeros. For example, if A and B are zeros of
5232 opposite sign, the first two transformations will change
5233 the sign of the result. In the last four, the original
5234 expressions give different results for (A=+0, B=-0) and
5235 (A=-0, B=+0), but the transformed expressions do not.
5237 The first two transformations are correct if either A or B
5238 is a NaN. In the first transformation, the condition will
5239 be false, and B will indeed be chosen. In the case of the
5240 second transformation, the condition A != B will be true,
5241 and A will be chosen.
5243 The conversions to max() and min() are not correct if B is
5244 a number and A is not. The conditions in the original
5245 expressions will be false, so all four give B. The min()
5246 and max() versions would give a NaN instead. */
5247 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5248 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5249 /* Avoid these transformations if the COND_EXPR may be used
5250 as an lvalue in the C++ front-end. PR c++/19199. */
5251 && (in_gimple_form
5252 || VECTOR_TYPE_P (type)
5253 || (! lang_GNU_CXX ()
5254 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5255 || ! maybe_lvalue_p (arg1)
5256 || ! maybe_lvalue_p (arg2)))
5258 tree comp_op0 = arg00;
5259 tree comp_op1 = arg01;
5260 tree comp_type = TREE_TYPE (comp_op0);
5262 switch (comp_code)
5264 case EQ_EXPR:
5265 return fold_convert_loc (loc, type, arg2);
5266 case NE_EXPR:
5267 return fold_convert_loc (loc, type, arg1);
5268 case LE_EXPR:
5269 case LT_EXPR:
5270 case UNLE_EXPR:
5271 case UNLT_EXPR:
5272 /* In C++ a ?: expression can be an lvalue, so put the
5273 operand which will be used if they are equal first
5274 so that we can convert this back to the
5275 corresponding COND_EXPR. */
5276 if (!HONOR_NANS (arg1))
5278 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5279 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5280 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5281 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5282 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5283 comp_op1, comp_op0);
5284 return fold_convert_loc (loc, type, tem);
5286 break;
5287 case GE_EXPR:
5288 case GT_EXPR:
5289 case UNGE_EXPR:
5290 case UNGT_EXPR:
5291 if (!HONOR_NANS (arg1))
5293 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5294 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5295 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5296 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5297 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5298 comp_op1, comp_op0);
5299 return fold_convert_loc (loc, type, tem);
5301 break;
5302 case UNEQ_EXPR:
5303 if (!HONOR_NANS (arg1))
5304 return fold_convert_loc (loc, type, arg2);
5305 break;
5306 case LTGT_EXPR:
5307 if (!HONOR_NANS (arg1))
5308 return fold_convert_loc (loc, type, arg1);
5309 break;
5310 default:
5311 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5312 break;
5316 return NULL_TREE;
5321 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5322 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5323 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5324 false) >= 2)
5325 #endif
5327 /* EXP is some logical combination of boolean tests. See if we can
5328 merge it into some range test. Return the new tree if so. */
5330 static tree
5331 fold_range_test (location_t loc, enum tree_code code, tree type,
5332 tree op0, tree op1)
5334 int or_op = (code == TRUTH_ORIF_EXPR
5335 || code == TRUTH_OR_EXPR);
5336 int in0_p, in1_p, in_p;
5337 tree low0, low1, low, high0, high1, high;
5338 bool strict_overflow_p = false;
5339 tree tem, lhs, rhs;
5340 const char * const warnmsg = G_("assuming signed overflow does not occur "
5341 "when simplifying range test");
5343 if (!INTEGRAL_TYPE_P (type))
5344 return 0;
5346 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5347 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5349 /* If this is an OR operation, invert both sides; we will invert
5350 again at the end. */
5351 if (or_op)
5352 in0_p = ! in0_p, in1_p = ! in1_p;
5354 /* If both expressions are the same, if we can merge the ranges, and we
5355 can build the range test, return it or its inversion. If one of the
5356 ranges is always true or always false, consider it to be the same
5357 expression as the other. */
5358 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5359 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5360 in1_p, low1, high1)
5361 && 0 != (tem = (build_range_check (loc, type,
5362 lhs != 0 ? lhs
5363 : rhs != 0 ? rhs : integer_zero_node,
5364 in_p, low, high))))
5366 if (strict_overflow_p)
5367 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5368 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5371 /* On machines where branches are expensive, if this is a
5372 short-circuited branch and the underlying object on both sides
5373 is the same, make a non-short-circuit operation. */
5374 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5375 && lhs != 0 && rhs != 0
5376 && (code == TRUTH_ANDIF_EXPR
5377 || code == TRUTH_ORIF_EXPR)
5378 && operand_equal_p (lhs, rhs, 0))
5380 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5381 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5382 which cases we can't do this. */
5383 if (simple_operand_p (lhs))
5384 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5385 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5386 type, op0, op1);
5388 else if (!lang_hooks.decls.global_bindings_p ()
5389 && !CONTAINS_PLACEHOLDER_P (lhs))
5391 tree common = save_expr (lhs);
5393 if (0 != (lhs = build_range_check (loc, type, common,
5394 or_op ? ! in0_p : in0_p,
5395 low0, high0))
5396 && (0 != (rhs = build_range_check (loc, type, common,
5397 or_op ? ! in1_p : in1_p,
5398 low1, high1))))
5400 if (strict_overflow_p)
5401 fold_overflow_warning (warnmsg,
5402 WARN_STRICT_OVERFLOW_COMPARISON);
5403 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5404 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5405 type, lhs, rhs);
5410 return 0;
5413 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5414 bit value. Arrange things so the extra bits will be set to zero if and
5415 only if C is sign-extended to its full width. If MASK is nonzero,
5416 it is an INTEGER_CST that should be AND'ed with the extra bits. */
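/* A worked bit-level example (not part of the original sources), with
   P = 8 in a 32-bit mode: for C = 0xffffffff, a sign-extended 8-bit -1,
   the extracted sign bit is 1, the shifts turn it into
   TEMP = 0xffffff00, and C ^ TEMP = 0x000000ff, i.e. the extra bits
   come out zero.  Had C been zero-extended (C = 0x000000ff), the same
   TEMP would give C ^ TEMP = 0xffffffff, with the extra bits set.  */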
5418 static tree
5419 unextend (tree c, int p, int unsignedp, tree mask)
5421 tree type = TREE_TYPE (c);
5422 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5423 tree temp;
5425 if (p == modesize || unsignedp)
5426 return c;
5428 /* We work by getting just the sign bit into the low-order bit, then
5429 into the high-order bit, then sign-extend. We then XOR that value
5430 with C. */
5431 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5433 /* We must use a signed type in order to get an arithmetic right shift.
5434 However, we must also avoid introducing accidental overflows, so that
5435 a subsequent call to integer_zerop will work. Hence we must
5436 do the type conversion here. At this point, the constant is either
5437 zero or one, and the conversion to a signed type can never overflow.
5438 We could get an overflow if this conversion is done anywhere else. */
5439 if (TYPE_UNSIGNED (type))
5440 temp = fold_convert (signed_type_for (type), temp);
5442 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5443 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5444 if (mask != 0)
5445 temp = const_binop (BIT_AND_EXPR, temp,
5446 fold_convert (TREE_TYPE (c), mask));
5447 /* If necessary, convert the type back to match the type of C. */
5448 if (TYPE_UNSIGNED (type))
5449 temp = fold_convert (type, temp);
5451 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5454 /* For an expression that has the form
5455 (A && B) || ~B
5456 or
5457 (A || B) && ~B,
5458 we can drop one of the inner expressions and simplify to
5459 A || ~B
5460 or
5461 A && ~B
5462 LOC is the location of the resulting expression. OP is the inner
5463 logical operation; the left-hand side in the examples above, while CMPOP
5464 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5465 removing a condition that guards another, as in
5466 (A != NULL && A->...) || A == NULL
5467 which we must not transform. If RHS_ONLY is true, only eliminate the
5468 right-most operand of the inner logical operation. */
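/* A concrete instance (illustration, not from the original sources):

     (x != 0 && y > 2) || y <= 2   ->   x != 0 || y <= 2

   Here CMPOP is y <= 2 and y > 2 is its inverse comparison, so the
   inner y > 2 can be dropped: whenever it mattered, CMPOP already
   decides the result.  (With NaNs honored, y > 2 and y <= 2 are not
   inverses, which is why invert_tree_comparison is consulted.)  */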
5470 static tree
5471 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5472 bool rhs_only)
5474 tree type = TREE_TYPE (cmpop);
5475 enum tree_code code = TREE_CODE (cmpop);
5476 enum tree_code truthop_code = TREE_CODE (op);
5477 tree lhs = TREE_OPERAND (op, 0);
5478 tree rhs = TREE_OPERAND (op, 1);
5479 tree orig_lhs = lhs, orig_rhs = rhs;
5480 enum tree_code rhs_code = TREE_CODE (rhs);
5481 enum tree_code lhs_code = TREE_CODE (lhs);
5482 enum tree_code inv_code;
5484 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5485 return NULL_TREE;
5487 if (TREE_CODE_CLASS (code) != tcc_comparison)
5488 return NULL_TREE;
5490 if (rhs_code == truthop_code)
5492 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5493 if (newrhs != NULL_TREE)
5495 rhs = newrhs;
5496 rhs_code = TREE_CODE (rhs);
5499 if (lhs_code == truthop_code && !rhs_only)
5501 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5502 if (newlhs != NULL_TREE)
5504 lhs = newlhs;
5505 lhs_code = TREE_CODE (lhs);
5509 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5510 if (inv_code == rhs_code
5511 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5512 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5513 return lhs;
5514 if (!rhs_only && inv_code == lhs_code
5515 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5516 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5517 return rhs;
5518 if (rhs != orig_rhs || lhs != orig_lhs)
5519 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5520 lhs, rhs);
5521 return NULL_TREE;
5524 /* Find ways of folding logical expressions of LHS and RHS:
5525 Try to merge two comparisons to the same innermost item.
5526 Look for range tests like "ch >= '0' && ch <= '9'".
5527 Look for combinations of simple terms on machines with expensive branches
5528 and evaluate the RHS unconditionally.
5530 For example, if we have p->a == 2 && p->b == 4 and we can make an
5531 object large enough to span both A and B, we can do this with a comparison
5532 against the object ANDed with the a mask.
5534 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5535 operations to do this with one comparison.
5537 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5538 function and the one above.
5540 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5541 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5543 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5544 two operands.
5546 We return the simplified tree or 0 if no optimization is possible. */
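/* A sketch of the main win, with an assumed layout (not from the
   original sources):

     struct { unsigned a : 8, b : 8; } *p;     p->a == 2 && p->b == 4

   Both fields live in one 16-bit chunk, so the two compares can merge
   into a single masked compare of that chunk against a combined
   constant, roughly

     (chunk & 0xffff) == 0x0402     (little-endian layout)

   saving a load and a branch.  */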
5548 static tree
5549 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5550 tree lhs, tree rhs)
5552 /* If this is the "or" of two comparisons, we can do something if
5553 the comparisons are NE_EXPR. If this is the "and", we can do something
5554 if the comparisons are EQ_EXPR. I.e.,
5555 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5557 WANTED_CODE is this operation code. For single bit fields, we can
5558 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5559 comparison for one-bit fields. */
5561 enum tree_code wanted_code;
5562 enum tree_code lcode, rcode;
5563 tree ll_arg, lr_arg, rl_arg, rr_arg;
5564 tree ll_inner, lr_inner, rl_inner, rr_inner;
5565 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5566 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5567 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5568 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5569 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5570 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5571 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5572 machine_mode lnmode, rnmode;
5573 tree ll_mask, lr_mask, rl_mask, rr_mask;
5574 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5575 tree l_const, r_const;
5576 tree lntype, rntype, result;
5577 HOST_WIDE_INT first_bit, end_bit;
5578 int volatilep;
5580 /* Start by getting the comparison codes. Fail if anything is volatile.
5581 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5582 it were surrounded with a NE_EXPR. */
5584 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5585 return 0;
5587 lcode = TREE_CODE (lhs);
5588 rcode = TREE_CODE (rhs);
5590 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5592 lhs = build2 (NE_EXPR, truth_type, lhs,
5593 build_int_cst (TREE_TYPE (lhs), 0));
5594 lcode = NE_EXPR;
5597 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5599 rhs = build2 (NE_EXPR, truth_type, rhs,
5600 build_int_cst (TREE_TYPE (rhs), 0));
5601 rcode = NE_EXPR;
5604 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5605 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5606 return 0;
5608 ll_arg = TREE_OPERAND (lhs, 0);
5609 lr_arg = TREE_OPERAND (lhs, 1);
5610 rl_arg = TREE_OPERAND (rhs, 0);
5611 rr_arg = TREE_OPERAND (rhs, 1);
5613 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5614 if (simple_operand_p (ll_arg)
5615 && simple_operand_p (lr_arg))
5617 if (operand_equal_p (ll_arg, rl_arg, 0)
5618 && operand_equal_p (lr_arg, rr_arg, 0))
5620 result = combine_comparisons (loc, code, lcode, rcode,
5621 truth_type, ll_arg, lr_arg);
5622 if (result)
5623 return result;
5625 else if (operand_equal_p (ll_arg, rr_arg, 0)
5626 && operand_equal_p (lr_arg, rl_arg, 0))
5628 result = combine_comparisons (loc, code, lcode,
5629 swap_tree_comparison (rcode),
5630 truth_type, ll_arg, lr_arg);
5631 if (result)
5632 return result;
5636 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5637 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5639 /* If the RHS can be evaluated unconditionally and its operands are
5640 simple, it wins to evaluate the RHS unconditionally on machines
5641 with expensive branches. In this case, this isn't a comparison
5642 that can be merged. */
5644 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5645 false) >= 2
5646 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5647 && simple_operand_p (rl_arg)
5648 && simple_operand_p (rr_arg))
5650 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5651 if (code == TRUTH_OR_EXPR
5652 && lcode == NE_EXPR && integer_zerop (lr_arg)
5653 && rcode == NE_EXPR && integer_zerop (rr_arg)
5654 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5655 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5656 return build2_loc (loc, NE_EXPR, truth_type,
5657 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5658 ll_arg, rl_arg),
5659 build_int_cst (TREE_TYPE (ll_arg), 0));
5661 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5662 if (code == TRUTH_AND_EXPR
5663 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5664 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5665 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5666 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5667 return build2_loc (loc, EQ_EXPR, truth_type,
5668 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5669 ll_arg, rl_arg),
5670 build_int_cst (TREE_TYPE (ll_arg), 0));
5673 /* See if the comparisons can be merged. Then get all the parameters for
5674 each side. */
5676 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5677 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5678 return 0;
5680 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5681 volatilep = 0;
5682 ll_inner = decode_field_reference (loc, &ll_arg,
5683 &ll_bitsize, &ll_bitpos, &ll_mode,
5684 &ll_unsignedp, &ll_reversep, &volatilep,
5685 &ll_mask, &ll_and_mask);
5686 lr_inner = decode_field_reference (loc, &lr_arg,
5687 &lr_bitsize, &lr_bitpos, &lr_mode,
5688 &lr_unsignedp, &lr_reversep, &volatilep,
5689 &lr_mask, &lr_and_mask);
5690 rl_inner = decode_field_reference (loc, &rl_arg,
5691 &rl_bitsize, &rl_bitpos, &rl_mode,
5692 &rl_unsignedp, &rl_reversep, &volatilep,
5693 &rl_mask, &rl_and_mask);
5694 rr_inner = decode_field_reference (loc, &rr_arg,
5695 &rr_bitsize, &rr_bitpos, &rr_mode,
5696 &rr_unsignedp, &rr_reversep, &volatilep,
5697 &rr_mask, &rr_and_mask);
5699 /* The inner operation on the lhs of each
5700 comparison must be the same if we are to be able to do anything.
5701 Then see if we have constants. If not, the same must be true for
5702 the rhs's. */
5703 if (volatilep
5704 || ll_reversep != rl_reversep
5705 || ll_inner == 0 || rl_inner == 0
5706 || ! operand_equal_p (ll_inner, rl_inner, 0))
5707 return 0;
5709 if (TREE_CODE (lr_arg) == INTEGER_CST
5710 && TREE_CODE (rr_arg) == INTEGER_CST)
5712 l_const = lr_arg, r_const = rr_arg;
5713 lr_reversep = ll_reversep;
5715 else if (lr_reversep != rr_reversep
5716 || lr_inner == 0 || rr_inner == 0
5717 || ! operand_equal_p (lr_inner, rr_inner, 0))
5718 return 0;
5719 else
5720 l_const = r_const = 0;
5722 /* If either comparison code is not correct for our logical operation,
5723 fail. However, we can convert a one-bit comparison against zero into
5724 the opposite comparison against that bit being set in the field. */
5726 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5727 if (lcode != wanted_code)
5729 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5731 /* Make the left operand unsigned, since we are only interested
5732 in the value of one bit. Otherwise we are doing the wrong
5733 thing below. */
5734 ll_unsignedp = 1;
5735 l_const = ll_mask;
5737 else
5738 return 0;
5741 /* This is analogous to the code for l_const above. */
5742 if (rcode != wanted_code)
5744 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5746 rl_unsignedp = 1;
5747 r_const = rl_mask;
5749 else
5750 return 0;
5753 /* See if we can find a mode that contains both fields being compared on
5754 the left. If we can't, fail. Otherwise, update all constants and masks
5755 to be relative to a field of that size. */
5756 first_bit = MIN (ll_bitpos, rl_bitpos);
5757 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5758 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5759 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5760 volatilep);
5761 if (lnmode == VOIDmode)
5762 return 0;
5764 lnbitsize = GET_MODE_BITSIZE (lnmode);
5765 lnbitpos = first_bit & ~ (lnbitsize - 1);
5766 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5767 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5769 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5771 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5772 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5775 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5776 size_int (xll_bitpos));
5777 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5778 size_int (xrl_bitpos));
5780 if (l_const)
5782 l_const = fold_convert_loc (loc, lntype, l_const);
5783 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5784 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5785 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5786 fold_build1_loc (loc, BIT_NOT_EXPR,
5787 lntype, ll_mask))))
5789 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5791 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5794 if (r_const)
5796 r_const = fold_convert_loc (loc, lntype, r_const);
5797 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5798 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5799 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5800 fold_build1_loc (loc, BIT_NOT_EXPR,
5801 lntype, rl_mask))))
5803 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5805 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5809 /* If the right sides are not constant, do the same for them. Also,
5810 disallow this optimization if a size or signedness mismatch occurs
5811 between the left and right sides. */
5812 if (l_const == 0)
5814 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5815 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5816 /* Make sure the two fields on the right
5817 correspond to the left without being swapped. */
5818 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5819 return 0;
5821 first_bit = MIN (lr_bitpos, rr_bitpos);
5822 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5823 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5824 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5825 volatilep);
5826 if (rnmode == VOIDmode)
5827 return 0;
5829 rnbitsize = GET_MODE_BITSIZE (rnmode);
5830 rnbitpos = first_bit & ~ (rnbitsize - 1);
5831 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5832 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5834 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5836 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5837 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5840 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5841 rntype, lr_mask),
5842 size_int (xlr_bitpos));
5843 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5844 rntype, rr_mask),
5845 size_int (xrr_bitpos));
5847 /* Make a mask that corresponds to both fields being compared.
5848 Do this for both items being compared. If the operands are the
5849 same size and the bits being compared are in the same position
5850 then we can do this by masking both and comparing the masked
5851 results. */
5852 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5853 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5854 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5856 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5857 lntype, lnbitsize, lnbitpos,
5858 ll_unsignedp || rl_unsignedp, ll_reversep);
5859 if (! all_ones_mask_p (ll_mask, lnbitsize))
5860 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5862 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5863 rntype, rnbitsize, rnbitpos,
5864 lr_unsignedp || rr_unsignedp, lr_reversep);
5865 if (! all_ones_mask_p (lr_mask, rnbitsize))
5866 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5868 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5871 /* There is still another way we can do something: If both pairs of
5872 fields being compared are adjacent, we may be able to make a wider
5873 field containing them both.
5875 Note that we still must mask the lhs/rhs expressions. Furthermore,
5876 the mask must be shifted to account for the shift done by
5877 make_bit_field_ref. */
5878 if ((ll_bitsize + ll_bitpos == rl_bitpos
5879 && lr_bitsize + lr_bitpos == rr_bitpos)
5880 || (ll_bitpos == rl_bitpos + rl_bitsize
5881 && lr_bitpos == rr_bitpos + rr_bitsize))
5883 tree type;
5885 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5886 ll_bitsize + rl_bitsize,
5887 MIN (ll_bitpos, rl_bitpos),
5888 ll_unsignedp, ll_reversep);
5889 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5890 lr_bitsize + rr_bitsize,
5891 MIN (lr_bitpos, rr_bitpos),
5892 lr_unsignedp, lr_reversep);
5894 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5895 size_int (MIN (xll_bitpos, xrl_bitpos)));
5896 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5897 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5899 /* Convert to the smaller type before masking out unwanted bits. */
5900 type = lntype;
5901 if (lntype != rntype)
5903 if (lnbitsize > rnbitsize)
5905 lhs = fold_convert_loc (loc, rntype, lhs);
5906 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5907 type = rntype;
5909 else if (lnbitsize < rnbitsize)
5911 rhs = fold_convert_loc (loc, lntype, rhs);
5912 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5913 type = lntype;
5917 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5918 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5920 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5921 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5923 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5926 return 0;
5929 /* Handle the case of comparisons with constants. If there is something in
5930 common between the masks, those bits of the constants must be the same.
5931 If not, the condition is always false. Test for this to avoid generating
5932 incorrect code below. */
5933 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5934 if (! integer_zerop (result)
5935 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5936 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5938 if (wanted_code == NE_EXPR)
5940 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5941 return constant_boolean_node (true, truth_type);
5943 else
5945 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5946 return constant_boolean_node (false, truth_type);
5950 /* Construct the expression we will return. First get the component
5951 reference we will make. Unless the mask is all ones the width of
5952 that field, perform the mask operation. Then compare with the
5953 merged constant. */
5954 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5955 lntype, lnbitsize, lnbitpos,
5956 ll_unsignedp || rl_unsignedp, ll_reversep);
5958 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5959 if (! all_ones_mask_p (ll_mask, lnbitsize))
5960 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5962 return build2_loc (loc, wanted_code, truth_type, result,
5963 const_binop (BIT_IOR_EXPR, l_const, r_const));
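/* Illustrative sketch, not part of this file: the kind of rewrite the
   function above aims for, shown on raw bytes for a little-endian
   host.  Two adjacent single-byte equality tests joined by && become
   one masked two-byte test.  All names are hypothetical.  */
static int
example_merged_fields (const unsigned char *p)
{
  int separate = (p[0] == 2 && p[1] == 4);
  unsigned both = (unsigned) p[0] | ((unsigned) p[1] << 8);
  int merged = (both == ((4u << 8) | 2u));
  return separate == merged;   /* Always 1.  */
}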
5966 /* T is an integer expression that is being multiplied, divided, or taken a
5967 modulus (CODE says which and what kind of divide or modulus) by a
5968 constant C. See if we can eliminate that operation by folding it with
5969 other operations already in T. WIDE_TYPE, if non-null, is a type that
5970 should be used for the computation if wider than our type.
5972 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5973 (X * 2) + (Y * 4). We must, however, be assured that either the original
5974 expression would not overflow or that overflow is undefined for the type
5975 in the language in question.
5977 If we return a non-null expression, it is an equivalent form of the
5978 original computation, but need not be in the original type.
5980 We set *STRICT_OVERFLOW_P to true if the return value depends on
5981 signed overflow being undefined. Otherwise we do not change
5982 *STRICT_OVERFLOW_P. */
5984 static tree
5985 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5986 bool *strict_overflow_p)
5988 /* To avoid exponential search depth, refuse to allow recursion past
5989 three levels. Beyond that (1) it's highly unlikely that we'll find
5990 something interesting and (2) we've probably processed it before
5991 when we built the inner expression. */
5993 static int depth;
5994 tree ret;
5996 if (depth > 3)
5997 return NULL;
5999 depth++;
6000 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6001 depth--;
6003 return ret;
6006 static tree
6007 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6008 bool *strict_overflow_p)
6010 tree type = TREE_TYPE (t);
6011 enum tree_code tcode = TREE_CODE (t);
6012 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6013 > GET_MODE_SIZE (TYPE_MODE (type)))
6014 ? wide_type : type);
6015 tree t1, t2;
6016 int same_p = tcode == code;
6017 tree op0 = NULL_TREE, op1 = NULL_TREE;
6018 bool sub_strict_overflow_p;
6020 /* Don't deal with constants of zero here; they confuse the code below. */
6021 if (integer_zerop (c))
6022 return NULL_TREE;
6024 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6025 op0 = TREE_OPERAND (t, 0);
6027 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6028 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6030 /* Note that we need not handle conditional operations here since fold
6031 already handles those cases. So just do arithmetic here. */
6032 switch (tcode)
6034 case INTEGER_CST:
6035 /* For a constant, we can always simplify if we are a multiply
6036 or (for divide and modulus) if it is a multiple of our constant. */
6037 if (code == MULT_EXPR
6038 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6040 tree tem = const_binop (code, fold_convert (ctype, t),
6041 fold_convert (ctype, c));
6042 /* If the multiplication overflowed, we lost information on it.
6043 See PR68142 and PR69845. */
6044 if (TREE_OVERFLOW (tem))
6045 return NULL_TREE;
6046 return tem;
6048 break;
6050 CASE_CONVERT: case NON_LVALUE_EXPR:
6051 /* If op0 is an expression ... */
6052 if ((COMPARISON_CLASS_P (op0)
6053 || UNARY_CLASS_P (op0)
6054 || BINARY_CLASS_P (op0)
6055 || VL_EXP_CLASS_P (op0)
6056 || EXPRESSION_CLASS_P (op0))
6057 /* ... and has wrapping overflow, and its type is smaller
6058 than ctype, then we cannot pass through as widening. */
6059 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6060 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6061 && (TYPE_PRECISION (ctype)
6062 > TYPE_PRECISION (TREE_TYPE (op0))))
6063 /* ... or this is a truncation (t is narrower than op0),
6064 then we cannot pass through this narrowing. */
6065 || (TYPE_PRECISION (type)
6066 < TYPE_PRECISION (TREE_TYPE (op0)))
6067 /* ... or signedness changes for division or modulus,
6068 then we cannot pass through this conversion. */
6069 || (code != MULT_EXPR
6070 && (TYPE_UNSIGNED (ctype)
6071 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6072 /* ... or has undefined overflow while the converted to
6073 type has not, we cannot do the operation in the inner type
6074 as that would introduce undefined overflow. */
6075 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6076 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6077 && !TYPE_OVERFLOW_UNDEFINED (type))))
6078 break;
6080 /* Pass the constant down and see if we can make a simplification. If
6081 we can, replace this expression with the inner simplification for
6082 possible later conversion to our or some other type. */
6083 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6084 && TREE_CODE (t2) == INTEGER_CST
6085 && !TREE_OVERFLOW (t2)
6086 && (0 != (t1 = extract_muldiv (op0, t2, code,
6087 code == MULT_EXPR
6088 ? ctype : NULL_TREE,
6089 strict_overflow_p))))
6090 return t1;
6091 break;
6093 case ABS_EXPR:
6094 /* If widening the type changes it from signed to unsigned, then we
6095 must avoid building ABS_EXPR itself as unsigned. */
6096 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6098 tree cstype = (*signed_type_for) (ctype);
6099 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6100 != 0)
6102 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6103 return fold_convert (ctype, t1);
6105 break;
6107 /* If the constant is negative, we cannot simplify this. */
6108 if (tree_int_cst_sgn (c) == -1)
6109 break;
6110 /* FALLTHROUGH */
6111 case NEGATE_EXPR:
6112 /* For division and modulus, type can't be unsigned, as e.g.
6113 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6114 For signed types, even with wrapping overflow, this is fine. */
6115 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6116 break;
6117 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6118 != 0)
6119 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6120 break;
6122 case MIN_EXPR: case MAX_EXPR:
6123 /* If widening the type changes the signedness, then we can't perform
6124 this optimization as that changes the result. */
6125 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6126 break;
6128 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6129 sub_strict_overflow_p = false;
6130 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6131 &sub_strict_overflow_p)) != 0
6132 && (t2 = extract_muldiv (op1, c, code, wide_type,
6133 &sub_strict_overflow_p)) != 0)
6135 if (tree_int_cst_sgn (c) < 0)
6136 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6137 if (sub_strict_overflow_p)
6138 *strict_overflow_p = true;
6139 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6140 fold_convert (ctype, t2));
6142 break;
6144 case LSHIFT_EXPR: case RSHIFT_EXPR:
6145 /* If the second operand is constant, this is a multiplication
6146 or floor division, by a power of two, so we can treat it that
6147 way unless the multiplier or divisor overflows. Signed
6148 left-shift overflow is implementation-defined rather than
6149 undefined in C90, so do not convert signed left shift into
6150 multiplication. */
6151 if (TREE_CODE (op1) == INTEGER_CST
6152 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6153 /* const_binop may not detect overflow correctly,
6154 so check for it explicitly here. */
6155 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6156 && 0 != (t1 = fold_convert (ctype,
6157 const_binop (LSHIFT_EXPR,
6158 size_one_node,
6159 op1)))
6160 && !TREE_OVERFLOW (t1))
6161 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6162 ? MULT_EXPR : FLOOR_DIV_EXPR,
6163 ctype,
6164 fold_convert (ctype, op0),
6165 t1),
6166 c, code, wide_type, strict_overflow_p);
6167 break;
6169 case PLUS_EXPR: case MINUS_EXPR:
6170 /* See if we can eliminate the operation on both sides. If we can, we
6171 can return a new PLUS or MINUS. If we can't, the only remaining
6172 cases where we can do anything are if the second operand is a
6173 constant. */
6174 sub_strict_overflow_p = false;
6175 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6176 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6177 if (t1 != 0 && t2 != 0
6178 && (code == MULT_EXPR
6179 /* If not multiplication, we can only do this if both operands
6180 are divisible by c. */
6181 || (multiple_of_p (ctype, op0, c)
6182 && multiple_of_p (ctype, op1, c))))
6184 if (sub_strict_overflow_p)
6185 *strict_overflow_p = true;
6186 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6187 fold_convert (ctype, t2));
6190 /* If this was a subtraction, negate OP1 and set it to be an addition.
6191 This simplifies the logic below. */
6192 if (tcode == MINUS_EXPR)
6194 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6195 /* If OP1 was not easily negatable, the constant may be OP0. */
6196 if (TREE_CODE (op0) == INTEGER_CST)
6198 std::swap (op0, op1);
6199 std::swap (t1, t2);
6203 if (TREE_CODE (op1) != INTEGER_CST)
6204 break;
6206 /* If either OP1 or C are negative, this optimization is not safe for
6207 some of the division and remainder types while for others we need
6208 to change the code. */
6209 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6211 if (code == CEIL_DIV_EXPR)
6212 code = FLOOR_DIV_EXPR;
6213 else if (code == FLOOR_DIV_EXPR)
6214 code = CEIL_DIV_EXPR;
6215 else if (code != MULT_EXPR
6216 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6217 break;
6220 /* If it's a multiply or a division/modulus operation of a multiple
6221 of our constant, do the operation and verify it doesn't overflow. */
6222 if (code == MULT_EXPR
6223 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6225 op1 = const_binop (code, fold_convert (ctype, op1),
6226 fold_convert (ctype, c));
6227 /* We allow the constant to overflow with wrapping semantics. */
6228 if (op1 == 0
6229 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6230 break;
6232 else
6233 break;
6235 /* If we have an unsigned type, we cannot widen the operation since it
6236 will change the result if the original computation overflowed. */
6237 if (TYPE_UNSIGNED (ctype) && ctype != type)
6238 break;
6240 /* If we were able to eliminate our operation from the first side,
6241 apply our operation to the second side and reform the PLUS. */
6242 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6243 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6245 /* The last case is if we are a multiply. In that case, we can
6246 apply the distributive law to commute the multiply and addition
6247 if the multiplication of the constants doesn't overflow
6248 and overflow is defined. With undefined overflow
6249 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6250 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6251 return fold_build2 (tcode, ctype,
6252 fold_build2 (code, ctype,
6253 fold_convert (ctype, op0),
6254 fold_convert (ctype, c)),
6255 op1);
6257 break;
6259 case MULT_EXPR:
6260 /* We have a special case here if we are doing something like
6261 (C * 8) % 4 since we know that's zero. */
6262 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6263 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6264 /* If the multiplication can overflow we cannot optimize this. */
6265 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6266 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6267 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6269 *strict_overflow_p = true;
6270 return omit_one_operand (type, integer_zero_node, op0);
6273 /* ... fall through ... */
6275 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6276 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6277 /* If we can extract our operation from the LHS, do so and return a
6278 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6279 do something only if the second operand is a constant. */
6280 if (same_p
6281 && (t1 = extract_muldiv (op0, c, code, wide_type,
6282 strict_overflow_p)) != 0)
6283 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6284 fold_convert (ctype, op1));
6285 else if (tcode == MULT_EXPR && code == MULT_EXPR
6286 && (t1 = extract_muldiv (op1, c, code, wide_type,
6287 strict_overflow_p)) != 0)
6288 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6289 fold_convert (ctype, t1));
6290 else if (TREE_CODE (op1) != INTEGER_CST)
6291 return 0;
6293 /* If these are the same operation types, we can associate them
6294 assuming no overflow. */
6295 if (tcode == code)
6297 bool overflow_p = false;
6298 bool overflow_mul_p;
6299 signop sign = TYPE_SIGN (ctype);
6300 unsigned prec = TYPE_PRECISION (ctype);
6301 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6302 wi::to_wide (c, prec),
6303 sign, &overflow_mul_p);
6304 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6305 if (overflow_mul_p
6306 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6307 overflow_p = true;
6308 if (!overflow_p)
6309 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6310 wide_int_to_tree (ctype, mul));
6313 /* If these operations "cancel" each other, we have the main
6314 optimizations of this pass, which occur when either constant is a
6315 multiple of the other, in which case we replace this with either an
6316 operation of CODE or TCODE.
6318 If we have an unsigned type, we cannot do this since it will change
6319 the result if the original computation overflowed. */
6320 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6321 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6322 || (tcode == MULT_EXPR
6323 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6324 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6325 && code != MULT_EXPR)))
6327 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6329 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6330 *strict_overflow_p = true;
6331 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6332 fold_convert (ctype,
6333 const_binop (TRUNC_DIV_EXPR,
6334 op1, c)));
6336 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6338 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6339 *strict_overflow_p = true;
6340 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6341 fold_convert (ctype,
6342 const_binop (TRUNC_DIV_EXPR,
6343 c, op1)));
6346 break;
6348 default:
6349 break;
6352 return 0;
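/* Illustrative sketch, not part of this file: the identity behind
   extract_muldiv, in plain C.  Because both addends are divisible by
   the constant, the division distributes over the sum and cancels
   into the inner multipliers, as the comment above describes for
   (X * 8 + Y * 16) / 4.  */
static long
example_extract_muldiv (long x, long y)
{
  long before = (x * 8 + y * 16) / 4;   /* original computation */
  long after = x * 2 + y * 4;           /* folded form */
  return before - after;                /* 0 when nothing overflows */
}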
6355 /* Return a node which has the indicated constant VALUE (either 0 or
6356 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6357 and is of the indicated TYPE. */
6359 tree
6360 constant_boolean_node (bool value, tree type)
6362 if (type == integer_type_node)
6363 return value ? integer_one_node : integer_zero_node;
6364 else if (type == boolean_type_node)
6365 return value ? boolean_true_node : boolean_false_node;
6366 else if (TREE_CODE (type) == VECTOR_TYPE)
6367 return build_vector_from_val (type,
6368 build_int_cst (TREE_TYPE (type),
6369 value ? -1 : 0));
6370 else
6371 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6375 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6376 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6377 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6378 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6379 COND is the first argument to CODE; otherwise (as in the example
6380 given here), it is the second argument. TYPE is the type of the
6381 original expression. Return NULL_TREE if no simplification is
6382 possible. */
6384 static tree
6385 fold_binary_op_with_conditional_arg (location_t loc,
6386 enum tree_code code,
6387 tree type, tree op0, tree op1,
6388 tree cond, tree arg, int cond_first_p)
6390 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6391 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6392 tree test, true_value, false_value;
6393 tree lhs = NULL_TREE;
6394 tree rhs = NULL_TREE;
6395 enum tree_code cond_code = COND_EXPR;
6397 if (TREE_CODE (cond) == COND_EXPR
6398 || TREE_CODE (cond) == VEC_COND_EXPR)
6400 test = TREE_OPERAND (cond, 0);
6401 true_value = TREE_OPERAND (cond, 1);
6402 false_value = TREE_OPERAND (cond, 2);
6403 /* If this operand is a void-typed expression (e.g. it throws), then
6404 it does not make sense to try to perform a logical or arithmetic
6405 operation involving it. */
6406 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6407 lhs = true_value;
6408 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6409 rhs = false_value;
6411 else if (!(TREE_CODE (type) != VECTOR_TYPE
6412 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6414 tree testtype = TREE_TYPE (cond);
6415 test = cond;
6416 true_value = constant_boolean_node (true, testtype);
6417 false_value = constant_boolean_node (false, testtype);
6419 else
6420 /* Detect the case of mixing vector and scalar types - bail out. */
6421 return NULL_TREE;
6423 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6424 cond_code = VEC_COND_EXPR;
6426 /* This transformation is only worthwhile if we don't have to wrap ARG
6427 in a SAVE_EXPR and the operation can be simplified without recursing
6428 on at least one of the branches once it's pushed inside the COND_EXPR. */
6429 if (!TREE_CONSTANT (arg)
6430 && (TREE_SIDE_EFFECTS (arg)
6431 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6432 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6433 return NULL_TREE;
6435 arg = fold_convert_loc (loc, arg_type, arg);
6436 if (lhs == 0)
6438 true_value = fold_convert_loc (loc, cond_type, true_value);
6439 if (cond_first_p)
6440 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6441 else
6442 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6444 if (rhs == 0)
6446 false_value = fold_convert_loc (loc, cond_type, false_value);
6447 if (cond_first_p)
6448 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6449 else
6450 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6453 /* Check that we have simplified at least one of the branches. */
6454 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6455 return NULL_TREE;
6457 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
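/* Illustrative sketch, not part of this file: the transformation
   performed above, in plain C.  The binary operation is pushed into
   both arms of the conditional; this is only worthwhile when at
   least one resulting arm simplifies, e.g. because an arm is a
   constant.  */
static int
example_cond_arg (int a, int b, int x, int y)
{
  int before = a + (b ? x : y);
  int after = b ? (a + x) : (a + y);
  return before == after;   /* Always 1.  */
}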
6461 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6463 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6464 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6465 ADDEND is the same as X.
6467 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6468 and finite. The problematic cases are when X is zero, and its mode
6469 has signed zeros. In the case of rounding towards -infinity,
6470 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6471 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6473 bool
6474 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6476 if (!real_zerop (addend))
6477 return false;
6479 /* Don't allow the fold with -fsignaling-nans. */
6480 if (HONOR_SNANS (element_mode (type)))
6481 return false;
6483 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6484 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6485 return true;
6487 /* In a vector or complex, we would need to check the sign of all zeros. */
6488 if (TREE_CODE (addend) != REAL_CST)
6489 return false;
6491 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6492 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6493 negate = !negate;
6495 /* The mode has signed zeros, and we have to honor their sign.
6496 In this situation, there is only one case we can return true for.
6497 X - 0 is the same as X unless rounding towards -infinity is
6498 supported. */
6499 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
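/* Illustrative sketch, not part of this file: the signed-zero hazard
   described above, checked with <math.h>'s signbit.  Under the
   default rounding mode, -0.0 + 0.0 is +0.0, so folding X + 0.0 to X
   would lose the sign of zero, while -0.0 - 0.0 is -0.0, so the
   subtraction form is safe.  */
#include <math.h>
static int
example_signed_zero (void)
{
  double x = -0.0;
  return signbit (x + 0.0) != signbit (x)      /* addition loses the sign */
         && signbit (x - 0.0) == signbit (x);  /* subtraction keeps it */
}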
6502 /* Subroutine of fold() that optimizes comparisons of a division by
6503 a nonzero integer constant against an integer constant, i.e.
6504 X/C1 op C2.
6506 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6507 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6508 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6510 The function returns the constant folded tree if a simplification
6511 can be made, and NULL_TREE otherwise. */
6513 static tree
6514 fold_div_compare (location_t loc,
6515 enum tree_code code, tree type, tree arg0, tree arg1)
6517 tree prod, tmp, hi, lo;
6518 tree arg00 = TREE_OPERAND (arg0, 0);
6519 tree arg01 = TREE_OPERAND (arg0, 1);
6520 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6521 bool neg_overflow = false;
6522 bool overflow;
6524 /* We have to do this the hard way to detect unsigned overflow.
6525 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6526 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6527 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6528 neg_overflow = false;
6530 if (sign == UNSIGNED)
6532 tmp = int_const_binop (MINUS_EXPR, arg01,
6533 build_int_cst (TREE_TYPE (arg01), 1));
6534 lo = prod;
6536 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6537 val = wi::add (prod, tmp, sign, &overflow);
6538 hi = force_fit_type (TREE_TYPE (arg00), val,
6539 -1, overflow | TREE_OVERFLOW (prod));
6541 else if (tree_int_cst_sgn (arg01) >= 0)
6543 tmp = int_const_binop (MINUS_EXPR, arg01,
6544 build_int_cst (TREE_TYPE (arg01), 1));
6545 switch (tree_int_cst_sgn (arg1))
6547 case -1:
6548 neg_overflow = true;
6549 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6550 hi = prod;
6551 break;
6553 case 0:
6554 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6555 hi = tmp;
6556 break;
6558 case 1:
6559 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6560 lo = prod;
6561 break;
6563 default:
6564 gcc_unreachable ();
6567 else
6569 /* A negative divisor reverses the relational operators. */
6570 code = swap_tree_comparison (code);
6572 tmp = int_const_binop (PLUS_EXPR, arg01,
6573 build_int_cst (TREE_TYPE (arg01), 1));
6574 switch (tree_int_cst_sgn (arg1))
6576 case -1:
6577 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6578 lo = prod;
6579 break;
6581 case 0:
6582 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6583 lo = tmp;
6584 break;
6586 case 1:
6587 neg_overflow = true;
6588 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6589 hi = prod;
6590 break;
6592 default:
6593 gcc_unreachable ();
6597 switch (code)
6599 case EQ_EXPR:
6600 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6601 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6602 if (TREE_OVERFLOW (hi))
6603 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6604 if (TREE_OVERFLOW (lo))
6605 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6606 return build_range_check (loc, type, arg00, 1, lo, hi);
6608 case NE_EXPR:
6609 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6610 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6611 if (TREE_OVERFLOW (hi))
6612 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6613 if (TREE_OVERFLOW (lo))
6614 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6615 return build_range_check (loc, type, arg00, 0, lo, hi);
6617 case LT_EXPR:
6618 if (TREE_OVERFLOW (lo))
6620 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6621 return omit_one_operand_loc (loc, type, tmp, arg00);
6623 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6625 case LE_EXPR:
6626 if (TREE_OVERFLOW (hi))
6628 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6629 return omit_one_operand_loc (loc, type, tmp, arg00);
6631 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6633 case GT_EXPR:
6634 if (TREE_OVERFLOW (hi))
6636 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6637 return omit_one_operand_loc (loc, type, tmp, arg00);
6639 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6641 case GE_EXPR:
6642 if (TREE_OVERFLOW (lo))
6644 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6645 return omit_one_operand_loc (loc, type, tmp, arg00);
6647 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6649 default:
6650 break;
6653 return NULL_TREE;
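/* Illustrative sketch, not part of this file: the range form that
   fold_div_compare produces.  For unsigned x, x / 4 == 3 holds
   exactly when x is in [12, 15]; with unsigned wraparound that range
   test is a single subtract-and-compare.  */
static int
example_div_compare (unsigned x)
{
  int before = (x / 4 == 3);
  int after = (x - 12 <= 3);   /* wraps below 12, so this is 12 <= x <= 15 */
  return before == after;      /* Always 1.  */
}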
6657 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6658 equality/inequality test, then return a simplified form of the test
6659 using a sign test. Otherwise return NULL. TYPE is the desired
6660 result type. */
6662 static tree
6663 fold_single_bit_test_into_sign_test (location_t loc,
6664 enum tree_code code, tree arg0, tree arg1,
6665 tree result_type)
6667 /* If this is testing a single bit, we can optimize the test. */
6668 if ((code == NE_EXPR || code == EQ_EXPR)
6669 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6670 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6672 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6673 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6674 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6676 if (arg00 != NULL_TREE
6677 /* This is only a win if casting to a signed type is cheap,
6678 i.e. when arg00's type is not a partial mode. */
6679 && TYPE_PRECISION (TREE_TYPE (arg00))
6680 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6682 tree stype = signed_type_for (TREE_TYPE (arg00));
6683 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6684 result_type,
6685 fold_convert_loc (loc, stype, arg00),
6686 build_int_cst (stype, 0));
6690 return NULL_TREE;
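/* Illustrative sketch, not part of this file: the sign-test form
   built above.  When the tested bit is the sign bit, the mask test is
   an ordinary signed comparison against zero, assuming the usual
   two's-complement conversion behavior.  */
#include <stdint.h>
static int
example_sign_bit_test (uint32_t a)
{
  int before = ((a & 0x80000000u) != 0);
  int after = ((int32_t) a < 0);   /* two's complement assumed */
  return before == after;          /* Always 1.  */
}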
6693 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6694 equality/inequality test, then return a simplified form of
6695 the test using shifts and logical operations. Otherwise return
6696 NULL. TYPE is the desired result type. */
6698 tree
6699 fold_single_bit_test (location_t loc, enum tree_code code,
6700 tree arg0, tree arg1, tree result_type)
6702 /* If this is testing a single bit, we can optimize the test. */
6703 if ((code == NE_EXPR || code == EQ_EXPR)
6704 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6705 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6707 tree inner = TREE_OPERAND (arg0, 0);
6708 tree type = TREE_TYPE (arg0);
6709 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6710 machine_mode operand_mode = TYPE_MODE (type);
6711 int ops_unsigned;
6712 tree signed_type, unsigned_type, intermediate_type;
6713 tree tem, one;
6715 /* First, see if we can fold the single bit test into a sign-bit
6716 test. */
6717 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6718 result_type);
6719 if (tem)
6720 return tem;
6722 /* Otherwise we have (A & C) != 0 where C is a single bit,
6723 convert that into ((A >> C2) & 1), where C2 = log2(C).
6724 Similarly for (A & C) == 0. */
6726 /* If INNER is a right shift of a constant and it plus BITNUM does
6727 not overflow, adjust BITNUM and INNER. */
6728 if (TREE_CODE (inner) == RSHIFT_EXPR
6729 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6730 && bitnum < TYPE_PRECISION (type)
6731 && wi::ltu_p (TREE_OPERAND (inner, 1),
6732 TYPE_PRECISION (type) - bitnum))
6734 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6735 inner = TREE_OPERAND (inner, 0);
6738 /* If we are going to be able to omit the AND below, we must do our
6739 operations as unsigned. If we must use the AND, we have a choice.
6740 Normally unsigned is faster, but for some machines signed is. */
6741 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6742 && !flag_syntax_only) ? 0 : 1;
6744 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6745 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6746 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6747 inner = fold_convert_loc (loc, intermediate_type, inner);
6749 if (bitnum != 0)
6750 inner = build2 (RSHIFT_EXPR, intermediate_type,
6751 inner, size_int (bitnum));
6753 one = build_int_cst (intermediate_type, 1);
6755 if (code == EQ_EXPR)
6756 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6758 /* Put the AND last so it can combine with more things. */
6759 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6761 /* Make sure to return the proper type. */
6762 inner = fold_convert_loc (loc, result_type, inner);
6764 return inner;
6766 return NULL_TREE;
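/* Illustrative sketch, not part of this file: the shift form built
   above.  Testing one bit with a power-of-two mask C is the same as
   shifting that bit down to position zero and masking with 1.  */
static unsigned
example_single_bit_test (unsigned a)
{
  unsigned before = ((a & 0x10u) != 0);   /* C = 1 << 4 */
  unsigned after = (a >> 4) & 1u;         /* C2 = log2 (C) = 4 */
  return before == after;                 /* Always 1.  */
}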
6769 /* Test whether it is preferable to swap two operands, ARG0 and
6770 ARG1, for example because ARG0 is an integer constant and ARG1
6771 isn't. */
6773 bool
6774 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6776 if (CONSTANT_CLASS_P (arg1))
6777 return 0;
6778 if (CONSTANT_CLASS_P (arg0))
6779 return 1;
6781 STRIP_NOPS (arg0);
6782 STRIP_NOPS (arg1);
6784 if (TREE_CONSTANT (arg1))
6785 return 0;
6786 if (TREE_CONSTANT (arg0))
6787 return 1;
6789 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6790 for commutative and comparison operators. Ensuring a canonical
6791 form allows the optimizers to find additional redundancies without
6792 having to explicitly check for both orderings. */
6793 if (TREE_CODE (arg0) == SSA_NAME
6794 && TREE_CODE (arg1) == SSA_NAME
6795 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6796 return 1;
6798 /* Put SSA_NAMEs last. */
6799 if (TREE_CODE (arg1) == SSA_NAME)
6800 return 0;
6801 if (TREE_CODE (arg0) == SSA_NAME)
6802 return 1;
6804 /* Put variables last. */
6805 if (DECL_P (arg1))
6806 return 0;
6807 if (DECL_P (arg0))
6808 return 1;
6810 return 0;
6814 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6815 means A >= Y && A != MAX, but in this case we know that
6816 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6818 static tree
6819 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6821 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6823 if (TREE_CODE (bound) == LT_EXPR)
6824 a = TREE_OPERAND (bound, 0);
6825 else if (TREE_CODE (bound) == GT_EXPR)
6826 a = TREE_OPERAND (bound, 1);
6827 else
6828 return NULL_TREE;
6830 typea = TREE_TYPE (a);
6831 if (!INTEGRAL_TYPE_P (typea)
6832 && !POINTER_TYPE_P (typea))
6833 return NULL_TREE;
6835 if (TREE_CODE (ineq) == LT_EXPR)
6837 a1 = TREE_OPERAND (ineq, 1);
6838 y = TREE_OPERAND (ineq, 0);
6840 else if (TREE_CODE (ineq) == GT_EXPR)
6842 a1 = TREE_OPERAND (ineq, 0);
6843 y = TREE_OPERAND (ineq, 1);
6845 else
6846 return NULL_TREE;
6848 if (TREE_TYPE (a1) != typea)
6849 return NULL_TREE;
6851 if (POINTER_TYPE_P (typea))
6853 /* Convert the pointer types into integers before taking the difference. */
6854 tree ta = fold_convert_loc (loc, ssizetype, a);
6855 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6856 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6858 else
6859 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6861 if (!diff || !integer_onep (diff))
6862 return NULL_TREE;
6864 return fold_build2_loc (loc, GE_EXPR, type, a, y);
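/* Illustrative sketch, not part of this file: the fold above in plain
   C.  For integers, a + 1 > y is the same as a >= y provided a + 1
   does not overflow, and the a < x guard rules that case out.  */
static int
example_nonsharp_ineq (int a, int x, int y)
{
  int before = (a < x) && (a + 1 > y);
  int after = (a < x) && (a >= y);
  return before == after;   /* Always 1; a < x keeps a + 1 from overflowing.  */
}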
6867 /* Fold a sum or difference of at least one multiplication.
6868 Returns the folded tree or NULL if no simplification could be made. */
6870 static tree
6871 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6872 tree arg0, tree arg1)
6874 tree arg00, arg01, arg10, arg11;
6875 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6877 /* (A * C) +- (B * C) -> (A+-B) * C.
6878 (A * C) +- A -> A * (C+-1).
6879 We are most concerned about the case where C is a constant,
6880 but other combinations show up during loop reduction. Since
6881 it is not difficult, try all four possibilities. */
6883 if (TREE_CODE (arg0) == MULT_EXPR)
6885 arg00 = TREE_OPERAND (arg0, 0);
6886 arg01 = TREE_OPERAND (arg0, 1);
6888 else if (TREE_CODE (arg0) == INTEGER_CST)
6890 arg00 = build_one_cst (type);
6891 arg01 = arg0;
6893 else
6895 /* We cannot generate constant 1 for fract. */
6896 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6897 return NULL_TREE;
6898 arg00 = arg0;
6899 arg01 = build_one_cst (type);
6901 if (TREE_CODE (arg1) == MULT_EXPR)
6903 arg10 = TREE_OPERAND (arg1, 0);
6904 arg11 = TREE_OPERAND (arg1, 1);
6906 else if (TREE_CODE (arg1) == INTEGER_CST)
6908 arg10 = build_one_cst (type);
6909 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6910 the purpose of this canonicalization. */
6911 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6912 && negate_expr_p (arg1)
6913 && code == PLUS_EXPR)
6915 arg11 = negate_expr (arg1);
6916 code = MINUS_EXPR;
6918 else
6919 arg11 = arg1;
6921 else
6923 /* We cannot generate constant 1 for fract. */
6924 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6925 return NULL_TREE;
6926 arg10 = arg1;
6927 arg11 = build_one_cst (type);
6929 same = NULL_TREE;
6931 if (operand_equal_p (arg01, arg11, 0))
6932 same = arg01, alt0 = arg00, alt1 = arg10;
6933 else if (operand_equal_p (arg00, arg10, 0))
6934 same = arg00, alt0 = arg01, alt1 = arg11;
6935 else if (operand_equal_p (arg00, arg11, 0))
6936 same = arg00, alt0 = arg01, alt1 = arg10;
6937 else if (operand_equal_p (arg01, arg10, 0))
6938 same = arg01, alt0 = arg00, alt1 = arg11;
6940 /* No identical multiplicands; see if we can find a common
6941 power-of-two factor in non-power-of-two multiplies. This
6942 can help in multi-dimensional array access. */
6943 else if (tree_fits_shwi_p (arg01)
6944 && tree_fits_shwi_p (arg11))
6946 HOST_WIDE_INT int01, int11, tmp;
6947 bool swap = false;
6948 tree maybe_same;
6949 int01 = tree_to_shwi (arg01);
6950 int11 = tree_to_shwi (arg11);
6952 /* Move min of absolute values to int11. */
6953 if (absu_hwi (int01) < absu_hwi (int11))
6955 tmp = int01, int01 = int11, int11 = tmp;
6956 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6957 maybe_same = arg01;
6958 swap = true;
6960 else
6961 maybe_same = arg11;
6963 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6964 /* The remainder should not be a constant, otherwise we
6965 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6966 increase the number of multiplications necessary. */
6967 && TREE_CODE (arg10) != INTEGER_CST)
6969 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6970 build_int_cst (TREE_TYPE (arg00),
6971 int01 / int11));
6972 alt1 = arg10;
6973 same = maybe_same;
6974 if (swap)
6975 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6979 if (same)
6980 return fold_build2_loc (loc, MULT_EXPR, type,
6981 fold_build2_loc (loc, code, type,
6982 fold_convert_loc (loc, type, alt0),
6983 fold_convert_loc (loc, type, alt1)),
6984 fold_convert_loc (loc, type, same));
6986 return NULL_TREE;
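/* Illustrative sketch, not part of this file: the two factoring
   shapes handled above, in plain C.  The first shares an identical
   multiplicand; the second shares only a power-of-two factor, the
   case that helps multi-dimensional array indexing.  */
static long
example_plusminus_mult (long a, long b, long c, long i, long j)
{
  long shared = (a * c + b * c) - (a + b) * c;     /* folds to (a + b) * c */
  long pow2 = (i * 12 + j * 4) - (i * 3 + j) * 4;  /* folds to (i * 3 + j) * 4 */
  return shared + pow2;                            /* 0 absent overflow */
}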
6989 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6990 specified by EXPR into the buffer PTR of length LEN bytes.
6991 Return the number of bytes placed in the buffer, or zero
6992 upon failure. */
6994 static int
6995 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6997 tree type = TREE_TYPE (expr);
6998 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6999 int byte, offset, word, words;
7000 unsigned char value;
7002 if ((off == -1 && total_bytes > len)
7003 || off >= total_bytes)
7004 return 0;
7005 if (off == -1)
7006 off = 0;
7007 words = total_bytes / UNITS_PER_WORD;
7009 for (byte = 0; byte < total_bytes; byte++)
7011 int bitpos = byte * BITS_PER_UNIT;
7012 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7013 number of bytes. */
7014 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7016 if (total_bytes > UNITS_PER_WORD)
7018 word = byte / UNITS_PER_WORD;
7019 if (WORDS_BIG_ENDIAN)
7020 word = (words - 1) - word;
7021 offset = word * UNITS_PER_WORD;
7022 if (BYTES_BIG_ENDIAN)
7023 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7024 else
7025 offset += byte % UNITS_PER_WORD;
7027 else
7028 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7029 if (offset >= off
7030 && offset - off < len)
7031 ptr[offset - off] = value;
7033 return MIN (len, total_bytes - off);
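/* Illustrative sketch, not part of this file: the byte placement the
   encoder above performs, reduced to one fixed-width integer written
   out in a chosen byte order.  */
#include <stdint.h>
static void
example_encode_u32 (uint32_t v, unsigned char buf[4], int big_endian)
{
  for (int byte = 0; byte < 4; byte++)
    {
      unsigned char value = (v >> (byte * 8)) & 0xff;  /* low byte first */
      int offset = big_endian ? 3 - byte : byte;       /* mirror for BE */
      buf[offset] = value;
    }
}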
7037 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7038 specified by EXPR into the buffer PTR of length LEN bytes.
7039 Return the number of bytes placed in the buffer, or zero
7040 upon failure. */
7042 static int
7043 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7045 tree type = TREE_TYPE (expr);
7046 machine_mode mode = TYPE_MODE (type);
7047 int total_bytes = GET_MODE_SIZE (mode);
7048 FIXED_VALUE_TYPE value;
7049 tree i_value, i_type;
7051 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7052 return 0;
7054 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7056 if (NULL_TREE == i_type
7057 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7058 return 0;
7060 value = TREE_FIXED_CST (expr);
7061 i_value = double_int_to_tree (i_type, value.data);
7063 return native_encode_int (i_value, ptr, len, off);
7067 /* Subroutine of native_encode_expr. Encode the REAL_CST
7068 specified by EXPR into the buffer PTR of length LEN bytes.
7069 Return the number of bytes placed in the buffer, or zero
7070 upon failure. */
7072 static int
7073 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7075 tree type = TREE_TYPE (expr);
7076 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7077 int byte, offset, word, words, bitpos;
7078 unsigned char value;
7080 /* There are always 32 bits in each long, no matter the size of
7081 the host's long. We handle floating point representations with
7082 up to 192 bits. */
7083 long tmp[6];
7085 if ((off == -1 && total_bytes > len)
7086 || off >= total_bytes)
7087 return 0;
7088 if (off == -1)
7089 off = 0;
7090 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7092 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7094 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7095 bitpos += BITS_PER_UNIT)
7097 byte = (bitpos / BITS_PER_UNIT) & 3;
7098 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7100 if (UNITS_PER_WORD < 4)
7102 word = byte / UNITS_PER_WORD;
7103 if (WORDS_BIG_ENDIAN)
7104 word = (words - 1) - word;
7105 offset = word * UNITS_PER_WORD;
7106 if (BYTES_BIG_ENDIAN)
7107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7108 else
7109 offset += byte % UNITS_PER_WORD;
7111 else
7113 offset = byte;
7114 if (BYTES_BIG_ENDIAN)
7116 /* Reverse bytes within each long, or within the entire float
7117 if it's smaller than a long (for HFmode). */
7118 offset = MIN (3, total_bytes - 1) - offset;
7119 gcc_assert (offset >= 0);
7122 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7123 if (offset >= off
7124 && offset - off < len)
7125 ptr[offset - off] = value;
7127 return MIN (len, total_bytes - off);
7130 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7131 specified by EXPR into the buffer PTR of length LEN bytes.
7132 Return the number of bytes placed in the buffer, or zero
7133 upon failure. */
7135 static int
7136 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7138 int rsize, isize;
7139 tree part;
7141 part = TREE_REALPART (expr);
7142 rsize = native_encode_expr (part, ptr, len, off);
7143 if (off == -1
7144 && rsize == 0)
7145 return 0;
7146 part = TREE_IMAGPART (expr);
7147 if (off != -1)
7148 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7149 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7150 if (off == -1
7151 && isize != rsize)
7152 return 0;
7153 return rsize + isize;
7157 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7158 specified by EXPR into the buffer PTR of length LEN bytes.
7159 Return the number of bytes placed in the buffer, or zero
7160 upon failure. */
7162 static int
7163 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7165 unsigned i, count;
7166 int size, offset;
7167 tree itype, elem;
7169 offset = 0;
7170 count = VECTOR_CST_NELTS (expr);
7171 itype = TREE_TYPE (TREE_TYPE (expr));
7172 size = GET_MODE_SIZE (TYPE_MODE (itype));
7173 for (i = 0; i < count; i++)
7175 if (off >= size)
7177 off -= size;
7178 continue;
7180 elem = VECTOR_CST_ELT (expr, i);
7181 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7182 if ((off == -1 && res != size)
7183 || res == 0)
7184 return 0;
7185 offset += res;
7186 if (offset >= len)
7187 return offset;
7188 if (off != -1)
7189 off = 0;
7191 return offset;
7195 /* Subroutine of native_encode_expr. Encode the STRING_CST
7196 specified by EXPR into the buffer PTR of length LEN bytes.
7197 Return the number of bytes placed in the buffer, or zero
7198 upon failure. */
7200 static int
7201 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7203 tree type = TREE_TYPE (expr);
7204 HOST_WIDE_INT total_bytes;
7206 if (TREE_CODE (type) != ARRAY_TYPE
7207 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7208 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7209 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7210 return 0;
7211 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7212 if ((off == -1 && total_bytes > len)
7213 || off >= total_bytes)
7214 return 0;
7215 if (off == -1)
7216 off = 0;
7217 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7219 int written = 0;
7220 if (off < TREE_STRING_LENGTH (expr))
7222 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7223 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7225 memset (ptr + written, 0,
7226 MIN (total_bytes - written, len - written));
7228 else
7229 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7230 return MIN (total_bytes - off, len);
7234 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7235 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7236 buffer PTR of length LEN bytes. If OFF is not -1 then start
7237 the encoding at byte offset OFF and encode at most LEN bytes.
7238 Return the number of bytes placed in the buffer, or zero upon failure. */
7240 int
7241 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7243 /* We don't support starting at negative offset and -1 is special. */
7244 if (off < -1)
7245 return 0;
7247 switch (TREE_CODE (expr))
7249 case INTEGER_CST:
7250 return native_encode_int (expr, ptr, len, off);
7252 case REAL_CST:
7253 return native_encode_real (expr, ptr, len, off);
7255 case FIXED_CST:
7256 return native_encode_fixed (expr, ptr, len, off);
7258 case COMPLEX_CST:
7259 return native_encode_complex (expr, ptr, len, off);
7261 case VECTOR_CST:
7262 return native_encode_vector (expr, ptr, len, off);
7264 case STRING_CST:
7265 return native_encode_string (expr, ptr, len, off);
7267 default:
7268 return 0;
7273 /* Subroutine of native_interpret_expr. Interpret the contents of
7274 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7275 If the buffer cannot be interpreted, return NULL_TREE. */
7277 static tree
7278 native_interpret_int (tree type, const unsigned char *ptr, int len)
7280 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7282 if (total_bytes > len
7283 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7284 return NULL_TREE;
7286 wide_int result = wi::from_buffer (ptr, total_bytes);
7288 return wide_int_to_tree (type, result);
7292 /* Subroutine of native_interpret_expr. Interpret the contents of
7293 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7294 If the buffer cannot be interpreted, return NULL_TREE. */
7296 static tree
7297 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7299 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7300 double_int result;
7301 FIXED_VALUE_TYPE fixed_value;
7303 if (total_bytes > len
7304 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7305 return NULL_TREE;
7307 result = double_int::from_buffer (ptr, total_bytes);
7308 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7310 return build_fixed (type, fixed_value);
7314 /* Subroutine of native_interpret_expr. Interpret the contents of
7315 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7316 If the buffer cannot be interpreted, return NULL_TREE. */
7318 static tree
7319 native_interpret_real (tree type, const unsigned char *ptr, int len)
7321 machine_mode mode = TYPE_MODE (type);
7322 int total_bytes = GET_MODE_SIZE (mode);
7323 unsigned char value;
7324 /* There are always 32 bits in each long, no matter the size of
7325 the host's long. We handle floating point representations with
7326 up to 192 bits. */
7327 REAL_VALUE_TYPE r;
7328 long tmp[6];
7331 if (total_bytes > len || total_bytes > 24)
7332 return NULL_TREE;
7333 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7335 memset (tmp, 0, sizeof (tmp));
7336 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7337 bitpos += BITS_PER_UNIT)
7339 /* Both OFFSET and BYTE index within a long;
7340 bitpos indexes the whole float. */
7341 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7342 if (UNITS_PER_WORD < 4)
7344 int word = byte / UNITS_PER_WORD;
7345 if (WORDS_BIG_ENDIAN)
7346 word = (words - 1) - word;
7347 offset = word * UNITS_PER_WORD;
7348 if (BYTES_BIG_ENDIAN)
7349 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7350 else
7351 offset += byte % UNITS_PER_WORD;
7353 else
7355 offset = byte;
7356 if (BYTES_BIG_ENDIAN)
7358 /* Reverse bytes within each long, or within the entire float
7359 if it's smaller than a long (for HFmode). */
7360 offset = MIN (3, total_bytes - 1) - offset;
7361 gcc_assert (offset >= 0);
7364 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7366 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7369 real_from_target (&r, tmp, mode);
7370 return build_real (type, r);
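/* As an illustration, on a big-endian target with words of at least
   four bytes the loop reads the buffer { 0x3f, 0x80, 0x00, 0x00 }
   into tmp[0] as 0x3f800000, which real_from_target decodes as 1.0
   when the mode uses the IEEE single-precision format.  */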
7374 /* Subroutine of native_interpret_expr. Interpret the contents of
7375 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7376 If the buffer cannot be interpreted, return NULL_TREE. */
7378 static tree
7379 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7381 tree etype, rpart, ipart;
7382 int size;
7384 etype = TREE_TYPE (type);
7385 size = GET_MODE_SIZE (TYPE_MODE (etype));
7386 if (size * 2 > len)
7387 return NULL_TREE;
7388 rpart = native_interpret_expr (etype, ptr, size);
7389 if (!rpart)
7390 return NULL_TREE;
7391 ipart = native_interpret_expr (etype, ptr+size, size);
7392 if (!ipart)
7393 return NULL_TREE;
7394 return build_complex (type, rpart, ipart);
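/* For example, a 16-byte buffer interpreted as a complex double is
   split into two 8-byte halves; the first half becomes the real part
   and the second the imaginary part.  */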
7398 /* Subroutine of native_interpret_expr. Interpret the contents of
7399 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7400 If the buffer cannot be interpreted, return NULL_TREE. */
7402 static tree
7403 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7405 tree etype, elem;
7406 int i, size, count;
7407 tree *elements;
7409 etype = TREE_TYPE (type);
7410 size = GET_MODE_SIZE (TYPE_MODE (etype));
7411 count = TYPE_VECTOR_SUBPARTS (type);
7412 if (size * count > len)
7413 return NULL_TREE;
7415 elements = XALLOCAVEC (tree, count);
7416 for (i = count - 1; i >= 0; i--)
7418 elem = native_interpret_expr (etype, ptr+(i*size), size);
7419 if (!elem)
7420 return NULL_TREE;
7421 elements[i] = elem;
7423 return build_vector (type, elements);
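/* For example, interpreting a 16-byte buffer as a vector of four
   32-bit integers decodes four consecutive 4-byte groups, one per
   element, in increasing index order.  */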
7427 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7428 the buffer PTR of length LEN as a constant of type TYPE. For
7429 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7430 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7431 return NULL_TREE. */
7433 tree
7434 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7436 switch (TREE_CODE (type))
7438 case INTEGER_TYPE:
7439 case ENUMERAL_TYPE:
7440 case BOOLEAN_TYPE:
7441 case POINTER_TYPE:
7442 case REFERENCE_TYPE:
7443 return native_interpret_int (type, ptr, len);
7445 case REAL_TYPE:
7446 return native_interpret_real (type, ptr, len);
7448 case FIXED_POINT_TYPE:
7449 return native_interpret_fixed (type, ptr, len);
7451 case COMPLEX_TYPE:
7452 return native_interpret_complex (type, ptr, len);
7454 case VECTOR_TYPE:
7455 return native_interpret_vector (type, ptr, len);
7457 default:
7458 return NULL_TREE;
7462 /* Returns true if we can interpret the contents of a native encoding
7463 as TYPE. */
7465 static bool
7466 can_native_interpret_type_p (tree type)
7468 switch (TREE_CODE (type))
7470 case INTEGER_TYPE:
7471 case ENUMERAL_TYPE:
7472 case BOOLEAN_TYPE:
7473 case POINTER_TYPE:
7474 case REFERENCE_TYPE:
7475 case FIXED_POINT_TYPE:
7476 case REAL_TYPE:
7477 case COMPLEX_TYPE:
7478 case VECTOR_TYPE:
7479 return true;
7480 default:
7481 return false;
7485 /* Return true iff a constant of type TYPE is accepted by
7486 native_encode_expr. */
7488 bool
7489 can_native_encode_type_p (tree type)
7491 switch (TREE_CODE (type))
7493 case INTEGER_TYPE:
7494 case REAL_TYPE:
7495 case FIXED_POINT_TYPE:
7496 case COMPLEX_TYPE:
7497 case VECTOR_TYPE:
7498 case POINTER_TYPE:
7499 return true;
7500 default:
7501 return false;
7505 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7506 TYPE at compile-time. If we're unable to perform the conversion
7507 return NULL_TREE. */
7509 static tree
7510 fold_view_convert_expr (tree type, tree expr)
7512 /* We support up to 512-bit values (for V8DFmode). */
7513 unsigned char buffer[64];
7514 int len;
7516 /* Check that the host and target are sane. */
7517 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7518 return NULL_TREE;
7520 len = native_encode_expr (expr, buffer, sizeof (buffer));
7521 if (len == 0)
7522 return NULL_TREE;
7524 return native_interpret_expr (type, buffer, len);
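/* For example, on a target using the IEEE single-precision format
   this folds VIEW_CONVERT_EXPR<int>(1.0f) to the INTEGER_CST
   0x3f800000.  */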
7527 /* Build an expression for the address of T. Folds away INDIRECT_REF
7528 to avoid confusing the gimplify process. */
7530 tree
7531 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7533 /* The size of the object is not relevant when talking about its address. */
7534 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7535 t = TREE_OPERAND (t, 0);
7537 if (TREE_CODE (t) == INDIRECT_REF)
7539 t = TREE_OPERAND (t, 0);
7541 if (TREE_TYPE (t) != ptrtype)
7542 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7544 else if (TREE_CODE (t) == MEM_REF
7545 && integer_zerop (TREE_OPERAND (t, 1)))
7546 return TREE_OPERAND (t, 0);
7547 else if (TREE_CODE (t) == MEM_REF
7548 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7549 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7550 TREE_OPERAND (t, 0),
7551 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7552 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7554 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7556 if (TREE_TYPE (t) != ptrtype)
7557 t = fold_convert_loc (loc, ptrtype, t);
7559 else
7560 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7562 return t;
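/* For example, &*p folds back to p (converted to PTRTYPE if needed)
   and &MEM[p + 0] folds to p, so no address is taken of an
   indirection.  */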
7565 /* Build an expression for the address of T. */
7567 tree
7568 build_fold_addr_expr_loc (location_t loc, tree t)
7570 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7572 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7575 /* Fold a unary expression of code CODE and type TYPE with operand
7576 OP0. Return the folded expression if folding is successful.
7577 Otherwise, return NULL_TREE. */
7579 tree
7580 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7582 tree tem;
7583 tree arg0;
7584 enum tree_code_class kind = TREE_CODE_CLASS (code);
7586 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7587 && TREE_CODE_LENGTH (code) == 1);
7589 arg0 = op0;
7590 if (arg0)
7592 if (CONVERT_EXPR_CODE_P (code)
7593 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7595 /* Don't use STRIP_NOPS, because signedness of argument type
7596 matters. */
7597 STRIP_SIGN_NOPS (arg0);
7599 else
7601 /* Strip any conversions that don't change the mode. This
7602 is safe for every expression, except for a comparison
7603 expression because its signedness is derived from its
7604 operands.
7606 Note that this is done as an internal manipulation within
7607 the constant folder, in order to find the simplest
7608 representation of the arguments so that their form can be
7609 studied. In any case, the appropriate type conversions
7610 should be put back in the tree that will get out of the
7611 constant folder. */
7612 STRIP_NOPS (arg0);
7615 if (CONSTANT_CLASS_P (arg0))
7617 tree tem = const_unop (code, type, arg0);
7618 if (tem)
7620 if (TREE_TYPE (tem) != type)
7621 tem = fold_convert_loc (loc, type, tem);
7622 return tem;
7627 tem = generic_simplify (loc, code, type, op0);
7628 if (tem)
7629 return tem;
7631 if (TREE_CODE_CLASS (code) == tcc_unary)
7633 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7634 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7635 fold_build1_loc (loc, code, type,
7636 fold_convert_loc (loc, TREE_TYPE (op0),
7637 TREE_OPERAND (arg0, 1))));
7638 else if (TREE_CODE (arg0) == COND_EXPR)
7640 tree arg01 = TREE_OPERAND (arg0, 1);
7641 tree arg02 = TREE_OPERAND (arg0, 2);
7642 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7643 arg01 = fold_build1_loc (loc, code, type,
7644 fold_convert_loc (loc,
7645 TREE_TYPE (op0), arg01));
7646 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7647 arg02 = fold_build1_loc (loc, code, type,
7648 fold_convert_loc (loc,
7649 TREE_TYPE (op0), arg02));
7650 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7651 arg01, arg02);
7653 /* If this was a conversion, and all we did was to move into
7654 inside the COND_EXPR, bring it back out. But leave it if
7655 it is a conversion from integer to integer and the
7656 result precision is no wider than a word since such a
7657 conversion is cheap and may be optimized away by combine,
7658 while it couldn't if it were outside the COND_EXPR. Then return
7659 so we don't get into an infinite recursion loop taking the
7660 conversion out and then back in. */
7662 if ((CONVERT_EXPR_CODE_P (code)
7663 || code == NON_LVALUE_EXPR)
7664 && TREE_CODE (tem) == COND_EXPR
7665 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7666 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7667 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7668 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7669 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7670 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7671 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7672 && (INTEGRAL_TYPE_P
7673 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7674 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7675 || flag_syntax_only))
7676 tem = build1_loc (loc, code, type,
7677 build3 (COND_EXPR,
7678 TREE_TYPE (TREE_OPERAND
7679 (TREE_OPERAND (tem, 1), 0)),
7680 TREE_OPERAND (tem, 0),
7681 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7682 TREE_OPERAND (TREE_OPERAND (tem, 2),
7683 0)));
7684 return tem;
7688 switch (code)
7690 case NON_LVALUE_EXPR:
7691 if (!maybe_lvalue_p (op0))
7692 return fold_convert_loc (loc, type, op0);
7693 return NULL_TREE;
7695 CASE_CONVERT:
7696 case FLOAT_EXPR:
7697 case FIX_TRUNC_EXPR:
7698 if (COMPARISON_CLASS_P (op0))
7700 /* If we have (type) (a CMP b) and type is an integral type, return
7701 new expression involving the new type. Canonicalize
7702 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7703 non-integral type.
7704 Do not fold the result as that would not simplify further, also
7705 folding again results in recursions. */
7706 if (TREE_CODE (type) == BOOLEAN_TYPE)
7707 return build2_loc (loc, TREE_CODE (op0), type,
7708 TREE_OPERAND (op0, 0),
7709 TREE_OPERAND (op0, 1));
7710 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7711 && TREE_CODE (type) != VECTOR_TYPE)
7712 return build3_loc (loc, COND_EXPR, type, op0,
7713 constant_boolean_node (true, type),
7714 constant_boolean_node (false, type));
7717 /* Handle (T *)&A.B.C for A being of type T and B and C
7718 living at offset zero. This occurs frequently in
7719 C++ upcasting and then accessing the base. */
7720 if (TREE_CODE (op0) == ADDR_EXPR
7721 && POINTER_TYPE_P (type)
7722 && handled_component_p (TREE_OPERAND (op0, 0)))
7724 HOST_WIDE_INT bitsize, bitpos;
7725 tree offset;
7726 machine_mode mode;
7727 int unsignedp, reversep, volatilep;
7728 tree base
7729 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7730 &offset, &mode, &unsignedp, &reversep,
7731 &volatilep);
7732 /* If the reference was to a (constant) zero offset, we can use
7733 the address of the base if it has the same base type
7734 as the result type and the pointer type is unqualified. */
7735 if (! offset && bitpos == 0
7736 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7737 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7738 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7739 return fold_convert_loc (loc, type,
7740 build_fold_addr_expr_loc (loc, base));
7743 if (TREE_CODE (op0) == MODIFY_EXPR
7744 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7745 /* Detect assigning a bitfield. */
7746 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7747 && DECL_BIT_FIELD
7748 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7750 /* Don't leave an assignment inside a conversion
7751 unless assigning a bitfield. */
7752 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7753 /* First do the assignment, then return converted constant. */
7754 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7755 TREE_NO_WARNING (tem) = 1;
7756 TREE_USED (tem) = 1;
7757 return tem;
7760 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7761 constants (if x has signed type, the sign bit cannot be set
7762 in c). This folds extension into the BIT_AND_EXPR.
7763 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7764 very likely don't have maximal range for their precision and this
7765 transformation effectively doesn't preserve non-maximal ranges. */
7766 if (TREE_CODE (type) == INTEGER_TYPE
7767 && TREE_CODE (op0) == BIT_AND_EXPR
7768 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7770 tree and_expr = op0;
7771 tree and0 = TREE_OPERAND (and_expr, 0);
7772 tree and1 = TREE_OPERAND (and_expr, 1);
7773 int change = 0;
7775 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7776 || (TYPE_PRECISION (type)
7777 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7778 change = 1;
7779 else if (TYPE_PRECISION (TREE_TYPE (and1))
7780 <= HOST_BITS_PER_WIDE_INT
7781 && tree_fits_uhwi_p (and1))
7783 unsigned HOST_WIDE_INT cst;
7785 cst = tree_to_uhwi (and1);
7786 cst &= HOST_WIDE_INT_M1U
7787 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7788 change = (cst == 0);
7789 if (change
7790 && !flag_syntax_only
7791 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7792 == ZERO_EXTEND))
7794 tree uns = unsigned_type_for (TREE_TYPE (and0));
7795 and0 = fold_convert_loc (loc, uns, and0);
7796 and1 = fold_convert_loc (loc, uns, and1);
7799 if (change)
7801 tem = force_fit_type (type, wi::to_widest (and1), 0,
7802 TREE_OVERFLOW (and1));
7803 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7804 fold_convert_loc (loc, type, and0), tem);
7808 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7809 cast (T1)X will fold away. We assume that this happens when X itself
7810 is a cast. */
7811 if (POINTER_TYPE_P (type)
7812 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7813 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7815 tree arg00 = TREE_OPERAND (arg0, 0);
7816 tree arg01 = TREE_OPERAND (arg0, 1);
7818 return fold_build_pointer_plus_loc
7819 (loc, fold_convert_loc (loc, type, arg00), arg01);
7822 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7823 of the same precision, and X is an integer type not narrower than
7824 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7825 if (INTEGRAL_TYPE_P (type)
7826 && TREE_CODE (op0) == BIT_NOT_EXPR
7827 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7828 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7829 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7831 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7832 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7833 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7834 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7835 fold_convert_loc (loc, type, tem));
7838 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7839 type of X and Y (integer types only). */
7840 if (INTEGRAL_TYPE_P (type)
7841 && TREE_CODE (op0) == MULT_EXPR
7842 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7843 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7845 /* Be careful not to introduce new overflows. */
7846 tree mult_type;
7847 if (TYPE_OVERFLOW_WRAPS (type))
7848 mult_type = type;
7849 else
7850 mult_type = unsigned_type_for (type);
7852 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7854 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7855 fold_convert_loc (loc, mult_type,
7856 TREE_OPERAND (op0, 0)),
7857 fold_convert_loc (loc, mult_type,
7858 TREE_OPERAND (op0, 1)));
7859 return fold_convert_loc (loc, type, tem);
7863 return NULL_TREE;
7865 case VIEW_CONVERT_EXPR:
7866 if (TREE_CODE (op0) == MEM_REF)
7868 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7869 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7870 tem = fold_build2_loc (loc, MEM_REF, type,
7871 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7872 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7873 return tem;
7876 return NULL_TREE;
7878 case NEGATE_EXPR:
7879 tem = fold_negate_expr (loc, arg0);
7880 if (tem)
7881 return fold_convert_loc (loc, type, tem);
7882 return NULL_TREE;
7884 case ABS_EXPR:
7885 /* Convert fabs((double)float) into (double)fabsf(float). */
7886 if (TREE_CODE (arg0) == NOP_EXPR
7887 && TREE_CODE (type) == REAL_TYPE)
7889 tree targ0 = strip_float_extensions (arg0);
7890 if (targ0 != arg0)
7891 return fold_convert_loc (loc, type,
7892 fold_build1_loc (loc, ABS_EXPR,
7893 TREE_TYPE (targ0),
7894 targ0));
7896 return NULL_TREE;
7898 case BIT_NOT_EXPR:
7899 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7900 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7901 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7902 fold_convert_loc (loc, type,
7903 TREE_OPERAND (arg0, 0)))))
7904 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7905 fold_convert_loc (loc, type,
7906 TREE_OPERAND (arg0, 1)));
7907 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7908 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7909 fold_convert_loc (loc, type,
7910 TREE_OPERAND (arg0, 1)))))
7911 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7912 fold_convert_loc (loc, type,
7913 TREE_OPERAND (arg0, 0)), tem);
7915 return NULL_TREE;
7917 case TRUTH_NOT_EXPR:
7918 /* Note that the operand of this must be an int
7919 and its values must be 0 or 1.
7920 ("true" is a fixed value perhaps depending on the language,
7921 but we don't handle values other than 1 correctly yet.) */
7922 tem = fold_truth_not_expr (loc, arg0);
7923 if (!tem)
7924 return NULL_TREE;
7925 return fold_convert_loc (loc, type, tem);
7927 case INDIRECT_REF:
7928 /* Fold *&X to X if X is an lvalue. */
7929 if (TREE_CODE (op0) == ADDR_EXPR)
7931 tree op00 = TREE_OPERAND (op0, 0);
7932 if ((VAR_P (op00)
7933 || TREE_CODE (op00) == PARM_DECL
7934 || TREE_CODE (op00) == RESULT_DECL)
7935 && !TREE_READONLY (op00))
7936 return op00;
7938 return NULL_TREE;
7940 default:
7941 return NULL_TREE;
7942 } /* switch (code) */
7946 /* If the operation was a conversion do _not_ mark a resulting constant
7947 with TREE_OVERFLOW if the original constant was not. These conversions
7948 have implementation defined behavior and retaining the TREE_OVERFLOW
7949 flag here would confuse later passes such as VRP. */
7950 tree
7951 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7952 tree type, tree op0)
7954 tree res = fold_unary_loc (loc, code, type, op0);
7955 if (res
7956 && TREE_CODE (res) == INTEGER_CST
7957 && TREE_CODE (op0) == INTEGER_CST
7958 && CONVERT_EXPR_CODE_P (code))
7959 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7961 return res;
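/* For example, when folding (int) 4294967295u the all-ones source
   constant carries no overflow, so the resulting INTEGER_CST -1 is
   not marked with TREE_OVERFLOW either, even though the value changed
   in the conversion.  */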
7964 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7965 operands OP0 and OP1. LOC is the location of the resulting expression.
7966 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7967 Return the folded expression if folding is successful. Otherwise,
7968 return NULL_TREE. */
7969 static tree
7970 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7971 tree arg0, tree arg1, tree op0, tree op1)
7973 tree tem;
7975 /* We only do these simplifications if we are optimizing. */
7976 if (!optimize)
7977 return NULL_TREE;
7979 /* Check for things like (A || B) && (A || C). We can convert this
7980 to A || (B && C). Note that either operator can be any of the four
7981 truth and/or operations and the transformation will still be
7982 valid. Also note that we only care about order for the
7983 ANDIF and ORIF operators. If B contains side effects, this
7984 might change the truth-value of A. */
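/* For instance, (x > 0 || y) && (x > 0 || z) becomes
   x > 0 || (y && z), so the common operand is tested only once.  */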
7985 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7986 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7987 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7988 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7989 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7990 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7992 tree a00 = TREE_OPERAND (arg0, 0);
7993 tree a01 = TREE_OPERAND (arg0, 1);
7994 tree a10 = TREE_OPERAND (arg1, 0);
7995 tree a11 = TREE_OPERAND (arg1, 1);
7996 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7997 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7998 && (code == TRUTH_AND_EXPR
7999 || code == TRUTH_OR_EXPR));
8001 if (operand_equal_p (a00, a10, 0))
8002 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8003 fold_build2_loc (loc, code, type, a01, a11));
8004 else if (commutative && operand_equal_p (a00, a11, 0))
8005 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8006 fold_build2_loc (loc, code, type, a01, a10));
8007 else if (commutative && operand_equal_p (a01, a10, 0))
8008 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8009 fold_build2_loc (loc, code, type, a00, a11));
8011 /* This case is tricky because we must either have commutative
8012 operators or else A10 must not have side-effects. */
8014 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8015 && operand_equal_p (a01, a11, 0))
8016 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8017 fold_build2_loc (loc, code, type, a00, a10),
8018 a01);
8021 /* See if we can build a range comparison. */
8022 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8023 return tem;
8025 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8026 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8028 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8029 if (tem)
8030 return fold_build2_loc (loc, code, type, tem, arg1);
8033 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8034 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8036 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8037 if (tem)
8038 return fold_build2_loc (loc, code, type, arg0, tem);
8041 /* Check for the possibility of merging component references. If our
8042 lhs is another similar operation, try to merge its rhs with our
8043 rhs. Then try to merge our lhs and rhs. */
8044 if (TREE_CODE (arg0) == code
8045 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8046 TREE_OPERAND (arg0, 1), arg1)))
8047 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8049 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8050 return tem;
8052 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8053 && (code == TRUTH_AND_EXPR
8054 || code == TRUTH_ANDIF_EXPR
8055 || code == TRUTH_OR_EXPR
8056 || code == TRUTH_ORIF_EXPR))
8058 enum tree_code ncode, icode;
8060 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8061 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8062 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8064 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8065 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8066 We don't want to pack more than two leafs to a non-IF AND/OR
8067 expression.
8068 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8069 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8070 If the inner right-hand side of the left-hand operand has
8071 side-effects, or isn't simple, then we can't add to it,
8072 as otherwise we might destroy the if-sequence. */
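/* For example, ((a && b) && c) becomes (a && (b AND c)) when B and C
   are simple and free of side-effects: the inner pair is evaluated
   unconditionally as one non-short-circuit TRUTH_AND_EXPR.  */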
8073 if (TREE_CODE (arg0) == icode
8074 && simple_operand_p_2 (arg1)
8075 /* Needed for sequence points to handle trapping and
8076 side-effects. */
8077 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8079 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8080 arg1);
8081 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8082 tem);
8084 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8085 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8086 else if (TREE_CODE (arg1) == icode
8087 && simple_operand_p_2 (arg0)
8088 /* Needed for sequence points to handle trapping and
8089 side-effects. */
8090 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8092 tem = fold_build2_loc (loc, ncode, type,
8093 arg0, TREE_OPERAND (arg1, 0));
8094 return fold_build2_loc (loc, icode, type, tem,
8095 TREE_OPERAND (arg1, 1));
8097 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8098 into (A OR B).
8099 For sequence point consistency, we need to check for trapping,
8100 and side-effects. */
8101 else if (code == icode && simple_operand_p_2 (arg0)
8102 && simple_operand_p_2 (arg1))
8103 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8106 return NULL_TREE;
8109 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8110 by changing CODE to reduce the magnitude of constants involved in
8111 ARG0 of the comparison.
8112 Returns a canonicalized comparison tree if a simplification was
8113 possible, otherwise returns NULL_TREE.
8114 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8115 valid if signed overflow is undefined. */
8117 static tree
8118 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8119 tree arg0, tree arg1,
8120 bool *strict_overflow_p)
8122 enum tree_code code0 = TREE_CODE (arg0);
8123 tree t, cst0 = NULL_TREE;
8124 int sgn0;
8126 /* Match A +- CST code arg1. We can change this only if overflow
8127 is undefined. */
8128 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8129 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8130 /* In principle pointers also have undefined overflow behavior,
8131 but that causes problems elsewhere. */
8132 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8133 && (code0 == MINUS_EXPR
8134 || code0 == PLUS_EXPR)
8135 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8136 return NULL_TREE;
8138 /* Identify the constant in arg0 and its sign. */
8139 cst0 = TREE_OPERAND (arg0, 1);
8140 sgn0 = tree_int_cst_sgn (cst0);
8142 /* Overflowed constants and zero will cause problems. */
8143 if (integer_zerop (cst0)
8144 || TREE_OVERFLOW (cst0))
8145 return NULL_TREE;
8147 /* See if we can reduce the magnitude of the constant in
8148 arg0 by changing the comparison code. */
8149 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8150 if (code == LT_EXPR
8151 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8152 code = LE_EXPR;
8153 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8154 else if (code == GT_EXPR
8155 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8156 code = GE_EXPR;
8157 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8158 else if (code == LE_EXPR
8159 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8160 code = LT_EXPR;
8161 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8162 else if (code == GE_EXPR
8163 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8164 code = GT_EXPR;
8165 else
8166 return NULL_TREE;
8167 *strict_overflow_p = true;
8169 /* Now build the constant reduced in magnitude. But not if that
8170 would produce one outside of its type's range. */
8171 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8172 && ((sgn0 == 1
8173 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8174 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8175 || (sgn0 == -1
8176 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8177 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8178 return NULL_TREE;
8180 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8181 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8182 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8183 t = fold_convert (TREE_TYPE (arg1), t);
8185 return fold_build2_loc (loc, code, type, t, arg1);
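/* As a concrete instance, x - 5 < y is canonicalized to x - 4 <= y:
   the constant shrinks in magnitude and the comparison code absorbs
   the difference, which is valid only because signed overflow in
   x - CST is assumed not to occur.  */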
8188 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8189 overflow further. Try to decrease the magnitude of constants involved
8190 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8191 and put sole constants at the second argument position.
8192 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8194 static tree
8195 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8196 tree arg0, tree arg1)
8198 tree t;
8199 bool strict_overflow_p;
8200 const char * const warnmsg = G_("assuming signed overflow does not occur "
8201 "when reducing constant in comparison");
8203 /* Try canonicalization by simplifying arg0. */
8204 strict_overflow_p = false;
8205 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8206 &strict_overflow_p);
8207 if (t)
8209 if (strict_overflow_p)
8210 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8211 return t;
8214 /* Try canonicalization by simplifying arg1 using the swapped
8215 comparison. */
8216 code = swap_tree_comparison (code);
8217 strict_overflow_p = false;
8218 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8219 &strict_overflow_p);
8220 if (t && strict_overflow_p)
8221 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8222 return t;
8225 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8226 space. This is used to avoid issuing overflow warnings for
8227 expressions like &p->x which cannot wrap. */
8229 static bool
8230 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8232 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8233 return true;
8235 if (bitpos < 0)
8236 return true;
8238 wide_int wi_offset;
8239 int precision = TYPE_PRECISION (TREE_TYPE (base));
8240 if (offset == NULL_TREE)
8241 wi_offset = wi::zero (precision);
8242 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8243 return true;
8244 else
8245 wi_offset = offset;
8247 bool overflow;
8248 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8249 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8250 if (overflow)
8251 return true;
8253 if (!wi::fits_uhwi_p (total))
8254 return true;
8256 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8257 if (size <= 0)
8258 return true;
8260 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8261 array. */
8262 if (TREE_CODE (base) == ADDR_EXPR)
8264 HOST_WIDE_INT base_size;
8266 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8267 if (base_size > 0 && size < base_size)
8268 size = base_size;
8271 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
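/* For example, for &p->x where *p is 16 bytes long and the field x
   lives at byte offset 8, TOTAL is 8 and SIZE is 16, so the address
   cannot wrap and false is returned.  A non-constant or overflowed
   offset conservatively yields true.  */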
8274 /* Return a positive integer when the symbol DECL is known to have
8275 a nonzero address, zero when it's known not to (e.g., it's a weak
8276 symbol), and a negative integer when the symbol is not yet in the
8277 symbol table and so whether or not its address is zero is unknown.
8278 For function-local objects, always return a positive integer. */
8279 static int
8280 maybe_nonzero_address (tree decl)
8282 if (DECL_P (decl) && decl_in_symtab_p (decl))
8283 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8284 return symbol->nonzero_address ();
8286 /* Function local objects are never NULL. */
8287 if (DECL_P (decl)
8288 && (DECL_CONTEXT (decl)
8289 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8290 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8291 return 1;
8293 return -1;
8296 /* Subroutine of fold_binary. This routine performs all of the
8297 transformations that are common to the equality/inequality
8298 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8299 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8300 fold_binary should call fold_binary rather than this routine. Fold a comparison with
8301 tree code CODE and type TYPE with operands OP0 and OP1. Return
8302 the folded comparison or NULL_TREE. */
8304 static tree
8305 fold_comparison (location_t loc, enum tree_code code, tree type,
8306 tree op0, tree op1)
8308 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8309 tree arg0, arg1, tem;
8311 arg0 = op0;
8312 arg1 = op1;
8314 STRIP_SIGN_NOPS (arg0);
8315 STRIP_SIGN_NOPS (arg1);
8317 /* For comparisons of pointers we can decompose it to a compile time
8318 comparison of the base objects and the offsets into the object.
8319 This requires at least one operand being an ADDR_EXPR or a
8320 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8321 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8322 && (TREE_CODE (arg0) == ADDR_EXPR
8323 || TREE_CODE (arg1) == ADDR_EXPR
8324 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8325 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8327 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8328 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8329 machine_mode mode;
8330 int volatilep, reversep, unsignedp;
8331 bool indirect_base0 = false, indirect_base1 = false;
8333 /* Get base and offset for the access. Strip ADDR_EXPR for
8334 get_inner_reference, but put it back by stripping INDIRECT_REF
8335 off the base object if possible. indirect_baseN will be true
8336 if baseN is not an address but refers to the object itself. */
8337 base0 = arg0;
8338 if (TREE_CODE (arg0) == ADDR_EXPR)
8340 base0
8341 = get_inner_reference (TREE_OPERAND (arg0, 0),
8342 &bitsize, &bitpos0, &offset0, &mode,
8343 &unsignedp, &reversep, &volatilep);
8344 if (TREE_CODE (base0) == INDIRECT_REF)
8345 base0 = TREE_OPERAND (base0, 0);
8346 else
8347 indirect_base0 = true;
8349 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8351 base0 = TREE_OPERAND (arg0, 0);
8352 STRIP_SIGN_NOPS (base0);
8353 if (TREE_CODE (base0) == ADDR_EXPR)
8355 base0
8356 = get_inner_reference (TREE_OPERAND (base0, 0),
8357 &bitsize, &bitpos0, &offset0, &mode,
8358 &unsignedp, &reversep, &volatilep);
8359 if (TREE_CODE (base0) == INDIRECT_REF)
8360 base0 = TREE_OPERAND (base0, 0);
8361 else
8362 indirect_base0 = true;
8364 if (offset0 == NULL_TREE || integer_zerop (offset0))
8365 offset0 = TREE_OPERAND (arg0, 1);
8366 else
8367 offset0 = size_binop (PLUS_EXPR, offset0,
8368 TREE_OPERAND (arg0, 1));
8369 if (TREE_CODE (offset0) == INTEGER_CST)
8371 offset_int tem = wi::sext (wi::to_offset (offset0),
8372 TYPE_PRECISION (sizetype));
8373 tem <<= LOG2_BITS_PER_UNIT;
8374 tem += bitpos0;
8375 if (wi::fits_shwi_p (tem))
8377 bitpos0 = tem.to_shwi ();
8378 offset0 = NULL_TREE;
8383 base1 = arg1;
8384 if (TREE_CODE (arg1) == ADDR_EXPR)
8386 base1
8387 = get_inner_reference (TREE_OPERAND (arg1, 0),
8388 &bitsize, &bitpos1, &offset1, &mode,
8389 &unsignedp, &reversep, &volatilep);
8390 if (TREE_CODE (base1) == INDIRECT_REF)
8391 base1 = TREE_OPERAND (base1, 0);
8392 else
8393 indirect_base1 = true;
8395 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8397 base1 = TREE_OPERAND (arg1, 0);
8398 STRIP_SIGN_NOPS (base1);
8399 if (TREE_CODE (base1) == ADDR_EXPR)
8401 base1
8402 = get_inner_reference (TREE_OPERAND (base1, 0),
8403 &bitsize, &bitpos1, &offset1, &mode,
8404 &unsignedp, &reversep, &volatilep);
8405 if (TREE_CODE (base1) == INDIRECT_REF)
8406 base1 = TREE_OPERAND (base1, 0);
8407 else
8408 indirect_base1 = true;
8410 if (offset1 == NULL_TREE || integer_zerop (offset1))
8411 offset1 = TREE_OPERAND (arg1, 1);
8412 else
8413 offset1 = size_binop (PLUS_EXPR, offset1,
8414 TREE_OPERAND (arg1, 1));
8415 if (TREE_CODE (offset1) == INTEGER_CST)
8417 offset_int tem = wi::sext (wi::to_offset (offset1),
8418 TYPE_PRECISION (sizetype));
8419 tem <<= LOG2_BITS_PER_UNIT;
8420 tem += bitpos1;
8421 if (wi::fits_shwi_p (tem))
8423 bitpos1 = tem.to_shwi ();
8424 offset1 = NULL_TREE;
8429 /* If we have equivalent bases we might be able to simplify. */
8430 if (indirect_base0 == indirect_base1
8431 && operand_equal_p (base0, base1,
8432 indirect_base0 ? OEP_ADDRESS_OF : 0))
8434 /* We can fold this expression to a constant if the non-constant
8435 offset parts are equal. */
8436 if ((offset0 == offset1
8437 || (offset0 && offset1
8438 && operand_equal_p (offset0, offset1, 0)))
8439 && (equality_code
8440 || (indirect_base0
8441 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8442 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8445 if (!equality_code
8446 && bitpos0 != bitpos1
8447 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8448 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8449 fold_overflow_warning (("assuming pointer wraparound does not "
8450 "occur when comparing P +- C1 with "
8451 "P +- C2"),
8452 WARN_STRICT_OVERFLOW_CONDITIONAL);
8454 switch (code)
8456 case EQ_EXPR:
8457 return constant_boolean_node (bitpos0 == bitpos1, type);
8458 case NE_EXPR:
8459 return constant_boolean_node (bitpos0 != bitpos1, type);
8460 case LT_EXPR:
8461 return constant_boolean_node (bitpos0 < bitpos1, type);
8462 case LE_EXPR:
8463 return constant_boolean_node (bitpos0 <= bitpos1, type);
8464 case GE_EXPR:
8465 return constant_boolean_node (bitpos0 >= bitpos1, type);
8466 case GT_EXPR:
8467 return constant_boolean_node (bitpos0 > bitpos1, type);
8468 default:;
8471 /* We can simplify the comparison to a comparison of the variable
8472 offset parts if the constant offset parts are equal.
8473 Be careful to use signed sizetype here because otherwise we
8474 mess with array offsets in the wrong way. This is possible
8475 because pointer arithmetic is restricted to remain within an
8476 object and overflow on pointer differences is undefined as of
8477 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8478 else if (bitpos0 == bitpos1
8479 && (equality_code
8480 || (indirect_base0
8481 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8482 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8484 /* By converting to signed sizetype we cover middle-end pointer
8485 arithmetic which operates on unsigned pointer types of size
8486 type size and ARRAY_REF offsets which are properly sign or
8487 zero extended from their type in case it is narrower than
8488 sizetype. */
8489 if (offset0 == NULL_TREE)
8490 offset0 = build_int_cst (ssizetype, 0);
8491 else
8492 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8493 if (offset1 == NULL_TREE)
8494 offset1 = build_int_cst (ssizetype, 0);
8495 else
8496 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8498 if (!equality_code
8499 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8500 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8501 fold_overflow_warning (("assuming pointer wraparound does not "
8502 "occur when comparing P +- C1 with "
8503 "P +- C2"),
8504 WARN_STRICT_OVERFLOW_COMPARISON);
8506 return fold_build2_loc (loc, code, type, offset0, offset1);
8509 /* For equal offsets we can simplify to a comparison of the
8510 base addresses. */
8511 else if (bitpos0 == bitpos1
8512 && (indirect_base0
8513 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8514 && (indirect_base1
8515 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8516 && ((offset0 == offset1)
8517 || (offset0 && offset1
8518 && operand_equal_p (offset0, offset1, 0))))
8520 if (indirect_base0)
8521 base0 = build_fold_addr_expr_loc (loc, base0);
8522 if (indirect_base1)
8523 base1 = build_fold_addr_expr_loc (loc, base1);
8524 return fold_build2_loc (loc, code, type, base0, base1);
8526 /* Comparison between an ordinary (non-weak) symbol and a null
8527 pointer can be eliminated since such symbols must have a
8528 non-null address. In C, relational expressions between pointers
8529 to objects and null pointers are undefined. The results
8530 below follow the C++ rules with the additional property that
8531 every object pointer compares greater than a null pointer. */
8533 else if (((DECL_P (base0)
8534 && maybe_nonzero_address (base0) > 0
8535 /* Avoid folding references to struct members at offset 0 to
8536 prevent tests like '&ptr->firstmember == 0' from getting
8537 eliminated. When ptr is null, although the -> expression
8538 is strictly speaking invalid, GCC retains it as a matter
8539 of QoI. See PR c/44555. */
8540 && (offset0 == NULL_TREE && bitpos0 != 0))
8541 || CONSTANT_CLASS_P (base0))
8542 && indirect_base0
8543 /* The caller guarantees that when one of the arguments is
8544 constant (i.e., null in this case) it is second. */
8545 && integer_zerop (arg1))
8547 switch (code)
8549 case EQ_EXPR:
8550 case LE_EXPR:
8551 case LT_EXPR:
8552 return constant_boolean_node (false, type);
8553 case GE_EXPR:
8554 case GT_EXPR:
8555 case NE_EXPR:
8556 return constant_boolean_node (true, type);
8557 default:
8558 gcc_unreachable ();
8563 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8564 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8565 the resulting offset is smaller in absolute value than the
8566 original one and has the same sign. */
8567 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8568 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8569 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8570 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8571 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8572 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8573 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8574 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8576 tree const1 = TREE_OPERAND (arg0, 1);
8577 tree const2 = TREE_OPERAND (arg1, 1);
8578 tree variable1 = TREE_OPERAND (arg0, 0);
8579 tree variable2 = TREE_OPERAND (arg1, 0);
8580 tree cst;
8581 const char * const warnmsg = G_("assuming signed overflow does not "
8582 "occur when combining constants around "
8583 "a comparison");
8585 /* Put the constant on the side where it doesn't overflow and is
8586 of lower absolute value and of same sign than before. */
8587 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8588 ? MINUS_EXPR : PLUS_EXPR,
8589 const2, const1);
8590 if (!TREE_OVERFLOW (cst)
8591 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8592 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8594 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8595 return fold_build2_loc (loc, code, type,
8596 variable1,
8597 fold_build2_loc (loc, TREE_CODE (arg1),
8598 TREE_TYPE (arg1),
8599 variable2, cst));
8602 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8603 ? MINUS_EXPR : PLUS_EXPR,
8604 const1, const2);
8605 if (!TREE_OVERFLOW (cst)
8606 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8607 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8609 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8610 return fold_build2_loc (loc, code, type,
8611 fold_build2_loc (loc, TREE_CODE (arg0),
8612 TREE_TYPE (arg0),
8613 variable1, cst),
8614 variable2);
8618 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8619 if (tem)
8620 return tem;
8622 /* If we are comparing an expression that just has comparisons
8623 of two integer values, arithmetic expressions of those comparisons,
8624 and constants, we can simplify it. There are only three cases
8625 to check: the two values can either be equal, the first can be
8626 greater, or the second can be greater. Fold the expression for
8627 those three values. Since each value must be 0 or 1, we have
8628 eight possibilities, each of which corresponds to the constant 0
8629 or 1 or one of the six possible comparisons.
8631 This handles common cases like (a > b) == 0 but also handles
8632 expressions like ((x > y) - (y > x)) > 0, which supposedly
8633 occur in macroized code. */
8635 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8637 tree cval1 = 0, cval2 = 0;
8638 int save_p = 0;
8640 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8641 /* Don't handle degenerate cases here; they should already
8642 have been handled anyway. */
8643 && cval1 != 0 && cval2 != 0
8644 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8645 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8646 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8647 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8648 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8649 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8650 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8652 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8653 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8655 /* We can't just pass T to eval_subst in case cval1 or cval2
8656 was the same as ARG1. */
8658 tree high_result
8659 = fold_build2_loc (loc, code, type,
8660 eval_subst (loc, arg0, cval1, maxval,
8661 cval2, minval),
8662 arg1);
8663 tree equal_result
8664 = fold_build2_loc (loc, code, type,
8665 eval_subst (loc, arg0, cval1, maxval,
8666 cval2, maxval),
8667 arg1);
8668 tree low_result
8669 = fold_build2_loc (loc, code, type,
8670 eval_subst (loc, arg0, cval1, minval,
8671 cval2, maxval),
8672 arg1);
8674 /* All three of these results should be 0 or 1. Confirm they are.
8675 Then use those values to select the proper code to use. */
8677 if (TREE_CODE (high_result) == INTEGER_CST
8678 && TREE_CODE (equal_result) == INTEGER_CST
8679 && TREE_CODE (low_result) == INTEGER_CST)
8681 /* Make a 3-bit mask with the high-order bit being the
8682 value for `>', the next for '=', and the low for '<'. */
8683 switch ((integer_onep (high_result) * 4)
8684 + (integer_onep (equal_result) * 2)
8685 + integer_onep (low_result))
8687 case 0:
8688 /* Always false. */
8689 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8690 case 1:
8691 code = LT_EXPR;
8692 break;
8693 case 2:
8694 code = EQ_EXPR;
8695 break;
8696 case 3:
8697 code = LE_EXPR;
8698 break;
8699 case 4:
8700 code = GT_EXPR;
8701 break;
8702 case 5:
8703 code = NE_EXPR;
8704 break;
8705 case 6:
8706 code = GE_EXPR;
8707 break;
8708 case 7:
8709 /* Always true. */
8710 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8713 if (save_p)
8715 tem = save_expr (build2 (code, type, cval1, cval2));
8716 protected_set_expr_location (tem, loc);
8717 return tem;
8719 return fold_build2_loc (loc, code, type, cval1, cval2);
8724 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8725 into a single range test. */
8726 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8727 && TREE_CODE (arg1) == INTEGER_CST
8728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8729 && !integer_zerop (TREE_OPERAND (arg0, 1))
8730 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8731 && !TREE_OVERFLOW (arg1))
8733 tem = fold_div_compare (loc, code, type, arg0, arg1);
8734 if (tem != NULL_TREE)
8735 return tem;
8738 return NULL_TREE;
8742 /* Subroutine of fold_binary. Optimize complex multiplications of the
8743 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8744 argument EXPR represents the expression "z" of type TYPE. */
8746 static tree
8747 fold_mult_zconjz (location_t loc, tree type, tree expr)
8749 tree itype = TREE_TYPE (type);
8750 tree rpart, ipart, tem;
8752 if (TREE_CODE (expr) == COMPLEX_EXPR)
8754 rpart = TREE_OPERAND (expr, 0);
8755 ipart = TREE_OPERAND (expr, 1);
8757 else if (TREE_CODE (expr) == COMPLEX_CST)
8759 rpart = TREE_REALPART (expr);
8760 ipart = TREE_IMAGPART (expr);
8762 else
8764 expr = save_expr (expr);
8765 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8766 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8769 rpart = save_expr (rpart);
8770 ipart = save_expr (ipart);
8771 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8772 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8773 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8774 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8775 build_zero_cst (itype));
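/* For example, with z = a + b*i this builds
   COMPLEX_EXPR <a*a + b*b, 0>, matching the identity
   z * conj(z) == |z|^2.  */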
8779 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8780 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8782 static bool
8783 vec_cst_ctor_to_array (tree arg, tree *elts)
8785 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8787 if (TREE_CODE (arg) == VECTOR_CST)
8789 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8790 elts[i] = VECTOR_CST_ELT (arg, i);
8792 else if (TREE_CODE (arg) == CONSTRUCTOR)
8794 constructor_elt *elt;
8796 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8797 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8798 return false;
8799 else
8800 elts[i] = elt->value;
8802 else
8803 return false;
8804 for (; i < nelts; i++)
8805 elts[i]
8806 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8807 return true;
8810 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8811 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8812 NULL_TREE otherwise. */
8814 static tree
8815 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8817 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8818 tree *elts;
8819 bool need_ctor = false;
8821 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8822 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8823 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8824 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8825 return NULL_TREE;
8827 elts = XALLOCAVEC (tree, nelts * 3);
8828 if (!vec_cst_ctor_to_array (arg0, elts)
8829 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8830 return NULL_TREE;
8832 for (i = 0; i < nelts; i++)
8834 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8835 need_ctor = true;
8836 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8839 if (need_ctor)
8841 vec<constructor_elt, va_gc> *v;
8842 vec_alloc (v, nelts);
8843 for (i = 0; i < nelts; i++)
8844 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8845 return build_constructor (type, v);
8847 else
8848 return build_vector (type, &elts[2 * nelts]);
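/* For example, applying the selector { 0, 4, 1, 5 } to two
   four-element vectors A and B yields { A0, B0, A1, B1 }: indices
   NELTS and above select from the second input vector.  */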
8851 /* Try to fold a pointer difference of type TYPE between two address
8852 expressions of array references AREF0 and AREF1 using location LOC. Return a
8853 simplified expression for the difference or NULL_TREE. */
8855 static tree
8856 fold_addr_of_array_ref_difference (location_t loc, tree type,
8857 tree aref0, tree aref1)
8859 tree base0 = TREE_OPERAND (aref0, 0);
8860 tree base1 = TREE_OPERAND (aref1, 0);
8861 tree base_offset = build_int_cst (type, 0);
8863 /* If the bases are array references as well, recurse. If the bases
8864 are pointer indirections compute the difference of the pointers.
8865 If the bases are equal, we are set. */
8866 if ((TREE_CODE (base0) == ARRAY_REF
8867 && TREE_CODE (base1) == ARRAY_REF
8868 && (base_offset
8869 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8870 || (INDIRECT_REF_P (base0)
8871 && INDIRECT_REF_P (base1)
8872 && (base_offset
8873 = fold_binary_loc (loc, MINUS_EXPR, type,
8874 fold_convert (type, TREE_OPERAND (base0, 0)),
8875 fold_convert (type,
8876 TREE_OPERAND (base1, 0)))))
8877 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8879 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8880 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8881 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8882 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8883 return fold_build2_loc (loc, PLUS_EXPR, type,
8884 base_offset,
8885 fold_build2_loc (loc, MULT_EXPR, type,
8886 diff, esz));
8888 return NULL_TREE;
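/* For example, the difference of &a[i] and &a[j] for an array of
   4-byte elements folds to (i - j) * 4, recursing through nested
   ARRAY_REFs when the bases are themselves array references.  */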
8891 /* If the real or vector real constant CST of type TYPE has an exact
8892 inverse, return it, else return NULL. */
8894 tree
8895 exact_inverse (tree type, tree cst)
8897 REAL_VALUE_TYPE r;
8898 tree unit_type, *elts;
8899 machine_mode mode;
8900 unsigned vec_nelts, i;
8902 switch (TREE_CODE (cst))
8904 case REAL_CST:
8905 r = TREE_REAL_CST (cst);
8907 if (exact_real_inverse (TYPE_MODE (type), &r))
8908 return build_real (type, r);
8910 return NULL_TREE;
8912 case VECTOR_CST:
8913 vec_nelts = VECTOR_CST_NELTS (cst);
8914 elts = XALLOCAVEC (tree, vec_nelts);
8915 unit_type = TREE_TYPE (type);
8916 mode = TYPE_MODE (unit_type);
8918 for (i = 0; i < vec_nelts; i++)
8920 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8921 if (!exact_real_inverse (mode, &r))
8922 return NULL_TREE;
8923 elts[i] = build_real (unit_type, r);
8926 return build_vector (type, elts);
8928 default:
8929 return NULL_TREE;
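/* For example, the REAL_CST 4.0 yields 0.25, whereas 3.0 yields
   NULL_TREE because 1/3 has no exact binary floating-point
   representation.  */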
8933 /* Mask out the tz least significant bits of X of type TYPE where
8934 tz is the number of trailing zeroes in Y. */
8935 static wide_int
8936 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8938 int tz = wi::ctz (y);
8939 if (tz > 0)
8940 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8941 return x;
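/* For example, with Y == 8 (three trailing zeros) the three least
   significant bits of X are cleared: X == 0b10111 becomes 0b10000.  */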
8944 /* Return true when T is an address and is known to be nonzero.
8945 For floating point we further ensure that T is not denormal.
8946 Similar logic is present in nonzero_address in rtlanal.c.
8948 If the return value is based on the assumption that signed overflow
8949 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8950 change *STRICT_OVERFLOW_P. */
8952 static bool
8953 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8955 tree type = TREE_TYPE (t);
8956 enum tree_code code;
8958 /* Doing something useful for floating point would need more work. */
8959 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8960 return false;
8962 code = TREE_CODE (t);
8963 switch (TREE_CODE_CLASS (code))
8965 case tcc_unary:
8966 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8967 strict_overflow_p);
8968 case tcc_binary:
8969 case tcc_comparison:
8970 return tree_binary_nonzero_warnv_p (code, type,
8971 TREE_OPERAND (t, 0),
8972 TREE_OPERAND (t, 1),
8973 strict_overflow_p);
8974 case tcc_constant:
8975 case tcc_declaration:
8976 case tcc_reference:
8977 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8979 default:
8980 break;
8983 switch (code)
8985 case TRUTH_NOT_EXPR:
8986 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8987 strict_overflow_p);
8989 case TRUTH_AND_EXPR:
8990 case TRUTH_OR_EXPR:
8991 case TRUTH_XOR_EXPR:
8992 return tree_binary_nonzero_warnv_p (code, type,
8993 TREE_OPERAND (t, 0),
8994 TREE_OPERAND (t, 1),
8995 strict_overflow_p);
8997 case COND_EXPR:
8998 case CONSTRUCTOR:
8999 case OBJ_TYPE_REF:
9000 case ASSERT_EXPR:
9001 case ADDR_EXPR:
9002 case WITH_SIZE_EXPR:
9003 case SSA_NAME:
9004 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9006 case COMPOUND_EXPR:
9007 case MODIFY_EXPR:
9008 case BIND_EXPR:
9009 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9010 strict_overflow_p);
9012 case SAVE_EXPR:
9013 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9014 strict_overflow_p);
9016 case CALL_EXPR:
9018 tree fndecl = get_callee_fndecl (t);
9019 if (!fndecl) return false;
9020 if (flag_delete_null_pointer_checks && !flag_check_new
9021 && DECL_IS_OPERATOR_NEW (fndecl)
9022 && !TREE_NOTHROW (fndecl))
9023 return true;
9024 if (flag_delete_null_pointer_checks
9025 && lookup_attribute ("returns_nonnull",
9026 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9027 return true;
9028 return alloca_call_p (t);
9031 default:
9032 break;
9034 return false;
9037 /* Return true when T is an address and is known to be nonzero.
9038 Handle warnings about undefined signed overflow. */
9040 bool
9041 tree_expr_nonzero_p (tree t)
9043 bool ret, strict_overflow_p;
9045 strict_overflow_p = false;
9046 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9047 if (strict_overflow_p)
9048 fold_overflow_warning (("assuming signed overflow does not occur when "
9049 "determining that expression is always "
9050 "non-zero"),
9051 WARN_STRICT_OVERFLOW_MISC);
9052 return ret;
9055 /* Return true if T is known not to be equal to an integer W. */
9057 bool
9058 expr_not_equal_to (tree t, const wide_int &w)
9060 wide_int min, max, nz;
9061 value_range_type rtype;
9062 switch (TREE_CODE (t))
9064 case INTEGER_CST:
9065 return wi::ne_p (t, w);
9067 case SSA_NAME:
9068 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9069 return false;
9070 rtype = get_range_info (t, &min, &max);
9071 if (rtype == VR_RANGE)
9073 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9074 return true;
9075 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9076 return true;
9078 else if (rtype == VR_ANTI_RANGE
9079 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9080 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9081 return true;
9082 /* If T has some known zero bits and W has any of those bits set,
9083 then T is known not to be equal to W. */
9084 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9085 TYPE_PRECISION (TREE_TYPE (t))), 0))
9086 return true;
9087 return false;
9089 default:
9090 return false;
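     /* A worked example: if VRP recorded the range [1, 7] for an SSA name X,
        expr_not_equal_to (X, 8) holds via the VR_RANGE test above; and if
        get_nonzero_bits shows the low bit of X is always zero, X cannot
        equal 5 either.  */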
9094 /* Fold a binary expression of code CODE and type TYPE with operands
9095 OP0 and OP1. LOC is the location of the resulting expression.
9096 Return the folded expression if folding is successful. Otherwise,
9097 return NULL_TREE. */
9099 tree
9100 fold_binary_loc (location_t loc,
9101 enum tree_code code, tree type, tree op0, tree op1)
9103 enum tree_code_class kind = TREE_CODE_CLASS (code);
9104 tree arg0, arg1, tem;
9105 tree t1 = NULL_TREE;
9106 bool strict_overflow_p;
9107 unsigned int prec;
9109 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9110 && TREE_CODE_LENGTH (code) == 2
9111 && op0 != NULL_TREE
9112 && op1 != NULL_TREE);
9114 arg0 = op0;
9115 arg1 = op1;
9117 /* Strip any conversions that don't change the mode. This is
9118 safe for every expression, except for a comparison expression
9119 because its signedness is derived from its operands. So, in
9120 the latter case, only strip conversions that don't change the
9121 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9122 preserved.
9124 Note that this is done as an internal manipulation within the
9125 constant folder, in order to find the simplest representation
9126 of the arguments so that their form can be studied. In any
9127 case, the appropriate type conversions should be put back in
9128 the tree that will get out of the constant folder. */
9130 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9132 STRIP_SIGN_NOPS (arg0);
9133 STRIP_SIGN_NOPS (arg1);
9135 else
9137 STRIP_NOPS (arg0);
9138 STRIP_NOPS (arg1);
9141 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9142 constant but we can't do arithmetic on them. */
9143 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9145 tem = const_binop (code, type, arg0, arg1);
9146 if (tem != NULL_TREE)
9148 if (TREE_TYPE (tem) != type)
9149 tem = fold_convert_loc (loc, type, tem);
9150 return tem;
9154 /* If this is a commutative operation, and ARG0 is a constant, move it
9155 to ARG1 to reduce the number of tests below. */
9156 if (commutative_tree_code (code)
9157 && tree_swap_operands_p (arg0, arg1))
9158 return fold_build2_loc (loc, code, type, op1, op0);
9160 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9161 to ARG1 to reduce the number of tests below. */
9162 if (kind == tcc_comparison
9163 && tree_swap_operands_p (arg0, arg1))
9164 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9166 tem = generic_simplify (loc, code, type, op0, op1);
9167 if (tem)
9168 return tem;
9170 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9172 First check for cases where an arithmetic operation is applied to a
9173 compound, conditional, or comparison operation. Push the arithmetic
9174 operation inside the compound or conditional to see if any folding
9175 can then be done. Convert comparison to conditional for this purpose.
9176 This also optimizes non-constant cases that used to be done in
9177 expand_expr.
9179 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9180 where one of the operands is a comparison and the other is a comparison,
9181 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9182 code below would make the expression more complex. Change it to a
9183 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9184 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9186 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9187 || code == EQ_EXPR || code == NE_EXPR)
9188 && TREE_CODE (type) != VECTOR_TYPE
9189 && ((truth_value_p (TREE_CODE (arg0))
9190 && (truth_value_p (TREE_CODE (arg1))
9191 || (TREE_CODE (arg1) == BIT_AND_EXPR
9192 && integer_onep (TREE_OPERAND (arg1, 1)))))
9193 || (truth_value_p (TREE_CODE (arg1))
9194 && (truth_value_p (TREE_CODE (arg0))
9195 || (TREE_CODE (arg0) == BIT_AND_EXPR
9196 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9198 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9199 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9200 : TRUTH_XOR_EXPR,
9201 boolean_type_node,
9202 fold_convert_loc (loc, boolean_type_node, arg0),
9203 fold_convert_loc (loc, boolean_type_node, arg1));
9205 if (code == EQ_EXPR)
9206 tem = invert_truthvalue_loc (loc, tem);
9208 return fold_convert_loc (loc, type, tem);
9211 if (TREE_CODE_CLASS (code) == tcc_binary
9212 || TREE_CODE_CLASS (code) == tcc_comparison)
9214 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9216 tem = fold_build2_loc (loc, code, type,
9217 fold_convert_loc (loc, TREE_TYPE (op0),
9218 TREE_OPERAND (arg0, 1)), op1);
9219 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9220 tem);
9222 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9224 tem = fold_build2_loc (loc, code, type, op0,
9225 fold_convert_loc (loc, TREE_TYPE (op1),
9226 TREE_OPERAND (arg1, 1)));
9227 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9228 tem);
9231 if (TREE_CODE (arg0) == COND_EXPR
9232 || TREE_CODE (arg0) == VEC_COND_EXPR
9233 || COMPARISON_CLASS_P (arg0))
9235 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9236 arg0, arg1,
9237 /*cond_first_p=*/1);
9238 if (tem != NULL_TREE)
9239 return tem;
9242 if (TREE_CODE (arg1) == COND_EXPR
9243 || TREE_CODE (arg1) == VEC_COND_EXPR
9244 || COMPARISON_CLASS_P (arg1))
9246 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9247 arg1, arg0,
9248 /*cond_first_p=*/0);
9249 if (tem != NULL_TREE)
9250 return tem;
9254 switch (code)
9256 case MEM_REF:
9257 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9258 if (TREE_CODE (arg0) == ADDR_EXPR
9259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9261 tree iref = TREE_OPERAND (arg0, 0);
9262 return fold_build2 (MEM_REF, type,
9263 TREE_OPERAND (iref, 0),
9264 int_const_binop (PLUS_EXPR, arg1,
9265 TREE_OPERAND (iref, 1)));
9268 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9269 if (TREE_CODE (arg0) == ADDR_EXPR
9270 && handled_component_p (TREE_OPERAND (arg0, 0)))
9272 tree base;
9273 HOST_WIDE_INT coffset;
9274 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9275 &coffset);
9276 if (!base)
9277 return NULL_TREE;
9278 return fold_build2 (MEM_REF, type,
9279 build_fold_addr_expr (base),
9280 int_const_binop (PLUS_EXPR, arg1,
9281 size_int (coffset)));
9284 return NULL_TREE;
9286 case POINTER_PLUS_EXPR:
9287 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9288 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9289 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9290 return fold_convert_loc (loc, type,
9291 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9292 fold_convert_loc (loc, sizetype,
9293 arg1),
9294 fold_convert_loc (loc, sizetype,
9295 arg0)));
9297 return NULL_TREE;
9299 case PLUS_EXPR:
9300 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9302 /* X + (X / CST) * -CST is X % CST. */
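     /* A worked instance: x + (x / 16) * -16 folds to x % 16; the sum
        computed below checks that the two constants really cancel.  */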
9303 if (TREE_CODE (arg1) == MULT_EXPR
9304 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9305 && operand_equal_p (arg0,
9306 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9308 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9309 tree cst1 = TREE_OPERAND (arg1, 1);
9310 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9311 cst1, cst0);
9312 if (sum && integer_zerop (sum))
9313 return fold_convert_loc (loc, type,
9314 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9315 TREE_TYPE (arg0), arg0,
9316 cst0));
9320 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same
9321 or one of them 1. Make sure the type is not saturating and has the signedness of
9322 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9323 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9324 if ((TREE_CODE (arg0) == MULT_EXPR
9325 || TREE_CODE (arg1) == MULT_EXPR)
9326 && !TYPE_SATURATING (type)
9327 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9328 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9329 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9331 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9332 if (tem)
9333 return tem;
9336 if (! FLOAT_TYPE_P (type))
9338 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9339 (plus (plus (mult) (mult)) (foo)) so that we can
9340 take advantage of the factoring cases below. */
9341 if (ANY_INTEGRAL_TYPE_P (type)
9342 && TYPE_OVERFLOW_WRAPS (type)
9343 && (((TREE_CODE (arg0) == PLUS_EXPR
9344 || TREE_CODE (arg0) == MINUS_EXPR)
9345 && TREE_CODE (arg1) == MULT_EXPR)
9346 || ((TREE_CODE (arg1) == PLUS_EXPR
9347 || TREE_CODE (arg1) == MINUS_EXPR)
9348 && TREE_CODE (arg0) == MULT_EXPR)))
9350 tree parg0, parg1, parg, marg;
9351 enum tree_code pcode;
9353 if (TREE_CODE (arg1) == MULT_EXPR)
9354 parg = arg0, marg = arg1;
9355 else
9356 parg = arg1, marg = arg0;
9357 pcode = TREE_CODE (parg);
9358 parg0 = TREE_OPERAND (parg, 0);
9359 parg1 = TREE_OPERAND (parg, 1);
9360 STRIP_NOPS (parg0);
9361 STRIP_NOPS (parg1);
9363 if (TREE_CODE (parg0) == MULT_EXPR
9364 && TREE_CODE (parg1) != MULT_EXPR)
9365 return fold_build2_loc (loc, pcode, type,
9366 fold_build2_loc (loc, PLUS_EXPR, type,
9367 fold_convert_loc (loc, type,
9368 parg0),
9369 fold_convert_loc (loc, type,
9370 marg)),
9371 fold_convert_loc (loc, type, parg1));
9372 if (TREE_CODE (parg0) != MULT_EXPR
9373 && TREE_CODE (parg1) == MULT_EXPR)
9374 return
9375 fold_build2_loc (loc, PLUS_EXPR, type,
9376 fold_convert_loc (loc, type, parg0),
9377 fold_build2_loc (loc, pcode, type,
9378 fold_convert_loc (loc, type, marg),
9379 fold_convert_loc (loc, type,
9380 parg1)));
9383 else
9385 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9386 to __complex__ ( x, y ). This is not the same for SNaNs or
9387 if signed zeros are involved. */
9388 if (!HONOR_SNANS (element_mode (arg0))
9389 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9390 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9392 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9393 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9394 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9395 bool arg0rz = false, arg0iz = false;
9396 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9397 || (arg0i && (arg0iz = real_zerop (arg0i))))
9399 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9400 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9401 if (arg0rz && arg1i && real_zerop (arg1i))
9403 tree rp = arg1r ? arg1r
9404 : build1 (REALPART_EXPR, rtype, arg1);
9405 tree ip = arg0i ? arg0i
9406 : build1 (IMAGPART_EXPR, rtype, arg0);
9407 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9409 else if (arg0iz && arg1r && real_zerop (arg1r))
9411 tree rp = arg0r ? arg0r
9412 : build1 (REALPART_EXPR, rtype, arg0);
9413 tree ip = arg1i ? arg1i
9414 : build1 (IMAGPART_EXPR, rtype, arg1);
9415 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9420 if (flag_unsafe_math_optimizations
9421 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9422 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9423 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9424 return tem;
9426 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9427 We associate floats only if the user has specified
9428 -fassociative-math. */
9429 if (flag_associative_math
9430 && TREE_CODE (arg1) == PLUS_EXPR
9431 && TREE_CODE (arg0) != MULT_EXPR)
9433 tree tree10 = TREE_OPERAND (arg1, 0);
9434 tree tree11 = TREE_OPERAND (arg1, 1);
9435 if (TREE_CODE (tree11) == MULT_EXPR
9436 && TREE_CODE (tree10) == MULT_EXPR)
9438 tree tree0;
9439 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9440 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9443 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9444 We associate floats only if the user has specified
9445 -fassociative-math. */
9446 if (flag_associative_math
9447 && TREE_CODE (arg0) == PLUS_EXPR
9448 && TREE_CODE (arg1) != MULT_EXPR)
9450 tree tree00 = TREE_OPERAND (arg0, 0);
9451 tree tree01 = TREE_OPERAND (arg0, 1);
9452 if (TREE_CODE (tree01) == MULT_EXPR
9453 && TREE_CODE (tree00) == MULT_EXPR)
9455 tree tree0;
9456 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9457 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9462 bit_rotate:
9463 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9464 is a rotate of A by C1 bits. */
9465 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9466 is a rotate of A by B bits. */
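     /* A worked instance, for 32-bit unsigned x: (x << 3) + (x >> 29)
        becomes x lrotate 3, and (x << n) + (x >> (32 - n)) becomes
        x lrotate n.  */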
9468 enum tree_code code0, code1;
9469 tree rtype;
9470 code0 = TREE_CODE (arg0);
9471 code1 = TREE_CODE (arg1);
9472 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9473 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9474 && operand_equal_p (TREE_OPERAND (arg0, 0),
9475 TREE_OPERAND (arg1, 0), 0)
9476 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9477 TYPE_UNSIGNED (rtype))
9478 /* Only create rotates in complete modes. Other cases are not
9479 expanded properly. */
9480 && (element_precision (rtype)
9481 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9483 tree tree01, tree11;
9484 enum tree_code code01, code11;
9486 tree01 = TREE_OPERAND (arg0, 1);
9487 tree11 = TREE_OPERAND (arg1, 1);
9488 STRIP_NOPS (tree01);
9489 STRIP_NOPS (tree11);
9490 code01 = TREE_CODE (tree01);
9491 code11 = TREE_CODE (tree11);
9492 if (code01 == INTEGER_CST
9493 && code11 == INTEGER_CST
9494 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9495 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9497 tem = build2_loc (loc, LROTATE_EXPR,
9498 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9499 TREE_OPERAND (arg0, 0),
9500 code0 == LSHIFT_EXPR
9501 ? TREE_OPERAND (arg0, 1)
9502 : TREE_OPERAND (arg1, 1));
9503 return fold_convert_loc (loc, type, tem);
9505 else if (code11 == MINUS_EXPR)
9507 tree tree110, tree111;
9508 tree110 = TREE_OPERAND (tree11, 0);
9509 tree111 = TREE_OPERAND (tree11, 1);
9510 STRIP_NOPS (tree110);
9511 STRIP_NOPS (tree111);
9512 if (TREE_CODE (tree110) == INTEGER_CST
9513 && 0 == compare_tree_int (tree110,
9514 element_precision
9515 (TREE_TYPE (TREE_OPERAND
9516 (arg0, 0))))
9517 && operand_equal_p (tree01, tree111, 0))
9518 return
9519 fold_convert_loc (loc, type,
9520 build2 ((code0 == LSHIFT_EXPR
9521 ? LROTATE_EXPR
9522 : RROTATE_EXPR),
9523 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9524 TREE_OPERAND (arg0, 0),
9525 TREE_OPERAND (arg0, 1)));
9527 else if (code01 == MINUS_EXPR)
9529 tree tree010, tree011;
9530 tree010 = TREE_OPERAND (tree01, 0);
9531 tree011 = TREE_OPERAND (tree01, 1);
9532 STRIP_NOPS (tree010);
9533 STRIP_NOPS (tree011);
9534 if (TREE_CODE (tree010) == INTEGER_CST
9535 && 0 == compare_tree_int (tree010,
9536 element_precision
9537 (TREE_TYPE (TREE_OPERAND
9538 (arg0, 0))))
9539 && operand_equal_p (tree11, tree011, 0))
9540 return fold_convert_loc
9541 (loc, type,
9542 build2 ((code0 != LSHIFT_EXPR
9543 ? LROTATE_EXPR
9544 : RROTATE_EXPR),
9545 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9546 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9551 associate:
9552 /* In most languages, we can't associate operations on floats through
9553 parentheses. Rather than remember where the parentheses were, we
9554 don't associate floats at all, unless the user has specified
9555 -fassociative-math.
9556 And, we need to make sure type is not saturating. */
9558 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9559 && !TYPE_SATURATING (type))
9561 tree var0, con0, lit0, minus_lit0;
9562 tree var1, con1, lit1, minus_lit1;
9563 tree atype = type;
9564 bool ok = true;
9566 /* Split both trees into variables, constants, and literals. Then
9567 associate each group together, the constants with literals,
9568 then the result with variables. This increases the chances of
9569 literals being recombined later and of generating relocatable
9570 expressions for the sum of a constant and literal. */
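     /* For instance, (x + 1) + (y + 2) splits into variables x, y and
        literals 1, 2; the literals associate to 3 and the result is
        rebuilt as (x + y) + 3.  */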
9571 var0 = split_tree (loc, arg0, type, code,
9572 &con0, &lit0, &minus_lit0, 0);
9573 var1 = split_tree (loc, arg1, type, code,
9574 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9576 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9577 if (code == MINUS_EXPR)
9578 code = PLUS_EXPR;
9580 /* With undefined overflow prefer doing association in a type
9581 which wraps on overflow, if that is one of the operand types. */
9582 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9583 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9585 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9586 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9587 atype = TREE_TYPE (arg0);
9588 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9589 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9590 atype = TREE_TYPE (arg1);
9591 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9594 /* With undefined overflow we can only associate constants with one
9595 variable, and constants whose association doesn't overflow. */
9596 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9597 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9599 if (var0 && var1)
9601 tree tmp0 = var0;
9602 tree tmp1 = var1;
9603 bool one_neg = false;
9605 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9607 tmp0 = TREE_OPERAND (tmp0, 0);
9608 one_neg = !one_neg;
9610 if (CONVERT_EXPR_P (tmp0)
9611 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9612 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9613 <= TYPE_PRECISION (atype)))
9614 tmp0 = TREE_OPERAND (tmp0, 0);
9615 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9617 tmp1 = TREE_OPERAND (tmp1, 0);
9618 one_neg = !one_neg;
9620 if (CONVERT_EXPR_P (tmp1)
9621 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9623 <= TYPE_PRECISION (atype)))
9624 tmp1 = TREE_OPERAND (tmp1, 0);
9625 /* The only case we can still associate with two variables
9626 is if they cancel out. */
9627 if (!one_neg
9628 || !operand_equal_p (tmp0, tmp1, 0))
9629 ok = false;
9633 /* Only do something if we found more than two objects. Otherwise,
9634 nothing has changed and we risk infinite recursion. */
9635 if (ok
9636 && (2 < ((var0 != 0) + (var1 != 0)
9637 + (con0 != 0) + (con1 != 0)
9638 + (lit0 != 0) + (lit1 != 0)
9639 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9641 bool any_overflows = false;
9642 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9643 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9644 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9645 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9646 var0 = associate_trees (loc, var0, var1, code, atype);
9647 con0 = associate_trees (loc, con0, con1, code, atype);
9648 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9649 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9650 code, atype);
9652 /* Preserve the MINUS_EXPR if the negative part of the literal is
9653 greater than the positive part. Otherwise, the multiplicative
9654 folding code (i.e. extract_muldiv) may be fooled when
9655 unsigned constants are subtracted, like in the following
9656 example: ((X*2 + 4) - 8U)/2. */
9657 if (minus_lit0 && lit0)
9659 if (TREE_CODE (lit0) == INTEGER_CST
9660 && TREE_CODE (minus_lit0) == INTEGER_CST
9661 && tree_int_cst_lt (lit0, minus_lit0))
9663 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9664 MINUS_EXPR, atype);
9665 lit0 = 0;
9667 else
9669 lit0 = associate_trees (loc, lit0, minus_lit0,
9670 MINUS_EXPR, atype);
9671 minus_lit0 = 0;
9675 /* Don't introduce overflows through reassociation. */
9676 if (!any_overflows
9677 && ((lit0 && TREE_OVERFLOW_P (lit0))
9678 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9679 return NULL_TREE;
9681 if (minus_lit0)
9683 if (con0 == 0)
9684 return
9685 fold_convert_loc (loc, type,
9686 associate_trees (loc, var0, minus_lit0,
9687 MINUS_EXPR, atype));
9688 else
9690 con0 = associate_trees (loc, con0, minus_lit0,
9691 MINUS_EXPR, atype);
9692 return
9693 fold_convert_loc (loc, type,
9694 associate_trees (loc, var0, con0,
9695 PLUS_EXPR, atype));
9699 con0 = associate_trees (loc, con0, lit0, code, atype);
9700 return
9701 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9702 code, atype));
9706 return NULL_TREE;
9708 case MINUS_EXPR:
9709 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9710 if (TREE_CODE (arg0) == NEGATE_EXPR
9711 && negate_expr_p (op1))
9712 return fold_build2_loc (loc, MINUS_EXPR, type,
9713 negate_expr (op1),
9714 fold_convert_loc (loc, type,
9715 TREE_OPERAND (arg0, 0)));
9717 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9718 __complex__ ( x, -y ). This is not the same for SNaNs or if
9719 signed zeros are involved. */
9720 if (!HONOR_SNANS (element_mode (arg0))
9721 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9722 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9724 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9725 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9726 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9727 bool arg0rz = false, arg0iz = false;
9728 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9729 || (arg0i && (arg0iz = real_zerop (arg0i))))
9731 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9732 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9733 if (arg0rz && arg1i && real_zerop (arg1i))
9735 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9736 arg1r ? arg1r
9737 : build1 (REALPART_EXPR, rtype, arg1));
9738 tree ip = arg0i ? arg0i
9739 : build1 (IMAGPART_EXPR, rtype, arg0);
9740 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9742 else if (arg0iz && arg1r && real_zerop (arg1r))
9744 tree rp = arg0r ? arg0r
9745 : build1 (REALPART_EXPR, rtype, arg0);
9746 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9747 arg1i ? arg1i
9748 : build1 (IMAGPART_EXPR, rtype, arg1));
9749 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9754 /* A - B -> A + (-B) if B is easily negatable. */
9755 if (negate_expr_p (op1)
9756 && ! TYPE_OVERFLOW_SANITIZED (type)
9757 && ((FLOAT_TYPE_P (type)
9758 /* Avoid this transformation if B is a positive REAL_CST. */
9759 && (TREE_CODE (op1) != REAL_CST
9760 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9761 || INTEGRAL_TYPE_P (type)))
9762 return fold_build2_loc (loc, PLUS_EXPR, type,
9763 fold_convert_loc (loc, type, arg0),
9764 negate_expr (op1));
9766 /* Fold &a[i] - &a[j] to i-j. */
9767 if (TREE_CODE (arg0) == ADDR_EXPR
9768 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9769 && TREE_CODE (arg1) == ADDR_EXPR
9770 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9772 tree tem = fold_addr_of_array_ref_difference (loc, type,
9773 TREE_OPERAND (arg0, 0),
9774 TREE_OPERAND (arg1, 0));
9775 if (tem)
9776 return tem;
9779 if (FLOAT_TYPE_P (type)
9780 && flag_unsafe_math_optimizations
9781 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9782 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9783 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9784 return tem;
9786 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same
9787 or one of them 1. Make sure the type is not saturating and has the signedness of
9788 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9789 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9790 if ((TREE_CODE (arg0) == MULT_EXPR
9791 || TREE_CODE (arg1) == MULT_EXPR)
9792 && !TYPE_SATURATING (type)
9793 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9794 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9795 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9797 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9798 if (tem)
9799 return tem;
9802 goto associate;
9804 case MULT_EXPR:
9805 if (! FLOAT_TYPE_P (type))
9807 /* Transform x * -C into -x * C if x is easily negatable. */
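     /* e.g. (a - b) * -5 becomes (b - a) * 5; a lone variable is not
        "easily negatable", so x * -5 is left alone here.  */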
9808 if (TREE_CODE (op1) == INTEGER_CST
9809 && tree_int_cst_sgn (op1) == -1
9810 && negate_expr_p (op0)
9811 && (tem = negate_expr (op1)) != op1
9812 && ! TREE_OVERFLOW (tem))
9813 return fold_build2_loc (loc, MULT_EXPR, type,
9814 fold_convert_loc (loc, type,
9815 negate_expr (op0)), tem);
9817 strict_overflow_p = false;
9818 if (TREE_CODE (arg1) == INTEGER_CST
9819 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9820 &strict_overflow_p)))
9822 if (strict_overflow_p)
9823 fold_overflow_warning (("assuming signed overflow does not "
9824 "occur when simplifying "
9825 "multiplication"),
9826 WARN_STRICT_OVERFLOW_MISC);
9827 return fold_convert_loc (loc, type, tem);
9830 /* Optimize z * conj(z) for integer complex numbers. */
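     /* fold_mult_zconjz rewrites z * conj(z) as the real-valued
        __complex__ (r*r + i*i, 0), with r and i the parts of z.  */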
9831 if (TREE_CODE (arg0) == CONJ_EXPR
9832 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9833 return fold_mult_zconjz (loc, type, arg1);
9834 if (TREE_CODE (arg1) == CONJ_EXPR
9835 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9836 return fold_mult_zconjz (loc, type, arg0);
9838 else
9840 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9841 This is not the same for NaNs or if signed zeros are
9842 involved. */
9843 if (!HONOR_NANS (arg0)
9844 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9845 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9846 && TREE_CODE (arg1) == COMPLEX_CST
9847 && real_zerop (TREE_REALPART (arg1)))
9849 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9850 if (real_onep (TREE_IMAGPART (arg1)))
9851 return
9852 fold_build2_loc (loc, COMPLEX_EXPR, type,
9853 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9854 rtype, arg0)),
9855 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9856 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9857 return
9858 fold_build2_loc (loc, COMPLEX_EXPR, type,
9859 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9860 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9861 rtype, arg0)));
9864 /* Optimize z * conj(z) for floating point complex numbers.
9865 Guarded by flag_unsafe_math_optimizations as non-finite
9866 imaginary components don't produce scalar results. */
9867 if (flag_unsafe_math_optimizations
9868 && TREE_CODE (arg0) == CONJ_EXPR
9869 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9870 return fold_mult_zconjz (loc, type, arg1);
9871 if (flag_unsafe_math_optimizations
9872 && TREE_CODE (arg1) == CONJ_EXPR
9873 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9874 return fold_mult_zconjz (loc, type, arg0);
9876 goto associate;
9878 case BIT_IOR_EXPR:
9879 /* Canonicalize (X & C1) | C2. */
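     /* Two worked cases for 8-bit unsigned x: (x & 0x30) | 0xf3 folds to
        0xf3, since C1 is a subset of C2; and (x & 0x0c) | 0xf3 becomes
        x | 0xf3, since C1 | C2 covers the whole mode.  */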
9880 if (TREE_CODE (arg0) == BIT_AND_EXPR
9881 && TREE_CODE (arg1) == INTEGER_CST
9882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9884 int width = TYPE_PRECISION (type), w;
9885 wide_int c1 = TREE_OPERAND (arg0, 1);
9886 wide_int c2 = arg1;
9888 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9889 if ((c1 & c2) == c1)
9890 return omit_one_operand_loc (loc, type, arg1,
9891 TREE_OPERAND (arg0, 0));
9893 wide_int msk = wi::mask (width, false,
9894 TYPE_PRECISION (TREE_TYPE (arg1)));
9896 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9897 if (msk.and_not (c1 | c2) == 0)
9898 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9899 TREE_OPERAND (arg0, 0), arg1);
9901 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9902 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9903 mode which allows further optimizations. */
9904 c1 &= msk;
9905 c2 &= msk;
9906 wide_int c3 = c1.and_not (c2);
9907 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9909 wide_int mask = wi::mask (w, false,
9910 TYPE_PRECISION (type));
9911 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9913 c3 = mask;
9914 break;
9918 if (c3 != c1)
9919 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9920 fold_build2_loc (loc, BIT_AND_EXPR, type,
9921 TREE_OPERAND (arg0, 0),
9922 wide_int_to_tree (type,
9923 c3)),
9924 arg1);
9927 /* See if this can be simplified into a rotate first. If that
9928 is unsuccessful, continue in the association code. */
9929 goto bit_rotate;
9931 case BIT_XOR_EXPR:
9932 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9933 if (TREE_CODE (arg0) == BIT_AND_EXPR
9934 && INTEGRAL_TYPE_P (type)
9935 && integer_onep (TREE_OPERAND (arg0, 1))
9936 && integer_onep (arg1))
9937 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9938 build_zero_cst (TREE_TYPE (arg0)));
9940 /* See if this can be simplified into a rotate first. If that
9941 is unsuccessful, continue in the association code. */
9942 goto bit_rotate;
9944 case BIT_AND_EXPR:
9945 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9946 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9947 && INTEGRAL_TYPE_P (type)
9948 && integer_onep (TREE_OPERAND (arg0, 1))
9949 && integer_onep (arg1))
9951 tree tem2;
9952 tem = TREE_OPERAND (arg0, 0);
9953 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9954 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9955 tem, tem2);
9956 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9957 build_zero_cst (TREE_TYPE (tem)));
9959 /* Fold ~X & 1 as (X & 1) == 0. */
9960 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9961 && INTEGRAL_TYPE_P (type)
9962 && integer_onep (arg1))
9964 tree tem2;
9965 tem = TREE_OPERAND (arg0, 0);
9966 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9967 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9968 tem, tem2);
9969 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9970 build_zero_cst (TREE_TYPE (tem)));
9972 /* Fold !X & 1 as X == 0. */
9973 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9974 && integer_onep (arg1))
9976 tem = TREE_OPERAND (arg0, 0);
9977 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9978 build_zero_cst (TREE_TYPE (tem)));
9981 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9982 multiple of 1 << CST. */
9983 if (TREE_CODE (arg1) == INTEGER_CST)
9985 wide_int cst1 = arg1;
9986 wide_int ncst1 = -cst1;
9987 if ((cst1 & ncst1) == ncst1
9988 && multiple_of_p (type, arg0,
9989 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9990 return fold_convert_loc (loc, type, arg0);
9993 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
9994 bits from CST2. */
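     /* e.g. (x * 8) & 3 folds to 0 outright, while (x * 4) & 7 drops the
        two always-zero low bits and becomes (x * 4) & 4.  */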
9995 if (TREE_CODE (arg1) == INTEGER_CST
9996 && TREE_CODE (arg0) == MULT_EXPR
9997 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9999 wide_int warg1 = arg1;
10000 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10002 if (masked == 0)
10003 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10004 arg0, arg1);
10005 else if (masked != warg1)
10007 /* Avoid the transform if arg1 is a mask of some
10008 mode which allows further optimizations. */
10009 int pop = wi::popcount (warg1);
10010 if (!(pop >= BITS_PER_UNIT
10011 && pow2p_hwi (pop)
10012 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10013 return fold_build2_loc (loc, code, type, op0,
10014 wide_int_to_tree (type, masked));
10018 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10019 ((A & N) + B) & M -> (A + B) & M
10020 Similarly if (N & M) == 0,
10021 ((A | N) + B) & M -> (A + B) & M
10022 and for - instead of + (or unary - instead of +)
10023 and/or ^ instead of |.
10024 If B is constant and (B & M) == 0, fold into A & M. */
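     /* A worked instance with M == 0xff: ((a & 0x1ff) + b) & 0xff
        simplifies to (a + b) & 0xff, and ((a | 0x100) - b) & 0xff likewise
        becomes (a - b) & 0xff (the arithmetic may be rebuilt in the
        corresponding unsigned type first).  */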
10025 if (TREE_CODE (arg1) == INTEGER_CST)
10027 wide_int cst1 = arg1;
10028 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10029 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10030 && (TREE_CODE (arg0) == PLUS_EXPR
10031 || TREE_CODE (arg0) == MINUS_EXPR
10032 || TREE_CODE (arg0) == NEGATE_EXPR)
10033 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10034 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10036 tree pmop[2];
10037 int which = 0;
10038 wide_int cst0;
10040 /* Now we know that arg0 is (C + D) or (C - D) or
10041 -C and arg1 (M) is == (1LL << cst) - 1.
10042 Store C into PMOP[0] and D into PMOP[1]. */
10043 pmop[0] = TREE_OPERAND (arg0, 0);
10044 pmop[1] = NULL;
10045 if (TREE_CODE (arg0) != NEGATE_EXPR)
10047 pmop[1] = TREE_OPERAND (arg0, 1);
10048 which = 1;
10051 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10052 which = -1;
10054 for (; which >= 0; which--)
10055 switch (TREE_CODE (pmop[which]))
10057 case BIT_AND_EXPR:
10058 case BIT_IOR_EXPR:
10059 case BIT_XOR_EXPR:
10060 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10061 != INTEGER_CST)
10062 break;
10063 cst0 = TREE_OPERAND (pmop[which], 1);
10064 cst0 &= cst1;
10065 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10067 if (cst0 != cst1)
10068 break;
10070 else if (cst0 != 0)
10071 break;
10072 /* If C or D is of the form (A & N) where
10073 (N & M) == M, or of the form (A | N) or
10074 (A ^ N) where (N & M) == 0, replace it with A. */
10075 pmop[which] = TREE_OPERAND (pmop[which], 0);
10076 break;
10077 case INTEGER_CST:
10078 /* If C or D is a constant N where (N & M) == 0, it can be
10079 omitted (assumed 0). */
10080 if ((TREE_CODE (arg0) == PLUS_EXPR
10081 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10082 && (cst1 & pmop[which]) == 0)
10083 pmop[which] = NULL;
10084 break;
10085 default:
10086 break;
10089 /* Only build anything new if we optimized one or both arguments
10090 above. */
10091 if (pmop[0] != TREE_OPERAND (arg0, 0)
10092 || (TREE_CODE (arg0) != NEGATE_EXPR
10093 && pmop[1] != TREE_OPERAND (arg0, 1)))
10095 tree utype = TREE_TYPE (arg0);
10096 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10098 /* Perform the operations in a type that has defined
10099 overflow behavior. */
10100 utype = unsigned_type_for (TREE_TYPE (arg0));
10101 if (pmop[0] != NULL)
10102 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10103 if (pmop[1] != NULL)
10104 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10107 if (TREE_CODE (arg0) == NEGATE_EXPR)
10108 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10109 else if (TREE_CODE (arg0) == PLUS_EXPR)
10111 if (pmop[0] != NULL && pmop[1] != NULL)
10112 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10113 pmop[0], pmop[1]);
10114 else if (pmop[0] != NULL)
10115 tem = pmop[0];
10116 else if (pmop[1] != NULL)
10117 tem = pmop[1];
10118 else
10119 return build_int_cst (type, 0);
10121 else if (pmop[0] == NULL)
10122 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10123 else
10124 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10125 pmop[0], pmop[1]);
10126 /* TEM is now the new binary +, - or unary - replacement. */
10127 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10128 fold_convert_loc (loc, utype, arg1));
10129 return fold_convert_loc (loc, type, tem);
10134 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10135 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10136 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10138 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10140 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10141 if (mask == -1)
10142 return
10143 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10146 goto associate;
10148 case RDIV_EXPR:
10149 /* Don't touch a floating-point divide by zero unless the mode
10150 of the constant can represent infinity. */
10151 if (TREE_CODE (arg1) == REAL_CST
10152 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10153 && real_zerop (arg1))
10154 return NULL_TREE;
10156 /* (-A) / (-B) -> A / B */
10157 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10158 return fold_build2_loc (loc, RDIV_EXPR, type,
10159 TREE_OPERAND (arg0, 0),
10160 negate_expr (arg1));
10161 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10162 return fold_build2_loc (loc, RDIV_EXPR, type,
10163 negate_expr (arg0),
10164 TREE_OPERAND (arg1, 0));
10165 return NULL_TREE;
10167 case TRUNC_DIV_EXPR:
10168 /* Fall through */
10170 case FLOOR_DIV_EXPR:
10171 /* Simplify A / (B << N) where A and B are positive and B is
10172 a power of 2, to A >> (N + log2(B)). */
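     /* e.g. for unsigned a, a / (4 << n) becomes a >> (n + 2).  */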
10173 strict_overflow_p = false;
10174 if (TREE_CODE (arg1) == LSHIFT_EXPR
10175 && (TYPE_UNSIGNED (type)
10176 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10178 tree sval = TREE_OPERAND (arg1, 0);
10179 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10181 tree sh_cnt = TREE_OPERAND (arg1, 1);
10182 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10183 wi::exact_log2 (sval));
10185 if (strict_overflow_p)
10186 fold_overflow_warning (("assuming signed overflow does not "
10187 "occur when simplifying A / (B << N)"),
10188 WARN_STRICT_OVERFLOW_MISC);
10190 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10191 sh_cnt, pow2);
10192 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10193 fold_convert_loc (loc, type, arg0), sh_cnt);
10197 /* Fall through */
10199 case ROUND_DIV_EXPR:
10200 case CEIL_DIV_EXPR:
10201 case EXACT_DIV_EXPR:
10202 if (integer_zerop (arg1))
10203 return NULL_TREE;
10205 /* Convert -A / -B to A / B when the type is signed and overflow is
10206 undefined. */
10207 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10208 && TREE_CODE (arg0) == NEGATE_EXPR
10209 && negate_expr_p (op1))
10211 if (INTEGRAL_TYPE_P (type))
10212 fold_overflow_warning (("assuming signed overflow does not occur "
10213 "when distributing negation across "
10214 "division"),
10215 WARN_STRICT_OVERFLOW_MISC);
10216 return fold_build2_loc (loc, code, type,
10217 fold_convert_loc (loc, type,
10218 TREE_OPERAND (arg0, 0)),
10219 negate_expr (op1));
10221 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10222 && TREE_CODE (arg1) == NEGATE_EXPR
10223 && negate_expr_p (op0))
10225 if (INTEGRAL_TYPE_P (type))
10226 fold_overflow_warning (("assuming signed overflow does not occur "
10227 "when distributing negation across "
10228 "division"),
10229 WARN_STRICT_OVERFLOW_MISC);
10230 return fold_build2_loc (loc, code, type,
10231 negate_expr (op0),
10232 fold_convert_loc (loc, type,
10233 TREE_OPERAND (arg1, 0)));
10236 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10237 operation, EXACT_DIV_EXPR.
10239 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10240 At one time others generated faster code; it's not clear if they do
10241 after the last round of changes to the DIV code in expmed.c. */
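     /* e.g. a FLOOR_DIV_EXPR of x * 12 by 4 becomes an EXACT_DIV_EXPR,
        since multiple_of_p proves no rounding can occur.  */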
10242 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10243 && multiple_of_p (type, arg0, arg1))
10244 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10245 fold_convert (type, arg0),
10246 fold_convert (type, arg1));
10248 strict_overflow_p = false;
10249 if (TREE_CODE (arg1) == INTEGER_CST
10250 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10251 &strict_overflow_p)))
10253 if (strict_overflow_p)
10254 fold_overflow_warning (("assuming signed overflow does not occur "
10255 "when simplifying division"),
10256 WARN_STRICT_OVERFLOW_MISC);
10257 return fold_convert_loc (loc, type, tem);
10260 return NULL_TREE;
10262 case CEIL_MOD_EXPR:
10263 case FLOOR_MOD_EXPR:
10264 case ROUND_MOD_EXPR:
10265 case TRUNC_MOD_EXPR:
10266 strict_overflow_p = false;
10267 if (TREE_CODE (arg1) == INTEGER_CST
10268 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10269 &strict_overflow_p)))
10271 if (strict_overflow_p)
10272 fold_overflow_warning (("assuming signed overflow does not occur "
10273 "when simplifying modulus"),
10274 WARN_STRICT_OVERFLOW_MISC);
10275 return fold_convert_loc (loc, type, tem);
10278 return NULL_TREE;
10280 case LROTATE_EXPR:
10281 case RROTATE_EXPR:
10282 case RSHIFT_EXPR:
10283 case LSHIFT_EXPR:
10284 /* Since negative shift count is not well-defined,
10285 don't try to compute it in the compiler. */
10286 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10287 return NULL_TREE;
10289 prec = element_precision (type);
10291 /* If we have a rotate of a bit operation with the rotate count and
10292 the second operand of the bit operation both constant,
10293 permute the two operations. */
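     /* e.g. for 16-bit unsigned x, (x & 0x00ff) rrotate 8 becomes
        (x rrotate 8) & 0xff00, the constant being rotated at compile
        time.  */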
10294 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10295 && (TREE_CODE (arg0) == BIT_AND_EXPR
10296 || TREE_CODE (arg0) == BIT_IOR_EXPR
10297 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10300 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10301 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10302 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10303 fold_build2_loc (loc, code, type,
10304 arg00, arg1),
10305 fold_build2_loc (loc, code, type,
10306 arg01, arg1));
10309 /* Two consecutive rotates adding up to some integer
10310 multiple of the precision of the type can be ignored. */
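     /* e.g. for 32-bit x, (x rrotate 22) rrotate 10 is just x.  */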
10311 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10312 && TREE_CODE (arg0) == RROTATE_EXPR
10313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10314 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10315 prec) == 0)
10316 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10318 return NULL_TREE;
10320 case MIN_EXPR:
10321 case MAX_EXPR:
10322 goto associate;
10324 case TRUTH_ANDIF_EXPR:
10325 /* Note that the operands of this must be ints
10326 and their values must be 0 or 1.
10327 ("true" is a fixed value perhaps depending on the language.) */
10328 /* If first arg is constant zero, return it. */
10329 if (integer_zerop (arg0))
10330 return fold_convert_loc (loc, type, arg0);
10331 /* FALLTHRU */
10332 case TRUTH_AND_EXPR:
10333 /* If either arg is constant true, drop it. */
10334 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10335 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10336 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10337 /* Preserve sequence points. */
10338 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10339 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10340 /* If second arg is constant zero, result is zero, but first arg
10341 must be evaluated. */
10342 if (integer_zerop (arg1))
10343 return omit_one_operand_loc (loc, type, arg1, arg0);
10344 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10345 case will be handled here. */
10346 if (integer_zerop (arg0))
10347 return omit_one_operand_loc (loc, type, arg0, arg1);
10349 /* !X && X is always false. */
10350 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10351 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10352 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10353 /* X && !X is always false. */
10354 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10355 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10356 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10358 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10359 means A >= Y && A != MAX, but in this case we know that
10360 A < X <= MAX. */
10362 if (!TREE_SIDE_EFFECTS (arg0)
10363 && !TREE_SIDE_EFFECTS (arg1))
10365 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10366 if (tem && !operand_equal_p (tem, arg0, 0))
10367 return fold_build2_loc (loc, code, type, tem, arg1);
10369 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10370 if (tem && !operand_equal_p (tem, arg1, 0))
10371 return fold_build2_loc (loc, code, type, arg0, tem);
10374 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10375 != NULL_TREE)
10376 return tem;
10378 return NULL_TREE;
10380 case TRUTH_ORIF_EXPR:
10381 /* Note that the operands of this must be ints
10382 and their values must be 0 or true.
10383 ("true" is a fixed value perhaps depending on the language.) */
10384 /* If first arg is constant true, return it. */
10385 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10386 return fold_convert_loc (loc, type, arg0);
10387 /* FALLTHRU */
10388 case TRUTH_OR_EXPR:
10389 /* If either arg is constant zero, drop it. */
10390 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10391 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10392 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10393 /* Preserve sequence points. */
10394 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10395 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10396 /* If second arg is constant true, result is true, but we must
10397 evaluate first arg. */
10398 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10399 return omit_one_operand_loc (loc, type, arg1, arg0);
10400 /* Likewise for first arg, but note this only occurs here for
10401 TRUTH_OR_EXPR. */
10402 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10403 return omit_one_operand_loc (loc, type, arg0, arg1);
10405 /* !X || X is always true. */
10406 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10407 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10408 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10409 /* X || !X is always true. */
10410 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10411 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10412 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10414 /* (X && !Y) || (!X && Y) is X ^ Y */
10415 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10416 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10418 tree a0, a1, l0, l1, n0, n1;
10420 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10421 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10423 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10424 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10426 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10427 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10429 if ((operand_equal_p (n0, a0, 0)
10430 && operand_equal_p (n1, a1, 0))
10431 || (operand_equal_p (n0, a1, 0)
10432 && operand_equal_p (n1, a0, 0)))
10433 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10436 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10437 != NULL_TREE)
10438 return tem;
10440 return NULL_TREE;
10442 case TRUTH_XOR_EXPR:
10443 /* If the second arg is constant zero, drop it. */
10444 if (integer_zerop (arg1))
10445 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10446 /* If the second arg is constant true, this is a logical inversion. */
10447 if (integer_onep (arg1))
10449 tem = invert_truthvalue_loc (loc, arg0);
10450 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10452 /* Identical arguments cancel to zero. */
10453 if (operand_equal_p (arg0, arg1, 0))
10454 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10456 /* !X ^ X is always true. */
10457 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10459 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10461 /* X ^ !X is always true. */
10462 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10463 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10464 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10466 return NULL_TREE;
10468 case EQ_EXPR:
10469 case NE_EXPR:
10470 STRIP_NOPS (arg0);
10471 STRIP_NOPS (arg1);
10473 tem = fold_comparison (loc, code, type, op0, op1);
10474 if (tem != NULL_TREE)
10475 return tem;
10477 /* bool_var != 1 becomes !bool_var. */
10478 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10479 && code == NE_EXPR)
10480 return fold_convert_loc (loc, type,
10481 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10482 TREE_TYPE (arg0), arg0));
10484 /* bool_var == 0 becomes !bool_var. */
10485 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10486 && code == EQ_EXPR)
10487 return fold_convert_loc (loc, type,
10488 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10489 TREE_TYPE (arg0), arg0));
10491 /* !exp != 0 becomes !exp */
10492 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10493 && code == NE_EXPR)
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10496 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
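     /* e.g. x + y == x becomes y == 0; omit_two_operands_loc keeps x
        alive for any side effects.  */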
10497 if ((TREE_CODE (arg0) == PLUS_EXPR
10498 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10499 || TREE_CODE (arg0) == MINUS_EXPR)
10500 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10501 0)),
10502 arg1, 0)
10503 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10504 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10506 tree val = TREE_OPERAND (arg0, 1);
10507 val = fold_build2_loc (loc, code, type, val,
10508 build_int_cst (TREE_TYPE (val), 0));
10509 return omit_two_operands_loc (loc, type, val,
10510 TREE_OPERAND (arg0, 0), arg1);
10513 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10514 if ((TREE_CODE (arg1) == PLUS_EXPR
10515 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10516 || TREE_CODE (arg1) == MINUS_EXPR)
10517 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10518 0)),
10519 arg0, 0)
10520 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10521 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10523 tree val = TREE_OPERAND (arg1, 1);
10524 val = fold_build2_loc (loc, code, type, val,
10525 build_int_cst (TREE_TYPE (val), 0));
10526 return omit_two_operands_loc (loc, type, val,
10527 TREE_OPERAND (arg1, 0), arg0);
10530 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
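     /* e.g. 7 - x == x would require 2*x == 7, which has no solution even
        modulo a power of two, so it folds to constant false (constant
        true for NE_EXPR).  */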
10531 if (TREE_CODE (arg0) == MINUS_EXPR
10532 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10533 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10534 1)),
10535 arg1, 0)
10536 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10537 return omit_two_operands_loc (loc, type,
10538 code == NE_EXPR
10539 ? boolean_true_node : boolean_false_node,
10540 TREE_OPERAND (arg0, 1), arg1);
10542 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10543 if (TREE_CODE (arg1) == MINUS_EXPR
10544 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10545 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10546 1)),
10547 arg0, 0)
10548 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10549 return omit_two_operands_loc (loc, type,
10550 code == NE_EXPR
10551 ? boolean_true_node : boolean_false_node,
10552 TREE_OPERAND (arg1, 1), arg0);
10554 /* If this is an EQ or NE comparison with zero and ARG0 is
10555 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10556 two operations, but the latter can be done in one less insn
10557 on machines that have only two-operand insns or on which a
10558 constant cannot be the first operand. */
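     /* e.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0.  */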
10559 if (TREE_CODE (arg0) == BIT_AND_EXPR
10560 && integer_zerop (arg1))
10562 tree arg00 = TREE_OPERAND (arg0, 0);
10563 tree arg01 = TREE_OPERAND (arg0, 1);
10564 if (TREE_CODE (arg00) == LSHIFT_EXPR
10565 && integer_onep (TREE_OPERAND (arg00, 0)))
10567 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10568 arg01, TREE_OPERAND (arg00, 1));
10569 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10570 build_int_cst (TREE_TYPE (arg0), 1));
10571 return fold_build2_loc (loc, code, type,
10572 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10573 arg1);
10575 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10576 && integer_onep (TREE_OPERAND (arg01, 0)))
10578 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10579 arg00, TREE_OPERAND (arg01, 1));
10580 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10581 build_int_cst (TREE_TYPE (arg0), 1));
10582 return fold_build2_loc (loc, code, type,
10583 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10584 arg1);
10588 /* If this is an NE or EQ comparison of zero against the result of a
10589 signed MOD operation whose second operand is a power of 2, make
10590 the MOD operation unsigned since it is simpler and equivalent. */
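     /* e.g. for signed x, x % 4 == 0 only tests the two low bits, so the
        modulus is rewritten as (unsigned) x % 4 compared against 0.  */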
10591 if (integer_zerop (arg1)
10592 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10593 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10594 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10595 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10596 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10597 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10599 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10600 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10601 fold_convert_loc (loc, newtype,
10602 TREE_OPERAND (arg0, 0)),
10603 fold_convert_loc (loc, newtype,
10604 TREE_OPERAND (arg0, 1)));
10606 return fold_build2_loc (loc, code, type, newmod,
10607 fold_convert_loc (loc, newtype, arg1));
10610 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10611 C1 is a valid shift constant, and C2 is a power of two, i.e.
10612 a single bit. */
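     /* e.g. ((x >> 3) & 4) != 0 becomes (x & 32) != 0, because the single
        bit 4 shifted left by 3 still fits in the type.  */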
10613 if (TREE_CODE (arg0) == BIT_AND_EXPR
10614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10615 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10616 == INTEGER_CST
10617 && integer_pow2p (TREE_OPERAND (arg0, 1))
10618 && integer_zerop (arg1))
10620 tree itype = TREE_TYPE (arg0);
10621 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10622 prec = TYPE_PRECISION (itype);
10624 /* Check for a valid shift count. */
10625 if (wi::ltu_p (arg001, prec))
10627 tree arg01 = TREE_OPERAND (arg0, 1);
10628 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10629 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10630 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10631 can be rewritten as (X & (C2 << C1)) != 0. */
10632 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10634 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10635 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10636 return fold_build2_loc (loc, code, type, tem,
10637 fold_convert_loc (loc, itype, arg1));
10639 /* Otherwise, for signed (arithmetic) shifts,
10640 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10641 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10642 else if (!TYPE_UNSIGNED (itype))
10643 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10644 arg000, build_int_cst (itype, 0));
10645 /* Otherwise, for unsigned (logical) shifts,
10646 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10647 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10648 else
10649 return omit_one_operand_loc (loc, type,
10650 code == EQ_EXPR ? integer_one_node
10651 : integer_zero_node,
10652 arg000);
10656 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10657 Similarly for NE_EXPR. */
10658 if (TREE_CODE (arg0) == BIT_AND_EXPR
10659 && TREE_CODE (arg1) == INTEGER_CST
10660 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10662 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10663 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10664 TREE_OPERAND (arg0, 1));
10665 tree dandnotc
10666 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10667 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10668 notc);
10669 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10670 if (integer_nonzerop (dandnotc))
10671 return omit_one_operand_loc (loc, type, rslt, arg0);
10674 /* If this is a comparison of a field, we may be able to simplify it. */
10675 if ((TREE_CODE (arg0) == COMPONENT_REF
10676 || TREE_CODE (arg0) == BIT_FIELD_REF)
10677 /* Handle the constant case even without -O
10678 to make sure the warnings are given. */
10679 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10681 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10682 if (t1)
10683 return t1;
10686 /* Optimize comparisons of strlen vs zero to a compare of the
10687 first character of the string vs zero. To wit,
10688 strlen(ptr) == 0 => *ptr == 0
10689 strlen(ptr) != 0 => *ptr != 0
10690 Other cases should reduce to one of these two (or a constant)
10691 due to the return value of strlen being unsigned. */
10692 if (TREE_CODE (arg0) == CALL_EXPR
10693 && integer_zerop (arg1))
10695 tree fndecl = get_callee_fndecl (arg0);
10697 if (fndecl
10698 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10699 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10700 && call_expr_nargs (arg0) == 1
10701 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10703 tree iref = build_fold_indirect_ref_loc (loc,
10704 CALL_EXPR_ARG (arg0, 0));
10705 return fold_build2_loc (loc, code, type, iref,
10706 build_int_cst (TREE_TYPE (iref), 0));
10710 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10711 of X. Similarly fold (X >> C) == 0 into X >= 0. */
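     /* e.g. for 32-bit x, (x >> 31) != 0 becomes x < 0, inserting a cast
        to the corresponding signed type when x is unsigned.  */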
10712 if (TREE_CODE (arg0) == RSHIFT_EXPR
10713 && integer_zerop (arg1)
10714 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10716 tree arg00 = TREE_OPERAND (arg0, 0);
10717 tree arg01 = TREE_OPERAND (arg0, 1);
10718 tree itype = TREE_TYPE (arg00);
10719 if (wi::eq_p (arg01, element_precision (itype) - 1))
10721 if (TYPE_UNSIGNED (itype))
10723 itype = signed_type_for (itype);
10724 arg00 = fold_convert_loc (loc, itype, arg00);
10726 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10727 type, arg00, build_zero_cst (itype));
10731 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10732 (X & C) == 0 when C is a single bit. */
10733 if (TREE_CODE (arg0) == BIT_AND_EXPR
10734 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10735 && integer_zerop (arg1)
10736 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10738 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10739 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10740 TREE_OPERAND (arg0, 1));
10741 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10742 type, tem,
10743 fold_convert_loc (loc, TREE_TYPE (arg0),
10744 arg1));
10747 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10748 constant C is a power of two, i.e. a single bit. */
10749 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10750 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10751 && integer_zerop (arg1)
10752 && integer_pow2p (TREE_OPERAND (arg0, 1))
10753 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10754 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10756 tree arg00 = TREE_OPERAND (arg0, 0);
10757 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10758 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10761 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10762 when C is a power of two, i.e. a single bit. */
10763 if (TREE_CODE (arg0) == BIT_AND_EXPR
10764 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10765 && integer_zerop (arg1)
10766 && integer_pow2p (TREE_OPERAND (arg0, 1))
10767 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10768 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10770 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10771 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10772 arg000, TREE_OPERAND (arg0, 1));
10773 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10774 tem, build_int_cst (TREE_TYPE (tem), 0));
10777 if (integer_zerop (arg1)
10778 && tree_expr_nonzero_p (arg0))
10780 tree res = constant_boolean_node (code == NE_EXPR, type);
10781 return omit_one_operand_loc (loc, type, res, arg0);
10784 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10785 if (TREE_CODE (arg0) == BIT_AND_EXPR
10786 && TREE_CODE (arg1) == BIT_AND_EXPR)
10788 tree arg00 = TREE_OPERAND (arg0, 0);
10789 tree arg01 = TREE_OPERAND (arg0, 1);
10790 tree arg10 = TREE_OPERAND (arg1, 0);
10791 tree arg11 = TREE_OPERAND (arg1, 1);
10792 tree itype = TREE_TYPE (arg0);
10794 if (operand_equal_p (arg01, arg11, 0))
10795 return fold_build2_loc (loc, code, type,
10796 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10797 fold_build2_loc (loc,
10798 BIT_XOR_EXPR, itype,
10799 arg00, arg10),
10800 arg01),
10801 build_zero_cst (itype));
10803 if (operand_equal_p (arg01, arg10, 0))
10804 return fold_build2_loc (loc, code, type,
10805 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10806 fold_build2_loc (loc,
10807 BIT_XOR_EXPR, itype,
10808 arg00, arg11),
10809 arg01),
10810 build_zero_cst (itype));
10812 if (operand_equal_p (arg00, arg11, 0))
10813 return fold_build2_loc (loc, code, type,
10814 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10815 fold_build2_loc (loc,
10816 BIT_XOR_EXPR, itype,
10817 arg01, arg10),
10818 arg00),
10819 build_zero_cst (itype));
10821 if (operand_equal_p (arg00, arg10, 0))
10822 return fold_build2_loc (loc, code, type,
10823 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10824 fold_build2_loc (loc,
10825 BIT_XOR_EXPR, itype,
10826 arg01, arg11),
10827 arg00),
10828 build_zero_cst (itype));
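/* Worked example (editor's addition; hypothetical helpers): the
masked values agree exactly when X and Y agree on every bit selected
by C, so one AND of the XOR replaces two ANDs. */
#if 0
static int masked_eq_before (int x, int y) { return (x & 0xf0) == (y & 0xf0); }
static int masked_eq_after (int x, int y) { return ((x ^ y) & 0xf0) == 0; }
#endif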
10831 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10832 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10834 tree arg00 = TREE_OPERAND (arg0, 0);
10835 tree arg01 = TREE_OPERAND (arg0, 1);
10836 tree arg10 = TREE_OPERAND (arg1, 0);
10837 tree arg11 = TREE_OPERAND (arg1, 1);
10838 tree itype = TREE_TYPE (arg0);
10840 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10841 operand_equal_p guarantees no side-effects so we don't need
10842 to use omit_one_operand on Z. */
10843 if (operand_equal_p (arg01, arg11, 0))
10844 return fold_build2_loc (loc, code, type, arg00,
10845 fold_convert_loc (loc, TREE_TYPE (arg00),
10846 arg10));
10847 if (operand_equal_p (arg01, arg10, 0))
10848 return fold_build2_loc (loc, code, type, arg00,
10849 fold_convert_loc (loc, TREE_TYPE (arg00),
10850 arg11));
10851 if (operand_equal_p (arg00, arg11, 0))
10852 return fold_build2_loc (loc, code, type, arg01,
10853 fold_convert_loc (loc, TREE_TYPE (arg01),
10854 arg10));
10855 if (operand_equal_p (arg00, arg10, 0))
10856 return fold_build2_loc (loc, code, type, arg01,
10857 fold_convert_loc (loc, TREE_TYPE (arg01),
10858 arg11));
10860 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10861 if (TREE_CODE (arg01) == INTEGER_CST
10862 && TREE_CODE (arg11) == INTEGER_CST)
10864 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10865 fold_convert_loc (loc, itype, arg11));
10866 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10867 return fold_build2_loc (loc, code, type, tem,
10868 fold_convert_loc (loc, itype, arg10));
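/* Worked example (editor's addition; hypothetical helpers): XOR is an
involution, so both constants can be folded onto one operand:
(x ^ 5) == (y ^ 3) becomes (x ^ (5 ^ 3)) == y, i.e. (x ^ 6) == y. */
#if 0
static int xor_eq_before (int x, int y) { return (x ^ 5) == (y ^ 3); }
static int xor_eq_after (int x, int y) { return (x ^ 6) == y; }
#endif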
10872 /* Attempt to simplify equality/inequality comparisons of complex
10873 values. Only lower the comparison if the result is known or
10874 can be simplified to a single scalar comparison. */
10875 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10876 || TREE_CODE (arg0) == COMPLEX_CST)
10877 && (TREE_CODE (arg1) == COMPLEX_EXPR
10878 || TREE_CODE (arg1) == COMPLEX_CST))
10880 tree real0, imag0, real1, imag1;
10881 tree rcond, icond;
10883 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10885 real0 = TREE_OPERAND (arg0, 0);
10886 imag0 = TREE_OPERAND (arg0, 1);
10888 else
10890 real0 = TREE_REALPART (arg0);
10891 imag0 = TREE_IMAGPART (arg0);
10894 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10896 real1 = TREE_OPERAND (arg1, 0);
10897 imag1 = TREE_OPERAND (arg1, 1);
10899 else
10901 real1 = TREE_REALPART (arg1);
10902 imag1 = TREE_IMAGPART (arg1);
10905 rcond = fold_binary_loc (loc, code, type, real0, real1);
10906 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10908 if (integer_zerop (rcond))
10910 if (code == EQ_EXPR)
10911 return omit_two_operands_loc (loc, type, boolean_false_node,
10912 imag0, imag1);
10913 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10915 else
10917 if (code == NE_EXPR)
10918 return omit_two_operands_loc (loc, type, boolean_true_node,
10919 imag0, imag1);
10920 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10924 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10925 if (icond && TREE_CODE (icond) == INTEGER_CST)
10927 if (integer_zerop (icond))
10929 if (code == EQ_EXPR)
10930 return omit_two_operands_loc (loc, type, boolean_false_node,
10931 real0, real1);
10932 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10934 else
10936 if (code == NE_EXPR)
10937 return omit_two_operands_loc (loc, type, boolean_true_node,
10938 real0, real1);
10939 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10944 return NULL_TREE;
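/* Illustrative sketch (editor's addition, using the GNU __real__ and
__imag__ extensions): a complex equality lowers to the pair of scalar
comparisons below; when either half folds to a constant, the other
half alone decides the result, with the discarded operands kept only
for their side effects. */
#if 0
static int complex_eq (_Complex double a, _Complex double b)
{ return __real__ a == __real__ b && __imag__ a == __imag__ b; }
#endif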
10946 case LT_EXPR:
10947 case GT_EXPR:
10948 case LE_EXPR:
10949 case GE_EXPR:
10950 tem = fold_comparison (loc, code, type, op0, op1);
10951 if (tem != NULL_TREE)
10952 return tem;
10954 /* Transform comparisons of the form X +- C CMP X. */
10955 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10956 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10957 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10958 && !HONOR_SNANS (arg0))
10959 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10960 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10962 tree arg01 = TREE_OPERAND (arg0, 1);
10963 enum tree_code code0 = TREE_CODE (arg0);
10964 int is_positive;
10966 if (TREE_CODE (arg01) == REAL_CST)
10967 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10968 else
10969 is_positive = tree_int_cst_sgn (arg01);
10971 /* (X - c) > X becomes false. */
10972 if (code == GT_EXPR
10973 && ((code0 == MINUS_EXPR && is_positive >= 0)
10974 || (code0 == PLUS_EXPR && is_positive <= 0)))
10976 if (TREE_CODE (arg01) == INTEGER_CST
10977 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10978 fold_overflow_warning (("assuming signed overflow does not "
10979 "occur when assuming that (X - c) > X "
10980 "is always false"),
10981 WARN_STRICT_OVERFLOW_ALL);
10982 return constant_boolean_node (0, type);
10985 /* Likewise (X + c) < X becomes false. */
10986 if (code == LT_EXPR
10987 && ((code0 == PLUS_EXPR && is_positive >= 0)
10988 || (code0 == MINUS_EXPR && is_positive <= 0)))
10990 if (TREE_CODE (arg01) == INTEGER_CST
10991 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10992 fold_overflow_warning (("assuming signed overflow does not "
10993 "occur when assuming that "
10994 "(X + c) < X is always false"),
10995 WARN_STRICT_OVERFLOW_ALL);
10996 return constant_boolean_node (0, type);
10999 /* Convert (X - c) <= X to true. */
11000 if (!HONOR_NANS (arg1)
11001 && code == LE_EXPR
11002 && ((code0 == MINUS_EXPR && is_positive >= 0)
11003 || (code0 == PLUS_EXPR && is_positive <= 0)))
11005 if (TREE_CODE (arg01) == INTEGER_CST
11006 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11007 fold_overflow_warning (("assuming signed overflow does not "
11008 "occur when assuming that "
11009 "(X - c) <= X is always true"),
11010 WARN_STRICT_OVERFLOW_ALL);
11011 return constant_boolean_node (1, type);
11014 /* Convert (X + c) >= X to true. */
11015 if (!HONOR_NANS (arg1)
11016 && code == GE_EXPR
11017 && ((code0 == PLUS_EXPR && is_positive >= 0)
11018 || (code0 == MINUS_EXPR && is_positive <= 0)))
11020 if (TREE_CODE (arg01) == INTEGER_CST
11021 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11022 fold_overflow_warning (("assuming signed overflow does not "
11023 "occur when assuming that "
11024 "(X + c) >= X is always true"),
11025 WARN_STRICT_OVERFLOW_ALL);
11026 return constant_boolean_node (1, type);
11029 if (TREE_CODE (arg01) == INTEGER_CST)
11031 /* Convert X + c > X and X - c < X to true for integers. */
11032 if (code == GT_EXPR
11033 && ((code0 == PLUS_EXPR && is_positive > 0)
11034 || (code0 == MINUS_EXPR && is_positive < 0)))
11036 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11037 fold_overflow_warning (("assuming signed overflow does "
11038 "not occur when assuming that "
11039 "(X + c) > X is always true"),
11040 WARN_STRICT_OVERFLOW_ALL);
11041 return constant_boolean_node (1, type);
11044 if (code == LT_EXPR
11045 && ((code0 == MINUS_EXPR && is_positive > 0)
11046 || (code0 == PLUS_EXPR && is_positive < 0)))
11048 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11049 fold_overflow_warning (("assuming signed overflow does "
11050 "not occur when assuming that "
11051 "(X - c) < X is always true"),
11052 WARN_STRICT_OVERFLOW_ALL);
11053 return constant_boolean_node (1, type);
11056 /* Convert X + c <= X and X - c >= X to false for integers. */
11057 if (code == LE_EXPR
11058 && ((code0 == PLUS_EXPR && is_positive > 0)
11059 || (code0 == MINUS_EXPR && is_positive < 0)))
11061 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11062 fold_overflow_warning (("assuming signed overflow does "
11063 "not occur when assuming that "
11064 "(X + c) <= X is always false"),
11065 WARN_STRICT_OVERFLOW_ALL);
11066 return constant_boolean_node (0, type);
11069 if (code == GE_EXPR
11070 && ((code0 == MINUS_EXPR && is_positive > 0)
11071 || (code0 == PLUS_EXPR && is_positive < 0)))
11073 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11074 fold_overflow_warning (("assuming signed overflow does "
11075 "not occur when assuming that "
11076 "(X - c) >= X is always false"),
11077 WARN_STRICT_OVERFLOW_ALL);
11078 return constant_boolean_node (0, type);
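/* Worked examples (editor's addition; hypothetical helpers): with
signed overflow undefined, x + 1 > x can never be false and x - 1 > x
can never be true, so both fold to constants (with an optional
-Wstrict-overflow note, as above). */
#if 0
static int always_one (int x) { return x + 1 > x; } /* folds to 1 */
static int always_zero (int x) { return x - 1 > x; } /* folds to 0 */
#endif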
11083 /* If we are comparing an ABS_EXPR with a constant, we can
11084 convert all the cases into explicit comparisons, but they may
11085 well not be faster than doing the ABS and one comparison.
11086 But ABS (X) <= C is a range comparison, which becomes a subtraction
11087 and a comparison, and is probably faster. */
11088 if (code == LE_EXPR
11089 && TREE_CODE (arg1) == INTEGER_CST
11090 && TREE_CODE (arg0) == ABS_EXPR
11091 && ! TREE_SIDE_EFFECTS (arg0)
11092 && (0 != (tem = negate_expr (arg1)))
11093 && TREE_CODE (tem) == INTEGER_CST
11094 && !TREE_OVERFLOW (tem))
11095 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11096 build2 (GE_EXPR, type,
11097 TREE_OPERAND (arg0, 0), tem),
11098 build2 (LE_EXPR, type,
11099 TREE_OPERAND (arg0, 0), arg1));
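/* Illustrative sketch (editor's addition; hypothetical helpers):
ABS (x) <= 5 becomes the range test below, which the range-check
machinery can implement as one unsigned comparison,
(unsigned) x + 5 <= 10. */
#if 0
static int abs_range_before (int x) { return __builtin_abs (x) <= 5; }
static int abs_range_after (int x) { return x >= -5 && x <= 5; }
#endif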
11101 /* Convert ABS_EXPR<x> >= 0 to true. */
11102 strict_overflow_p = false;
11103 if (code == GE_EXPR
11104 && (integer_zerop (arg1)
11105 || (! HONOR_NANS (arg0)
11106 && real_zerop (arg1)))
11107 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11109 if (strict_overflow_p)
11110 fold_overflow_warning (("assuming signed overflow does not occur "
11111 "when simplifying comparison of "
11112 "absolute value and zero"),
11113 WARN_STRICT_OVERFLOW_CONDITIONAL);
11114 return omit_one_operand_loc (loc, type,
11115 constant_boolean_node (true, type),
11116 arg0);
11119 /* Convert ABS_EXPR<x> < 0 to false. */
11120 strict_overflow_p = false;
11121 if (code == LT_EXPR
11122 && (integer_zerop (arg1) || real_zerop (arg1))
11123 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11125 if (strict_overflow_p)
11126 fold_overflow_warning (("assuming signed overflow does not occur "
11127 "when simplifying comparison of "
11128 "absolute value and zero"),
11129 WARN_STRICT_OVERFLOW_CONDITIONAL);
11130 return omit_one_operand_loc (loc, type,
11131 constant_boolean_node (false, type),
11132 arg0);
11135 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11136 and similarly for >= into !=. */
11137 if ((code == LT_EXPR || code == GE_EXPR)
11138 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11139 && TREE_CODE (arg1) == LSHIFT_EXPR
11140 && integer_onep (TREE_OPERAND (arg1, 0)))
11141 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11142 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11143 TREE_OPERAND (arg1, 1)),
11144 build_zero_cst (TREE_TYPE (arg0)));
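/* Worked example (editor's addition; hypothetical helpers): for
unsigned x and 0 <= y < 32, x < (1u << y) holds exactly when x has no
bit set at position y or above, i.e. when (x >> y) == 0. */
#if 0
static int pow2_cmp_before (unsigned x, int y) { return x < (1u << y); }
static int pow2_cmp_after (unsigned x, int y) { return (x >> y) == 0; }
#endif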
11146 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11147 otherwise Y might be >= # of bits in X's type and thus e.g.
11148 (unsigned char) (1 << Y) for Y == 15 might be 0.
11149 If the cast is widening, then 1 << Y should have unsigned type,
11150 otherwise if Y is number of bits in the signed shift type minus 1,
11151 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11152 Y == 31 might be 0xffffffff80000000. */
11153 if ((code == LT_EXPR || code == GE_EXPR)
11154 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11155 && CONVERT_EXPR_P (arg1)
11156 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11157 && (element_precision (TREE_TYPE (arg1))
11158 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11159 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11160 || (element_precision (TREE_TYPE (arg1))
11161 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11162 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11164 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11165 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11166 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11167 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11168 build_zero_cst (TREE_TYPE (arg0)));
11171 return NULL_TREE;
11173 case UNORDERED_EXPR:
11174 case ORDERED_EXPR:
11175 case UNLT_EXPR:
11176 case UNLE_EXPR:
11177 case UNGT_EXPR:
11178 case UNGE_EXPR:
11179 case UNEQ_EXPR:
11180 case LTGT_EXPR:
11181 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11183 tree targ0 = strip_float_extensions (arg0);
11184 tree targ1 = strip_float_extensions (arg1);
11185 tree newtype = TREE_TYPE (targ0);
11187 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11188 newtype = TREE_TYPE (targ1);
11190 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11191 return fold_build2_loc (loc, code, type,
11192 fold_convert_loc (loc, newtype, targ0),
11193 fold_convert_loc (loc, newtype, targ1));
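/* Illustrative sketch (editor's addition; hypothetical helpers):
float-to-double widening is exact, so the promoted comparison gives
the same answer as comparing the narrower values directly and the
conversions can be stripped. */
#if 0
static int unord_before (float a, float b)
{ return __builtin_isunordered ((double) a, (double) b); }
static int unord_after (float a, float b)
{ return __builtin_isunordered (a, b); }
#endif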
11196 return NULL_TREE;
11198 case COMPOUND_EXPR:
11199 /* When pedantic, a compound expression can be neither an lvalue
11200 nor an integer constant expression. */
11201 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11202 return NULL_TREE;
11203 /* Don't let (0, 0) be null pointer constant. */
11204 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11205 : fold_convert_loc (loc, type, arg1);
11206 return pedantic_non_lvalue_loc (loc, tem);
11208 case ASSERT_EXPR:
11209 /* An ASSERT_EXPR should never be passed to fold_binary. */
11210 gcc_unreachable ();
11212 default:
11213 return NULL_TREE;
11214 } /* switch (code) */
11217 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11218 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11219 of GOTO_EXPR. */
11221 static tree
11222 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11224 switch (TREE_CODE (*tp))
11226 case LABEL_EXPR:
11227 return *tp;
11229 case GOTO_EXPR:
11230 *walk_subtrees = 0;
11232 /* fall through */
11234 default:
11235 return NULL_TREE;
11239 /* Return whether the sub-tree ST contains a label which is accessible from
11240 outside the sub-tree. */
11242 static bool
11243 contains_label_p (tree st)
11245 return
11246 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11249 /* Fold a ternary expression of code CODE and type TYPE with operands
11250 OP0, OP1, and OP2. Return the folded expression if folding is
11251 successful. Otherwise, return NULL_TREE. */
11253 tree
11254 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11255 tree op0, tree op1, tree op2)
11257 tree tem;
11258 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11259 enum tree_code_class kind = TREE_CODE_CLASS (code);
11261 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11262 && TREE_CODE_LENGTH (code) == 3);
11264 /* If this is a commutative operation, and OP0 is a constant, move it
11265 to OP1 to reduce the number of tests below. */
11266 if (commutative_ternary_tree_code (code)
11267 && tree_swap_operands_p (op0, op1))
11268 return fold_build3_loc (loc, code, type, op1, op0, op2);
11270 tem = generic_simplify (loc, code, type, op0, op1, op2);
11271 if (tem)
11272 return tem;
11274 /* Strip any conversions that don't change the mode. This is safe
11275 for every expression, except for a comparison expression because
11276 its signedness is derived from its operands. So, in the latter
11277 case, only strip conversions that don't change the signedness.
11279 Note that this is done as an internal manipulation within the
11280 constant folder, in order to find the simplest representation of
11281 the arguments so that their form can be studied. In any case,
11282 the appropriate type conversions should be put back in the tree
11283 that will get out of the constant folder. */
11284 if (op0)
11286 arg0 = op0;
11287 STRIP_NOPS (arg0);
11290 if (op1)
11292 arg1 = op1;
11293 STRIP_NOPS (arg1);
11296 if (op2)
11298 arg2 = op2;
11299 STRIP_NOPS (arg2);
11302 switch (code)
11304 case COMPONENT_REF:
11305 if (TREE_CODE (arg0) == CONSTRUCTOR
11306 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11308 unsigned HOST_WIDE_INT idx;
11309 tree field, value;
11310 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11311 if (field == arg1)
11312 return value;
11314 return NULL_TREE;
11316 case COND_EXPR:
11317 case VEC_COND_EXPR:
11318 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11319 so all simple results must be passed through pedantic_non_lvalue. */
11320 if (TREE_CODE (arg0) == INTEGER_CST)
11322 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11323 tem = integer_zerop (arg0) ? op2 : op1;
11324 /* Only optimize constant conditions when the selected branch
11325 has the same type as the COND_EXPR. This avoids optimizing
11326 away "c ? x : throw", where the throw has a void type.
11327 Avoid throwing away an operand that contains a label. */
11328 if ((!TREE_SIDE_EFFECTS (unused_op)
11329 || !contains_label_p (unused_op))
11330 && (! VOID_TYPE_P (TREE_TYPE (tem))
11331 || VOID_TYPE_P (type)))
11332 return pedantic_non_lvalue_loc (loc, tem);
11333 return NULL_TREE;
11335 else if (TREE_CODE (arg0) == VECTOR_CST)
11337 if ((TREE_CODE (arg1) == VECTOR_CST
11338 || TREE_CODE (arg1) == CONSTRUCTOR)
11339 && (TREE_CODE (arg2) == VECTOR_CST
11340 || TREE_CODE (arg2) == CONSTRUCTOR))
11342 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11343 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11344 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11345 for (i = 0; i < nelts; i++)
11347 tree val = VECTOR_CST_ELT (arg0, i);
11348 if (integer_all_onesp (val))
11349 sel[i] = i;
11350 else if (integer_zerop (val))
11351 sel[i] = nelts + i;
11352 else /* Currently unreachable. */
11353 return NULL_TREE;
11355 tree t = fold_vec_perm (type, arg1, arg2, sel);
11356 if (t != NULL_TREE)
11357 return t;
11361 /* If we have A op B ? A : C, we may be able to convert this to a
11362 simpler expression, depending on the operation and the values
11363 of B and C. Signed zeros prevent all of these transformations,
11364 for reasons given above each one.
11366 Also try swapping the arguments and inverting the conditional. */
11367 if (COMPARISON_CLASS_P (arg0)
11368 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11369 arg1, TREE_OPERAND (arg0, 1))
11370 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11372 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11373 if (tem)
11374 return tem;
11377 if (COMPARISON_CLASS_P (arg0)
11378 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11379 op2,
11380 TREE_OPERAND (arg0, 1))
11381 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11383 location_t loc0 = expr_location_or (arg0, loc);
11384 tem = fold_invert_truthvalue (loc0, arg0);
11385 if (tem && COMPARISON_CLASS_P (tem))
11387 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11388 if (tem)
11389 return tem;
11393 /* If the second operand is simpler than the third, swap them
11394 since that produces better jump optimization results. */
11395 if (truth_value_p (TREE_CODE (arg0))
11396 && tree_swap_operands_p (op1, op2))
11398 location_t loc0 = expr_location_or (arg0, loc);
11399 /* See if this can be inverted. If it can't, possibly because
11400 it was a floating-point inequality comparison, don't do
11401 anything. */
11402 tem = fold_invert_truthvalue (loc0, arg0);
11403 if (tem)
11404 return fold_build3_loc (loc, code, type, tem, op2, op1);
11407 /* Convert A ? 1 : 0 to simply A. */
11408 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11409 : (integer_onep (op1)
11410 && !VECTOR_TYPE_P (type)))
11411 && integer_zerop (op2)
11412 /* If we try to convert OP0 to our type, the
11413 call to fold will try to move the conversion inside
11414 a COND, which will recurse. In that case, the COND_EXPR
11415 is probably the best choice, so leave it alone. */
11416 && type == TREE_TYPE (arg0))
11417 return pedantic_non_lvalue_loc (loc, arg0);
11419 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11420 over COND_EXPR in cases such as floating point comparisons. */
11421 if (integer_zerop (op1)
11422 && code == COND_EXPR
11423 && integer_onep (op2)
11424 && !VECTOR_TYPE_P (type)
11425 && truth_value_p (TREE_CODE (arg0)))
11426 return pedantic_non_lvalue_loc (loc,
11427 fold_convert_loc (loc, type,
11428 invert_truthvalue_loc (loc,
11429 arg0)));
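/* Worked examples (editor's addition; hypothetical helpers): with a
truth-valued condition, A ? 1 : 0 is A itself and A ? 0 : 1 is its
inversion. */
#if 0
static int cond_id (int a) { return (a != 0) ? 1 : 0; } /* folds to a != 0 */
static int cond_not (int a) { return (a != 0) ? 0 : 1; } /* folds to a == 0 */
#endif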
11431 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11432 if (TREE_CODE (arg0) == LT_EXPR
11433 && integer_zerop (TREE_OPERAND (arg0, 1))
11434 && integer_zerop (op2)
11435 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11437 /* sign_bit_p looks through both zero and sign extensions,
11438 but for this optimization only sign extensions are
11439 usable. */
11440 tree tem2 = TREE_OPERAND (arg0, 0);
11441 while (tem != tem2)
11443 if (TREE_CODE (tem2) != NOP_EXPR
11444 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11446 tem = NULL_TREE;
11447 break;
11449 tem2 = TREE_OPERAND (tem2, 0);
11451 /* sign_bit_p only checks ARG1 bits within A's precision.
11452 If <sign bit of A> has wider type than A, bits outside
11453 of A's precision in <sign bit of A> need to be checked.
11454 If they are all 0, this optimization needs to be done
11455 in A's unsigned type; if they are all 1, in A's signed type;
11456 otherwise it can't be done. */
11457 if (tem
11458 && TYPE_PRECISION (TREE_TYPE (tem))
11459 < TYPE_PRECISION (TREE_TYPE (arg1))
11460 && TYPE_PRECISION (TREE_TYPE (tem))
11461 < TYPE_PRECISION (type))
11463 int inner_width, outer_width;
11464 tree tem_type;
11466 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11467 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11468 if (outer_width > TYPE_PRECISION (type))
11469 outer_width = TYPE_PRECISION (type);
11471 wide_int mask = wi::shifted_mask
11472 (inner_width, outer_width - inner_width, false,
11473 TYPE_PRECISION (TREE_TYPE (arg1)));
11475 wide_int common = mask & arg1;
11476 if (common == mask)
11478 tem_type = signed_type_for (TREE_TYPE (tem));
11479 tem = fold_convert_loc (loc, tem_type, tem);
11481 else if (common == 0)
11483 tem_type = unsigned_type_for (TREE_TYPE (tem));
11484 tem = fold_convert_loc (loc, tem_type, tem);
11486 else
11487 tem = NULL;
11490 if (tem)
11491 return
11492 fold_convert_loc (loc, type,
11493 fold_build2_loc (loc, BIT_AND_EXPR,
11494 TREE_TYPE (tem), tem,
11495 fold_convert_loc (loc,
11496 TREE_TYPE (tem),
11497 arg1)));
11500 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11501 already handled above. */
11502 if (TREE_CODE (arg0) == BIT_AND_EXPR
11503 && integer_onep (TREE_OPERAND (arg0, 1))
11504 && integer_zerop (op2)
11505 && integer_pow2p (arg1))
11507 tree tem = TREE_OPERAND (arg0, 0);
11508 STRIP_NOPS (tem);
11509 if (TREE_CODE (tem) == RSHIFT_EXPR
11510 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11511 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11512 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11513 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11514 TREE_OPERAND (tem, 0), arg1);
11517 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11518 is probably obsolete because the first operand should be a
11519 truth value (that's why we have the two cases above), but let's
11520 leave it in until we can confirm this for all front-ends. */
11521 if (integer_zerop (op2)
11522 && TREE_CODE (arg0) == NE_EXPR
11523 && integer_zerop (TREE_OPERAND (arg0, 1))
11524 && integer_pow2p (arg1)
11525 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11526 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11527 arg1, OEP_ONLY_CONST))
11528 return pedantic_non_lvalue_loc (loc,
11529 fold_convert_loc (loc, type,
11530 TREE_OPERAND (arg0, 0)));
11532 /* Disable the transformations below for vectors, since
11533 fold_binary_op_with_conditional_arg may undo them immediately,
11534 yielding an infinite loop. */
11535 if (code == VEC_COND_EXPR)
11536 return NULL_TREE;
11538 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11539 if (integer_zerop (op2)
11540 && truth_value_p (TREE_CODE (arg0))
11541 && truth_value_p (TREE_CODE (arg1))
11542 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11543 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11544 : TRUTH_ANDIF_EXPR,
11545 type, fold_convert_loc (loc, type, arg0), arg1);
11547 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11548 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11549 && truth_value_p (TREE_CODE (arg0))
11550 && truth_value_p (TREE_CODE (arg1))
11551 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11553 location_t loc0 = expr_location_or (arg0, loc);
11554 /* Only perform transformation if ARG0 is easily inverted. */
11555 tem = fold_invert_truthvalue (loc0, arg0);
11556 if (tem)
11557 return fold_build2_loc (loc, code == VEC_COND_EXPR
11558 ? BIT_IOR_EXPR
11559 : TRUTH_ORIF_EXPR,
11560 type, fold_convert_loc (loc, type, tem),
11561 arg1);
11564 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11565 if (integer_zerop (arg1)
11566 && truth_value_p (TREE_CODE (arg0))
11567 && truth_value_p (TREE_CODE (op2))
11568 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11570 location_t loc0 = expr_location_or (arg0, loc);
11571 /* Only perform transformation if ARG0 is easily inverted. */
11572 tem = fold_invert_truthvalue (loc0, arg0);
11573 if (tem)
11574 return fold_build2_loc (loc, code == VEC_COND_EXPR
11575 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11576 type, fold_convert_loc (loc, type, tem),
11577 op2);
11580 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11581 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11582 && truth_value_p (TREE_CODE (arg0))
11583 && truth_value_p (TREE_CODE (op2))
11584 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11585 return fold_build2_loc (loc, code == VEC_COND_EXPR
11586 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11587 type, fold_convert_loc (loc, type, arg0), op2);
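/* Worked examples (editor's addition; hypothetical helpers): when all
three operands are truth values, the conditional collapses into
short-circuit logic. */
#if 0
static int cond_and (int a, int b) { return (a != 0) ? (b != 0) : 0; } /* a && b */
static int cond_or (int a, int b) { return (a != 0) ? 1 : (b != 0); } /* a || b */
#endif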
11589 return NULL_TREE;
11591 case CALL_EXPR:
11592 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11593 of fold_ternary on them. */
11594 gcc_unreachable ();
11596 case BIT_FIELD_REF:
11597 if (TREE_CODE (arg0) == VECTOR_CST
11598 && (type == TREE_TYPE (TREE_TYPE (arg0))
11599 || (TREE_CODE (type) == VECTOR_TYPE
11600 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11602 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11603 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11604 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11605 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11607 if (n != 0
11608 && (idx % width) == 0
11609 && (n % width) == 0
11610 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11612 idx = idx / width;
11613 n = n / width;
11615 if (TREE_CODE (arg0) == VECTOR_CST)
11617 if (n == 1)
11618 return VECTOR_CST_ELT (arg0, idx);
11620 tree *vals = XALLOCAVEC (tree, n);
11621 for (unsigned i = 0; i < n; ++i)
11622 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11623 return build_vector (type, vals);
11628 /* On constants we can use native encode/interpret to constant
11629 fold (nearly) all BIT_FIELD_REFs. */
11630 if (CONSTANT_CLASS_P (arg0)
11631 && can_native_interpret_type_p (type)
11632 && BITS_PER_UNIT == 8)
11634 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11635 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11636 /* Limit us to a reasonable amount of work. To relax the
11637 other limitations we need bit-shifting of the buffer
11638 and rounding up the size. */
11639 if (bitpos % BITS_PER_UNIT == 0
11640 && bitsize % BITS_PER_UNIT == 0
11641 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11643 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11644 unsigned HOST_WIDE_INT len
11645 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11646 bitpos / BITS_PER_UNIT);
11647 if (len > 0
11648 && len * BITS_PER_UNIT >= bitsize)
11650 tree v = native_interpret_expr (type, b,
11651 bitsize / BITS_PER_UNIT);
11652 if (v)
11653 return v;
11658 return NULL_TREE;
11660 case FMA_EXPR:
11661 /* For integers we can decompose the FMA if possible. */
11662 if (TREE_CODE (arg0) == INTEGER_CST
11663 && TREE_CODE (arg1) == INTEGER_CST)
11664 return fold_build2_loc (loc, PLUS_EXPR, type,
11665 const_binop (MULT_EXPR, arg0, arg1), arg2);
11666 if (integer_zerop (arg2))
11667 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11669 return fold_fma (loc, type, arg0, arg1, arg2);
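/* Worked example (editor's addition): integer FMA has no rounding to
preserve, so FMA_EXPR <2, 3, z> decomposes to 6 + z, and
FMA_EXPR <x, y, 0> is simply x * y. */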
11671 case VEC_PERM_EXPR:
11672 if (TREE_CODE (arg2) == VECTOR_CST)
11674 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11675 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11676 unsigned char *sel2 = sel + nelts;
11677 bool need_mask_canon = false;
11678 bool need_mask_canon2 = false;
11679 bool all_in_vec0 = true;
11680 bool all_in_vec1 = true;
11681 bool maybe_identity = true;
11682 bool single_arg = (op0 == op1);
11683 bool changed = false;
11685 mask2 = 2 * nelts - 1;
11686 mask = single_arg ? (nelts - 1) : mask2;
11687 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11688 for (i = 0; i < nelts; i++)
11690 tree val = VECTOR_CST_ELT (arg2, i);
11691 if (TREE_CODE (val) != INTEGER_CST)
11692 return NULL_TREE;
11694 /* Make sure that the perm value is in an acceptable
11695 range. */
11696 wide_int t = val;
11697 need_mask_canon |= wi::gtu_p (t, mask);
11698 need_mask_canon2 |= wi::gtu_p (t, mask2);
11699 sel[i] = t.to_uhwi () & mask;
11700 sel2[i] = t.to_uhwi () & mask2;
11702 if (sel[i] < nelts)
11703 all_in_vec1 = false;
11704 else
11705 all_in_vec0 = false;
11707 if ((sel[i] & (nelts-1)) != i)
11708 maybe_identity = false;
11711 if (maybe_identity)
11713 if (all_in_vec0)
11714 return op0;
11715 if (all_in_vec1)
11716 return op1;
11719 if (all_in_vec0)
11720 op1 = op0;
11721 else if (all_in_vec1)
11723 op0 = op1;
11724 for (i = 0; i < nelts; i++)
11725 sel[i] -= nelts;
11726 need_mask_canon = true;
11729 if ((TREE_CODE (op0) == VECTOR_CST
11730 || TREE_CODE (op0) == CONSTRUCTOR)
11731 && (TREE_CODE (op1) == VECTOR_CST
11732 || TREE_CODE (op1) == CONSTRUCTOR))
11734 tree t = fold_vec_perm (type, op0, op1, sel);
11735 if (t != NULL_TREE)
11736 return t;
11739 if (op0 == op1 && !single_arg)
11740 changed = true;
11742 /* Some targets are deficient and fail to expand a single
11743 argument permutation while still allowing an equivalent
11744 2-argument version. */
11745 if (need_mask_canon && arg2 == op2
11746 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11747 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11749 need_mask_canon = need_mask_canon2;
11750 sel = sel2;
11753 if (need_mask_canon && arg2 == op2)
11755 tree *tsel = XALLOCAVEC (tree, nelts);
11756 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11757 for (i = 0; i < nelts; i++)
11758 tsel[i] = build_int_cst (eltype, sel[i]);
11759 op2 = build_vector (TREE_TYPE (arg2), tsel);
11760 changed = true;
11763 if (changed)
11764 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11766 return NULL_TREE;
11768 case BIT_INSERT_EXPR:
11769 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11770 if (TREE_CODE (arg0) == INTEGER_CST
11771 && TREE_CODE (arg1) == INTEGER_CST)
11773 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11774 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11775 wide_int tem = wi::bit_and (arg0,
11776 wi::shifted_mask (bitpos, bitsize, true,
11777 TYPE_PRECISION (type)));
11778 wide_int tem2
11779 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11780 bitsize), bitpos);
11781 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11783 else if (TREE_CODE (arg0) == VECTOR_CST
11784 && CONSTANT_CLASS_P (arg1)
11785 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11786 TREE_TYPE (arg1)))
11788 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11789 unsigned HOST_WIDE_INT elsize
11790 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11791 if (bitpos % elsize == 0)
11793 unsigned k = bitpos / elsize;
11794 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11795 return arg0;
11796 else
11798 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11799 memcpy (elts, VECTOR_CST_ELTS (arg0),
11800 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11801 elts[k] = arg1;
11802 return build_vector (type, elts);
11806 return NULL_TREE;
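/* Worked example (editor's addition) for the integer path above:
inserting the 8-bit constant 0x11 at bit position 8 of 0xaabbccdd
masks out the old field and ORs in the shifted replacement:
(0xaabbccdd & ~(0xffu << 8)) | (0x11u << 8) == 0xaabb11dd. */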
11808 default:
11809 return NULL_TREE;
11810 } /* switch (code) */
11813 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11814 of an array (or vector). */
11816 tree
11817 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11819 tree index_type = NULL_TREE;
11820 offset_int low_bound = 0;
11822 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11824 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11825 if (domain_type && TYPE_MIN_VALUE (domain_type))
11827 /* Static constructors for variably sized objects make no sense. */
11828 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11829 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11830 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11834 if (index_type)
11835 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11836 TYPE_SIGN (index_type));
11838 offset_int index = low_bound - 1;
11839 if (index_type)
11840 index = wi::ext (index, TYPE_PRECISION (index_type),
11841 TYPE_SIGN (index_type));
11843 offset_int max_index;
11844 unsigned HOST_WIDE_INT cnt;
11845 tree cfield, cval;
11847 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11849 /* Array constructor might explicitly set index, or specify a range,
11850 or leave the index NULL, meaning that it is the next index after
11851 the previous one. */
11852 if (cfield)
11854 if (TREE_CODE (cfield) == INTEGER_CST)
11855 max_index = index = wi::to_offset (cfield);
11856 else
11858 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11859 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11860 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11863 else
11865 index += 1;
11866 if (index_type)
11867 index = wi::ext (index, TYPE_PRECISION (index_type),
11868 TYPE_SIGN (index_type));
11869 max_index = index;
11872 /* Do we have a match? */
11873 if (wi::cmpu (access_index, index) >= 0
11874 && wi::cmpu (access_index, max_index) <= 0)
11875 return cval;
11877 return NULL_TREE;
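/* Worked example (editor's addition): given
int a[6] = { [0 ... 2] = 1, 7, [5] = 9 };
indexes 0-2 come from the RANGE_EXPR, index 3 is the unindexed 7 (the
next index after the range), index 5 is 9, and index 4 has no
CONSTRUCTOR element at all, so the lookup returns NULL_TREE. */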
11880 /* Perform constant folding and related simplification of EXPR.
11881 The related simplifications include x*1 => x, x*0 => 0, etc.,
11882 and application of the associative law.
11883 NOP_EXPR conversions may be removed freely (as long as we
11884 are careful not to change the type of the overall expression).
11885 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11886 but we can constant-fold them if they have constant operands. */
11888 #ifdef ENABLE_FOLD_CHECKING
11889 # define fold(x) fold_1 (x)
11890 static tree fold_1 (tree);
11891 static
11892 #endif
11893 tree
11894 fold (tree expr)
11896 const tree t = expr;
11897 enum tree_code code = TREE_CODE (t);
11898 enum tree_code_class kind = TREE_CODE_CLASS (code);
11899 tree tem;
11900 location_t loc = EXPR_LOCATION (expr);
11902 /* Return right away if a constant. */
11903 if (kind == tcc_constant)
11904 return t;
11906 /* CALL_EXPR-like objects with variable numbers of operands are
11907 treated specially. */
11908 if (kind == tcc_vl_exp)
11910 if (code == CALL_EXPR)
11912 tem = fold_call_expr (loc, expr, false);
11913 return tem ? tem : expr;
11915 return expr;
11918 if (IS_EXPR_CODE_CLASS (kind))
11920 tree type = TREE_TYPE (t);
11921 tree op0, op1, op2;
11923 switch (TREE_CODE_LENGTH (code))
11925 case 1:
11926 op0 = TREE_OPERAND (t, 0);
11927 tem = fold_unary_loc (loc, code, type, op0);
11928 return tem ? tem : expr;
11929 case 2:
11930 op0 = TREE_OPERAND (t, 0);
11931 op1 = TREE_OPERAND (t, 1);
11932 tem = fold_binary_loc (loc, code, type, op0, op1);
11933 return tem ? tem : expr;
11934 case 3:
11935 op0 = TREE_OPERAND (t, 0);
11936 op1 = TREE_OPERAND (t, 1);
11937 op2 = TREE_OPERAND (t, 2);
11938 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11939 return tem ? tem : expr;
11940 default:
11941 break;
11945 switch (code)
11947 case ARRAY_REF:
11949 tree op0 = TREE_OPERAND (t, 0);
11950 tree op1 = TREE_OPERAND (t, 1);
11952 if (TREE_CODE (op1) == INTEGER_CST
11953 && TREE_CODE (op0) == CONSTRUCTOR
11954 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11956 tree val = get_array_ctor_element_at_index (op0,
11957 wi::to_offset (op1));
11958 if (val)
11959 return val;
11962 return t;
11965 /* Return a VECTOR_CST if possible. */
11966 case CONSTRUCTOR:
11968 tree type = TREE_TYPE (t);
11969 if (TREE_CODE (type) != VECTOR_TYPE)
11970 return t;
11972 unsigned i;
11973 tree val;
11974 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11975 if (! CONSTANT_CLASS_P (val))
11976 return t;
11978 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11981 case CONST_DECL:
11982 return fold (DECL_INITIAL (t));
11984 default:
11985 return t;
11986 } /* switch (code) */
11989 #ifdef ENABLE_FOLD_CHECKING
11990 #undef fold
11992 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11993 hash_table<nofree_ptr_hash<const tree_node> > *);
11994 static void fold_check_failed (const_tree, const_tree);
11995 void print_fold_checksum (const_tree);
11997 /* When --enable-checking=fold is in effect, compute a digest of expr
11998 before and after the actual fold call, to verify that fold did not
11999 accidentally change the original expr. */
12001 tree
12002 fold (tree expr)
12004 tree ret;
12005 struct md5_ctx ctx;
12006 unsigned char checksum_before[16], checksum_after[16];
12007 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12009 md5_init_ctx (&ctx);
12010 fold_checksum_tree (expr, &ctx, &ht);
12011 md5_finish_ctx (&ctx, checksum_before);
12012 ht.empty ();
12014 ret = fold_1 (expr);
12016 md5_init_ctx (&ctx);
12017 fold_checksum_tree (expr, &ctx, &ht);
12018 md5_finish_ctx (&ctx, checksum_after);
12020 if (memcmp (checksum_before, checksum_after, 16))
12021 fold_check_failed (expr, ret);
12023 return ret;
12026 void
12027 print_fold_checksum (const_tree expr)
12029 struct md5_ctx ctx;
12030 unsigned char checksum[16], cnt;
12031 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12033 md5_init_ctx (&ctx);
12034 fold_checksum_tree (expr, &ctx, &ht);
12035 md5_finish_ctx (&ctx, checksum);
12036 for (cnt = 0; cnt < 16; ++cnt)
12037 fprintf (stderr, "%02x", checksum[cnt]);
12038 putc ('\n', stderr);
12041 static void
12042 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12044 internal_error ("fold check: original tree changed by fold");
12047 static void
12048 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12049 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12051 const tree_node **slot;
12052 enum tree_code code;
12053 union tree_node buf;
12054 int i, len;
12056 recursive_label:
12057 if (expr == NULL)
12058 return;
12059 slot = ht->find_slot (expr, INSERT);
12060 if (*slot != NULL)
12061 return;
12062 *slot = expr;
12063 code = TREE_CODE (expr);
12064 if (TREE_CODE_CLASS (code) == tcc_declaration
12065 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12067 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12068 memcpy ((char *) &buf, expr, tree_size (expr));
12069 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12070 buf.decl_with_vis.symtab_node = NULL;
12071 expr = (tree) &buf;
12073 else if (TREE_CODE_CLASS (code) == tcc_type
12074 && (TYPE_POINTER_TO (expr)
12075 || TYPE_REFERENCE_TO (expr)
12076 || TYPE_CACHED_VALUES_P (expr)
12077 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12078 || TYPE_NEXT_VARIANT (expr)
12079 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12081 /* Allow these fields to be modified. */
12082 tree tmp;
12083 memcpy ((char *) &buf, expr, tree_size (expr));
12084 expr = tmp = (tree) &buf;
12085 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12086 TYPE_POINTER_TO (tmp) = NULL;
12087 TYPE_REFERENCE_TO (tmp) = NULL;
12088 TYPE_NEXT_VARIANT (tmp) = NULL;
12089 TYPE_ALIAS_SET (tmp) = -1;
12090 if (TYPE_CACHED_VALUES_P (tmp))
12092 TYPE_CACHED_VALUES_P (tmp) = 0;
12093 TYPE_CACHED_VALUES (tmp) = NULL;
12096 md5_process_bytes (expr, tree_size (expr), ctx);
12097 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12098 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12099 if (TREE_CODE_CLASS (code) != tcc_type
12100 && TREE_CODE_CLASS (code) != tcc_declaration
12101 && code != TREE_LIST
12102 && code != SSA_NAME
12103 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12104 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12105 switch (TREE_CODE_CLASS (code))
12107 case tcc_constant:
12108 switch (code)
12110 case STRING_CST:
12111 md5_process_bytes (TREE_STRING_POINTER (expr),
12112 TREE_STRING_LENGTH (expr), ctx);
12113 break;
12114 case COMPLEX_CST:
12115 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12116 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12117 break;
12118 case VECTOR_CST:
12119 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12120 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12121 break;
12122 default:
12123 break;
12125 break;
12126 case tcc_exceptional:
12127 switch (code)
12129 case TREE_LIST:
12130 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12131 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12132 expr = TREE_CHAIN (expr);
12133 goto recursive_label;
12134 break;
12135 case TREE_VEC:
12136 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12137 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12138 break;
12139 default:
12140 break;
12142 break;
12143 case tcc_expression:
12144 case tcc_reference:
12145 case tcc_comparison:
12146 case tcc_unary:
12147 case tcc_binary:
12148 case tcc_statement:
12149 case tcc_vl_exp:
12150 len = TREE_OPERAND_LENGTH (expr);
12151 for (i = 0; i < len; ++i)
12152 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12153 break;
12154 case tcc_declaration:
12155 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12156 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12157 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12159 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12160 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12161 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12162 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12163 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12166 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12168 if (TREE_CODE (expr) == FUNCTION_DECL)
12170 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12171 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12173 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12175 break;
12176 case tcc_type:
12177 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12178 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12179 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12180 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12181 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12182 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12183 if (INTEGRAL_TYPE_P (expr)
12184 || SCALAR_FLOAT_TYPE_P (expr))
12186 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12187 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12189 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12190 if (TREE_CODE (expr) == RECORD_TYPE
12191 || TREE_CODE (expr) == UNION_TYPE
12192 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12193 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12194 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12195 break;
12196 default:
12197 break;
12201 /* Helper function for outputting the checksum of a tree T. When
12202 debugging with gdb, you can "define mynext" to be "next" followed
12203 by "call debug_fold_checksum (op0)", then just trace down till the
12204 outputs differ. */
12206 DEBUG_FUNCTION void
12207 debug_fold_checksum (const_tree t)
12209 int i;
12210 unsigned char checksum[16];
12211 struct md5_ctx ctx;
12212 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12214 md5_init_ctx (&ctx);
12215 fold_checksum_tree (t, &ctx, &ht);
12216 md5_finish_ctx (&ctx, checksum);
12217 ht.empty ();
12219 for (i = 0; i < 16; i++)
12220 fprintf (stderr, "%d ", checksum[i]);
12222 fprintf (stderr, "\n");
12225 #endif
12227 /* Fold a unary tree expression with code CODE of type TYPE with an
12228 operand OP0. LOC is the location of the resulting expression.
12229 Return a folded expression if successful. Otherwise, return a tree
12230 expression with code CODE of type TYPE with an operand OP0. */
12232 tree
12233 fold_build1_stat_loc (location_t loc,
12234 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12236 tree tem;
12237 #ifdef ENABLE_FOLD_CHECKING
12238 unsigned char checksum_before[16], checksum_after[16];
12239 struct md5_ctx ctx;
12240 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12242 md5_init_ctx (&ctx);
12243 fold_checksum_tree (op0, &ctx, &ht);
12244 md5_finish_ctx (&ctx, checksum_before);
12245 ht.empty ();
12246 #endif
12248 tem = fold_unary_loc (loc, code, type, op0);
12249 if (!tem)
12250 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12252 #ifdef ENABLE_FOLD_CHECKING
12253 md5_init_ctx (&ctx);
12254 fold_checksum_tree (op0, &ctx, &ht);
12255 md5_finish_ctx (&ctx, checksum_after);
12257 if (memcmp (checksum_before, checksum_after, 16))
12258 fold_check_failed (op0, tem);
12259 #endif
12260 return tem;
12263 /* Fold a binary tree expression with code CODE of type TYPE with
12264 operands OP0 and OP1. LOC is the location of the resulting
12265 expression. Return a folded expression if successful. Otherwise,
12266 return a tree expression with code CODE of type TYPE with operands
12267 OP0 and OP1. */
12269 tree
12270 fold_build2_stat_loc (location_t loc,
12271 enum tree_code code, tree type, tree op0, tree op1
12272 MEM_STAT_DECL)
12274 tree tem;
12275 #ifdef ENABLE_FOLD_CHECKING
12276 unsigned char checksum_before_op0[16],
12277 checksum_before_op1[16],
12278 checksum_after_op0[16],
12279 checksum_after_op1[16];
12280 struct md5_ctx ctx;
12281 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12283 md5_init_ctx (&ctx);
12284 fold_checksum_tree (op0, &ctx, &ht);
12285 md5_finish_ctx (&ctx, checksum_before_op0);
12286 ht.empty ();
12288 md5_init_ctx (&ctx);
12289 fold_checksum_tree (op1, &ctx, &ht);
12290 md5_finish_ctx (&ctx, checksum_before_op1);
12291 ht.empty ();
12292 #endif
12294 tem = fold_binary_loc (loc, code, type, op0, op1);
12295 if (!tem)
12296 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12298 #ifdef ENABLE_FOLD_CHECKING
12299 md5_init_ctx (&ctx);
12300 fold_checksum_tree (op0, &ctx, &ht);
12301 md5_finish_ctx (&ctx, checksum_after_op0);
12302 ht.empty ();
12304 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12305 fold_check_failed (op0, tem);
12307 md5_init_ctx (&ctx);
12308 fold_checksum_tree (op1, &ctx, &ht);
12309 md5_finish_ctx (&ctx, checksum_after_op1);
12311 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12312 fold_check_failed (op1, tem);
12313 #endif
12314 return tem;
12317 /* Fold a ternary tree expression with code CODE of type TYPE with
12318 operands OP0, OP1, and OP2. Return a folded expression if
12319 successful. Otherwise, return a tree expression with code CODE of
12320 type TYPE with operands OP0, OP1, and OP2. */
12322 tree
12323 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12324 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12326 tree tem;
12327 #ifdef ENABLE_FOLD_CHECKING
12328 unsigned char checksum_before_op0[16],
12329 checksum_before_op1[16],
12330 checksum_before_op2[16],
12331 checksum_after_op0[16],
12332 checksum_after_op1[16],
12333 checksum_after_op2[16];
12334 struct md5_ctx ctx;
12335 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12337 md5_init_ctx (&ctx);
12338 fold_checksum_tree (op0, &ctx, &ht);
12339 md5_finish_ctx (&ctx, checksum_before_op0);
12340 ht.empty ();
12342 md5_init_ctx (&ctx);
12343 fold_checksum_tree (op1, &ctx, &ht);
12344 md5_finish_ctx (&ctx, checksum_before_op1);
12345 ht.empty ();
12347 md5_init_ctx (&ctx);
12348 fold_checksum_tree (op2, &ctx, &ht);
12349 md5_finish_ctx (&ctx, checksum_before_op2);
12350 ht.empty ();
12351 #endif
12353 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12354 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12355 if (!tem)
12356 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12358 #ifdef ENABLE_FOLD_CHECKING
12359 md5_init_ctx (&ctx);
12360 fold_checksum_tree (op0, &ctx, &ht);
12361 md5_finish_ctx (&ctx, checksum_after_op0);
12362 ht.empty ();
12364 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12365 fold_check_failed (op0, tem);
12367 md5_init_ctx (&ctx);
12368 fold_checksum_tree (op1, &ctx, &ht);
12369 md5_finish_ctx (&ctx, checksum_after_op1);
12370 ht.empty ();
12372 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12373 fold_check_failed (op1, tem);
12375 md5_init_ctx (&ctx);
12376 fold_checksum_tree (op2, &ctx, &ht);
12377 md5_finish_ctx (&ctx, checksum_after_op2);
12379 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12380 fold_check_failed (op2, tem);
12381 #endif
12382 return tem;
12385 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12386 arguments in ARGARRAY, and a null static chain.
12387 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12388 of type TYPE from the given operands as constructed by build_call_array. */
12390 tree
12391 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12392 int nargs, tree *argarray)
12394 tree tem;
12395 #ifdef ENABLE_FOLD_CHECKING
12396 unsigned char checksum_before_fn[16],
12397 checksum_before_arglist[16],
12398 checksum_after_fn[16],
12399 checksum_after_arglist[16];
12400 struct md5_ctx ctx;
12401 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12402 int i;
12404 md5_init_ctx (&ctx);
12405 fold_checksum_tree (fn, &ctx, &ht);
12406 md5_finish_ctx (&ctx, checksum_before_fn);
12407 ht.empty ();
12409 md5_init_ctx (&ctx);
12410 for (i = 0; i < nargs; i++)
12411 fold_checksum_tree (argarray[i], &ctx, &ht);
12412 md5_finish_ctx (&ctx, checksum_before_arglist);
12413 ht.empty ();
12414 #endif
12416 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12417 if (!tem)
12418 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12420 #ifdef ENABLE_FOLD_CHECKING
12421 md5_init_ctx (&ctx);
12422 fold_checksum_tree (fn, &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_after_fn);
12424 ht.empty ();
12426 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12427 fold_check_failed (fn, tem);
12429 md5_init_ctx (&ctx);
12430 for (i = 0; i < nargs; i++)
12431 fold_checksum_tree (argarray[i], &ctx, &ht);
12432 md5_finish_ctx (&ctx, checksum_after_arglist);
12434 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12435 fold_check_failed (NULL_TREE, tem);
12436 #endif
12437 return tem;
12440 /* Perform constant folding and related simplification of initializer
12441 expression EXPR. These behave identically to "fold_buildN" but ignore
12442 potential run-time traps and exceptions that fold must preserve. */
12444 #define START_FOLD_INIT \
12445 int saved_signaling_nans = flag_signaling_nans;\
12446 int saved_trapping_math = flag_trapping_math;\
12447 int saved_rounding_math = flag_rounding_math;\
12448 int saved_trapv = flag_trapv;\
12449 int saved_folding_initializer = folding_initializer;\
12450 flag_signaling_nans = 0;\
12451 flag_trapping_math = 0;\
12452 flag_rounding_math = 0;\
12453 flag_trapv = 0;\
12454 folding_initializer = 1;
12456 #define END_FOLD_INIT \
12457 flag_signaling_nans = saved_signaling_nans;\
12458 flag_trapping_math = saved_trapping_math;\
12459 flag_rounding_math = saved_rounding_math;\
12460 flag_trapv = saved_trapv;\
12461 folding_initializer = saved_folding_initializer;
12463 tree
12464 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12465 tree type, tree op)
12467 tree result;
12468 START_FOLD_INIT;
12470 result = fold_build1_loc (loc, code, type, op);
12472 END_FOLD_INIT;
12473 return result;
12476 tree
12477 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12478 tree type, tree op0, tree op1)
12480 tree result;
12481 START_FOLD_INIT;
12483 result = fold_build2_loc (loc, code, type, op0, op1);
12485 END_FOLD_INIT;
12486 return result;
12489 tree
12490 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12491 int nargs, tree *argarray)
12493 tree result;
12494 START_FOLD_INIT;
12496 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12498 END_FOLD_INIT;
12499 return result;
12502 #undef START_FOLD_INIT
12503 #undef END_FOLD_INIT
12505 /* Determine if the first argument is a multiple of the second argument.
12506 Return 0 if it is not, or if we cannot easily determine that it is.
12508 An example of the sort of thing we care about (at this point; this routine
12509 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12510 fold cases do now) is discovering that
12512 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12514 is a multiple of
12516 SAVE_EXPR (J * 8)
12518 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12520 This code also handles discovering that
12522 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12524 is a multiple of 8, so we don't have to worry about a possible
12525 remainder.
12527 Note that we *look* inside a SAVE_EXPR only to determine how it was
12528 calculated; it is not safe for fold to do much of anything else with the
12529 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12530 at run time. For example, the latter example above *cannot* be implemented
12531 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12532 evaluation time of the original SAVE_EXPR is not necessarily the same at
12533 the time the new expression is evaluated. The only optimization of this
12534 sort that would be valid is changing
12536 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12538 divided by 8 to
12540 SAVE_EXPR (I) * SAVE_EXPR (J)
12542 (where the same SAVE_EXPR (J) is used in the original and the
12543 transformed version). */
12545 int
12546 multiple_of_p (tree type, const_tree top, const_tree bottom)
12548 gimple *stmt;
12549 tree t1, op1, op2;
12551 if (operand_equal_p (top, bottom, 0))
12552 return 1;
12554 if (TREE_CODE (type) != INTEGER_TYPE)
12555 return 0;
12557 switch (TREE_CODE (top))
12559 case BIT_AND_EXPR:
12560 /* Bitwise and provides a power of two multiple. If the mask is
12561 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12562 if (!integer_pow2p (bottom))
12563 return 0;
12564 /* FALLTHRU */
12566 case MULT_EXPR:
12567 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12568 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12570 case MINUS_EXPR:
12571 /* We cannot prove precisely whether op0 - op1 is a multiple of
12572 bottom, so be conservative and check whether both op0 and op1
12573 are multiples of bottom. Note we check the second operand first
12574 since it's usually simpler. */
12575 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12576 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12578 case PLUS_EXPR:
12579 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12580 as op0 - 3 if the expression has unsigned type. For example,
12581 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12582 op1 = TREE_OPERAND (top, 1);
12583 if (TYPE_UNSIGNED (type)
12584 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12585 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12586 return (multiple_of_p (type, op1, bottom)
12587 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12589 case LSHIFT_EXPR:
12590 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12592 op1 = TREE_OPERAND (top, 1);
12593 /* const_binop may not detect overflow correctly,
12594 so check for it explicitly here. */
12595 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12596 && 0 != (t1 = fold_convert (type,
12597 const_binop (LSHIFT_EXPR,
12598 size_one_node,
12599 op1)))
12600 && !TREE_OVERFLOW (t1))
12601 return multiple_of_p (type, t1, bottom);
12603 return 0;
12605 case NOP_EXPR:
12606 /* Can't handle conversions from non-integral or wider integral type. */
12607 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12608 || (TYPE_PRECISION (type)
12609 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12610 return 0;
12612 /* fall through */
12614 case SAVE_EXPR:
12615 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12617 case COND_EXPR:
12618 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12619 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12621 case INTEGER_CST:
12622 if (TREE_CODE (bottom) != INTEGER_CST
12623 || integer_zerop (bottom)
12624 || (TYPE_UNSIGNED (type)
12625 && (tree_int_cst_sgn (top) < 0
12626 || tree_int_cst_sgn (bottom) < 0)))
12627 return 0;
12628 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12629 SIGNED);
12631 case SSA_NAME:
12632 if (TREE_CODE (bottom) == INTEGER_CST
12633 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12634 && gimple_code (stmt) == GIMPLE_ASSIGN)
12636 enum tree_code code = gimple_assign_rhs_code (stmt);
12638 /* Check for special cases to see if top is defined as multiple
12639 of bottom:
12641 top = X & ~(bottom - 1) ; bottom is power of 2
12643 or
12645 Y = X % bottom
12646 top = X - Y. */
12647 if (code == BIT_AND_EXPR
12648 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12649 && TREE_CODE (op2) == INTEGER_CST
12650 && integer_pow2p (bottom)
12651 && wi::multiple_of_p (wi::to_widest (op2),
12652 wi::to_widest (bottom), UNSIGNED))
12653 return 1;
12655 op1 = gimple_assign_rhs1 (stmt);
12656 if (code == MINUS_EXPR
12657 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12658 && TREE_CODE (op2) == SSA_NAME
12659 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12660 && gimple_code (stmt) == GIMPLE_ASSIGN
12661 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12662 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12663 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12664 return 1;
12667 /* fall through */
12669 default:
12670 return 0;
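/* A worked example of the recursion above (operands assumed, purely
   for illustration): with TOP = X * 24 and BOTTOM = 8, the MULT_EXPR
   case first recurses on the constant operand, the INTEGER_CST case
   proves via wi::multiple_of_p that 24 is a multiple of 8, and the
   whole query returns 1 without needing to know anything about X.  */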
12674 #define tree_expr_nonnegative_warnv_p(X, Y) \
12675 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12677 #define RECURSE(X) \
12678 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12680 /* Return true if CODE or TYPE is known to be non-negative. */
12682 static bool
12683 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12685 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12686 && truth_value_p (code))
12687 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12688 have a signed:1 type (where the value is -1 and 0). */
12689 return true;
12690 return false;
12693 /* Return true if (CODE OP0) is known to be non-negative. If the return
12694 value is based on the assumption that signed overflow is undefined,
12695 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12696 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12698 bool
12699 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12700 bool *strict_overflow_p, int depth)
12702 if (TYPE_UNSIGNED (type))
12703 return true;
12705 switch (code)
12707 case ABS_EXPR:
12708 /* We can't return 1 if flag_wrapv is set because
12709 ABS_EXPR<INT_MIN> = INT_MIN. */
12710 if (!ANY_INTEGRAL_TYPE_P (type))
12711 return true;
12712 if (TYPE_OVERFLOW_UNDEFINED (type))
12714 *strict_overflow_p = true;
12715 return true;
12717 break;
12719 case NON_LVALUE_EXPR:
12720 case FLOAT_EXPR:
12721 case FIX_TRUNC_EXPR:
12722 return RECURSE (op0);
12724 CASE_CONVERT:
12726 tree inner_type = TREE_TYPE (op0);
12727 tree outer_type = type;
12729 if (TREE_CODE (outer_type) == REAL_TYPE)
12731 if (TREE_CODE (inner_type) == REAL_TYPE)
12732 return RECURSE (op0);
12733 if (INTEGRAL_TYPE_P (inner_type))
12735 if (TYPE_UNSIGNED (inner_type))
12736 return true;
12737 return RECURSE (op0);
12740 else if (INTEGRAL_TYPE_P (outer_type))
12742 if (TREE_CODE (inner_type) == REAL_TYPE)
12743 return RECURSE (op0);
12744 if (INTEGRAL_TYPE_P (inner_type))
12745 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12746 && TYPE_UNSIGNED (inner_type);
12749 break;
12751 default:
12752 return tree_simple_nonnegative_warnv_p (code, type);
12755 /* We don't know sign of `t', so be conservative and return false. */
12756 return false;
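/* Illustration of the ABS_EXPR subtlety above (assuming a signed
   32-bit type): with wrapping overflow, ABS_EXPR <INT_MIN> is INT_MIN
   itself, which is negative, so "true" may only be returned when
   TYPE_OVERFLOW_UNDEFINED holds, and *STRICT_OVERFLOW_P records that
   the answer depends on that assumption.  */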
12759 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12760 value is based on the assumption that signed overflow is undefined,
12761 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12762 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12764 bool
12765 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12766 tree op1, bool *strict_overflow_p,
12767 int depth)
12769 if (TYPE_UNSIGNED (type))
12770 return true;
12772 switch (code)
12774 case POINTER_PLUS_EXPR:
12775 case PLUS_EXPR:
12776 if (FLOAT_TYPE_P (type))
12777 return RECURSE (op0) && RECURSE (op1);
12779 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12780 both unsigned and at least 2 bits shorter than the result. */
12781 if (TREE_CODE (type) == INTEGER_TYPE
12782 && TREE_CODE (op0) == NOP_EXPR
12783 && TREE_CODE (op1) == NOP_EXPR)
12785 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12786 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12787 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12788 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12790 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12791 TYPE_PRECISION (inner2)) + 1;
12792 return prec < TYPE_PRECISION (type);
12795 break;
12797 case MULT_EXPR:
12798 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12800 /* x * x is always non-negative for floating point x
12801 or when signed overflow is undefined. */
12802 if (operand_equal_p (op0, op1, 0)
12803 || (RECURSE (op0) && RECURSE (op1)))
12805 if (ANY_INTEGRAL_TYPE_P (type)
12806 && TYPE_OVERFLOW_UNDEFINED (type))
12807 *strict_overflow_p = true;
12808 return true;
12812 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12813 both unsigned and the sum of their precisions is less than the precision of the result. */
12814 if (TREE_CODE (type) == INTEGER_TYPE
12815 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12816 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12818 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12819 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12820 : TREE_TYPE (op0);
12821 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12822 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12823 : TREE_TYPE (op1);
12825 bool unsigned0 = TYPE_UNSIGNED (inner0);
12826 bool unsigned1 = TYPE_UNSIGNED (inner1);
12828 if (TREE_CODE (op0) == INTEGER_CST)
12829 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12831 if (TREE_CODE (op1) == INTEGER_CST)
12832 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12834 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12835 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12837 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12838 ? tree_int_cst_min_precision (op0, UNSIGNED)
12839 : TYPE_PRECISION (inner0);
12841 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12842 ? tree_int_cst_min_precision (op1, UNSIGNED)
12843 : TYPE_PRECISION (inner1);
12845 return precision0 + precision1 < TYPE_PRECISION (type);
12848 return false;
12850 case BIT_AND_EXPR:
12851 case MAX_EXPR:
12852 return RECURSE (op0) || RECURSE (op1);
12854 case BIT_IOR_EXPR:
12855 case BIT_XOR_EXPR:
12856 case MIN_EXPR:
12857 case RDIV_EXPR:
12858 case TRUNC_DIV_EXPR:
12859 case CEIL_DIV_EXPR:
12860 case FLOOR_DIV_EXPR:
12861 case ROUND_DIV_EXPR:
12862 return RECURSE (op0) && RECURSE (op1);
12864 case TRUNC_MOD_EXPR:
12865 return RECURSE (op0);
12867 case FLOOR_MOD_EXPR:
12868 return RECURSE (op1);
12870 case CEIL_MOD_EXPR:
12871 case ROUND_MOD_EXPR:
12872 default:
12873 return tree_simple_nonnegative_warnv_p (code, type);
12876 /* We don't know sign of `t', so be conservative and return false. */
12877 return false;
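/* Worked example for the PLUS_EXPR rule above (types assumed):
   (int) (unsigned short) a + (int) (unsigned short) b needs at most
   MAX (16, 16) + 1 == 17 bits, and 17 < 32, so the sum can never
   reach the sign bit of the 32-bit result and is non-negative.  */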
12880 /* Return true if T is known to be non-negative. If the return
12881 value is based on the assumption that signed overflow is undefined,
12882 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12883 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12885 bool
12886 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12888 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12889 return true;
12891 switch (TREE_CODE (t))
12893 case INTEGER_CST:
12894 return tree_int_cst_sgn (t) >= 0;
12896 case REAL_CST:
12897 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12899 case FIXED_CST:
12900 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12902 case COND_EXPR:
12903 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12905 case SSA_NAME:
12906 /* Limit the depth of recursion to avoid quadratic behavior.
12907 This is expected to catch almost all occurrences in practice.
12908 If this code misses important cases that unbounded recursion
12909 would not, passes that need this information could be revised
12910 to provide it through dataflow propagation. */
12911 return (!name_registered_for_update_p (t)
12912 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12913 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12914 strict_overflow_p, depth));
12916 default:
12917 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12921 /* Return true if T is known to be non-negative. If the return
12922 value is based on the assumption that signed overflow is undefined,
12923 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12924 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12926 bool
12927 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12928 bool *strict_overflow_p, int depth)
12930 switch (fn)
12932 CASE_CFN_ACOS:
12933 CASE_CFN_ACOSH:
12934 CASE_CFN_CABS:
12935 CASE_CFN_COSH:
12936 CASE_CFN_ERFC:
12937 CASE_CFN_EXP:
12938 CASE_CFN_EXP10:
12939 CASE_CFN_EXP2:
12940 CASE_CFN_FABS:
12941 CASE_CFN_FDIM:
12942 CASE_CFN_HYPOT:
12943 CASE_CFN_POW10:
12944 CASE_CFN_FFS:
12945 CASE_CFN_PARITY:
12946 CASE_CFN_POPCOUNT:
12947 CASE_CFN_CLZ:
12948 CASE_CFN_CLRSB:
12949 case CFN_BUILT_IN_BSWAP32:
12950 case CFN_BUILT_IN_BSWAP64:
12951 /* Always true. */
12952 return true;
12954 CASE_CFN_SQRT:
12955 /* sqrt(-0.0) is -0.0. */
12956 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12957 return true;
12958 return RECURSE (arg0);
12960 CASE_CFN_ASINH:
12961 CASE_CFN_ATAN:
12962 CASE_CFN_ATANH:
12963 CASE_CFN_CBRT:
12964 CASE_CFN_CEIL:
12965 CASE_CFN_ERF:
12966 CASE_CFN_EXPM1:
12967 CASE_CFN_FLOOR:
12968 CASE_CFN_FMOD:
12969 CASE_CFN_FREXP:
12970 CASE_CFN_ICEIL:
12971 CASE_CFN_IFLOOR:
12972 CASE_CFN_IRINT:
12973 CASE_CFN_IROUND:
12974 CASE_CFN_LCEIL:
12975 CASE_CFN_LDEXP:
12976 CASE_CFN_LFLOOR:
12977 CASE_CFN_LLCEIL:
12978 CASE_CFN_LLFLOOR:
12979 CASE_CFN_LLRINT:
12980 CASE_CFN_LLROUND:
12981 CASE_CFN_LRINT:
12982 CASE_CFN_LROUND:
12983 CASE_CFN_MODF:
12984 CASE_CFN_NEARBYINT:
12985 CASE_CFN_RINT:
12986 CASE_CFN_ROUND:
12987 CASE_CFN_SCALB:
12988 CASE_CFN_SCALBLN:
12989 CASE_CFN_SCALBN:
12990 CASE_CFN_SIGNBIT:
12991 CASE_CFN_SIGNIFICAND:
12992 CASE_CFN_SINH:
12993 CASE_CFN_TANH:
12994 CASE_CFN_TRUNC:
12995 /* True if the 1st argument is nonnegative. */
12996 return RECURSE (arg0);
12998 CASE_CFN_FMAX:
12999 /* True if the 1st OR the 2nd argument is nonnegative. */
13000 return RECURSE (arg0) || RECURSE (arg1);
13002 CASE_CFN_FMIN:
13003 /* True if the 1st AND 2nd arguments are nonnegative. */
13004 return RECURSE (arg0) && RECURSE (arg1);
13006 CASE_CFN_COPYSIGN:
13007 /* True if the 2nd argument is nonnegative. */
13008 return RECURSE (arg1);
13010 CASE_CFN_POWI:
13011 /* True if the 1st argument is nonnegative or the second
13012 argument is an even integer. */
13013 if (TREE_CODE (arg1) == INTEGER_CST
13014 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13015 return true;
13016 return RECURSE (arg0);
13018 CASE_CFN_POW:
13019 /* True if the 1st argument is nonnegative or the second
13020 argument is an even integer valued real. */
13021 if (TREE_CODE (arg1) == REAL_CST)
13023 REAL_VALUE_TYPE c;
13024 HOST_WIDE_INT n;
13026 c = TREE_REAL_CST (arg1);
13027 n = real_to_integer (&c);
13028 if ((n & 1) == 0)
13030 REAL_VALUE_TYPE cint;
13031 real_from_integer (&cint, VOIDmode, n, SIGNED);
13032 if (real_identical (&c, &cint))
13033 return true;
13036 return RECURSE (arg0);
13038 default:
13039 break;
13041 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
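/* Example for the POW case above: pow (x, 2.0) is non-negative for
   any x, because 2.0 round-trips through real_to_integer and
   real_from_integer to an even integer; pow (x, 2.5) fails the
   real_identical check and the answer falls back to RECURSE (arg0).  */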
13044 /* Return true if T is known to be non-negative. If the return
13045 value is based on the assumption that signed overflow is undefined,
13046 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13047 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13049 static bool
13050 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13052 enum tree_code code = TREE_CODE (t);
13053 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13054 return true;
13056 switch (code)
13058 case TARGET_EXPR:
13060 tree temp = TARGET_EXPR_SLOT (t);
13061 t = TARGET_EXPR_INITIAL (t);
13063 /* If the initializer is non-void, then it's a normal expression
13064 that will be assigned to the slot. */
13065 if (!VOID_TYPE_P (t))
13066 return RECURSE (t);
13068 /* Otherwise, the initializer sets the slot in some way. One common
13069 way is an assignment statement at the end of the initializer. */
13070 while (1)
13072 if (TREE_CODE (t) == BIND_EXPR)
13073 t = expr_last (BIND_EXPR_BODY (t));
13074 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13075 || TREE_CODE (t) == TRY_CATCH_EXPR)
13076 t = expr_last (TREE_OPERAND (t, 0));
13077 else if (TREE_CODE (t) == STATEMENT_LIST)
13078 t = expr_last (t);
13079 else
13080 break;
13082 if (TREE_CODE (t) == MODIFY_EXPR
13083 && TREE_OPERAND (t, 0) == temp)
13084 return RECURSE (TREE_OPERAND (t, 1));
13086 return false;
13089 case CALL_EXPR:
13091 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13092 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13094 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13095 get_call_combined_fn (t),
13096 arg0,
13097 arg1,
13098 strict_overflow_p, depth);
13100 case COMPOUND_EXPR:
13101 case MODIFY_EXPR:
13102 return RECURSE (TREE_OPERAND (t, 1));
13104 case BIND_EXPR:
13105 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13107 case SAVE_EXPR:
13108 return RECURSE (TREE_OPERAND (t, 0));
13110 default:
13111 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13115 #undef RECURSE
13116 #undef tree_expr_nonnegative_warnv_p
13118 /* Return true if T is known to be non-negative. If the return
13119 value is based on the assumption that signed overflow is undefined,
13120 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13121 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13123 bool
13124 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13126 enum tree_code code;
13127 if (t == error_mark_node)
13128 return false;
13130 code = TREE_CODE (t);
13131 switch (TREE_CODE_CLASS (code))
13133 case tcc_binary:
13134 case tcc_comparison:
13135 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13136 TREE_TYPE (t),
13137 TREE_OPERAND (t, 0),
13138 TREE_OPERAND (t, 1),
13139 strict_overflow_p, depth);
13141 case tcc_unary:
13142 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13143 TREE_TYPE (t),
13144 TREE_OPERAND (t, 0),
13145 strict_overflow_p, depth);
13147 case tcc_constant:
13148 case tcc_declaration:
13149 case tcc_reference:
13150 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13152 default:
13153 break;
13156 switch (code)
13158 case TRUTH_AND_EXPR:
13159 case TRUTH_OR_EXPR:
13160 case TRUTH_XOR_EXPR:
13161 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13162 TREE_TYPE (t),
13163 TREE_OPERAND (t, 0),
13164 TREE_OPERAND (t, 1),
13165 strict_overflow_p, depth);
13166 case TRUTH_NOT_EXPR:
13167 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13168 TREE_TYPE (t),
13169 TREE_OPERAND (t, 0),
13170 strict_overflow_p, depth);
13172 case COND_EXPR:
13173 case CONSTRUCTOR:
13174 case OBJ_TYPE_REF:
13175 case ASSERT_EXPR:
13176 case ADDR_EXPR:
13177 case WITH_SIZE_EXPR:
13178 case SSA_NAME:
13179 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13181 default:
13182 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13186 /* Return true if `t' is known to be non-negative. Handle warnings
13187 about undefined signed overflow. */
13189 bool
13190 tree_expr_nonnegative_p (tree t)
13192 bool ret, strict_overflow_p;
13194 strict_overflow_p = false;
13195 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13196 if (strict_overflow_p)
13197 fold_overflow_warning (("assuming signed overflow does not occur when "
13198 "determining that expression is always "
13199 "non-negative"),
13200 WARN_STRICT_OVERFLOW_MISC);
13201 return ret;
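/* A caller-side sketch (hypothetical transform, for illustration):
   a simplification such as ABS (x) -> x is justified by
   tree_expr_nonnegative_p (x), and the wrapper above emits the
   -Wstrict-overflow diagnostic whenever that answer was only valid
   under the undefined-signed-overflow assumption.  */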
13205 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13206 For floating point we further ensure that T is not denormal.
13207 Similar logic is present in nonzero_address in rtlanal.h.
13209 If the return value is based on the assumption that signed overflow
13210 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13211 change *STRICT_OVERFLOW_P. */
13213 bool
13214 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13215 bool *strict_overflow_p)
13217 switch (code)
13219 case ABS_EXPR:
13220 return tree_expr_nonzero_warnv_p (op0,
13221 strict_overflow_p);
13223 case NOP_EXPR:
13225 tree inner_type = TREE_TYPE (op0);
13226 tree outer_type = type;
13228 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13229 && tree_expr_nonzero_warnv_p (op0,
13230 strict_overflow_p));
13232 break;
13234 case NON_LVALUE_EXPR:
13235 return tree_expr_nonzero_warnv_p (op0,
13236 strict_overflow_p);
13238 default:
13239 break;
13242 return false;
13245 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13246 For floating point we further ensure that T is not denormal.
13247 Similar logic is present in nonzero_address in rtlanal.h.
13249 If the return value is based on the assumption that signed overflow
13250 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13251 change *STRICT_OVERFLOW_P. */
13253 bool
13254 tree_binary_nonzero_warnv_p (enum tree_code code,
13255 tree type,
13256 tree op0,
13257 tree op1, bool *strict_overflow_p)
13259 bool sub_strict_overflow_p;
13260 switch (code)
13262 case POINTER_PLUS_EXPR:
13263 case PLUS_EXPR:
13264 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13266 /* In the presence of negative values it is hard
13267 to say anything. */
13268 sub_strict_overflow_p = false;
13269 if (!tree_expr_nonnegative_warnv_p (op0,
13270 &sub_strict_overflow_p)
13271 || !tree_expr_nonnegative_warnv_p (op1,
13272 &sub_strict_overflow_p))
13273 return false;
13274 /* One of the operands must be positive and the other non-negative. */
13275 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13276 overflows, on a twos-complement machine the sum of two
13277 nonnegative numbers can never be zero. */
13278 return (tree_expr_nonzero_warnv_p (op0,
13279 strict_overflow_p)
13280 || tree_expr_nonzero_warnv_p (op1,
13281 strict_overflow_p));
13283 break;
13285 case MULT_EXPR:
13286 if (TYPE_OVERFLOW_UNDEFINED (type))
13288 if (tree_expr_nonzero_warnv_p (op0,
13289 strict_overflow_p)
13290 && tree_expr_nonzero_warnv_p (op1,
13291 strict_overflow_p))
13293 *strict_overflow_p = true;
13294 return true;
13297 break;
13299 case MIN_EXPR:
13300 sub_strict_overflow_p = false;
13301 if (tree_expr_nonzero_warnv_p (op0,
13302 &sub_strict_overflow_p)
13303 && tree_expr_nonzero_warnv_p (op1,
13304 &sub_strict_overflow_p))
13306 if (sub_strict_overflow_p)
13307 *strict_overflow_p = true;
13309 break;
13311 case MAX_EXPR:
13312 sub_strict_overflow_p = false;
13313 if (tree_expr_nonzero_warnv_p (op0,
13314 &sub_strict_overflow_p))
13316 if (sub_strict_overflow_p)
13317 *strict_overflow_p = true;
13319 /* When both operands are nonzero, then MAX must be too. */
13320 if (tree_expr_nonzero_warnv_p (op1,
13321 strict_overflow_p))
13322 return true;
13324 /* MAX where operand 0 is positive is positive. */
13325 return tree_expr_nonnegative_warnv_p (op0,
13326 strict_overflow_p);
13328 /* MAX where operand 1 is positive is positive. */
13329 else if (tree_expr_nonzero_warnv_p (op1,
13330 &sub_strict_overflow_p)
13331 && tree_expr_nonnegative_warnv_p (op1,
13332 &sub_strict_overflow_p))
13334 if (sub_strict_overflow_p)
13335 *strict_overflow_p = true;
13336 return true;
13338 break;
13340 case BIT_IOR_EXPR:
13341 return (tree_expr_nonzero_warnv_p (op1,
13342 strict_overflow_p)
13343 || tree_expr_nonzero_warnv_p (op0,
13344 strict_overflow_p));
13346 default:
13347 break;
13350 return false;
13353 /* Return true when T is an address and is known to be nonzero.
13354 For floating point we further ensure that T is not denormal.
13355 Similar logic is present in nonzero_address in rtlanal.h.
13357 If the return value is based on the assumption that signed overflow
13358 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13359 change *STRICT_OVERFLOW_P. */
13361 bool
13362 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13364 bool sub_strict_overflow_p;
13365 switch (TREE_CODE (t))
13367 case INTEGER_CST:
13368 return !integer_zerop (t);
13370 case ADDR_EXPR:
13372 tree base = TREE_OPERAND (t, 0);
13374 if (!DECL_P (base))
13375 base = get_base_address (base);
13377 if (base && TREE_CODE (base) == TARGET_EXPR)
13378 base = TARGET_EXPR_SLOT (base);
13380 if (!base)
13381 return false;
13383 /* For objects in symbol table check if we know they are non-zero.
13384 Don't do anything for variables and functions before symtab is built;
13385 it is quite possible that they will be declared weak later. */
13386 int nonzero_addr = maybe_nonzero_address (base);
13387 if (nonzero_addr >= 0)
13388 return nonzero_addr;
13390 /* Constants are never weak. */
13391 if (CONSTANT_CLASS_P (base))
13392 return true;
13394 return false;
13397 case COND_EXPR:
13398 sub_strict_overflow_p = false;
13399 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13400 &sub_strict_overflow_p)
13401 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13402 &sub_strict_overflow_p))
13404 if (sub_strict_overflow_p)
13405 *strict_overflow_p = true;
13406 return true;
13408 break;
13410 default:
13411 break;
13413 return false;
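/* For illustration of the ADDR_EXPR case: the address of a
   CONSTANT_CLASS_P base such as a STRING_CST is known nonzero, while
   the address of a symbol that might still be declared weak is left
   undecided, since maybe_nonzero_address returns a negative
   "don't know" before the symbol table can rule that out.  */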
13416 #define integer_valued_real_p(X) \
13417 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13419 #define RECURSE(X) \
13420 ((integer_valued_real_p) (X, depth + 1))
13422 /* Return true if the floating point result of (CODE OP0) has an
13423 integer value. We also allow +Inf, -Inf and NaN to be considered
13424 integer values. Return false for signaling NaN.
13426 DEPTH is the current nesting depth of the query. */
13428 bool
13429 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13431 switch (code)
13433 case FLOAT_EXPR:
13434 return true;
13436 case ABS_EXPR:
13437 return RECURSE (op0);
13439 CASE_CONVERT:
13441 tree type = TREE_TYPE (op0);
13442 if (TREE_CODE (type) == INTEGER_TYPE)
13443 return true;
13444 if (TREE_CODE (type) == REAL_TYPE)
13445 return RECURSE (op0);
13446 break;
13449 default:
13450 break;
13452 return false;
13455 /* Return true if the floating point result of (CODE OP0 OP1) has an
13456 integer value. We also allow +Inf, -Inf and NaN to be considered
13457 integer values. Return false for signaling NaN.
13459 DEPTH is the current nesting depth of the query. */
13461 bool
13462 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13464 switch (code)
13466 case PLUS_EXPR:
13467 case MINUS_EXPR:
13468 case MULT_EXPR:
13469 case MIN_EXPR:
13470 case MAX_EXPR:
13471 return RECURSE (op0) && RECURSE (op1);
13473 default:
13474 break;
13476 return false;
13479 /* Return true if the floating point result of calling FNDECL with arguments
13480 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13481 considered integer values. Return false for signaling NaN. If FNDECL
13482 takes fewer than 2 arguments, the remaining ARGn are null.
13484 DEPTH is the current nesting depth of the query. */
13486 bool
13487 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13489 switch (fn)
13491 CASE_CFN_CEIL:
13492 CASE_CFN_FLOOR:
13493 CASE_CFN_NEARBYINT:
13494 CASE_CFN_RINT:
13495 CASE_CFN_ROUND:
13496 CASE_CFN_TRUNC:
13497 return true;
13499 CASE_CFN_FMIN:
13500 CASE_CFN_FMAX:
13501 return RECURSE (arg0) && RECURSE (arg1);
13503 default:
13504 break;
13506 return false;
13509 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13510 has an integer value. We also allow +Inf, -Inf and NaN to be
13511 considered integer values. Return false for signaling NaN.
13513 DEPTH is the current nesting depth of the query. */
13515 bool
13516 integer_valued_real_single_p (tree t, int depth)
13518 switch (TREE_CODE (t))
13520 case REAL_CST:
13521 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13523 case COND_EXPR:
13524 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13526 case SSA_NAME:
13527 /* Limit the depth of recursion to avoid quadratic behavior.
13528 This is expected to catch almost all occurrences in practice.
13529 If this code misses important cases that unbounded recursion
13530 would not, passes that need this information could be revised
13531 to provide it through dataflow propagation. */
13532 return (!name_registered_for_update_p (t)
13533 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13534 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13535 depth));
13537 default:
13538 break;
13540 return false;
13543 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13544 has an integer value. We also allow +Inf, -Inf and NaN to be
13545 considered integer values. Return false for signaling NaN.
13547 DEPTH is the current nesting depth of the query. */
13549 static bool
13550 integer_valued_real_invalid_p (tree t, int depth)
13552 switch (TREE_CODE (t))
13554 case COMPOUND_EXPR:
13555 case MODIFY_EXPR:
13556 case BIND_EXPR:
13557 return RECURSE (TREE_OPERAND (t, 1));
13559 case SAVE_EXPR:
13560 return RECURSE (TREE_OPERAND (t, 0));
13562 default:
13563 break;
13565 return false;
13568 #undef RECURSE
13569 #undef integer_valued_real_p
13571 /* Return true if the floating point expression T has an integer value.
13572 We also allow +Inf, -Inf and NaN to be considered integer values.
13573 Return false for signaling NaN.
13575 DEPTH is the current nesting depth of the query. */
13577 bool
13578 integer_valued_real_p (tree t, int depth)
13580 if (t == error_mark_node)
13581 return false;
13583 tree_code code = TREE_CODE (t);
13584 switch (TREE_CODE_CLASS (code))
13586 case tcc_binary:
13587 case tcc_comparison:
13588 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13589 TREE_OPERAND (t, 1), depth);
13591 case tcc_unary:
13592 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13594 case tcc_constant:
13595 case tcc_declaration:
13596 case tcc_reference:
13597 return integer_valued_real_single_p (t, depth);
13599 default:
13600 break;
13603 switch (code)
13605 case COND_EXPR:
13606 case SSA_NAME:
13607 return integer_valued_real_single_p (t, depth);
13609 case CALL_EXPR:
13611 tree arg0 = (call_expr_nargs (t) > 0
13612 ? CALL_EXPR_ARG (t, 0)
13613 : NULL_TREE);
13614 tree arg1 = (call_expr_nargs (t) > 1
13615 ? CALL_EXPR_ARG (t, 1)
13616 : NULL_TREE);
13617 return integer_valued_real_call_p (get_call_combined_fn (t),
13618 arg0, arg1, depth);
13621 default:
13622 return integer_valued_real_invalid_p (t, depth);
13626 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13627 attempt to fold the expression to a constant without modifying TYPE,
13628 OP0 or OP1.
13630 If the expression can be simplified to a constant, then return
13631 the constant. If the expression cannot be simplified to a
13632 constant, then return NULL_TREE. */
13634 tree
13635 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13637 tree tem = fold_binary (code, type, op0, op1);
13638 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13641 /* Given the components of a unary expression CODE, TYPE and OP0,
13642 attempt to fold the expression to a constant without modifying
13643 TYPE or OP0.
13645 If the expression can be simplified to a constant, then return
13646 the constant. If the expression cannot be simplified to a
13647 constant, then return NULL_TREE. */
13649 tree
13650 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13652 tree tem = fold_unary (code, type, op0);
13653 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13656 /* If EXP represents referencing an element in a constant string
13657 (either via pointer arithmetic or array indexing), return the
13658 tree representing the value accessed, otherwise return NULL. */
13660 tree
13661 fold_read_from_constant_string (tree exp)
13663 if ((TREE_CODE (exp) == INDIRECT_REF
13664 || TREE_CODE (exp) == ARRAY_REF)
13665 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13667 tree exp1 = TREE_OPERAND (exp, 0);
13668 tree index;
13669 tree string;
13670 location_t loc = EXPR_LOCATION (exp);
13672 if (TREE_CODE (exp) == INDIRECT_REF)
13673 string = string_constant (exp1, &index);
13674 else
13676 tree low_bound = array_ref_low_bound (exp);
13677 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13679 /* Optimize the special case of a zero lower bound.
13681 We convert the low_bound to sizetype to avoid some problems
13682 with constant folding. (E.g. suppose the lower bound is 1,
13683 and its mode is QI. Without the conversion, (ARRAY
13684 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13685 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13686 if (! integer_zerop (low_bound))
13687 index = size_diffop_loc (loc, index,
13688 fold_convert_loc (loc, sizetype, low_bound));
13690 string = exp1;
13693 if (string
13694 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13695 && TREE_CODE (string) == STRING_CST
13696 && TREE_CODE (index) == INTEGER_CST
13697 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13698 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13699 == MODE_INT)
13700 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13701 return build_int_cst_type (TREE_TYPE (exp),
13702 (TREE_STRING_POINTER (string)
13703 [TREE_INT_CST_LOW (index)]));
13705 return NULL;
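/* Worked example (values assumed): for EXP = "abc"[1], STRING is the
   STRING_CST "abc" with a zero lower bound and INDEX folds to 1, so
   the result is 'b' as an INTEGER_CST of the element type.  The
   compare_tree_int guard keeps an out-of-range index such as
   "abc"[7] from being folded at all.  */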
13708 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13709 an integer constant, real, or fixed-point constant.
13711 TYPE is the type of the result. */
13713 static tree
13714 fold_negate_const (tree arg0, tree type)
13716 tree t = NULL_TREE;
13718 switch (TREE_CODE (arg0))
13720 case INTEGER_CST:
13722 bool overflow;
13723 wide_int val = wi::neg (arg0, &overflow);
13724 t = force_fit_type (type, val, 1,
13725 (overflow | TREE_OVERFLOW (arg0))
13726 && !TYPE_UNSIGNED (type));
13727 break;
13730 case REAL_CST:
13731 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13732 break;
13734 case FIXED_CST:
13736 FIXED_VALUE_TYPE f;
13737 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13738 &(TREE_FIXED_CST (arg0)), NULL,
13739 TYPE_SATURATING (type));
13740 t = build_fixed (type, f);
13741 /* Propagate overflow flags. */
13742 if (overflow_p | TREE_OVERFLOW (arg0))
13743 TREE_OVERFLOW (t) = 1;
13744 break;
13747 default:
13748 gcc_unreachable ();
13751 return t;
13754 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13755 an integer constant or real constant.
13757 TYPE is the type of the result. */
13759 tree
13760 fold_abs_const (tree arg0, tree type)
13762 tree t = NULL_TREE;
13764 switch (TREE_CODE (arg0))
13766 case INTEGER_CST:
13768 /* If the value is unsigned or non-negative, then the absolute value
13769 is the same as the ordinary value. */
13770 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13771 t = arg0;
13773 /* If the value is negative, then the absolute value is
13774 its negation. */
13775 else
13777 bool overflow;
13778 wide_int val = wi::neg (arg0, &overflow);
13779 t = force_fit_type (type, val, -1,
13780 overflow | TREE_OVERFLOW (arg0));
13783 break;
13785 case REAL_CST:
13786 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13787 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13788 else
13789 t = arg0;
13790 break;
13792 default:
13793 gcc_unreachable ();
13796 return t;
13799 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13800 constant. TYPE is the type of the result. */
13802 static tree
13803 fold_not_const (const_tree arg0, tree type)
13805 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13807 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13810 /* Given CODE, a relational operator, the target type, TYPE and two
13811 constant operands OP0 and OP1, return the result of the
13812 relational operation. If the result is not a compile time
13813 constant, then return NULL_TREE. */
13815 static tree
13816 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13818 int result, invert;
13820 /* From here on, the only cases we handle are when the result is
13821 known to be a constant. */
13823 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13825 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13826 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13828 /* Handle the cases where either operand is a NaN. */
13829 if (real_isnan (c0) || real_isnan (c1))
13831 switch (code)
13833 case EQ_EXPR:
13834 case ORDERED_EXPR:
13835 result = 0;
13836 break;
13838 case NE_EXPR:
13839 case UNORDERED_EXPR:
13840 case UNLT_EXPR:
13841 case UNLE_EXPR:
13842 case UNGT_EXPR:
13843 case UNGE_EXPR:
13844 case UNEQ_EXPR:
13845 result = 1;
13846 break;
13848 case LT_EXPR:
13849 case LE_EXPR:
13850 case GT_EXPR:
13851 case GE_EXPR:
13852 case LTGT_EXPR:
13853 if (flag_trapping_math)
13854 return NULL_TREE;
13855 result = 0;
13856 break;
13858 default:
13859 gcc_unreachable ();
13862 return constant_boolean_node (result, type);
13865 return constant_boolean_node (real_compare (code, c0, c1), type);
13868 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13870 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13871 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13872 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13875 /* Handle equality/inequality of complex constants. */
13876 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13878 tree rcond = fold_relational_const (code, type,
13879 TREE_REALPART (op0),
13880 TREE_REALPART (op1));
13881 tree icond = fold_relational_const (code, type,
13882 TREE_IMAGPART (op0),
13883 TREE_IMAGPART (op1));
13884 if (code == EQ_EXPR)
13885 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13886 else if (code == NE_EXPR)
13887 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13888 else
13889 return NULL_TREE;
13892 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13894 if (!VECTOR_TYPE_P (type))
13896 /* Have vector comparison with scalar boolean result. */
13897 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13898 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13899 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13901 tree elem0 = VECTOR_CST_ELT (op0, i);
13902 tree elem1 = VECTOR_CST_ELT (op1, i);
13903 tree tmp = fold_relational_const (code, type, elem0, elem1);
13904 if (tmp == NULL_TREE)
13905 return NULL_TREE;
13906 if (integer_zerop (tmp))
13907 return constant_boolean_node (false, type);
13909 return constant_boolean_node (true, type);
13911 unsigned count = VECTOR_CST_NELTS (op0);
13912 tree *elts = XALLOCAVEC (tree, count);
13913 gcc_assert (VECTOR_CST_NELTS (op1) == count
13914 && TYPE_VECTOR_SUBPARTS (type) == count);
13916 for (unsigned i = 0; i < count; i++)
13918 tree elem_type = TREE_TYPE (type);
13919 tree elem0 = VECTOR_CST_ELT (op0, i);
13920 tree elem1 = VECTOR_CST_ELT (op1, i);
13922 tree tem = fold_relational_const (code, elem_type,
13923 elem0, elem1);
13925 if (tem == NULL_TREE)
13926 return NULL_TREE;
13928 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13931 return build_vector (type, elts);
13934 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13936 To compute GT, swap the arguments and do LT.
13937 To compute GE, do LT and invert the result.
13938 To compute LE, swap the arguments, do LT and invert the result.
13939 To compute NE, do EQ and invert the result.
13941 Therefore, the code below must handle only EQ and LT. */
13943 if (code == LE_EXPR || code == GT_EXPR)
13945 std::swap (op0, op1);
13946 code = swap_tree_comparison (code);
13949 /* Note that it is safe to invert for real values here because we
13950 have already handled the one case that it matters. */
13952 invert = 0;
13953 if (code == NE_EXPR || code == GE_EXPR)
13955 invert = 1;
13956 code = invert_tree_comparison (code, false);
13959 /* Compute a result for LT or EQ if args permit;
13960 otherwise return NULL_TREE. */
13961 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13963 if (code == EQ_EXPR)
13964 result = tree_int_cst_equal (op0, op1);
13965 else
13966 result = tree_int_cst_lt (op0, op1);
13968 else
13969 return NULL_TREE;
13971 if (invert)
13972 result ^= 1;
13973 return constant_boolean_node (result, type);
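/* Example of the reduction above: 5 >= 3 becomes "invert (5 < 3)";
   tree_int_cst_lt yields 0, the invert step flips it to 1, and the
   result is constant_boolean_node (1, type).  */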
13976 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13977 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13978 itself. */
13980 tree
13981 fold_build_cleanup_point_expr (tree type, tree expr)
13983 /* If the expression does not have side effects then we don't have to wrap
13984 it with a cleanup point expression. */
13985 if (!TREE_SIDE_EFFECTS (expr))
13986 return expr;
13988 /* If the expression is a return, check whether the expression inside
13989 the return, or the right hand side of the modify expression inside the
13990 return, has side effects. If either has none, we don't need to wrap
13991 the expression in a cleanup point expression. Note we don't check the
13992 left hand side of the modify because it should always be a return decl. */
13993 if (TREE_CODE (expr) == RETURN_EXPR)
13995 tree op = TREE_OPERAND (expr, 0);
13996 if (!op || !TREE_SIDE_EFFECTS (op))
13997 return expr;
13998 op = TREE_OPERAND (op, 1);
13999 if (!TREE_SIDE_EFFECTS (op))
14000 return expr;
14003 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14006 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14007 of an indirection through OP0, or NULL_TREE if no simplification is
14008 possible. */
14010 tree
14011 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14013 tree sub = op0;
14014 tree subtype;
14016 STRIP_NOPS (sub);
14017 subtype = TREE_TYPE (sub);
14018 if (!POINTER_TYPE_P (subtype)
14019 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14020 return NULL_TREE;
14022 if (TREE_CODE (sub) == ADDR_EXPR)
14024 tree op = TREE_OPERAND (sub, 0);
14025 tree optype = TREE_TYPE (op);
14026 /* *&CONST_DECL -> to the value of the const decl. */
14027 if (TREE_CODE (op) == CONST_DECL)
14028 return DECL_INITIAL (op);
14029 /* *&p => p; make sure to handle *&"str"[cst] here. */
14030 if (type == optype)
14032 tree fop = fold_read_from_constant_string (op);
14033 if (fop)
14034 return fop;
14035 else
14036 return op;
14038 /* *(foo *)&fooarray => fooarray[0] */
14039 else if (TREE_CODE (optype) == ARRAY_TYPE
14040 && type == TREE_TYPE (optype)
14041 && (!in_gimple_form
14042 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14044 tree type_domain = TYPE_DOMAIN (optype);
14045 tree min_val = size_zero_node;
14046 if (type_domain && TYPE_MIN_VALUE (type_domain))
14047 min_val = TYPE_MIN_VALUE (type_domain);
14048 if (in_gimple_form
14049 && TREE_CODE (min_val) != INTEGER_CST)
14050 return NULL_TREE;
14051 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14052 NULL_TREE, NULL_TREE);
14054 /* *(foo *)&complexfoo => __real__ complexfoo */
14055 else if (TREE_CODE (optype) == COMPLEX_TYPE
14056 && type == TREE_TYPE (optype))
14057 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14058 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14059 else if (TREE_CODE (optype) == VECTOR_TYPE
14060 && type == TREE_TYPE (optype))
14062 tree part_width = TYPE_SIZE (type);
14063 tree index = bitsize_int (0);
14064 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14068 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14069 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14071 tree op00 = TREE_OPERAND (sub, 0);
14072 tree op01 = TREE_OPERAND (sub, 1);
14074 STRIP_NOPS (op00);
14075 if (TREE_CODE (op00) == ADDR_EXPR)
14077 tree op00type;
14078 op00 = TREE_OPERAND (op00, 0);
14079 op00type = TREE_TYPE (op00);
14081 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14082 if (TREE_CODE (op00type) == VECTOR_TYPE
14083 && type == TREE_TYPE (op00type))
14085 tree part_width = TYPE_SIZE (type);
14086 unsigned HOST_WIDE_INT max_offset
14087 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14088 * TYPE_VECTOR_SUBPARTS (op00type));
14089 if (tree_int_cst_sign_bit (op01) == 0
14090 && compare_tree_int (op01, max_offset) == -1)
14092 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14093 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14094 tree index = bitsize_int (indexi);
14095 return fold_build3_loc (loc,
14096 BIT_FIELD_REF, type, op00,
14097 part_width, index);
14100 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14101 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14102 && type == TREE_TYPE (op00type))
14104 tree size = TYPE_SIZE_UNIT (type);
14105 if (tree_int_cst_equal (size, op01))
14106 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14108 /* ((foo *)&fooarray)[1] => fooarray[1] */
14109 else if (TREE_CODE (op00type) == ARRAY_TYPE
14110 && type == TREE_TYPE (op00type))
14112 tree type_domain = TYPE_DOMAIN (op00type);
14113 tree min_val = size_zero_node;
14114 if (type_domain && TYPE_MIN_VALUE (type_domain))
14115 min_val = TYPE_MIN_VALUE (type_domain);
14116 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14117 TYPE_SIZE_UNIT (type));
14118 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14119 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14120 NULL_TREE, NULL_TREE);
14125 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14126 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14127 && type == TREE_TYPE (TREE_TYPE (subtype))
14128 && (!in_gimple_form
14129 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14131 tree type_domain;
14132 tree min_val = size_zero_node;
14133 sub = build_fold_indirect_ref_loc (loc, sub);
14134 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14135 if (type_domain && TYPE_MIN_VALUE (type_domain))
14136 min_val = TYPE_MIN_VALUE (type_domain);
14137 if (in_gimple_form
14138 && TREE_CODE (min_val) != INTEGER_CST)
14139 return NULL_TREE;
14140 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14141 NULL_TREE);
14144 return NULL_TREE;
14147 /* Builds an expression for an indirection through T, simplifying some
14148 cases. */
14150 tree
14151 build_fold_indirect_ref_loc (location_t loc, tree t)
14153 tree type = TREE_TYPE (TREE_TYPE (t));
14154 tree sub = fold_indirect_ref_1 (loc, type, t);
14156 if (sub)
14157 return sub;
14159 return build1_loc (loc, INDIRECT_REF, type, t);
14162 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14164 tree
14165 fold_indirect_ref_loc (location_t loc, tree t)
14167 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14169 if (sub)
14170 return sub;
14171 else
14172 return t;
14175 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14176 whose result is ignored. The type of the returned tree need not be
14177 the same as the original expression. */
14179 tree
14180 fold_ignored_result (tree t)
14182 if (!TREE_SIDE_EFFECTS (t))
14183 return integer_zero_node;
14185 for (;;)
14186 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14188 case tcc_unary:
14189 t = TREE_OPERAND (t, 0);
14190 break;
14192 case tcc_binary:
14193 case tcc_comparison:
14194 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14195 t = TREE_OPERAND (t, 0);
14196 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14197 t = TREE_OPERAND (t, 1);
14198 else
14199 return t;
14200 break;
14202 case tcc_expression:
14203 switch (TREE_CODE (t))
14205 case COMPOUND_EXPR:
14206 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14207 return t;
14208 t = TREE_OPERAND (t, 0);
14209 break;
14211 case COND_EXPR:
14212 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14213 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14214 return t;
14215 t = TREE_OPERAND (t, 0);
14216 break;
14218 default:
14219 return t;
14221 break;
14223 default:
14224 return t;
14228 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14230 tree
14231 round_up_loc (location_t loc, tree value, unsigned int divisor)
14233 tree div = NULL_TREE;
14235 if (divisor == 1)
14236 return value;
14238 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14239 have to do anything. Only do this when we are not given a const,
14240 because in that case, this check is more expensive than just
14241 doing it. */
14242 if (TREE_CODE (value) != INTEGER_CST)
14244 div = build_int_cst (TREE_TYPE (value), divisor);
14246 if (multiple_of_p (TREE_TYPE (value), value, div))
14247 return value;
14250 /* If divisor is a power of two, simplify this to bit manipulation. */
14251 if (pow2_or_zerop (divisor))
14253 if (TREE_CODE (value) == INTEGER_CST)
14255 wide_int val = value;
14256 bool overflow_p;
14258 if ((val & (divisor - 1)) == 0)
14259 return value;
14261 overflow_p = TREE_OVERFLOW (value);
14262 val += divisor - 1;
14263 val &= (int) -divisor;
14264 if (val == 0)
14265 overflow_p = true;
14267 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14269 else
14271 tree t;
14273 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14274 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14275 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14276 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14279 else
14281 if (!div)
14282 div = build_int_cst (TREE_TYPE (value), divisor);
14283 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14284 value = size_binop_loc (loc, MULT_EXPR, value, div);
14287 return value;
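/* Worked example of the power-of-two path above: VALUE = 13,
   DIVISOR = 8 computes (13 + 7) & -8 == 16, while an already aligned
   VALUE such as 16 is returned unchanged by the
   (val & (divisor - 1)) == 0 early exit.  */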
14290 /* Likewise, but round down. */
14292 tree
14293 round_down_loc (location_t loc, tree value, int divisor)
14295 tree div = NULL_TREE;
14297 gcc_assert (divisor > 0);
14298 if (divisor == 1)
14299 return value;
14301 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14302 have to do anything. Only do this when we are not given a const,
14303 because in that case, this check is more expensive than just
14304 doing it. */
14305 if (TREE_CODE (value) != INTEGER_CST)
14307 div = build_int_cst (TREE_TYPE (value), divisor);
14309 if (multiple_of_p (TREE_TYPE (value), value, div))
14310 return value;
14313 /* If divisor is a power of two, simplify this to bit manipulation. */
14314 if (pow2_or_zerop (divisor))
14316 tree t;
14318 t = build_int_cst (TREE_TYPE (value), -divisor);
14319 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14321 else
14323 if (!div)
14324 div = build_int_cst (TREE_TYPE (value), divisor);
14325 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14326 value = size_binop_loc (loc, MULT_EXPR, value, div);
14329 return value;
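/* For comparison with round_up_loc: VALUE = 13 rounds down to
   13 & -8 == 8 for DIVISOR = 8, and the general path computes
   (13 / 5) * 5 == 10 for DIVISOR = 5 via FLOOR_DIV_EXPR followed by
   MULT_EXPR.  */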
14332 /* Returns the pointer to the base of the object addressed by EXP and
14333 extracts the information about the offset of the access, storing it
14334 to PBITPOS and POFFSET. */
14336 static tree
14337 split_address_to_core_and_offset (tree exp,
14338 HOST_WIDE_INT *pbitpos, tree *poffset)
14340 tree core;
14341 machine_mode mode;
14342 int unsignedp, reversep, volatilep;
14343 HOST_WIDE_INT bitsize;
14344 location_t loc = EXPR_LOCATION (exp);
14346 if (TREE_CODE (exp) == ADDR_EXPR)
14348 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14349 poffset, &mode, &unsignedp, &reversep,
14350 &volatilep);
14351 core = build_fold_addr_expr_loc (loc, core);
14353 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14355 core = TREE_OPERAND (exp, 0);
14356 STRIP_NOPS (core);
14357 *pbitpos = 0;
14358 *poffset = TREE_OPERAND (exp, 1);
14359 if (TREE_CODE (*poffset) == INTEGER_CST)
14361 offset_int tem = wi::sext (wi::to_offset (*poffset),
14362 TYPE_PRECISION (TREE_TYPE (*poffset)));
14363 tem <<= LOG2_BITS_PER_UNIT;
14364 if (wi::fits_shwi_p (tem))
14366 *pbitpos = tem.to_shwi ();
14367 *poffset = NULL_TREE;
14371 else
14373 core = exp;
14374 *pbitpos = 0;
14375 *poffset = NULL_TREE;
14378 return core;
14381 /* Returns true if addresses of E1 and E2 differ by a constant, false
14382 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14384 bool
14385 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14387 tree core1, core2;
14388 HOST_WIDE_INT bitpos1, bitpos2;
14389 tree toffset1, toffset2, tdiff, type;
14391 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14392 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14394 if (bitpos1 % BITS_PER_UNIT != 0
14395 || bitpos2 % BITS_PER_UNIT != 0
14396 || !operand_equal_p (core1, core2, 0))
14397 return false;
14399 if (toffset1 && toffset2)
14401 type = TREE_TYPE (toffset1);
14402 if (type != TREE_TYPE (toffset2))
14403 toffset2 = fold_convert (type, toffset2);
14405 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14406 if (!cst_and_fits_in_hwi (tdiff))
14407 return false;
14409 *diff = int_cst_value (tdiff);
14411 else if (toffset1 || toffset2)
14413 /* If only one of the offsets is non-constant, the difference cannot
14414 be a constant. */
14415 return false;
14417 else
14418 *diff = 0;
14420 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14421 return true;
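/* Illustration (hypothetical operands): for E1 = &a[3] and
   E2 = &a[1] with A a char array, both cores are &a, the bit
   positions are 24 and 8, and *DIFF is set to (24 - 8) / 8 == 2.  */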
14424 /* Return OFF converted to a pointer offset type suitable as offset for
14425 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14426 tree
14427 convert_to_ptrofftype_loc (location_t loc, tree off)
14429 return fold_convert_loc (loc, sizetype, off);
14432 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14433 tree
14434 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14436 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14437 ptr, convert_to_ptrofftype_loc (loc, off));
14440 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14441 tree
14442 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14444 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14445 ptr, size_int (off));
14448 /* Return a char pointer for a C string if it is a string constant
14449 or a sum of a string constant and an integer constant. We only support
14450 string constants properly terminated with a '\0' character.
14451 If STRLEN is a valid pointer, the length of the returned string
14452 (including the terminating character) is stored to the argument. */
14454 const char *
14455 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14457 tree offset_node;
14459 if (strlen)
14460 *strlen = 0;
14462 src = string_constant (src, &offset_node);
14463 if (src == 0)
14464 return NULL;
14466 unsigned HOST_WIDE_INT offset = 0;
14467 if (offset_node != NULL_TREE)
14469 if (!tree_fits_uhwi_p (offset_node))
14470 return NULL;
14471 else
14472 offset = tree_to_uhwi (offset_node);
14475 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14476 const char *string = TREE_STRING_POINTER (src);
14478 /* Support only properly null-terminated strings. */
14479 if (string_length == 0
14480 || string[string_length - 1] != '\0'
14481 || offset >= string_length)
14482 return NULL;
14484 if (strlen)
14485 *strlen = string_length - offset;
14486 return string + offset;
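/* Example (a sketch): for SRC denoting "hi" + 1, string_constant
   returns the STRING_CST "hi" with offset 1, so the result points at
   "i" and *STRLEN, if given, is set to 2: the 'i' plus the
   terminating '\0'.  */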
14489 #if CHECKING_P
14491 namespace selftest {
14493 /* Helper functions for writing tests of folding trees. */
14495 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14497 static void
14498 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14499 tree constant)
14501 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14504 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14505 wrapping WRAPPED_EXPR. */
14507 static void
14508 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14509 tree wrapped_expr)
14511 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14512 ASSERT_NE (wrapped_expr, result);
14513 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14514 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14517 /* Verify that various arithmetic binary operations are folded
14518 correctly. */
14520 static void
14521 test_arithmetic_folding ()
14523 tree type = integer_type_node;
14524 tree x = create_tmp_var_raw (type, "x");
14525 tree zero = build_zero_cst (type);
14526 tree one = build_int_cst (type, 1);
14528 /* Addition. */
14529 /* 1 <-- (0 + 1) */
14530 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14531 one);
14532 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14533 one);
14535 /* (nonlvalue)x <-- (x + 0) */
14536 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14537 x);
14539 /* Subtraction. */
14540 /* 0 <-- (x - x) */
14541 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14542 zero);
14543 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14544 x);
14546 /* Multiplication. */
14547 /* 0 <-- (x * 0) */
14548 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14549 zero);
14551 /* (nonlvalue)x <-- (x * 1) */
14552 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14553 x);
14556 /* Verify that various binary operations on vectors are folded
14557 correctly. */
14559 static void
14560 test_vector_folding ()
14562 tree inner_type = integer_type_node;
14563 tree type = build_vector_type (inner_type, 4);
14564 tree zero = build_zero_cst (type);
14565 tree one = build_one_cst (type);
14567 /* Verify equality tests that return a scalar boolean result. */
14568 tree res_type = boolean_type_node;
14569 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14570 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14571 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14572 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14575 /* Run all of the selftests within this file. */
14577 void
14578 fold_const_c_tests ()
14580 test_arithmetic_folding ();
14581 test_vector_folding ();
14584 } // namespace selftest
14586 #endif /* CHECKING_P */