/* gcc/fold-const.c (from official-gcc.git) */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
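/* Illustrative usage of the entry points (a sketch, not code from
   this file; A, B and NELTS are assumed to be trees built by some
   caller):

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node, a, b);
     tree bytes = size_binop (MULT_EXPR, size_int (4), nelts);

   fold_build2 folds while building; size_binop additionally asserts
   that both operands have compatible sizetype-like types.  */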
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
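/* A worked reading of the encoding above (informal, derived from the
   values themselves): bit 0 means "less", bit 1 "equal", bit 2
   "greater" and bit 3 "unordered".  Hence

     COMPCODE_LE (3)   == COMPCODE_LT (1) | COMPCODE_EQ (2)
     COMPCODE_ORD (7)  == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT
     COMPCODE_NE (13)  == COMPCODE_TRUE (15) & ~COMPCODE_EQ (2)

   so ANDing or ORing two comparisons of the same operands reduces to
   bitwise AND/OR on their compcodes.  */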
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
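/* Usage sketch (illustrative, not a call site from this file): for
   INTEGER_CSTs 12 and 4 this returns the constant 3; for 12 and 5 the
   remainder is nonzero and NULL_TREE is returned, so a caller keeps
   the original division:

     tree q = div_if_zero_remainder (bytes, unit);
     if (q)
       ... use the exact quotient ...  */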
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
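/* Defer/undefer usage pattern (a sketch; STMT is assumed to be the
   GIMPLE statement to blame for location purposes):

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, a, b);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   Passing issue == false, as fold_undefer_and_ignore_overflow_warnings
   does, discards whatever warning was queued while deferring.  */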
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
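/* Example of the oddness property this predicate encodes: since
   sin(-x) == -sin(x), fold may rewrite -sin(x) as sin(-x) when the
   argument is cheap to negate.  trunc is odd as well
   (trunc(-2.5) == -2 == -trunc(2.5)), whereas the rint family only
   qualifies when -frounding-math is off, because under a directed
   rounding mode rint(-x) need not equal -rint(x).  */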
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
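/* Concretely: for 32-bit int, only INT_MIN (0x80000000, the one value
   with just the sign bit set) fails this check, since -INT_MIN is not
   representable; every other signed constant negates safely.  */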
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type))
          || (INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && (! INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, but negating one of the
         operands does if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && ! integer_pow2p (TREE_OPERAND (t, 0)))
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
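/* Worked example for the RSHIFT_EXPR case above: for 32-bit int,
   (int)x >> 31 is either 0 or -1, so its negation is 0 or 1, which is
   exactly (unsigned)x >> 31; flipping the signedness of the shift
   therefore performs the negation with no extra instruction.  */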
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
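/* Worked example (informal): splitting IN = x + 5 with CODE ==
   PLUS_EXPR yields *LITP = 5, *CONP = NULL and returns x; splitting
   IN = x - 5 with CODE == PLUS_EXPR instead yields *MINUS_LITP = 5,
   because the literal was subtracted.  associate_trees below is the
   inverse operation that stitches the parts back together.  */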
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
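/* For instance, when one input is itself a PLUS/MINUS, combining
   t1 = a + b with t2 = -c under PLUS_EXPR takes the NEGATE_EXPR
   branch above and builds (a + b) - c rather than (a + b) + (-c);
   a null T1 or T2 simply returns the other part unchanged.  */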
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
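/* Example: with 32-bit signed int, int_const_binop_1 (PLUS_EXPR,
   INT_MAX, 1, 1) wraps to INT_MIN; since the sign is SIGNED and
   wi::add reported overflow, force_fit_type returns the constant with
   TREE_OVERFLOW set, so callers can tell the fold was not
   value-preserving.  */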
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
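/* Worked example for the wide-range branch above: dividing
   (1 + 2i) by (3 + 4i) has |3| < |4|, so ratio = 3/4,
   div = 4 + 3*(3/4) = 25/4, real = (1*(3/4) + 2) / (25/4) = 11/25
   and imag = (2*(3/4) - 1) / (25/4) = 2/25, i.e. 0.44 + 0.08i,
   while keeping every intermediate value near the operands'
   magnitudes.  */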
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
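/* Example for the reduction cases above: applying REDUC_PLUS_EXPR to
   the constant vector {1, 2, 3, 4} accumulates into elts[0] and
   returns the scalar 10; if any intermediate const_binop fails, the
   whole fold is abandoned and NULL_TREE is returned.  */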
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
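/* Usage sketch (hypothetical caller; OFFSET and INDEX are assumed to
   be sizetype trees built elsewhere):

     offset = size_binop (PLUS_EXPR, offset,
                          size_binop (MULT_EXPR, size_int (4), index));

   Because the constant path passes overflowable == -1, a computation
   that wraps in sizetype comes back with TREE_OVERFLOW set even
   though sizetype is unsigned.  */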
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
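/* Example: size_diffop_loc on sizetype constants 4 and 12 hits the
   final branch, computes 12 - 4 = 8 in the unsigned type (which
   cannot overflow), converts to ssizetype and returns 0 - 8 = -8,
   the mathematically correct signed difference.  */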
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (real_less (&u, &r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
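/* Concretely, per the saturation rules above: folding (int)1e30 for
   32-bit int yields INT_MAX, (int)-1e30 yields INT_MIN, and
   converting a NaN yields 0, each with TREE_OVERFLOW set on the
   result so later diagnostics can notice the lossy conversion.  */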
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
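/* Rounding example for the code above: a signed fixed-point -2.5
   right-shifted by fbit truncates downward to -3; shifting back gives
   -3.0, which differs from -2.5, so the fractional-bits test fires
   and 1 is added, producing the round-toward-zero result -2.  */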
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
2011 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2012 to a fixed-point type. */
2014 static tree
2015 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2017 FIXED_VALUE_TYPE value;
2018 tree t;
2019 bool overflow_p;
2020 double_int di;
2022 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2024 di.low = TREE_INT_CST_ELT (arg1, 0);
2025 if (TREE_INT_CST_NUNITS (arg1) == 1)
2026 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2027 else
2028 di.high = TREE_INT_CST_ELT (arg1, 1);
2030 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2031 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2032 TYPE_SATURATING (type));
2033 t = build_fixed (type, value);
2035 /* Propagate overflow flags. */
2036 if (overflow_p | TREE_OVERFLOW (arg1))
2037 TREE_OVERFLOW (t) = 1;
2038 return t;
2041 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2042 to a fixed-point type. */
2044 static tree
2045 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2047 FIXED_VALUE_TYPE value;
2048 tree t;
2049 bool overflow_p;
2051 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2052 &TREE_REAL_CST (arg1),
2053 TYPE_SATURATING (type));
2054 t = build_fixed (type, value);
2056 /* Propagate overflow flags. */
2057 if (overflow_p | TREE_OVERFLOW (arg1))
2058 TREE_OVERFLOW (t) = 1;
2059 return t;
2062 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2063 type TYPE. If no simplification can be done return NULL_TREE. */
2065 static tree
2066 fold_convert_const (enum tree_code code, tree type, tree arg1)
2068 if (TREE_TYPE (arg1) == type)
2069 return arg1;
2071 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2072 || TREE_CODE (type) == OFFSET_TYPE)
2074 if (TREE_CODE (arg1) == INTEGER_CST)
2075 return fold_convert_const_int_from_int (type, arg1);
2076 else if (TREE_CODE (arg1) == REAL_CST)
2077 return fold_convert_const_int_from_real (code, type, arg1);
2078 else if (TREE_CODE (arg1) == FIXED_CST)
2079 return fold_convert_const_int_from_fixed (type, arg1);
2081 else if (TREE_CODE (type) == REAL_TYPE)
2083 if (TREE_CODE (arg1) == INTEGER_CST)
2084 return build_real_from_int_cst (type, arg1);
2085 else if (TREE_CODE (arg1) == REAL_CST)
2086 return fold_convert_const_real_from_real (type, arg1);
2087 else if (TREE_CODE (arg1) == FIXED_CST)
2088 return fold_convert_const_real_from_fixed (type, arg1);
2090 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2092 if (TREE_CODE (arg1) == FIXED_CST)
2093 return fold_convert_const_fixed_from_fixed (type, arg1);
2094 else if (TREE_CODE (arg1) == INTEGER_CST)
2095 return fold_convert_const_fixed_from_int (type, arg1);
2096 else if (TREE_CODE (arg1) == REAL_CST)
2097 return fold_convert_const_fixed_from_real (type, arg1);
2099 else if (TREE_CODE (type) == VECTOR_TYPE)
2101 if (TREE_CODE (arg1) == VECTOR_CST
2102 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2104 int len = TYPE_VECTOR_SUBPARTS (type);
2105 tree elttype = TREE_TYPE (type);
2106 tree *v = XALLOCAVEC (tree, len);
2107 for (int i = 0; i < len; ++i)
2109 tree elt = VECTOR_CST_ELT (arg1, i);
2110 tree cvt = fold_convert_const (code, elttype, elt);
2111 if (cvt == NULL_TREE)
2112 return NULL_TREE;
2113 v[i] = cvt;
2115 return build_vector (type, v);
2118 return NULL_TREE;
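#if 0
/* Minimal illustrative sketch of a caller (hypothetical, not part of
   this file): fold the C expression (double) 3 at compile time.  */
tree t = fold_convert_const (FLOAT_EXPR, double_type_node,
			     build_int_cst (integer_type_node, 3));
/* T is now a REAL_CST with value 3.0; a NULL_TREE result would have
   meant no simplification was possible.  */
#endif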
2121 /* Construct a vector of zero elements of vector type TYPE. */
2123 static tree
2124 build_zero_vector (tree type)
2126 tree t;
2128 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2129 return build_vector_from_val (type, t);
2132 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2134 bool
2135 fold_convertible_p (const_tree type, const_tree arg)
2137 tree orig = TREE_TYPE (arg);
2139 if (type == orig)
2140 return true;
2142 if (TREE_CODE (arg) == ERROR_MARK
2143 || TREE_CODE (type) == ERROR_MARK
2144 || TREE_CODE (orig) == ERROR_MARK)
2145 return false;
2147 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2148 return true;
2150 switch (TREE_CODE (type))
2152 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2153 case POINTER_TYPE: case REFERENCE_TYPE:
2154 case OFFSET_TYPE:
2155 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2156 || TREE_CODE (orig) == OFFSET_TYPE)
2157 return true;
2158 return (TREE_CODE (orig) == VECTOR_TYPE
2159 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2161 case REAL_TYPE:
2162 case FIXED_POINT_TYPE:
2163 case COMPLEX_TYPE:
2164 case VECTOR_TYPE:
2165 case VOID_TYPE:
2166 return TREE_CODE (type) == TREE_CODE (orig);
2168 default:
2169 return false;
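/* For example, int is convertible to long here (both are
   INTEGRAL_TYPE_P), whereas int to double is rejected because for
   REAL_TYPE the tree codes of TYPE and ORIG must match.  */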
2173 /* Convert expression ARG to type TYPE. Used by the middle-end for
2174 simple conversions in preference to calling the front-end's convert. */
2176 tree
2177 fold_convert_loc (location_t loc, tree type, tree arg)
2179 tree orig = TREE_TYPE (arg);
2180 tree tem;
2182 if (type == orig)
2183 return arg;
2185 if (TREE_CODE (arg) == ERROR_MARK
2186 || TREE_CODE (type) == ERROR_MARK
2187 || TREE_CODE (orig) == ERROR_MARK)
2188 return error_mark_node;
2190 switch (TREE_CODE (type))
2192 case POINTER_TYPE:
2193 case REFERENCE_TYPE:
2194 /* Handle conversions between pointers to different address spaces. */
2195 if (POINTER_TYPE_P (orig)
2196 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2197 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2198 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2199 /* fall through */
2201 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2202 case OFFSET_TYPE:
2203 if (TREE_CODE (arg) == INTEGER_CST)
2205 tem = fold_convert_const (NOP_EXPR, type, arg);
2206 if (tem != NULL_TREE)
2207 return tem;
2209 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2210 || TREE_CODE (orig) == OFFSET_TYPE)
2211 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2212 if (TREE_CODE (orig) == COMPLEX_TYPE)
2213 return fold_convert_loc (loc, type,
2214 fold_build1_loc (loc, REALPART_EXPR,
2215 TREE_TYPE (orig), arg));
2216 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2217 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2218 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2220 case REAL_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2223 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2227 else if (TREE_CODE (arg) == REAL_CST)
2229 tem = fold_convert_const (NOP_EXPR, type, arg);
2230 if (tem != NULL_TREE)
2231 return tem;
2233 else if (TREE_CODE (arg) == FIXED_CST)
2235 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2240 switch (TREE_CODE (orig))
2242 case INTEGER_TYPE:
2243 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2244 case POINTER_TYPE: case REFERENCE_TYPE:
2245 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2247 case REAL_TYPE:
2248 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2250 case FIXED_POINT_TYPE:
2251 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2253 case COMPLEX_TYPE:
2254 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2255 return fold_convert_loc (loc, type, tem);
2257 default:
2258 gcc_unreachable ();
2261 case FIXED_POINT_TYPE:
2262 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2263 || TREE_CODE (arg) == REAL_CST)
2265 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2266 if (tem != NULL_TREE)
2267 goto fold_convert_exit;
2270 switch (TREE_CODE (orig))
2272 case FIXED_POINT_TYPE:
2273 case INTEGER_TYPE:
2274 case ENUMERAL_TYPE:
2275 case BOOLEAN_TYPE:
2276 case REAL_TYPE:
2277 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2279 case COMPLEX_TYPE:
2280 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2281 return fold_convert_loc (loc, type, tem);
2283 default:
2284 gcc_unreachable ();
2287 case COMPLEX_TYPE:
2288 switch (TREE_CODE (orig))
2290 case INTEGER_TYPE:
2291 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2292 case POINTER_TYPE: case REFERENCE_TYPE:
2293 case REAL_TYPE:
2294 case FIXED_POINT_TYPE:
2295 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2296 fold_convert_loc (loc, TREE_TYPE (type), arg),
2297 fold_convert_loc (loc, TREE_TYPE (type),
2298 integer_zero_node));
2299 case COMPLEX_TYPE:
2301 tree rpart, ipart;
2303 if (TREE_CODE (arg) == COMPLEX_EXPR)
2305 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 0));
2307 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2308 TREE_OPERAND (arg, 1));
2309 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2312 arg = save_expr (arg);
2313 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2314 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2315 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2316 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2317 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2320 default:
2321 gcc_unreachable ();
2324 case VECTOR_TYPE:
2325 if (integer_zerop (arg))
2326 return build_zero_vector (type);
2327 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2328 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2329 || TREE_CODE (orig) == VECTOR_TYPE);
2330 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2332 case VOID_TYPE:
2333 tem = fold_ignored_result (arg);
2334 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2336 default:
2337 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2338 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2339 gcc_unreachable ();
2341 fold_convert_exit:
2342 protected_set_expr_location_unshare (tem, loc);
2343 return tem;
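#if 0
/* Minimal illustrative sketch (ARG is a hypothetical expression of
   integer type): converting it to double builds a FLOAT_EXPR, while
   converting a COMPLEX_TYPE value would first extract its real
   part.  */
tree d = fold_convert_loc (UNKNOWN_LOCATION, double_type_node, arg);
#endif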
2346 /* Return false if expr can be assumed not to be an lvalue, true
2347 otherwise. */
2349 static bool
2350 maybe_lvalue_p (const_tree x)
2352 /* We only need to wrap lvalue tree codes. */
2353 switch (TREE_CODE (x))
2355 case VAR_DECL:
2356 case PARM_DECL:
2357 case RESULT_DECL:
2358 case LABEL_DECL:
2359 case FUNCTION_DECL:
2360 case SSA_NAME:
2362 case COMPONENT_REF:
2363 case MEM_REF:
2364 case INDIRECT_REF:
2365 case ARRAY_REF:
2366 case ARRAY_RANGE_REF:
2367 case BIT_FIELD_REF:
2368 case OBJ_TYPE_REF:
2370 case REALPART_EXPR:
2371 case IMAGPART_EXPR:
2372 case PREINCREMENT_EXPR:
2373 case PREDECREMENT_EXPR:
2374 case SAVE_EXPR:
2375 case TRY_CATCH_EXPR:
2376 case WITH_CLEANUP_EXPR:
2377 case COMPOUND_EXPR:
2378 case MODIFY_EXPR:
2379 case TARGET_EXPR:
2380 case COND_EXPR:
2381 case BIND_EXPR:
2382 break;
2384 default:
2385 /* Assume the worst for front-end tree codes. */
2386 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2387 break;
2388 return false;
2391 return true;
2394 /* Return an expr equal to X but certainly not valid as an lvalue. */
2396 tree
2397 non_lvalue_loc (location_t loc, tree x)
2399 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2400 us. */
2401 if (in_gimple_form)
2402 return x;
2404 if (! maybe_lvalue_p (x))
2405 return x;
2406 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2409 /* When pedantic, return an expr equal to X but certainly not valid as a
2410 pedantic lvalue. Otherwise, return X. */
2412 static tree
2413 pedantic_non_lvalue_loc (location_t loc, tree x)
2415 return protected_set_expr_location_unshare (x, loc);
2418 /* Given a tree comparison code, return the code that is the logical inverse.
2419 It is generally not safe to do this for floating-point comparisons, except
2420 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2421 ERROR_MARK in this case. */
2423 enum tree_code
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
2426 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2427 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2428 return ERROR_MARK;
2430 switch (code)
2432 case EQ_EXPR:
2433 return NE_EXPR;
2434 case NE_EXPR:
2435 return EQ_EXPR;
2436 case GT_EXPR:
2437 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 case GE_EXPR:
2439 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 case LT_EXPR:
2441 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 case LE_EXPR:
2443 return honor_nans ? UNGT_EXPR : GT_EXPR;
2444 case LTGT_EXPR:
2445 return UNEQ_EXPR;
2446 case UNEQ_EXPR:
2447 return LTGT_EXPR;
2448 case UNGT_EXPR:
2449 return LE_EXPR;
2450 case UNGE_EXPR:
2451 return LT_EXPR;
2452 case UNLT_EXPR:
2453 return GE_EXPR;
2454 case UNLE_EXPR:
2455 return GT_EXPR;
2456 case ORDERED_EXPR:
2457 return UNORDERED_EXPR;
2458 case UNORDERED_EXPR:
2459 return ORDERED_EXPR;
2460 default:
2461 gcc_unreachable ();
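/* For example, invert_tree_comparison (LT_EXPR, false) is GE_EXPR.
   With NaNs honored the inverse of LT_EXPR is instead UNGE_EXPR,
   because !(x < y) must also hold when x and y are unordered; and
   with trapping math in effect LT_EXPR yields ERROR_MARK, since the
   inverted form would change which inputs trap.  */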
2465 /* Similar, but return the comparison that results if the operands are
2466 swapped. This is safe for floating-point. */
2468 enum tree_code
2469 swap_tree_comparison (enum tree_code code)
2471 switch (code)
2473 case EQ_EXPR:
2474 case NE_EXPR:
2475 case ORDERED_EXPR:
2476 case UNORDERED_EXPR:
2477 case LTGT_EXPR:
2478 case UNEQ_EXPR:
2479 return code;
2480 case GT_EXPR:
2481 return LT_EXPR;
2482 case GE_EXPR:
2483 return LE_EXPR;
2484 case LT_EXPR:
2485 return GT_EXPR;
2486 case LE_EXPR:
2487 return GE_EXPR;
2488 case UNGT_EXPR:
2489 return UNLT_EXPR;
2490 case UNGE_EXPR:
2491 return UNLE_EXPR;
2492 case UNLT_EXPR:
2493 return UNGT_EXPR;
2494 case UNLE_EXPR:
2495 return UNGE_EXPR;
2496 default:
2497 gcc_unreachable ();
2502 /* Convert a comparison tree code from an enum tree_code representation
2503 into a compcode bit-based encoding. This function is the inverse of
2504 compcode_to_comparison. */
2506 static enum comparison_code
2507 comparison_to_compcode (enum tree_code code)
2509 switch (code)
2511 case LT_EXPR:
2512 return COMPCODE_LT;
2513 case EQ_EXPR:
2514 return COMPCODE_EQ;
2515 case LE_EXPR:
2516 return COMPCODE_LE;
2517 case GT_EXPR:
2518 return COMPCODE_GT;
2519 case NE_EXPR:
2520 return COMPCODE_NE;
2521 case GE_EXPR:
2522 return COMPCODE_GE;
2523 case ORDERED_EXPR:
2524 return COMPCODE_ORD;
2525 case UNORDERED_EXPR:
2526 return COMPCODE_UNORD;
2527 case UNLT_EXPR:
2528 return COMPCODE_UNLT;
2529 case UNEQ_EXPR:
2530 return COMPCODE_UNEQ;
2531 case UNLE_EXPR:
2532 return COMPCODE_UNLE;
2533 case UNGT_EXPR:
2534 return COMPCODE_UNGT;
2535 case LTGT_EXPR:
2536 return COMPCODE_LTGT;
2537 case UNGE_EXPR:
2538 return COMPCODE_UNGE;
2539 default:
2540 gcc_unreachable ();
2544 /* Convert a compcode bit-based encoding of a comparison operator back
2545 to GCC's enum tree_code representation. This function is the
2546 inverse of comparison_to_compcode. */
2548 static enum tree_code
2549 compcode_to_comparison (enum comparison_code code)
2551 switch (code)
2553 case COMPCODE_LT:
2554 return LT_EXPR;
2555 case COMPCODE_EQ:
2556 return EQ_EXPR;
2557 case COMPCODE_LE:
2558 return LE_EXPR;
2559 case COMPCODE_GT:
2560 return GT_EXPR;
2561 case COMPCODE_NE:
2562 return NE_EXPR;
2563 case COMPCODE_GE:
2564 return GE_EXPR;
2565 case COMPCODE_ORD:
2566 return ORDERED_EXPR;
2567 case COMPCODE_UNORD:
2568 return UNORDERED_EXPR;
2569 case COMPCODE_UNLT:
2570 return UNLT_EXPR;
2571 case COMPCODE_UNEQ:
2572 return UNEQ_EXPR;
2573 case COMPCODE_UNLE:
2574 return UNLE_EXPR;
2575 case COMPCODE_UNGT:
2576 return UNGT_EXPR;
2577 case COMPCODE_LTGT:
2578 return LTGT_EXPR;
2579 case COMPCODE_UNGE:
2580 return UNGE_EXPR;
2581 default:
2582 gcc_unreachable ();
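/* The bit-based encoding makes logical combinations of comparisons
   cheap.  For example, COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3
   == COMPCODE_LE, and COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2
   == COMPCODE_EQ.  */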
2586 /* Return a tree for the comparison which is the combination of
2587 doing the AND or OR (depending on CODE) of the two operations LCODE
2588 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2589 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2590 if this makes the transformation invalid. */
2592 tree
2593 combine_comparisons (location_t loc,
2594 enum tree_code code, enum tree_code lcode,
2595 enum tree_code rcode, tree truth_type,
2596 tree ll_arg, tree lr_arg)
2598 bool honor_nans = HONOR_NANS (ll_arg);
2599 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2600 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2601 int compcode;
2603 switch (code)
2605 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2606 compcode = lcompcode & rcompcode;
2607 break;
2609 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2610 compcode = lcompcode | rcompcode;
2611 break;
2613 default:
2614 return NULL_TREE;
2617 if (!honor_nans)
2619 /* Eliminate unordered comparisons, as well as LTGT and ORD
2620 which are not used unless the mode has NaNs. */
2621 compcode &= ~COMPCODE_UNORD;
2622 if (compcode == COMPCODE_LTGT)
2623 compcode = COMPCODE_NE;
2624 else if (compcode == COMPCODE_ORD)
2625 compcode = COMPCODE_TRUE;
2627 else if (flag_trapping_math)
2629 /* Check that the original operation and the optimized ones will trap
2630 under the same condition. */
2631 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2632 && (lcompcode != COMPCODE_EQ)
2633 && (lcompcode != COMPCODE_ORD);
2634 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2635 && (rcompcode != COMPCODE_EQ)
2636 && (rcompcode != COMPCODE_ORD);
2637 bool trap = (compcode & COMPCODE_UNORD) == 0
2638 && (compcode != COMPCODE_EQ)
2639 && (compcode != COMPCODE_ORD);
2641 /* In a short-circuited boolean expression the LHS might be
2642 such that the RHS, if evaluated, will never trap. For
2643 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2644 if neither x nor y is NaN. (This is a mixed blessing: for
2645 example, the expression above will never trap, hence
2646 optimizing it to x < y would be invalid). */
2647 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2648 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 rtrap = false;
2651 /* If the comparison was short-circuited, and only the RHS
2652 trapped, we may now generate a spurious trap. */
2653 if (rtrap && !ltrap
2654 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 return NULL_TREE;
2657 /* If we changed the conditions that cause a trap, we lose. */
2658 if ((ltrap || rtrap) != trap)
2659 return NULL_TREE;
2662 if (compcode == COMPCODE_TRUE)
2663 return constant_boolean_node (true, truth_type);
2664 else if (compcode == COMPCODE_FALSE)
2665 return constant_boolean_node (false, truth_type);
2666 else
2668 enum tree_code tcode;
2670 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2671 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
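#if 0
/* Minimal illustrative sketch (X and Y are hypothetical integer
   operands): combining x < y && x == y.  COMPCODE_LT & COMPCODE_EQ
   is 0, i.e. COMPCODE_FALSE, so this returns constant false.  */
tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ANDIF_EXPR,
			      LT_EXPR, EQ_EXPR, boolean_type_node,
			      x, y);
#endif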
2675 /* Return nonzero if two operands (typically of the same tree node)
2676 are necessarily equal. FLAGS modifies behavior as follows:
2678 If OEP_ONLY_CONST is set, only return nonzero for constants.
2679 This function tests whether the operands are indistinguishable;
2680 it does not test whether they are equal using C's == operation.
2681 The distinction is important for IEEE floating point, because
2682 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2683 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2685 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2686 even though it may hold multiple values during a function.
2687 This is because a GCC tree node guarantees that nothing else is
2688 executed between the evaluation of its "operands" (which may often
2689 be evaluated in arbitrary order). Hence if the operands themselves
2690 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2691 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2692 unset means assuming isochronic (or instantaneous) tree equivalence.
2693 Unless comparing arbitrary expression trees, such as from different
2694 statements, this flag can usually be left unset.
2696 If OEP_PURE_SAME is set, then pure functions with identical arguments
2697 are considered the same. It is used when the caller has other ways
2698 to ensure that global memory is unchanged in between.
2700 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2701 not values of expressions.
2703 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2704 any operand with side effects. This is unnecessarily conservative in the
2705 case we know that arg0 and arg1 are in disjoint code paths (such as in
2706 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2707 addresses with TREE_CONSTANT flag set so we know that &var == &var
2708 even if var is volatile. */
2710 int
2711 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2713 /* If either is ERROR_MARK, they aren't equal. */
2714 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2715 || TREE_TYPE (arg0) == error_mark_node
2716 || TREE_TYPE (arg1) == error_mark_node)
2717 return 0;
2719 /* Similarly, if either does not have a type (like a released SSA name),
2720 they aren't equal. */
2721 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2722 return 0;
2724 /* We cannot consider pointers to different address space equal. */
2725 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2726 && POINTER_TYPE_P (TREE_TYPE (arg1))
2727 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2728 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2729 return 0;
2731 /* Check equality of integer constants before bailing out due to
2732 precision differences. */
2733 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2735 /* Address of INTEGER_CST is not defined; check that we did not forget
2736 to drop the OEP_ADDRESS_OF flags. */
2737 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2738 return tree_int_cst_equal (arg0, arg1);
2741 if (!(flags & OEP_ADDRESS_OF))
2743 /* If both types don't have the same signedness, then we can't consider
2744 them equal. We must check this before the STRIP_NOPS calls
2745 because they may change the signedness of the arguments. As pointers
2746 strictly don't have a signedness, require either two pointers or
2747 two non-pointers as well. */
2748 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2749 || POINTER_TYPE_P (TREE_TYPE (arg0))
2750 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2751 return 0;
2753 /* If both types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2762 #if 0
2763 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2764 sanity check once the issue is solved. */
2765 else
2766 /* Addresses of conversions and SSA_NAMEs (and many other things)
2767 are not defined. Check that we did not forget to drop the
2768 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2769 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2770 && TREE_CODE (arg0) != SSA_NAME);
2771 #endif
2773 /* In case both args are comparisons but with different comparison
2774 code, try to swap the comparison operands of one arg to produce
2775 a match and compare that variant. */
2776 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2777 && COMPARISON_CLASS_P (arg0)
2778 && COMPARISON_CLASS_P (arg1))
2780 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2782 if (TREE_CODE (arg0) == swap_code)
2783 return operand_equal_p (TREE_OPERAND (arg0, 0),
2784 TREE_OPERAND (arg1, 1), flags)
2785 && operand_equal_p (TREE_OPERAND (arg0, 1),
2786 TREE_OPERAND (arg1, 0), flags);
2789 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2791 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2792 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2794 else if (flags & OEP_ADDRESS_OF)
2796 /* If we are interested in comparing addresses ignore
2797 MEM_REF wrappings of the base that can appear just for
2798 TBAA reasons. */
2799 if (TREE_CODE (arg0) == MEM_REF
2800 && DECL_P (arg1)
2801 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2802 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2803 && integer_zerop (TREE_OPERAND (arg0, 1)))
2804 return 1;
2805 else if (TREE_CODE (arg1) == MEM_REF
2806 && DECL_P (arg0)
2807 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2808 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2809 && integer_zerop (TREE_OPERAND (arg1, 1)))
2810 return 1;
2811 return 0;
2813 else
2814 return 0;
2817 /* When not checking addresses, this is needed for conversions and for
2818 COMPONENT_REF. Might as well play it safe and always test this. */
2819 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2820 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2821 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2822 && !(flags & OEP_ADDRESS_OF)))
2823 return 0;
2825 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2826 We don't care about side effects in that case because the SAVE_EXPR
2827 takes care of that for us. In all other cases, two expressions are
2828 equal if they have no side effects. If we have two identical
2829 expressions with side effects that should be treated the same due
2830 to the only side effects being identical SAVE_EXPR's, that will
2831 be detected in the recursive calls below.
2832 If we are taking an invariant address of two identical objects
2833 they are necessarily equal as well. */
2834 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2835 && (TREE_CODE (arg0) == SAVE_EXPR
2836 || (flags & OEP_MATCH_SIDE_EFFECTS)
2837 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2838 return 1;
2840 /* Next handle constant cases, those for which we can return 1 even
2841 if ONLY_CONST is set. */
2842 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2843 switch (TREE_CODE (arg0))
2845 case INTEGER_CST:
2846 return tree_int_cst_equal (arg0, arg1);
2848 case FIXED_CST:
2849 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2850 TREE_FIXED_CST (arg1));
2852 case REAL_CST:
2853 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2854 return 1;
2857 if (!HONOR_SIGNED_ZEROS (arg0))
2859 /* If we do not distinguish between signed and unsigned zero,
2860 consider them equal. */
2861 if (real_zerop (arg0) && real_zerop (arg1))
2862 return 1;
2864 return 0;
2866 case VECTOR_CST:
2868 unsigned i;
2870 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2871 return 0;
2873 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2875 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2876 VECTOR_CST_ELT (arg1, i), flags))
2877 return 0;
2879 return 1;
2882 case COMPLEX_CST:
2883 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2884 flags)
2885 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2886 flags));
2888 case STRING_CST:
2889 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2890 && ! memcmp (TREE_STRING_POINTER (arg0),
2891 TREE_STRING_POINTER (arg1),
2892 TREE_STRING_LENGTH (arg0)));
2894 case ADDR_EXPR:
2895 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2896 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2897 flags | OEP_ADDRESS_OF
2898 | OEP_MATCH_SIDE_EFFECTS);
2899 case CONSTRUCTOR:
2900 /* In GIMPLE empty constructors are allowed in initializers of
2901 aggregates. */
2902 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2903 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2904 default:
2905 break;
2908 if (flags & OEP_ONLY_CONST)
2909 return 0;
2911 /* Define macros to test an operand from arg0 and arg1 for equality and a
2912 variant that allows null and views null as being different from any
2913 non-null value. In the latter case, if either is null, then both
2914 must be; otherwise, do the normal comparison. */
2915 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2916 TREE_OPERAND (arg1, N), flags)
2918 #define OP_SAME_WITH_NULL(N) \
2919 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2920 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2922 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2924 case tcc_unary:
2925 /* Two conversions are equal only if signedness and modes match. */
2926 switch (TREE_CODE (arg0))
2928 CASE_CONVERT:
2929 case FIX_TRUNC_EXPR:
2930 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2931 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2932 return 0;
2933 break;
2934 default:
2935 break;
2938 return OP_SAME (0);
2941 case tcc_comparison:
2942 case tcc_binary:
2943 if (OP_SAME (0) && OP_SAME (1))
2944 return 1;
2946 /* For commutative ops, allow the other order. */
2947 return (commutative_tree_code (TREE_CODE (arg0))
2948 && operand_equal_p (TREE_OPERAND (arg0, 0),
2949 TREE_OPERAND (arg1, 1), flags)
2950 && operand_equal_p (TREE_OPERAND (arg0, 1),
2951 TREE_OPERAND (arg1, 0), flags));
2953 case tcc_reference:
2954 /* If either of the pointer (or reference) expressions we are
2955 dereferencing contain a side effect, these cannot be equal,
2956 but their addresses can be. */
2957 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2958 && (TREE_SIDE_EFFECTS (arg0)
2959 || TREE_SIDE_EFFECTS (arg1)))
2960 return 0;
2962 switch (TREE_CODE (arg0))
2964 case INDIRECT_REF:
2965 if (!(flags & OEP_ADDRESS_OF)
2966 && (TYPE_ALIGN (TREE_TYPE (arg0))
2967 != TYPE_ALIGN (TREE_TYPE (arg1))))
2968 return 0;
2969 flags &= ~OEP_ADDRESS_OF;
2970 return OP_SAME (0);
2972 case REALPART_EXPR:
2973 case IMAGPART_EXPR:
2974 case VIEW_CONVERT_EXPR:
2975 return OP_SAME (0);
2977 case TARGET_MEM_REF:
2978 case MEM_REF:
2979 if (!(flags & OEP_ADDRESS_OF))
2981 /* Require equal access sizes. */
2982 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2983 && (!TYPE_SIZE (TREE_TYPE (arg0))
2984 || !TYPE_SIZE (TREE_TYPE (arg1))
2985 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2986 TYPE_SIZE (TREE_TYPE (arg1)),
2987 flags)))
2988 return 0;
2989 /* Verify that accesses are TBAA compatible. */
2990 if (flag_strict_aliasing
2991 && (!alias_ptr_types_compatible_p
2992 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2993 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2994 || (MR_DEPENDENCE_CLIQUE (arg0)
2995 != MR_DEPENDENCE_CLIQUE (arg1))
2996 || (MR_DEPENDENCE_BASE (arg0)
2997 != MR_DEPENDENCE_BASE (arg1))))
2998 return 0;
2999 /* Verify that alignment is compatible. */
3000 if (TYPE_ALIGN (TREE_TYPE (arg0))
3001 != TYPE_ALIGN (TREE_TYPE (arg1)))
3002 return 0;
3004 flags &= ~OEP_ADDRESS_OF;
3005 return (OP_SAME (0) && OP_SAME (1)
3006 /* TARGET_MEM_REF require equal extra operands. */
3007 && (TREE_CODE (arg0) != TARGET_MEM_REF
3008 || (OP_SAME_WITH_NULL (2)
3009 && OP_SAME_WITH_NULL (3)
3010 && OP_SAME_WITH_NULL (4))));
3012 case ARRAY_REF:
3013 case ARRAY_RANGE_REF:
3014 /* Operands 2 and 3 may be null.
3015 Compare the array index by value if it is constant first as we
3016 may have different types but same value here. */
3017 if (!OP_SAME (0))
3018 return 0;
3019 flags &= ~OEP_ADDRESS_OF;
3020 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3021 TREE_OPERAND (arg1, 1))
3022 || OP_SAME (1))
3023 && OP_SAME_WITH_NULL (2)
3024 && OP_SAME_WITH_NULL (3));
3026 case COMPONENT_REF:
3027 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3028 may be NULL when we're called to compare MEM_EXPRs. */
3029 if (!OP_SAME_WITH_NULL (0)
3030 || !OP_SAME (1))
3031 return 0;
3032 flags &= ~OEP_ADDRESS_OF;
3033 return OP_SAME_WITH_NULL (2);
3035 case BIT_FIELD_REF:
3036 if (!OP_SAME (0))
3037 return 0;
3038 flags &= ~OEP_ADDRESS_OF;
3039 return OP_SAME (1) && OP_SAME (2);
3041 default:
3042 return 0;
3045 case tcc_expression:
3046 switch (TREE_CODE (arg0))
3048 case ADDR_EXPR:
3049 /* Be sure we pass the right ADDRESS_OF flag. */
3050 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3051 return operand_equal_p (TREE_OPERAND (arg0, 0),
3052 TREE_OPERAND (arg1, 0),
3053 flags | OEP_ADDRESS_OF);
3055 case TRUTH_NOT_EXPR:
3056 return OP_SAME (0);
3058 case TRUTH_ANDIF_EXPR:
3059 case TRUTH_ORIF_EXPR:
3060 return OP_SAME (0) && OP_SAME (1);
3062 case FMA_EXPR:
3063 case WIDEN_MULT_PLUS_EXPR:
3064 case WIDEN_MULT_MINUS_EXPR:
3065 if (!OP_SAME (2))
3066 return 0;
3067 /* The multiplication operands are commutative. */
3068 /* FALLTHRU */
3070 case TRUTH_AND_EXPR:
3071 case TRUTH_OR_EXPR:
3072 case TRUTH_XOR_EXPR:
3073 if (OP_SAME (0) && OP_SAME (1))
3074 return 1;
3076 /* Otherwise take into account this is a commutative operation. */
3077 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3078 TREE_OPERAND (arg1, 1), flags)
3079 && operand_equal_p (TREE_OPERAND (arg0, 1),
3080 TREE_OPERAND (arg1, 0), flags));
3082 case COND_EXPR:
3083 case VEC_COND_EXPR:
3084 case DOT_PROD_EXPR:
3085 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3087 default:
3088 return 0;
3091 case tcc_vl_exp:
3092 switch (TREE_CODE (arg0))
3094 case CALL_EXPR:
3095 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3096 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3097 /* If one CALL_EXPR calls an internal function and the other calls
3098 a normal function, they are not equal. */
3099 return 0;
3100 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3102 /* If the CALL_EXPRs call different internal functions, then they
3103 are not equal. */
3104 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3105 return 0;
3107 else
3109 /* If the CALL_EXPRs call different functions, then they are not
3110 equal. */
3111 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3112 flags))
3113 return 0;
3116 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3118 unsigned int cef = call_expr_flags (arg0);
3119 if (flags & OEP_PURE_SAME)
3120 cef &= ECF_CONST | ECF_PURE;
3121 else
3122 cef &= ECF_CONST;
3123 if (!cef)
3124 return 0;
3127 /* Now see if all the arguments are the same. */
3129 const_call_expr_arg_iterator iter0, iter1;
3130 const_tree a0, a1;
3131 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3132 a1 = first_const_call_expr_arg (arg1, &iter1);
3133 a0 && a1;
3134 a0 = next_const_call_expr_arg (&iter0),
3135 a1 = next_const_call_expr_arg (&iter1))
3136 if (! operand_equal_p (a0, a1, flags))
3137 return 0;
3139 /* If we get here and both argument lists are exhausted
3140 then the CALL_EXPRs are equal. */
3141 return ! (a0 || a1);
3143 default:
3144 return 0;
3147 case tcc_declaration:
3148 /* Consider __builtin_sqrt equal to sqrt. */
3149 return (TREE_CODE (arg0) == FUNCTION_DECL
3150 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3151 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3152 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3154 case tcc_exceptional:
3155 if (TREE_CODE (arg0) == CONSTRUCTOR)
3157 /* In GIMPLE constructors are used only to build vectors from
3158 elements. Individual elements in the constructor must be
3159 indexed in increasing order and form an initial sequence.
3161 We make no effort to compare constructors in GENERIC.
3162 (see sem_variable::equals in ipa-icf which can do so for
3163 constants). */
3164 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3165 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3166 return 0;
3168 /* Be sure that vectors constructed have the same representation.
3169 We have only checked that element precision and modes match.
3170 Vectors may be BLKmode and thus also check that the number of
3171 parts match. */
3172 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3173 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3174 return 0;
3176 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3177 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3178 unsigned int len = vec_safe_length (v0);
3180 if (len != vec_safe_length (v1))
3181 return 0;
3183 for (unsigned int i = 0; i < len; i++)
3185 constructor_elt *c0 = &(*v0)[i];
3186 constructor_elt *c1 = &(*v1)[i];
3188 if (!operand_equal_p (c0->value, c1->value, flags)
3189 /* In GIMPLE the indexes can be either NULL or matching i.
3190 Double check this so we won't get false
3191 positives for GENERIC. */
3192 || (c0->index
3193 && (TREE_CODE (c0->index) != INTEGER_CST
3194 || !compare_tree_int (c0->index, i)))
3195 || (c1->index
3196 && (TREE_CODE (c1->index) != INTEGER_CST
3197 || !compare_tree_int (c1->index, i))))
3198 return 0;
3200 return 1;
3202 return 0;
3204 default:
3205 return 0;
3208 #undef OP_SAME
3209 #undef OP_SAME_WITH_NULL
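#if 0
/* Minimal illustrative sketch (hypothetical caller): two distinct
   INTEGER_CST nodes with value 7 compare equal, whereas REAL_CSTs
   0.0 and -0.0 do not under default flags, because real_identical
   distinguishes the sign of zero and signed zeros are honored.  */
gcc_assert (operand_equal_p (build_int_cst (integer_type_node, 7),
			     build_int_cst (integer_type_node, 7), 0));
#endif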
3212 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3213 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3215 When in doubt, return 0. */
3217 static int
3218 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3220 int unsignedp1, unsignedpo;
3221 tree primarg0, primarg1, primother;
3222 unsigned int correct_width;
3224 if (operand_equal_p (arg0, arg1, 0))
3225 return 1;
3227 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3228 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3229 return 0;
3231 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3232 and see if the inner values are the same. This removes any
3233 signedness comparison, which doesn't matter here. */
3234 primarg0 = arg0, primarg1 = arg1;
3235 STRIP_NOPS (primarg0);
3236 STRIP_NOPS (primarg1);
3237 if (operand_equal_p (primarg0, primarg1, 0))
3238 return 1;
3240 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3241 actual comparison operand, ARG0.
3243 First throw away any conversions to wider types
3244 already present in the operands. */
3246 primarg1 = get_narrower (arg1, &unsignedp1);
3247 primother = get_narrower (other, &unsignedpo);
3249 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3250 if (unsignedp1 == unsignedpo
3251 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3252 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3254 tree type = TREE_TYPE (arg0);
3256 /* Make sure shorter operand is extended the right way
3257 to match the longer operand. */
3258 primarg1 = fold_convert (signed_or_unsigned_type_for
3259 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3261 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3262 return 1;
3265 return 0;
3268 /* See if ARG is an expression that is either a comparison or is performing
3269 arithmetic on comparisons. The comparisons must only be comparing
3270 two different values, which will be stored in *CVAL1 and *CVAL2; if
3271 they are nonzero it means that some operands have already been found.
3272 No variables may be used anywhere else in the expression except in the
3273 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3274 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3276 If this is true, return 1. Otherwise, return zero. */
3278 static int
3279 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3281 enum tree_code code = TREE_CODE (arg);
3282 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3284 /* We can handle some of the tcc_expression cases here. */
3285 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3286 tclass = tcc_unary;
3287 else if (tclass == tcc_expression
3288 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3289 || code == COMPOUND_EXPR))
3290 tclass = tcc_binary;
3292 else if (tclass == tcc_expression && code == SAVE_EXPR
3293 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3295 /* If we've already found a CVAL1 or CVAL2, this expression is
3296 too complex to handle. */
3297 if (*cval1 || *cval2)
3298 return 0;
3300 tclass = tcc_unary;
3301 *save_p = 1;
3304 switch (tclass)
3306 case tcc_unary:
3307 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3309 case tcc_binary:
3310 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3311 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3312 cval1, cval2, save_p));
3314 case tcc_constant:
3315 return 1;
3317 case tcc_expression:
3318 if (code == COND_EXPR)
3319 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3320 cval1, cval2, save_p)
3321 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3322 cval1, cval2, save_p)
3323 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3324 cval1, cval2, save_p));
3325 return 0;
3327 case tcc_comparison:
3328 /* First see if we can handle the first operand, then the second. For
3329 the second operand, we know *CVAL1 can't be zero. It must be that
3330 one side of the comparison is each of the values; test for the
3331 case where this isn't true by failing if the two operands
3332 are the same. */
3334 if (operand_equal_p (TREE_OPERAND (arg, 0),
3335 TREE_OPERAND (arg, 1), 0))
3336 return 0;
3338 if (*cval1 == 0)
3339 *cval1 = TREE_OPERAND (arg, 0);
3340 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3342 else if (*cval2 == 0)
3343 *cval2 = TREE_OPERAND (arg, 0);
3344 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3346 else
3347 return 0;
3349 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3351 else if (*cval2 == 0)
3352 *cval2 = TREE_OPERAND (arg, 1);
3353 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3355 else
3356 return 0;
3358 return 1;
3360 default:
3361 return 0;
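/* For example, (x < y) && (y >= x) satisfies this predicate, with
   *CVAL1 == x and *CVAL2 == y; (x < y) && (y < z) does not, because
   a third value Z appears.  */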
3365 /* ARG is a tree that is known to contain just arithmetic operations and
3366 comparisons. Evaluate the operations in the tree substituting NEW0 for
3367 any occurrence of OLD0 as an operand of a comparison and likewise for
3368 NEW1 and OLD1. */
3370 static tree
3371 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3372 tree old1, tree new1)
3374 tree type = TREE_TYPE (arg);
3375 enum tree_code code = TREE_CODE (arg);
3376 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3378 /* We can handle some of the tcc_expression cases here. */
3379 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3380 tclass = tcc_unary;
3381 else if (tclass == tcc_expression
3382 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3383 tclass = tcc_binary;
3385 switch (tclass)
3387 case tcc_unary:
3388 return fold_build1_loc (loc, code, type,
3389 eval_subst (loc, TREE_OPERAND (arg, 0),
3390 old0, new0, old1, new1));
3392 case tcc_binary:
3393 return fold_build2_loc (loc, code, type,
3394 eval_subst (loc, TREE_OPERAND (arg, 0),
3395 old0, new0, old1, new1),
3396 eval_subst (loc, TREE_OPERAND (arg, 1),
3397 old0, new0, old1, new1));
3399 case tcc_expression:
3400 switch (code)
3402 case SAVE_EXPR:
3403 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3404 old1, new1);
3406 case COMPOUND_EXPR:
3407 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3408 old1, new1);
3410 case COND_EXPR:
3411 return fold_build3_loc (loc, code, type,
3412 eval_subst (loc, TREE_OPERAND (arg, 0),
3413 old0, new0, old1, new1),
3414 eval_subst (loc, TREE_OPERAND (arg, 1),
3415 old0, new0, old1, new1),
3416 eval_subst (loc, TREE_OPERAND (arg, 2),
3417 old0, new0, old1, new1));
3418 default:
3419 break;
3421 /* Fall through - ??? */
3423 case tcc_comparison:
3425 tree arg0 = TREE_OPERAND (arg, 0);
3426 tree arg1 = TREE_OPERAND (arg, 1);
3428 /* We need to check both for exact equality and tree equality. The
3429 former will be true if the operand has a side-effect. In that
3430 case, we know the operand occurred exactly once. */
3432 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3433 arg0 = new0;
3434 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3435 arg0 = new1;
3437 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3438 arg1 = new0;
3439 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3440 arg1 = new1;
3442 return fold_build2_loc (loc, code, type, arg0, arg1);
3445 default:
3446 return arg;
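/* For example, applying eval_subst to a < b with OLD0 == a,
   NEW0 == 0, OLD1 == b and NEW1 == 1 produces the folded form of
   0 < 1, i.e. constant true.  */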
3450 /* Return a tree for the case when the result of an expression is RESULT
3451 converted to TYPE and OMITTED was previously an operand of the expression
3452 but is now not needed (e.g., we folded OMITTED * 0).
3454 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3455 the conversion of RESULT to TYPE. */
3457 tree
3458 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3460 tree t = fold_convert_loc (loc, type, result);
3462 /* If the resulting operand is an empty statement, just return the omitted
3463 statement cast to void. */
3464 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3465 return build1_loc (loc, NOP_EXPR, void_type_node,
3466 fold_ignored_result (omitted));
3468 if (TREE_SIDE_EFFECTS (omitted))
3469 return build2_loc (loc, COMPOUND_EXPR, type,
3470 fold_ignored_result (omitted), t);
3472 return non_lvalue_loc (loc, t);
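#if 0
/* Minimal illustrative sketch (CALL is a hypothetical CALL_EXPR with
   side effects): after folding call () * 0 to zero, the call must
   still be evaluated, so the result becomes (call (), 0) via a
   COMPOUND_EXPR.  */
tree t = omit_one_operand_loc (UNKNOWN_LOCATION, integer_type_node,
			       integer_zero_node, call);
#endif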
3475 /* Return a tree for the case when the result of an expression is RESULT
3476 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3477 of the expression but are now not needed.
3479 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3480 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3481 evaluated before OMITTED2. Otherwise, if neither has side effects,
3482 just do the conversion of RESULT to TYPE. */
3484 tree
3485 omit_two_operands_loc (location_t loc, tree type, tree result,
3486 tree omitted1, tree omitted2)
3488 tree t = fold_convert_loc (loc, type, result);
3490 if (TREE_SIDE_EFFECTS (omitted2))
3491 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3492 if (TREE_SIDE_EFFECTS (omitted1))
3493 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3495 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3499 /* Return a simplified tree node for the truth-negation of ARG. This
3500 never alters ARG itself. We assume that ARG is an operation that
3501 returns a truth value (0 or 1).
3503 FIXME: one would think we would fold the result, but it causes
3504 problems with the dominator optimizer. */
3506 static tree
3507 fold_truth_not_expr (location_t loc, tree arg)
3509 tree type = TREE_TYPE (arg);
3510 enum tree_code code = TREE_CODE (arg);
3511 location_t loc1, loc2;
3513 /* If this is a comparison, we can simply invert it, except for
3514 floating-point non-equality comparisons, in which case we just
3515 enclose a TRUTH_NOT_EXPR around what we have. */
3517 if (TREE_CODE_CLASS (code) == tcc_comparison)
3519 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3520 if (FLOAT_TYPE_P (op_type)
3521 && flag_trapping_math
3522 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3523 && code != NE_EXPR && code != EQ_EXPR)
3524 return NULL_TREE;
3526 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3527 if (code == ERROR_MARK)
3528 return NULL_TREE;
3530 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3531 TREE_OPERAND (arg, 1));
3534 switch (code)
3536 case INTEGER_CST:
3537 return constant_boolean_node (integer_zerop (arg), type);
3539 case TRUTH_AND_EXPR:
3540 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3541 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3542 return build2_loc (loc, TRUTH_OR_EXPR, type,
3543 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3544 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3546 case TRUTH_OR_EXPR:
3547 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3548 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3549 return build2_loc (loc, TRUTH_AND_EXPR, type,
3550 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3551 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3553 case TRUTH_XOR_EXPR:
3554 /* Here we can invert either operand. We invert the first operand
3555 unless the second operand is a TRUTH_NOT_EXPR in which case our
3556 result is the XOR of the first operand with the inside of the
3557 negation of the second operand. */
3559 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3560 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3561 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3562 else
3563 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3564 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3565 TREE_OPERAND (arg, 1));
3567 case TRUTH_ANDIF_EXPR:
3568 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3569 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3570 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3571 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3572 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3574 case TRUTH_ORIF_EXPR:
3575 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3576 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3577 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3578 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3579 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3581 case TRUTH_NOT_EXPR:
3582 return TREE_OPERAND (arg, 0);
3584 case COND_EXPR:
3586 tree arg1 = TREE_OPERAND (arg, 1);
3587 tree arg2 = TREE_OPERAND (arg, 2);
3589 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3590 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3592 /* A COND_EXPR may have a throw as one operand, which
3593 then has void type. Just leave void operands
3594 as they are. */
3595 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3596 VOID_TYPE_P (TREE_TYPE (arg1))
3597 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3598 VOID_TYPE_P (TREE_TYPE (arg2))
3599 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3602 case COMPOUND_EXPR:
3603 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3604 return build2_loc (loc, COMPOUND_EXPR, type,
3605 TREE_OPERAND (arg, 0),
3606 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3608 case NON_LVALUE_EXPR:
3609 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3610 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3612 CASE_CONVERT:
3613 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3614 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3616 /* ... fall through ... */
3618 case FLOAT_EXPR:
3619 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3620 return build1_loc (loc, TREE_CODE (arg), type,
3621 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3623 case BIT_AND_EXPR:
3624 if (!integer_onep (TREE_OPERAND (arg, 1)))
3625 return NULL_TREE;
3626 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3628 case SAVE_EXPR:
3629 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3631 case CLEANUP_POINT_EXPR:
3632 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3633 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3634 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3636 default:
3637 return NULL_TREE;
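/* For example, the rules above rewrite !(a && b) as !a || !b and
   !(a ? b : c) as a ? !b : !c, recursing into the operands via
   invert_truthvalue_loc.  */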
3641 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3642 assume that ARG is an operation that returns a truth value (0 or 1
3643 for scalars, 0 or -1 for vectors). Return the folded expression if
3644 folding is successful. Otherwise, return NULL_TREE. */
3646 static tree
3647 fold_invert_truthvalue (location_t loc, tree arg)
3649 tree type = TREE_TYPE (arg);
3650 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3651 ? BIT_NOT_EXPR
3652 : TRUTH_NOT_EXPR,
3653 type, arg);
3656 /* Return a simplified tree node for the truth-negation of ARG. This
3657 never alters ARG itself. We assume that ARG is an operation that
3658 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3660 tree
3661 invert_truthvalue_loc (location_t loc, tree arg)
3663 if (TREE_CODE (arg) == ERROR_MARK)
3664 return arg;
3666 tree type = TREE_TYPE (arg);
3667 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3668 ? BIT_NOT_EXPR
3669 : TRUTH_NOT_EXPR,
3670 type, arg);
3673 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3674 with code CODE. This optimization is unsafe. */
3675 static tree
3676 distribute_real_division (location_t loc, enum tree_code code, tree type,
3677 tree arg0, tree arg1)
3679 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3680 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3682 /* (A / C) +- (B / C) -> (A +- B) / C. */
3683 if (mul0 == mul1
3684 && operand_equal_p (TREE_OPERAND (arg0, 1),
3685 TREE_OPERAND (arg1, 1), 0))
3686 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3687 fold_build2_loc (loc, code, type,
3688 TREE_OPERAND (arg0, 0),
3689 TREE_OPERAND (arg1, 0)),
3690 TREE_OPERAND (arg0, 1));
3692 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3693 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3694 TREE_OPERAND (arg1, 0), 0)
3695 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3696 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3698 REAL_VALUE_TYPE r0, r1;
3699 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3700 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3701 if (!mul0)
3702 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3703 if (!mul1)
3704 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3705 real_arithmetic (&r0, code, &r0, &r1);
3706 return fold_build2_loc (loc, MULT_EXPR, type,
3707 TREE_OPERAND (arg0, 0),
3708 build_real (type, r0));
3711 return NULL_TREE;
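/* Worked examples (valid only under unsafe FP math, as noted above):
   x/3.0 + y/3.0 becomes (x + y)/3.0 by the first pattern, and
   x/2.0 + x/4.0 becomes x * 0.75 by the second, since 1/2.0 + 1/4.0
   folds to the constant 0.75.  */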
3714 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3715 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3716 and uses reverse storage order if REVERSEP is nonzero. */
3718 static tree
3719 make_bit_field_ref (location_t loc, tree inner, tree type,
3720 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3721 int unsignedp, int reversep)
3723 tree result, bftype;
3725 if (bitpos == 0 && !reversep)
3727 tree size = TYPE_SIZE (TREE_TYPE (inner));
3728 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3729 || POINTER_TYPE_P (TREE_TYPE (inner)))
3730 && tree_fits_shwi_p (size)
3731 && tree_to_shwi (size) == bitsize)
3732 return fold_convert_loc (loc, type, inner);
3735 bftype = type;
3736 if (TYPE_PRECISION (bftype) != bitsize
3737 || TYPE_UNSIGNED (bftype) == !unsignedp)
3738 bftype = build_nonstandard_integer_type (bitsize, 0);
3740 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3741 size_int (bitsize), bitsize_int (bitpos));
3742 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3744 if (bftype != type)
3745 result = fold_convert_loc (loc, type, result);
3747 return result;
3750 /* Optimize a bit-field compare.
3752 There are two cases: First is a compare against a constant and the
3753 second is a comparison of two items where the fields are at the same
3754 bit position relative to the start of a chunk (byte, halfword, word)
3755 large enough to contain it. In these cases we can avoid the shift
3756 implicit in bitfield extractions.
3758 For constants, we emit a compare of the shifted constant with the
3759 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3760 compared. For two fields at the same position, we do the ANDs with the
3761 similar mask and compare the result of the ANDs.
3763 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3764 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3765 are the left and right operands of the comparison, respectively.
3767 If the optimization described above can be done, we return the resulting
3768 tree. Otherwise we return zero. */
3770 static tree
3771 optimize_bit_field_compare (location_t loc, enum tree_code code,
3772 tree compare_type, tree lhs, tree rhs)
3774 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3775 tree type = TREE_TYPE (lhs);
3776 tree unsigned_type;
3777 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3778 machine_mode lmode, rmode, nmode;
3779 int lunsignedp, runsignedp;
3780 int lreversep, rreversep;
3781 int lvolatilep = 0, rvolatilep = 0;
3782 tree linner, rinner = NULL_TREE;
3783 tree mask;
3784 tree offset;
3786 /* Get all the information about the extractions being done. If the bit size
3787 is the same as the size of the underlying object, we aren't doing an
3788 extraction at all and so can do nothing. We also don't want to
3789 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3790 then will no longer be able to replace it. */
3791 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3792 &lunsignedp, &lreversep, &lvolatilep, false);
3793 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3794 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3795 return 0;
3797 if (const_p)
3798 rreversep = lreversep;
3799 else
3801 /* If this is not a constant, we can only do something if bit positions,
3802 sizes, signedness and storage order are the same. */
3803 rinner
3804 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3805 &runsignedp, &rreversep, &rvolatilep, false);
3807 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3808 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3809 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3810 return 0;
3813 /* See if we can find a mode to refer to this field. We should be able to,
3814 but fail if we can't. */
3815 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3816 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3817 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3818 TYPE_ALIGN (TREE_TYPE (rinner))),
3819 word_mode, false);
3820 if (nmode == VOIDmode)
3821 return 0;
3823 /* Set signed and unsigned types of the precision of this mode for the
3824 shifts below. */
3825 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3827 /* Compute the bit position and size for the new reference and our offset
3828 within it. If the new reference is the same size as the original, we
3829 won't optimize anything, so return zero. */
3830 nbitsize = GET_MODE_BITSIZE (nmode);
3831 nbitpos = lbitpos & ~ (nbitsize - 1);
3832 lbitpos -= nbitpos;
3833 if (nbitsize == lbitsize)
3834 return 0;
3836 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3837 lbitpos = nbitsize - lbitsize - lbitpos;
3839 /* Make the mask to be used against the extracted field. */
3840 mask = build_int_cst_type (unsigned_type, -1);
3841 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3842 mask = const_binop (RSHIFT_EXPR, mask,
3843 size_int (nbitsize - lbitsize - lbitpos));
3845 if (! const_p)
3846 /* If not comparing with a constant, just rework the comparison
3847 and return. */
3848 return fold_build2_loc (loc, code, compare_type,
3849 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3850 make_bit_field_ref (loc, linner,
3851 unsigned_type,
3852 nbitsize, nbitpos,
3853 1, lreversep),
3854 mask),
3855 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3856 make_bit_field_ref (loc, rinner,
3857 unsigned_type,
3858 nbitsize, nbitpos,
3859 1, rreversep),
3860 mask));
3862 /* Otherwise, we are handling the constant case. See if the constant is too
3863 big for the field. Warn and return a tree for 0 (false) if so. We do
3864 this not only for its own sake, but to avoid having to test for this
3865 error case below. If we didn't, we might generate wrong code.
3867 For unsigned fields, the constant shifted right by the field length should
3868 be all zero. For signed fields, the high-order bits should agree with
3869 the sign bit. */
3871 if (lunsignedp)
3873 if (wi::lrshift (rhs, lbitsize) != 0)
3875 warning (0, "comparison is always %d due to width of bit-field",
3876 code == NE_EXPR);
3877 return constant_boolean_node (code == NE_EXPR, compare_type);
3880 else
3882 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3883 if (tem != 0 && tem != -1)
3885 warning (0, "comparison is always %d due to width of bit-field",
3886 code == NE_EXPR);
3887 return constant_boolean_node (code == NE_EXPR, compare_type);
3891 /* Single-bit compares should always be against zero. */
3892 if (lbitsize == 1 && ! integer_zerop (rhs))
3894 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3895 rhs = build_int_cst (type, 0);
3898 /* Make a new bitfield reference, shift the constant over the
3899 appropriate number of bits and mask it with the computed mask
3900 (in case this was a signed field). If we changed it, make a new one. */
3901 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3902 lreversep);
3904 rhs = const_binop (BIT_AND_EXPR,
3905 const_binop (LSHIFT_EXPR,
3906 fold_convert_loc (loc, unsigned_type, rhs),
3907 size_int (lbitpos)),
3908 mask);
3910 lhs = build2_loc (loc, code, compare_type,
3911 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3912 return lhs;
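/* A minimal stand-alone sketch of the constant case handled above, in
   plain C rather than trees; the example_* name and field layout are
   illustrative assumptions, not part of fold-const.c.  Testing a
   BITS-wide field at bit POS of WORD against the constant C needs no
   shift of WORD: mask the containing word and compare it with the
   constant shifted into place.  */

static int
example_bit_field_compare (unsigned int word)
{
  const unsigned int pos = 5, bits = 3, c = 6;
  unsigned int mask = ((1u << bits) - 1) << pos;
  /* Extract-and-compare, with the shift this optimization avoids.  */
  int shifted = ((word >> pos) & ((1u << bits) - 1)) == c;
  /* Mask-and-compare against the pre-shifted constant instead.  */
  int masked = (word & mask) == (c << pos);
  return shifted == masked;	/* Always 1.  */
}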
3915 /* Subroutine for fold_truth_andor_1: decode a field reference.
3917 If EXP is a comparison reference, we return the innermost reference.
3919 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3920 set to the starting bit number.
3922 If the innermost field can be completely contained in a mode-sized
3923 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3925 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3926 otherwise it is not changed.
3928 *PUNSIGNEDP is set to the signedness of the field.
3930 *PREVERSEP is set to the storage order of the field.
3932 *PMASK is set to the mask used. This is either contained in a
3933 BIT_AND_EXPR or derived from the width of the field.
3935 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3937 Return 0 if this is not a component reference or is one that we can't
3938 do anything with. */
3940 static tree
3941 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3942 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3943 int *punsignedp, int *preversep, int *pvolatilep,
3944 tree *pmask, tree *pand_mask)
3946 tree outer_type = 0;
3947 tree and_mask = 0;
3948 tree mask, inner, offset;
3949 tree unsigned_type;
3950 unsigned int precision;
3952 /* All the optimizations using this function assume integer fields.
3953 There are problems with FP fields since the type_for_size call
3954 below can fail for, e.g., XFmode. */
3955 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3956 return 0;
3958 /* We are interested in the bare arrangement of bits, so strip everything
3959 that doesn't affect the machine mode. However, record the type of the
3960 outermost expression if it may matter below. */
3961 if (CONVERT_EXPR_P (exp)
3962 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3963 outer_type = TREE_TYPE (exp);
3964 STRIP_NOPS (exp);
3966 if (TREE_CODE (exp) == BIT_AND_EXPR)
3968 and_mask = TREE_OPERAND (exp, 1);
3969 exp = TREE_OPERAND (exp, 0);
3970 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3971 if (TREE_CODE (and_mask) != INTEGER_CST)
3972 return 0;
3975 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3976 punsignedp, preversep, pvolatilep, false);
3977 if ((inner == exp && and_mask == 0)
3978 || *pbitsize < 0 || offset != 0
3979 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3980 return 0;
3982 /* If the number of bits in the reference is the same as the bitsize of
3983 the outer type, then the outer type gives the signedness. Otherwise
3984 (in case of a small bitfield) the signedness is unchanged. */
3985 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3986 *punsignedp = TYPE_UNSIGNED (outer_type);
3988 /* Compute the mask to access the bitfield. */
3989 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3990 precision = TYPE_PRECISION (unsigned_type);
3992 mask = build_int_cst_type (unsigned_type, -1);
3994 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3995 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3997 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3998 if (and_mask != 0)
3999 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4000 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4002 *pmask = mask;
4003 *pand_mask = and_mask;
4004 return inner;
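/* A stand-alone sketch of the LSHIFT/RSHIFT pair used above to build
   the field mask; example_field_mask is an illustrative name and the
   fixed 32-bit precision is an assumption.  Shifting all-ones left by
   precision - bitsize and logically back right yields BITSIZE
   low-order ones without ever shifting by the full precision, which
   the (1u << bitsize) - 1 formulation would do (undefined behavior
   when bitsize equals the precision).  */

static unsigned int
example_field_mask (unsigned int bitsize)
{
  const unsigned int precision = 32;	/* Assume 1 <= bitsize <= 32.  */
  unsigned int mask = ~0u;
  mask = mask << (precision - bitsize);
  mask = mask >> (precision - bitsize);
  return mask;	/* E.g. 0xff for bitsize == 8.  */
}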
4007 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4008 bit positions and MASK is SIGNED. */
4010 static int
4011 all_ones_mask_p (const_tree mask, unsigned int size)
4013 tree type = TREE_TYPE (mask);
4014 unsigned int precision = TYPE_PRECISION (type);
4016 /* If this function returns true when the type of the mask is
4017 UNSIGNED, then there will be errors. In particular see
4018 gcc.c-torture/execute/990326-1.c. There does not appear to be
4019 any documentation paper trail as to why this is so. But the pre
4020 wide-int code worked with that restriction and it has been preserved
4021 here. */
4022 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4023 return false;
4025 return wi::mask (size, false, precision) == mask;
4028 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4029 represents the sign bit of EXP's type. If EXP represents a sign
4030 or zero extension, also test VAL against the unextended type.
4031 The return value is the (sub)expression whose sign bit is VAL,
4032 or NULL_TREE otherwise. */
4034 tree
4035 sign_bit_p (tree exp, const_tree val)
4037 int width;
4038 tree t;
4040 /* Tree EXP must have an integral type. */
4041 t = TREE_TYPE (exp);
4042 if (! INTEGRAL_TYPE_P (t))
4043 return NULL_TREE;
4045 /* Tree VAL must be an integer constant. */
4046 if (TREE_CODE (val) != INTEGER_CST
4047 || TREE_OVERFLOW (val))
4048 return NULL_TREE;
4050 width = TYPE_PRECISION (t);
4051 if (wi::only_sign_bit_p (val, width))
4052 return exp;
4054 /* Handle extension from a narrower type. */
4055 if (TREE_CODE (exp) == NOP_EXPR
4056 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4057 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4059 return NULL_TREE;
4062 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4063 to be evaluated unconditionally. */
4065 static int
4066 simple_operand_p (const_tree exp)
4068 /* Strip any conversions that don't change the machine mode. */
4069 STRIP_NOPS (exp);
4071 return (CONSTANT_CLASS_P (exp)
4072 || TREE_CODE (exp) == SSA_NAME
4073 || (DECL_P (exp)
4074 && ! TREE_ADDRESSABLE (exp)
4075 && ! TREE_THIS_VOLATILE (exp)
4076 && ! DECL_NONLOCAL (exp)
4077 /* Don't regard global variables as simple. They may be
4078 allocated in ways unknown to the compiler (shared memory,
4079 #pragma weak, etc). */
4080 && ! TREE_PUBLIC (exp)
4081 && ! DECL_EXTERNAL (exp)
4082 /* Weakrefs are not safe to be read, since they can be NULL.
4083 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4084 have DECL_WEAK flag set. */
4085 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4086 /* Loading a static variable is unduly expensive, but global
4087 registers aren't expensive. */
4088 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4091 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4092 to be evaluated unconditionally.
4093 In addition to simple_operand_p, we assume that comparisons, conversions,
4094 and logic-not operations are simple, if their operands are simple, too. */
4096 static bool
4097 simple_operand_p_2 (tree exp)
4099 enum tree_code code;
4101 if (TREE_SIDE_EFFECTS (exp)
4102 || tree_could_trap_p (exp))
4103 return false;
4105 while (CONVERT_EXPR_P (exp))
4106 exp = TREE_OPERAND (exp, 0);
4108 code = TREE_CODE (exp);
4110 if (TREE_CODE_CLASS (code) == tcc_comparison)
4111 return (simple_operand_p (TREE_OPERAND (exp, 0))
4112 && simple_operand_p (TREE_OPERAND (exp, 1)));
4114 if (code == TRUTH_NOT_EXPR)
4115 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4117 return simple_operand_p (exp);
4121 /* The following functions are subroutines to fold_range_test and allow it to
4122 try to change a logical combination of comparisons into a range test.
4124 For example, both
4125 X == 2 || X == 3 || X == 4 || X == 5
4126 and
4127 X >= 2 && X <= 5
4128 are converted to
4129 (unsigned) (X - 2) <= 3
4131 We describe each set of comparisons as being either inside or outside
4132 a range, using a variable named like IN_P, and then describe the
4133 range with a lower and upper bound. If one of the bounds is omitted,
4134 it represents either the highest or lowest value of the type.
4136 In the comments below, we represent a range by two numbers in brackets
4137 preceded by a "+" to designate being inside that range, or a "-" to
4138 designate being outside that range, so the condition can be inverted by
4139 flipping the prefix. An omitted bound is represented by a "-". For
4140 example, "- [-, 10]" means being outside the range starting at the lowest
4141 possible value and ending at 10, in other words, being greater than 10.
4142 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4143 always false.
4145 We set up things so that the missing bounds are handled in a consistent
4146 manner so neither a missing bound nor "true" and "false" need to be
4147 handled using a special case. */
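/* As a stand-alone illustration of the transformation described above
   (the example_* function is ours, not GCC's): for integer X the
   chained tests and the single unsigned comparison agree on every
   input, because subtracting 2 maps [2, 5] onto [0, 3] and maps
   everything else above 3 in the unsigned domain.  The subtraction is
   done in the unsigned type to avoid signed overflow at the extremes.  */

static int
example_range_test (int x)
{
  int chained = (x == 2 || x == 3 || x == 4 || x == 5);
  int folded = ((unsigned int) x - 2u <= 3u);
  return chained == folded;	/* Always 1.  */
}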
4149 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4150 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4151 and UPPER1_P are nonzero if the respective argument is an upper bound
4152 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4153 must be specified for a comparison. ARG1 will be converted to ARG0's
4154 type if both are specified. */
4156 static tree
4157 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4158 tree arg1, int upper1_p)
4160 tree tem;
4161 int result;
4162 int sgn0, sgn1;
4164 /* If neither arg represents infinity, do the normal operation.
4165 Else, if not a comparison, return infinity. Else handle the special
4166 comparison rules. Note that most of the cases below won't occur, but
4167 are handled for consistency. */
4169 if (arg0 != 0 && arg1 != 0)
4171 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4172 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4173 STRIP_NOPS (tem);
4174 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4177 if (TREE_CODE_CLASS (code) != tcc_comparison)
4178 return 0;
4180 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4181 for neither. In real maths, we cannot assume open ended ranges are
4182 the same. But, this is computer arithmetic, where numbers are finite.
4183 We can therefore represent any missing bound by a value Z that is
4184 greater than any representable number. This permits
4185 us to treat unbounded ranges as equal. */
4186 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4187 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4188 switch (code)
4190 case EQ_EXPR:
4191 result = sgn0 == sgn1;
4192 break;
4193 case NE_EXPR:
4194 result = sgn0 != sgn1;
4195 break;
4196 case LT_EXPR:
4197 result = sgn0 < sgn1;
4198 break;
4199 case LE_EXPR:
4200 result = sgn0 <= sgn1;
4201 break;
4202 case GT_EXPR:
4203 result = sgn0 > sgn1;
4204 break;
4205 case GE_EXPR:
4206 result = sgn0 >= sgn1;
4207 break;
4208 default:
4209 gcc_unreachable ();
4212 return constant_boolean_node (result, type);
4215 /* Helper routine for make_range. Perform one step for it, return
4216 new expression if the loop should continue or NULL_TREE if it should
4217 stop. */
4219 tree
4220 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4221 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4222 bool *strict_overflow_p)
4224 tree arg0_type = TREE_TYPE (arg0);
4225 tree n_low, n_high, low = *p_low, high = *p_high;
4226 int in_p = *p_in_p, n_in_p;
4228 switch (code)
4230 case TRUTH_NOT_EXPR:
4231 /* We can only do something if the range is testing for zero. */
4232 if (low == NULL_TREE || high == NULL_TREE
4233 || ! integer_zerop (low) || ! integer_zerop (high))
4234 return NULL_TREE;
4235 *p_in_p = ! in_p;
4236 return arg0;
4238 case EQ_EXPR: case NE_EXPR:
4239 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4240 /* We can only do something if the range is testing for zero
4241 and if the second operand is an integer constant. Note that
4242 saying something is "in" the range we make is done by
4243 complementing IN_P, since it is set in the initial case of
4244 being not equal to zero; "out" is leaving it alone. */
4245 if (low == NULL_TREE || high == NULL_TREE
4246 || ! integer_zerop (low) || ! integer_zerop (high)
4247 || TREE_CODE (arg1) != INTEGER_CST)
4248 return NULL_TREE;
4250 switch (code)
4252 case NE_EXPR: /* - [c, c] */
4253 low = high = arg1;
4254 break;
4255 case EQ_EXPR: /* + [c, c] */
4256 in_p = ! in_p, low = high = arg1;
4257 break;
4258 case GT_EXPR: /* - [-, c] */
4259 low = 0, high = arg1;
4260 break;
4261 case GE_EXPR: /* + [c, -] */
4262 in_p = ! in_p, low = arg1, high = 0;
4263 break;
4264 case LT_EXPR: /* - [c, -] */
4265 low = arg1, high = 0;
4266 break;
4267 case LE_EXPR: /* + [-, c] */
4268 in_p = ! in_p, low = 0, high = arg1;
4269 break;
4270 default:
4271 gcc_unreachable ();
4274 /* If this is an unsigned comparison, we also know that EXP is
4275 greater than or equal to zero. We base the range tests we make
4276 on that fact, so we record it here so we can parse existing
4277 range tests. We test arg0_type since often the return type
4278 of, e.g. EQ_EXPR, is boolean. */
4279 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4281 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4282 in_p, low, high, 1,
4283 build_int_cst (arg0_type, 0),
4284 NULL_TREE))
4285 return NULL_TREE;
4287 in_p = n_in_p, low = n_low, high = n_high;
4289 /* If the high bound is missing, but we have a nonzero low
4290 bound, reverse the range so it goes from zero to the low bound
4291 minus 1. */
4292 if (high == 0 && low && ! integer_zerop (low))
4294 in_p = ! in_p;
4295 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4296 build_int_cst (TREE_TYPE (low), 1), 0);
4297 low = build_int_cst (arg0_type, 0);
4301 *p_low = low;
4302 *p_high = high;
4303 *p_in_p = in_p;
4304 return arg0;
4306 case NEGATE_EXPR:
4307 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4308 low and high are non-NULL, then normalize will DTRT. */
4309 if (!TYPE_UNSIGNED (arg0_type)
4310 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4312 if (low == NULL_TREE)
4313 low = TYPE_MIN_VALUE (arg0_type);
4314 if (high == NULL_TREE)
4315 high = TYPE_MAX_VALUE (arg0_type);
4318 /* (-x) IN [a,b] -> x in [-b, -a] */
4319 n_low = range_binop (MINUS_EXPR, exp_type,
4320 build_int_cst (exp_type, 0),
4321 0, high, 1);
4322 n_high = range_binop (MINUS_EXPR, exp_type,
4323 build_int_cst (exp_type, 0),
4324 0, low, 0);
4325 if (n_high != 0 && TREE_OVERFLOW (n_high))
4326 return NULL_TREE;
4327 goto normalize;
4329 case BIT_NOT_EXPR:
4330 /* ~ X -> -X - 1 */
4331 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4332 build_int_cst (exp_type, 1));
4334 case PLUS_EXPR:
4335 case MINUS_EXPR:
4336 if (TREE_CODE (arg1) != INTEGER_CST)
4337 return NULL_TREE;
4339 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4340 move a constant to the other side. */
4341 if (!TYPE_UNSIGNED (arg0_type)
4342 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4343 return NULL_TREE;
4345 /* If EXP is signed, any overflow in the computation is undefined,
4346 so we don't worry about it so long as our computations on
4347 the bounds don't overflow. For unsigned, overflow is defined
4348 and this is exactly the right thing. */
4349 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4350 arg0_type, low, 0, arg1, 0);
4351 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4352 arg0_type, high, 1, arg1, 0);
4353 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4354 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4355 return NULL_TREE;
4357 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4358 *strict_overflow_p = true;
4360 normalize:
4361 /* Check for an unsigned range which has wrapped around the maximum
4362 value thus making n_high < n_low, and normalize it. */
4363 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4365 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4366 build_int_cst (TREE_TYPE (n_high), 1), 0);
4367 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4368 build_int_cst (TREE_TYPE (n_low), 1), 0);
4370 /* If the range is of the form +/- [ x+1, x ], we won't
4371 be able to normalize it. But then, it represents the
4372 whole range or the empty set, so make it
4373 +/- [ -, - ]. */
4374 if (tree_int_cst_equal (n_low, low)
4375 && tree_int_cst_equal (n_high, high))
4376 low = high = 0;
4377 else
4378 in_p = ! in_p;
4380 else
4381 low = n_low, high = n_high;
4383 *p_low = low;
4384 *p_high = high;
4385 *p_in_p = in_p;
4386 return arg0;
4388 CASE_CONVERT:
4389 case NON_LVALUE_EXPR:
4390 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4391 return NULL_TREE;
4393 if (! INTEGRAL_TYPE_P (arg0_type)
4394 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4395 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4396 return NULL_TREE;
4398 n_low = low, n_high = high;
4400 if (n_low != 0)
4401 n_low = fold_convert_loc (loc, arg0_type, n_low);
4403 if (n_high != 0)
4404 n_high = fold_convert_loc (loc, arg0_type, n_high);
4406 /* If we're converting arg0 from an unsigned type to exp,
4407 a signed type, we will be doing the comparison as unsigned.
4408 The tests above have already verified that LOW and HIGH
4409 are both positive.
4411 So we have to ensure that we will handle large unsigned
4412 values the same way that the current signed bounds treat
4413 negative values. */
4415 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4417 tree high_positive;
4418 tree equiv_type;
4419 /* For fixed-point modes, we need to pass the saturating flag
4420 as the 2nd parameter. */
4421 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4422 equiv_type
4423 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4424 TYPE_SATURATING (arg0_type));
4425 else
4426 equiv_type
4427 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4429 /* A range without an upper bound is, naturally, unbounded.
4430 Since convert would have cropped a very large value, use
4431 the max value for the destination type. */
4432 high_positive
4433 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4434 : TYPE_MAX_VALUE (arg0_type);
4436 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4437 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4438 fold_convert_loc (loc, arg0_type,
4439 high_positive),
4440 build_int_cst (arg0_type, 1));
4442 /* If the low bound is specified, "and" the range with the
4443 range for which the original unsigned value will be
4444 positive. */
4445 if (low != 0)
4447 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4448 1, fold_convert_loc (loc, arg0_type,
4449 integer_zero_node),
4450 high_positive))
4451 return NULL_TREE;
4453 in_p = (n_in_p == in_p);
4455 else
4457 /* Otherwise, "or" the range with the range of the input
4458 that will be interpreted as negative. */
4459 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4460 1, fold_convert_loc (loc, arg0_type,
4461 integer_zero_node),
4462 high_positive))
4463 return NULL_TREE;
4465 in_p = (in_p != n_in_p);
4469 *p_low = n_low;
4470 *p_high = n_high;
4471 *p_in_p = in_p;
4472 return arg0;
4474 default:
4475 return NULL_TREE;
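/* A stand-alone sketch of the NEGATE_EXPR step above, with an
   illustrative example_* name: a test on -X becomes a test on X with
   the bounds negated and swapped, (-X) in [a, b] <=> X in [-b, -a].  */

static int
example_negate_range_step (int x)
{
  /* Holds for any x whose negation does not overflow, i.e. any
     x != INT_MIN on two's-complement targets.  */
  int on_negated = (-x >= 2 && -x <= 5);
  int on_x = (x >= -5 && x <= -2);
  return on_negated == on_x;	/* Always 1 under that assumption.  */
}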
4479 /* Given EXP, a logical expression, set the range it is testing into
4480 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4481 actually being tested. *PLOW and *PHIGH will be made of the same
4482 type as the returned expression. If EXP is not a comparison, we
4483 will most likely not be returning a useful value and range. Set
4484 *STRICT_OVERFLOW_P to true if the return value is only valid
4485 because signed overflow is undefined; otherwise, do not change
4486 *STRICT_OVERFLOW_P. */
4488 tree
4489 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4490 bool *strict_overflow_p)
4492 enum tree_code code;
4493 tree arg0, arg1 = NULL_TREE;
4494 tree exp_type, nexp;
4495 int in_p;
4496 tree low, high;
4497 location_t loc = EXPR_LOCATION (exp);
4499 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4500 and see if we can refine the range. Some of the cases below may not
4501 happen, but it doesn't seem worth worrying about this. We "continue"
4502 the outer loop when we've changed something; otherwise we "break"
4503 the switch, which will "break" the while. */
4505 in_p = 0;
4506 low = high = build_int_cst (TREE_TYPE (exp), 0);
4508 while (1)
4510 code = TREE_CODE (exp);
4511 exp_type = TREE_TYPE (exp);
4512 arg0 = NULL_TREE;
4514 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4516 if (TREE_OPERAND_LENGTH (exp) > 0)
4517 arg0 = TREE_OPERAND (exp, 0);
4518 if (TREE_CODE_CLASS (code) == tcc_binary
4519 || TREE_CODE_CLASS (code) == tcc_comparison
4520 || (TREE_CODE_CLASS (code) == tcc_expression
4521 && TREE_OPERAND_LENGTH (exp) > 1))
4522 arg1 = TREE_OPERAND (exp, 1);
4524 if (arg0 == NULL_TREE)
4525 break;
4527 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4528 &high, &in_p, strict_overflow_p);
4529 if (nexp == NULL_TREE)
4530 break;
4531 exp = nexp;
4534 /* If EXP is a constant, we can evaluate whether this is true or false. */
4535 if (TREE_CODE (exp) == INTEGER_CST)
4537 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4538 exp, 0, low, 0))
4539 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4540 exp, 1, high, 1)));
4541 low = high = 0;
4542 exp = 0;
4545 *pin_p = in_p, *plow = low, *phigh = high;
4546 return exp;
4549 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4550 type, TYPE, return an expression to test if EXP is in (or out of, depending
4551 on IN_P) the range. Return 0 if the test couldn't be created. */
4553 tree
4554 build_range_check (location_t loc, tree type, tree exp, int in_p,
4555 tree low, tree high)
4557 tree etype = TREE_TYPE (exp), value;
4559 /* Disable this optimization for function pointer expressions
4560 on targets that require function pointer canonicalization. */
4561 if (targetm.have_canonicalize_funcptr_for_compare ()
4562 && TREE_CODE (etype) == POINTER_TYPE
4563 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4564 return NULL_TREE;
4566 if (! in_p)
4568 value = build_range_check (loc, type, exp, 1, low, high);
4569 if (value != 0)
4570 return invert_truthvalue_loc (loc, value);
4572 return 0;
4575 if (low == 0 && high == 0)
4576 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4578 if (low == 0)
4579 return fold_build2_loc (loc, LE_EXPR, type, exp,
4580 fold_convert_loc (loc, etype, high));
4582 if (high == 0)
4583 return fold_build2_loc (loc, GE_EXPR, type, exp,
4584 fold_convert_loc (loc, etype, low));
4586 if (operand_equal_p (low, high, 0))
4587 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4588 fold_convert_loc (loc, etype, low));
4590 if (integer_zerop (low))
4592 if (! TYPE_UNSIGNED (etype))
4594 etype = unsigned_type_for (etype);
4595 high = fold_convert_loc (loc, etype, high);
4596 exp = fold_convert_loc (loc, etype, exp);
4598 return build_range_check (loc, type, exp, 1, 0, high);
4601 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4602 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4604 int prec = TYPE_PRECISION (etype);
4606 if (wi::mask (prec - 1, false, prec) == high)
4608 if (TYPE_UNSIGNED (etype))
4610 tree signed_etype = signed_type_for (etype);
4611 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4612 etype
4613 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4614 else
4615 etype = signed_etype;
4616 exp = fold_convert_loc (loc, etype, exp);
4618 return fold_build2_loc (loc, GT_EXPR, type, exp,
4619 build_int_cst (etype, 0));
4623 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4624 This requires wrap-around arithmetic for the type of the expression.
4625 First make sure that arithmetic in this type is valid, then make sure
4626 that it wraps around. */
4627 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4628 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4629 TYPE_UNSIGNED (etype));
4631 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4633 tree utype, minv, maxv;
4635 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4636 for the type in question, as we rely on this here. */
4637 utype = unsigned_type_for (etype);
4638 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4639 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4640 build_int_cst (TREE_TYPE (maxv), 1), 1);
4641 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4643 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4644 minv, 1, maxv, 1)))
4645 etype = utype;
4646 else
4647 return 0;
4650 high = fold_convert_loc (loc, etype, high);
4651 low = fold_convert_loc (loc, etype, low);
4652 exp = fold_convert_loc (loc, etype, exp);
4654 value = const_binop (MINUS_EXPR, high, low);
4657 if (POINTER_TYPE_P (etype))
4659 if (value != 0 && !TREE_OVERFLOW (value))
4661 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4662 return build_range_check (loc, type,
4663 fold_build_pointer_plus_loc (loc, exp, low),
4664 1, build_int_cst (etype, 0), value);
4666 return 0;
4669 if (value != 0 && !TREE_OVERFLOW (value))
4670 return build_range_check (loc, type,
4671 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4672 1, build_int_cst (etype, 0), value);
4674 return 0;
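/* A stand-alone sketch of the subtract-and-compare form this function
   builds (illustrative example_* name, fixed bounds assumed): c in
   [low, high] becomes one unsigned comparison, relying on the
   wrap-around arithmetic the code above goes out of its way to
   guarantee for the type.  */

static int
example_range_check (int c)
{
  const int low = 10, high = 20;
  int direct = (c >= low && c <= high);
  int single = ((unsigned int) c - (unsigned int) low
		<= (unsigned int) (high - low));
  return direct == single;	/* Always 1.  */
}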
4677 /* Return the predecessor of VAL in its type, handling the infinite case. */
4679 static tree
4680 range_predecessor (tree val)
4682 tree type = TREE_TYPE (val);
4684 if (INTEGRAL_TYPE_P (type)
4685 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4686 return 0;
4687 else
4688 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4689 build_int_cst (TREE_TYPE (val), 1), 0);
4692 /* Return the successor of VAL in its type, handling the infinite case. */
4694 static tree
4695 range_successor (tree val)
4697 tree type = TREE_TYPE (val);
4699 if (INTEGRAL_TYPE_P (type)
4700 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4701 return 0;
4702 else
4703 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4704 build_int_cst (TREE_TYPE (val), 1), 0);
4707 /* Given two ranges, see if we can merge them into one. Return 1 if we
4708 can, 0 if we can't. Set the output range into the specified parameters. */
4710 bool
4711 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4712 tree high0, int in1_p, tree low1, tree high1)
4714 int no_overlap;
4715 int subset;
4716 int temp;
4717 tree tem;
4718 int in_p;
4719 tree low, high;
4720 int lowequal = ((low0 == 0 && low1 == 0)
4721 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4722 low0, 0, low1, 0)));
4723 int highequal = ((high0 == 0 && high1 == 0)
4724 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4725 high0, 1, high1, 1)));
4727 /* Make range 0 be the range that starts first, or ends last if they
4728 start at the same value. Swap them if it isn't. */
4729 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4730 low0, 0, low1, 0))
4731 || (lowequal
4732 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4733 high1, 1, high0, 1))))
4735 temp = in0_p, in0_p = in1_p, in1_p = temp;
4736 tem = low0, low0 = low1, low1 = tem;
4737 tem = high0, high0 = high1, high1 = tem;
4740 /* Now flag two cases, whether the ranges are disjoint or whether the
4741 second range is totally subsumed in the first. Note that the tests
4742 below are simplified by the ones above. */
4743 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4744 high0, 1, low1, 0));
4745 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4746 high1, 1, high0, 1));
4748 /* We now have four cases, depending on whether we are including or
4749 excluding the two ranges. */
4750 if (in0_p && in1_p)
4752 /* If they don't overlap, the result is false. If the second range
4753 is a subset it is the result. Otherwise, the range is from the start
4754 of the second to the end of the first. */
4755 if (no_overlap)
4756 in_p = 0, low = high = 0;
4757 else if (subset)
4758 in_p = 1, low = low1, high = high1;
4759 else
4760 in_p = 1, low = low1, high = high0;
4763 else if (in0_p && ! in1_p)
4765 /* If they don't overlap, the result is the first range. If they are
4766 equal, the result is false. If the second range is a subset of the
4767 first, and the ranges begin at the same place, we go from just after
4768 the end of the second range to the end of the first. If the second
4769 range is not a subset of the first, or if it is a subset and both
4770 ranges end at the same place, the range starts at the start of the
4771 first range and ends just before the second range.
4772 Otherwise, we can't describe this as a single range. */
4773 if (no_overlap)
4774 in_p = 1, low = low0, high = high0;
4775 else if (lowequal && highequal)
4776 in_p = 0, low = high = 0;
4777 else if (subset && lowequal)
4779 low = range_successor (high1);
4780 high = high0;
4781 in_p = 1;
4782 if (low == 0)
4784 /* We are in the weird situation where high0 > high1 but
4785 high1 has no successor. Punt. */
4786 return 0;
4789 else if (! subset || highequal)
4791 low = low0;
4792 high = range_predecessor (low1);
4793 in_p = 1;
4794 if (high == 0)
4796 /* low0 < low1 but low1 has no predecessor. Punt. */
4797 return 0;
4800 else
4801 return 0;
4804 else if (! in0_p && in1_p)
4806 /* If they don't overlap, the result is the second range. If the second
4807 is a subset of the first, the result is false. Otherwise,
4808 the range starts just after the first range and ends at the
4809 end of the second. */
4810 if (no_overlap)
4811 in_p = 1, low = low1, high = high1;
4812 else if (subset || highequal)
4813 in_p = 0, low = high = 0;
4814 else
4816 low = range_successor (high0);
4817 high = high1;
4818 in_p = 1;
4819 if (low == 0)
4821 /* high1 > high0 but high0 has no successor. Punt. */
4822 return 0;
4827 else
4829 /* The case where we are excluding both ranges. Here the complex case
4830 is if they don't overlap. In that case, the only time we have a
4831 range is if they are adjacent. If the second is a subset of the
4832 first, the result is the first. Otherwise, the range to exclude
4833 starts at the beginning of the first range and ends at the end of the
4834 second. */
4835 if (no_overlap)
4837 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4838 range_successor (high0),
4839 1, low1, 0)))
4840 in_p = 0, low = low0, high = high1;
4841 else
4843 /* Canonicalize - [min, x] into - [-, x]. */
4844 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4845 switch (TREE_CODE (TREE_TYPE (low0)))
4847 case ENUMERAL_TYPE:
4848 if (TYPE_PRECISION (TREE_TYPE (low0))
4849 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4850 break;
4851 /* FALLTHROUGH */
4852 case INTEGER_TYPE:
4853 if (tree_int_cst_equal (low0,
4854 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4855 low0 = 0;
4856 break;
4857 case POINTER_TYPE:
4858 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4859 && integer_zerop (low0))
4860 low0 = 0;
4861 break;
4862 default:
4863 break;
4866 /* Canonicalize - [x, max] into - [x, -]. */
4867 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4868 switch (TREE_CODE (TREE_TYPE (high1)))
4870 case ENUMERAL_TYPE:
4871 if (TYPE_PRECISION (TREE_TYPE (high1))
4872 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4873 break;
4874 /* FALLTHROUGH */
4875 case INTEGER_TYPE:
4876 if (tree_int_cst_equal (high1,
4877 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4878 high1 = 0;
4879 break;
4880 case POINTER_TYPE:
4881 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4882 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4883 high1, 1,
4884 build_int_cst (TREE_TYPE (high1), 1),
4885 1)))
4886 high1 = 0;
4887 break;
4888 default:
4889 break;
4892 /* The ranges might also be adjacent between the maximum and
4893 minimum values of the given type. For
4894 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4895 return + [x + 1, y - 1]. */
4896 if (low0 == 0 && high1 == 0)
4898 low = range_successor (high0);
4899 high = range_predecessor (low1);
4900 if (low == 0 || high == 0)
4901 return 0;
4903 in_p = 1;
4905 else
4906 return 0;
4909 else if (subset)
4910 in_p = 0, low = low0, high = high0;
4911 else
4912 in_p = 0, low = low0, high = high1;
4915 *pin_p = in_p, *plow = low, *phigh = high;
4916 return 1;
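/* A stand-alone illustration of the in0_p && in1_p case above (the
   example_* name is ours): two overlapping "in" ranges merge into
   their intersection, here + [2, 5] with + [4, 9] giving + [4, 5].  */

static int
example_merge_in_ranges (int x)
{
  int separate = (x >= 2 && x <= 5) && (x >= 4 && x <= 9);
  int merged = (x >= 4 && x <= 5);
  return separate == merged;	/* Always 1.  */
}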
4920 /* Subroutine of fold, looking inside expressions of the form
4921 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4922 of the COND_EXPR. This function is being used also to optimize
4923 A op B ? C : A, by reversing the comparison first.
4925 Return a folded expression whose code is not a COND_EXPR
4926 anymore, or NULL_TREE if no folding opportunity is found. */
4928 static tree
4929 fold_cond_expr_with_comparison (location_t loc, tree type,
4930 tree arg0, tree arg1, tree arg2)
4932 enum tree_code comp_code = TREE_CODE (arg0);
4933 tree arg00 = TREE_OPERAND (arg0, 0);
4934 tree arg01 = TREE_OPERAND (arg0, 1);
4935 tree arg1_type = TREE_TYPE (arg1);
4936 tree tem;
4938 STRIP_NOPS (arg1);
4939 STRIP_NOPS (arg2);
4941 /* If we have A op 0 ? A : -A, consider applying the following
4942 transformations:
4944 A == 0? A : -A same as -A
4945 A != 0? A : -A same as A
4946 A >= 0? A : -A same as abs (A)
4947 A > 0? A : -A same as abs (A)
4948 A <= 0? A : -A same as -abs (A)
4949 A < 0? A : -A same as -abs (A)
4951 None of these transformations work for modes with signed
4952 zeros. If A is +/-0, the first two transformations will
4953 change the sign of the result (from +0 to -0, or vice
4954 versa). The last four will fix the sign of the result,
4955 even though the original expressions could be positive or
4956 negative, depending on the sign of A.
4958 Note that all these transformations are correct if A is
4959 NaN, since the two alternatives (A and -A) are also NaNs. */
4960 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4961 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4962 ? real_zerop (arg01)
4963 : integer_zerop (arg01))
4964 && ((TREE_CODE (arg2) == NEGATE_EXPR
4965 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4966 /* In the case that A is of the form X-Y, '-A' (arg2) may
4967 have already been folded to Y-X, check for that. */
4968 || (TREE_CODE (arg1) == MINUS_EXPR
4969 && TREE_CODE (arg2) == MINUS_EXPR
4970 && operand_equal_p (TREE_OPERAND (arg1, 0),
4971 TREE_OPERAND (arg2, 1), 0)
4972 && operand_equal_p (TREE_OPERAND (arg1, 1),
4973 TREE_OPERAND (arg2, 0), 0))))
4974 switch (comp_code)
4976 case EQ_EXPR:
4977 case UNEQ_EXPR:
4978 tem = fold_convert_loc (loc, arg1_type, arg1);
4979 return pedantic_non_lvalue_loc (loc,
4980 fold_convert_loc (loc, type,
4981 negate_expr (tem)));
4982 case NE_EXPR:
4983 case LTGT_EXPR:
4984 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4985 case UNGE_EXPR:
4986 case UNGT_EXPR:
4987 if (flag_trapping_math)
4988 break;
4989 /* Fall through. */
4990 case GE_EXPR:
4991 case GT_EXPR:
4992 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4993 break;
4994 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4995 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4996 case UNLE_EXPR:
4997 case UNLT_EXPR:
4998 if (flag_trapping_math)
4999 break;
5000 case LE_EXPR:
5001 case LT_EXPR:
5002 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5003 break;
5004 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5005 return negate_expr (fold_convert_loc (loc, type, tem));
5006 default:
5007 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5008 break;
5011 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5012 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5013 both transformations are correct when A is NaN: A != 0
5014 is then true, and A == 0 is false. */
5016 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5017 && integer_zerop (arg01) && integer_zerop (arg2))
5019 if (comp_code == NE_EXPR)
5020 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5021 else if (comp_code == EQ_EXPR)
5022 return build_zero_cst (type);
5025 /* Try some transformations of A op B ? A : B.
5027 A == B? A : B same as B
5028 A != B? A : B same as A
5029 A >= B? A : B same as max (A, B)
5030 A > B? A : B same as max (B, A)
5031 A <= B? A : B same as min (A, B)
5032 A < B? A : B same as min (B, A)
5034 As above, these transformations don't work in the presence
5035 of signed zeros. For example, if A and B are zeros of
5036 opposite sign, the first two transformations will change
5037 the sign of the result. In the last four, the original
5038 expressions give different results for (A=+0, B=-0) and
5039 (A=-0, B=+0), but the transformed expressions do not.
5041 The first two transformations are correct if either A or B
5042 is a NaN. In the first transformation, the condition will
5043 be false, and B will indeed be chosen. In the case of the
5044 second transformation, the condition A != B will be true,
5045 and A will be chosen.
5047 The conversions to max() and min() are not correct if B is
5048 a number and A is not. The conditions in the original
5049 expressions will be false, so all four give B. The min()
5050 and max() versions would give a NaN instead. */
5051 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5052 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5053 /* Avoid these transformations if the COND_EXPR may be used
5054 as an lvalue in the C++ front-end. PR c++/19199. */
5055 && (in_gimple_form
5056 || VECTOR_TYPE_P (type)
5057 || (! lang_GNU_CXX ()
5058 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5059 || ! maybe_lvalue_p (arg1)
5060 || ! maybe_lvalue_p (arg2)))
5062 tree comp_op0 = arg00;
5063 tree comp_op1 = arg01;
5064 tree comp_type = TREE_TYPE (comp_op0);
5066 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5067 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5069 comp_type = type;
5070 comp_op0 = arg1;
5071 comp_op1 = arg2;
5074 switch (comp_code)
5076 case EQ_EXPR:
5077 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5078 case NE_EXPR:
5079 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5080 case LE_EXPR:
5081 case LT_EXPR:
5082 case UNLE_EXPR:
5083 case UNLT_EXPR:
5084 /* In C++ a ?: expression can be an lvalue, so put the
5085 operand which will be used if they are equal first
5086 so that we can convert this back to the
5087 corresponding COND_EXPR. */
5088 if (!HONOR_NANS (arg1))
5090 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5091 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5092 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5093 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5094 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5095 comp_op1, comp_op0);
5096 return pedantic_non_lvalue_loc (loc,
5097 fold_convert_loc (loc, type, tem));
5099 break;
5100 case GE_EXPR:
5101 case GT_EXPR:
5102 case UNGE_EXPR:
5103 case UNGT_EXPR:
5104 if (!HONOR_NANS (arg1))
5106 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5107 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5108 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5109 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5110 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5111 comp_op1, comp_op0);
5112 return pedantic_non_lvalue_loc (loc,
5113 fold_convert_loc (loc, type, tem));
5115 break;
5116 case UNEQ_EXPR:
5117 if (!HONOR_NANS (arg1))
5118 return pedantic_non_lvalue_loc (loc,
5119 fold_convert_loc (loc, type, arg2));
5120 break;
5121 case LTGT_EXPR:
5122 if (!HONOR_NANS (arg1))
5123 return pedantic_non_lvalue_loc (loc,
5124 fold_convert_loc (loc, type, arg1));
5125 break;
5126 default:
5127 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5128 break;
5132 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5133 we might still be able to simplify this. For example,
5134 if C1 is one less or one more than C2, this might have started
5135 out as a MIN or MAX and been transformed by this function.
5136 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5138 if (INTEGRAL_TYPE_P (type)
5139 && TREE_CODE (arg01) == INTEGER_CST
5140 && TREE_CODE (arg2) == INTEGER_CST)
5141 switch (comp_code)
5143 case EQ_EXPR:
5144 if (TREE_CODE (arg1) == INTEGER_CST)
5145 break;
5146 /* We can replace A with C1 in this case. */
5147 arg1 = fold_convert_loc (loc, type, arg01);
5148 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5150 case LT_EXPR:
5151 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5152 MIN_EXPR, to preserve the signedness of the comparison. */
5153 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5154 OEP_ONLY_CONST)
5155 && operand_equal_p (arg01,
5156 const_binop (PLUS_EXPR, arg2,
5157 build_int_cst (type, 1)),
5158 OEP_ONLY_CONST))
5160 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5161 fold_convert_loc (loc, TREE_TYPE (arg00),
5162 arg2));
5163 return pedantic_non_lvalue_loc (loc,
5164 fold_convert_loc (loc, type, tem));
5166 break;
5168 case LE_EXPR:
5169 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5170 as above. */
5171 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5172 OEP_ONLY_CONST)
5173 && operand_equal_p (arg01,
5174 const_binop (MINUS_EXPR, arg2,
5175 build_int_cst (type, 1)),
5176 OEP_ONLY_CONST))
5178 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5179 fold_convert_loc (loc, TREE_TYPE (arg00),
5180 arg2));
5181 return pedantic_non_lvalue_loc (loc,
5182 fold_convert_loc (loc, type, tem));
5184 break;
5186 case GT_EXPR:
5187 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5188 MAX_EXPR, to preserve the signedness of the comparison. */
5189 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5190 OEP_ONLY_CONST)
5191 && operand_equal_p (arg01,
5192 const_binop (MINUS_EXPR, arg2,
5193 build_int_cst (type, 1)),
5194 OEP_ONLY_CONST))
5196 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5197 fold_convert_loc (loc, TREE_TYPE (arg00),
5198 arg2));
5199 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5201 break;
5203 case GE_EXPR:
5204 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5205 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5206 OEP_ONLY_CONST)
5207 && operand_equal_p (arg01,
5208 const_binop (PLUS_EXPR, arg2,
5209 build_int_cst (type, 1)),
5210 OEP_ONLY_CONST))
5212 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5213 fold_convert_loc (loc, TREE_TYPE (arg00),
5214 arg2));
5215 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5217 break;
5218 case NE_EXPR:
5219 break;
5220 default:
5221 gcc_unreachable ();
5224 return NULL_TREE;
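/* A stand-alone sketch of the A op 0 ? A : -A family above for
   integers (illustrative example_* name).  The GE_EXPR and GT_EXPR
   forms both compute abs: they can differ only at A == 0, where A and
   -A coincide for integers; the signed-zero caveats in the comments
   apply to floating point only.  */

static int
example_abs_forms (int a)
{
  int ge_form = (a >= 0 ? a : -a);
  int gt_form = (a > 0 ? a : -a);
  return ge_form == gt_form;	/* Always 1 (barring a == INT_MIN).  */
}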
5229 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5230 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5231 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5232 false) >= 2)
5233 #endif
5235 /* EXP is some logical combination of boolean tests. See if we can
5236 merge it into some range test. Return the new tree if so. */
5238 static tree
5239 fold_range_test (location_t loc, enum tree_code code, tree type,
5240 tree op0, tree op1)
5242 int or_op = (code == TRUTH_ORIF_EXPR
5243 || code == TRUTH_OR_EXPR);
5244 int in0_p, in1_p, in_p;
5245 tree low0, low1, low, high0, high1, high;
5246 bool strict_overflow_p = false;
5247 tree tem, lhs, rhs;
5248 const char * const warnmsg = G_("assuming signed overflow does not occur "
5249 "when simplifying range test");
5251 if (!INTEGRAL_TYPE_P (type))
5252 return 0;
5254 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5255 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5257 /* If this is an OR operation, invert both sides; we will invert
5258 again at the end. */
5259 if (or_op)
5260 in0_p = ! in0_p, in1_p = ! in1_p;
5262 /* If both expressions are the same, if we can merge the ranges, and we
5263 can build the range test, return it or it inverted. If one of the
5264 ranges is always true or always false, consider it to be the same
5265 expression as the other. */
5266 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5267 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5268 in1_p, low1, high1)
5269 && 0 != (tem = (build_range_check (loc, type,
5270 lhs != 0 ? lhs
5271 : rhs != 0 ? rhs : integer_zero_node,
5272 in_p, low, high))))
5274 if (strict_overflow_p)
5275 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5276 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5279 /* On machines where the branch cost is expensive, if this is a
5280 short-circuited branch and the underlying object on both sides
5281 is the same, make a non-short-circuit operation. */
5282 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5283 && lhs != 0 && rhs != 0
5284 && (code == TRUTH_ANDIF_EXPR
5285 || code == TRUTH_ORIF_EXPR)
5286 && operand_equal_p (lhs, rhs, 0))
5288 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5289 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5290 which cases we can't do this. */
5291 if (simple_operand_p (lhs))
5292 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5293 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5294 type, op0, op1);
5296 else if (!lang_hooks.decls.global_bindings_p ()
5297 && !CONTAINS_PLACEHOLDER_P (lhs))
5299 tree common = save_expr (lhs);
5301 if (0 != (lhs = build_range_check (loc, type, common,
5302 or_op ? ! in0_p : in0_p,
5303 low0, high0))
5304 && (0 != (rhs = build_range_check (loc, type, common,
5305 or_op ? ! in1_p : in1_p,
5306 low1, high1))))
5308 if (strict_overflow_p)
5309 fold_overflow_warning (warnmsg,
5310 WARN_STRICT_OVERFLOW_COMPARISON);
5311 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5312 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5313 type, lhs, rhs);
5318 return 0;
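/* A stand-alone sketch of the non-short-circuit rewrite above
   (illustrative example_* name): when both operands are simple, a
   TRUTH_ANDIF of two comparisons of the same object can be evaluated
   branch-free as a TRUTH_AND, since each comparison already yields
   0 or 1.  */

static int
example_non_short_circuit (int ch)
{
  int short_circuit = (ch >= '0') && (ch <= '9');
  int branch_free = (ch >= '0') & (ch <= '9');
  return short_circuit == branch_free;	/* Always 1.  */
}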
5321 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5322 bit value. Arrange things so the extra bits will be set to zero if and
5323 only if C is sign-extended to its full width. If MASK is nonzero,
5324 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5326 static tree
5327 unextend (tree c, int p, int unsignedp, tree mask)
5329 tree type = TREE_TYPE (c);
5330 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5331 tree temp;
5333 if (p == modesize || unsignedp)
5334 return c;
5336 /* We work by getting just the sign bit into the low-order bit, then
5337 into the high-order bit, then sign-extend. We then XOR that value
5338 with C. */
5339 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5341 /* We must use a signed type in order to get an arithmetic right shift.
5342 However, we must also avoid introducing accidental overflows, so that
5343 a subsequent call to integer_zerop will work. Hence we must
5344 do the type conversion here. At this point, the constant is either
5345 zero or one, and the conversion to a signed type can never overflow.
5346 We could get an overflow if this conversion is done anywhere else. */
5347 if (TYPE_UNSIGNED (type))
5348 temp = fold_convert (signed_type_for (type), temp);
5350 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5351 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5352 if (mask != 0)
5353 temp = const_binop (BIT_AND_EXPR, temp,
5354 fold_convert (TREE_TYPE (c), mask));
5355 /* If necessary, convert the type back to match the type of C. */
5356 if (TYPE_UNSIGNED (type))
5357 temp = fold_convert (type, temp);
5359 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5362 /* For an expression that has the form
5363 (A && B) || ~B
5364 or
5365 (A || B) && ~B,
5366 we can drop one of the inner expressions and simplify to
5367 A || ~B
5368 or
5369 A && ~B
5370 LOC is the location of the resulting expression. OP is the inner
5371 logical operation; the left-hand side in the examples above, while CMPOP
5372 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5373 removing a condition that guards another, as in
5374 (A != NULL && A->...) || A == NULL
5375 which we must not transform. If RHS_ONLY is true, only eliminate the
5376 right-most operand of the inner logical operation. */
5378 static tree
5379 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5380 bool rhs_only)
5382 tree type = TREE_TYPE (cmpop);
5383 enum tree_code code = TREE_CODE (cmpop);
5384 enum tree_code truthop_code = TREE_CODE (op);
5385 tree lhs = TREE_OPERAND (op, 0);
5386 tree rhs = TREE_OPERAND (op, 1);
5387 tree orig_lhs = lhs, orig_rhs = rhs;
5388 enum tree_code rhs_code = TREE_CODE (rhs);
5389 enum tree_code lhs_code = TREE_CODE (lhs);
5390 enum tree_code inv_code;
5392 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5393 return NULL_TREE;
5395 if (TREE_CODE_CLASS (code) != tcc_comparison)
5396 return NULL_TREE;
5398 if (rhs_code == truthop_code)
5400 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5401 if (newrhs != NULL_TREE)
5403 rhs = newrhs;
5404 rhs_code = TREE_CODE (rhs);
5407 if (lhs_code == truthop_code && !rhs_only)
5409 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5410 if (newlhs != NULL_TREE)
5412 lhs = newlhs;
5413 lhs_code = TREE_CODE (lhs);
5417 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5418 if (inv_code == rhs_code
5419 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5420 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5421 return lhs;
5422 if (!rhs_only && inv_code == lhs_code
5423 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5424 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5425 return rhs;
5426 if (rhs != orig_rhs || lhs != orig_lhs)
5427 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5428 lhs, rhs);
5429 return NULL_TREE;
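/* A stand-alone truth-table check of the simplification above
   (illustrative example_* name): in (A && B) || !B the inner B may be
   dropped, because when B is false the !B arm fires regardless, and
   when B is true both forms reduce to A.  */

static int
example_drop_opposite_arm (int a, int b)
{
  int full = (a && b) || !b;
  int dropped = a || !b;
  return full == dropped;	/* Always 1.  */
}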
5432 /* Find ways of folding logical expressions of LHS and RHS:
5433 Try to merge two comparisons to the same innermost item.
5434 Look for range tests like "ch >= '0' && ch <= '9'".
5435 Look for combinations of simple terms on machines with expensive branches
5436 and evaluate the RHS unconditionally.
5438 For example, if we have p->a == 2 && p->b == 4 and we can make an
5439 object large enough to span both A and B, we can do this with a comparison
5440 against the object ANDed with the a mask.
5442 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5443 operations to do this with one comparison.
5445 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5446 function and the one above.
5448 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5449 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5451 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5452 two operands.
5454 We return the simplified tree or 0 if no optimization is possible. */
5456 static tree
5457 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5458 tree lhs, tree rhs)
5460 /* If this is the "or" of two comparisons, we can do something if
5461 the comparisons are NE_EXPR. If this is the "and", we can do something
5462 if the comparisons are EQ_EXPR. I.e.,
5463 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5465 WANTED_CODE is this operation code. For single bit fields, we can
5466 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5467 comparison for one-bit fields. */
5469 enum tree_code wanted_code;
5470 enum tree_code lcode, rcode;
5471 tree ll_arg, lr_arg, rl_arg, rr_arg;
5472 tree ll_inner, lr_inner, rl_inner, rr_inner;
5473 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5474 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5475 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5476 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5477 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5478 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5479 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5480 machine_mode lnmode, rnmode;
5481 tree ll_mask, lr_mask, rl_mask, rr_mask;
5482 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5483 tree l_const, r_const;
5484 tree lntype, rntype, result;
5485 HOST_WIDE_INT first_bit, end_bit;
5486 int volatilep;
5488 /* Start by getting the comparison codes. Fail if anything is volatile.
5489 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5490 it were surrounded with a NE_EXPR. */
5492 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5493 return 0;
5495 lcode = TREE_CODE (lhs);
5496 rcode = TREE_CODE (rhs);
5498 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5500 lhs = build2 (NE_EXPR, truth_type, lhs,
5501 build_int_cst (TREE_TYPE (lhs), 0));
5502 lcode = NE_EXPR;
5505 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5507 rhs = build2 (NE_EXPR, truth_type, rhs,
5508 build_int_cst (TREE_TYPE (rhs), 0));
5509 rcode = NE_EXPR;
5512 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5513 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5514 return 0;
5516 ll_arg = TREE_OPERAND (lhs, 0);
5517 lr_arg = TREE_OPERAND (lhs, 1);
5518 rl_arg = TREE_OPERAND (rhs, 0);
5519 rr_arg = TREE_OPERAND (rhs, 1);
5521 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5522 if (simple_operand_p (ll_arg)
5523 && simple_operand_p (lr_arg))
5525 if (operand_equal_p (ll_arg, rl_arg, 0)
5526 && operand_equal_p (lr_arg, rr_arg, 0))
5528 result = combine_comparisons (loc, code, lcode, rcode,
5529 truth_type, ll_arg, lr_arg);
5530 if (result)
5531 return result;
5533 else if (operand_equal_p (ll_arg, rr_arg, 0)
5534 && operand_equal_p (lr_arg, rl_arg, 0))
5536 result = combine_comparisons (loc, code, lcode,
5537 swap_tree_comparison (rcode),
5538 truth_type, ll_arg, lr_arg);
5539 if (result)
5540 return result;
5544 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5545 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5547 /* If the RHS can be evaluated unconditionally and its operands are
5548 simple, it wins to evaluate the RHS unconditionally on machines
5549 with expensive branches. In this case, this isn't a comparison
5550 that can be merged. */
5552 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5553 false) >= 2
5554 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5555 && simple_operand_p (rl_arg)
5556 && simple_operand_p (rr_arg))
5558 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5559 if (code == TRUTH_OR_EXPR
5560 && lcode == NE_EXPR && integer_zerop (lr_arg)
5561 && rcode == NE_EXPR && integer_zerop (rr_arg)
5562 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5563 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5564 return build2_loc (loc, NE_EXPR, truth_type,
5565 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5566 ll_arg, rl_arg),
5567 build_int_cst (TREE_TYPE (ll_arg), 0));
5569 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5570 if (code == TRUTH_AND_EXPR
5571 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5572 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5573 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5574 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5575 return build2_loc (loc, EQ_EXPR, truth_type,
5576 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5577 ll_arg, rl_arg),
5578 build_int_cst (TREE_TYPE (ll_arg), 0));
5581 /* See if the comparisons can be merged. Then get all the parameters for
5582 each side. */
5584 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5585 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5586 return 0;
5588 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5589 volatilep = 0;
5590 ll_inner = decode_field_reference (loc, ll_arg,
5591 &ll_bitsize, &ll_bitpos, &ll_mode,
5592 &ll_unsignedp, &ll_reversep, &volatilep,
5593 &ll_mask, &ll_and_mask);
5594 lr_inner = decode_field_reference (loc, lr_arg,
5595 &lr_bitsize, &lr_bitpos, &lr_mode,
5596 &lr_unsignedp, &lr_reversep, &volatilep,
5597 &lr_mask, &lr_and_mask);
5598 rl_inner = decode_field_reference (loc, rl_arg,
5599 &rl_bitsize, &rl_bitpos, &rl_mode,
5600 &rl_unsignedp, &rl_reversep, &volatilep,
5601 &rl_mask, &rl_and_mask);
5602 rr_inner = decode_field_reference (loc, rr_arg,
5603 &rr_bitsize, &rr_bitpos, &rr_mode,
5604 &rr_unsignedp, &rr_reversep, &volatilep,
5605 &rr_mask, &rr_and_mask);
5607 /* The inner operation on the lhs of each comparison must be
5608 the same if we are to be able to do anything. Then see if
5609 we have constants. If not, the same must be true for the
5610 rhs's. */
5611 if (volatilep
5612 || ll_reversep != rl_reversep
5613 || ll_inner == 0 || rl_inner == 0
5614 || ! operand_equal_p (ll_inner, rl_inner, 0))
5615 return 0;
5617 if (TREE_CODE (lr_arg) == INTEGER_CST
5618 && TREE_CODE (rr_arg) == INTEGER_CST)
5620 l_const = lr_arg, r_const = rr_arg;
5621 lr_reversep = ll_reversep;
5623 else if (lr_reversep != rr_reversep
5624 || lr_inner == 0 || rr_inner == 0
5625 || ! operand_equal_p (lr_inner, rr_inner, 0))
5626 return 0;
5627 else
5628 l_const = r_const = 0;
5630 /* If either comparison code is not correct for our logical operation,
5631 fail. However, we can convert a one-bit comparison against zero into
5632 the opposite comparison against that bit being set in the field. */
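/* E.g. with WANTED_CODE == EQ_EXPR (a TRUTH_AND_EXPR), a subterm
   (x & 4) != 0 is treated as (x & 4) == 4 by substituting the mask
   for the constant below (illustrative). */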
5634 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5635 if (lcode != wanted_code)
5637 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5639 /* Make the left operand unsigned, since we are only interested
5640 in the value of one bit. Otherwise we are doing the wrong
5641 thing below. */
5642 ll_unsignedp = 1;
5643 l_const = ll_mask;
5645 else
5646 return 0;
5649 /* This is analogous to the code for l_const above. */
5650 if (rcode != wanted_code)
5652 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5654 rl_unsignedp = 1;
5655 r_const = rl_mask;
5657 else
5658 return 0;
5661 /* See if we can find a mode that contains both fields being compared on
5662 the left. If we can't, fail. Otherwise, update all constants and masks
5663 to be relative to a field of that size. */
5664 first_bit = MIN (ll_bitpos, rl_bitpos);
5665 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5666 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5667 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5668 volatilep);
5669 if (lnmode == VOIDmode)
5670 return 0;
5672 lnbitsize = GET_MODE_BITSIZE (lnmode);
5673 lnbitpos = first_bit & ~ (lnbitsize - 1);
5674 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5675 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5677 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5679 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5680 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
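/* For instance, with lnbitsize == 32 a 4-bit field at offset 8 from
   one end lands at 32 - 8 - 4 == 20 from the other end, which is what
   the adjustment above computes (illustrative). */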
5683 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5684 size_int (xll_bitpos));
5685 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5686 size_int (xrl_bitpos));
5688 if (l_const)
5690 l_const = fold_convert_loc (loc, lntype, l_const);
5691 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5692 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5693 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5694 fold_build1_loc (loc, BIT_NOT_EXPR,
5695 lntype, ll_mask))))
5697 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5699 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5702 if (r_const)
5704 r_const = fold_convert_loc (loc, lntype, r_const);
5705 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5706 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5707 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5708 fold_build1_loc (loc, BIT_NOT_EXPR,
5709 lntype, rl_mask))))
5711 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5713 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5717 /* If the right sides are not constant, do the same for them. Also,
5718 disallow this optimization if a size or signedness mismatch occurs
5719 between the left and right sides. */
5720 if (l_const == 0)
5722 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5723 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5724 /* Make sure the two fields on the right
5725 correspond to the left without being swapped. */
5726 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5727 return 0;
5729 first_bit = MIN (lr_bitpos, rr_bitpos);
5730 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5731 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5732 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5733 volatilep);
5734 if (rnmode == VOIDmode)
5735 return 0;
5737 rnbitsize = GET_MODE_BITSIZE (rnmode);
5738 rnbitpos = first_bit & ~ (rnbitsize - 1);
5739 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5740 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5742 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5744 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5745 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5748 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5749 rntype, lr_mask),
5750 size_int (xlr_bitpos));
5751 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5752 rntype, rr_mask),
5753 size_int (xrr_bitpos));
5755 /* Make a mask that corresponds to both fields being compared.
5756 Do this for both items being compared. If the operands are the
5757 same size and the bits being compared are in the same position
5758 then we can do this by masking both and comparing the masked
5759 results. */
5760 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5761 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5762 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5764 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5765 ll_unsignedp || rl_unsignedp, ll_reversep);
5766 if (! all_ones_mask_p (ll_mask, lnbitsize))
5767 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5769 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5770 lr_unsignedp || rr_unsignedp, lr_reversep);
5771 if (! all_ones_mask_p (lr_mask, rnbitsize))
5772 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5774 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5777 /* There is still another way we can do something: If both pairs of
5778 fields being compared are adjacent, we may be able to make a wider
5779 field containing them both.
5781 Note that we still must mask the lhs/rhs expressions. Furthermore,
5782 the mask must be shifted to account for the shift done by
5783 make_bit_field_ref. */
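/* E.g. fields at bits [0,8) and [8,16) of the same word satisfy
   ll_bitsize + ll_bitpos == rl_bitpos and can be fetched as a single
   16-bit field (illustrative). */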
5784 if ((ll_bitsize + ll_bitpos == rl_bitpos
5785 && lr_bitsize + lr_bitpos == rr_bitpos)
5786 || (ll_bitpos == rl_bitpos + rl_bitsize
5787 && lr_bitpos == rr_bitpos + rr_bitsize))
5789 tree type;
5791 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5792 ll_bitsize + rl_bitsize,
5793 MIN (ll_bitpos, rl_bitpos),
5794 ll_unsignedp, ll_reversep);
5795 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5796 lr_bitsize + rr_bitsize,
5797 MIN (lr_bitpos, rr_bitpos),
5798 lr_unsignedp, lr_reversep);
5800 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5801 size_int (MIN (xll_bitpos, xrl_bitpos)));
5802 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5803 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5805 /* Convert to the smaller type before masking out unwanted bits. */
5806 type = lntype;
5807 if (lntype != rntype)
5809 if (lnbitsize > rnbitsize)
5811 lhs = fold_convert_loc (loc, rntype, lhs);
5812 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5813 type = rntype;
5815 else if (lnbitsize < rnbitsize)
5817 rhs = fold_convert_loc (loc, lntype, rhs);
5818 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5819 type = lntype;
5823 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5824 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5826 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5827 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5829 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5832 return 0;
5835 /* Handle the case of comparisons with constants. If there is something in
5836 common between the masks, those bits of the constants must be the same.
5837 If not, the condition is always false. Test for this to avoid generating
5838 incorrect code below. */
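/* E.g. (x & 3) == 1 && (x & 3) == 2 can never both hold: the shared
   mask bits select disagreeing constant bits (illustrative). */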
5839 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5840 if (! integer_zerop (result)
5841 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5842 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5844 if (wanted_code == NE_EXPR)
5846 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5847 return constant_boolean_node (true, truth_type);
5849 else
5851 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5852 return constant_boolean_node (false, truth_type);
5856 /* Construct the expression we will return. First get the component
5857 reference we will make. Unless the mask is all ones the width of
5858 that field, perform the mask operation. Then compare with the
5859 merged constant. */
5860 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5861 ll_unsignedp || rl_unsignedp, ll_reversep);
5863 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5864 if (! all_ones_mask_p (ll_mask, lnbitsize))
5865 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5867 return build2_loc (loc, wanted_code, truth_type, result,
5868 const_binop (BIT_IOR_EXPR, l_const, r_const));
5871 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5872 constant. */
5874 static tree
5875 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5876 tree op0, tree op1)
5878 tree arg0 = op0;
5879 enum tree_code op_code;
5880 tree comp_const;
5881 tree minmax_const;
5882 int consts_equal, consts_lt;
5883 tree inner;
5885 STRIP_SIGN_NOPS (arg0);
5887 op_code = TREE_CODE (arg0);
5888 minmax_const = TREE_OPERAND (arg0, 1);
5889 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5890 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5891 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5892 inner = TREE_OPERAND (arg0, 0);
5894 /* If something does not permit us to optimize, return the original tree. */
5895 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5896 || TREE_CODE (comp_const) != INTEGER_CST
5897 || TREE_OVERFLOW (comp_const)
5898 || TREE_CODE (minmax_const) != INTEGER_CST
5899 || TREE_OVERFLOW (minmax_const))
5900 return NULL_TREE;
5902 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5903 and GT_EXPR, doing the rest with recursive calls using logical
5904 simplifications. */
5905 switch (code)
5907 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5909 tree tem
5910 = optimize_minmax_comparison (loc,
5911 invert_tree_comparison (code, false),
5912 type, op0, op1);
5913 if (tem)
5914 return invert_truthvalue_loc (loc, tem);
5915 return NULL_TREE;
5918 case GE_EXPR:
5919 return
5920 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5921 optimize_minmax_comparison
5922 (loc, EQ_EXPR, type, arg0, comp_const),
5923 optimize_minmax_comparison
5924 (loc, GT_EXPR, type, arg0, comp_const));
5926 case EQ_EXPR:
5927 if (op_code == MAX_EXPR && consts_equal)
5928 /* MAX (X, 0) == 0 -> X <= 0 */
5929 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5931 else if (op_code == MAX_EXPR && consts_lt)
5932 /* MAX (X, 0) == 5 -> X == 5 */
5933 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5935 else if (op_code == MAX_EXPR)
5936 /* MAX (X, 0) == -1 -> false */
5937 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5939 else if (consts_equal)
5940 /* MIN (X, 0) == 0 -> X >= 0 */
5941 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5943 else if (consts_lt)
5944 /* MIN (X, 0) == 5 -> false */
5945 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5947 else
5948 /* MIN (X, 0) == -1 -> X == -1 */
5949 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5951 case GT_EXPR:
5952 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5953 /* MAX (X, 0) > 0 -> X > 0
5954 MAX (X, 0) > 5 -> X > 5 */
5955 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5957 else if (op_code == MAX_EXPR)
5958 /* MAX (X, 0) > -1 -> true */
5959 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5961 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5962 /* MIN (X, 0) > 0 -> false
5963 MIN (X, 0) > 5 -> false */
5964 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5966 else
5967 /* MIN (X, 0) > -1 -> X > -1 */
5968 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5970 default:
5971 return NULL_TREE;
5975 /* T is an integer expression that is being multiplied or divided by, or
5976 taken modulo, a constant C (CODE says which operation and what kind of
5977 divide or modulus). See if we can eliminate that operation by folding it with
5978 other operations already in T. WIDE_TYPE, if non-null, is a type that
5979 should be used for the computation if wider than our type.
5981 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5982 (X * 2) + (Y * 4). We must, however, be assured that either the original
5983 expression would not overflow or that overflow is undefined for the type
5984 in the language in question.
5986 If we return a non-null expression, it is an equivalent form of the
5987 original computation, but need not be in the original type.
5989 We set *STRICT_OVERFLOW_P to true if the return value depends on
5990 signed overflow being undefined. Otherwise we do not change
5991 *STRICT_OVERFLOW_P. */
5993 static tree
5994 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5995 bool *strict_overflow_p)
5997 /* To avoid exponential search depth, refuse to allow recursion past
5998 three levels. Beyond that (1) it's highly unlikely that we'll find
5999 something interesting and (2) we've probably processed it before
6000 when we built the inner expression. */
6002 static int depth;
6003 tree ret;
6005 if (depth > 3)
6006 return NULL;
6008 depth++;
6009 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6010 depth--;
6012 return ret;
6015 static tree
6016 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6017 bool *strict_overflow_p)
6019 tree type = TREE_TYPE (t);
6020 enum tree_code tcode = TREE_CODE (t);
6021 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6022 > GET_MODE_SIZE (TYPE_MODE (type)))
6023 ? wide_type : type);
6024 tree t1, t2;
6025 int same_p = tcode == code;
6026 tree op0 = NULL_TREE, op1 = NULL_TREE;
6027 bool sub_strict_overflow_p;
6029 /* Don't deal with constants of zero here; they confuse the code below. */
6030 if (integer_zerop (c))
6031 return NULL_TREE;
6033 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6034 op0 = TREE_OPERAND (t, 0);
6036 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6037 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6039 /* Note that we need not handle conditional operations here since fold
6040 already handles those cases. So just do arithmetic here. */
6041 switch (tcode)
6043 case INTEGER_CST:
6044 /* For a constant, we can always simplify if we are a multiply
6045 or (for divide and modulus) if it is a multiple of our constant. */
6046 if (code == MULT_EXPR
6047 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6049 tree tem = const_binop (code, fold_convert (ctype, t),
6050 fold_convert (ctype, c));
6051 /* If the multiplication overflowed to INT_MIN then we lost sign
6052 information on it and a subsequent multiplication might
6053 spuriously overflow. See PR68142. */
6054 if (TREE_OVERFLOW (tem)
6055 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6056 return NULL_TREE;
6057 return tem;
6059 break;
6061 CASE_CONVERT: case NON_LVALUE_EXPR:
6062 /* If op0 is an expression ... */
6063 if ((COMPARISON_CLASS_P (op0)
6064 || UNARY_CLASS_P (op0)
6065 || BINARY_CLASS_P (op0)
6066 || VL_EXP_CLASS_P (op0)
6067 || EXPRESSION_CLASS_P (op0))
6068 /* ... and has wrapping overflow, and its type is smaller
6069 than ctype, then we cannot pass through as widening. */
6070 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6071 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6072 && (TYPE_PRECISION (ctype)
6073 > TYPE_PRECISION (TREE_TYPE (op0))))
6074 /* ... or this is a truncation (t is narrower than op0),
6075 then we cannot pass through this narrowing. */
6076 || (TYPE_PRECISION (type)
6077 < TYPE_PRECISION (TREE_TYPE (op0)))
6078 /* ... or signedness changes for division or modulus,
6079 then we cannot pass through this conversion. */
6080 || (code != MULT_EXPR
6081 && (TYPE_UNSIGNED (ctype)
6082 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6083 /* ... or has undefined overflow while the converted to
6084 type has not, we cannot do the operation in the inner type
6085 as that would introduce undefined overflow. */
6086 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6088 && !TYPE_OVERFLOW_UNDEFINED (type))))
6089 break;
6091 /* Pass the constant down and see if we can make a simplification. If
6092 we can, replace this expression with the inner simplification for
6093 possible later conversion to our or some other type. */
6094 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6095 && TREE_CODE (t2) == INTEGER_CST
6096 && !TREE_OVERFLOW (t2)
6097 && (0 != (t1 = extract_muldiv (op0, t2, code,
6098 code == MULT_EXPR
6099 ? ctype : NULL_TREE,
6100 strict_overflow_p))))
6101 return t1;
6102 break;
6104 case ABS_EXPR:
6105 /* If widening the type changes it from signed to unsigned, then we
6106 must avoid building ABS_EXPR itself as unsigned. */
6107 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6109 tree cstype = (*signed_type_for) (ctype);
6110 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6111 != 0)
6113 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6114 return fold_convert (ctype, t1);
6116 break;
6118 /* If the constant is negative, we cannot simplify this. */
6119 if (tree_int_cst_sgn (c) == -1)
6120 break;
6121 /* FALLTHROUGH */
6122 case NEGATE_EXPR:
6123 /* For division and modulus, type can't be unsigned, as e.g.
6124 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6125 For signed types, even with wrapping overflow, this is fine. */
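/* Worked check (illustrative), 32-bit unsigned, x == 2:
   -(x / 2U) / 2U == 0xFFFFFFFFU / 2U == 0x7FFFFFFF, whereas
   -((x / 2U) / 2U) == -(0) == 0. */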
6126 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6127 break;
6128 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6129 != 0)
6130 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6131 break;
6133 case MIN_EXPR: case MAX_EXPR:
6134 /* If widening the type changes the signedness, then we can't perform
6135 this optimization as that changes the result. */
6136 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6137 break;
6139 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
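/* When C is negative the sense flips, e.g. MIN (a, b) * -2 becomes
   MAX (a * -2, b * -2); the sign check below swaps TCODE accordingly
   (illustrative). */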
6140 sub_strict_overflow_p = false;
6141 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6142 &sub_strict_overflow_p)) != 0
6143 && (t2 = extract_muldiv (op1, c, code, wide_type,
6144 &sub_strict_overflow_p)) != 0)
6146 if (tree_int_cst_sgn (c) < 0)
6147 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6148 if (sub_strict_overflow_p)
6149 *strict_overflow_p = true;
6150 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6151 fold_convert (ctype, t2));
6153 break;
6155 case LSHIFT_EXPR: case RSHIFT_EXPR:
6156 /* If the second operand is constant, this is a multiplication
6157 or floor division, by a power of two, so we can treat it that
6158 way unless the multiplier or divisor overflows. Signed
6159 left-shift overflow is implementation-defined rather than
6160 undefined in C90, so do not convert signed left shift into
6161 multiplication. */
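/* E.g. (x >> 3) / 5 is handled here as (x / 8) / 5, and an unsigned
   (x << 2) * 3 as (x * 4) * 3 (illustrative). */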
6162 if (TREE_CODE (op1) == INTEGER_CST
6163 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6164 /* const_binop may not detect overflow correctly,
6165 so check for it explicitly here. */
6166 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6167 && 0 != (t1 = fold_convert (ctype,
6168 const_binop (LSHIFT_EXPR,
6169 size_one_node,
6170 op1)))
6171 && !TREE_OVERFLOW (t1))
6172 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6173 ? MULT_EXPR : FLOOR_DIV_EXPR,
6174 ctype,
6175 fold_convert (ctype, op0),
6176 t1),
6177 c, code, wide_type, strict_overflow_p);
6178 break;
6180 case PLUS_EXPR: case MINUS_EXPR:
6181 /* See if we can eliminate the operation on both sides. If we can, we
6182 can return a new PLUS or MINUS. If we can't, the only remaining
6183 cases where we can do anything are if the second operand is a
6184 constant. */
6185 sub_strict_overflow_p = false;
6186 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6187 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6188 if (t1 != 0 && t2 != 0
6189 && (code == MULT_EXPR
6190 /* If not multiplication, we can only do this if both operands
6191 are divisible by c. */
6192 || (multiple_of_p (ctype, op0, c)
6193 && multiple_of_p (ctype, op1, c))))
6195 if (sub_strict_overflow_p)
6196 *strict_overflow_p = true;
6197 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6198 fold_convert (ctype, t2));
6201 /* If this was a subtraction, negate OP1 and set it to be an addition.
6202 This simplifies the logic below. */
6203 if (tcode == MINUS_EXPR)
6205 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6206 /* If OP1 was not easily negatable, the constant may be OP0. */
6207 if (TREE_CODE (op0) == INTEGER_CST)
6209 std::swap (op0, op1);
6210 std::swap (t1, t2);
6214 if (TREE_CODE (op1) != INTEGER_CST)
6215 break;
6217 /* If either OP1 or C is negative, this optimization is not safe for
6218 some of the division and remainder types while for others we need
6219 to change the code. */
6220 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6222 if (code == CEIL_DIV_EXPR)
6223 code = FLOOR_DIV_EXPR;
6224 else if (code == FLOOR_DIV_EXPR)
6225 code = CEIL_DIV_EXPR;
6226 else if (code != MULT_EXPR
6227 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6228 break;
6231 /* If it's a multiply or a division/modulus operation of a multiple
6232 of our constant, do the operation and verify it doesn't overflow. */
6233 if (code == MULT_EXPR
6234 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6236 op1 = const_binop (code, fold_convert (ctype, op1),
6237 fold_convert (ctype, c));
6238 /* We allow the constant to overflow with wrapping semantics. */
6239 if (op1 == 0
6240 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6241 break;
6243 else
6244 break;
6246 /* If we have an unsigned type, we cannot widen the operation since it
6247 will change the result if the original computation overflowed. */
6248 if (TYPE_UNSIGNED (ctype) && ctype != type)
6249 break;
6251 /* If we were able to eliminate our operation from the first side,
6252 apply our operation to the second side and reform the PLUS. */
6253 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6254 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6256 /* The last case is if we are a multiply. In that case, we can
6257 apply the distributive law to commute the multiply and addition
6258 if the multiplication of the constants doesn't overflow
6259 and overflow is defined. With undefined overflow
6260 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6261 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6262 return fold_build2 (tcode, ctype,
6263 fold_build2 (code, ctype,
6264 fold_convert (ctype, op0),
6265 fold_convert (ctype, c)),
6266 op1);
6268 break;
6270 case MULT_EXPR:
6271 /* We have a special case here if we are doing something like
6272 (C * 8) % 4 since we know that's zero. */
6273 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6274 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6275 /* If the multiplication can overflow we cannot optimize this. */
6276 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6277 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6278 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6280 *strict_overflow_p = true;
6281 return omit_one_operand (type, integer_zero_node, op0);
6284 /* ... fall through ... */
6286 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6287 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6288 /* If we can extract our operation from the LHS, do so and return a
6289 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6290 do something only if the second operand is a constant. */
6291 if (same_p
6292 && (t1 = extract_muldiv (op0, c, code, wide_type,
6293 strict_overflow_p)) != 0)
6294 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6295 fold_convert (ctype, op1));
6296 else if (tcode == MULT_EXPR && code == MULT_EXPR
6297 && (t1 = extract_muldiv (op1, c, code, wide_type,
6298 strict_overflow_p)) != 0)
6299 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6300 fold_convert (ctype, t1));
6301 else if (TREE_CODE (op1) != INTEGER_CST)
6302 return 0;
6304 /* If these are the same operation types, we can associate them
6305 assuming no overflow. */
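/* E.g. (x * 5) * 3 associates to x * 15 and (x / 4) / 2 to x / 8,
   provided the constant product does not overflow (illustrative). */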
6306 if (tcode == code)
6308 bool overflow_p = false;
6309 bool overflow_mul_p;
6310 signop sign = TYPE_SIGN (ctype);
6311 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6312 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6313 if (overflow_mul_p
6314 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6315 overflow_p = true;
6316 if (!overflow_p)
6318 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6319 TYPE_SIGN (TREE_TYPE (op1)));
6320 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6321 wide_int_to_tree (ctype, mul));
6325 /* If these operations "cancel" each other, we have the main
6326 optimizations of this pass, which occur when either constant is a
6327 multiple of the other, in which case we replace this with an
6328 operation of either CODE or TCODE.
6330 If we have an unsigned type, we cannot do this since it will change
6331 the result if the original computation overflowed. */
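/* E.g. (x * 8) / 4 cancels to x * 2 and (x * 4) / 8 to x / 2 when
   signed overflow is undefined (illustrative). */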
6332 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6333 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6334 || (tcode == MULT_EXPR
6335 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6336 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6337 && code != MULT_EXPR)))
6339 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6341 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6342 *strict_overflow_p = true;
6343 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6344 fold_convert (ctype,
6345 const_binop (TRUNC_DIV_EXPR,
6346 op1, c)));
6348 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6350 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6351 *strict_overflow_p = true;
6352 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6353 fold_convert (ctype,
6354 const_binop (TRUNC_DIV_EXPR,
6355 c, op1)));
6358 break;
6360 default:
6361 break;
6364 return 0;
6367 /* Return a node which has the indicated constant VALUE (either 0 or
6368 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6369 and is of the indicated TYPE. */
6371 tree
6372 constant_boolean_node (bool value, tree type)
6374 if (type == integer_type_node)
6375 return value ? integer_one_node : integer_zero_node;
6376 else if (type == boolean_type_node)
6377 return value ? boolean_true_node : boolean_false_node;
6378 else if (TREE_CODE (type) == VECTOR_TYPE)
6379 return build_vector_from_val (type,
6380 build_int_cst (TREE_TYPE (type),
6381 value ? -1 : 0));
6382 else
6383 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6387 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6388 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6389 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6390 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6391 COND is the first argument to CODE; otherwise (as in the example
6392 given here), it is the second argument. TYPE is the type of the
6393 original expression. Return NULL_TREE if no simplification is
6394 possible. */
6396 static tree
6397 fold_binary_op_with_conditional_arg (location_t loc,
6398 enum tree_code code,
6399 tree type, tree op0, tree op1,
6400 tree cond, tree arg, int cond_first_p)
6402 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6403 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6404 tree test, true_value, false_value;
6405 tree lhs = NULL_TREE;
6406 tree rhs = NULL_TREE;
6407 enum tree_code cond_code = COND_EXPR;
6409 if (TREE_CODE (cond) == COND_EXPR
6410 || TREE_CODE (cond) == VEC_COND_EXPR)
6412 test = TREE_OPERAND (cond, 0);
6413 true_value = TREE_OPERAND (cond, 1);
6414 false_value = TREE_OPERAND (cond, 2);
6415 /* If this operand is an expression that throws, then it does not make
6416 sense to try to perform a logical or arithmetic operation
6417 involving it. */
6418 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6419 lhs = true_value;
6420 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6421 rhs = false_value;
6423 else
6425 tree testtype = TREE_TYPE (cond);
6426 test = cond;
6427 true_value = constant_boolean_node (true, testtype);
6428 false_value = constant_boolean_node (false, testtype);
6431 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6432 cond_code = VEC_COND_EXPR;
6434 /* This transformation is only worthwhile if we don't have to wrap ARG
6435 in a SAVE_EXPR and the operation can be simplified without recursing
6436 on at least one of the branches once it's pushed inside the COND_EXPR. */
6437 if (!TREE_CONSTANT (arg)
6438 && (TREE_SIDE_EFFECTS (arg)
6439 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6440 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6441 return NULL_TREE;
6443 arg = fold_convert_loc (loc, arg_type, arg);
6444 if (lhs == 0)
6446 true_value = fold_convert_loc (loc, cond_type, true_value);
6447 if (cond_first_p)
6448 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6449 else
6450 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6452 if (rhs == 0)
6454 false_value = fold_convert_loc (loc, cond_type, false_value);
6455 if (cond_first_p)
6456 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6457 else
6458 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6461 /* Check that we have simplified at least one of the branches. */
6462 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6463 return NULL_TREE;
6465 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6469 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6471 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6472 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6473 ADDEND is the same as X.
6475 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6476 and finite. The problematic cases are when X is zero, and its mode
6477 has signed zeros. In the case of rounding towards -infinity,
6478 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6479 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6481 bool
6482 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6484 if (!real_zerop (addend))
6485 return false;
6487 /* Don't allow the fold with -fsignaling-nans. */
6488 if (HONOR_SNANS (element_mode (type)))
6489 return false;
6491 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6492 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6493 return true;
6495 /* In a vector or complex, we would need to check the sign of all zeros. */
6496 if (TREE_CODE (addend) != REAL_CST)
6497 return false;
6499 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6500 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6501 negate = !negate;
6503 /* The mode has signed zeros, and we have to honor their sign.
6504 In this situation, there is only one case we can return true for.
6505 X - 0 is the same as X unless rounding towards -infinity is
6506 supported. */
6507 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6510 /* Subroutine of fold() that optimizes comparisons of a division by
6511 a nonzero integer constant against an integer constant, i.e.
6512 X/C1 op C2.
6514 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6515 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6516 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6518 The function returns the constant folded tree if a simplification
6519 can be made, and NULL_TREE otherwise. */
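/* For example (illustrative): with unsigned X, the test X / 3 == 2
   yields lo == 6 and hi == 8 below and becomes the range check
   6 <= X && X <= 8 via build_range_check. */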
6521 static tree
6522 fold_div_compare (location_t loc,
6523 enum tree_code code, tree type, tree arg0, tree arg1)
6525 tree prod, tmp, hi, lo;
6526 tree arg00 = TREE_OPERAND (arg0, 0);
6527 tree arg01 = TREE_OPERAND (arg0, 1);
6528 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6529 bool neg_overflow = false;
6530 bool overflow;
6532 /* We have to do this the hard way to detect unsigned overflow.
6533 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6534 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6535 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6536 neg_overflow = false;
6538 if (sign == UNSIGNED)
6540 tmp = int_const_binop (MINUS_EXPR, arg01,
6541 build_int_cst (TREE_TYPE (arg01), 1));
6542 lo = prod;
6544 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6545 val = wi::add (prod, tmp, sign, &overflow);
6546 hi = force_fit_type (TREE_TYPE (arg00), val,
6547 -1, overflow | TREE_OVERFLOW (prod));
6549 else if (tree_int_cst_sgn (arg01) >= 0)
6551 tmp = int_const_binop (MINUS_EXPR, arg01,
6552 build_int_cst (TREE_TYPE (arg01), 1));
6553 switch (tree_int_cst_sgn (arg1))
6555 case -1:
6556 neg_overflow = true;
6557 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6558 hi = prod;
6559 break;
6561 case 0:
6562 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6563 hi = tmp;
6564 break;
6566 case 1:
6567 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6568 lo = prod;
6569 break;
6571 default:
6572 gcc_unreachable ();
6575 else
6577 /* A negative divisor reverses the relational operators. */
6578 code = swap_tree_comparison (code);
6580 tmp = int_const_binop (PLUS_EXPR, arg01,
6581 build_int_cst (TREE_TYPE (arg01), 1));
6582 switch (tree_int_cst_sgn (arg1))
6584 case -1:
6585 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6586 lo = prod;
6587 break;
6589 case 0:
6590 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6591 lo = tmp;
6592 break;
6594 case 1:
6595 neg_overflow = true;
6596 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6597 hi = prod;
6598 break;
6600 default:
6601 gcc_unreachable ();
6605 switch (code)
6607 case EQ_EXPR:
6608 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6609 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6610 if (TREE_OVERFLOW (hi))
6611 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6612 if (TREE_OVERFLOW (lo))
6613 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6614 return build_range_check (loc, type, arg00, 1, lo, hi);
6616 case NE_EXPR:
6617 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6618 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6619 if (TREE_OVERFLOW (hi))
6620 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6621 if (TREE_OVERFLOW (lo))
6622 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6623 return build_range_check (loc, type, arg00, 0, lo, hi);
6625 case LT_EXPR:
6626 if (TREE_OVERFLOW (lo))
6628 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6629 return omit_one_operand_loc (loc, type, tmp, arg00);
6631 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6633 case LE_EXPR:
6634 if (TREE_OVERFLOW (hi))
6636 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6637 return omit_one_operand_loc (loc, type, tmp, arg00);
6639 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6641 case GT_EXPR:
6642 if (TREE_OVERFLOW (hi))
6644 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6645 return omit_one_operand_loc (loc, type, tmp, arg00);
6647 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6649 case GE_EXPR:
6650 if (TREE_OVERFLOW (lo))
6652 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6653 return omit_one_operand_loc (loc, type, tmp, arg00);
6655 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6657 default:
6658 break;
6661 return NULL_TREE;
6665 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6666 equality/inequality test, then return a simplified form of the test
6667 using a sign test. Otherwise return NULL. TYPE is the desired
6668 result type. */
6670 static tree
6671 fold_single_bit_test_into_sign_test (location_t loc,
6672 enum tree_code code, tree arg0, tree arg1,
6673 tree result_type)
6675 /* If this is testing a single bit, we can optimize the test. */
6676 if ((code == NE_EXPR || code == EQ_EXPR)
6677 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6678 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6680 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6681 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6682 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6684 if (arg00 != NULL_TREE
6685 /* This is only a win if casting to a signed type is cheap,
6686 i.e. when arg00's type is not a partial mode. */
6687 && TYPE_PRECISION (TREE_TYPE (arg00))
6688 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6690 tree stype = signed_type_for (TREE_TYPE (arg00));
6691 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6692 result_type,
6693 fold_convert_loc (loc, stype, arg00),
6694 build_int_cst (stype, 0));
6698 return NULL_TREE;
6701 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6702 equality/inequality test, then return a simplified form of
6703 the test using shifts and logical operations. Otherwise return
6704 NULL. TYPE is the desired result type. */
6706 tree
6707 fold_single_bit_test (location_t loc, enum tree_code code,
6708 tree arg0, tree arg1, tree result_type)
6710 /* If this is testing a single bit, we can optimize the test. */
6711 if ((code == NE_EXPR || code == EQ_EXPR)
6712 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6713 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6715 tree inner = TREE_OPERAND (arg0, 0);
6716 tree type = TREE_TYPE (arg0);
6717 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6718 machine_mode operand_mode = TYPE_MODE (type);
6719 int ops_unsigned;
6720 tree signed_type, unsigned_type, intermediate_type;
6721 tree tem, one;
6723 /* First, see if we can fold the single bit test into a sign-bit
6724 test. */
6725 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6726 result_type);
6727 if (tem)
6728 return tem;
6730 /* Otherwise we have (A & C) != 0 where C is a single bit,
6731 convert that into ((A >> C2) & 1), where C2 = log2(C).
6732 Similarly for (A & C) == 0. */
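/* E.g. (x & 8) != 0 becomes ((x >> 3) & 1) and (x & 8) == 0 becomes
   (((x >> 3) ^ 1) & 1); the XOR is inserted just before the final
   AND below (illustrative). */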
6734 /* If INNER is a right shift of a constant and it plus BITNUM does
6735 not overflow, adjust BITNUM and INNER. */
6736 if (TREE_CODE (inner) == RSHIFT_EXPR
6737 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6738 && bitnum < TYPE_PRECISION (type)
6739 && wi::ltu_p (TREE_OPERAND (inner, 1),
6740 TYPE_PRECISION (type) - bitnum))
6742 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6743 inner = TREE_OPERAND (inner, 0);
6746 /* If we are going to be able to omit the AND below, we must do our
6747 operations as unsigned. If we must use the AND, we have a choice.
6748 Normally unsigned is faster, but for some machines signed is. */
6749 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6750 && !flag_syntax_only) ? 0 : 1;
6752 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6753 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6754 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6755 inner = fold_convert_loc (loc, intermediate_type, inner);
6757 if (bitnum != 0)
6758 inner = build2 (RSHIFT_EXPR, intermediate_type,
6759 inner, size_int (bitnum));
6761 one = build_int_cst (intermediate_type, 1);
6763 if (code == EQ_EXPR)
6764 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6766 /* Put the AND last so it can combine with more things. */
6767 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6769 /* Make sure to return the proper type. */
6770 inner = fold_convert_loc (loc, result_type, inner);
6772 return inner;
6774 return NULL_TREE;
6777 /* Check whether we are allowed to reorder operands arg0 and arg1,
6778 such that the evaluation of arg1 occurs before arg0. */
6780 static bool
6781 reorder_operands_p (const_tree arg0, const_tree arg1)
6783 if (! flag_evaluation_order)
6784 return true;
6785 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6786 return true;
6787 return ! TREE_SIDE_EFFECTS (arg0)
6788 && ! TREE_SIDE_EFFECTS (arg1);
6791 /* Test whether it is preferable to swap two operands, ARG0 and
6792 ARG1, for example because ARG0 is an integer constant and ARG1
6793 isn't. If REORDER is true, only recommend swapping if we can
6794 evaluate the operands in reverse order. */
6796 bool
6797 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6799 if (CONSTANT_CLASS_P (arg1))
6800 return 0;
6801 if (CONSTANT_CLASS_P (arg0))
6802 return 1;
6804 STRIP_NOPS (arg0);
6805 STRIP_NOPS (arg1);
6807 if (TREE_CONSTANT (arg1))
6808 return 0;
6809 if (TREE_CONSTANT (arg0))
6810 return 1;
6812 if (reorder && flag_evaluation_order
6813 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6814 return 0;
6816 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6817 for commutative and comparison operators. Ensuring a canonical
6818 form allows the optimizers to find additional redundancies without
6819 having to explicitly check for both orderings. */
6820 if (TREE_CODE (arg0) == SSA_NAME
6821 && TREE_CODE (arg1) == SSA_NAME
6822 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6823 return 1;
6825 /* Put SSA_NAMEs last. */
6826 if (TREE_CODE (arg1) == SSA_NAME)
6827 return 0;
6828 if (TREE_CODE (arg0) == SSA_NAME)
6829 return 1;
6831 /* Put variables last. */
6832 if (DECL_P (arg1))
6833 return 0;
6834 if (DECL_P (arg0))
6835 return 1;
6837 return 0;
6841 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6842 means A >= Y && A != MAX, but in this case we know that
6843 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6845 static tree
6846 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6848 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6850 if (TREE_CODE (bound) == LT_EXPR)
6851 a = TREE_OPERAND (bound, 0);
6852 else if (TREE_CODE (bound) == GT_EXPR)
6853 a = TREE_OPERAND (bound, 1);
6854 else
6855 return NULL_TREE;
6857 typea = TREE_TYPE (a);
6858 if (!INTEGRAL_TYPE_P (typea)
6859 && !POINTER_TYPE_P (typea))
6860 return NULL_TREE;
6862 if (TREE_CODE (ineq) == LT_EXPR)
6864 a1 = TREE_OPERAND (ineq, 1);
6865 y = TREE_OPERAND (ineq, 0);
6867 else if (TREE_CODE (ineq) == GT_EXPR)
6869 a1 = TREE_OPERAND (ineq, 0);
6870 y = TREE_OPERAND (ineq, 1);
6872 else
6873 return NULL_TREE;
6875 if (TREE_TYPE (a1) != typea)
6876 return NULL_TREE;
6878 if (POINTER_TYPE_P (typea))
6880 /* Convert the pointer types into integer before taking the difference. */
6881 tree ta = fold_convert_loc (loc, ssizetype, a);
6882 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6883 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6885 else
6886 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6888 if (!diff || !integer_onep (diff))
6889 return NULL_TREE;
6891 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6894 /* Fold a sum or difference of at least one multiplication.
6895 Returns the folded tree or NULL if no simplification could be made. */
6897 static tree
6898 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6899 tree arg0, tree arg1)
6901 tree arg00, arg01, arg10, arg11;
6902 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6904 /* (A * C) +- (B * C) -> (A+-B) * C.
6905 (A * C) +- A -> A * (C+-1).
6906 We are most concerned about the case where C is a constant,
6907 but other combinations show up during loop reduction. Since
6908 it is not difficult, try all four possibilities. */
6910 if (TREE_CODE (arg0) == MULT_EXPR)
6912 arg00 = TREE_OPERAND (arg0, 0);
6913 arg01 = TREE_OPERAND (arg0, 1);
6915 else if (TREE_CODE (arg0) == INTEGER_CST)
6917 arg00 = build_one_cst (type);
6918 arg01 = arg0;
6920 else
6922 /* We cannot generate constant 1 for fract. */
6923 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6924 return NULL_TREE;
6925 arg00 = arg0;
6926 arg01 = build_one_cst (type);
6928 if (TREE_CODE (arg1) == MULT_EXPR)
6930 arg10 = TREE_OPERAND (arg1, 0);
6931 arg11 = TREE_OPERAND (arg1, 1);
6933 else if (TREE_CODE (arg1) == INTEGER_CST)
6935 arg10 = build_one_cst (type);
6936 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6937 the purpose of this canonicalization. */
6938 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6939 && negate_expr_p (arg1)
6940 && code == PLUS_EXPR)
6942 arg11 = negate_expr (arg1);
6943 code = MINUS_EXPR;
6945 else
6946 arg11 = arg1;
6948 else
6950 /* We cannot generate constant 1 for fract. */
6951 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6952 return NULL_TREE;
6953 arg10 = arg1;
6954 arg11 = build_one_cst (type);
6956 same = NULL_TREE;
6958 if (operand_equal_p (arg01, arg11, 0))
6959 same = arg01, alt0 = arg00, alt1 = arg10;
6960 else if (operand_equal_p (arg00, arg10, 0))
6961 same = arg00, alt0 = arg01, alt1 = arg11;
6962 else if (operand_equal_p (arg00, arg11, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg10;
6964 else if (operand_equal_p (arg01, arg10, 0))
6965 same = arg01, alt0 = arg00, alt1 = arg11;
6967 /* No identical multiplicands; see if we can find a common
6968 power-of-two factor in non-power-of-two multiplies. This
6969 can help in multi-dimensional array access. */
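/* E.g. i * 12 + j * 4 can be rewritten as (i * 3 + j) * 4, exposing
   the common power-of-two factor (illustrative). */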
6970 else if (tree_fits_shwi_p (arg01)
6971 && tree_fits_shwi_p (arg11))
6973 HOST_WIDE_INT int01, int11, tmp;
6974 bool swap = false;
6975 tree maybe_same;
6976 int01 = tree_to_shwi (arg01);
6977 int11 = tree_to_shwi (arg11);
6979 /* Move min of absolute values to int11. */
6980 if (absu_hwi (int01) < absu_hwi (int11))
6982 tmp = int01, int01 = int11, int11 = tmp;
6983 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6984 maybe_same = arg01;
6985 swap = true;
6987 else
6988 maybe_same = arg11;
6990 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6991 /* The remainder should not be a constant, otherwise we
6992 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6993 increased the number of multiplications necessary. */
6994 && TREE_CODE (arg10) != INTEGER_CST)
6996 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6997 build_int_cst (TREE_TYPE (arg00),
6998 int01 / int11));
6999 alt1 = arg10;
7000 same = maybe_same;
7001 if (swap)
7002 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7006 if (same)
7007 return fold_build2_loc (loc, MULT_EXPR, type,
7008 fold_build2_loc (loc, code, type,
7009 fold_convert_loc (loc, type, alt0),
7010 fold_convert_loc (loc, type, alt1)),
7011 fold_convert_loc (loc, type, same));
7013 return NULL_TREE;
7016 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7017 specified by EXPR into the buffer PTR of length LEN bytes.
7018 Return the number of bytes placed in the buffer, or zero
7019 upon failure. */
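/* E.g. (illustrative) the 32-bit INTEGER_CST 0x11223344 is encoded
   as the bytes {0x44, 0x33, 0x22, 0x11} on a little-endian target and
   {0x11, 0x22, 0x33, 0x44} on a big-endian one. */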
7021 static int
7022 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7024 tree type = TREE_TYPE (expr);
7025 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7026 int byte, offset, word, words;
7027 unsigned char value;
7029 if ((off == -1 && total_bytes > len)
7030 || off >= total_bytes)
7031 return 0;
7032 if (off == -1)
7033 off = 0;
7034 words = total_bytes / UNITS_PER_WORD;
7036 for (byte = 0; byte < total_bytes; byte++)
7038 int bitpos = byte * BITS_PER_UNIT;
7039 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7040 number of bytes. */
7041 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7043 if (total_bytes > UNITS_PER_WORD)
7045 word = byte / UNITS_PER_WORD;
7046 if (WORDS_BIG_ENDIAN)
7047 word = (words - 1) - word;
7048 offset = word * UNITS_PER_WORD;
7049 if (BYTES_BIG_ENDIAN)
7050 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7051 else
7052 offset += byte % UNITS_PER_WORD;
7054 else
7055 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7056 if (offset >= off
7057 && offset - off < len)
7058 ptr[offset - off] = value;
7060 return MIN (len, total_bytes - off);
7064 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7065 specified by EXPR into the buffer PTR of length LEN bytes.
7066 Return the number of bytes placed in the buffer, or zero
7067 upon failure. */
7069 static int
7070 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7072 tree type = TREE_TYPE (expr);
7073 machine_mode mode = TYPE_MODE (type);
7074 int total_bytes = GET_MODE_SIZE (mode);
7075 FIXED_VALUE_TYPE value;
7076 tree i_value, i_type;
7078 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7079 return 0;
7081 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7083 if (NULL_TREE == i_type
7084 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7085 return 0;
7087 value = TREE_FIXED_CST (expr);
7088 i_value = double_int_to_tree (i_type, value.data);
7090 return native_encode_int (i_value, ptr, len, off);
7094 /* Subroutine of native_encode_expr. Encode the REAL_CST
7095 specified by EXPR into the buffer PTR of length LEN bytes.
7096 Return the number of bytes placed in the buffer, or zero
7097 upon failure. */
7099 static int
7100 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7102 tree type = TREE_TYPE (expr);
7103 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7104 int byte, offset, word, words, bitpos;
7105 unsigned char value;
7107 /* There are always 32 bits in each long, no matter the size of
7108 the host's long. We handle floating point representations with
7109 up to 192 bits. */
7110 long tmp[6];
7112 if ((off == -1 && total_bytes > len)
7113 || off >= total_bytes)
7114 return 0;
7115 if (off == -1)
7116 off = 0;
7117 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7119 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7121 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7122 bitpos += BITS_PER_UNIT)
7124 byte = (bitpos / BITS_PER_UNIT) & 3;
7125 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7127 if (UNITS_PER_WORD < 4)
7129 word = byte / UNITS_PER_WORD;
7130 if (WORDS_BIG_ENDIAN)
7131 word = (words - 1) - word;
7132 offset = word * UNITS_PER_WORD;
7133 if (BYTES_BIG_ENDIAN)
7134 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7135 else
7136 offset += byte % UNITS_PER_WORD;
7138 else
7139 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7140 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7141 if (offset >= off
7142 && offset - off < len)
7143 ptr[offset - off] = value;
7145 return MIN (len, total_bytes - off);
7148 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7151 upon failure. */
7153 static int
7154 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7156 int rsize, isize;
7157 tree part;
7159 part = TREE_REALPART (expr);
7160 rsize = native_encode_expr (part, ptr, len, off);
7161 if (off == -1
7162 && rsize == 0)
7163 return 0;
7164 part = TREE_IMAGPART (expr);
7165 if (off != -1)
7166 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7167 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7168 if (off == -1
7169 && isize != rsize)
7170 return 0;
7171 return rsize + isize;
7175 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7178 upon failure. */
7180 static int
7181 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7183 unsigned i, count;
7184 int size, offset;
7185 tree itype, elem;
7187 offset = 0;
7188 count = VECTOR_CST_NELTS (expr);
7189 itype = TREE_TYPE (TREE_TYPE (expr));
7190 size = GET_MODE_SIZE (TYPE_MODE (itype));
7191 for (i = 0; i < count; i++)
7193 if (off >= size)
7195 off -= size;
7196 continue;
7198 elem = VECTOR_CST_ELT (expr, i);
7199 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7200 if ((off == -1 && res != size)
7201 || res == 0)
7202 return 0;
7203 offset += res;
7204 if (offset >= len)
7205 return offset;
7206 if (off != -1)
7207 off = 0;
7209 return offset;
7213 /* Subroutine of native_encode_expr. Encode the STRING_CST
7214 specified by EXPR into the buffer PTR of length LEN bytes.
7215 Return the number of bytes placed in the buffer, or zero
7216 upon failure. */
7218 static int
7219 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7221 tree type = TREE_TYPE (expr);
7222 HOST_WIDE_INT total_bytes;
7224 if (TREE_CODE (type) != ARRAY_TYPE
7225 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7226 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7227 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7228 return 0;
7229 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7230 if ((off == -1 && total_bytes > len)
7231 || off >= total_bytes)
7232 return 0;
7233 if (off == -1)
7234 off = 0;
7235 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7237 int written = 0;
7238 if (off < TREE_STRING_LENGTH (expr))
7240 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7241 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7243 memset (ptr + written, 0,
7244 MIN (total_bytes - written, len - written));
7246 else
7247 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7248 return MIN (total_bytes - off, len);
7252 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7253 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7254 by EXPR into the buffer PTR of length LEN bytes. If OFF is not -1 then start
7255 the encoding at byte offset OFF and encode at most LEN bytes.
7256 Return the number of bytes placed in the buffer, or zero upon failure. */
7258 int
7259 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7261 /* We don't support starting at a negative offset, and -1 is special. */
7262 if (off < -1)
7263 return 0;
7265 switch (TREE_CODE (expr))
7267 case INTEGER_CST:
7268 return native_encode_int (expr, ptr, len, off);
7270 case REAL_CST:
7271 return native_encode_real (expr, ptr, len, off);
7273 case FIXED_CST:
7274 return native_encode_fixed (expr, ptr, len, off);
7276 case COMPLEX_CST:
7277 return native_encode_complex (expr, ptr, len, off);
7279 case VECTOR_CST:
7280 return native_encode_vector (expr, ptr, len, off);
7282 case STRING_CST:
7283 return native_encode_string (expr, ptr, len, off);
7285 default:
7286 return 0;
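/* A hedged sketch, not part of the GCC API: on a host where
   CHAR_BIT == 8, a successful native_encode_expr behaves much like a
   memcpy of the constant's target byte image into PTR, modulo target
   byte order and the optional OFF/LEN window.  Names below are
   hypothetical.

     unsigned char buf[16];
     float f = 1.5f;
     memcpy (buf, &f, sizeof f);   -- plays the REAL_CST encoding role
     -- buf now holds bytes that native_interpret_expr can later turn
     -- back into a constant tree, possibly of a different type.

   Unlike memcpy, the real routines fail (return 0) rather than
   produce a partial or wrongly-ordered image.  */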
7291 /* Subroutine of native_interpret_expr. Interpret the contents of
7292 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7293 If the buffer cannot be interpreted, return NULL_TREE. */
7295 static tree
7296 native_interpret_int (tree type, const unsigned char *ptr, int len)
7298 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7300 if (total_bytes > len
7301 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7302 return NULL_TREE;
7304 wide_int result = wi::from_buffer (ptr, total_bytes);
7306 return wide_int_to_tree (type, result);
7310 /* Subroutine of native_interpret_expr. Interpret the contents of
7311 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7312 If the buffer cannot be interpreted, return NULL_TREE. */
7314 static tree
7315 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7317 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7318 double_int result;
7319 FIXED_VALUE_TYPE fixed_value;
7321 if (total_bytes > len
7322 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7323 return NULL_TREE;
7325 result = double_int::from_buffer (ptr, total_bytes);
7326 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7328 return build_fixed (type, fixed_value);
7332 /* Subroutine of native_interpret_expr. Interpret the contents of
7333 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7334 If the buffer cannot be interpreted, return NULL_TREE. */
7336 static tree
7337 native_interpret_real (tree type, const unsigned char *ptr, int len)
7339 machine_mode mode = TYPE_MODE (type);
7340 int total_bytes = GET_MODE_SIZE (mode);
7341 unsigned char value;
7342 /* There are always 32 bits in each long, no matter the size of
7343 the host's long. We handle floating point representations with
7344 up to 192 bits. */
7345 REAL_VALUE_TYPE r;
7346 long tmp[6];
7348 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7349 if (total_bytes > len || total_bytes > 24)
7350 return NULL_TREE;
7351 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7353 memset (tmp, 0, sizeof (tmp));
7354 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7355 bitpos += BITS_PER_UNIT)
7357 /* Both OFFSET and BYTE index within a long;
7358 bitpos indexes the whole float. */
7359 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7360 if (UNITS_PER_WORD < 4)
7362 int word = byte / UNITS_PER_WORD;
7363 if (WORDS_BIG_ENDIAN)
7364 word = (words - 1) - word;
7365 offset = word * UNITS_PER_WORD;
7366 if (BYTES_BIG_ENDIAN)
7367 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7368 else
7369 offset += byte % UNITS_PER_WORD;
7371 else
7373 offset = byte;
7374 if (BYTES_BIG_ENDIAN)
7376 /* Reverse bytes within each long, or within the entire float
7377 if it's smaller than a long (for HFmode). */
7378 offset = MIN (3, total_bytes - 1) - offset;
7379 gcc_assert (offset >= 0);
7382 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7384 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7387 real_from_target (&r, tmp, mode);
7388 return build_real (type, r);
7392 /* Subroutine of native_interpret_expr. Interpret the contents of
7393 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7394 If the buffer cannot be interpreted, return NULL_TREE. */
7396 static tree
7397 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7399 tree etype, rpart, ipart;
7400 int size;
7402 etype = TREE_TYPE (type);
7403 size = GET_MODE_SIZE (TYPE_MODE (etype));
7404 if (size * 2 > len)
7405 return NULL_TREE;
7406 rpart = native_interpret_expr (etype, ptr, size);
7407 if (!rpart)
7408 return NULL_TREE;
7409 ipart = native_interpret_expr (etype, ptr+size, size);
7410 if (!ipart)
7411 return NULL_TREE;
7412 return build_complex (type, rpart, ipart);
7416 /* Subroutine of native_interpret_expr. Interpret the contents of
7417 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7418 If the buffer cannot be interpreted, return NULL_TREE. */
7420 static tree
7421 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7423 tree etype, elem;
7424 int i, size, count;
7425 tree *elements;
7427 etype = TREE_TYPE (type);
7428 size = GET_MODE_SIZE (TYPE_MODE (etype));
7429 count = TYPE_VECTOR_SUBPARTS (type);
7430 if (size * count > len)
7431 return NULL_TREE;
7433 elements = XALLOCAVEC (tree, count);
7434 for (i = count - 1; i >= 0; i--)
7436 elem = native_interpret_expr (etype, ptr+(i*size), size);
7437 if (!elem)
7438 return NULL_TREE;
7439 elements[i] = elem;
7441 return build_vector (type, elements);
7445 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7446 the buffer PTR of length LEN as a constant of type TYPE. For
7447 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7448 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7449 return NULL_TREE. */
7451 tree
7452 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7454 switch (TREE_CODE (type))
7456 case INTEGER_TYPE:
7457 case ENUMERAL_TYPE:
7458 case BOOLEAN_TYPE:
7459 case POINTER_TYPE:
7460 case REFERENCE_TYPE:
7461 return native_interpret_int (type, ptr, len);
7463 case REAL_TYPE:
7464 return native_interpret_real (type, ptr, len);
7466 case FIXED_POINT_TYPE:
7467 return native_interpret_fixed (type, ptr, len);
7469 case COMPLEX_TYPE:
7470 return native_interpret_complex (type, ptr, len);
7472 case VECTOR_TYPE:
7473 return native_interpret_vector (type, ptr, len);
7475 default:
7476 return NULL_TREE;
7480 /* Returns true if we can interpret the contents of a native encoding
7481 as TYPE. */
7483 static bool
7484 can_native_interpret_type_p (tree type)
7486 switch (TREE_CODE (type))
7488 case INTEGER_TYPE:
7489 case ENUMERAL_TYPE:
7490 case BOOLEAN_TYPE:
7491 case POINTER_TYPE:
7492 case REFERENCE_TYPE:
7493 case FIXED_POINT_TYPE:
7494 case REAL_TYPE:
7495 case COMPLEX_TYPE:
7496 case VECTOR_TYPE:
7497 return true;
7498 default:
7499 return false;
7503 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7504 TYPE at compile-time. If we're unable to perform the conversion
7505 return NULL_TREE. */
7507 static tree
7508 fold_view_convert_expr (tree type, tree expr)
7510 /* We support up to 512-bit values (for V8DFmode). */
7511 unsigned char buffer[64];
7512 int len;
7514 /* Check that the host and target are sane. */
7515 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7516 return NULL_TREE;
7518 len = native_encode_expr (expr, buffer, sizeof (buffer));
7519 if (len == 0)
7520 return NULL_TREE;
7522 return native_interpret_expr (type, buffer, len);
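/* A hedged sketch of the semantics: folding a VIEW_CONVERT_EXPR is
   the compile-time analogue of the classic memcpy type-pun, here
   assuming IEEE single precision on both host and target.

     float f = 1.0f;
     unsigned int u;
     memcpy (&u, &f, sizeof u);   -- u == 0x3f800000

   so VIEW_CONVERT_EXPR<unsigned int>(1.0f) folds to the INTEGER_CST
   0x3f800000 without emitting any runtime code.  */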
7525 /* Build an expression for the address of T. Folds away INDIRECT_REF
7526 to avoid confusing the gimplify process. */
7528 tree
7529 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7531 /* The size of the object is not relevant when talking about its address. */
7532 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7533 t = TREE_OPERAND (t, 0);
7535 if (TREE_CODE (t) == INDIRECT_REF)
7537 t = TREE_OPERAND (t, 0);
7539 if (TREE_TYPE (t) != ptrtype)
7540 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7542 else if (TREE_CODE (t) == MEM_REF
7543 && integer_zerop (TREE_OPERAND (t, 1)))
7544 return TREE_OPERAND (t, 0);
7545 else if (TREE_CODE (t) == MEM_REF
7546 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7547 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7548 TREE_OPERAND (t, 0),
7549 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7550 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7552 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7554 if (TREE_TYPE (t) != ptrtype)
7555 t = fold_convert_loc (loc, ptrtype, t);
7557 else
7558 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7560 return t;
7563 /* Build an expression for the address of T. */
7565 tree
7566 build_fold_addr_expr_loc (location_t loc, tree t)
7568 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7570 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7573 /* Fold a unary expression of code CODE and type TYPE with operand
7574 OP0. Return the folded expression if folding is successful.
7575 Otherwise, return NULL_TREE. */
7577 tree
7578 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7580 tree tem;
7581 tree arg0;
7582 enum tree_code_class kind = TREE_CODE_CLASS (code);
7584 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7585 && TREE_CODE_LENGTH (code) == 1);
7587 arg0 = op0;
7588 if (arg0)
7590 if (CONVERT_EXPR_CODE_P (code)
7591 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7593 /* Don't use STRIP_NOPS, because signedness of argument type
7594 matters. */
7595 STRIP_SIGN_NOPS (arg0);
7597 else
7599 /* Strip any conversions that don't change the mode. This
7600 is safe for every expression, except for a comparison
7601 expression because its signedness is derived from its
7602 operands.
7604 Note that this is done as an internal manipulation within
7605 the constant folder, in order to find the simplest
7606 representation of the arguments so that their form can be
7607 studied. In any case, the appropriate type conversions
7608 should be put back in the tree that will get out of the
7609 constant folder. */
7610 STRIP_NOPS (arg0);
7613 if (CONSTANT_CLASS_P (arg0))
7615 tree tem = const_unop (code, type, arg0);
7616 if (tem)
7618 if (TREE_TYPE (tem) != type)
7619 tem = fold_convert_loc (loc, type, tem);
7620 return tem;
7625 tem = generic_simplify (loc, code, type, op0);
7626 if (tem)
7627 return tem;
7629 if (TREE_CODE_CLASS (code) == tcc_unary)
7631 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7632 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7633 fold_build1_loc (loc, code, type,
7634 fold_convert_loc (loc, TREE_TYPE (op0),
7635 TREE_OPERAND (arg0, 1))));
7636 else if (TREE_CODE (arg0) == COND_EXPR)
7638 tree arg01 = TREE_OPERAND (arg0, 1);
7639 tree arg02 = TREE_OPERAND (arg0, 2);
7640 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7641 arg01 = fold_build1_loc (loc, code, type,
7642 fold_convert_loc (loc,
7643 TREE_TYPE (op0), arg01));
7644 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7645 arg02 = fold_build1_loc (loc, code, type,
7646 fold_convert_loc (loc,
7647 TREE_TYPE (op0), arg02));
7648 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7649 arg01, arg02);
7651 /* If this was a conversion, and all we did was to move into
7652 inside the COND_EXPR, bring it back out. But leave it if
7653 it is a conversion from integer to integer and the
7654 result precision is no wider than a word since such a
7655 conversion is cheap and may be optimized away by combine,
7656 while it couldn't if it were outside the COND_EXPR. Then return
7657 so we don't get into an infinite recursion loop taking the
7658 conversion out and then back in. */
7660 if ((CONVERT_EXPR_CODE_P (code)
7661 || code == NON_LVALUE_EXPR)
7662 && TREE_CODE (tem) == COND_EXPR
7663 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7664 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7665 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7666 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7667 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7668 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7669 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7670 && (INTEGRAL_TYPE_P
7671 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7672 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7673 || flag_syntax_only))
7674 tem = build1_loc (loc, code, type,
7675 build3 (COND_EXPR,
7676 TREE_TYPE (TREE_OPERAND
7677 (TREE_OPERAND (tem, 1), 0)),
7678 TREE_OPERAND (tem, 0),
7679 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7680 TREE_OPERAND (TREE_OPERAND (tem, 2),
7681 0)));
7682 return tem;
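/* A hedged source-level sketch of the COND_EXPR distribution above
   (hypothetical C, signedness details elided):

     (long) (p ? a : b)   -->   p ? (long) a : (long) b

   after which each arm may fold further; the conversion is hoisted
   back out afterwards unless it is a cheap integer-to-integer
   conversion no wider than a word, which combine can remove while it
   sits inside the arms.  */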
7686 switch (code)
7688 case NON_LVALUE_EXPR:
7689 if (!maybe_lvalue_p (op0))
7690 return fold_convert_loc (loc, type, op0);
7691 return NULL_TREE;
7693 CASE_CONVERT:
7694 case FLOAT_EXPR:
7695 case FIX_TRUNC_EXPR:
7696 if (COMPARISON_CLASS_P (op0))
7698 /* If we have (type) (a CMP b) and type is an integral type, return
7699 new expression involving the new type. Canonicalize
7700 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7701 non-integral type.
7702 Do not fold the result as that would not simplify further, also
7703 folding again results in recursions. */
7704 if (TREE_CODE (type) == BOOLEAN_TYPE)
7705 return build2_loc (loc, TREE_CODE (op0), type,
7706 TREE_OPERAND (op0, 0),
7707 TREE_OPERAND (op0, 1));
7708 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7709 && TREE_CODE (type) != VECTOR_TYPE)
7710 return build3_loc (loc, COND_EXPR, type, op0,
7711 constant_boolean_node (true, type),
7712 constant_boolean_node (false, type));
7715 /* Handle (T *)&A.B.C for A being of type T and B and C
7716 living at offset zero. This occurs frequently in
7717 C++ upcasting and then accessing the base. */
7718 if (TREE_CODE (op0) == ADDR_EXPR
7719 && POINTER_TYPE_P (type)
7720 && handled_component_p (TREE_OPERAND (op0, 0)))
7722 HOST_WIDE_INT bitsize, bitpos;
7723 tree offset;
7724 machine_mode mode;
7725 int unsignedp, reversep, volatilep;
7726 tree base
7727 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7728 &offset, &mode, &unsignedp, &reversep,
7729 &volatilep, false);
7730 /* If the reference was to a (constant) zero offset, we can use
7731 the address of the base if it has the same base type
7732 as the result type and the pointer type is unqualified. */
7733 if (! offset && bitpos == 0
7734 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7735 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7736 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7737 return fold_convert_loc (loc, type,
7738 build_fold_addr_expr_loc (loc, base));
7741 if (TREE_CODE (op0) == MODIFY_EXPR
7742 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7743 /* Detect assigning a bitfield. */
7744 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7745 && DECL_BIT_FIELD
7746 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7748 /* Don't leave an assignment inside a conversion
7749 unless assigning a bitfield. */
7750 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7751 /* First do the assignment, then return converted constant. */
7752 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7753 TREE_NO_WARNING (tem) = 1;
7754 TREE_USED (tem) = 1;
7755 return tem;
7758 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7759 constant (if x has signed type, the sign bit cannot be set
7760 in c). This folds extension into the BIT_AND_EXPR.
7761 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7762 very likely don't have maximal range for their precision and this
7763 transformation effectively doesn't preserve non-maximal ranges. */
7764 if (TREE_CODE (type) == INTEGER_TYPE
7765 && TREE_CODE (op0) == BIT_AND_EXPR
7766 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7768 tree and_expr = op0;
7769 tree and0 = TREE_OPERAND (and_expr, 0);
7770 tree and1 = TREE_OPERAND (and_expr, 1);
7771 int change = 0;
7773 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7774 || (TYPE_PRECISION (type)
7775 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7776 change = 1;
7777 else if (TYPE_PRECISION (TREE_TYPE (and1))
7778 <= HOST_BITS_PER_WIDE_INT
7779 && tree_fits_uhwi_p (and1))
7781 unsigned HOST_WIDE_INT cst;
7783 cst = tree_to_uhwi (and1);
7784 cst &= HOST_WIDE_INT_M1U
7785 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7786 change = (cst == 0);
7787 if (change
7788 && !flag_syntax_only
7789 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7790 == ZERO_EXTEND))
7792 tree uns = unsigned_type_for (TREE_TYPE (and0));
7793 and0 = fold_convert_loc (loc, uns, and0);
7794 and1 = fold_convert_loc (loc, uns, and1);
7797 if (change)
7799 tem = force_fit_type (type, wi::to_widest (and1), 0,
7800 TREE_OVERFLOW (and1));
7801 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7802 fold_convert_loc (loc, type, and0), tem);
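/* A worked example of the fold above, with hypothetical operands:

     signed char sc;
     (int) (sc & 0x7f)   -->   (int) sc & 0x7f

   valid because 0x7f leaves the narrow type's sign bit clear, so
   sign- and zero-extension of the masked value agree.  */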
7806 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7807 cast (T1)X will fold away. We assume that this happens when X itself
7808 is a cast. */
7809 if (POINTER_TYPE_P (type)
7810 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7811 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7813 tree arg00 = TREE_OPERAND (arg0, 0);
7814 tree arg01 = TREE_OPERAND (arg0, 1);
7816 return fold_build_pointer_plus_loc
7817 (loc, fold_convert_loc (loc, type, arg00), arg01);
7820 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7821 of the same precision, and X is an integer type not narrower than
7822 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7823 if (INTEGRAL_TYPE_P (type)
7824 && TREE_CODE (op0) == BIT_NOT_EXPR
7825 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7826 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7827 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7829 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7830 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7831 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7832 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7833 fold_convert_loc (loc, type, tem));
7836 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7837 type of X and Y (integer types only). */
7838 if (INTEGRAL_TYPE_P (type)
7839 && TREE_CODE (op0) == MULT_EXPR
7840 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7841 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7843 /* Be careful not to introduce new overflows. */
7844 tree mult_type;
7845 if (TYPE_OVERFLOW_WRAPS (type))
7846 mult_type = type;
7847 else
7848 mult_type = unsigned_type_for (type);
7850 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7852 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7853 fold_convert_loc (loc, mult_type,
7854 TREE_OPERAND (op0, 0)),
7855 fold_convert_loc (loc, mult_type,
7856 TREE_OPERAND (op0, 1)));
7857 return fold_convert_loc (loc, type, tem);
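/* A hedged sketch of the narrowing above (hypothetical types,
   assuming int does not wrap on overflow):

     long x, y;
     (int) (x * y)   -->   (int) ((unsigned) x * (unsigned) y)

   the unsigned multiply keeps the transformation from introducing a
   new undefined overflow in the narrower type.  */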
7861 return NULL_TREE;
7863 case VIEW_CONVERT_EXPR:
7864 if (TREE_CODE (op0) == MEM_REF)
7866 tem = fold_build2_loc (loc, MEM_REF, type,
7867 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7868 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7869 return tem;
7872 return NULL_TREE;
7874 case NEGATE_EXPR:
7875 tem = fold_negate_expr (loc, arg0);
7876 if (tem)
7877 return fold_convert_loc (loc, type, tem);
7878 return NULL_TREE;
7880 case ABS_EXPR:
7881 /* Convert fabs((double)float) into (double)fabsf(float). */
7882 if (TREE_CODE (arg0) == NOP_EXPR
7883 && TREE_CODE (type) == REAL_TYPE)
7885 tree targ0 = strip_float_extensions (arg0);
7886 if (targ0 != arg0)
7887 return fold_convert_loc (loc, type,
7888 fold_build1_loc (loc, ABS_EXPR,
7889 TREE_TYPE (targ0),
7890 targ0));
7892 return NULL_TREE;
7894 case BIT_NOT_EXPR:
7895 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7896 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7897 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7898 fold_convert_loc (loc, type,
7899 TREE_OPERAND (arg0, 0)))))
7900 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7901 fold_convert_loc (loc, type,
7902 TREE_OPERAND (arg0, 1)));
7903 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7904 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7905 fold_convert_loc (loc, type,
7906 TREE_OPERAND (arg0, 1)))))
7907 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7908 fold_convert_loc (loc, type,
7909 TREE_OPERAND (arg0, 0)), tem);
7911 return NULL_TREE;
7913 case TRUTH_NOT_EXPR:
7914 /* Note that the operand of this must be an int
7915 and its values must be 0 or 1.
7916 ("true" is a fixed value perhaps depending on the language,
7917 but we don't handle values other than 1 correctly yet.) */
7918 tem = fold_truth_not_expr (loc, arg0);
7919 if (!tem)
7920 return NULL_TREE;
7921 return fold_convert_loc (loc, type, tem);
7923 case INDIRECT_REF:
7924 /* Fold *&X to X if X is an lvalue. */
7925 if (TREE_CODE (op0) == ADDR_EXPR)
7927 tree op00 = TREE_OPERAND (op0, 0);
7928 if ((TREE_CODE (op00) == VAR_DECL
7929 || TREE_CODE (op00) == PARM_DECL
7930 || TREE_CODE (op00) == RESULT_DECL)
7931 && !TREE_READONLY (op00))
7932 return op00;
7934 return NULL_TREE;
7936 default:
7937 return NULL_TREE;
7938 } /* switch (code) */
7942 /* If the operation was a conversion do _not_ mark a resulting constant
7943 with TREE_OVERFLOW if the original constant was not. These conversions
7944 have implementation defined behavior and retaining the TREE_OVERFLOW
7945 flag here would confuse later passes such as VRP. */
7946 tree
7947 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7948 tree type, tree op0)
7950 tree res = fold_unary_loc (loc, code, type, op0);
7951 if (res
7952 && TREE_CODE (res) == INTEGER_CST
7953 && TREE_CODE (op0) == INTEGER_CST
7954 && CONVERT_EXPR_CODE_P (code))
7955 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7957 return res;
7960 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7961 operands OP0 and OP1. LOC is the location of the resulting expression.
7962 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7963 Return the folded expression if folding is successful. Otherwise,
7964 return NULL_TREE. */
7965 static tree
7966 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7967 tree arg0, tree arg1, tree op0, tree op1)
7969 tree tem;
7971 /* We only do these simplifications if we are optimizing. */
7972 if (!optimize)
7973 return NULL_TREE;
7975 /* Check for things like (A || B) && (A || C). We can convert this
7976 to A || (B && C). Note that either operator can be any of the four
7977 truth and/or operations and the transformation will still be
7978 valid. Also note that we only care about order for the
7979 ANDIF and ORIF operators. If B contains side effects, this
7980 might change the truth-value of A. */
7981 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7982 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7983 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7984 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7985 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7986 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7988 tree a00 = TREE_OPERAND (arg0, 0);
7989 tree a01 = TREE_OPERAND (arg0, 1);
7990 tree a10 = TREE_OPERAND (arg1, 0);
7991 tree a11 = TREE_OPERAND (arg1, 1);
7992 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7993 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7994 && (code == TRUTH_AND_EXPR
7995 || code == TRUTH_OR_EXPR));
7997 if (operand_equal_p (a00, a10, 0))
7998 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7999 fold_build2_loc (loc, code, type, a01, a11));
8000 else if (commutative && operand_equal_p (a00, a11, 0))
8001 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8002 fold_build2_loc (loc, code, type, a01, a10));
8003 else if (commutative && operand_equal_p (a01, a10, 0))
8004 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8005 fold_build2_loc (loc, code, type, a00, a11));
8007 /* This case is tricky because we must either have commutative
8008 operators or else A10 must not have side-effects. */
8010 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8011 && operand_equal_p (a01, a11, 0))
8012 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8013 fold_build2_loc (loc, code, type, a00, a10),
8014 a01);
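/* A worked example for the factoring above (hypothetical operands):

     (a || b) && (a || c)   -->   a || (b && c)

   valid for any mix of the four truth operators since the common
   operand A is tested exactly once either way; ordering only matters
   for the short-circuit ANDIF/ORIF forms, hence the side-effect
   checks on the inner operands.  */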
8017 /* See if we can build a range comparison. */
8018 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8019 return tem;
8021 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8022 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8024 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8025 if (tem)
8026 return fold_build2_loc (loc, code, type, tem, arg1);
8029 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8030 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8032 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8033 if (tem)
8034 return fold_build2_loc (loc, code, type, arg0, tem);
8037 /* Check for the possibility of merging component references. If our
8038 lhs is another similar operation, try to merge its rhs with our
8039 rhs. Then try to merge our lhs and rhs. */
8040 if (TREE_CODE (arg0) == code
8041 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8042 TREE_OPERAND (arg0, 1), arg1)))
8043 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8045 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8046 return tem;
8048 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8049 && (code == TRUTH_AND_EXPR
8050 || code == TRUTH_ANDIF_EXPR
8051 || code == TRUTH_OR_EXPR
8052 || code == TRUTH_ORIF_EXPR))
8054 enum tree_code ncode, icode;
8056 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8057 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8058 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8060 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8061 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8062 We don't want to pack more than two leaves into a non-IF AND/OR
8063 expression.
8064 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8065 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8066 If the inner right-hand side of the left-hand operand has
8067 side-effects, or isn't simple, then we can't add to it,
8068 as otherwise we might destroy the if-sequence. */
8069 if (TREE_CODE (arg0) == icode
8070 && simple_operand_p_2 (arg1)
8071 /* Needed for sequence points to handle trapping and
8072 side-effects. */
8073 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8075 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8076 arg1);
8077 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8078 tem);
8080 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8081 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8082 else if (TREE_CODE (arg1) == icode
8083 && simple_operand_p_2 (arg0)
8084 /* Needed for sequence points to handle trapping and
8085 side-effects. */
8086 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8088 tem = fold_build2_loc (loc, ncode, type,
8089 arg0, TREE_OPERAND (arg1, 0));
8090 return fold_build2_loc (loc, icode, type, tem,
8091 TREE_OPERAND (arg1, 1));
8093 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8094 into (A OR B).
8095 For sequence point consistency, we need to check for trapping,
8096 and side-effects. */
8097 else if (code == icode && simple_operand_p_2 (arg0)
8098 && simple_operand_p_2 (arg1))
8099 return fold_build2_loc (loc, ncode, type, arg0, arg1);
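/* A hedged sketch: on targets where LOGICAL_OP_NON_SHORT_CIRCUIT
   holds, with all operands simple and unable to trap,

     a && b          -->   a AND b          (plain TRUTH_AND_EXPR)
     (a && b) && c   -->   a && (b AND c)

   trading a conditional branch for straight-line evaluation while
   still short-circuiting around at most one complex leaf.  */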
8102 return NULL_TREE;
8105 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8106 by changing CODE to reduce the magnitude of constants involved in
8107 ARG0 of the comparison.
8108 Returns a canonicalized comparison tree if a simplification was
8109 possible, otherwise returns NULL_TREE.
8110 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8111 valid if signed overflow is undefined. */
8113 static tree
8114 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8115 tree arg0, tree arg1,
8116 bool *strict_overflow_p)
8118 enum tree_code code0 = TREE_CODE (arg0);
8119 tree t, cst0 = NULL_TREE;
8120 int sgn0;
8122 /* Match A +- CST code arg1. We can change this only if overflow
8123 is undefined. */
8124 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8125 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8126 /* In principle pointers also have undefined overflow behavior,
8127 but that causes problems elsewhere. */
8128 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8129 && (code0 == MINUS_EXPR
8130 || code0 == PLUS_EXPR)
8131 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8132 return NULL_TREE;
8134 /* Identify the constant in arg0 and its sign. */
8135 cst0 = TREE_OPERAND (arg0, 1);
8136 sgn0 = tree_int_cst_sgn (cst0);
8138 /* Overflowed constants and zero will cause problems. */
8139 if (integer_zerop (cst0)
8140 || TREE_OVERFLOW (cst0))
8141 return NULL_TREE;
8143 /* See if we can reduce the magnitude of the constant in
8144 arg0 by changing the comparison code. */
8145 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8146 if (code == LT_EXPR
8147 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8148 code = LE_EXPR;
8149 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8150 else if (code == GT_EXPR
8151 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8152 code = GE_EXPR;
8153 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8154 else if (code == LE_EXPR
8155 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8156 code = LT_EXPR;
8157 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8158 else if (code == GE_EXPR
8159 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8160 code = GT_EXPR;
8161 else
8162 return NULL_TREE;
8163 *strict_overflow_p = true;
8165 /* Now build the constant reduced in magnitude. But not if that
8166 would produce one outside of its type's range. */
8167 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8168 && ((sgn0 == 1
8169 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8170 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8171 || (sgn0 == -1
8172 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8173 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8174 return NULL_TREE;
8176 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8177 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8178 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8179 t = fold_convert (TREE_TYPE (arg1), t);
8181 return fold_build2_loc (loc, code, type, t, arg1);
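/* A worked example, assuming signed overflow is undefined:

     a - 10 < b   -->   a - 9 <= b

   LT becomes LE while the constant moves one step toward zero;
   repeating this drives the constant's magnitude down without
   changing the predicate for any non-overflowing A.  */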
8184 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8185 overflow further. Try to decrease the magnitude of constants involved
8186 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8187 and put sole constants at the second argument position.
8188 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8190 static tree
8191 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8192 tree arg0, tree arg1)
8194 tree t;
8195 bool strict_overflow_p;
8196 const char * const warnmsg = G_("assuming signed overflow does not occur "
8197 "when reducing constant in comparison");
8199 /* Try canonicalization by simplifying arg0. */
8200 strict_overflow_p = false;
8201 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8202 &strict_overflow_p);
8203 if (t)
8205 if (strict_overflow_p)
8206 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8207 return t;
8210 /* Try canonicalization by simplifying arg1 using the swapped
8211 comparison. */
8212 code = swap_tree_comparison (code);
8213 strict_overflow_p = false;
8214 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8215 &strict_overflow_p);
8216 if (t && strict_overflow_p)
8217 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8218 return t;
8221 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8222 space. This is used to avoid issuing overflow warnings for
8223 expressions like &p->x which cannot wrap. */
8225 static bool
8226 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8228 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8229 return true;
8231 if (bitpos < 0)
8232 return true;
8234 wide_int wi_offset;
8235 int precision = TYPE_PRECISION (TREE_TYPE (base));
8236 if (offset == NULL_TREE)
8237 wi_offset = wi::zero (precision);
8238 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8239 return true;
8240 else
8241 wi_offset = offset;
8243 bool overflow;
8244 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8245 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8246 if (overflow)
8247 return true;
8249 if (!wi::fits_uhwi_p (total))
8250 return true;
8252 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8253 if (size <= 0)
8254 return true;
8256 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8257 array. */
8258 if (TREE_CODE (base) == ADDR_EXPR)
8260 HOST_WIDE_INT base_size;
8262 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8263 if (base_size > 0 && size < base_size)
8264 size = base_size;
8267 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8270 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8271 kind INTEGER_CST. This makes sure to properly sign-extend the
8272 constant. */
8274 static HOST_WIDE_INT
8275 size_low_cst (const_tree t)
8277 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8278 int prec = TYPE_PRECISION (TREE_TYPE (t));
8279 if (prec < HOST_BITS_PER_WIDE_INT)
8280 return sext_hwi (w, prec);
8281 return w;
8284 /* Subroutine of fold_binary. This routine performs all of the
8285 transformations that are common to the equality/inequality
8286 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8287 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8288 fold_binary should call fold_binary. Fold a comparison with
8289 tree code CODE and type TYPE with operands OP0 and OP1. Return
8290 the folded comparison or NULL_TREE. */
8292 static tree
8293 fold_comparison (location_t loc, enum tree_code code, tree type,
8294 tree op0, tree op1)
8296 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8297 tree arg0, arg1, tem;
8299 arg0 = op0;
8300 arg1 = op1;
8302 STRIP_SIGN_NOPS (arg0);
8303 STRIP_SIGN_NOPS (arg1);
8305 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8306 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8307 && (equality_code
8308 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8309 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8311 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8312 && TREE_CODE (arg1) == INTEGER_CST
8313 && !TREE_OVERFLOW (arg1))
8315 const enum tree_code
8316 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8317 tree const1 = TREE_OPERAND (arg0, 1);
8318 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8319 tree variable = TREE_OPERAND (arg0, 0);
8320 tree new_const = int_const_binop (reverse_op, const2, const1);
8322 /* If the constant operation overflowed this can be
8323 simplified as a comparison against INT_MAX/INT_MIN. */
8324 if (TREE_OVERFLOW (new_const)
8325 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8327 int const1_sgn = tree_int_cst_sgn (const1);
8328 enum tree_code code2 = code;
8330 /* Get the sign of the constant on the lhs if the
8331 operation were VARIABLE + CONST1. */
8332 if (TREE_CODE (arg0) == MINUS_EXPR)
8333 const1_sgn = -const1_sgn;
8335 /* The sign of the constant determines if we overflowed
8336 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8337 Canonicalize to the INT_MIN overflow by swapping the comparison
8338 if necessary. */
8339 if (const1_sgn == -1)
8340 code2 = swap_tree_comparison (code);
8342 /* We now can look at the canonicalized case
8343 VARIABLE + 1 CODE2 INT_MIN
8344 and decide on the result. */
8345 switch (code2)
8347 case EQ_EXPR:
8348 case LT_EXPR:
8349 case LE_EXPR:
8350 return
8351 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8353 case NE_EXPR:
8354 case GE_EXPR:
8355 case GT_EXPR:
8356 return
8357 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8359 default:
8360 gcc_unreachable ();
8363 else
8365 if (!equality_code)
8366 fold_overflow_warning ("assuming signed overflow does not occur "
8367 "when changing X +- C1 cmp C2 to "
8368 "X cmp C2 -+ C1",
8369 WARN_STRICT_OVERFLOW_COMPARISON);
8370 return fold_build2_loc (loc, code, type, variable, new_const);
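/* Worked examples for the transform above, with a hypothetical int X:

     x + 10 < 30       -->   x < 20    (new_const = 30 - 10)
     x + 1 <= INT_MIN  folds to false  (the constant operation
                                       overflowed INT_MIN)

   the overflowed case is decided by the switch above rather than by
   building an out-of-range constant.  */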
8374 /* For comparisons of pointers we can decompose it to a compile time
8375 comparison of the base objects and the offsets into the object.
8376 This requires at least one operand being an ADDR_EXPR or a
8377 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8378 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8379 && (TREE_CODE (arg0) == ADDR_EXPR
8380 || TREE_CODE (arg1) == ADDR_EXPR
8381 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8382 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8384 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8385 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8386 machine_mode mode;
8387 int volatilep, reversep, unsignedp;
8388 bool indirect_base0 = false, indirect_base1 = false;
8390 /* Get base and offset for the access. Strip ADDR_EXPR for
8391 get_inner_reference, but put it back by stripping INDIRECT_REF
8392 off the base object if possible. indirect_baseN will be true
8393 if baseN is not an address but refers to the object itself. */
8394 base0 = arg0;
8395 if (TREE_CODE (arg0) == ADDR_EXPR)
8397 base0
8398 = get_inner_reference (TREE_OPERAND (arg0, 0),
8399 &bitsize, &bitpos0, &offset0, &mode,
8400 &unsignedp, &reversep, &volatilep, false);
8401 if (TREE_CODE (base0) == INDIRECT_REF)
8402 base0 = TREE_OPERAND (base0, 0);
8403 else
8404 indirect_base0 = true;
8406 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8408 base0 = TREE_OPERAND (arg0, 0);
8409 STRIP_SIGN_NOPS (base0);
8410 if (TREE_CODE (base0) == ADDR_EXPR)
8412 base0 = TREE_OPERAND (base0, 0);
8413 indirect_base0 = true;
8415 offset0 = TREE_OPERAND (arg0, 1);
8416 if (tree_fits_shwi_p (offset0))
8418 HOST_WIDE_INT off = size_low_cst (offset0);
8419 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8420 * BITS_PER_UNIT)
8421 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8423 bitpos0 = off * BITS_PER_UNIT;
8424 offset0 = NULL_TREE;
8429 base1 = arg1;
8430 if (TREE_CODE (arg1) == ADDR_EXPR)
8432 base1
8433 = get_inner_reference (TREE_OPERAND (arg1, 0),
8434 &bitsize, &bitpos1, &offset1, &mode,
8435 &unsignedp, &reversep, &volatilep, false);
8436 if (TREE_CODE (base1) == INDIRECT_REF)
8437 base1 = TREE_OPERAND (base1, 0);
8438 else
8439 indirect_base1 = true;
8441 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8443 base1 = TREE_OPERAND (arg1, 0);
8444 STRIP_SIGN_NOPS (base1);
8445 if (TREE_CODE (base1) == ADDR_EXPR)
8447 base1 = TREE_OPERAND (base1, 0);
8448 indirect_base1 = true;
8450 offset1 = TREE_OPERAND (arg1, 1);
8451 if (tree_fits_shwi_p (offset1))
8453 HOST_WIDE_INT off = size_low_cst (offset1);
8454 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8455 * BITS_PER_UNIT)
8456 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8458 bitpos1 = off * BITS_PER_UNIT;
8459 offset1 = NULL_TREE;
8464 /* If we have equivalent bases we might be able to simplify. */
8465 if (indirect_base0 == indirect_base1
8466 && operand_equal_p (base0, base1,
8467 indirect_base0 ? OEP_ADDRESS_OF : 0))
8469 /* We can fold this expression to a constant if the non-constant
8470 offset parts are equal. */
8471 if ((offset0 == offset1
8472 || (offset0 && offset1
8473 && operand_equal_p (offset0, offset1, 0)))
8474 && (code == EQ_EXPR
8475 || code == NE_EXPR
8476 || (indirect_base0 && DECL_P (base0))
8477 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8480 if (!equality_code
8481 && bitpos0 != bitpos1
8482 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8483 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8484 fold_overflow_warning (("assuming pointer wraparound does not "
8485 "occur when comparing P +- C1 with "
8486 "P +- C2"),
8487 WARN_STRICT_OVERFLOW_CONDITIONAL);
8489 switch (code)
8491 case EQ_EXPR:
8492 return constant_boolean_node (bitpos0 == bitpos1, type);
8493 case NE_EXPR:
8494 return constant_boolean_node (bitpos0 != bitpos1, type);
8495 case LT_EXPR:
8496 return constant_boolean_node (bitpos0 < bitpos1, type);
8497 case LE_EXPR:
8498 return constant_boolean_node (bitpos0 <= bitpos1, type);
8499 case GE_EXPR:
8500 return constant_boolean_node (bitpos0 >= bitpos1, type);
8501 case GT_EXPR:
8502 return constant_boolean_node (bitpos0 > bitpos1, type);
8503 default:;
8506 /* We can simplify the comparison to a comparison of the variable
8507 offset parts if the constant offset parts are equal.
8508 Be careful to use signed sizetype here because otherwise we
8509 mess with array offsets in the wrong way. This is possible
8510 because pointer arithmetic is restricted to remain within an
8511 object and overflow on pointer differences is undefined as of
8512 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8513 else if (bitpos0 == bitpos1
8514 && (equality_code
8515 || (indirect_base0 && DECL_P (base0))
8516 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8518 /* By converting to signed sizetype we cover middle-end pointer
8519 arithmetic which operates on unsigned pointer types of size
8520 type size and ARRAY_REF offsets which are properly sign or
8521 zero extended from their type in case it is narrower than
8522 sizetype. */
8523 if (offset0 == NULL_TREE)
8524 offset0 = build_int_cst (ssizetype, 0);
8525 else
8526 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8527 if (offset1 == NULL_TREE)
8528 offset1 = build_int_cst (ssizetype, 0);
8529 else
8530 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8532 if (!equality_code
8533 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8534 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8535 fold_overflow_warning (("assuming pointer wraparound does not "
8536 "occur when comparing P +- C1 with "
8537 "P +- C2"),
8538 WARN_STRICT_OVERFLOW_COMPARISON);
8540 return fold_build2_loc (loc, code, type, offset0, offset1);
8543 /* For equal offsets we can simplify to a comparison of the
8544 base addresses. */
8545 else if (bitpos0 == bitpos1
8546 && (indirect_base0
8547 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8548 && (indirect_base1
8549 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8550 && ((offset0 == offset1)
8551 || (offset0 && offset1
8552 && operand_equal_p (offset0, offset1, 0))))
8554 if (indirect_base0)
8555 base0 = build_fold_addr_expr_loc (loc, base0);
8556 if (indirect_base1)
8557 base1 = build_fold_addr_expr_loc (loc, base1);
8558 return fold_build2_loc (loc, code, type, base0, base1);
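/* A worked sketch of the decomposition, assuming 32-bit int and a
   hypothetical declaration:

     struct S { int a; int b; } s;
     &s.a == &s.b   folds to   false  (same base, bitpos 0 != 32)
     &s.b >  &s.a   folds to   true   (base is a DECL, so the
                                      ordering cannot wrap)

   when the bit positions match instead, the fold compares the
   variable offsets, or the base addresses, as the cases above show.  */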
8562 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8563 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8564 the resulting offset is smaller in absolute value than the
8565 original one and has the same sign. */
8566 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8567 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8568 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8569 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8570 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8571 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8572 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8573 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8575 tree const1 = TREE_OPERAND (arg0, 1);
8576 tree const2 = TREE_OPERAND (arg1, 1);
8577 tree variable1 = TREE_OPERAND (arg0, 0);
8578 tree variable2 = TREE_OPERAND (arg1, 0);
8579 tree cst;
8580 const char * const warnmsg = G_("assuming signed overflow does not "
8581 "occur when combining constants around "
8582 "a comparison");
8584 /* Put the constant on the side where it doesn't overflow and is
8585 of lower absolute value and of the same sign as before. */
8586 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8587 ? MINUS_EXPR : PLUS_EXPR,
8588 const2, const1);
8589 if (!TREE_OVERFLOW (cst)
8590 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8591 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8593 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8594 return fold_build2_loc (loc, code, type,
8595 variable1,
8596 fold_build2_loc (loc, TREE_CODE (arg1),
8597 TREE_TYPE (arg1),
8598 variable2, cst));
8601 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8602 ? MINUS_EXPR : PLUS_EXPR,
8603 const1, const2);
8604 if (!TREE_OVERFLOW (cst)
8605 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8606 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8608 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8609 return fold_build2_loc (loc, code, type,
8610 fold_build2_loc (loc, TREE_CODE (arg0),
8611 TREE_TYPE (arg0),
8612 variable1, cst),
8613 variable2);
8617 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8618 if (tem)
8619 return tem;
8621 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8622 constant, we can simplify it. */
8623 if (TREE_CODE (arg1) == INTEGER_CST
8624 && (TREE_CODE (arg0) == MIN_EXPR
8625 || TREE_CODE (arg0) == MAX_EXPR)
8626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8628 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8629 if (tem)
8630 return tem;
8633 /* If we are comparing an expression that just has comparisons
8634 of two integer values, arithmetic expressions of those comparisons,
8635 and constants, we can simplify it. There are only three cases
8636 to check: the two values can either be equal, the first can be
8637 greater, or the second can be greater. Fold the expression for
8638 those three values. Since each value must be 0 or 1, we have
8639 eight possibilities, each of which corresponds to the constant 0
8640 or 1 or one of the six possible comparisons.
8642 This handles common cases like (a > b) == 0 but also handles
8643 expressions like ((x > y) - (y > x)) > 0, which supposedly
8644 occur in macroized code. */
8646 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8648 tree cval1 = 0, cval2 = 0;
8649 int save_p = 0;
8651 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8652 /* Don't handle degenerate cases here; they should already
8653 have been handled anyway. */
8654 && cval1 != 0 && cval2 != 0
8655 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8656 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8657 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8658 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8659 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8660 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8661 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8663 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8664 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8666 /* We can't just pass T to eval_subst in case cval1 or cval2
8667 was the same as ARG1. */
8669 tree high_result
8670 = fold_build2_loc (loc, code, type,
8671 eval_subst (loc, arg0, cval1, maxval,
8672 cval2, minval),
8673 arg1);
8674 tree equal_result
8675 = fold_build2_loc (loc, code, type,
8676 eval_subst (loc, arg0, cval1, maxval,
8677 cval2, maxval),
8678 arg1);
8679 tree low_result
8680 = fold_build2_loc (loc, code, type,
8681 eval_subst (loc, arg0, cval1, minval,
8682 cval2, maxval),
8683 arg1);
8685 /* All three of these results should be 0 or 1. Confirm they are.
8686 Then use those values to select the proper code to use. */
8688 if (TREE_CODE (high_result) == INTEGER_CST
8689 && TREE_CODE (equal_result) == INTEGER_CST
8690 && TREE_CODE (low_result) == INTEGER_CST)
8692 /* Make a 3-bit mask with the high-order bit being the
8693 value for `>', the next for `=', and the low for `<'. */
8694 switch ((integer_onep (high_result) * 4)
8695 + (integer_onep (equal_result) * 2)
8696 + integer_onep (low_result))
8698 case 0:
8699 /* Always false. */
8700 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8701 case 1:
8702 code = LT_EXPR;
8703 break;
8704 case 2:
8705 code = EQ_EXPR;
8706 break;
8707 case 3:
8708 code = LE_EXPR;
8709 break;
8710 case 4:
8711 code = GT_EXPR;
8712 break;
8713 case 5:
8714 code = NE_EXPR;
8715 break;
8716 case 6:
8717 code = GE_EXPR;
8718 break;
8719 case 7:
8720 /* Always true. */
8721 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8724 if (save_p)
8726 tem = save_expr (build2 (code, type, cval1, cval2));
8727 SET_EXPR_LOCATION (tem, loc);
8728 return tem;
8730 return fold_build2_loc (loc, code, type, cval1, cval2);
8735 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8736 into a single range test. */
8737 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8738 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8739 && TREE_CODE (arg1) == INTEGER_CST
8740 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8741 && !integer_zerop (TREE_OPERAND (arg0, 1))
8742 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8743 && !TREE_OVERFLOW (arg1))
8745 tem = fold_div_compare (loc, code, type, arg0, arg1);
8746 if (tem != NULL_TREE)
8747 return tem;
8750 return NULL_TREE;
8754 /* Subroutine of fold_binary. Optimize complex multiplications of the
8755 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8756 argument EXPR represents the expression "z" of type TYPE. */
8758 static tree
8759 fold_mult_zconjz (location_t loc, tree type, tree expr)
8761 tree itype = TREE_TYPE (type);
8762 tree rpart, ipart, tem;
8764 if (TREE_CODE (expr) == COMPLEX_EXPR)
8766 rpart = TREE_OPERAND (expr, 0);
8767 ipart = TREE_OPERAND (expr, 1);
8769 else if (TREE_CODE (expr) == COMPLEX_CST)
8771 rpart = TREE_REALPART (expr);
8772 ipart = TREE_IMAGPART (expr);
8774 else
8776 expr = save_expr (expr);
8777 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8778 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8781 rpart = save_expr (rpart);
8782 ipart = save_expr (ipart);
8783 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8784 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8785 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8786 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8787 build_zero_cst (itype));
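/* A short derivation of the identity used above: for z = x + y*i,

     z * conj(z) = (x + y*i) * (x - y*i) = x*x + y*y + 0*i

   which is exactly the COMPLEX_EXPR built here: rpart*rpart +
   ipart*ipart with a zero imaginary part.  */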
8791 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8792 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8794 static bool
8795 vec_cst_ctor_to_array (tree arg, tree *elts)
8797 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8799 if (TREE_CODE (arg) == VECTOR_CST)
8801 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8802 elts[i] = VECTOR_CST_ELT (arg, i);
8804 else if (TREE_CODE (arg) == CONSTRUCTOR)
8806 constructor_elt *elt;
8808 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8809 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8810 return false;
8811 else
8812 elts[i] = elt->value;
8814 else
8815 return false;
8816 for (; i < nelts; i++)
8817 elts[i]
8818 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8819 return true;
8822 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8823 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8824 NULL_TREE otherwise. */
8826 static tree
8827 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8829 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8830 tree *elts;
8831 bool need_ctor = false;
8833 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8834 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8835 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8836 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8837 return NULL_TREE;
8839 elts = XALLOCAVEC (tree, nelts * 3);
8840 if (!vec_cst_ctor_to_array (arg0, elts)
8841 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8842 return NULL_TREE;
8844 for (i = 0; i < nelts; i++)
8846 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8847 need_ctor = true;
8848 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8851 if (need_ctor)
8853 vec<constructor_elt, va_gc> *v;
8854 vec_alloc (v, nelts);
8855 for (i = 0; i < nelts; i++)
8856 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8857 return build_constructor (type, v);
8859 else
8860 return build_vector (type, &elts[2 * nelts]);
8863 /* Try to fold a pointer difference of type TYPE two address expressions of
8864 array references AREF0 and AREF1 using location LOC. Return a
8865 simplified expression for the difference or NULL_TREE. */
8867 static tree
8868 fold_addr_of_array_ref_difference (location_t loc, tree type,
8869 tree aref0, tree aref1)
8871 tree base0 = TREE_OPERAND (aref0, 0);
8872 tree base1 = TREE_OPERAND (aref1, 0);
8873 tree base_offset = build_int_cst (type, 0);
8875 /* If the bases are array references as well, recurse. If the bases
8876 are pointer indirections compute the difference of the pointers.
8877 If the bases are equal, we are set. */
8878 if ((TREE_CODE (base0) == ARRAY_REF
8879 && TREE_CODE (base1) == ARRAY_REF
8880 && (base_offset
8881 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8882 || (INDIRECT_REF_P (base0)
8883 && INDIRECT_REF_P (base1)
8884 && (base_offset
8885 = fold_binary_loc (loc, MINUS_EXPR, type,
8886 fold_convert (type, TREE_OPERAND (base0, 0)),
8887 fold_convert (type,
8888 TREE_OPERAND (base1, 0)))))
8889 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8891 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8892 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8893 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8894 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8895 return fold_build2_loc (loc, PLUS_EXPR, type,
8896 base_offset,
8897 fold_build2_loc (loc, MULT_EXPR, type,
8898 diff, esz));
8900 return NULL_TREE;
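/* A worked example, assuming 4-byte int and hypothetical operands:

     int a[10];
     (char *) &a[i] - (char *) &a[j]   -->   (i - j) * 4

   i.e. base_offset + (op0 - op1) * element_size, with the recursion
   bottoming out once both array references share the base 'a'.  */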
8903 /* If the real or vector real constant CST of type TYPE has an exact
8904 inverse, return it, else return NULL. */
8906 tree
8907 exact_inverse (tree type, tree cst)
8909 REAL_VALUE_TYPE r;
8910 tree unit_type, *elts;
8911 machine_mode mode;
8912 unsigned vec_nelts, i;
8914 switch (TREE_CODE (cst))
8916 case REAL_CST:
8917 r = TREE_REAL_CST (cst);
8919 if (exact_real_inverse (TYPE_MODE (type), &r))
8920 return build_real (type, r);
8922 return NULL_TREE;
8924 case VECTOR_CST:
8925 vec_nelts = VECTOR_CST_NELTS (cst);
8926 elts = XALLOCAVEC (tree, vec_nelts);
8927 unit_type = TREE_TYPE (type);
8928 mode = TYPE_MODE (unit_type);
8930 for (i = 0; i < vec_nelts; i++)
8932 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8933 if (!exact_real_inverse (mode, &r))
8934 return NULL_TREE;
8935 elts[i] = build_real (unit_type, r);
8938 return build_vector (type, elts);
8940 default:
8941 return NULL_TREE;
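/* Worked examples: 4.0 has the exact binary inverse 0.25, so a later
   fold may rewrite x / 4.0 as x * 0.25 with no change in rounding;
   3.0 has no exact inverse, so NULL_TREE is returned and the
   division must stay.  A vector constant qualifies only when every
   element has an exact inverse.  */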
8945 /* Mask out the tz least significant bits of X of type TYPE where
8946 tz is the number of trailing zeroes in Y. */
8947 static wide_int
8948 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8950 int tz = wi::ctz (y);
8951 if (tz > 0)
8952 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8953 return x;
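/* A worked example: if Y is 0b1000 (three trailing zeroes), the
   result is X with its low three bits cleared, e.g. X = 0b10111
   yields 0b10000.  Those bits of X cannot survive an AND with Y,
   so masking them out loses nothing.  */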
8956 /* Return true when T is an address and is known to be nonzero.
8957 For floating point we further ensure that T is not denormal.
8958 Similar logic is present in nonzero_address in rtlanal.h.
8960 If the return value is based on the assumption that signed overflow
8961 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8962 change *STRICT_OVERFLOW_P. */
8964 static bool
8965 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8967 tree type = TREE_TYPE (t);
8968 enum tree_code code;
8970 /* Doing something useful for floating point would need more work. */
8971 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8972 return false;
8974 code = TREE_CODE (t);
8975 switch (TREE_CODE_CLASS (code))
8977 case tcc_unary:
8978 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8979 strict_overflow_p);
8980 case tcc_binary:
8981 case tcc_comparison:
8982 return tree_binary_nonzero_warnv_p (code, type,
8983 TREE_OPERAND (t, 0),
8984 TREE_OPERAND (t, 1),
8985 strict_overflow_p);
8986 case tcc_constant:
8987 case tcc_declaration:
8988 case tcc_reference:
8989 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8991 default:
8992 break;
8995 switch (code)
8997 case TRUTH_NOT_EXPR:
8998 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8999 strict_overflow_p);
9001 case TRUTH_AND_EXPR:
9002 case TRUTH_OR_EXPR:
9003 case TRUTH_XOR_EXPR:
9004 return tree_binary_nonzero_warnv_p (code, type,
9005 TREE_OPERAND (t, 0),
9006 TREE_OPERAND (t, 1),
9007 strict_overflow_p);
9009 case COND_EXPR:
9010 case CONSTRUCTOR:
9011 case OBJ_TYPE_REF:
9012 case ASSERT_EXPR:
9013 case ADDR_EXPR:
9014 case WITH_SIZE_EXPR:
9015 case SSA_NAME:
9016 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9018 case COMPOUND_EXPR:
9019 case MODIFY_EXPR:
9020 case BIND_EXPR:
9021 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9022 strict_overflow_p);
9024 case SAVE_EXPR:
9025 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9026 strict_overflow_p);
9028 case CALL_EXPR:
9030 tree fndecl = get_callee_fndecl (t);
9031 if (!fndecl) return false;
9032 if (flag_delete_null_pointer_checks && !flag_check_new
9033 && DECL_IS_OPERATOR_NEW (fndecl)
9034 && !TREE_NOTHROW (fndecl))
9035 return true;
9036 if (flag_delete_null_pointer_checks
9037 && lookup_attribute ("returns_nonnull",
9038 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9039 return true;
9040 return alloca_call_p (t);
9043 default:
9044 break;
9046 return false;
9049 /* Return true when T is an address and is known to be nonzero.
9050 Handle warnings about undefined signed overflow. */
9052 static bool
9053 tree_expr_nonzero_p (tree t)
9055 bool ret, strict_overflow_p;
9057 strict_overflow_p = false;
9058 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9059 if (strict_overflow_p)
9060 fold_overflow_warning (("assuming signed overflow does not occur when "
9061 "determining that expression is always "
9062 "non-zero"),
9063 WARN_STRICT_OVERFLOW_MISC);
9064 return ret;
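/* For example, the address of a declared object, the result of a
   throwing operator new, a call to a function declared with the
   returns_nonnull attribute, and an alloca call all test nonzero
   under the logic above.  */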
9067 /* Fold a binary expression of code CODE and type TYPE with operands
9068 OP0 and OP1. LOC is the location of the resulting expression.
9069 Return the folded expression if folding is successful. Otherwise,
9070 return NULL_TREE. */
9072 tree
9073 fold_binary_loc (location_t loc,
9074 enum tree_code code, tree type, tree op0, tree op1)
9076 enum tree_code_class kind = TREE_CODE_CLASS (code);
9077 tree arg0, arg1, tem;
9078 tree t1 = NULL_TREE;
9079 bool strict_overflow_p;
9080 unsigned int prec;
9082 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9083 && TREE_CODE_LENGTH (code) == 2
9084 && op0 != NULL_TREE
9085 && op1 != NULL_TREE);
9087 arg0 = op0;
9088 arg1 = op1;
9090 /* Strip any conversions that don't change the mode. This is
9091 safe for every expression, except for a comparison expression
9092 because its signedness is derived from its operands. So, in
9093 the latter case, only strip conversions that don't change the
9094 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9095 preserved.
9097 Note that this is done as an internal manipulation within the
9098 constant folder, in order to find the simplest representation
9099 of the arguments so that their form can be studied. In any
9100 case, the appropriate type conversions should be put back in
9101 the tree that will get out of the constant folder. */
9103 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9105 STRIP_SIGN_NOPS (arg0);
9106 STRIP_SIGN_NOPS (arg1);
9108 else
9110 STRIP_NOPS (arg0);
9111 STRIP_NOPS (arg1);
9114 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9115 constant but we can't do arithmetic on them. */
9116 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9118 tem = const_binop (code, type, arg0, arg1);
9119 if (tem != NULL_TREE)
9121 if (TREE_TYPE (tem) != type)
9122 tem = fold_convert_loc (loc, type, tem);
9123 return tem;
9127 /* If this is a commutative operation, and ARG0 is a constant, move it
9128 to ARG1 to reduce the number of tests below. */
9129 if (commutative_tree_code (code)
9130 && tree_swap_operands_p (arg0, arg1, true))
9131 return fold_build2_loc (loc, code, type, op1, op0);
9133 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9134 to ARG1 to reduce the number of tests below. */
9135 if (kind == tcc_comparison
9136 && tree_swap_operands_p (arg0, arg1, true))
9137 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9139 tem = generic_simplify (loc, code, type, op0, op1);
9140 if (tem)
9141 return tem;
9143 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9145 First check for cases where an arithmetic operation is applied to a
9146 compound, conditional, or comparison operation. Push the arithmetic
9147 operation inside the compound or conditional to see if any folding
9148 can then be done. Convert comparison to conditional for this purpose.
9149 This also optimizes non-constant cases that used to be done in
9150 expand_expr.
9152 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9153 where one operand is a truth value and the other is either a truth
9154 value or a BIT_AND_EXPR with the constant 1. In that case, the
9155 code below would make the expression more complex. Change it to a
9156 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9157 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
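/* For example, (a < b) & (c < d) becomes (a < b) && (c < d), and
   (a < b) != (c < d) becomes (a < b) ^ (c < d).  */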
9159 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9160 || code == EQ_EXPR || code == NE_EXPR)
9161 && TREE_CODE (type) != VECTOR_TYPE
9162 && ((truth_value_p (TREE_CODE (arg0))
9163 && (truth_value_p (TREE_CODE (arg1))
9164 || (TREE_CODE (arg1) == BIT_AND_EXPR
9165 && integer_onep (TREE_OPERAND (arg1, 1)))))
9166 || (truth_value_p (TREE_CODE (arg1))
9167 && (truth_value_p (TREE_CODE (arg0))
9168 || (TREE_CODE (arg0) == BIT_AND_EXPR
9169 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9171 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9172 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9173 : TRUTH_XOR_EXPR,
9174 boolean_type_node,
9175 fold_convert_loc (loc, boolean_type_node, arg0),
9176 fold_convert_loc (loc, boolean_type_node, arg1));
9178 if (code == EQ_EXPR)
9179 tem = invert_truthvalue_loc (loc, tem);
9181 return fold_convert_loc (loc, type, tem);
9184 if (TREE_CODE_CLASS (code) == tcc_binary
9185 || TREE_CODE_CLASS (code) == tcc_comparison)
9187 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9189 tem = fold_build2_loc (loc, code, type,
9190 fold_convert_loc (loc, TREE_TYPE (op0),
9191 TREE_OPERAND (arg0, 1)), op1);
9192 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9193 tem);
9195 if (TREE_CODE (arg1) == COMPOUND_EXPR
9196 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9198 tem = fold_build2_loc (loc, code, type, op0,
9199 fold_convert_loc (loc, TREE_TYPE (op1),
9200 TREE_OPERAND (arg1, 1)));
9201 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9202 tem);
9205 if (TREE_CODE (arg0) == COND_EXPR
9206 || TREE_CODE (arg0) == VEC_COND_EXPR
9207 || COMPARISON_CLASS_P (arg0))
9209 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9210 arg0, arg1,
9211 /*cond_first_p=*/1);
9212 if (tem != NULL_TREE)
9213 return tem;
9216 if (TREE_CODE (arg1) == COND_EXPR
9217 || TREE_CODE (arg1) == VEC_COND_EXPR
9218 || COMPARISON_CLASS_P (arg1))
9220 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9221 arg1, arg0,
9222 /*cond_first_p=*/0);
9223 if (tem != NULL_TREE)
9224 return tem;
9228 switch (code)
9230 case MEM_REF:
9231 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9232 if (TREE_CODE (arg0) == ADDR_EXPR
9233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9235 tree iref = TREE_OPERAND (arg0, 0);
9236 return fold_build2 (MEM_REF, type,
9237 TREE_OPERAND (iref, 0),
9238 int_const_binop (PLUS_EXPR, arg1,
9239 TREE_OPERAND (iref, 1)));
9242 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9243 if (TREE_CODE (arg0) == ADDR_EXPR
9244 && handled_component_p (TREE_OPERAND (arg0, 0)))
9246 tree base;
9247 HOST_WIDE_INT coffset;
9248 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9249 &coffset);
9250 if (!base)
9251 return NULL_TREE;
9252 return fold_build2 (MEM_REF, type,
9253 build_fold_addr_expr (base),
9254 int_const_binop (PLUS_EXPR, arg1,
9255 size_int (coffset)));
9258 return NULL_TREE;
9260 case POINTER_PLUS_EXPR:
9261 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9262 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9263 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9264 return fold_convert_loc (loc, type,
9265 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9266 fold_convert_loc (loc, sizetype,
9267 arg1),
9268 fold_convert_loc (loc, sizetype,
9269 arg0)));
9271 return NULL_TREE;
9273 case PLUS_EXPR:
9274 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9276 /* X + (X / CST) * -CST is X % CST. */
9277 if (TREE_CODE (arg1) == MULT_EXPR
9278 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9279 && operand_equal_p (arg0,
9280 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9282 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9283 tree cst1 = TREE_OPERAND (arg1, 1);
9284 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9285 cst1, cst0);
9286 if (sum && integer_zerop (sum))
9287 return fold_convert_loc (loc, type,
9288 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9289 TREE_TYPE (arg0), arg0,
9290 cst0));
9294 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9295 one. Make sure the type is not saturating and has the signedness of
9296 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9297 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
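/* For example, A*4 + A*8 becomes A*12, A*4 + B*4 becomes (A + B)*4,
   and A*4 + A is treated as A*4 + A*1, giving A*5.  */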
9298 if ((TREE_CODE (arg0) == MULT_EXPR
9299 || TREE_CODE (arg1) == MULT_EXPR)
9300 && !TYPE_SATURATING (type)
9301 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9302 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9303 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9305 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9306 if (tem)
9307 return tem;
9310 if (! FLOAT_TYPE_P (type))
9312 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9313 (plus (plus (mult) (mult)) (foo)) so that we can
9314 take advantage of the factoring cases below. */
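/* For example, ((a*b) + c) + (d*e) is rewritten here as
   ((a*b) + (d*e)) + c, bringing the two MULT_EXPRs together for the
   factoring code.  */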
9315 if (ANY_INTEGRAL_TYPE_P (type)
9316 && TYPE_OVERFLOW_WRAPS (type)
9317 && (((TREE_CODE (arg0) == PLUS_EXPR
9318 || TREE_CODE (arg0) == MINUS_EXPR)
9319 && TREE_CODE (arg1) == MULT_EXPR)
9320 || ((TREE_CODE (arg1) == PLUS_EXPR
9321 || TREE_CODE (arg1) == MINUS_EXPR)
9322 && TREE_CODE (arg0) == MULT_EXPR)))
9324 tree parg0, parg1, parg, marg;
9325 enum tree_code pcode;
9327 if (TREE_CODE (arg1) == MULT_EXPR)
9328 parg = arg0, marg = arg1;
9329 else
9330 parg = arg1, marg = arg0;
9331 pcode = TREE_CODE (parg);
9332 parg0 = TREE_OPERAND (parg, 0);
9333 parg1 = TREE_OPERAND (parg, 1);
9334 STRIP_NOPS (parg0);
9335 STRIP_NOPS (parg1);
9337 if (TREE_CODE (parg0) == MULT_EXPR
9338 && TREE_CODE (parg1) != MULT_EXPR)
9339 return fold_build2_loc (loc, pcode, type,
9340 fold_build2_loc (loc, PLUS_EXPR, type,
9341 fold_convert_loc (loc, type,
9342 parg0),
9343 fold_convert_loc (loc, type,
9344 marg)),
9345 fold_convert_loc (loc, type, parg1));
9346 if (TREE_CODE (parg0) != MULT_EXPR
9347 && TREE_CODE (parg1) == MULT_EXPR)
9348 return
9349 fold_build2_loc (loc, PLUS_EXPR, type,
9350 fold_convert_loc (loc, type, parg0),
9351 fold_build2_loc (loc, pcode, type,
9352 fold_convert_loc (loc, type, marg),
9353 fold_convert_loc (loc, type,
9354 parg1)));
9357 else
9359 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9360 to __complex__ ( x, y ). This is not the same for SNaNs or
9361 if signed zeros are involved. */
9362 if (!HONOR_SNANS (element_mode (arg0))
9363 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9364 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9366 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9367 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9368 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9369 bool arg0rz = false, arg0iz = false;
9370 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9371 || (arg0i && (arg0iz = real_zerop (arg0i))))
9373 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9374 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9375 if (arg0rz && arg1i && real_zerop (arg1i))
9377 tree rp = arg1r ? arg1r
9378 : build1 (REALPART_EXPR, rtype, arg1);
9379 tree ip = arg0i ? arg0i
9380 : build1 (IMAGPART_EXPR, rtype, arg0);
9381 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9383 else if (arg0iz && arg1r && real_zerop (arg1r))
9385 tree rp = arg0r ? arg0r
9386 : build1 (REALPART_EXPR, rtype, arg0);
9387 tree ip = arg1i ? arg1i
9388 : build1 (IMAGPART_EXPR, rtype, arg1);
9389 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9394 if (flag_unsafe_math_optimizations
9395 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9396 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9397 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9398 return tem;
9400 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9401 We associate floats only if the user has specified
9402 -fassociative-math. */
9403 if (flag_associative_math
9404 && TREE_CODE (arg1) == PLUS_EXPR
9405 && TREE_CODE (arg0) != MULT_EXPR)
9407 tree tree10 = TREE_OPERAND (arg1, 0);
9408 tree tree11 = TREE_OPERAND (arg1, 1);
9409 if (TREE_CODE (tree11) == MULT_EXPR
9410 && TREE_CODE (tree10) == MULT_EXPR)
9412 tree tree0;
9413 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9414 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9417 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9418 We associate floats only if the user has specified
9419 -fassociative-math. */
9420 if (flag_associative_math
9421 && TREE_CODE (arg0) == PLUS_EXPR
9422 && TREE_CODE (arg1) != MULT_EXPR)
9424 tree tree00 = TREE_OPERAND (arg0, 0);
9425 tree tree01 = TREE_OPERAND (arg0, 1);
9426 if (TREE_CODE (tree01) == MULT_EXPR
9427 && TREE_CODE (tree00) == MULT_EXPR)
9429 tree tree0;
9430 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9431 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9436 bit_rotate:
9437 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9438 is a rotate of A by C1 bits. */
9439 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9440 is a rotate of A by B bits. */
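/* For example, with a 32-bit unsigned A, (A << 8) + (A >> 24) folds
   to a rotate of A by 8 bits, as does (A << B) + (A >> (32 - B)).  */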
9442 enum tree_code code0, code1;
9443 tree rtype;
9444 code0 = TREE_CODE (arg0);
9445 code1 = TREE_CODE (arg1);
9446 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9447 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9448 && operand_equal_p (TREE_OPERAND (arg0, 0),
9449 TREE_OPERAND (arg1, 0), 0)
9450 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9451 TYPE_UNSIGNED (rtype))
9452 /* Only create rotates in complete modes. Other cases are not
9453 expanded properly. */
9454 && (element_precision (rtype)
9455 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9457 tree tree01, tree11;
9458 enum tree_code code01, code11;
9460 tree01 = TREE_OPERAND (arg0, 1);
9461 tree11 = TREE_OPERAND (arg1, 1);
9462 STRIP_NOPS (tree01);
9463 STRIP_NOPS (tree11);
9464 code01 = TREE_CODE (tree01);
9465 code11 = TREE_CODE (tree11);
9466 if (code01 == INTEGER_CST
9467 && code11 == INTEGER_CST
9468 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9469 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9471 tem = build2_loc (loc, LROTATE_EXPR,
9472 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9473 TREE_OPERAND (arg0, 0),
9474 code0 == LSHIFT_EXPR
9475 ? TREE_OPERAND (arg0, 1)
9476 : TREE_OPERAND (arg1, 1));
9477 return fold_convert_loc (loc, type, tem);
9479 else if (code11 == MINUS_EXPR)
9481 tree tree110, tree111;
9482 tree110 = TREE_OPERAND (tree11, 0);
9483 tree111 = TREE_OPERAND (tree11, 1);
9484 STRIP_NOPS (tree110);
9485 STRIP_NOPS (tree111);
9486 if (TREE_CODE (tree110) == INTEGER_CST
9487 && 0 == compare_tree_int (tree110,
9488 element_precision
9489 (TREE_TYPE (TREE_OPERAND
9490 (arg0, 0))))
9491 && operand_equal_p (tree01, tree111, 0))
9492 return
9493 fold_convert_loc (loc, type,
9494 build2 ((code0 == LSHIFT_EXPR
9495 ? LROTATE_EXPR
9496 : RROTATE_EXPR),
9497 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9498 TREE_OPERAND (arg0, 0),
9499 TREE_OPERAND (arg0, 1)));
9501 else if (code01 == MINUS_EXPR)
9503 tree tree010, tree011;
9504 tree010 = TREE_OPERAND (tree01, 0);
9505 tree011 = TREE_OPERAND (tree01, 1);
9506 STRIP_NOPS (tree010);
9507 STRIP_NOPS (tree011);
9508 if (TREE_CODE (tree010) == INTEGER_CST
9509 && 0 == compare_tree_int (tree010,
9510 element_precision
9511 (TREE_TYPE (TREE_OPERAND
9512 (arg0, 0))))
9513 && operand_equal_p (tree11, tree011, 0))
9514 return fold_convert_loc
9515 (loc, type,
9516 build2 ((code0 != LSHIFT_EXPR
9517 ? LROTATE_EXPR
9518 : RROTATE_EXPR),
9519 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9520 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9525 associate:
9526 /* In most languages, we can't associate operations on floats through
9527 parentheses. Rather than remember where the parentheses were, we
9528 don't associate floats at all, unless the user has specified
9529 -fassociative-math.
9530 And, we need to make sure type is not saturating. */
9532 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9533 && !TYPE_SATURATING (type))
9535 tree var0, con0, lit0, minus_lit0;
9536 tree var1, con1, lit1, minus_lit1;
9537 tree atype = type;
9538 bool ok = true;
9540 /* Split both trees into variables, constants, and literals. Then
9541 associate each group together, the constants with literals,
9542 then the result with variables. This increases the chances of
9543 literals being recombined later and of generating relocatable
9544 expressions for the sum of a constant and literal. */
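/* For example, when folding (x + 1) + (y + 2), x and y are the
   variable parts and 1 and 2 the literals; the pieces recombine as
   (x + y) + 3.  */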
9545 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9546 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9547 code == MINUS_EXPR);
9549 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9550 if (code == MINUS_EXPR)
9551 code = PLUS_EXPR;
9553 /* With undefined overflow prefer doing association in a type
9554 which wraps on overflow, if that is one of the operand types. */
9555 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9556 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9558 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9559 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9560 atype = TREE_TYPE (arg0);
9561 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9562 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9563 atype = TREE_TYPE (arg1);
9564 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9567 /* With undefined overflow we can only associate constants with one
9568 variable, and constants whose association doesn't overflow. */
9569 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9570 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9572 if (var0 && var1)
9574 tree tmp0 = var0;
9575 tree tmp1 = var1;
9576 bool one_neg = false;
9578 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9580 tmp0 = TREE_OPERAND (tmp0, 0);
9581 one_neg = !one_neg;
9583 if (CONVERT_EXPR_P (tmp0)
9584 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9586 <= TYPE_PRECISION (atype)))
9587 tmp0 = TREE_OPERAND (tmp0, 0);
9588 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9590 tmp1 = TREE_OPERAND (tmp1, 0);
9591 one_neg = !one_neg;
9593 if (CONVERT_EXPR_P (tmp1)
9594 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9595 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9596 <= TYPE_PRECISION (atype)))
9597 tmp1 = TREE_OPERAND (tmp1, 0);
9598 /* The only case we can still associate with two variables
9599 is if they cancel out. */
9600 if (!one_neg
9601 || !operand_equal_p (tmp0, tmp1, 0))
9602 ok = false;
9606 /* Only do something if we found more than two objects. Otherwise,
9607 nothing has changed and we risk infinite recursion. */
9608 if (ok
9609 && (2 < ((var0 != 0) + (var1 != 0)
9610 + (con0 != 0) + (con1 != 0)
9611 + (lit0 != 0) + (lit1 != 0)
9612 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9614 bool any_overflows = false;
9615 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9616 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9617 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9618 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9619 var0 = associate_trees (loc, var0, var1, code, atype);
9620 con0 = associate_trees (loc, con0, con1, code, atype);
9621 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9622 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9623 code, atype);
9625 /* Preserve the MINUS_EXPR if the negative part of the literal is
9626 greater than the positive part. Otherwise, the multiplicative
9627 folding code (i.e. extract_muldiv) may be fooled in case
9628 unsigned constants are subtracted, like in the following
9629 example: ((X*2 + 4) - 8U)/2. */
9630 if (minus_lit0 && lit0)
9632 if (TREE_CODE (lit0) == INTEGER_CST
9633 && TREE_CODE (minus_lit0) == INTEGER_CST
9634 && tree_int_cst_lt (lit0, minus_lit0))
9636 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9637 MINUS_EXPR, atype);
9638 lit0 = 0;
9640 else
9642 lit0 = associate_trees (loc, lit0, minus_lit0,
9643 MINUS_EXPR, atype);
9644 minus_lit0 = 0;
9648 /* Don't introduce overflows through reassociation. */
9649 if (!any_overflows
9650 && ((lit0 && TREE_OVERFLOW_P (lit0))
9651 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9652 return NULL_TREE;
9654 if (minus_lit0)
9656 if (con0 == 0)
9657 return
9658 fold_convert_loc (loc, type,
9659 associate_trees (loc, var0, minus_lit0,
9660 MINUS_EXPR, atype));
9661 else
9663 con0 = associate_trees (loc, con0, minus_lit0,
9664 MINUS_EXPR, atype);
9665 return
9666 fold_convert_loc (loc, type,
9667 associate_trees (loc, var0, con0,
9668 PLUS_EXPR, atype));
9672 con0 = associate_trees (loc, con0, lit0, code, atype);
9673 return
9674 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9675 code, atype));
9679 return NULL_TREE;
9681 case MINUS_EXPR:
9682 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9683 if (TREE_CODE (arg0) == NEGATE_EXPR
9684 && negate_expr_p (arg1)
9685 && reorder_operands_p (arg0, arg1))
9686 return fold_build2_loc (loc, MINUS_EXPR, type,
9687 fold_convert_loc (loc, type,
9688 negate_expr (arg1)),
9689 fold_convert_loc (loc, type,
9690 TREE_OPERAND (arg0, 0)));
9692 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9693 __complex__ ( x, -y ). This is not the same for SNaNs or if
9694 signed zeros are involved. */
9695 if (!HONOR_SNANS (element_mode (arg0))
9696 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9697 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9699 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9700 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9701 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9702 bool arg0rz = false, arg0iz = false;
9703 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9704 || (arg0i && (arg0iz = real_zerop (arg0i))))
9706 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9707 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9708 if (arg0rz && arg1i && real_zerop (arg1i))
9710 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9711 arg1r ? arg1r
9712 : build1 (REALPART_EXPR, rtype, arg1));
9713 tree ip = arg0i ? arg0i
9714 : build1 (IMAGPART_EXPR, rtype, arg0);
9715 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9717 else if (arg0iz && arg1r && real_zerop (arg1r))
9719 tree rp = arg0r ? arg0r
9720 : build1 (REALPART_EXPR, rtype, arg0);
9721 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9722 arg1i ? arg1i
9723 : build1 (IMAGPART_EXPR, rtype, arg1));
9724 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9729 /* A - B -> A + (-B) if B is easily negatable. */
9730 if (negate_expr_p (arg1)
9731 && !TYPE_OVERFLOW_SANITIZED (type)
9732 && ((FLOAT_TYPE_P (type)
9733 /* Avoid this transformation if B is a positive REAL_CST. */
9734 && (TREE_CODE (arg1) != REAL_CST
9735 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9736 || INTEGRAL_TYPE_P (type)))
9737 return fold_build2_loc (loc, PLUS_EXPR, type,
9738 fold_convert_loc (loc, type, arg0),
9739 fold_convert_loc (loc, type,
9740 negate_expr (arg1)));
9742 /* Fold &a[i] - &a[j] to i-j. */
9743 if (TREE_CODE (arg0) == ADDR_EXPR
9744 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9745 && TREE_CODE (arg1) == ADDR_EXPR
9746 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9748 tree tem = fold_addr_of_array_ref_difference (loc, type,
9749 TREE_OPERAND (arg0, 0),
9750 TREE_OPERAND (arg1, 0));
9751 if (tem)
9752 return tem;
9755 if (FLOAT_TYPE_P (type)
9756 && flag_unsafe_math_optimizations
9757 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9758 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9759 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9760 return tem;
9762 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9763 one. Make sure the type is not saturating and has the signedness of
9764 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9765 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9766 if ((TREE_CODE (arg0) == MULT_EXPR
9767 || TREE_CODE (arg1) == MULT_EXPR)
9768 && !TYPE_SATURATING (type)
9769 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9770 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9771 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9773 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9774 if (tem)
9775 return tem;
9778 goto associate;
9780 case MULT_EXPR:
9781 if (! FLOAT_TYPE_P (type))
9783 /* Transform x * -C into -x * C if x is easily negatable. */
9784 if (TREE_CODE (arg1) == INTEGER_CST
9785 && tree_int_cst_sgn (arg1) == -1
9786 && negate_expr_p (arg0)
9787 && (tem = negate_expr (arg1)) != arg1
9788 && !TREE_OVERFLOW (tem))
9789 return fold_build2_loc (loc, MULT_EXPR, type,
9790 fold_convert_loc (loc, type,
9791 negate_expr (arg0)),
9792 tem);
9794 /* (A + A) * C -> A * 2 * C */
9795 if (TREE_CODE (arg0) == PLUS_EXPR
9796 && TREE_CODE (arg1) == INTEGER_CST
9797 && operand_equal_p (TREE_OPERAND (arg0, 0),
9798 TREE_OPERAND (arg0, 1), 0))
9799 return fold_build2_loc (loc, MULT_EXPR, type,
9800 omit_one_operand_loc (loc, type,
9801 TREE_OPERAND (arg0, 0),
9802 TREE_OPERAND (arg0, 1)),
9803 fold_build2_loc (loc, MULT_EXPR, type,
9804 build_int_cst (type, 2) , arg1));
9806 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9807 sign-changing only. */
9808 if (TREE_CODE (arg1) == INTEGER_CST
9809 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9810 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9811 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9813 strict_overflow_p = false;
9814 if (TREE_CODE (arg1) == INTEGER_CST
9815 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9816 &strict_overflow_p)))
9818 if (strict_overflow_p)
9819 fold_overflow_warning (("assuming signed overflow does not "
9820 "occur when simplifying "
9821 "multiplication"),
9822 WARN_STRICT_OVERFLOW_MISC);
9823 return fold_convert_loc (loc, type, tem);
9826 /* Optimize z * conj(z) for integer complex numbers. */
9827 if (TREE_CODE (arg0) == CONJ_EXPR
9828 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9829 return fold_mult_zconjz (loc, type, arg1);
9830 if (TREE_CODE (arg1) == CONJ_EXPR
9831 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9832 return fold_mult_zconjz (loc, type, arg0);
9834 else
9836 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9837 This is not the same for NaNs or if signed zeros are
9838 involved. */
9839 if (!HONOR_NANS (arg0)
9840 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9841 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9842 && TREE_CODE (arg1) == COMPLEX_CST
9843 && real_zerop (TREE_REALPART (arg1)))
9845 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9846 if (real_onep (TREE_IMAGPART (arg1)))
9847 return
9848 fold_build2_loc (loc, COMPLEX_EXPR, type,
9849 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9850 rtype, arg0)),
9851 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9852 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9853 return
9854 fold_build2_loc (loc, COMPLEX_EXPR, type,
9855 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9856 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9857 rtype, arg0)));
9860 /* Optimize z * conj(z) for floating point complex numbers.
9861 Guarded by flag_unsafe_math_optimizations as non-finite
9862 imaginary components don't produce scalar results. */
9863 if (flag_unsafe_math_optimizations
9864 && TREE_CODE (arg0) == CONJ_EXPR
9865 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9866 return fold_mult_zconjz (loc, type, arg1);
9867 if (flag_unsafe_math_optimizations
9868 && TREE_CODE (arg1) == CONJ_EXPR
9869 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9870 return fold_mult_zconjz (loc, type, arg0);
9872 if (flag_unsafe_math_optimizations)
9875 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9876 if (!in_gimple_form
9877 && optimize
9878 && operand_equal_p (arg0, arg1, 0))
9880 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9882 if (powfn)
9884 tree arg = build_real (type, dconst2);
9885 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9890 goto associate;
9892 case BIT_IOR_EXPR:
9893 /* Canonicalize (X & C1) | C2. */
9894 if (TREE_CODE (arg0) == BIT_AND_EXPR
9895 && TREE_CODE (arg1) == INTEGER_CST
9896 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9898 int width = TYPE_PRECISION (type), w;
9899 wide_int c1 = TREE_OPERAND (arg0, 1);
9900 wide_int c2 = arg1;
9902 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9903 if ((c1 & c2) == c1)
9904 return omit_one_operand_loc (loc, type, arg1,
9905 TREE_OPERAND (arg0, 0));
9907 wide_int msk = wi::mask (width, false,
9908 TYPE_PRECISION (TREE_TYPE (arg1)));
9910 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9911 if (msk.and_not (c1 | c2) == 0)
9912 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9913 TREE_OPERAND (arg0, 0), arg1);
9915 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9916 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9917 mode which allows further optimizations. */
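/* For example, (X & 0x3c) | 0x0c becomes (X & 0x30) | 0x0c, while
   (X & 0xff) | 0x0f is left alone because 0xff is the QImode mask
   the loop below detects.  */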
9918 c1 &= msk;
9919 c2 &= msk;
9920 wide_int c3 = c1.and_not (c2);
9921 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9923 wide_int mask = wi::mask (w, false,
9924 TYPE_PRECISION (type));
9925 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9927 c3 = mask;
9928 break;
9932 if (c3 != c1)
9933 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9934 fold_build2_loc (loc, BIT_AND_EXPR, type,
9935 TREE_OPERAND (arg0, 0),
9936 wide_int_to_tree (type,
9937 c3)),
9938 arg1);
9941 /* See if this can be simplified into a rotate first. If that
9942 is unsuccessful, continue in the association code. */
9943 goto bit_rotate;
9945 case BIT_XOR_EXPR:
9946 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9947 if (TREE_CODE (arg0) == BIT_AND_EXPR
9948 && INTEGRAL_TYPE_P (type)
9949 && integer_onep (TREE_OPERAND (arg0, 1))
9950 && integer_onep (arg1))
9951 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9952 build_zero_cst (TREE_TYPE (arg0)));
9954 /* See if this can be simplified into a rotate first. If that
9956 is unsuccessful, continue in the association code. */
9956 goto bit_rotate;
9958 case BIT_AND_EXPR:
9959 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9960 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9961 && INTEGRAL_TYPE_P (type)
9962 && integer_onep (TREE_OPERAND (arg0, 1))
9963 && integer_onep (arg1))
9965 tree tem2;
9966 tem = TREE_OPERAND (arg0, 0);
9967 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9968 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9969 tem, tem2);
9970 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9971 build_zero_cst (TREE_TYPE (tem)));
9973 /* Fold ~X & 1 as (X & 1) == 0. */
9974 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9975 && INTEGRAL_TYPE_P (type)
9976 && integer_onep (arg1))
9978 tree tem2;
9979 tem = TREE_OPERAND (arg0, 0);
9980 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9981 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9982 tem, tem2);
9983 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9984 build_zero_cst (TREE_TYPE (tem)));
9986 /* Fold !X & 1 as X == 0. */
9987 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9988 && integer_onep (arg1))
9990 tem = TREE_OPERAND (arg0, 0);
9991 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9992 build_zero_cst (TREE_TYPE (tem)));
9995 /* Fold (X ^ Y) & Y as ~X & Y. */
9996 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9997 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9999 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10000 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10001 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10002 fold_convert_loc (loc, type, arg1));
10004 /* Fold (X ^ Y) & X as ~Y & X. */
10005 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10006 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10007 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10009 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10010 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10011 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10012 fold_convert_loc (loc, type, arg1));
10014 /* Fold X & (X ^ Y) as X & ~Y. */
10015 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10016 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10018 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10019 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10020 fold_convert_loc (loc, type, arg0),
10021 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10023 /* Fold X & (Y ^ X) as ~Y & X. */
10024 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10026 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10028 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10029 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10030 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10031 fold_convert_loc (loc, type, arg0));
10034 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10035 multiple of 1 << CST. */
10036 if (TREE_CODE (arg1) == INTEGER_CST)
10038 wide_int cst1 = arg1;
10039 wide_int ncst1 = -cst1;
10040 if ((cst1 & ncst1) == ncst1
10041 && multiple_of_p (type, arg0,
10042 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10043 return fold_convert_loc (loc, type, arg0);
10046 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10047 bits from CST2. */
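/* For example, (X * 8) & 7 folds to 0, and (X * 8) & 0x1ff becomes
   (X * 8) & 0x1f8 because the product always has three trailing zero
   bits; (X * 8) & 0xff is left alone since 0xff is a QImode mask
   that may enable other optimizations.  */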
10048 if (TREE_CODE (arg1) == INTEGER_CST
10049 && TREE_CODE (arg0) == MULT_EXPR
10050 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10052 wide_int warg1 = arg1;
10053 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10055 if (masked == 0)
10056 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10057 arg0, arg1);
10058 else if (masked != warg1)
10060 /* Avoid the transform if arg1 is a mask of some
10061 mode which allows further optimizations. */
10062 int pop = wi::popcount (warg1);
10063 if (!(pop >= BITS_PER_UNIT
10064 && exact_log2 (pop) != -1
10065 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10066 return fold_build2_loc (loc, code, type, op0,
10067 wide_int_to_tree (type, masked));
10071 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10072 ((A & N) + B) & M -> (A + B) & M
10073 Similarly if (N & M) == 0,
10074 ((A | N) + B) & M -> (A + B) & M
10075 and for - instead of + (or unary - instead of +)
10076 and/or ^ instead of |.
10077 If B is constant and (B & M) == 0, fold into A & M. */
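/* For example, with M == 0xff: ((A & 0x1ff) + B) & 0xff becomes
   (A + B) & 0xff because 0x1ff covers all of M's bits, and
   ((A | 0x100) + B) & 0xff also becomes (A + B) & 0xff because
   bit 8 cannot affect the low byte of the sum.  */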
10078 if (TREE_CODE (arg1) == INTEGER_CST)
10080 wide_int cst1 = arg1;
10081 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10082 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10083 && (TREE_CODE (arg0) == PLUS_EXPR
10084 || TREE_CODE (arg0) == MINUS_EXPR
10085 || TREE_CODE (arg0) == NEGATE_EXPR)
10086 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10087 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10089 tree pmop[2];
10090 int which = 0;
10091 wide_int cst0;
10093 /* Now we know that arg0 is (C + D) or (C - D) or
10094 -C and arg1 (M) is == (1LL << cst) - 1.
10095 Store C into PMOP[0] and D into PMOP[1]. */
10096 pmop[0] = TREE_OPERAND (arg0, 0);
10097 pmop[1] = NULL;
10098 if (TREE_CODE (arg0) != NEGATE_EXPR)
10100 pmop[1] = TREE_OPERAND (arg0, 1);
10101 which = 1;
10104 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10105 which = -1;
10107 for (; which >= 0; which--)
10108 switch (TREE_CODE (pmop[which]))
10110 case BIT_AND_EXPR:
10111 case BIT_IOR_EXPR:
10112 case BIT_XOR_EXPR:
10113 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10114 != INTEGER_CST)
10115 break;
10116 cst0 = TREE_OPERAND (pmop[which], 1);
10117 cst0 &= cst1;
10118 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10120 if (cst0 != cst1)
10121 break;
10123 else if (cst0 != 0)
10124 break;
10125 /* If C or D is of the form (A & N) where
10126 (N & M) == M, or of the form (A | N) or
10127 (A ^ N) where (N & M) == 0, replace it with A. */
10128 pmop[which] = TREE_OPERAND (pmop[which], 0);
10129 break;
10130 case INTEGER_CST:
10131 /* If C or D is a constant N where (N & M) == 0, it can be
10132 omitted (assumed 0). */
10133 if ((TREE_CODE (arg0) == PLUS_EXPR
10134 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10135 && (cst1 & pmop[which]) == 0)
10136 pmop[which] = NULL;
10137 break;
10138 default:
10139 break;
10142 /* Only build anything new if we optimized one or both arguments
10143 above. */
10144 if (pmop[0] != TREE_OPERAND (arg0, 0)
10145 || (TREE_CODE (arg0) != NEGATE_EXPR
10146 && pmop[1] != TREE_OPERAND (arg0, 1)))
10148 tree utype = TREE_TYPE (arg0);
10149 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10151 /* Perform the operations in a type that has defined
10152 overflow behavior. */
10153 utype = unsigned_type_for (TREE_TYPE (arg0));
10154 if (pmop[0] != NULL)
10155 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10156 if (pmop[1] != NULL)
10157 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10160 if (TREE_CODE (arg0) == NEGATE_EXPR)
10161 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10162 else if (TREE_CODE (arg0) == PLUS_EXPR)
10164 if (pmop[0] != NULL && pmop[1] != NULL)
10165 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10166 pmop[0], pmop[1]);
10167 else if (pmop[0] != NULL)
10168 tem = pmop[0];
10169 else if (pmop[1] != NULL)
10170 tem = pmop[1];
10171 else
10172 return build_int_cst (type, 0);
10174 else if (pmop[0] == NULL)
10175 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10176 else
10177 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10178 pmop[0], pmop[1]);
10179 /* TEM is now the new binary +, - or unary - replacement. */
10180 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10181 fold_convert_loc (loc, utype, arg1));
10182 return fold_convert_loc (loc, type, tem);
10187 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10188 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10189 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10191 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10193 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10194 if (mask == -1)
10195 return
10196 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10199 goto associate;
10201 case RDIV_EXPR:
10202 /* Don't touch a floating-point divide by zero unless the mode
10203 of the constant can represent infinity. */
10204 if (TREE_CODE (arg1) == REAL_CST
10205 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10206 && real_zerop (arg1))
10207 return NULL_TREE;
10209 /* (-A) / (-B) -> A / B */
10210 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10211 return fold_build2_loc (loc, RDIV_EXPR, type,
10212 TREE_OPERAND (arg0, 0),
10213 negate_expr (arg1));
10214 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10215 return fold_build2_loc (loc, RDIV_EXPR, type,
10216 negate_expr (arg0),
10217 TREE_OPERAND (arg1, 0));
10218 return NULL_TREE;
10220 case TRUNC_DIV_EXPR:
10221 /* Fall through */
10223 case FLOOR_DIV_EXPR:
10224 /* Simplify A / (B << N) where A and B are positive and B is
10225 a power of 2, to A >> (N + log2(B)). */
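/* For example, with unsigned A, A / (4 << N) becomes A >> (N + 2).  */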
10226 strict_overflow_p = false;
10227 if (TREE_CODE (arg1) == LSHIFT_EXPR
10228 && (TYPE_UNSIGNED (type)
10229 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10231 tree sval = TREE_OPERAND (arg1, 0);
10232 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10234 tree sh_cnt = TREE_OPERAND (arg1, 1);
10235 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10236 wi::exact_log2 (sval));
10238 if (strict_overflow_p)
10239 fold_overflow_warning (("assuming signed overflow does not "
10240 "occur when simplifying A / (B << N)"),
10241 WARN_STRICT_OVERFLOW_MISC);
10243 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10244 sh_cnt, pow2);
10245 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10246 fold_convert_loc (loc, type, arg0), sh_cnt);
10250 /* Fall through */
10252 case ROUND_DIV_EXPR:
10253 case CEIL_DIV_EXPR:
10254 case EXACT_DIV_EXPR:
10255 if (integer_zerop (arg1))
10256 return NULL_TREE;
10258 /* Convert -A / -B to A / B when the type is signed and overflow is
10259 undefined. */
10260 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10261 && TREE_CODE (arg0) == NEGATE_EXPR
10262 && negate_expr_p (arg1))
10264 if (INTEGRAL_TYPE_P (type))
10265 fold_overflow_warning (("assuming signed overflow does not occur "
10266 "when distributing negation across "
10267 "division"),
10268 WARN_STRICT_OVERFLOW_MISC);
10269 return fold_build2_loc (loc, code, type,
10270 fold_convert_loc (loc, type,
10271 TREE_OPERAND (arg0, 0)),
10272 fold_convert_loc (loc, type,
10273 negate_expr (arg1)));
10275 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10276 && TREE_CODE (arg1) == NEGATE_EXPR
10277 && negate_expr_p (arg0))
10279 if (INTEGRAL_TYPE_P (type))
10280 fold_overflow_warning (("assuming signed overflow does not occur "
10281 "when distributing negation across "
10282 "division"),
10283 WARN_STRICT_OVERFLOW_MISC);
10284 return fold_build2_loc (loc, code, type,
10285 fold_convert_loc (loc, type,
10286 negate_expr (arg0)),
10287 fold_convert_loc (loc, type,
10288 TREE_OPERAND (arg1, 0)));
10291 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10292 operation, EXACT_DIV_EXPR.
10294 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10295 At one time others generated faster code; it's not clear whether they
10296 still do after the last round of changes to the DIV code in expmed.c. */
10297 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10298 && multiple_of_p (type, arg0, arg1))
10299 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10300 fold_convert (type, arg0),
10301 fold_convert (type, arg1));
10303 strict_overflow_p = false;
10304 if (TREE_CODE (arg1) == INTEGER_CST
10305 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10306 &strict_overflow_p)))
10308 if (strict_overflow_p)
10309 fold_overflow_warning (("assuming signed overflow does not occur "
10310 "when simplifying division"),
10311 WARN_STRICT_OVERFLOW_MISC);
10312 return fold_convert_loc (loc, type, tem);
10315 return NULL_TREE;
10317 case CEIL_MOD_EXPR:
10318 case FLOOR_MOD_EXPR:
10319 case ROUND_MOD_EXPR:
10320 case TRUNC_MOD_EXPR:
10321 strict_overflow_p = false;
10322 if (TREE_CODE (arg1) == INTEGER_CST
10323 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10324 &strict_overflow_p)))
10326 if (strict_overflow_p)
10327 fold_overflow_warning (("assuming signed overflow does not occur "
10328 "when simplifying modulus"),
10329 WARN_STRICT_OVERFLOW_MISC);
10330 return fold_convert_loc (loc, type, tem);
10333 return NULL_TREE;
10335 case LROTATE_EXPR:
10336 case RROTATE_EXPR:
10337 case RSHIFT_EXPR:
10338 case LSHIFT_EXPR:
10339 /* Since a negative shift count is not well-defined,
10340 don't try to compute it in the compiler. */
10341 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10342 return NULL_TREE;
10344 prec = element_precision (type);
10346 /* If we have a rotate of a bit operation with the rotate count and
10347 the second operand of the bit operation both constant,
10348 permute the two operations. */
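/* For example, in a 32-bit type, (X & 0xff00) rotated right by 8
   becomes (X rotated right by 8) & 0xff: the constant operand of the
   AND is rotated at compile time.  */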
10349 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10350 && (TREE_CODE (arg0) == BIT_AND_EXPR
10351 || TREE_CODE (arg0) == BIT_IOR_EXPR
10352 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10354 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10355 fold_build2_loc (loc, code, type,
10356 TREE_OPERAND (arg0, 0), arg1),
10357 fold_build2_loc (loc, code, type,
10358 TREE_OPERAND (arg0, 1), arg1));
10360 /* Two consecutive rotates adding up to some integer
10361 multiple of the precision of the type can be ignored. */
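/* For example, in a 32-bit type, rotating right by 20 and then by 12
   leaves the operand unchanged, since 20 + 12 == 32.  */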
10362 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10363 && TREE_CODE (arg0) == RROTATE_EXPR
10364 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10365 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10366 prec) == 0)
10367 return TREE_OPERAND (arg0, 0);
10369 return NULL_TREE;
10371 case MIN_EXPR:
10372 case MAX_EXPR:
10373 goto associate;
10375 case TRUTH_ANDIF_EXPR:
10376 /* Note that the operands of this must be ints
10377 and their values must be 0 or 1.
10378 ("true" is a fixed value perhaps depending on the language.) */
10379 /* If first arg is constant zero, return it. */
10380 if (integer_zerop (arg0))
10381 return fold_convert_loc (loc, type, arg0);
10382 case TRUTH_AND_EXPR:
10383 /* If either arg is constant true, drop it. */
10384 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10385 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10386 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10387 /* Preserve sequence points. */
10388 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10389 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10390 /* If second arg is constant zero, result is zero, but first arg
10391 must be evaluated. */
10392 if (integer_zerop (arg1))
10393 return omit_one_operand_loc (loc, type, arg1, arg0);
10394 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10395 case will be handled here. */
10396 if (integer_zerop (arg0))
10397 return omit_one_operand_loc (loc, type, arg0, arg1);
10399 /* !X && X is always false. */
10400 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10401 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10402 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10403 /* X && !X is always false. */
10404 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10405 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10406 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10408 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10409 means A >= Y && A != MAX, but in this case we know that
10410 A < X <= MAX. */
10412 if (!TREE_SIDE_EFFECTS (arg0)
10413 && !TREE_SIDE_EFFECTS (arg1))
10415 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10416 if (tem && !operand_equal_p (tem, arg0, 0))
10417 return fold_build2_loc (loc, code, type, tem, arg1);
10419 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10420 if (tem && !operand_equal_p (tem, arg1, 0))
10421 return fold_build2_loc (loc, code, type, arg0, tem);
10424 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10425 != NULL_TREE)
10426 return tem;
10428 return NULL_TREE;
10430 case TRUTH_ORIF_EXPR:
10431 /* Note that the operands of this must be ints
10432 and their values must be 0 or true.
10433 ("true" is a fixed value perhaps depending on the language.) */
10434 /* If first arg is constant true, return it. */
10435 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10436 return fold_convert_loc (loc, type, arg0);
10437 case TRUTH_OR_EXPR:
10438 /* If either arg is constant zero, drop it. */
10439 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10440 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10441 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10442 /* Preserve sequence points. */
10443 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10444 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10445 /* If second arg is constant true, result is true, but we must
10446 evaluate first arg. */
10447 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10448 return omit_one_operand_loc (loc, type, arg1, arg0);
10449 /* Likewise for first arg, but note this only occurs here for
10450 TRUTH_OR_EXPR. */
10451 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10452 return omit_one_operand_loc (loc, type, arg0, arg1);
10454 /* !X || X is always true. */
10455 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10456 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10457 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10458 /* X || !X is always true. */
10459 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10460 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10461 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10463 /* (X && !Y) || (!X && Y) is X ^ Y */
10464 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10465 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10467 tree a0, a1, l0, l1, n0, n1;
10469 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10470 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10472 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10473 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10475 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10476 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10478 if ((operand_equal_p (n0, a0, 0)
10479 && operand_equal_p (n1, a1, 0))
10480 || (operand_equal_p (n0, a1, 0)
10481 && operand_equal_p (n1, a0, 0)))
10482 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10485 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10486 != NULL_TREE)
10487 return tem;
10489 return NULL_TREE;
10491 case TRUTH_XOR_EXPR:
10492 /* If the second arg is constant zero, drop it. */
10493 if (integer_zerop (arg1))
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10495 /* If the second arg is constant true, this is a logical inversion. */
10496 if (integer_onep (arg1))
10498 tem = invert_truthvalue_loc (loc, arg0);
10499 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10501 /* Identical arguments cancel to zero. */
10502 if (operand_equal_p (arg0, arg1, 0))
10503 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10505 /* !X ^ X is always true. */
10506 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10507 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10508 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10510 /* X ^ !X is always true. */
10511 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10512 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10513 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10515 return NULL_TREE;
10517 case EQ_EXPR:
10518 case NE_EXPR:
10519 STRIP_NOPS (arg0);
10520 STRIP_NOPS (arg1);
10522 tem = fold_comparison (loc, code, type, op0, op1);
10523 if (tem != NULL_TREE)
10524 return tem;
10526 /* bool_var != 1 becomes !bool_var. */
10527 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10528 && code == NE_EXPR)
10529 return fold_convert_loc (loc, type,
10530 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10531 TREE_TYPE (arg0), arg0));
10533 /* bool_var == 0 becomes !bool_var. */
10534 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10535 && code == EQ_EXPR)
10536 return fold_convert_loc (loc, type,
10537 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10538 TREE_TYPE (arg0), arg0));
10540 /* !exp != 0 becomes !exp */
10541 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10542 && code == NE_EXPR)
10543 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10545 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10546 if ((TREE_CODE (arg0) == PLUS_EXPR
10547 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10548 || TREE_CODE (arg0) == MINUS_EXPR)
10549 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10550 0)),
10551 arg1, 0)
10552 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10553 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10555 tree val = TREE_OPERAND (arg0, 1);
10556 return omit_two_operands_loc (loc, type,
10557 fold_build2_loc (loc, code, type,
10558 val,
10559 build_int_cst (TREE_TYPE (val),
10560 0)),
10561 TREE_OPERAND (arg0, 0), arg1);
10564 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
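/* When C is odd, C - X == X would require C == 2*X modulo the type
   precision, which is impossible because 2*X is always even; so the
   comparison folds to a constant.  */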
10565 if (TREE_CODE (arg0) == MINUS_EXPR
10566 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10567 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10568 1)),
10569 arg1, 0)
10570 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10572 return omit_two_operands_loc (loc, type,
10573 code == NE_EXPR
10574 ? boolean_true_node : boolean_false_node,
10575 TREE_OPERAND (arg0, 1), arg1);
10578 /* If this is an EQ or NE comparison with zero and ARG0 is
10579 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10580 two operations, but the latter can be done in one less insn
10581 on machines that have only two-operand insns or on which a
10582 constant cannot be the first operand. */
10583 if (TREE_CODE (arg0) == BIT_AND_EXPR
10584 && integer_zerop (arg1))
10586 tree arg00 = TREE_OPERAND (arg0, 0);
10587 tree arg01 = TREE_OPERAND (arg0, 1);
10588 if (TREE_CODE (arg00) == LSHIFT_EXPR
10589 && integer_onep (TREE_OPERAND (arg00, 0)))
10591 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10592 arg01, TREE_OPERAND (arg00, 1));
10593 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10594 build_int_cst (TREE_TYPE (arg0), 1));
10595 return fold_build2_loc (loc, code, type,
10596 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10597 arg1);
10599 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10600 && integer_onep (TREE_OPERAND (arg01, 0)))
10602 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10603 arg00, TREE_OPERAND (arg01, 1));
10604 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10605 build_int_cst (TREE_TYPE (arg0), 1));
10606 return fold_build2_loc (loc, code, type,
10607 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10608 arg1);
10612 /* If this is an NE or EQ comparison of zero against the result of a
10613 signed MOD operation whose second operand is a power of 2, make
10614 the MOD operation unsigned since it is simpler and equivalent. */
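/* For example, the test X % 16 == 0 with signed X becomes
   (unsigned) X % 16 == 0, and the unsigned modulus by a power of two
   is just a mask.  */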
10615 if (integer_zerop (arg1)
10616 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10617 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10618 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10619 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10620 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10621 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10623 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10624 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10625 fold_convert_loc (loc, newtype,
10626 TREE_OPERAND (arg0, 0)),
10627 fold_convert_loc (loc, newtype,
10628 TREE_OPERAND (arg0, 1)));
10630 return fold_build2_loc (loc, code, type, newmod,
10631 fold_convert_loc (loc, newtype, arg1));
10634 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10635 C1 is a valid shift constant, and C2 is a power of two, i.e.
10636 a single bit. */
10637 if (TREE_CODE (arg0) == BIT_AND_EXPR
10638 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10639 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10640 == INTEGER_CST
10641 && integer_pow2p (TREE_OPERAND (arg0, 1))
10642 && integer_zerop (arg1))
10644 tree itype = TREE_TYPE (arg0);
10645 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10646 prec = TYPE_PRECISION (itype);
10648 /* Check for a valid shift count. */
10649 if (wi::ltu_p (arg001, prec))
10651 tree arg01 = TREE_OPERAND (arg0, 1);
10652 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10653 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10654 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10655 can be rewritten as (X & (C2 << C1)) != 0. */
10656 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10658 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10659 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10660 return fold_build2_loc (loc, code, type, tem,
10661 fold_convert_loc (loc, itype, arg1));
10663 /* Otherwise, for signed (arithmetic) shifts,
10664 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10665 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10666 else if (!TYPE_UNSIGNED (itype))
10667 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10668 arg000, build_int_cst (itype, 0));
10669 /* Otherwise, for unsigned (logical) shifts,
10670 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10671 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10672 else
10673 return omit_one_operand_loc (loc, type,
10674 code == EQ_EXPR ? integer_one_node
10675 : integer_zero_node,
10676 arg000);
10680 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10681 Similarly for NE_EXPR. */
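/* For example, (X & 12) == 5 folds to 0: bit 0 of 5 lies outside the
   mask 12, so the equality can never hold.  */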
10682 if (TREE_CODE (arg0) == BIT_AND_EXPR
10683 && TREE_CODE (arg1) == INTEGER_CST
10684 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10686 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10687 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10688 TREE_OPERAND (arg0, 1));
10689 tree dandnotc
10690 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10691 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10692 notc);
10693 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10694 if (integer_nonzerop (dandnotc))
10695 return omit_one_operand_loc (loc, type, rslt, arg0);
10698 /* If this is a comparison of a field, we may be able to simplify it. */
10699 if ((TREE_CODE (arg0) == COMPONENT_REF
10700 || TREE_CODE (arg0) == BIT_FIELD_REF)
10701 /* Handle the constant case even without -O
10702 to make sure the warnings are given. */
10703 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10705 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10706 if (t1)
10707 return t1;
10710 /* Optimize comparisons of strlen vs zero to a compare of the
10711 first character of the string vs zero. To wit,
10712 strlen(ptr) == 0 => *ptr == 0
10713 strlen(ptr) != 0 => *ptr != 0
10714 Other cases should reduce to one of these two (or a constant)
10715 due to the return value of strlen being unsigned. */
10716 if (TREE_CODE (arg0) == CALL_EXPR
10717 && integer_zerop (arg1))
10719 tree fndecl = get_callee_fndecl (arg0);
10721 if (fndecl
10722 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10723 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10724 && call_expr_nargs (arg0) == 1
10725 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10727 tree iref = build_fold_indirect_ref_loc (loc,
10728 CALL_EXPR_ARG (arg0, 0));
10729 return fold_build2_loc (loc, code, type, iref,
10730 build_int_cst (TREE_TYPE (iref), 0));
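/* For illustration:

       strlen (p) == 0   -->   *p == 0
       strlen (p) != 0   -->   *p != 0

   avoiding the library call when only emptiness is being tested.  */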
10734 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10735 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10736 if (TREE_CODE (arg0) == RSHIFT_EXPR
10737 && integer_zerop (arg1)
10738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10740 tree arg00 = TREE_OPERAND (arg0, 0);
10741 tree arg01 = TREE_OPERAND (arg0, 1);
10742 tree itype = TREE_TYPE (arg00);
10743 if (wi::eq_p (arg01, element_precision (itype) - 1))
10745 if (TYPE_UNSIGNED (itype))
10747 itype = signed_type_for (itype);
10748 arg00 = fold_convert_loc (loc, itype, arg00);
10750 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10751 type, arg00, build_zero_cst (itype));
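/* For illustration, with a signed 32-bit int x:

       (x >> 31) != 0   -->   x < 0
       (x >> 31) == 0   -->   x >= 0

   For unsigned x the same fold applies after first converting x to the
   corresponding signed type.  */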
10755 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10756 (X & C) == 0 when C is a single bit. */
10757 if (TREE_CODE (arg0) == BIT_AND_EXPR
10758 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10759 && integer_zerop (arg1)
10760 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10762 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10763 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10764 TREE_OPERAND (arg0, 1));
10765 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10766 type, tem,
10767 fold_convert_loc (loc, TREE_TYPE (arg0),
10768 arg1));
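/* For illustration:  (~x & 8) == 0  -->  (x & 8) != 0, since ~x has
   bit 3 clear exactly when x has bit 3 set.  */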
10771 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10772 constant C is a power of two, i.e. a single bit. */
10773 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10774 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10775 && integer_zerop (arg1)
10776 && integer_pow2p (TREE_OPERAND (arg0, 1))
10777 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10778 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10780 tree arg00 = TREE_OPERAND (arg0, 0);
10781 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10782 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10785 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10786 when C is a power of two, i.e. a single bit. */
10787 if (TREE_CODE (arg0) == BIT_AND_EXPR
10788 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10789 && integer_zerop (arg1)
10790 && integer_pow2p (TREE_OPERAND (arg0, 1))
10791 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10792 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10794 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10795 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10796 arg000, TREE_OPERAND (arg0, 1));
10797 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10798 tem, build_int_cst (TREE_TYPE (tem), 0));
10801 if (integer_zerop (arg1)
10802 && tree_expr_nonzero_p (arg0))
10804 tree res = constant_boolean_node (code == NE_EXPR, type);
10805 return omit_one_operand_loc (loc, type, res, arg0);
10808 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10809 if (TREE_CODE (arg0) == BIT_AND_EXPR
10810 && TREE_CODE (arg1) == BIT_AND_EXPR)
10812 tree arg00 = TREE_OPERAND (arg0, 0);
10813 tree arg01 = TREE_OPERAND (arg0, 1);
10814 tree arg10 = TREE_OPERAND (arg1, 0);
10815 tree arg11 = TREE_OPERAND (arg1, 1);
10816 tree itype = TREE_TYPE (arg0);
10818 if (operand_equal_p (arg01, arg11, 0))
10819 return fold_build2_loc (loc, code, type,
10820 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10821 fold_build2_loc (loc,
10822 BIT_XOR_EXPR, itype,
10823 arg00, arg10),
10824 arg01),
10825 build_zero_cst (itype));
10827 if (operand_equal_p (arg01, arg10, 0))
10828 return fold_build2_loc (loc, code, type,
10829 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10830 fold_build2_loc (loc,
10831 BIT_XOR_EXPR, itype,
10832 arg00, arg11),
10833 arg01),
10834 build_zero_cst (itype));
10836 if (operand_equal_p (arg00, arg11, 0))
10837 return fold_build2_loc (loc, code, type,
10838 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10839 fold_build2_loc (loc,
10840 BIT_XOR_EXPR, itype,
10841 arg01, arg10),
10842 arg00),
10843 build_zero_cst (itype));
10845 if (operand_equal_p (arg00, arg10, 0))
10846 return fold_build2_loc (loc, code, type,
10847 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10848 fold_build2_loc (loc,
10849 BIT_XOR_EXPR, itype,
10850 arg01, arg11),
10851 arg00),
10852 build_zero_cst (itype));
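/* For illustration:

       (x & m) == (y & m)   -->   ((x ^ y) & m) == 0

   The XOR cancels the bits on which x and y agree, so only masked bits
   where they differ survive.  */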
10855 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10856 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10858 tree arg00 = TREE_OPERAND (arg0, 0);
10859 tree arg01 = TREE_OPERAND (arg0, 1);
10860 tree arg10 = TREE_OPERAND (arg1, 0);
10861 tree arg11 = TREE_OPERAND (arg1, 1);
10862 tree itype = TREE_TYPE (arg0);
10864 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10865 operand_equal_p guarantees no side-effects so we don't need
10866 to use omit_one_operand on Z. */
10867 if (operand_equal_p (arg01, arg11, 0))
10868 return fold_build2_loc (loc, code, type, arg00,
10869 fold_convert_loc (loc, TREE_TYPE (arg00),
10870 arg10));
10871 if (operand_equal_p (arg01, arg10, 0))
10872 return fold_build2_loc (loc, code, type, arg00,
10873 fold_convert_loc (loc, TREE_TYPE (arg00),
10874 arg11));
10875 if (operand_equal_p (arg00, arg11, 0))
10876 return fold_build2_loc (loc, code, type, arg01,
10877 fold_convert_loc (loc, TREE_TYPE (arg01),
10878 arg10));
10879 if (operand_equal_p (arg00, arg10, 0))
10880 return fold_build2_loc (loc, code, type, arg01,
10881 fold_convert_loc (loc, TREE_TYPE (arg01),
10882 arg11));
10884 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10885 if (TREE_CODE (arg01) == INTEGER_CST
10886 && TREE_CODE (arg11) == INTEGER_CST)
10888 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10889 fold_convert_loc (loc, itype, arg11));
10890 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10891 return fold_build2_loc (loc, code, type, tem,
10892 fold_convert_loc (loc, itype, arg10));
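/* For illustration:  (x ^ 5) == (y ^ 3)  -->  (x ^ (5 ^ 3)) == y,
   i.e. (x ^ 6) == y, saving one XOR at run time.  */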
10896 /* Attempt to simplify equality/inequality comparisons of complex
10897 values. Only lower the comparison if the result is known or
10898 can be simplified to a single scalar comparison. */
10899 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10900 || TREE_CODE (arg0) == COMPLEX_CST)
10901 && (TREE_CODE (arg1) == COMPLEX_EXPR
10902 || TREE_CODE (arg1) == COMPLEX_CST))
10904 tree real0, imag0, real1, imag1;
10905 tree rcond, icond;
10907 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10909 real0 = TREE_OPERAND (arg0, 0);
10910 imag0 = TREE_OPERAND (arg0, 1);
10912 else
10914 real0 = TREE_REALPART (arg0);
10915 imag0 = TREE_IMAGPART (arg0);
10918 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10920 real1 = TREE_OPERAND (arg1, 0);
10921 imag1 = TREE_OPERAND (arg1, 1);
10923 else
10925 real1 = TREE_REALPART (arg1);
10926 imag1 = TREE_IMAGPART (arg1);
10929 rcond = fold_binary_loc (loc, code, type, real0, real1);
10930 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10932 if (integer_zerop (rcond))
10934 if (code == EQ_EXPR)
10935 return omit_two_operands_loc (loc, type, boolean_false_node,
10936 imag0, imag1);
10937 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10939 else
10941 if (code == NE_EXPR)
10942 return omit_two_operands_loc (loc, type, boolean_true_node,
10943 imag0, imag1);
10944 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10948 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10949 if (icond && TREE_CODE (icond) == INTEGER_CST)
10951 if (integer_zerop (icond))
10953 if (code == EQ_EXPR)
10954 return omit_two_operands_loc (loc, type, boolean_false_node,
10955 real0, real1);
10956 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10958 else
10960 if (code == NE_EXPR)
10961 return omit_two_operands_loc (loc, type, boolean_true_node,
10962 real0, real1);
10963 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10968 return NULL_TREE;
10970 case LT_EXPR:
10971 case GT_EXPR:
10972 case LE_EXPR:
10973 case GE_EXPR:
10974 tem = fold_comparison (loc, code, type, op0, op1);
10975 if (tem != NULL_TREE)
10976 return tem;
10978 /* Transform comparisons of the form X +- C CMP X. */
10979 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10980 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10981 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10982 && !HONOR_SNANS (arg0))
10983 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10984 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10986 tree arg01 = TREE_OPERAND (arg0, 1);
10987 enum tree_code code0 = TREE_CODE (arg0);
10988 int is_positive;
10990 if (TREE_CODE (arg01) == REAL_CST)
10991 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10992 else
10993 is_positive = tree_int_cst_sgn (arg01);
10995 /* (X - c) > X becomes false. */
10996 if (code == GT_EXPR
10997 && ((code0 == MINUS_EXPR && is_positive >= 0)
10998 || (code0 == PLUS_EXPR && is_positive <= 0)))
11000 if (TREE_CODE (arg01) == INTEGER_CST
11001 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11002 fold_overflow_warning (("assuming signed overflow does not "
11003 "occur when assuming that (X - c) > X "
11004 "is always false"),
11005 WARN_STRICT_OVERFLOW_ALL);
11006 return constant_boolean_node (0, type);
11009 /* Likewise (X + c) < X becomes false. */
11010 if (code == LT_EXPR
11011 && ((code0 == PLUS_EXPR && is_positive >= 0)
11012 || (code0 == MINUS_EXPR && is_positive <= 0)))
11014 if (TREE_CODE (arg01) == INTEGER_CST
11015 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11016 fold_overflow_warning (("assuming signed overflow does not "
11017 "occur when assuming that "
11018 "(X + c) < X is always false"),
11019 WARN_STRICT_OVERFLOW_ALL);
11020 return constant_boolean_node (0, type);
11023 /* Convert (X - c) <= X to true. */
11024 if (!HONOR_NANS (arg1)
11025 && code == LE_EXPR
11026 && ((code0 == MINUS_EXPR && is_positive >= 0)
11027 || (code0 == PLUS_EXPR && is_positive <= 0)))
11029 if (TREE_CODE (arg01) == INTEGER_CST
11030 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11031 fold_overflow_warning (("assuming signed overflow does not "
11032 "occur when assuming that "
11033 "(X - c) <= X is always true"),
11034 WARN_STRICT_OVERFLOW_ALL);
11035 return constant_boolean_node (1, type);
11038 /* Convert (X + c) >= X to true. */
11039 if (!HONOR_NANS (arg1)
11040 && code == GE_EXPR
11041 && ((code0 == PLUS_EXPR && is_positive >= 0)
11042 || (code0 == MINUS_EXPR && is_positive <= 0)))
11044 if (TREE_CODE (arg01) == INTEGER_CST
11045 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11046 fold_overflow_warning (("assuming signed overflow does not "
11047 "occur when assuming that "
11048 "(X + c) >= X is always true"),
11049 WARN_STRICT_OVERFLOW_ALL);
11050 return constant_boolean_node (1, type);
11053 if (TREE_CODE (arg01) == INTEGER_CST)
11055 /* Convert X + c > X and X - c < X to true for integers. */
11056 if (code == GT_EXPR
11057 && ((code0 == PLUS_EXPR && is_positive > 0)
11058 || (code0 == MINUS_EXPR && is_positive < 0)))
11060 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11061 fold_overflow_warning (("assuming signed overflow does "
11062 "not occur when assuming that "
11063 "(X + c) > X is always true"),
11064 WARN_STRICT_OVERFLOW_ALL);
11065 return constant_boolean_node (1, type);
11068 if (code == LT_EXPR
11069 && ((code0 == MINUS_EXPR && is_positive > 0)
11070 || (code0 == PLUS_EXPR && is_positive < 0)))
11072 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11073 fold_overflow_warning (("assuming signed overflow does "
11074 "not occur when assuming that "
11075 "(X - c) < X is always true"),
11076 WARN_STRICT_OVERFLOW_ALL);
11077 return constant_boolean_node (1, type);
11080 /* Convert X + c <= X and X - c >= X to false for integers. */
11081 if (code == LE_EXPR
11082 && ((code0 == PLUS_EXPR && is_positive > 0)
11083 || (code0 == MINUS_EXPR && is_positive < 0)))
11085 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11086 fold_overflow_warning (("assuming signed overflow does "
11087 "not occur when assuming that "
11088 "(X + c) <= X is always false"),
11089 WARN_STRICT_OVERFLOW_ALL);
11090 return constant_boolean_node (0, type);
11093 if (code == GE_EXPR
11094 && ((code0 == MINUS_EXPR && is_positive > 0)
11095 || (code0 == PLUS_EXPR && is_positive < 0)))
11097 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11098 fold_overflow_warning (("assuming signed overflow does "
11099 "not occur when assuming that "
11100 "(X - c) >= X is always false"),
11101 WARN_STRICT_OVERFLOW_ALL);
11102 return constant_boolean_node (0, type);
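/* For illustration, when signed overflow is undefined (plain int
   without -fwrapv):

       x + 1 > x    -->   true
       x - 1 < x    -->   true
       x + 1 <= x   -->   false
       x - 1 >= x   -->   false

   each optionally flagged by a -Wstrict-overflow warning.  */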
11107 /* If we are comparing an ABS_EXPR with a constant, we can
11108 convert all the cases into explicit comparisons, but they may
11109 well not be faster than doing the ABS and one comparison.
11110 But ABS (X) <= C is a range comparison, which becomes a subtraction
11111 and a comparison, and is probably faster. */
11112 if (code == LE_EXPR
11113 && TREE_CODE (arg1) == INTEGER_CST
11114 && TREE_CODE (arg0) == ABS_EXPR
11115 && ! TREE_SIDE_EFFECTS (arg0)
11116 && (0 != (tem = negate_expr (arg1)))
11117 && TREE_CODE (tem) == INTEGER_CST
11118 && !TREE_OVERFLOW (tem))
11119 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11120 build2 (GE_EXPR, type,
11121 TREE_OPERAND (arg0, 0), tem),
11122 build2 (LE_EXPR, type,
11123 TREE_OPERAND (arg0, 0), arg1));
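/* For illustration:

       abs (x) <= 5   -->   x >= -5 && x <= 5

   a range test that later folding can turn into a single unsigned
   comparison such as (unsigned) x + 5 <= 10.  */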
11125 /* Convert ABS_EXPR<x> >= 0 to true. */
11126 strict_overflow_p = false;
11127 if (code == GE_EXPR
11128 && (integer_zerop (arg1)
11129 || (! HONOR_NANS (arg0)
11130 && real_zerop (arg1)))
11131 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11133 if (strict_overflow_p)
11134 fold_overflow_warning (("assuming signed overflow does not occur "
11135 "when simplifying comparison of "
11136 "absolute value and zero"),
11137 WARN_STRICT_OVERFLOW_CONDITIONAL);
11138 return omit_one_operand_loc (loc, type,
11139 constant_boolean_node (true, type),
11140 arg0);
11143 /* Convert ABS_EXPR<x> < 0 to false. */
11144 strict_overflow_p = false;
11145 if (code == LT_EXPR
11146 && (integer_zerop (arg1) || real_zerop (arg1))
11147 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11149 if (strict_overflow_p)
11150 fold_overflow_warning (("assuming signed overflow does not occur "
11151 "when simplifying comparison of "
11152 "absolute value and zero"),
11153 WARN_STRICT_OVERFLOW_CONDITIONAL);
11154 return omit_one_operand_loc (loc, type,
11155 constant_boolean_node (false, type),
11156 arg0);
11159 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11160 and similarly for >= into !=. */
11161 if ((code == LT_EXPR || code == GE_EXPR)
11162 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11163 && TREE_CODE (arg1) == LSHIFT_EXPR
11164 && integer_onep (TREE_OPERAND (arg1, 0)))
11165 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11166 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11167 TREE_OPERAND (arg1, 1)),
11168 build_zero_cst (TREE_TYPE (arg0)));
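/* For illustration, with unsigned x:

       x < (1u << y)    -->   (x >> y) == 0
       x >= (1u << y)   -->   (x >> y) != 0  */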
11170 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11171 otherwise Y might be >= # of bits in X's type and thus e.g.
11172 (unsigned char) (1 << Y) for Y 15 might be 0.
11173 If the cast is widening, then 1 << Y should have unsigned type,
11174 otherwise if Y is number of bits in the signed shift type minus 1,
11175 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11176 31 might be 0xffffffff80000000. */
11177 if ((code == LT_EXPR || code == GE_EXPR)
11178 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11179 && CONVERT_EXPR_P (arg1)
11180 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11181 && (element_precision (TREE_TYPE (arg1))
11182 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11183 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11184 || (element_precision (TREE_TYPE (arg1))
11185 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11186 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11188 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11189 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11190 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11191 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11192 build_zero_cst (TREE_TYPE (arg0)));
11195 return NULL_TREE;
11197 case UNORDERED_EXPR:
11198 case ORDERED_EXPR:
11199 case UNLT_EXPR:
11200 case UNLE_EXPR:
11201 case UNGT_EXPR:
11202 case UNGE_EXPR:
11203 case UNEQ_EXPR:
11204 case LTGT_EXPR:
11205 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11207 tree targ0 = strip_float_extensions (arg0);
11208 tree targ1 = strip_float_extensions (arg1);
11209 tree newtype = TREE_TYPE (targ0);
11211 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11212 newtype = TREE_TYPE (targ1);
11214 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11215 return fold_build2_loc (loc, code, type,
11216 fold_convert_loc (loc, newtype, targ0),
11217 fold_convert_loc (loc, newtype, targ1));
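/* For illustration, with float f1 and f2, an unordered comparison

       (double) f1 UNLT (double) f2   -->   f1 UNLT f2

   since widening both operands to a common wider format cannot change
   the result of the comparison.  */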
11220 return NULL_TREE;
11222 case COMPOUND_EXPR:
11223 /* When pedantic, a compound expression can be neither an lvalue
11224 nor an integer constant expression. */
11225 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11226 return NULL_TREE;
11227 /* Don't let (0, 0) be a null pointer constant. */
11228 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11229 : fold_convert_loc (loc, type, arg1);
11230 return pedantic_non_lvalue_loc (loc, tem);
11232 case ASSERT_EXPR:
11233 /* An ASSERT_EXPR should never be passed to fold_binary. */
11234 gcc_unreachable ();
11236 default:
11237 return NULL_TREE;
11238 } /* switch (code) */
11241 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11242 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11243 of GOTO_EXPR. */
11245 static tree
11246 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11248 switch (TREE_CODE (*tp))
11250 case LABEL_EXPR:
11251 return *tp;
11253 case GOTO_EXPR:
11254 *walk_subtrees = 0;
11256 /* ... fall through ... */
11258 default:
11259 return NULL_TREE;
11263 /* Return whether the sub-tree ST contains a label which is accessible from
11264 outside the sub-tree. */
11266 static bool
11267 contains_label_p (tree st)
11269 return
11270 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11273 /* Fold a ternary expression of code CODE and type TYPE with operands
11274 OP0, OP1, and OP2. Return the folded expression if folding is
11275 successful. Otherwise, return NULL_TREE. */
11277 tree
11278 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11279 tree op0, tree op1, tree op2)
11281 tree tem;
11282 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11283 enum tree_code_class kind = TREE_CODE_CLASS (code);
11285 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11286 && TREE_CODE_LENGTH (code) == 3);
11288 /* If this is a commutative operation, and OP0 is a constant, move it
11289 to OP1 to reduce the number of tests below. */
11290 if (commutative_ternary_tree_code (code)
11291 && tree_swap_operands_p (op0, op1, true))
11292 return fold_build3_loc (loc, code, type, op1, op0, op2);
11294 tem = generic_simplify (loc, code, type, op0, op1, op2);
11295 if (tem)
11296 return tem;
11298 /* Strip any conversions that don't change the mode. This is safe
11299 for every expression, except for a comparison expression because
11300 its signedness is derived from its operands. So, in the latter
11301 case, only strip conversions that don't change the signedness.
11303 Note that this is done as an internal manipulation within the
11304 constant folder, in order to find the simplest representation of
11305 the arguments so that their form can be studied. In any cases,
11306 the appropriate type conversions should be put back in the tree
11307 that will get out of the constant folder. */
11308 if (op0)
11310 arg0 = op0;
11311 STRIP_NOPS (arg0);
11314 if (op1)
11316 arg1 = op1;
11317 STRIP_NOPS (arg1);
11320 if (op2)
11322 arg2 = op2;
11323 STRIP_NOPS (arg2);
11326 switch (code)
11328 case COMPONENT_REF:
11329 if (TREE_CODE (arg0) == CONSTRUCTOR
11330 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11332 unsigned HOST_WIDE_INT idx;
11333 tree field, value;
11334 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11335 if (field == arg1)
11336 return value;
11338 return NULL_TREE;
11340 case COND_EXPR:
11341 case VEC_COND_EXPR:
11342 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11343 so all simple results must be passed through pedantic_non_lvalue. */
11344 if (TREE_CODE (arg0) == INTEGER_CST)
11346 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11347 tem = integer_zerop (arg0) ? op2 : op1;
11348 /* Only optimize constant conditions when the selected branch
11349 has the same type as the COND_EXPR. This avoids optimizing
11350 away "c ? x : throw", where the throw has a void type.
11351 Avoid throwing away the operand that contains a label. */
11352 if ((!TREE_SIDE_EFFECTS (unused_op)
11353 || !contains_label_p (unused_op))
11354 && (! VOID_TYPE_P (TREE_TYPE (tem))
11355 || VOID_TYPE_P (type)))
11356 return pedantic_non_lvalue_loc (loc, tem);
11357 return NULL_TREE;
11359 else if (TREE_CODE (arg0) == VECTOR_CST)
11361 if ((TREE_CODE (arg1) == VECTOR_CST
11362 || TREE_CODE (arg1) == CONSTRUCTOR)
11363 && (TREE_CODE (arg2) == VECTOR_CST
11364 || TREE_CODE (arg2) == CONSTRUCTOR))
11366 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11367 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11368 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11369 for (i = 0; i < nelts; i++)
11371 tree val = VECTOR_CST_ELT (arg0, i);
11372 if (integer_all_onesp (val))
11373 sel[i] = i;
11374 else if (integer_zerop (val))
11375 sel[i] = nelts + i;
11376 else /* Currently unreachable. */
11377 return NULL_TREE;
11379 tree t = fold_vec_perm (type, arg1, arg2, sel);
11380 if (t != NULL_TREE)
11381 return t;
11385 /* If we have A op B ? A : C, we may be able to convert this to a
11386 simpler expression, depending on the operation and the values
11387 of B and C. Signed zeros prevent all of these transformations,
11388 for reasons given above each one.
11390 Also try swapping the arguments and inverting the conditional. */
11391 if (COMPARISON_CLASS_P (arg0)
11392 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11393 arg1, TREE_OPERAND (arg0, 1))
11394 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11396 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11397 if (tem)
11398 return tem;
11401 if (COMPARISON_CLASS_P (arg0)
11402 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11403 op2,
11404 TREE_OPERAND (arg0, 1))
11405 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11407 location_t loc0 = expr_location_or (arg0, loc);
11408 tem = fold_invert_truthvalue (loc0, arg0);
11409 if (tem && COMPARISON_CLASS_P (tem))
11411 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11412 if (tem)
11413 return tem;
11417 /* If the second operand is simpler than the third, swap them
11418 since that produces better jump optimization results. */
11419 if (truth_value_p (TREE_CODE (arg0))
11420 && tree_swap_operands_p (op1, op2, false))
11422 location_t loc0 = expr_location_or (arg0, loc);
11423 /* See if this can be inverted. If it can't, possibly because
11424 it was a floating-point inequality comparison, don't do
11425 anything. */
11426 tem = fold_invert_truthvalue (loc0, arg0);
11427 if (tem)
11428 return fold_build3_loc (loc, code, type, tem, op2, op1);
11431 /* Convert A ? 1 : 0 to simply A. */
11432 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11433 : (integer_onep (op1)
11434 && !VECTOR_TYPE_P (type)))
11435 && integer_zerop (op2)
11436 /* If we try to convert OP0 to our type, the
11437 call to fold will try to move the conversion inside
11438 a COND, which will recurse. In that case, the COND_EXPR
11439 is probably the best choice, so leave it alone. */
11440 && type == TREE_TYPE (arg0))
11441 return pedantic_non_lvalue_loc (loc, arg0);
11443 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11444 over COND_EXPR in cases such as floating point comparisons. */
11445 if (integer_zerop (op1)
11446 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11447 : (integer_onep (op2)
11448 && !VECTOR_TYPE_P (type)))
11449 && truth_value_p (TREE_CODE (arg0)))
11450 return pedantic_non_lvalue_loc (loc,
11451 fold_convert_loc (loc, type,
11452 invert_truthvalue_loc (loc,
11453 arg0)));
11455 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11456 if (TREE_CODE (arg0) == LT_EXPR
11457 && integer_zerop (TREE_OPERAND (arg0, 1))
11458 && integer_zerop (op2)
11459 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11461 /* sign_bit_p looks through both zero and sign extensions,
11462 but for this optimization only sign extensions are
11463 usable. */
11464 tree tem2 = TREE_OPERAND (arg0, 0);
11465 while (tem != tem2)
11467 if (TREE_CODE (tem2) != NOP_EXPR
11468 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11470 tem = NULL_TREE;
11471 break;
11473 tem2 = TREE_OPERAND (tem2, 0);
11475 /* sign_bit_p only checks ARG1 bits within A's precision.
11476 If <sign bit of A> has wider type than A, bits outside
11477 of A's precision in <sign bit of A> need to be checked.
11478 If they are all 0, this optimization needs to be done
11479 in unsigned A's type; if they are all 1, in signed A's type;
11480 otherwise this can't be done. */
11481 if (tem
11482 && TYPE_PRECISION (TREE_TYPE (tem))
11483 < TYPE_PRECISION (TREE_TYPE (arg1))
11484 && TYPE_PRECISION (TREE_TYPE (tem))
11485 < TYPE_PRECISION (type))
11487 int inner_width, outer_width;
11488 tree tem_type;
11490 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11491 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11492 if (outer_width > TYPE_PRECISION (type))
11493 outer_width = TYPE_PRECISION (type);
11495 wide_int mask = wi::shifted_mask
11496 (inner_width, outer_width - inner_width, false,
11497 TYPE_PRECISION (TREE_TYPE (arg1)));
11499 wide_int common = mask & arg1;
11500 if (common == mask)
11502 tem_type = signed_type_for (TREE_TYPE (tem));
11503 tem = fold_convert_loc (loc, tem_type, tem);
11505 else if (common == 0)
11507 tem_type = unsigned_type_for (TREE_TYPE (tem));
11508 tem = fold_convert_loc (loc, tem_type, tem);
11510 else
11511 tem = NULL;
11514 if (tem)
11515 return
11516 fold_convert_loc (loc, type,
11517 fold_build2_loc (loc, BIT_AND_EXPR,
11518 TREE_TYPE (tem), tem,
11519 fold_convert_loc (loc,
11520 TREE_TYPE (tem),
11521 arg1)));
11524 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11525 already handled above. */
11526 if (TREE_CODE (arg0) == BIT_AND_EXPR
11527 && integer_onep (TREE_OPERAND (arg0, 1))
11528 && integer_zerop (op2)
11529 && integer_pow2p (arg1))
11531 tree tem = TREE_OPERAND (arg0, 0);
11532 STRIP_NOPS (tem);
11533 if (TREE_CODE (tem) == RSHIFT_EXPR
11534 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11535 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11536 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11537 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11538 TREE_OPERAND (tem, 0), arg1);
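/* For illustration:

       ((x >> 4) & 1) ? 16 : 0   -->   x & 16

   The conditional merely reproduces bit 4 of x in place.  */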
11541 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11542 is probably obsolete because the first operand should be a
11543 truth value (that's why we have the two cases above), but let's
11544 leave it in until we can confirm this for all front-ends. */
11545 if (integer_zerop (op2)
11546 && TREE_CODE (arg0) == NE_EXPR
11547 && integer_zerop (TREE_OPERAND (arg0, 1))
11548 && integer_pow2p (arg1)
11549 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11550 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11551 arg1, OEP_ONLY_CONST))
11552 return pedantic_non_lvalue_loc (loc,
11553 fold_convert_loc (loc, type,
11554 TREE_OPERAND (arg0, 0)));
11556 /* Disable the transformations below for vectors, since
11557 fold_binary_op_with_conditional_arg may undo them immediately,
11558 yielding an infinite loop. */
11559 if (code == VEC_COND_EXPR)
11560 return NULL_TREE;
11562 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11563 if (integer_zerop (op2)
11564 && truth_value_p (TREE_CODE (arg0))
11565 && truth_value_p (TREE_CODE (arg1))
11566 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11567 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11568 : TRUTH_ANDIF_EXPR,
11569 type, fold_convert_loc (loc, type, arg0), arg1);
11571 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11572 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11573 && truth_value_p (TREE_CODE (arg0))
11574 && truth_value_p (TREE_CODE (arg1))
11575 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11577 location_t loc0 = expr_location_or (arg0, loc);
11578 /* Only perform transformation if ARG0 is easily inverted. */
11579 tem = fold_invert_truthvalue (loc0, arg0);
11580 if (tem)
11581 return fold_build2_loc (loc, code == VEC_COND_EXPR
11582 ? BIT_IOR_EXPR
11583 : TRUTH_ORIF_EXPR,
11584 type, fold_convert_loc (loc, type, tem),
11585 arg1);
11588 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11589 if (integer_zerop (arg1)
11590 && truth_value_p (TREE_CODE (arg0))
11591 && truth_value_p (TREE_CODE (op2))
11592 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11594 location_t loc0 = expr_location_or (arg0, loc);
11595 /* Only perform transformation if ARG0 is easily inverted. */
11596 tem = fold_invert_truthvalue (loc0, arg0);
11597 if (tem)
11598 return fold_build2_loc (loc, code == VEC_COND_EXPR
11599 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11600 type, fold_convert_loc (loc, type, tem),
11601 op2);
11604 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11605 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11606 && truth_value_p (TREE_CODE (arg0))
11607 && truth_value_p (TREE_CODE (op2))
11608 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11609 return fold_build2_loc (loc, code == VEC_COND_EXPR
11610 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11611 type, fold_convert_loc (loc, type, arg0), op2);
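/* For illustration, with truth values a and b:

       a ? b : 0   -->   a && b
       a ? b : 1   -->   !a || b
       a ? 0 : b   -->   !a && b
       a ? 1 : b   -->   a || b

   (For VEC_COND_EXPR the bitwise forms & and | are used instead.)  */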
11613 return NULL_TREE;
11615 case CALL_EXPR:
11616 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11617 of fold_ternary on them. */
11618 gcc_unreachable ();
11620 case BIT_FIELD_REF:
11621 if ((TREE_CODE (arg0) == VECTOR_CST
11622 || (TREE_CODE (arg0) == CONSTRUCTOR
11623 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11624 && (type == TREE_TYPE (TREE_TYPE (arg0))
11625 || (TREE_CODE (type) == VECTOR_TYPE
11626 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11628 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11629 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11630 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11631 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11633 if (n != 0
11634 && (idx % width) == 0
11635 && (n % width) == 0
11636 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11638 idx = idx / width;
11639 n = n / width;
11641 if (TREE_CODE (arg0) == VECTOR_CST)
11643 if (n == 1)
11644 return VECTOR_CST_ELT (arg0, idx);
11646 tree *vals = XALLOCAVEC (tree, n);
11647 for (unsigned i = 0; i < n; ++i)
11648 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11649 return build_vector (type, vals);
11652 /* Constructor elements can be subvectors. */
11653 unsigned HOST_WIDE_INT k = 1;
11654 if (CONSTRUCTOR_NELTS (arg0) != 0)
11656 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11657 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11658 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11661 /* We keep an exact subset of the constructor elements. */
11662 if ((idx % k) == 0 && (n % k) == 0)
11664 if (CONSTRUCTOR_NELTS (arg0) == 0)
11665 return build_constructor (type, NULL);
11666 idx /= k;
11667 n /= k;
11668 if (n == 1)
11670 if (idx < CONSTRUCTOR_NELTS (arg0))
11671 return CONSTRUCTOR_ELT (arg0, idx)->value;
11672 return build_zero_cst (type);
11675 vec<constructor_elt, va_gc> *vals;
11676 vec_alloc (vals, n);
11677 for (unsigned i = 0;
11678 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11679 ++i)
11680 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11681 CONSTRUCTOR_ELT
11682 (arg0, idx + i)->value);
11683 return build_constructor (type, vals);
11685 /* The bitfield references a single constructor element. */
11686 else if (idx + n <= (idx / k + 1) * k)
11688 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11689 return build_zero_cst (type);
11690 else if (n == k)
11691 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11692 else
11693 return fold_build3_loc (loc, code, type,
11694 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11695 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11700 /* A bit-field-ref that references the full argument can be stripped. */
11701 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11702 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11703 && integer_zerop (op2))
11704 return fold_convert_loc (loc, type, arg0);
11706 /* On constants we can use native encode/interpret to constant
11707 fold (nearly) all BIT_FIELD_REFs. */
11708 if (CONSTANT_CLASS_P (arg0)
11709 && can_native_interpret_type_p (type)
11710 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11711 /* This limitation should not be necessary, we just need to
11712 round this up to mode size. */
11713 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11714 /* Need bit-shifting of the buffer to relax the following. */
11715 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11717 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11718 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11719 unsigned HOST_WIDE_INT clen;
11720 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11721 /* ??? We cannot tell native_encode_expr to start at
11722 some random byte only. So limit us to a reasonable amount
11723 of work. */
11724 if (clen <= 4096)
11726 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11727 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11728 if (len > 0
11729 && len * BITS_PER_UNIT >= bitpos + bitsize)
11731 tree v = native_interpret_expr (type,
11732 b + bitpos / BITS_PER_UNIT,
11733 bitsize / BITS_PER_UNIT);
11734 if (v)
11735 return v;
11740 return NULL_TREE;
11742 case FMA_EXPR:
11743 /* For integers we can decompose the FMA if possible. */
11744 if (TREE_CODE (arg0) == INTEGER_CST
11745 && TREE_CODE (arg1) == INTEGER_CST)
11746 return fold_build2_loc (loc, PLUS_EXPR, type,
11747 const_binop (MULT_EXPR, arg0, arg1), arg2);
11748 if (integer_zerop (arg2))
11749 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11751 return fold_fma (loc, type, arg0, arg1, arg2);
11753 case VEC_PERM_EXPR:
11754 if (TREE_CODE (arg2) == VECTOR_CST)
11756 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11757 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11758 unsigned char *sel2 = sel + nelts;
11759 bool need_mask_canon = false;
11760 bool need_mask_canon2 = false;
11761 bool all_in_vec0 = true;
11762 bool all_in_vec1 = true;
11763 bool maybe_identity = true;
11764 bool single_arg = (op0 == op1);
11765 bool changed = false;
11767 mask2 = 2 * nelts - 1;
11768 mask = single_arg ? (nelts - 1) : mask2;
11769 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11770 for (i = 0; i < nelts; i++)
11772 tree val = VECTOR_CST_ELT (arg2, i);
11773 if (TREE_CODE (val) != INTEGER_CST)
11774 return NULL_TREE;
11776 /* Make sure that the perm value is in an acceptable
11777 range. */
11778 wide_int t = val;
11779 need_mask_canon |= wi::gtu_p (t, mask);
11780 need_mask_canon2 |= wi::gtu_p (t, mask2);
11781 sel[i] = t.to_uhwi () & mask;
11782 sel2[i] = t.to_uhwi () & mask2;
11784 if (sel[i] < nelts)
11785 all_in_vec1 = false;
11786 else
11787 all_in_vec0 = false;
11789 if ((sel[i] & (nelts-1)) != i)
11790 maybe_identity = false;
11793 if (maybe_identity)
11795 if (all_in_vec0)
11796 return op0;
11797 if (all_in_vec1)
11798 return op1;
11801 if (all_in_vec0)
11802 op1 = op0;
11803 else if (all_in_vec1)
11805 op0 = op1;
11806 for (i = 0; i < nelts; i++)
11807 sel[i] -= nelts;
11808 need_mask_canon = true;
11811 if ((TREE_CODE (op0) == VECTOR_CST
11812 || TREE_CODE (op0) == CONSTRUCTOR)
11813 && (TREE_CODE (op1) == VECTOR_CST
11814 || TREE_CODE (op1) == CONSTRUCTOR))
11816 tree t = fold_vec_perm (type, op0, op1, sel);
11817 if (t != NULL_TREE)
11818 return t;
11821 if (op0 == op1 && !single_arg)
11822 changed = true;
11824 /* Some targets are deficient and fail to expand a single
11825 argument permutation while still allowing an equivalent
11826 2-argument version. */
11827 if (need_mask_canon && arg2 == op2
11828 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11829 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11831 need_mask_canon = need_mask_canon2;
11832 sel = sel2;
11835 if (need_mask_canon && arg2 == op2)
11837 tree *tsel = XALLOCAVEC (tree, nelts);
11838 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11839 for (i = 0; i < nelts; i++)
11840 tsel[i] = build_int_cst (eltype, sel[i]);
11841 op2 = build_vector (TREE_TYPE (arg2), tsel);
11842 changed = true;
11845 if (changed)
11846 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11848 return NULL_TREE;
11850 default:
11851 return NULL_TREE;
11852 } /* switch (code) */
11855 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11856 of an array (or vector). */
11858 tree
11859 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11861 tree index_type = NULL_TREE;
11862 offset_int low_bound = 0;
11864 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11866 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11867 if (domain_type && TYPE_MIN_VALUE (domain_type))
11869 /* Static constructors for variably sized objects make no sense. */
11870 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11871 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11872 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11876 if (index_type)
11877 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11878 TYPE_SIGN (index_type));
11880 offset_int index = low_bound - 1;
11881 if (index_type)
11882 index = wi::ext (index, TYPE_PRECISION (index_type),
11883 TYPE_SIGN (index_type));
11885 offset_int max_index;
11886 unsigned HOST_WIDE_INT cnt;
11887 tree cfield, cval;
11889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11891 /* Array constructor might explicitly set index, or specify a range,
11892 or leave index NULL meaning that it is next index after previous
11893 one. */
11894 if (cfield)
11896 if (TREE_CODE (cfield) == INTEGER_CST)
11897 max_index = index = wi::to_offset (cfield);
11898 else
11900 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11901 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11902 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11905 else
11907 index += 1;
11908 if (index_type)
11909 index = wi::ext (index, TYPE_PRECISION (index_type),
11910 TYPE_SIGN (index_type));
11911 max_index = index;
11914 /* Do we have a match? */
11915 if (wi::cmpu (access_index, index) >= 0
11916 && wi::cmpu (access_index, max_index) <= 0)
11917 return cval;
11919 return NULL_TREE;
11922 /* Perform constant folding and related simplification of EXPR.
11923 The related simplifications include x*1 => x, x*0 => 0, etc.,
11924 and application of the associative law.
11925 NOP_EXPR conversions may be removed freely (as long as we
11926 are careful not to change the type of the overall expression).
11927 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11928 but we can constant-fold them if they have constant operands. */
11930 #ifdef ENABLE_FOLD_CHECKING
11931 # define fold(x) fold_1 (x)
11932 static tree fold_1 (tree);
11933 static
11934 #endif
11935 tree
11936 fold (tree expr)
11938 const tree t = expr;
11939 enum tree_code code = TREE_CODE (t);
11940 enum tree_code_class kind = TREE_CODE_CLASS (code);
11941 tree tem;
11942 location_t loc = EXPR_LOCATION (expr);
11944 /* Return right away if a constant. */
11945 if (kind == tcc_constant)
11946 return t;
11948 /* CALL_EXPR-like objects with variable numbers of operands are
11949 treated specially. */
11950 if (kind == tcc_vl_exp)
11952 if (code == CALL_EXPR)
11954 tem = fold_call_expr (loc, expr, false);
11955 return tem ? tem : expr;
11957 return expr;
11960 if (IS_EXPR_CODE_CLASS (kind))
11962 tree type = TREE_TYPE (t);
11963 tree op0, op1, op2;
11965 switch (TREE_CODE_LENGTH (code))
11967 case 1:
11968 op0 = TREE_OPERAND (t, 0);
11969 tem = fold_unary_loc (loc, code, type, op0);
11970 return tem ? tem : expr;
11971 case 2:
11972 op0 = TREE_OPERAND (t, 0);
11973 op1 = TREE_OPERAND (t, 1);
11974 tem = fold_binary_loc (loc, code, type, op0, op1);
11975 return tem ? tem : expr;
11976 case 3:
11977 op0 = TREE_OPERAND (t, 0);
11978 op1 = TREE_OPERAND (t, 1);
11979 op2 = TREE_OPERAND (t, 2);
11980 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11981 return tem ? tem : expr;
11982 default:
11983 break;
11987 switch (code)
11989 case ARRAY_REF:
11991 tree op0 = TREE_OPERAND (t, 0);
11992 tree op1 = TREE_OPERAND (t, 1);
11994 if (TREE_CODE (op1) == INTEGER_CST
11995 && TREE_CODE (op0) == CONSTRUCTOR
11996 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11998 tree val = get_array_ctor_element_at_index (op0,
11999 wi::to_offset (op1));
12000 if (val)
12001 return val;
12004 return t;
12007 /* Return a VECTOR_CST if possible. */
12008 case CONSTRUCTOR:
12010 tree type = TREE_TYPE (t);
12011 if (TREE_CODE (type) != VECTOR_TYPE)
12012 return t;
12014 unsigned i;
12015 tree val;
12016 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12017 if (! CONSTANT_CLASS_P (val))
12018 return t;
12020 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12023 case CONST_DECL:
12024 return fold (DECL_INITIAL (t));
12026 default:
12027 return t;
12028 } /* switch (code) */
12031 #ifdef ENABLE_FOLD_CHECKING
12032 #undef fold
12034 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12035 hash_table<nofree_ptr_hash<const tree_node> > *);
12036 static void fold_check_failed (const_tree, const_tree);
12037 void print_fold_checksum (const_tree);
12039 /* When --enable-checking=fold is in effect, compute a digest of expr
12040 before and after the actual fold call, to verify that fold did not
12041 accidentally change the original expr. */
12043 tree
12044 fold (tree expr)
12046 tree ret;
12047 struct md5_ctx ctx;
12048 unsigned char checksum_before[16], checksum_after[16];
12049 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12051 md5_init_ctx (&ctx);
12052 fold_checksum_tree (expr, &ctx, &ht);
12053 md5_finish_ctx (&ctx, checksum_before);
12054 ht.empty ();
12056 ret = fold_1 (expr);
12058 md5_init_ctx (&ctx);
12059 fold_checksum_tree (expr, &ctx, &ht);
12060 md5_finish_ctx (&ctx, checksum_after);
12062 if (memcmp (checksum_before, checksum_after, 16))
12063 fold_check_failed (expr, ret);
12065 return ret;
12068 void
12069 print_fold_checksum (const_tree expr)
12071 struct md5_ctx ctx;
12072 unsigned char checksum[16], cnt;
12073 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12075 md5_init_ctx (&ctx);
12076 fold_checksum_tree (expr, &ctx, &ht);
12077 md5_finish_ctx (&ctx, checksum);
12078 for (cnt = 0; cnt < 16; ++cnt)
12079 fprintf (stderr, "%02x", checksum[cnt]);
12080 putc ('\n', stderr);
12083 static void
12084 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12086 internal_error ("fold check: original tree changed by fold");
12089 static void
12090 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12091 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12093 const tree_node **slot;
12094 enum tree_code code;
12095 union tree_node buf;
12096 int i, len;
12098 recursive_label:
12099 if (expr == NULL)
12100 return;
12101 slot = ht->find_slot (expr, INSERT);
12102 if (*slot != NULL)
12103 return;
12104 *slot = expr;
12105 code = TREE_CODE (expr);
12106 if (TREE_CODE_CLASS (code) == tcc_declaration
12107 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12109 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12110 memcpy ((char *) &buf, expr, tree_size (expr));
12111 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12112 buf.decl_with_vis.symtab_node = NULL;
12113 expr = (tree) &buf;
12115 else if (TREE_CODE_CLASS (code) == tcc_type
12116 && (TYPE_POINTER_TO (expr)
12117 || TYPE_REFERENCE_TO (expr)
12118 || TYPE_CACHED_VALUES_P (expr)
12119 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12120 || TYPE_NEXT_VARIANT (expr)))
12122 /* Allow these fields to be modified. */
12123 tree tmp;
12124 memcpy ((char *) &buf, expr, tree_size (expr));
12125 expr = tmp = (tree) &buf;
12126 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12127 TYPE_POINTER_TO (tmp) = NULL;
12128 TYPE_REFERENCE_TO (tmp) = NULL;
12129 TYPE_NEXT_VARIANT (tmp) = NULL;
12130 if (TYPE_CACHED_VALUES_P (tmp))
12132 TYPE_CACHED_VALUES_P (tmp) = 0;
12133 TYPE_CACHED_VALUES (tmp) = NULL;
12136 md5_process_bytes (expr, tree_size (expr), ctx);
12137 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12138 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12139 if (TREE_CODE_CLASS (code) != tcc_type
12140 && TREE_CODE_CLASS (code) != tcc_declaration
12141 && code != TREE_LIST
12142 && code != SSA_NAME
12143 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12144 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12145 switch (TREE_CODE_CLASS (code))
12147 case tcc_constant:
12148 switch (code)
12150 case STRING_CST:
12151 md5_process_bytes (TREE_STRING_POINTER (expr),
12152 TREE_STRING_LENGTH (expr), ctx);
12153 break;
12154 case COMPLEX_CST:
12155 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12156 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12157 break;
12158 case VECTOR_CST:
12159 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12160 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12161 break;
12162 default:
12163 break;
12165 break;
12166 case tcc_exceptional:
12167 switch (code)
12169 case TREE_LIST:
12170 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12171 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12172 expr = TREE_CHAIN (expr);
12173 goto recursive_label;
12174 break;
12175 case TREE_VEC:
12176 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12177 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12178 break;
12179 default:
12180 break;
12182 break;
12183 case tcc_expression:
12184 case tcc_reference:
12185 case tcc_comparison:
12186 case tcc_unary:
12187 case tcc_binary:
12188 case tcc_statement:
12189 case tcc_vl_exp:
12190 len = TREE_OPERAND_LENGTH (expr);
12191 for (i = 0; i < len; ++i)
12192 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12193 break;
12194 case tcc_declaration:
12195 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12196 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12197 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12199 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12200 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12201 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12202 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12203 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12206 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12208 if (TREE_CODE (expr) == FUNCTION_DECL)
12210 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12211 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12213 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12215 break;
12216 case tcc_type:
12217 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12218 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12219 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12220 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12221 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12222 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12223 if (INTEGRAL_TYPE_P (expr)
12224 || SCALAR_FLOAT_TYPE_P (expr))
12226 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12227 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12229 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12230 if (TREE_CODE (expr) == RECORD_TYPE
12231 || TREE_CODE (expr) == UNION_TYPE
12232 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12233 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12234 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12235 break;
12236 default:
12237 break;
12241 /* Helper function for outputting the checksum of a tree T. When
12242 debugging with gdb, you can "define mynext" to be "next" followed
12243 by "call debug_fold_checksum (op0)", then just trace down till the
12244 outputs differ. */
12246 DEBUG_FUNCTION void
12247 debug_fold_checksum (const_tree t)
12249 int i;
12250 unsigned char checksum[16];
12251 struct md5_ctx ctx;
12252 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12254 md5_init_ctx (&ctx);
12255 fold_checksum_tree (t, &ctx, &ht);
12256 md5_finish_ctx (&ctx, checksum);
12257 ht.empty ();
12259 for (i = 0; i < 16; i++)
12260 fprintf (stderr, "%d ", checksum[i]);
12262 fprintf (stderr, "\n");
12265 #endif
12267 /* Fold a unary tree expression with code CODE of type TYPE with an
12268 operand OP0. LOC is the location of the resulting expression.
12269 Return a folded expression if successful. Otherwise, return a tree
12270 expression with code CODE of type TYPE with an operand OP0. */
12272 tree
12273 fold_build1_stat_loc (location_t loc,
12274 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12276 tree tem;
12277 #ifdef ENABLE_FOLD_CHECKING
12278 unsigned char checksum_before[16], checksum_after[16];
12279 struct md5_ctx ctx;
12280 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12282 md5_init_ctx (&ctx);
12283 fold_checksum_tree (op0, &ctx, &ht);
12284 md5_finish_ctx (&ctx, checksum_before);
12285 ht.empty ();
12286 #endif
12288 tem = fold_unary_loc (loc, code, type, op0);
12289 if (!tem)
12290 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12292 #ifdef ENABLE_FOLD_CHECKING
12293 md5_init_ctx (&ctx);
12294 fold_checksum_tree (op0, &ctx, &ht);
12295 md5_finish_ctx (&ctx, checksum_after);
12297 if (memcmp (checksum_before, checksum_after, 16))
12298 fold_check_failed (op0, tem);
12299 #endif
12300 return tem;
12303 /* Fold a binary tree expression with code CODE of type TYPE with
12304 operands OP0 and OP1. LOC is the location of the resulting
12305 expression. Return a folded expression if successful. Otherwise,
12306 return a tree expression with code CODE of type TYPE with operands
12307 OP0 and OP1. */
12309 tree
12310 fold_build2_stat_loc (location_t loc,
12311 enum tree_code code, tree type, tree op0, tree op1
12312 MEM_STAT_DECL)
12314 tree tem;
12315 #ifdef ENABLE_FOLD_CHECKING
12316 unsigned char checksum_before_op0[16],
12317 checksum_before_op1[16],
12318 checksum_after_op0[16],
12319 checksum_after_op1[16];
12320 struct md5_ctx ctx;
12321 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12323 md5_init_ctx (&ctx);
12324 fold_checksum_tree (op0, &ctx, &ht);
12325 md5_finish_ctx (&ctx, checksum_before_op0);
12326 ht.empty ();
12328 md5_init_ctx (&ctx);
12329 fold_checksum_tree (op1, &ctx, &ht);
12330 md5_finish_ctx (&ctx, checksum_before_op1);
12331 ht.empty ();
12332 #endif
12334 tem = fold_binary_loc (loc, code, type, op0, op1);
12335 if (!tem)
12336 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12338 #ifdef ENABLE_FOLD_CHECKING
12339 md5_init_ctx (&ctx);
12340 fold_checksum_tree (op0, &ctx, &ht);
12341 md5_finish_ctx (&ctx, checksum_after_op0);
12342 ht.empty ();
12344 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12345 fold_check_failed (op0, tem);
12347 md5_init_ctx (&ctx);
12348 fold_checksum_tree (op1, &ctx, &ht);
12349 md5_finish_ctx (&ctx, checksum_after_op1);
12351 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12352 fold_check_failed (op1, tem);
12353 #endif
12354 return tem;
12357 /* Fold a ternary tree expression with code CODE of type TYPE with
12358 operands OP0, OP1, and OP2. Return a folded expression if
12359 successful. Otherwise, return a tree expression with code CODE of
12360 type TYPE with operands OP0, OP1, and OP2. */
12362 tree
12363 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12364 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12366 tree tem;
12367 #ifdef ENABLE_FOLD_CHECKING
12368 unsigned char checksum_before_op0[16],
12369 checksum_before_op1[16],
12370 checksum_before_op2[16],
12371 checksum_after_op0[16],
12372 checksum_after_op1[16],
12373 checksum_after_op2[16];
12374 struct md5_ctx ctx;
12375 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12377 md5_init_ctx (&ctx);
12378 fold_checksum_tree (op0, &ctx, &ht);
12379 md5_finish_ctx (&ctx, checksum_before_op0);
12380 ht.empty ();
12382 md5_init_ctx (&ctx);
12383 fold_checksum_tree (op1, &ctx, &ht);
12384 md5_finish_ctx (&ctx, checksum_before_op1);
12385 ht.empty ();
12387 md5_init_ctx (&ctx);
12388 fold_checksum_tree (op2, &ctx, &ht);
12389 md5_finish_ctx (&ctx, checksum_before_op2);
12390 ht.empty ();
12391 #endif
12393 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12394 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12395 if (!tem)
12396 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12398 #ifdef ENABLE_FOLD_CHECKING
12399 md5_init_ctx (&ctx);
12400 fold_checksum_tree (op0, &ctx, &ht);
12401 md5_finish_ctx (&ctx, checksum_after_op0);
12402 ht.empty ();
12404 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12405 fold_check_failed (op0, tem);
12407 md5_init_ctx (&ctx);
12408 fold_checksum_tree (op1, &ctx, &ht);
12409 md5_finish_ctx (&ctx, checksum_after_op1);
12410 ht.empty ();
12412 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12413 fold_check_failed (op1, tem);
12415 md5_init_ctx (&ctx);
12416 fold_checksum_tree (op2, &ctx, &ht);
12417 md5_finish_ctx (&ctx, checksum_after_op2);
12419 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12420 fold_check_failed (op2, tem);
12421 #endif
12422 return tem;
12425 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12426 arguments in ARGARRAY, and a null static chain.
12427 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12428 of type TYPE from the given operands as constructed by build_call_array. */
12430 tree
12431 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12432 int nargs, tree *argarray)
12434 tree tem;
12435 #ifdef ENABLE_FOLD_CHECKING
12436 unsigned char checksum_before_fn[16],
12437 checksum_before_arglist[16],
12438 checksum_after_fn[16],
12439 checksum_after_arglist[16];
12440 struct md5_ctx ctx;
12441 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12442 int i;
12444 md5_init_ctx (&ctx);
12445 fold_checksum_tree (fn, &ctx, &ht);
12446 md5_finish_ctx (&ctx, checksum_before_fn);
12447 ht.empty ();
12449 md5_init_ctx (&ctx);
12450 for (i = 0; i < nargs; i++)
12451 fold_checksum_tree (argarray[i], &ctx, &ht);
12452 md5_finish_ctx (&ctx, checksum_before_arglist);
12453 ht.empty ();
12454 #endif
12456 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12457 if (!tem)
12458 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12460 #ifdef ENABLE_FOLD_CHECKING
12461 md5_init_ctx (&ctx);
12462 fold_checksum_tree (fn, &ctx, &ht);
12463 md5_finish_ctx (&ctx, checksum_after_fn);
12464 ht.empty ();
12466 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12467 fold_check_failed (fn, tem);
12469 md5_init_ctx (&ctx);
12470 for (i = 0; i < nargs; i++)
12471 fold_checksum_tree (argarray[i], &ctx, &ht);
12472 md5_finish_ctx (&ctx, checksum_after_arglist);
12474 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12475 fold_check_failed (NULL_TREE, tem);
12476 #endif
12477 return tem;
12480 /* Perform constant folding and related simplification of initializer
12481 expression EXPR. These behave identically to "fold_buildN" but ignore
12482 potential run-time traps and exceptions that fold must preserve. */
12484 #define START_FOLD_INIT \
12485 int saved_signaling_nans = flag_signaling_nans;\
12486 int saved_trapping_math = flag_trapping_math;\
12487 int saved_rounding_math = flag_rounding_math;\
12488 int saved_trapv = flag_trapv;\
12489 int saved_folding_initializer = folding_initializer;\
12490 flag_signaling_nans = 0;\
12491 flag_trapping_math = 0;\
12492 flag_rounding_math = 0;\
12493 flag_trapv = 0;\
12494 folding_initializer = 1;
12496 #define END_FOLD_INIT \
12497 flag_signaling_nans = saved_signaling_nans;\
12498 flag_trapping_math = saved_trapping_math;\
12499 flag_rounding_math = saved_rounding_math;\
12500 flag_trapv = saved_trapv;\
12501 folding_initializer = saved_folding_initializer;
12503 tree
12504 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12505 tree type, tree op)
12507 tree result;
12508 START_FOLD_INIT;
12510 result = fold_build1_loc (loc, code, type, op);
12512 END_FOLD_INIT;
12513 return result;
12516 tree
12517 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12518 tree type, tree op0, tree op1)
12520 tree result;
12521 START_FOLD_INIT;
12523 result = fold_build2_loc (loc, code, type, op0, op1);
12525 END_FOLD_INIT;
12526 return result;
12529 tree
12530 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12531 int nargs, tree *argarray)
12533 tree result;
12534 START_FOLD_INIT;
12536 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12538 END_FOLD_INIT;
12539 return result;
12542 #undef START_FOLD_INIT
12543 #undef END_FOLD_INIT
12545 /* Determine if the first argument is a multiple of the second argument.
12546 Return 0 if it is not, or if we cannot easily determine that it is.
12548 An example of the sort of thing we care about (at this point; this routine
12549 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12550 fold cases do now) is discovering that
12552 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12554 is a multiple of
12556 SAVE_EXPR (J * 8)
12558 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12560 This code also handles discovering that
12562 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12564 is a multiple of 8 so we don't have to worry about dealing with a
12565 possible remainder.
12567 Note that we *look* inside a SAVE_EXPR only to determine how it was
12568 calculated; it is not safe for fold to do much of anything else with the
12569 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12570 at run time. For example, the latter example above *cannot* be implemented
12571 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12572 evaluation time of the original SAVE_EXPR is not necessarily the same at
12573 the time the new expression is evaluated. The only optimization of this
12574 sort that would be valid is changing
12576 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12578 divided by 8 to
12580 SAVE_EXPR (I) * SAVE_EXPR (J)
12582 (where the same SAVE_EXPR (J) is used in the original and the
12583 transformed version). */
12585 int
12586 multiple_of_p (tree type, const_tree top, const_tree bottom)
12588 if (operand_equal_p (top, bottom, 0))
12589 return 1;
12591 if (TREE_CODE (type) != INTEGER_TYPE)
12592 return 0;
12594 switch (TREE_CODE (top))
12596 case BIT_AND_EXPR:
12597 /* Bitwise and provides a power of two multiple. If the mask is
12598 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12599 if (!integer_pow2p (bottom))
12600 return 0;
12601 /* FALLTHRU */
12603 case MULT_EXPR:
12604 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12605 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12607 case PLUS_EXPR:
12608 case MINUS_EXPR:
12609 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12610 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12612 case LSHIFT_EXPR:
12613 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12615 tree op1, t1;
12617 op1 = TREE_OPERAND (top, 1);
12618 /* const_binop may not detect overflow correctly,
12619 so check for it explicitly here. */
12620 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12621 && 0 != (t1 = fold_convert (type,
12622 const_binop (LSHIFT_EXPR,
12623 size_one_node,
12624 op1)))
12625 && !TREE_OVERFLOW (t1))
12626 return multiple_of_p (type, t1, bottom);
12628 return 0;
12630 case NOP_EXPR:
12631 /* Can't handle conversions from non-integral or wider integral type. */
12632 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12633 || (TYPE_PRECISION (type)
12634 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12635 return 0;
12637 /* .. fall through ... */
12639 case SAVE_EXPR:
12640 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12642 case COND_EXPR:
12643 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12644 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12646 case INTEGER_CST:
12647 if (TREE_CODE (bottom) != INTEGER_CST
12648 || integer_zerop (bottom)
12649 || (TYPE_UNSIGNED (type)
12650 && (tree_int_cst_sgn (top) < 0
12651 || tree_int_cst_sgn (bottom) < 0)))
12652 return 0;
12653 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12654 SIGNED);
12656 default:
12657 return 0;
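/* For illustration: the recursion above, mirrored on a toy expression type
   (hypothetical names, plain C, not GCC trees).  It shows why I * (J * 8)
   is known to be a multiple of 8 even though I and J are unknown.  */
enum toy_code { TOY_VAR, TOY_CST, TOY_MULT, TOY_PLUS };

struct toy_expr
{
  enum toy_code code;
  long cst;				/* valid when code == TOY_CST */
  const struct toy_expr *op0, *op1;	/* valid for TOY_MULT / TOY_PLUS */
};

static int
toy_multiple_of_p (const struct toy_expr *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CST:
      return bottom != 0 && top->cst % bottom == 0;
    case TOY_MULT:
      /* A product is a multiple if either factor is.  */
      return (toy_multiple_of_p (top->op0, bottom)
	      || toy_multiple_of_p (top->op1, bottom));
    case TOY_PLUS:
      /* A sum is a multiple only if both addends are.  */
      return (toy_multiple_of_p (top->op0, bottom)
	      && toy_multiple_of_p (top->op1, bottom));
    default:
      return 0;				/* unknown: be conservative */
    }
}

/* Usage: modeling I * (J * 8) as
     static const struct toy_expr eight = { TOY_CST, 8, 0, 0 };
     static const struct toy_expr i = { TOY_VAR, 0, 0, 0 };
     static const struct toy_expr j = { TOY_VAR, 0, 0, 0 };
     static const struct toy_expr j8 = { TOY_MULT, 0, &j, &eight };
     static const struct toy_expr top = { TOY_MULT, 0, &i, &j8 };
   toy_multiple_of_p (&top, 8) returns 1: the constant factor 8 makes J8 a
   multiple of 8, and any product containing J8 inherits that property.  */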
12661 #define tree_expr_nonnegative_warnv_p(X, Y) \
12662 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12664 #define RECURSE(X) \
12665 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
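/* Note on the two #defines above: any textual call to
   tree_expr_nonnegative_warnv_p in the helpers below expands to a hard
   "GCC error" pragma, so recursion is forced through RECURSE, which
   threads STRICT_OVERFLOW_P and bumps DEPTH.  RECURSE itself still reaches
   the real function because a function-like macro is expanded only when
   its name is immediately followed by a left parenthesis; in
   (tree_expr_nonnegative_warnv_p) (...) the name is followed by ')', so
   it is left alone.  */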
12667 /* Return true if CODE or TYPE is known to be non-negative. */
12669 static bool
12670 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12672 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12673 && truth_value_p (code))
12674 /* Truth values evaluate to 0 or 1, both of which are nonnegative,
12675 unless we have a signed:1 type (where the values are 0 and -1). */
12676 return true;
12677 return false;
12680 /* Return true if (CODE OP0) is known to be non-negative. If the return
12681 value is based on the assumption that signed overflow is undefined,
12682 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12683 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12685 bool
12686 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12687 bool *strict_overflow_p, int depth)
12689 if (TYPE_UNSIGNED (type))
12690 return true;
12692 switch (code)
12694 case ABS_EXPR:
12695 /* We can't return 1 if flag_wrapv is set because
12696 ABS_EXPR<INT_MIN> = INT_MIN. */
12697 if (!ANY_INTEGRAL_TYPE_P (type))
12698 return true;
12699 if (TYPE_OVERFLOW_UNDEFINED (type))
12701 *strict_overflow_p = true;
12702 return true;
12704 break;
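/* Worked example for the guard above: with 32-bit wrapping arithmetic,
   ABS_EXPR <INT_MIN> computes -(-2147483648), which wraps back to
   -2147483648 < 0.  Only when signed overflow is undefined may that case
   be assumed away, and the assumption is recorded in *STRICT_OVERFLOW_P.  */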
12706 case NON_LVALUE_EXPR:
12707 case FLOAT_EXPR:
12708 case FIX_TRUNC_EXPR:
12709 return RECURSE (op0);
12711 CASE_CONVERT:
12713 tree inner_type = TREE_TYPE (op0);
12714 tree outer_type = type;
12716 if (TREE_CODE (outer_type) == REAL_TYPE)
12718 if (TREE_CODE (inner_type) == REAL_TYPE)
12719 return RECURSE (op0);
12720 if (INTEGRAL_TYPE_P (inner_type))
12722 if (TYPE_UNSIGNED (inner_type))
12723 return true;
12724 return RECURSE (op0);
12727 else if (INTEGRAL_TYPE_P (outer_type))
12729 if (TREE_CODE (inner_type) == REAL_TYPE)
12730 return RECURSE (op0);
12731 if (INTEGRAL_TYPE_P (inner_type))
12732 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12733 && TYPE_UNSIGNED (inner_type);
12736 break;
12738 default:
12739 return tree_simple_nonnegative_warnv_p (code, type);
12742 /* We don't know the sign of the result, so be conservative and return false. */
12743 return false;
12746 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12747 value is based on the assumption that signed overflow is undefined,
12748 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12749 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12751 bool
12752 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12753 tree op1, bool *strict_overflow_p,
12754 int depth)
12756 if (TYPE_UNSIGNED (type))
12757 return true;
12759 switch (code)
12761 case POINTER_PLUS_EXPR:
12762 case PLUS_EXPR:
12763 if (FLOAT_TYPE_P (type))
12764 return RECURSE (op0) && RECURSE (op1);
12766 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12767 both unsigned and at least 2 bits shorter than the result. */
12768 if (TREE_CODE (type) == INTEGER_TYPE
12769 && TREE_CODE (op0) == NOP_EXPR
12770 && TREE_CODE (op1) == NOP_EXPR)
12772 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12773 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12774 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12775 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12777 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12778 TYPE_PRECISION (inner2)) + 1;
12779 return prec < TYPE_PRECISION (type);
12782 break;
12784 case MULT_EXPR:
12785 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12787 /* x * x is always non-negative for floating point x
12788 or without overflow. */
12789 if (operand_equal_p (op0, op1, 0)
12790 || (RECURSE (op0) && RECURSE (op1)))
12792 if (ANY_INTEGRAL_TYPE_P (type)
12793 && TYPE_OVERFLOW_UNDEFINED (type))
12794 *strict_overflow_p = true;
12795 return true;
12799 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12800 both unsigned and the sum of their precisions is less than the result's. */
12801 if (TREE_CODE (type) == INTEGER_TYPE
12802 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12803 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12805 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12806 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12807 : TREE_TYPE (op0);
12808 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12809 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12810 : TREE_TYPE (op1);
12812 bool unsigned0 = TYPE_UNSIGNED (inner0);
12813 bool unsigned1 = TYPE_UNSIGNED (inner1);
12815 if (TREE_CODE (op0) == INTEGER_CST)
12816 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12818 if (TREE_CODE (op1) == INTEGER_CST)
12819 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12821 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12822 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12824 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12825 ? tree_int_cst_min_precision (op0, UNSIGNED)
12826 : TYPE_PRECISION (inner0);
12828 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12829 ? tree_int_cst_min_precision (op1, UNSIGNED)
12830 : TYPE_PRECISION (inner1);
12832 return precision0 + precision1 < TYPE_PRECISION (type);
12835 return false;
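/* Worked examples for the two zero-extension rules above, taking 8-bit
   inner operands: for PLUS, 255 + 255 = 510 < 2^9, so any signed result
   type with precision greater than 9 (e.g. a 16-bit int) can never see a
   negative sum; for MULT, 255 * 255 = 65025 < 2^16, so the rule demands
   precision0 + precision1 = 16 be strictly less than the result precision
   (a 32-bit int qualifies, a 16-bit int does not).  */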
12837 case BIT_AND_EXPR:
12838 case MAX_EXPR:
12839 return RECURSE (op0) || RECURSE (op1);
12841 case BIT_IOR_EXPR:
12842 case BIT_XOR_EXPR:
12843 case MIN_EXPR:
12844 case RDIV_EXPR:
12845 case TRUNC_DIV_EXPR:
12846 case CEIL_DIV_EXPR:
12847 case FLOOR_DIV_EXPR:
12848 case ROUND_DIV_EXPR:
12849 return RECURSE (op0) && RECURSE (op1);
12851 case TRUNC_MOD_EXPR:
12852 return RECURSE (op0);
12854 case FLOOR_MOD_EXPR:
12855 return RECURSE (op1);
12857 case CEIL_MOD_EXPR:
12858 case ROUND_MOD_EXPR:
12859 default:
12860 return tree_simple_nonnegative_warnv_p (code, type);
12863 /* We don't know the sign of the result, so be conservative and return false. */
12864 return false;
12867 /* Return true if T is known to be non-negative. If the return
12868 value is based on the assumption that signed overflow is undefined,
12869 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12870 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12872 bool
12873 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12875 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12876 return true;
12878 switch (TREE_CODE (t))
12880 case INTEGER_CST:
12881 return tree_int_cst_sgn (t) >= 0;
12883 case REAL_CST:
12884 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12886 case FIXED_CST:
12887 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12889 case COND_EXPR:
12890 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12892 case SSA_NAME:
12893 /* Limit the depth of recursion to avoid quadratic behavior.
12894 This is expected to catch almost all occurrences in practice.
12895 If this code misses important cases that unbounded recursion
12896 would not, passes that need this information could be revised
12897 to provide it through dataflow propagation. */
12898 return (!name_registered_for_update_p (t)
12899 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12900 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12901 strict_overflow_p, depth));
12903 default:
12904 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12908 /* Return true if T is known to be non-negative. If the return
12909 value is based on the assumption that signed overflow is undefined,
12910 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12911 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12913 bool
12914 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12915 bool *strict_overflow_p, int depth)
12917 switch (fn)
12919 CASE_CFN_ACOS:
12920 CASE_CFN_ACOSH:
12921 CASE_CFN_CABS:
12922 CASE_CFN_COSH:
12923 CASE_CFN_ERFC:
12924 CASE_CFN_EXP:
12925 CASE_CFN_EXP10:
12926 CASE_CFN_EXP2:
12927 CASE_CFN_FABS:
12928 CASE_CFN_FDIM:
12929 CASE_CFN_HYPOT:
12930 CASE_CFN_POW10:
12931 CASE_CFN_FFS:
12932 CASE_CFN_PARITY:
12933 CASE_CFN_POPCOUNT:
12934 CASE_CFN_CLZ:
12935 CASE_CFN_CLRSB:
12936 case CFN_BUILT_IN_BSWAP32:
12937 case CFN_BUILT_IN_BSWAP64:
12938 /* Always true. */
12939 return true;
12941 CASE_CFN_SQRT:
12942 /* sqrt(-0.0) is -0.0. */
12943 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12944 return true;
12945 return RECURSE (arg0);
12947 CASE_CFN_ASINH:
12948 CASE_CFN_ATAN:
12949 CASE_CFN_ATANH:
12950 CASE_CFN_CBRT:
12951 CASE_CFN_CEIL:
12952 CASE_CFN_ERF:
12953 CASE_CFN_EXPM1:
12954 CASE_CFN_FLOOR:
12955 CASE_CFN_FMOD:
12956 CASE_CFN_FREXP:
12957 CASE_CFN_ICEIL:
12958 CASE_CFN_IFLOOR:
12959 CASE_CFN_IRINT:
12960 CASE_CFN_IROUND:
12961 CASE_CFN_LCEIL:
12962 CASE_CFN_LDEXP:
12963 CASE_CFN_LFLOOR:
12964 CASE_CFN_LLCEIL:
12965 CASE_CFN_LLFLOOR:
12966 CASE_CFN_LLRINT:
12967 CASE_CFN_LLROUND:
12968 CASE_CFN_LRINT:
12969 CASE_CFN_LROUND:
12970 CASE_CFN_MODF:
12971 CASE_CFN_NEARBYINT:
12972 CASE_CFN_RINT:
12973 CASE_CFN_ROUND:
12974 CASE_CFN_SCALB:
12975 CASE_CFN_SCALBLN:
12976 CASE_CFN_SCALBN:
12977 CASE_CFN_SIGNBIT:
12978 CASE_CFN_SIGNIFICAND:
12979 CASE_CFN_SINH:
12980 CASE_CFN_TANH:
12981 CASE_CFN_TRUNC:
12982 /* True if the 1st argument is nonnegative. */
12983 return RECURSE (arg0);
12985 CASE_CFN_FMAX:
12986 /* True if the 1st OR 2nd arguments are nonnegative. */
12987 return RECURSE (arg0) || RECURSE (arg1);
12989 CASE_CFN_FMIN:
12990 /* True if the 1st AND 2nd arguments are nonnegative. */
12991 return RECURSE (arg0) && RECURSE (arg1);
12993 CASE_CFN_COPYSIGN:
12994 /* True if the 2nd argument is nonnegative. */
12995 return RECURSE (arg1);
12997 CASE_CFN_POWI:
12998 /* True if the 1st argument is nonnegative or the second
12999 argument is an even integer. */
13000 if (TREE_CODE (arg1) == INTEGER_CST
13001 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13002 return true;
13003 return RECURSE (arg0);
13005 CASE_CFN_POW:
13006 /* True if the 1st argument is nonnegative or the second
13007 argument is an even integer-valued real. */
13008 if (TREE_CODE (arg1) == REAL_CST)
13010 REAL_VALUE_TYPE c;
13011 HOST_WIDE_INT n;
13013 c = TREE_REAL_CST (arg1);
13014 n = real_to_integer (&c);
13015 if ((n & 1) == 0)
13017 REAL_VALUE_TYPE cint;
13018 real_from_integer (&cint, VOIDmode, n, SIGNED);
13019 if (real_identical (&c, &cint))
13020 return true;
13023 return RECURSE (arg0);
13025 default:
13026 break;
13028 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
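/* For illustration: the sign rules encoded above, observable in plain C
   (a standalone sketch using only <math.h>; not GCC internals).  */
#include <math.h>

static void
toy_sign_rules (void)
{
  double r;
  r = fmax (-3.0, 2.0);		/* 2.0: nonnegative if either arg is */
  r = fmin (3.0, 2.0);		/* 2.0: nonnegative only if both are */
  r = copysign (3.0, 2.0);	/* 3.0: the sign comes from the 2nd arg */
  r = pow (-3.0, 2.0);		/* 9.0: even integer exponent => >= 0 */
  (void) r;
}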
13031 /* Return true if T is known to be non-negative. If the return
13032 value is based on the assumption that signed overflow is undefined,
13033 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13034 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13036 static bool
13037 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13039 enum tree_code code = TREE_CODE (t);
13040 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13041 return true;
13043 switch (code)
13045 case TARGET_EXPR:
13047 tree temp = TARGET_EXPR_SLOT (t);
13048 t = TARGET_EXPR_INITIAL (t);
13050 /* If the initializer is non-void, then it's a normal expression
13051 that will be assigned to the slot. */
13052 if (!VOID_TYPE_P (t))
13053 return RECURSE (t);
13055 /* Otherwise, the initializer sets the slot in some way. One common
13056 way is an assignment statement at the end of the initializer. */
13057 while (1)
13059 if (TREE_CODE (t) == BIND_EXPR)
13060 t = expr_last (BIND_EXPR_BODY (t));
13061 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13062 || TREE_CODE (t) == TRY_CATCH_EXPR)
13063 t = expr_last (TREE_OPERAND (t, 0));
13064 else if (TREE_CODE (t) == STATEMENT_LIST)
13065 t = expr_last (t);
13066 else
13067 break;
13069 if (TREE_CODE (t) == MODIFY_EXPR
13070 && TREE_OPERAND (t, 0) == temp)
13071 return RECURSE (TREE_OPERAND (t, 1));
13073 return false;
13076 case CALL_EXPR:
13078 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13079 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13081 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13082 get_call_combined_fn (t),
13083 arg0,
13084 arg1,
13085 strict_overflow_p, depth);
13087 case COMPOUND_EXPR:
13088 case MODIFY_EXPR:
13089 return RECURSE (TREE_OPERAND (t, 1));
13091 case BIND_EXPR:
13092 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13094 case SAVE_EXPR:
13095 return RECURSE (TREE_OPERAND (t, 0));
13097 default:
13098 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13102 #undef RECURSE
13103 #undef tree_expr_nonnegative_warnv_p
13105 /* Return true if T is known to be non-negative. If the return
13106 value is based on the assumption that signed overflow is undefined,
13107 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13108 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13110 bool
13111 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13113 enum tree_code code;
13114 if (t == error_mark_node)
13115 return false;
13117 code = TREE_CODE (t);
13118 switch (TREE_CODE_CLASS (code))
13120 case tcc_binary:
13121 case tcc_comparison:
13122 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13123 TREE_TYPE (t),
13124 TREE_OPERAND (t, 0),
13125 TREE_OPERAND (t, 1),
13126 strict_overflow_p, depth);
13128 case tcc_unary:
13129 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13130 TREE_TYPE (t),
13131 TREE_OPERAND (t, 0),
13132 strict_overflow_p, depth);
13134 case tcc_constant:
13135 case tcc_declaration:
13136 case tcc_reference:
13137 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13139 default:
13140 break;
13143 switch (code)
13145 case TRUTH_AND_EXPR:
13146 case TRUTH_OR_EXPR:
13147 case TRUTH_XOR_EXPR:
13148 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13149 TREE_TYPE (t),
13150 TREE_OPERAND (t, 0),
13151 TREE_OPERAND (t, 1),
13152 strict_overflow_p, depth);
13153 case TRUTH_NOT_EXPR:
13154 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13155 TREE_TYPE (t),
13156 TREE_OPERAND (t, 0),
13157 strict_overflow_p, depth);
13159 case COND_EXPR:
13160 case CONSTRUCTOR:
13161 case OBJ_TYPE_REF:
13162 case ASSERT_EXPR:
13163 case ADDR_EXPR:
13164 case WITH_SIZE_EXPR:
13165 case SSA_NAME:
13166 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13168 default:
13169 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13173 /* Return true if `t' is known to be non-negative. Handle warnings
13174 about undefined signed overflow. */
13176 bool
13177 tree_expr_nonnegative_p (tree t)
13179 bool ret, strict_overflow_p;
13181 strict_overflow_p = false;
13182 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13183 if (strict_overflow_p)
13184 fold_overflow_warning (("assuming signed overflow does not occur when "
13185 "determining that expression is always "
13186 "non-negative"),
13187 WARN_STRICT_OVERFLOW_MISC);
13188 return ret;
13192 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13193 For floating point we further ensure that T is not denormal.
13194 Similar logic is present in nonzero_address in rtlanal.h.
13196 If the return value is based on the assumption that signed overflow
13197 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13198 change *STRICT_OVERFLOW_P. */
13200 bool
13201 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13202 bool *strict_overflow_p)
13204 switch (code)
13206 case ABS_EXPR:
13207 return tree_expr_nonzero_warnv_p (op0,
13208 strict_overflow_p);
13210 case NOP_EXPR:
13212 tree inner_type = TREE_TYPE (op0);
13213 tree outer_type = type;
13215 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13216 && tree_expr_nonzero_warnv_p (op0,
13217 strict_overflow_p));
13219 break;
13221 case NON_LVALUE_EXPR:
13222 return tree_expr_nonzero_warnv_p (op0,
13223 strict_overflow_p);
13225 default:
13226 break;
13229 return false;
13232 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13233 For floating point we further ensure that T is not denormal.
13234 Similar logic is present in nonzero_address in rtlanal.h.
13236 If the return value is based on the assumption that signed overflow
13237 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13238 change *STRICT_OVERFLOW_P. */
13240 bool
13241 tree_binary_nonzero_warnv_p (enum tree_code code,
13242 tree type,
13243 tree op0,
13244 tree op1, bool *strict_overflow_p)
13246 bool sub_strict_overflow_p;
13247 switch (code)
13249 case POINTER_PLUS_EXPR:
13250 case PLUS_EXPR:
13251 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13253 /* In the presence of negative values it is hard
13254 to say anything. */
13255 sub_strict_overflow_p = false;
13256 if (!tree_expr_nonnegative_warnv_p (op0,
13257 &sub_strict_overflow_p)
13258 || !tree_expr_nonnegative_warnv_p (op1,
13259 &sub_strict_overflow_p))
13260 return false;
13261 /* One of the operands must be positive and the other non-negative. */
13262 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13263 overflows, on a twos-complement machine the sum of two
13264 nonnegative numbers can never be zero. */
13265 return (tree_expr_nonzero_warnv_p (op0,
13266 strict_overflow_p)
13267 || tree_expr_nonzero_warnv_p (op1,
13268 strict_overflow_p));
13270 break;
13272 case MULT_EXPR:
13273 if (TYPE_OVERFLOW_UNDEFINED (type))
13275 if (tree_expr_nonzero_warnv_p (op0,
13276 strict_overflow_p)
13277 && tree_expr_nonzero_warnv_p (op1,
13278 strict_overflow_p))
13280 *strict_overflow_p = true;
13281 return true;
13284 break;
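/* Worked example for the MULT_EXPR guard above: with 32-bit wrapping
   arithmetic, 65536 * 65536 overflows to 0, so two nonzero operands only
   guarantee a nonzero product when signed overflow is undefined.  */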
13286 case MIN_EXPR:
13287 sub_strict_overflow_p = false;
13288 if (tree_expr_nonzero_warnv_p (op0,
13289 &sub_strict_overflow_p)
13290 && tree_expr_nonzero_warnv_p (op1,
13291 &sub_strict_overflow_p))
13293 if (sub_strict_overflow_p)
13294 *strict_overflow_p = true;
/* The minimum of two nonzero operands is one of them, hence nonzero. */
return true;
13296 break;
13298 case MAX_EXPR:
13299 sub_strict_overflow_p = false;
13300 if (tree_expr_nonzero_warnv_p (op0,
13301 &sub_strict_overflow_p))
13303 if (sub_strict_overflow_p)
13304 *strict_overflow_p = true;
13306 /* When both operands are nonzero, then MAX must be too. */
13307 if (tree_expr_nonzero_warnv_p (op1,
13308 strict_overflow_p))
13309 return true;
13311 /* MAX where operand 0 is positive is positive. */
13312 return tree_expr_nonnegative_warnv_p (op0,
13313 strict_overflow_p);
13315 /* MAX where operand 1 is positive is positive. */
13316 else if (tree_expr_nonzero_warnv_p (op1,
13317 &sub_strict_overflow_p)
13318 && tree_expr_nonnegative_warnv_p (op1,
13319 &sub_strict_overflow_p))
13321 if (sub_strict_overflow_p)
13322 *strict_overflow_p = true;
13323 return true;
13325 break;
13327 case BIT_IOR_EXPR:
13328 return (tree_expr_nonzero_warnv_p (op1,
13329 strict_overflow_p)
13330 || tree_expr_nonzero_warnv_p (op0,
13331 strict_overflow_p));
13333 default:
13334 break;
13337 return false;
13340 /* Return true when T is an address and is known to be nonzero.
13341 For floating point we further ensure that T is not denormal.
13342 Similar logic is present in nonzero_address in rtlanal.h.
13344 If the return value is based on the assumption that signed overflow
13345 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13346 change *STRICT_OVERFLOW_P. */
13348 bool
13349 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13351 bool sub_strict_overflow_p;
13352 switch (TREE_CODE (t))
13354 case INTEGER_CST:
13355 return !integer_zerop (t);
13357 case ADDR_EXPR:
13359 tree base = TREE_OPERAND (t, 0);
13361 if (!DECL_P (base))
13362 base = get_base_address (base);
13364 if (!base)
13365 return false;
13367 /* For objects in the symbol table, check whether we know they are non-zero.
13368 Don't conclude anything for variables and functions before the symtab is
13369 built; it is quite possible that they will be declared weak later. */
13370 if (DECL_P (base) && decl_in_symtab_p (base))
13372 struct symtab_node *symbol;
13374 symbol = symtab_node::get_create (base);
13375 if (symbol)
13376 return symbol->nonzero_address ();
13377 else
13378 return false;
13381 /* Function local objects are never NULL. */
13382 if (DECL_P (base)
13383 && (DECL_CONTEXT (base)
13384 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13385 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13386 return true;
13388 /* Constants are never weak. */
13389 if (CONSTANT_CLASS_P (base))
13390 return true;
13392 return false;
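/* For illustration: why the symtab check above matters.  A declaration
   like

     extern int maybe_absent __attribute__ ((weak));

   may resolve to address 0 at link time, so "&maybe_absent != 0" is a
   genuine run-time question, not a foldable constant.  */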
13395 case COND_EXPR:
13396 sub_strict_overflow_p = false;
13397 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13398 &sub_strict_overflow_p)
13399 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13400 &sub_strict_overflow_p))
13402 if (sub_strict_overflow_p)
13403 *strict_overflow_p = true;
13404 return true;
13406 break;
13408 default:
13409 break;
13411 return false;
13414 #define integer_valued_real_p(X) \
13415 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13417 #define RECURSE(X) \
13418 ((integer_valued_real_p) (X, depth + 1))
13420 /* Return true if the floating point result of (CODE OP0) has an
13421 integer value. We also allow +Inf, -Inf and NaN to be considered
13422 integer values.
13424 DEPTH is the current nesting depth of the query. */
13426 bool
13427 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13429 switch (code)
13431 case FLOAT_EXPR:
13432 return true;
13434 case ABS_EXPR:
13435 return RECURSE (op0);
13437 CASE_CONVERT:
13439 tree type = TREE_TYPE (op0);
13440 if (TREE_CODE (type) == INTEGER_TYPE)
13441 return true;
13442 if (TREE_CODE (type) == REAL_TYPE)
13443 return RECURSE (op0);
13444 break;
13447 default:
13448 break;
13450 return false;
13453 /* Return true if the floating point result of (CODE OP0 OP1) has an
13454 integer value. We also allow +Inf, -Inf and NaN to be considered
13455 integer values.
13457 DEPTH is the current nesting depth of the query. */
13459 bool
13460 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13462 switch (code)
13464 case PLUS_EXPR:
13465 case MINUS_EXPR:
13466 case MULT_EXPR:
13467 case MIN_EXPR:
13468 case MAX_EXPR:
13469 return RECURSE (op0) && RECURSE (op1);
13471 default:
13472 break;
13474 return false;
13477 /* Return true if the floating point result of calling FN with arguments
13478 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13479 considered integer values. If FN takes fewer than 2 arguments,
13480 the remaining ARGn are null.
13482 DEPTH is the current nesting depth of the query. */
13484 bool
13485 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13487 switch (fn)
13489 CASE_CFN_CEIL:
13490 CASE_CFN_FLOOR:
13491 CASE_CFN_NEARBYINT:
13492 CASE_CFN_RINT:
13493 CASE_CFN_ROUND:
13494 CASE_CFN_TRUNC:
13495 return true;
13497 CASE_CFN_FMIN:
13498 CASE_CFN_FMAX:
13499 return RECURSE (arg0) && RECURSE (arg1);
13501 default:
13502 break;
13504 return false;
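/* For illustration (standalone sketch using only <math.h>, not GCC
   internals): every value below is already integer-valued, which is what
   lets callers drop a following trunc () or floor ().  */
#include <math.h>

static void
toy_integer_valued (void)
{
  double a = floor (2.7);	/* 2.0 */
  double b = ceil (2.2);	/* 3.0 */
  double c = rint (2.5);	/* integer-valued whatever the rounding mode */
  double d = fmin (a, b);	/* integer-valued because a and b both are */
  (void) a; (void) b; (void) c; (void) d;
}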
13507 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13508 has an integer value. We also allow +Inf, -Inf and NaN to be
13509 considered integer values.
13511 DEPTH is the current nesting depth of the query. */
13513 bool
13514 integer_valued_real_single_p (tree t, int depth)
13516 switch (TREE_CODE (t))
13518 case REAL_CST:
13519 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13521 case COND_EXPR:
13522 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13524 case SSA_NAME:
13525 /* Limit the depth of recursion to avoid quadratic behavior.
13526 This is expected to catch almost all occurrences in practice.
13527 If this code misses important cases that unbounded recursion
13528 would not, passes that need this information could be revised
13529 to provide it through dataflow propagation. */
13530 return (!name_registered_for_update_p (t)
13531 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13532 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13533 depth));
13535 default:
13536 break;
13538 return false;
13541 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13542 has an integer value. We also allow +Inf, -Inf and NaN to be
13543 considered integer values.
13545 DEPTH is the current nesting depth of the query. */
13547 static bool
13548 integer_valued_real_invalid_p (tree t, int depth)
13550 switch (TREE_CODE (t))
13552 case COMPOUND_EXPR:
13553 case MODIFY_EXPR:
13554 case BIND_EXPR:
13555 return RECURSE (TREE_OPERAND (t, 1));
13557 case SAVE_EXPR:
13558 return RECURSE (TREE_OPERAND (t, 0));
13560 default:
13561 break;
13563 return false;
13566 #undef RECURSE
13567 #undef integer_valued_real_p
13569 /* Return true if the floating point expression T has an integer value.
13570 We also allow +Inf, -Inf and NaN to be considered integer values.
13572 DEPTH is the current nesting depth of the query. */
13574 bool
13575 integer_valued_real_p (tree t, int depth)
13577 if (t == error_mark_node)
13578 return false;
13580 tree_code code = TREE_CODE (t);
13581 switch (TREE_CODE_CLASS (code))
13583 case tcc_binary:
13584 case tcc_comparison:
13585 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13586 TREE_OPERAND (t, 1), depth);
13588 case tcc_unary:
13589 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13591 case tcc_constant:
13592 case tcc_declaration:
13593 case tcc_reference:
13594 return integer_valued_real_single_p (t, depth);
13596 default:
13597 break;
13600 switch (code)
13602 case COND_EXPR:
13603 case SSA_NAME:
13604 return integer_valued_real_single_p (t, depth);
13606 case CALL_EXPR:
13608 tree arg0 = (call_expr_nargs (t) > 0
13609 ? CALL_EXPR_ARG (t, 0)
13610 : NULL_TREE);
13611 tree arg1 = (call_expr_nargs (t) > 1
13612 ? CALL_EXPR_ARG (t, 1)
13613 : NULL_TREE);
13614 return integer_valued_real_call_p (get_call_combined_fn (t),
13615 arg0, arg1, depth);
13618 default:
13619 return integer_valued_real_invalid_p (t, depth);
13623 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13624 attempt to fold the expression to a constant without modifying TYPE,
13625 OP0 or OP1.
13627 If the expression could be simplified to a constant, then return
13628 the constant. If the expression would not be simplified to a
13629 constant, then return NULL_TREE. */
13631 tree
13632 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13634 tree tem = fold_binary (code, type, op0, op1);
13635 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13638 /* Given the components of a unary expression CODE, TYPE and OP0,
13639 attempt to fold the expression to a constant without modifying
13640 TYPE or OP0.
13642 If the expression could be simplified to a constant, then return
13643 the constant. If the expression would not be simplified to a
13644 constant, then return NULL_TREE. */
13646 tree
13647 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13649 tree tem = fold_unary (code, type, op0);
13650 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13653 /* If EXP represents referencing an element in a constant string
13654 (either via pointer arithmetic or array indexing), return the
13655 tree representing the value accessed, otherwise return NULL. */
13657 tree
13658 fold_read_from_constant_string (tree exp)
13660 if ((TREE_CODE (exp) == INDIRECT_REF
13661 || TREE_CODE (exp) == ARRAY_REF)
13662 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13664 tree exp1 = TREE_OPERAND (exp, 0);
13665 tree index;
13666 tree string;
13667 location_t loc = EXPR_LOCATION (exp);
13669 if (TREE_CODE (exp) == INDIRECT_REF)
13670 string = string_constant (exp1, &index);
13671 else
13673 tree low_bound = array_ref_low_bound (exp);
13674 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13676 /* Optimize the special-case of a zero lower bound.
13678 We convert the low_bound to sizetype to avoid some problems
13679 with constant folding. (E.g. suppose the lower bound is 1,
13680 and its mode is QI. Without the conversion, (ARRAY
13681 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13682 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13683 if (! integer_zerop (low_bound))
13684 index = size_diffop_loc (loc, index,
13685 fold_convert_loc (loc, sizetype, low_bound));
13687 string = exp1;
13690 if (string
13691 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13692 && TREE_CODE (string) == STRING_CST
13693 && TREE_CODE (index) == INTEGER_CST
13694 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13695 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13696 == MODE_INT)
13697 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13698 return build_int_cst_type (TREE_TYPE (exp),
13699 (TREE_STRING_POINTER (string)
13700 [TREE_INT_CST_LOW (index)]));
13702 return NULL;
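/* For illustration: given

     const char *p = "hello";

   a read of p[1] (an INDIRECT_REF/ARRAY_REF into the STRING_CST) is folded
   by the routine above to the character constant 'e'.  */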
13705 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13706 an integer constant, real, or fixed-point constant.
13708 TYPE is the type of the result. */
13710 static tree
13711 fold_negate_const (tree arg0, tree type)
13713 tree t = NULL_TREE;
13715 switch (TREE_CODE (arg0))
13717 case INTEGER_CST:
13719 bool overflow;
13720 wide_int val = wi::neg (arg0, &overflow);
13721 t = force_fit_type (type, val, 1,
13722 (overflow | TREE_OVERFLOW (arg0))
13723 && !TYPE_UNSIGNED (type));
13724 break;
13727 case REAL_CST:
13728 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13729 break;
13731 case FIXED_CST:
13733 FIXED_VALUE_TYPE f;
13734 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13735 &(TREE_FIXED_CST (arg0)), NULL,
13736 TYPE_SATURATING (type));
13737 t = build_fixed (type, f);
13738 /* Propagate overflow flags. */
13739 if (overflow_p | TREE_OVERFLOW (arg0))
13740 TREE_OVERFLOW (t) = 1;
13741 break;
13744 default:
13745 gcc_unreachable ();
13748 return t;
13751 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13752 an integer constant or real constant.
13754 TYPE is the type of the result. */
13756 tree
13757 fold_abs_const (tree arg0, tree type)
13759 tree t = NULL_TREE;
13761 switch (TREE_CODE (arg0))
13763 case INTEGER_CST:
13765 /* If the value is unsigned or non-negative, then the absolute value
13766 is the same as the ordinary value. */
13767 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13768 t = arg0;
13770 /* If the value is negative, then the absolute value is
13771 its negation. */
13772 else
13774 bool overflow;
13775 wide_int val = wi::neg (arg0, &overflow);
13776 t = force_fit_type (type, val, -1,
13777 overflow | TREE_OVERFLOW (arg0));
13780 break;
13782 case REAL_CST:
13783 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13784 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13785 else
13786 t = arg0;
13787 break;
13789 default:
13790 gcc_unreachable ();
13793 return t;
13796 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13797 constant. TYPE is the type of the result. */
13799 static tree
13800 fold_not_const (const_tree arg0, tree type)
13802 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13804 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13807 /* Given CODE, a relational operator, the target type, TYPE and two
13808 constant operands OP0 and OP1, return the result of the
13809 relational operation. If the result is not a compile time
13810 constant, then return NULL_TREE. */
13812 static tree
13813 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13815 int result, invert;
13817 /* From here on, the only cases we handle are when the result is
13818 known to be a constant. */
13820 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13822 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13823 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13825 /* Handle the cases where either operand is a NaN. */
13826 if (real_isnan (c0) || real_isnan (c1))
13828 switch (code)
13830 case EQ_EXPR:
13831 case ORDERED_EXPR:
13832 result = 0;
13833 break;
13835 case NE_EXPR:
13836 case UNORDERED_EXPR:
13837 case UNLT_EXPR:
13838 case UNLE_EXPR:
13839 case UNGT_EXPR:
13840 case UNGE_EXPR:
13841 case UNEQ_EXPR:
13842 result = 1;
13843 break;
13845 case LT_EXPR:
13846 case LE_EXPR:
13847 case GT_EXPR:
13848 case GE_EXPR:
13849 case LTGT_EXPR:
13850 if (flag_trapping_math)
13851 return NULL_TREE;
13852 result = 0;
13853 break;
13855 default:
13856 gcc_unreachable ();
13859 return constant_boolean_node (result, type);
13862 return constant_boolean_node (real_compare (code, c0, c1), type);
13865 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13867 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13868 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13869 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13872 /* Handle equality/inequality of complex constants. */
13873 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13875 tree rcond = fold_relational_const (code, type,
13876 TREE_REALPART (op0),
13877 TREE_REALPART (op1));
13878 tree icond = fold_relational_const (code, type,
13879 TREE_IMAGPART (op0),
13880 TREE_IMAGPART (op1));
13881 if (code == EQ_EXPR)
13882 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13883 else if (code == NE_EXPR)
13884 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13885 else
13886 return NULL_TREE;
13889 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13891 unsigned count = VECTOR_CST_NELTS (op0);
13892 tree *elts = XALLOCAVEC (tree, count);
13893 gcc_assert (VECTOR_CST_NELTS (op1) == count
13894 && TYPE_VECTOR_SUBPARTS (type) == count);
13896 for (unsigned i = 0; i < count; i++)
13898 tree elem_type = TREE_TYPE (type);
13899 tree elem0 = VECTOR_CST_ELT (op0, i);
13900 tree elem1 = VECTOR_CST_ELT (op1, i);
13902 tree tem = fold_relational_const (code, elem_type,
13903 elem0, elem1);
13905 if (tem == NULL_TREE)
13906 return NULL_TREE;
13908 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13911 return build_vector (type, elts);
13914 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13916 To compute GT, swap the arguments and do LT.
13917 To compute GE, do LT and invert the result.
13918 To compute LE, swap the arguments, do LT and invert the result.
13919 To compute NE, do EQ and invert the result.
13921 Therefore, the code below must handle only EQ and LT. */
13923 if (code == LE_EXPR || code == GT_EXPR)
13925 std::swap (op0, op1);
13926 code = swap_tree_comparison (code);
13929 /* Note that it is safe to invert for real values here because we
13930 have already handled the one case where it matters. */
13932 invert = 0;
13933 if (code == NE_EXPR || code == GE_EXPR)
13935 invert = 1;
13936 code = invert_tree_comparison (code, false);
13939 /* Compute a result for LT or EQ if args permit;
13940 otherwise return NULL_TREE. */
13941 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13943 if (code == EQ_EXPR)
13944 result = tree_int_cst_equal (op0, op1);
13945 else
13946 result = tree_int_cst_lt (op0, op1);
13948 else
13949 return NULL_TREE;
13951 if (invert)
13952 result ^= 1;
13953 return constant_boolean_node (result, type);
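/* For illustration: the NaN rules implemented above, observable in plain C
   (a standalone sketch using only <math.h>; not GCC internals).  */
#include <math.h>

static void
toy_nan_compares (void)
{
  double x = NAN;
  int a = (x == x);		/* 0: EQ with a NaN operand is false */
  int b = (x != 1.0);		/* 1: NE with a NaN operand is true */
  int c = (x < 1.0);		/* 0, but the ordered compare raises an
				   invalid-operation exception, which is
				   why fold refuses to touch it under
				   -ftrapping-math */
  (void) a; (void) b; (void) c;
}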
13956 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13957 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13958 itself. */
13960 tree
13961 fold_build_cleanup_point_expr (tree type, tree expr)
13963 /* If the expression does not have side effects then we don't have to wrap
13964 it with a cleanup point expression. */
13965 if (!TREE_SIDE_EFFECTS (expr))
13966 return expr;
13968 /* If the expression is a return, check whether the expression inside the
13969 return, or the right-hand side of the modify expression inside the
13970 return, has side effects. If either has none, we don't need to wrap
13971 the expression in a cleanup point expression. Note we don't check the
13972 left-hand side of the modify because it should always be a return decl. */
13973 if (TREE_CODE (expr) == RETURN_EXPR)
13975 tree op = TREE_OPERAND (expr, 0);
13976 if (!op || !TREE_SIDE_EFFECTS (op))
13977 return expr;
13978 op = TREE_OPERAND (op, 1);
13979 if (!TREE_SIDE_EFFECTS (op))
13980 return expr;
13983 return build1 (CLEANUP_POINT_EXPR, type, expr);
13986 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13987 of an indirection through OP0, or NULL_TREE if no simplification is
13988 possible. */
13990 tree
13991 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13993 tree sub = op0;
13994 tree subtype;
13996 STRIP_NOPS (sub);
13997 subtype = TREE_TYPE (sub);
13998 if (!POINTER_TYPE_P (subtype))
13999 return NULL_TREE;
14001 if (TREE_CODE (sub) == ADDR_EXPR)
14003 tree op = TREE_OPERAND (sub, 0);
14004 tree optype = TREE_TYPE (op);
14005 /* *&CONST_DECL -> the value of the const decl. */
14006 if (TREE_CODE (op) == CONST_DECL)
14007 return DECL_INITIAL (op);
14008 /* *&p => p; make sure to handle *&"str"[cst] here. */
14009 if (type == optype)
14011 tree fop = fold_read_from_constant_string (op);
14012 if (fop)
14013 return fop;
14014 else
14015 return op;
14017 /* *(foo *)&fooarray => fooarray[0] */
14018 else if (TREE_CODE (optype) == ARRAY_TYPE
14019 && type == TREE_TYPE (optype)
14020 && (!in_gimple_form
14021 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14023 tree type_domain = TYPE_DOMAIN (optype);
14024 tree min_val = size_zero_node;
14025 if (type_domain && TYPE_MIN_VALUE (type_domain))
14026 min_val = TYPE_MIN_VALUE (type_domain);
14027 if (in_gimple_form
14028 && TREE_CODE (min_val) != INTEGER_CST)
14029 return NULL_TREE;
14030 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14031 NULL_TREE, NULL_TREE);
14033 /* *(foo *)&complexfoo => __real__ complexfoo */
14034 else if (TREE_CODE (optype) == COMPLEX_TYPE
14035 && type == TREE_TYPE (optype))
14036 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14037 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14038 else if (TREE_CODE (optype) == VECTOR_TYPE
14039 && type == TREE_TYPE (optype))
14041 tree part_width = TYPE_SIZE (type);
14042 tree index = bitsize_int (0);
14043 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14047 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14048 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14050 tree op00 = TREE_OPERAND (sub, 0);
14051 tree op01 = TREE_OPERAND (sub, 1);
14053 STRIP_NOPS (op00);
14054 if (TREE_CODE (op00) == ADDR_EXPR)
14056 tree op00type;
14057 op00 = TREE_OPERAND (op00, 0);
14058 op00type = TREE_TYPE (op00);
14060 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14061 if (TREE_CODE (op00type) == VECTOR_TYPE
14062 && type == TREE_TYPE (op00type))
14064 HOST_WIDE_INT offset = tree_to_shwi (op01);
14065 tree part_width = TYPE_SIZE (type);
14066 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14067 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14068 tree index = bitsize_int (indexi);
14070 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14071 return fold_build3_loc (loc,
14072 BIT_FIELD_REF, type, op00,
14073 part_width, index);
14076 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14077 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14078 && type == TREE_TYPE (op00type))
14080 tree size = TYPE_SIZE_UNIT (type);
14081 if (tree_int_cst_equal (size, op01))
14082 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14084 /* ((foo *)&fooarray)[1] => fooarray[1] */
14085 else if (TREE_CODE (op00type) == ARRAY_TYPE
14086 && type == TREE_TYPE (op00type))
14088 tree type_domain = TYPE_DOMAIN (op00type);
14089 tree min_val = size_zero_node;
14090 if (type_domain && TYPE_MIN_VALUE (type_domain))
14091 min_val = TYPE_MIN_VALUE (type_domain);
14092 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14093 TYPE_SIZE_UNIT (type));
14094 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14095 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14096 NULL_TREE, NULL_TREE);
14101 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14102 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14103 && type == TREE_TYPE (TREE_TYPE (subtype))
14104 && (!in_gimple_form
14105 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14107 tree type_domain;
14108 tree min_val = size_zero_node;
14109 sub = build_fold_indirect_ref_loc (loc, sub);
14110 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14111 if (type_domain && TYPE_MIN_VALUE (type_domain))
14112 min_val = TYPE_MIN_VALUE (type_domain);
14113 if (in_gimple_form
14114 && TREE_CODE (min_val) != INTEGER_CST)
14115 return NULL_TREE;
14116 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14117 NULL_TREE);
14120 return NULL_TREE;
14123 /* Builds an expression for an indirection through T, simplifying some
14124 cases. */
14126 tree
14127 build_fold_indirect_ref_loc (location_t loc, tree t)
14129 tree type = TREE_TYPE (TREE_TYPE (t));
14130 tree sub = fold_indirect_ref_1 (loc, type, t);
14132 if (sub)
14133 return sub;
14135 return build1_loc (loc, INDIRECT_REF, type, t);
14138 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14140 tree
14141 fold_indirect_ref_loc (location_t loc, tree t)
14143 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14145 if (sub)
14146 return sub;
14147 else
14148 return t;
14151 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14152 whose result is ignored. The type of the returned tree need not be
14153 the same as the original expression. */
14155 tree
14156 fold_ignored_result (tree t)
14158 if (!TREE_SIDE_EFFECTS (t))
14159 return integer_zero_node;
14161 for (;;)
14162 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14164 case tcc_unary:
14165 t = TREE_OPERAND (t, 0);
14166 break;
14168 case tcc_binary:
14169 case tcc_comparison:
14170 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14171 t = TREE_OPERAND (t, 0);
14172 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14173 t = TREE_OPERAND (t, 1);
14174 else
14175 return t;
14176 break;
14178 case tcc_expression:
14179 switch (TREE_CODE (t))
14181 case COMPOUND_EXPR:
14182 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14183 return t;
14184 t = TREE_OPERAND (t, 0);
14185 break;
14187 case COND_EXPR:
14188 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14189 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14190 return t;
14191 t = TREE_OPERAND (t, 0);
14192 break;
14194 default:
14195 return t;
14197 break;
14199 default:
14200 return t;
14204 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14206 tree
14207 round_up_loc (location_t loc, tree value, unsigned int divisor)
14209 tree div = NULL_TREE;
14211 if (divisor == 1)
14212 return value;
14214 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14215 have to do anything. Only do this check when VALUE is not a constant,
14216 because for a constant the check is more expensive than simply
14217 doing the rounding. */
14218 if (TREE_CODE (value) != INTEGER_CST)
14220 div = build_int_cst (TREE_TYPE (value), divisor);
14222 if (multiple_of_p (TREE_TYPE (value), value, div))
14223 return value;
14226 /* If divisor is a power of two, simplify this to bit manipulation. */
14227 if (divisor == (divisor & -divisor))
14229 if (TREE_CODE (value) == INTEGER_CST)
14231 wide_int val = value;
14232 bool overflow_p;
14234 if ((val & (divisor - 1)) == 0)
14235 return value;
14237 overflow_p = TREE_OVERFLOW (value);
14238 val += divisor - 1;
14239 val &= - (int) divisor;
14240 if (val == 0)
14241 overflow_p = true;
14243 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14245 else
14247 tree t;
14249 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14250 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14251 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14252 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14255 else
14257 if (!div)
14258 div = build_int_cst (TREE_TYPE (value), divisor);
14259 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14260 value = size_binop_loc (loc, MULT_EXPR, value, div);
14263 return value;
14266 /* Likewise, but round down. */
14268 tree
14269 round_down_loc (location_t loc, tree value, int divisor)
14271 tree div = NULL_TREE;
14273 gcc_assert (divisor > 0);
14274 if (divisor == 1)
14275 return value;
14277 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14278 have to do anything. Only do this check when VALUE is not a constant,
14279 because for a constant the check is more expensive than simply
14280 doing the rounding. */
14281 if (TREE_CODE (value) != INTEGER_CST)
14283 div = build_int_cst (TREE_TYPE (value), divisor);
14285 if (multiple_of_p (TREE_TYPE (value), value, div))
14286 return value;
14289 /* If divisor is a power of two, simplify this to bit manipulation. */
14290 if (divisor == (divisor & -divisor))
14292 tree t;
14294 t = build_int_cst (TREE_TYPE (value), -divisor);
14295 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14297 else
14299 if (!div)
14300 div = build_int_cst (TREE_TYPE (value), divisor);
14301 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14302 value = size_binop_loc (loc, MULT_EXPR, value, div);
14305 return value;
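/* For illustration: the power-of-two fast paths above reduce to two classic
   bit tricks (standalone sketch; unsigned arithmetic, D must be a power of
   two).  */
static unsigned int
toy_round_up (unsigned int v, unsigned int d)
{
  return (v + d - 1) & -d;	/* toy_round_up (13, 8) == 16 */
}

static unsigned int
toy_round_down (unsigned int v, unsigned int d)
{
  return v & -d;		/* toy_round_down (13, 8) == 8 */
}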
14308 /* Returns the pointer to the base of the object addressed by EXP and
14309 extracts the information about the offset of the access, storing it
14310 in *PBITPOS and *POFFSET. */
14312 static tree
14313 split_address_to_core_and_offset (tree exp,
14314 HOST_WIDE_INT *pbitpos, tree *poffset)
14316 tree core;
14317 machine_mode mode;
14318 int unsignedp, reversep, volatilep;
14319 HOST_WIDE_INT bitsize;
14320 location_t loc = EXPR_LOCATION (exp);
14322 if (TREE_CODE (exp) == ADDR_EXPR)
14324 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14325 poffset, &mode, &unsignedp, &reversep,
14326 &volatilep, false);
14327 core = build_fold_addr_expr_loc (loc, core);
14329 else
14331 core = exp;
14332 *pbitpos = 0;
14333 *poffset = NULL_TREE;
14336 return core;
14339 /* Returns true if addresses of E1 and E2 differ by a constant, false
14340 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14342 bool
14343 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14345 tree core1, core2;
14346 HOST_WIDE_INT bitpos1, bitpos2;
14347 tree toffset1, toffset2, tdiff, type;
14349 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14350 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14352 if (bitpos1 % BITS_PER_UNIT != 0
14353 || bitpos2 % BITS_PER_UNIT != 0
14354 || !operand_equal_p (core1, core2, 0))
14355 return false;
14357 if (toffset1 && toffset2)
14359 type = TREE_TYPE (toffset1);
14360 if (type != TREE_TYPE (toffset2))
14361 toffset2 = fold_convert (type, toffset2);
14363 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14364 if (!cst_and_fits_in_hwi (tdiff))
14365 return false;
14367 *diff = int_cst_value (tdiff);
14369 else if (toffset1 || toffset2)
14371 /* If only one of the offsets is non-constant, the difference cannot
14372 be a constant. */
14373 return false;
14375 else
14376 *diff = 0;
14378 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14379 return true;
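/* For illustration: the kind of fact ptr_difference_const proves.  Both
   addresses below share the core &buf, so their difference is the
   compile-time constant 5 * sizeof (int), i.e. 20 on a 4-byte-int target
   (standalone sketch, not GCC internals).  */
static long
toy_ptr_difference (void)
{
  static int buf[16];
  char *e1 = (char *) &buf[7];
  char *e2 = (char *) &buf[2];
  return e1 - e2;		/* always 5 * sizeof (int) */
}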
14382 /* Return OFF converted to a pointer offset type suitable as offset for
14383 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14384 tree
14385 convert_to_ptrofftype_loc (location_t loc, tree off)
14387 return fold_convert_loc (loc, sizetype, off);
14390 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14391 tree
14392 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14394 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14395 ptr, convert_to_ptrofftype_loc (loc, off));
14398 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14399 tree
14400 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14402 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14403 ptr, size_int (off));
14406 /* Return a char pointer for a C string if it is a string constant
14407 or the sum of a string constant and an integer constant. */
14409 const char *
14410 c_getstr (tree src)
14412 tree offset_node;
14414 src = string_constant (src, &offset_node);
14415 if (src == 0)
14416 return 0;
14418 if (offset_node == 0)
14419 return TREE_STRING_POINTER (src);
14420 else if (!tree_fits_uhwi_p (offset_node)
14421 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14422 return 0;
14424 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
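/* For illustration: what c_getstr computes.  For the expression
   "hello" + 2 (a string constant plus a constant in-range offset) it
   returns a pointer into the literal; the plain-C analogue (standalone
   sketch, not GCC internals):  */
static const char *
toy_getstr (void)
{
  const char *s = "hello" + 2;	/* points at "llo" inside the literal */
  return s;
}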