/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
77 #ifndef LOAD_EXTEND_OP
78 #define LOAD_EXTEND_OP(M) UNKNOWN
79 #endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

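/* As a worked example of the encoding: bit 0 represents LT, bit 1 EQ,
   bit 2 GT and bit 3 UNORD, so the codes combine with plain bitwise
   operators:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                1 | 2 == 3
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ                a <= b && a >= b

   Hence ANDing the codes of two comparisons models combining them
   with &&, and ORing them models ||.  */
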
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *, int *,
				    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}

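/* For example, with INTEGER_CST arguments, dividing 12 by 4 yields the
   constant 3, while dividing 13 by 4 yields NULL_TREE because the
   remainder is nonzero.  */
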
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

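/* A sketch of typical usage (illustrative only; the local names here are
   made up): callers bracket a speculative fold so that the warning is
   only emitted when the folded result is actually kept, roughly

       fold_defer_overflow_warnings ();
       tree folded = fold_binary (code, type, op0, op1);
       bool keep = folded != NULL_TREE;
       fold_undefer_overflow_warnings (keep, stmt, WARN_STRICT_OVERFLOW_MISC);

   which follows the contract documented above
   fold_undefer_overflow_warnings.  */
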
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

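/* For example, sin is odd (-sin (x) == sin (-x)), so fold may push a
   negation into the call.  The rint/lrint group is treated as odd only
   when -frounding-math is off: under a dynamic rounding mode such as
   round-toward-positive, rint (-x) need not equal -rint (x).  */
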
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

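/* E.g. for a 32-bit signed type the only constant rejected here is
   INT_MIN: it is the one value with only the sign bit set, and
   -INT_MIN == INT_MAX + 1 is not representable.  */
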
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand
	 does if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

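/* A worked instance of the RSHIFT_EXPR case above, for 32-bit int:
   (int) x >> 31 is an arithmetic shift producing 0 or -1 according to
   the sign bit, so its negation is 0 or 1, which is exactly
   (unsigned) x >> 31.  This lets fold drop the NEGATE_EXPR.  */
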
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

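/* Illustration: with CODE == PLUS_EXPR, splitting IN = X + 5 stores 5 in
   *LITP and returns X; splitting IN = X - 5 stores 5 in *MINUS_LITP
   instead, recording that the literal was subtracted; and an operand
   that is TREE_CONSTANT without being a literal (e.g. an ADDR_EXPR of a
   global) ends up in *CONP.  */
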
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

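/* For example, int_const_binop (PLUS_EXPR, 7, 8) on two int INTEGER_CSTs
   produces the INTEGER_CST 15.  The overflowable argument of 1 means the
   force_fit_type call in the worker flags TREE_OVERFLOW only when signed
   arithmetic wrapped; size_binop passes -1 instead so unsigned sizetype
   overflow is flagged as well.  */
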
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

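/* Index arithmetic example for the VEC_WIDEN_MULT cases above, with
   nelts == 4 output elements: in1 = (out << scale) + ofs, so the EVEN
   variant (scale == 1, ofs == 0) reads input elements 0, 2, 4 and 6,
   the ODD variant (ofs == 1) reads 1, 3, 5 and 7, and the LO/HI
   variants (scale == 0) read a contiguous half, with OFS selecting
   which half based on BYTES_BIG_ENDIAN.  */
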
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

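/* E.g. size_binop (MULT_EXPR, size_int (4), size_int (8)) folds at once
   to the sizetype constant 32.  Because the constant path passes
   overflowable == -1, a computation that wraps sizetype comes back with
   TREE_OVERFLOW set even though sizetype is unsigned.  */
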
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

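/* Concrete instances of the saturating semantics above, for a 32-bit
   signed target type: 3.9 converts to 3, 1.0e30 to INT_MAX and -1.0e30
   to INT_MIN (both flagged with TREE_OVERFLOW), and a NaN to 0, also
   flagged as overflowed.  */
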
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

1990 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1991 to another fixed-point type. */
1993 static tree
1994 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1996 FIXED_VALUE_TYPE value;
1997 tree t;
1998 bool overflow_p;
2000 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2001 TYPE_SATURATING (type));
2002 t = build_fixed (type, value);
2004 /* Propagate overflow flags. */
2005 if (overflow_p | TREE_OVERFLOW (arg1))
2006 TREE_OVERFLOW (t) = 1;
2007 return t;
2010 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2011 to a fixed-point type. */
2013 static tree
2014 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2016 FIXED_VALUE_TYPE value;
2017 tree t;
2018 bool overflow_p;
2019 double_int di;
2021 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2023 di.low = TREE_INT_CST_ELT (arg1, 0);
2024 if (TREE_INT_CST_NUNITS (arg1) == 1)
2025 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2026 else
2027 di.high = TREE_INT_CST_ELT (arg1, 1);
2029 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2030 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2031 TYPE_SATURATING (type));
2032 t = build_fixed (type, value);
2034 /* Propagate overflow flags. */
2035 if (overflow_p | TREE_OVERFLOW (arg1))
2036 TREE_OVERFLOW (t) = 1;
2037 return t;
2040 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2041 to a fixed-point type. */
2043 static tree
2044 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2046 FIXED_VALUE_TYPE value;
2047 tree t;
2048 bool overflow_p;
2050 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2051 &TREE_REAL_CST (arg1),
2052 TYPE_SATURATING (type));
2053 t = build_fixed (type, value);
2055 /* Propagate overflow flags. */
2056 if (overflow_p | TREE_OVERFLOW (arg1))
2057 TREE_OVERFLOW (t) = 1;
2058 return t;
2061 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2062 type TYPE. If no simplification can be done return NULL_TREE. */
2064 static tree
2065 fold_convert_const (enum tree_code code, tree type, tree arg1)
2067 if (TREE_TYPE (arg1) == type)
2068 return arg1;
2070 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2071 || TREE_CODE (type) == OFFSET_TYPE)
2073 if (TREE_CODE (arg1) == INTEGER_CST)
2074 return fold_convert_const_int_from_int (type, arg1);
2075 else if (TREE_CODE (arg1) == REAL_CST)
2076 return fold_convert_const_int_from_real (code, type, arg1);
2077 else if (TREE_CODE (arg1) == FIXED_CST)
2078 return fold_convert_const_int_from_fixed (type, arg1);
2080 else if (TREE_CODE (type) == REAL_TYPE)
2082 if (TREE_CODE (arg1) == INTEGER_CST)
2083 return build_real_from_int_cst (type, arg1);
2084 else if (TREE_CODE (arg1) == REAL_CST)
2085 return fold_convert_const_real_from_real (type, arg1);
2086 else if (TREE_CODE (arg1) == FIXED_CST)
2087 return fold_convert_const_real_from_fixed (type, arg1);
2089 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2091 if (TREE_CODE (arg1) == FIXED_CST)
2092 return fold_convert_const_fixed_from_fixed (type, arg1);
2093 else if (TREE_CODE (arg1) == INTEGER_CST)
2094 return fold_convert_const_fixed_from_int (type, arg1);
2095 else if (TREE_CODE (arg1) == REAL_CST)
2096 return fold_convert_const_fixed_from_real (type, arg1);
2098 return NULL_TREE;
2101 /* Construct a vector of zero elements of vector type TYPE. */
2103 static tree
2104 build_zero_vector (tree type)
2106 tree t;
2108 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2109 return build_vector_from_val (type, t);
2112 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2114 bool
2115 fold_convertible_p (const_tree type, const_tree arg)
2117 tree orig = TREE_TYPE (arg);
2119 if (type == orig)
2120 return true;
2122 if (TREE_CODE (arg) == ERROR_MARK
2123 || TREE_CODE (type) == ERROR_MARK
2124 || TREE_CODE (orig) == ERROR_MARK)
2125 return false;
2127 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2128 return true;
2130 switch (TREE_CODE (type))
2132 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2133 case POINTER_TYPE: case REFERENCE_TYPE:
2134 case OFFSET_TYPE:
2135 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2136 || TREE_CODE (orig) == OFFSET_TYPE)
2137 return true;
2138 return (TREE_CODE (orig) == VECTOR_TYPE
2139 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2141 case REAL_TYPE:
2142 case FIXED_POINT_TYPE:
2143 case COMPLEX_TYPE:
2144 case VECTOR_TYPE:
2145 case VOID_TYPE:
2146 return TREE_CODE (type) == TREE_CODE (orig);
2148 default:
2149 return false;
2153 /* Convert expression ARG to type TYPE. Used by the middle-end for
2154 simple conversions in preference to calling the front-end's convert. */
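/* For instance, converting an integer I to a complex type builds
   COMPLEX_EXPR <(T) I, (T) 0> below, where T is the element type of the
   complex type.  */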
2156 tree
2157 fold_convert_loc (location_t loc, tree type, tree arg)
2159 tree orig = TREE_TYPE (arg);
2160 tree tem;
2162 if (type == orig)
2163 return arg;
2165 if (TREE_CODE (arg) == ERROR_MARK
2166 || TREE_CODE (type) == ERROR_MARK
2167 || TREE_CODE (orig) == ERROR_MARK)
2168 return error_mark_node;
2170 switch (TREE_CODE (type))
2172 case POINTER_TYPE:
2173 case REFERENCE_TYPE:
2174 /* Handle conversions between pointers to different address spaces. */
2175 if (POINTER_TYPE_P (orig)
2176 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2177 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2178 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2179 /* fall through */
2181 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2182 case OFFSET_TYPE:
2183 if (TREE_CODE (arg) == INTEGER_CST)
2185 tem = fold_convert_const (NOP_EXPR, type, arg);
2186 if (tem != NULL_TREE)
2187 return tem;
2189 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2190 || TREE_CODE (orig) == OFFSET_TYPE)
2191 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2192 if (TREE_CODE (orig) == COMPLEX_TYPE)
2193 return fold_convert_loc (loc, type,
2194 fold_build1_loc (loc, REALPART_EXPR,
2195 TREE_TYPE (orig), arg));
2196 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2197 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2198 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2200 case REAL_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2203 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2205 return tem;
2207 else if (TREE_CODE (arg) == REAL_CST)
2209 tem = fold_convert_const (NOP_EXPR, type, arg);
2210 if (tem != NULL_TREE)
2211 return tem;
2213 else if (TREE_CODE (arg) == FIXED_CST)
2215 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2216 if (tem != NULL_TREE)
2217 return tem;
2220 switch (TREE_CODE (orig))
2222 case INTEGER_TYPE:
2223 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2224 case POINTER_TYPE: case REFERENCE_TYPE:
2225 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2227 case REAL_TYPE:
2228 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 case FIXED_POINT_TYPE:
2231 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2233 case COMPLEX_TYPE:
2234 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2235 return fold_convert_loc (loc, type, tem);
2237 default:
2238 gcc_unreachable ();
2241 case FIXED_POINT_TYPE:
2242 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2243 || TREE_CODE (arg) == REAL_CST)
2245 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2246 if (tem != NULL_TREE)
2247 goto fold_convert_exit;
2250 switch (TREE_CODE (orig))
2252 case FIXED_POINT_TYPE:
2253 case INTEGER_TYPE:
2254 case ENUMERAL_TYPE:
2255 case BOOLEAN_TYPE:
2256 case REAL_TYPE:
2257 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2259 case COMPLEX_TYPE:
2260 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2261 return fold_convert_loc (loc, type, tem);
2263 default:
2264 gcc_unreachable ();
2267 case COMPLEX_TYPE:
2268 switch (TREE_CODE (orig))
2270 case INTEGER_TYPE:
2271 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2272 case POINTER_TYPE: case REFERENCE_TYPE:
2273 case REAL_TYPE:
2274 case FIXED_POINT_TYPE:
2275 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2276 fold_convert_loc (loc, TREE_TYPE (type), arg),
2277 fold_convert_loc (loc, TREE_TYPE (type),
2278 integer_zero_node));
2279 case COMPLEX_TYPE:
2281 tree rpart, ipart;
2283 if (TREE_CODE (arg) == COMPLEX_EXPR)
2285 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2286 TREE_OPERAND (arg, 0));
2287 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2288 TREE_OPERAND (arg, 1));
2289 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2292 arg = save_expr (arg);
2293 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2295 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2296 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2297 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2300 default:
2301 gcc_unreachable ();
2304 case VECTOR_TYPE:
2305 if (integer_zerop (arg))
2306 return build_zero_vector (type);
2307 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2308 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2309 || TREE_CODE (orig) == VECTOR_TYPE);
2310 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2312 case VOID_TYPE:
2313 tem = fold_ignored_result (arg);
2314 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2316 default:
2317 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2318 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2319 gcc_unreachable ();
2321 fold_convert_exit:
2322 protected_set_expr_location_unshare (tem, loc);
2323 return tem;
2326 /* Return false if expr can be assumed not to be an lvalue, true
2327 otherwise. */
2329 static bool
2330 maybe_lvalue_p (const_tree x)
2332 /* We only need to wrap lvalue tree codes. */
2333 switch (TREE_CODE (x))
2335 case VAR_DECL:
2336 case PARM_DECL:
2337 case RESULT_DECL:
2338 case LABEL_DECL:
2339 case FUNCTION_DECL:
2340 case SSA_NAME:
2342 case COMPONENT_REF:
2343 case MEM_REF:
2344 case INDIRECT_REF:
2345 case ARRAY_REF:
2346 case ARRAY_RANGE_REF:
2347 case BIT_FIELD_REF:
2348 case OBJ_TYPE_REF:
2350 case REALPART_EXPR:
2351 case IMAGPART_EXPR:
2352 case PREINCREMENT_EXPR:
2353 case PREDECREMENT_EXPR:
2354 case SAVE_EXPR:
2355 case TRY_CATCH_EXPR:
2356 case WITH_CLEANUP_EXPR:
2357 case COMPOUND_EXPR:
2358 case MODIFY_EXPR:
2359 case TARGET_EXPR:
2360 case COND_EXPR:
2361 case BIND_EXPR:
2362 break;
2364 default:
2365 /* Assume the worst for front-end tree codes. */
2366 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2367 break;
2368 return false;
2371 return true;
2374 /* Return an expr equal to X but certainly not valid as an lvalue. */
2376 tree
2377 non_lvalue_loc (location_t loc, tree x)
2379 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2380 us. */
2381 if (in_gimple_form)
2382 return x;
2384 if (! maybe_lvalue_p (x))
2385 return x;
2386 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2389 /* When pedantic, return an expr equal to X but certainly not valid as a
2390 pedantic lvalue. Otherwise, return X. */
2392 static tree
2393 pedantic_non_lvalue_loc (location_t loc, tree x)
2395 return protected_set_expr_location_unshare (x, loc);
2398 /* Given a tree comparison code, return the code that is the logical inverse.
2399 It is generally not safe to do this for floating-point comparisons, except
2400 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2401 ERROR_MARK in this case. */
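/* For example, the inverse of a < b is a >= b when NaNs cannot occur, but
   UNGE (unordered or greater-equal) when they can: a < b is false for
   unordered operands, so its inverse must be true for them.  */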
2403 enum tree_code
2404 invert_tree_comparison (enum tree_code code, bool honor_nans)
2406 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2407 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2408 return ERROR_MARK;
2410 switch (code)
2412 case EQ_EXPR:
2413 return NE_EXPR;
2414 case NE_EXPR:
2415 return EQ_EXPR;
2416 case GT_EXPR:
2417 return honor_nans ? UNLE_EXPR : LE_EXPR;
2418 case GE_EXPR:
2419 return honor_nans ? UNLT_EXPR : LT_EXPR;
2420 case LT_EXPR:
2421 return honor_nans ? UNGE_EXPR : GE_EXPR;
2422 case LE_EXPR:
2423 return honor_nans ? UNGT_EXPR : GT_EXPR;
2424 case LTGT_EXPR:
2425 return UNEQ_EXPR;
2426 case UNEQ_EXPR:
2427 return LTGT_EXPR;
2428 case UNGT_EXPR:
2429 return LE_EXPR;
2430 case UNGE_EXPR:
2431 return LT_EXPR;
2432 case UNLT_EXPR:
2433 return GE_EXPR;
2434 case UNLE_EXPR:
2435 return GT_EXPR;
2436 case ORDERED_EXPR:
2437 return UNORDERED_EXPR;
2438 case UNORDERED_EXPR:
2439 return ORDERED_EXPR;
2440 default:
2441 gcc_unreachable ();
2445 /* Similar, but return the comparison that results if the operands are
2446 swapped. This is safe for floating-point. */
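/* For example, a < b is equivalent to b > a, so LT_EXPR maps to GT_EXPR
   and UNGT_EXPR maps to UNLT_EXPR, while symmetric codes such as EQ_EXPR
   and LTGT_EXPR map to themselves.  */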
2448 enum tree_code
2449 swap_tree_comparison (enum tree_code code)
2451 switch (code)
2453 case EQ_EXPR:
2454 case NE_EXPR:
2455 case ORDERED_EXPR:
2456 case UNORDERED_EXPR:
2457 case LTGT_EXPR:
2458 case UNEQ_EXPR:
2459 return code;
2460 case GT_EXPR:
2461 return LT_EXPR;
2462 case GE_EXPR:
2463 return LE_EXPR;
2464 case LT_EXPR:
2465 return GT_EXPR;
2466 case LE_EXPR:
2467 return GE_EXPR;
2468 case UNGT_EXPR:
2469 return UNLT_EXPR;
2470 case UNGE_EXPR:
2471 return UNLE_EXPR;
2472 case UNLT_EXPR:
2473 return UNGT_EXPR;
2474 case UNLE_EXPR:
2475 return UNGE_EXPR;
2476 default:
2477 gcc_unreachable ();
2482 /* Convert a comparison tree code from an enum tree_code representation
2483 into a compcode bit-based encoding. This function is the inverse of
2484 compcode_to_comparison. */
2486 static enum comparison_code
2487 comparison_to_compcode (enum tree_code code)
2489 switch (code)
2491 case LT_EXPR:
2492 return COMPCODE_LT;
2493 case EQ_EXPR:
2494 return COMPCODE_EQ;
2495 case LE_EXPR:
2496 return COMPCODE_LE;
2497 case GT_EXPR:
2498 return COMPCODE_GT;
2499 case NE_EXPR:
2500 return COMPCODE_NE;
2501 case GE_EXPR:
2502 return COMPCODE_GE;
2503 case ORDERED_EXPR:
2504 return COMPCODE_ORD;
2505 case UNORDERED_EXPR:
2506 return COMPCODE_UNORD;
2507 case UNLT_EXPR:
2508 return COMPCODE_UNLT;
2509 case UNEQ_EXPR:
2510 return COMPCODE_UNEQ;
2511 case UNLE_EXPR:
2512 return COMPCODE_UNLE;
2513 case UNGT_EXPR:
2514 return COMPCODE_UNGT;
2515 case LTGT_EXPR:
2516 return COMPCODE_LTGT;
2517 case UNGE_EXPR:
2518 return COMPCODE_UNGE;
2519 default:
2520 gcc_unreachable ();
2524 /* Convert a compcode bit-based encoding of a comparison operator back
2525 to GCC's enum tree_code representation. This function is the
2526 inverse of comparison_to_compcode. */
2528 static enum tree_code
2529 compcode_to_comparison (enum comparison_code code)
2531 switch (code)
2533 case COMPCODE_LT:
2534 return LT_EXPR;
2535 case COMPCODE_EQ:
2536 return EQ_EXPR;
2537 case COMPCODE_LE:
2538 return LE_EXPR;
2539 case COMPCODE_GT:
2540 return GT_EXPR;
2541 case COMPCODE_NE:
2542 return NE_EXPR;
2543 case COMPCODE_GE:
2544 return GE_EXPR;
2545 case COMPCODE_ORD:
2546 return ORDERED_EXPR;
2547 case COMPCODE_UNORD:
2548 return UNORDERED_EXPR;
2549 case COMPCODE_UNLT:
2550 return UNLT_EXPR;
2551 case COMPCODE_UNEQ:
2552 return UNEQ_EXPR;
2553 case COMPCODE_UNLE:
2554 return UNLE_EXPR;
2555 case COMPCODE_UNGT:
2556 return UNGT_EXPR;
2557 case COMPCODE_LTGT:
2558 return LTGT_EXPR;
2559 case COMPCODE_UNGE:
2560 return UNGE_EXPR;
2561 default:
2562 gcc_unreachable ();
2566 /* Return a tree for the comparison which is the combination of
2567 doing the AND or OR (depending on CODE) of the two operations LCODE
2568 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2569 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2570 if this makes the transformation invalid. */
2572 tree
2573 combine_comparisons (location_t loc,
2574 enum tree_code code, enum tree_code lcode,
2575 enum tree_code rcode, tree truth_type,
2576 tree ll_arg, tree lr_arg)
2578 bool honor_nans = HONOR_NANS (ll_arg);
2579 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2580 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2581 int compcode;
2583 switch (code)
2585 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2586 compcode = lcompcode & rcompcode;
2587 break;
2589 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2590 compcode = lcompcode | rcompcode;
2591 break;
2593 default:
2594 return NULL_TREE;
2597 if (!honor_nans)
2599 /* Eliminate unordered comparisons, as well as LTGT and ORD
2600 which are not used unless the mode has NaNs. */
2601 compcode &= ~COMPCODE_UNORD;
2602 if (compcode == COMPCODE_LTGT)
2603 compcode = COMPCODE_NE;
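/* E.g. (x < y) || (x > y) combines to COMPCODE_LT | COMPCODE_GT
   (1 | 4 == 5, i.e. COMPCODE_LTGT), which without NaNs is just x != y.  */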
2604 else if (compcode == COMPCODE_ORD)
2605 compcode = COMPCODE_TRUE;
2607 else if (flag_trapping_math)
2609 /* Check that the original operation and the optimized ones will trap
2610 under the same condition. */
2611 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2612 && (lcompcode != COMPCODE_EQ)
2613 && (lcompcode != COMPCODE_ORD);
2614 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2615 && (rcompcode != COMPCODE_EQ)
2616 && (rcompcode != COMPCODE_ORD);
2617 bool trap = (compcode & COMPCODE_UNORD) == 0
2618 && (compcode != COMPCODE_EQ)
2619 && (compcode != COMPCODE_ORD);
2621 /* In a short-circuited boolean expression the LHS might be
2622 such that the RHS, if evaluated, will never trap. For
2623 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2624 if neither x nor y is NaN. (This is a mixed blessing: for
2625 example, the expression above will never trap, hence
2626 optimizing it to x < y would be invalid). */
2627 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2628 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2629 rtrap = false;
2631 /* If the comparison was short-circuited, and only the RHS
2632 trapped, we may now generate a spurious trap. */
2633 if (rtrap && !ltrap
2634 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2635 return NULL_TREE;
2637 /* If we changed the conditions that cause a trap, we lose. */
2638 if ((ltrap || rtrap) != trap)
2639 return NULL_TREE;
2642 if (compcode == COMPCODE_TRUE)
2643 return constant_boolean_node (true, truth_type);
2644 else if (compcode == COMPCODE_FALSE)
2645 return constant_boolean_node (false, truth_type);
2646 else
2648 enum tree_code tcode;
2650 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2651 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2655 /* Return nonzero if two operands (typically of the same tree node)
2656 are necessarily equal. FLAGS modifies behavior as follows:
2658 If OEP_ONLY_CONST is set, only return nonzero for constants.
2659 This function tests whether the operands are indistinguishable;
2660 it does not test whether they are equal using C's == operation.
2661 The distinction is important for IEEE floating point, because
2662 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2663 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2665 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2666 even though it may hold multiple values during a function.
2667 This is because a GCC tree node guarantees that nothing else is
2668 executed between the evaluation of its "operands" (which may often
2669 be evaluated in arbitrary order). Hence if the operands themselves
2670 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2671 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2672 unset means assuming isochronic (or instantaneous) tree equivalence.
2673 Unless comparing arbitrary expression trees, such as from different
2674 statements, this flag can usually be left unset.
2676 If OEP_PURE_SAME is set, then pure functions with identical arguments
2677 are considered the same. It is used when the caller has other ways
2678 to ensure that global memory is unchanged in between.
2680 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2681 not values of expressions.
2683 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2684 any operand with side effects. This is unnecessarily conservative in the
2685 case where we know that arg0 and arg1 are in disjoint code paths (such as
2686 the two arms of a ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2687 addresses with TREE_CONSTANT flag set so we know that &var == &var
2688 even if var is volatile. */
2690 int
2691 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2693 /* If either is ERROR_MARK, they aren't equal. */
2694 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2695 || TREE_TYPE (arg0) == error_mark_node
2696 || TREE_TYPE (arg1) == error_mark_node)
2697 return 0;
2699 /* Similarly, if either does not have a type (like a released SSA name),
2700 they aren't equal. */
2701 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2702 return 0;
2704 /* We cannot consider pointers to different address spaces equal. */
2705 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2706 && POINTER_TYPE_P (TREE_TYPE (arg1))
2707 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2708 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2709 return 0;
2711 /* Check equality of integer constants before bailing out due to
2712 precision differences. */
2713 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2715 /* Address of INTEGER_CST is not defined; check that we did not forget
2716 to drop the OEP_ADDRESS_OF flags. */
2717 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2718 return tree_int_cst_equal (arg0, arg1);
2721 if (!(flags & OEP_ADDRESS_OF))
2723 /* If both types don't have the same signedness, then we can't consider
2724 them equal. We must check this before the STRIP_NOPS calls
2725 because they may change the signedness of the arguments. As pointers
2726 strictly don't have a signedness, require either two pointers or
2727 two non-pointers as well. */
2728 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2729 || POINTER_TYPE_P (TREE_TYPE (arg0))
2730 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2731 return 0;
2733 /* If both types don't have the same precision, then it is not safe
2734 to strip NOPs. */
2735 if (element_precision (TREE_TYPE (arg0))
2736 != element_precision (TREE_TYPE (arg1)))
2737 return 0;
2739 STRIP_NOPS (arg0);
2740 STRIP_NOPS (arg1);
2742 #if 0
2743 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2744 sanity check once the issue is solved. */
2745 else
2746 /* Addresses of conversions and SSA_NAMEs (and many other things)
2747 are not defined. Check that we did not forget to drop the
2748 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2749 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2750 && TREE_CODE (arg0) != SSA_NAME);
2751 #endif
2753 /* In case both args are comparisons but with different comparison
2754 code, try to swap the comparison operands of one arg to produce
2755 a match and compare that variant. */
2756 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2757 && COMPARISON_CLASS_P (arg0)
2758 && COMPARISON_CLASS_P (arg1))
2760 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2762 if (TREE_CODE (arg0) == swap_code)
2763 return operand_equal_p (TREE_OPERAND (arg0, 0),
2764 TREE_OPERAND (arg1, 1), flags)
2765 && operand_equal_p (TREE_OPERAND (arg0, 1),
2766 TREE_OPERAND (arg1, 0), flags);
2769 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2771 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2772 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2774 else if (flags & OEP_ADDRESS_OF)
2776 /* If we are interested in comparing addresses ignore
2777 MEM_REF wrappings of the base that can appear just for
2778 TBAA reasons. */
2779 if (TREE_CODE (arg0) == MEM_REF
2780 && DECL_P (arg1)
2781 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2782 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2783 && integer_zerop (TREE_OPERAND (arg0, 1)))
2784 return 1;
2785 else if (TREE_CODE (arg1) == MEM_REF
2786 && DECL_P (arg0)
2787 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2788 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2789 && integer_zerop (TREE_OPERAND (arg1, 1)))
2790 return 1;
2791 return 0;
2793 else
2794 return 0;
2797 /* When not checking addresses, this is needed for conversions and for
2798 COMPONENT_REF. Might as well play it safe and always test this. */
2799 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2800 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2801 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2802 && !(flags & OEP_ADDRESS_OF)))
2803 return 0;
2805 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2806 We don't care about side effects in that case because the SAVE_EXPR
2807 takes care of that for us. In all other cases, two expressions are
2808 equal if they have no side effects. If we have two identical
2809 expressions with side effects that should be treated the same due
2810 to the only side effects being identical SAVE_EXPR's, that will
2811 be detected in the recursive calls below.
2812 If we are taking an invariant address of two identical objects
2813 they are necessarily equal as well. */
2814 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2815 && (TREE_CODE (arg0) == SAVE_EXPR
2816 || (flags & OEP_MATCH_SIDE_EFFECTS)
2817 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2818 return 1;
2820 /* Next handle constant cases, those for which we can return 1 even
2821 if ONLY_CONST is set. */
2822 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2823 switch (TREE_CODE (arg0))
2825 case INTEGER_CST:
2826 return tree_int_cst_equal (arg0, arg1);
2828 case FIXED_CST:
2829 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2830 TREE_FIXED_CST (arg1));
2832 case REAL_CST:
2833 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2834 return 1;
2837 if (!HONOR_SIGNED_ZEROS (arg0))
2839 /* If we do not distinguish between positive and negative zero,
2840 consider them equal. */
2841 if (real_zerop (arg0) && real_zerop (arg1))
2842 return 1;
2844 return 0;
2846 case VECTOR_CST:
2848 unsigned i;
2850 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2851 return 0;
2853 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2855 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2856 VECTOR_CST_ELT (arg1, i), flags))
2857 return 0;
2859 return 1;
2862 case COMPLEX_CST:
2863 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2864 flags)
2865 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2866 flags));
2868 case STRING_CST:
2869 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2870 && ! memcmp (TREE_STRING_POINTER (arg0),
2871 TREE_STRING_POINTER (arg1),
2872 TREE_STRING_LENGTH (arg0)));
2874 case ADDR_EXPR:
2875 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2876 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2877 flags | OEP_ADDRESS_OF
2878 | OEP_MATCH_SIDE_EFFECTS);
2879 case CONSTRUCTOR:
2880 /* In GIMPLE empty constructors are allowed in initializers of
2881 aggregates. */
2882 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2883 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2884 default:
2885 break;
2888 if (flags & OEP_ONLY_CONST)
2889 return 0;
2891 /* Define macros to test an operand from arg0 and arg1 for equality and a
2892 variant that allows null and views null as being different from any
2893 non-null value. In the latter case, if either is null, then both
2894 must be; otherwise, do the normal comparison. */
2895 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2896 TREE_OPERAND (arg1, N), flags)
2898 #define OP_SAME_WITH_NULL(N) \
2899 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2900 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2902 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2904 case tcc_unary:
2905 /* Two conversions are equal only if signedness and modes match. */
2906 switch (TREE_CODE (arg0))
2908 CASE_CONVERT:
2909 case FIX_TRUNC_EXPR:
2910 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2911 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2912 return 0;
2913 break;
2914 default:
2915 break;
2918 return OP_SAME (0);
2921 case tcc_comparison:
2922 case tcc_binary:
2923 if (OP_SAME (0) && OP_SAME (1))
2924 return 1;
2926 /* For commutative ops, allow the other order. */
2927 return (commutative_tree_code (TREE_CODE (arg0))
2928 && operand_equal_p (TREE_OPERAND (arg0, 0),
2929 TREE_OPERAND (arg1, 1), flags)
2930 && operand_equal_p (TREE_OPERAND (arg0, 1),
2931 TREE_OPERAND (arg1, 0), flags));
2933 case tcc_reference:
2934 /* If either of the pointer (or reference) expressions we are
2935 dereferencing contain a side effect, these cannot be equal,
2936 but their addresses can be. */
2937 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2938 && (TREE_SIDE_EFFECTS (arg0)
2939 || TREE_SIDE_EFFECTS (arg1)))
2940 return 0;
2942 switch (TREE_CODE (arg0))
2944 case INDIRECT_REF:
2945 if (!(flags & OEP_ADDRESS_OF)
2946 && (TYPE_ALIGN (TREE_TYPE (arg0))
2947 != TYPE_ALIGN (TREE_TYPE (arg1))))
2948 return 0;
2949 flags &= ~OEP_ADDRESS_OF;
2950 return OP_SAME (0);
2952 case REALPART_EXPR:
2953 case IMAGPART_EXPR:
2954 case VIEW_CONVERT_EXPR:
2955 return OP_SAME (0);
2957 case TARGET_MEM_REF:
2958 case MEM_REF:
2959 if (!(flags & OEP_ADDRESS_OF))
2961 /* Require equal access sizes. */
2962 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2963 && (!TYPE_SIZE (TREE_TYPE (arg0))
2964 || !TYPE_SIZE (TREE_TYPE (arg1))
2965 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2966 TYPE_SIZE (TREE_TYPE (arg1)),
2967 flags)))
2968 return 0;
2969 /* Verify that accesses are TBAA compatible. */
2970 if (flag_strict_aliasing
2971 && (!alias_ptr_types_compatible_p
2972 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2973 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2974 || (MR_DEPENDENCE_CLIQUE (arg0)
2975 != MR_DEPENDENCE_CLIQUE (arg1))
2976 || (MR_DEPENDENCE_BASE (arg0)
2977 != MR_DEPENDENCE_BASE (arg1))))
2978 return 0;
2979 /* Verify that alignment is compatible. */
2980 if (TYPE_ALIGN (TREE_TYPE (arg0))
2981 != TYPE_ALIGN (TREE_TYPE (arg1)))
2982 return 0;
2984 flags &= ~OEP_ADDRESS_OF;
2985 return (OP_SAME (0) && OP_SAME (1)
2986 /* TARGET_MEM_REF requires equal extra operands. */
2987 && (TREE_CODE (arg0) != TARGET_MEM_REF
2988 || (OP_SAME_WITH_NULL (2)
2989 && OP_SAME_WITH_NULL (3)
2990 && OP_SAME_WITH_NULL (4))));
2992 case ARRAY_REF:
2993 case ARRAY_RANGE_REF:
2994 /* Operands 2 and 3 may be null.
2995 Compare the array index by value first if it is constant, as we
2996 may have different types but the same value here. */
2997 if (!OP_SAME (0))
2998 return 0;
2999 flags &= ~OEP_ADDRESS_OF;
3000 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3001 TREE_OPERAND (arg1, 1))
3002 || OP_SAME (1))
3003 && OP_SAME_WITH_NULL (2)
3004 && OP_SAME_WITH_NULL (3));
3006 case COMPONENT_REF:
3007 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3008 may be NULL when we're called to compare MEM_EXPRs. */
3009 if (!OP_SAME_WITH_NULL (0)
3010 || !OP_SAME (1))
3011 return 0;
3012 flags &= ~OEP_ADDRESS_OF;
3013 return OP_SAME_WITH_NULL (2);
3015 case BIT_FIELD_REF:
3016 if (!OP_SAME (0))
3017 return 0;
3018 flags &= ~OEP_ADDRESS_OF;
3019 return OP_SAME (1) && OP_SAME (2);
3021 default:
3022 return 0;
3025 case tcc_expression:
3026 switch (TREE_CODE (arg0))
3028 case ADDR_EXPR:
3029 /* Be sure we pass the right ADDRESS_OF flag. */
3030 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3031 return operand_equal_p (TREE_OPERAND (arg0, 0),
3032 TREE_OPERAND (arg1, 0),
3033 flags | OEP_ADDRESS_OF);
3035 case TRUTH_NOT_EXPR:
3036 return OP_SAME (0);
3038 case TRUTH_ANDIF_EXPR:
3039 case TRUTH_ORIF_EXPR:
3040 return OP_SAME (0) && OP_SAME (1);
3042 case FMA_EXPR:
3043 case WIDEN_MULT_PLUS_EXPR:
3044 case WIDEN_MULT_MINUS_EXPR:
3045 if (!OP_SAME (2))
3046 return 0;
3047 /* The multiplication operands are commutative. */
3048 /* FALLTHRU */
3050 case TRUTH_AND_EXPR:
3051 case TRUTH_OR_EXPR:
3052 case TRUTH_XOR_EXPR:
3053 if (OP_SAME (0) && OP_SAME (1))
3054 return 1;
3056 /* Otherwise take into account this is a commutative operation. */
3057 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3058 TREE_OPERAND (arg1, 1), flags)
3059 && operand_equal_p (TREE_OPERAND (arg0, 1),
3060 TREE_OPERAND (arg1, 0), flags));
3062 case COND_EXPR:
3063 case VEC_COND_EXPR:
3064 case DOT_PROD_EXPR:
3065 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3067 default:
3068 return 0;
3071 case tcc_vl_exp:
3072 switch (TREE_CODE (arg0))
3074 case CALL_EXPR:
3075 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3076 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3077 /* If one CALL_EXPR is an internal function call and the other is
3078 a normal function call, then they are not equal. */
3079 return 0;
3080 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3082 /* If the CALL_EXPRs call different internal functions, then they
3083 are not equal. */
3084 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3085 return 0;
3087 else
3089 /* If the CALL_EXPRs call different functions, then they are not
3090 equal. */
3091 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3092 flags))
3093 return 0;
3096 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3098 unsigned int cef = call_expr_flags (arg0);
3099 if (flags & OEP_PURE_SAME)
3100 cef &= ECF_CONST | ECF_PURE;
3101 else
3102 cef &= ECF_CONST;
3103 if (!cef)
3104 return 0;
3107 /* Now see if all the arguments are the same. */
3109 const_call_expr_arg_iterator iter0, iter1;
3110 const_tree a0, a1;
3111 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3112 a1 = first_const_call_expr_arg (arg1, &iter1);
3113 a0 && a1;
3114 a0 = next_const_call_expr_arg (&iter0),
3115 a1 = next_const_call_expr_arg (&iter1))
3116 if (! operand_equal_p (a0, a1, flags))
3117 return 0;
3119 /* If we get here and both argument lists are exhausted
3120 then the CALL_EXPRs are equal. */
3121 return ! (a0 || a1);
3123 default:
3124 return 0;
3127 case tcc_declaration:
3128 /* Consider __builtin_sqrt equal to sqrt. */
3129 return (TREE_CODE (arg0) == FUNCTION_DECL
3130 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3131 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3132 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3134 case tcc_exceptional:
3135 if (TREE_CODE (arg0) == CONSTRUCTOR)
3137 /* In GIMPLE constructors are used only to build vectors from
3138 elements. Individual elements in the constructor must be
3139 indexed in increasing order and form an initial sequence.
3141 We make no effort to compare constructors in GENERIC.
3142 (see sem_variable::equals in ipa-icf which can do so for
3143 constants). */
3144 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3145 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3146 return 0;
3148 /* Be sure that the constructed vectors have the same representation.
3149 So far we have only tested that element precision and modes match.
3150 Vectors may be BLKmode, so also check that the number of
3151 parts matches. */
3152 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3153 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3154 return 0;
3156 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3157 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3158 unsigned int len = vec_safe_length (v0);
3160 if (len != vec_safe_length (v1))
3161 return 0;
3163 for (unsigned int i = 0; i < len; i++)
3165 constructor_elt *c0 = &(*v0)[i];
3166 constructor_elt *c1 = &(*v1)[i];
3168 if (!operand_equal_p (c0->value, c1->value, flags)
3169 /* In GIMPLE the indexes can be either NULL or matching i.
3170 Double check this so we won't get false
3171 positives for GENERIC. */
3172 || (c0->index
3173 && (TREE_CODE (c0->index) != INTEGER_CST
3174 || !compare_tree_int (c0->index, i)))
3175 || (c1->index
3176 && (TREE_CODE (c1->index) != INTEGER_CST
3177 || !compare_tree_int (c1->index, i))))
3178 return 0;
3180 return 1;
3182 return 0;
3184 default:
3185 return 0;
3188 #undef OP_SAME
3189 #undef OP_SAME_WITH_NULL
3192 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3193 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3195 When in doubt, return 0. */
3197 static int
3198 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3200 int unsignedp1, unsignedpo;
3201 tree primarg0, primarg1, primother;
3202 unsigned int correct_width;
3204 if (operand_equal_p (arg0, arg1, 0))
3205 return 1;
3207 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3208 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3209 return 0;
3211 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3212 and see if the inner values are the same. This removes any
3213 signedness comparison, which doesn't matter here. */
3214 primarg0 = arg0, primarg1 = arg1;
3215 STRIP_NOPS (primarg0);
3216 STRIP_NOPS (primarg1);
3217 if (operand_equal_p (primarg0, primarg1, 0))
3218 return 1;
3220 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3221 actual comparison operand, ARG0.
3223 First throw away any conversions to wider types
3224 already present in the operands. */
3226 primarg1 = get_narrower (arg1, &unsignedp1);
3227 primother = get_narrower (other, &unsignedpo);
3229 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3230 if (unsignedp1 == unsignedpo
3231 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3232 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3234 tree type = TREE_TYPE (arg0);
3236 /* Make sure the shorter operand is extended the right way
3237 to match the longer operand. */
3238 primarg1 = fold_convert (signed_or_unsigned_type_for
3239 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3241 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3242 return 1;
3245 return 0;
3248 /* See if ARG is an expression that is either a comparison or is performing
3249 arithmetic on comparisons. The comparisons must only be comparing
3250 two different values, which will be stored in *CVAL1 and *CVAL2; if
3251 they are nonzero it means that some operands have already been found.
3252 No variables may be used anywhere else in the expression except in the
3253 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3254 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3256 If this is true, return 1. Otherwise, return zero. */
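/* For instance, (a < b) && (b == a) qualifies: the only values compared
   are a and b, so *CVAL1 and *CVAL2 end up as a and b and the caller can
   then evaluate the whole expression for chosen values of the pair.  */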
3258 static int
3259 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3261 enum tree_code code = TREE_CODE (arg);
3262 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3264 /* We can handle some of the tcc_expression cases here. */
3265 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3266 tclass = tcc_unary;
3267 else if (tclass == tcc_expression
3268 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3269 || code == COMPOUND_EXPR))
3270 tclass = tcc_binary;
3272 else if (tclass == tcc_expression && code == SAVE_EXPR
3273 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3275 /* If we've already found a CVAL1 or CVAL2, this expression is
3276 too complex to handle. */
3277 if (*cval1 || *cval2)
3278 return 0;
3280 tclass = tcc_unary;
3281 *save_p = 1;
3284 switch (tclass)
3286 case tcc_unary:
3287 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3289 case tcc_binary:
3290 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3291 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3292 cval1, cval2, save_p));
3294 case tcc_constant:
3295 return 1;
3297 case tcc_expression:
3298 if (code == COND_EXPR)
3299 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3300 cval1, cval2, save_p)
3301 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3302 cval1, cval2, save_p)
3303 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3304 cval1, cval2, save_p));
3305 return 0;
3307 case tcc_comparison:
3308 /* First see if we can handle the first operand, then the second. For
3309 the second operand, we know *CVAL1 can't be zero. It must be that
3310 each of the values appears on one side of the comparison; test for the
3311 case where this isn't true by failing if the two operands
3312 are the same. */
3314 if (operand_equal_p (TREE_OPERAND (arg, 0),
3315 TREE_OPERAND (arg, 1), 0))
3316 return 0;
3318 if (*cval1 == 0)
3319 *cval1 = TREE_OPERAND (arg, 0);
3320 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3322 else if (*cval2 == 0)
3323 *cval2 = TREE_OPERAND (arg, 0);
3324 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3326 else
3327 return 0;
3329 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3331 else if (*cval2 == 0)
3332 *cval2 = TREE_OPERAND (arg, 1);
3333 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3335 else
3336 return 0;
3338 return 1;
3340 default:
3341 return 0;
3345 /* ARG is a tree that is known to contain just arithmetic operations and
3346 comparisons. Evaluate the operations in the tree substituting NEW0 for
3347 any occurrence of OLD0 as an operand of a comparison and likewise for
3348 NEW1 and OLD1. */
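/* For example, substituting OLD0 = a, NEW0 = x, OLD1 = b, NEW1 = y into
   (a < b) && (b < a) rebuilds the tree as (x < y) && (y < x).  */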
3350 static tree
3351 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3352 tree old1, tree new1)
3354 tree type = TREE_TYPE (arg);
3355 enum tree_code code = TREE_CODE (arg);
3356 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3358 /* We can handle some of the tcc_expression cases here. */
3359 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3360 tclass = tcc_unary;
3361 else if (tclass == tcc_expression
3362 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3363 tclass = tcc_binary;
3365 switch (tclass)
3367 case tcc_unary:
3368 return fold_build1_loc (loc, code, type,
3369 eval_subst (loc, TREE_OPERAND (arg, 0),
3370 old0, new0, old1, new1));
3372 case tcc_binary:
3373 return fold_build2_loc (loc, code, type,
3374 eval_subst (loc, TREE_OPERAND (arg, 0),
3375 old0, new0, old1, new1),
3376 eval_subst (loc, TREE_OPERAND (arg, 1),
3377 old0, new0, old1, new1));
3379 case tcc_expression:
3380 switch (code)
3382 case SAVE_EXPR:
3383 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3384 old1, new1);
3386 case COMPOUND_EXPR:
3387 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3388 old1, new1);
3390 case COND_EXPR:
3391 return fold_build3_loc (loc, code, type,
3392 eval_subst (loc, TREE_OPERAND (arg, 0),
3393 old0, new0, old1, new1),
3394 eval_subst (loc, TREE_OPERAND (arg, 1),
3395 old0, new0, old1, new1),
3396 eval_subst (loc, TREE_OPERAND (arg, 2),
3397 old0, new0, old1, new1));
3398 default:
3399 break;
3401 /* Fall through - ??? */
3403 case tcc_comparison:
3405 tree arg0 = TREE_OPERAND (arg, 0);
3406 tree arg1 = TREE_OPERAND (arg, 1);
3408 /* We need to check both for exact equality and tree equality. The
3409 former will be true if the operand has a side-effect. In that
3410 case, we know the operand occurred exactly once. */
3412 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3413 arg0 = new0;
3414 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3415 arg0 = new1;
3417 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3418 arg1 = new0;
3419 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3420 arg1 = new1;
3422 return fold_build2_loc (loc, code, type, arg0, arg1);
3425 default:
3426 return arg;
3430 /* Return a tree for the case when the result of an expression is RESULT
3431 converted to TYPE and OMITTED was previously an operand of the expression
3432 but is now not needed (e.g., we folded OMITTED * 0).
3434 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3435 the conversion of RESULT to TYPE. */
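/* For instance, when f () * 0 is folded to 0, the call cannot simply be
   dropped: passing the call as OMITTED produces the COMPOUND_EXPR
   (f (), 0), so the side effects of the call are preserved.  */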
3437 tree
3438 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3440 tree t = fold_convert_loc (loc, type, result);
3442 /* If the resulting operand is an empty statement, just return the omitted
3443 statement cast to void. */
3444 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3445 return build1_loc (loc, NOP_EXPR, void_type_node,
3446 fold_ignored_result (omitted));
3448 if (TREE_SIDE_EFFECTS (omitted))
3449 return build2_loc (loc, COMPOUND_EXPR, type,
3450 fold_ignored_result (omitted), t);
3452 return non_lvalue_loc (loc, t);
3455 /* Return a tree for the case when the result of an expression is RESULT
3456 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3457 of the expression but are now not needed.
3459 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3460 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3461 evaluated before OMITTED2. Otherwise, if neither has side effects,
3462 just do the conversion of RESULT to TYPE. */
3464 tree
3465 omit_two_operands_loc (location_t loc, tree type, tree result,
3466 tree omitted1, tree omitted2)
3468 tree t = fold_convert_loc (loc, type, result);
3470 if (TREE_SIDE_EFFECTS (omitted2))
3471 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3472 if (TREE_SIDE_EFFECTS (omitted1))
3473 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3475 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3479 /* Return a simplified tree node for the truth-negation of ARG. This
3480 never alters ARG itself. We assume that ARG is an operation that
3481 returns a truth value (0 or 1).
3483 FIXME: one would think we would fold the result, but it causes
3484 problems with the dominator optimizer. */
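/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases below apply De Morgan's
   laws: !(a && b) becomes !a || !b, and !(a || b) becomes !a && !b.  */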
3486 static tree
3487 fold_truth_not_expr (location_t loc, tree arg)
3489 tree type = TREE_TYPE (arg);
3490 enum tree_code code = TREE_CODE (arg);
3491 location_t loc1, loc2;
3493 /* If this is a comparison, we can simply invert it, except for
3494 floating-point non-equality comparisons, in which case we just
3495 enclose a TRUTH_NOT_EXPR around what we have. */
3497 if (TREE_CODE_CLASS (code) == tcc_comparison)
3499 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3500 if (FLOAT_TYPE_P (op_type)
3501 && flag_trapping_math
3502 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3503 && code != NE_EXPR && code != EQ_EXPR)
3504 return NULL_TREE;
3506 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3507 if (code == ERROR_MARK)
3508 return NULL_TREE;
3510 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3511 TREE_OPERAND (arg, 1));
3514 switch (code)
3516 case INTEGER_CST:
3517 return constant_boolean_node (integer_zerop (arg), type);
3519 case TRUTH_AND_EXPR:
3520 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3521 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3522 return build2_loc (loc, TRUTH_OR_EXPR, type,
3523 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3524 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3526 case TRUTH_OR_EXPR:
3527 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3528 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3529 return build2_loc (loc, TRUTH_AND_EXPR, type,
3530 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3531 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3533 case TRUTH_XOR_EXPR:
3534 /* Here we can invert either operand. We invert the first operand
3535 unless the second operand is a TRUTH_NOT_EXPR in which case our
3536 result is the XOR of the first operand with the inside of the
3537 negation of the second operand. */
3539 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3540 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3541 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3542 else
3543 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3544 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3545 TREE_OPERAND (arg, 1));
3547 case TRUTH_ANDIF_EXPR:
3548 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3549 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3550 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3551 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3552 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3554 case TRUTH_ORIF_EXPR:
3555 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3556 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3557 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3558 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3559 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3561 case TRUTH_NOT_EXPR:
3562 return TREE_OPERAND (arg, 0);
3564 case COND_EXPR:
3566 tree arg1 = TREE_OPERAND (arg, 1);
3567 tree arg2 = TREE_OPERAND (arg, 2);
3569 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3570 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3572 /* A COND_EXPR may have a throw as one operand, which
3573 then has void type. Just leave void operands
3574 as they are. */
3575 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3576 VOID_TYPE_P (TREE_TYPE (arg1))
3577 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3578 VOID_TYPE_P (TREE_TYPE (arg2))
3579 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3582 case COMPOUND_EXPR:
3583 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3584 return build2_loc (loc, COMPOUND_EXPR, type,
3585 TREE_OPERAND (arg, 0),
3586 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3588 case NON_LVALUE_EXPR:
3589 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3590 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3592 CASE_CONVERT:
3593 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3594 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3596 /* ... fall through ... */
3598 case FLOAT_EXPR:
3599 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3600 return build1_loc (loc, TREE_CODE (arg), type,
3601 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3603 case BIT_AND_EXPR:
3604 if (!integer_onep (TREE_OPERAND (arg, 1)))
3605 return NULL_TREE;
3606 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3608 case SAVE_EXPR:
3609 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3611 case CLEANUP_POINT_EXPR:
3612 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3613 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3614 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3616 default:
3617 return NULL_TREE;
3621 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3622 assume that ARG is an operation that returns a truth value (0 or 1
3623 for scalars, 0 or -1 for vectors). Return the folded expression if
3624 folding is successful. Otherwise, return NULL_TREE. */
3626 static tree
3627 fold_invert_truthvalue (location_t loc, tree arg)
3629 tree type = TREE_TYPE (arg);
3630 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3631 ? BIT_NOT_EXPR
3632 : TRUTH_NOT_EXPR,
3633 type, arg);
3636 /* Return a simplified tree node for the truth-negation of ARG. This
3637 never alters ARG itself. We assume that ARG is an operation that
3638 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
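/* For vectors a truth value is a mask of all-zero or all-one lanes, so
   logical negation is bitwise negation: BIT_NOT_EXPR flips 0 to -1 and
   -1 to 0 in each lane, whereas scalars use TRUTH_NOT_EXPR.  */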
3640 tree
3641 invert_truthvalue_loc (location_t loc, tree arg)
3643 if (TREE_CODE (arg) == ERROR_MARK)
3644 return arg;
3646 tree type = TREE_TYPE (arg);
3647 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3648 ? BIT_NOT_EXPR
3649 : TRUTH_NOT_EXPR,
3650 type, arg);
3653 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3654 with code CODE. This optimization is unsafe. */
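/* It is unsafe because, for example, A / C + B / C and (A + B) / C need
   not round identically in floating point, and the constant 1 / C1
   computed below may not be exactly representable.  */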
3655 static tree
3656 distribute_real_division (location_t loc, enum tree_code code, tree type,
3657 tree arg0, tree arg1)
3659 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3660 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3662 /* (A / C) +- (B / C) -> (A +- B) / C. */
3663 if (mul0 == mul1
3664 && operand_equal_p (TREE_OPERAND (arg0, 1),
3665 TREE_OPERAND (arg1, 1), 0))
3666 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3667 fold_build2_loc (loc, code, type,
3668 TREE_OPERAND (arg0, 0),
3669 TREE_OPERAND (arg1, 0)),
3670 TREE_OPERAND (arg0, 1));
3672 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3673 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3674 TREE_OPERAND (arg1, 0), 0)
3675 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3676 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3678 REAL_VALUE_TYPE r0, r1;
3679 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3680 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3681 if (!mul0)
3682 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3683 if (!mul1)
3684 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3685 real_arithmetic (&r0, code, &r0, &r1);
3686 return fold_build2_loc (loc, MULT_EXPR, type,
3687 TREE_OPERAND (arg0, 0),
3688 build_real (type, r0));
3691 return NULL_TREE;
3694 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3695 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3696 and uses reverse storage order if REVERSEP is nonzero. */
3698 static tree
3699 make_bit_field_ref (location_t loc, tree inner, tree type,
3700 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3701 int unsignedp, int reversep)
3703 tree result, bftype;
3705 if (bitpos == 0 && !reversep)
3707 tree size = TYPE_SIZE (TREE_TYPE (inner));
3708 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3709 || POINTER_TYPE_P (TREE_TYPE (inner)))
3710 && tree_fits_shwi_p (size)
3711 && tree_to_shwi (size) == bitsize)
3712 return fold_convert_loc (loc, type, inner);
3715 bftype = type;
3716 if (TYPE_PRECISION (bftype) != bitsize
3717 || TYPE_UNSIGNED (bftype) == !unsignedp)
3718 bftype = build_nonstandard_integer_type (bitsize, 0);
3720 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3721 size_int (bitsize), bitsize_int (bitpos));
3722 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3724 if (bftype != type)
3725 result = fold_convert_loc (loc, type, result);
3727 return result;
3730 /* Optimize a bit-field compare.
3732 There are two cases: the first is a compare against a constant and the
3733 second is a comparison of two items where the fields are at the same
3734 bit position relative to the start of a chunk (byte, halfword, word)
3735 large enough to contain it. In these cases we can avoid the shift
3736 implicit in bitfield extractions.
3738 For constants, we emit a compare of the shifted constant with the
3739 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3740 compared. For two fields at the same position, we do the ANDs with the
3741 similar mask and compare the result of the ANDs.
3743 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3744 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3745 are the left and right operands of the comparison, respectively.
3747 If the optimization described above can be done, we return the resulting
3748 tree. Otherwise we return zero. */
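/* A sketch of the constant case, assuming little-endian bit numbering
   (BYTES_BIG_ENDIAN false) and no reverse storage order: comparing a
   3-bit field at bit position 2 of a byte B against the constant 5
   becomes (B & 0x1c) == 0x14, i.e. the mask covers bits 2..4 and the
   constant is shifted left by 2, avoiding an explicit extraction.  */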
3750 static tree
3751 optimize_bit_field_compare (location_t loc, enum tree_code code,
3752 tree compare_type, tree lhs, tree rhs)
3754 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3755 tree type = TREE_TYPE (lhs);
3756 tree unsigned_type;
3757 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3758 machine_mode lmode, rmode, nmode;
3759 int lunsignedp, runsignedp;
3760 int lreversep, rreversep;
3761 int lvolatilep = 0, rvolatilep = 0;
3762 tree linner, rinner = NULL_TREE;
3763 tree mask;
3764 tree offset;
3766 /* Get all the information about the extractions being done. If the bit size
3767 is the same as the size of the underlying object, we aren't doing an
3768 extraction at all and so can do nothing. We also don't want to
3769 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3770 then will no longer be able to replace it. */
3771 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3772 &lunsignedp, &lreversep, &lvolatilep, false);
3773 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3774 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3775 return 0;
3777 if (const_p)
3778 rreversep = lreversep;
3779 else
3781 /* If this is not a constant, we can only do something if bit positions,
3782 sizes, signedness and storage order are the same. */
3783 rinner
3784 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3785 &runsignedp, &rreversep, &rvolatilep, false);
3787 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3788 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3789 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3790 return 0;
3793 /* See if we can find a mode to refer to this field. We should be able to,
3794 but fail if we can't. */
3795 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3796 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3797 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3798 TYPE_ALIGN (TREE_TYPE (rinner))),
3799 word_mode, false);
3800 if (nmode == VOIDmode)
3801 return 0;
3803 /* Set an unsigned type of the precision of this mode for the
3804 shifts below. */
3805 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3807 /* Compute the bit position and size for the new reference and our offset
3808 within it. If the new reference is the same size as the original, we
3809 won't optimize anything, so return zero. */
3810 nbitsize = GET_MODE_BITSIZE (nmode);
3811 nbitpos = lbitpos & ~ (nbitsize - 1);
3812 lbitpos -= nbitpos;
3813 if (nbitsize == lbitsize)
3814 return 0;
3816 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3817 lbitpos = nbitsize - lbitsize - lbitpos;
3819 /* Make the mask to be used against the extracted field. */
3820 mask = build_int_cst_type (unsigned_type, -1);
3821 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3822 mask = const_binop (RSHIFT_EXPR, mask,
3823 size_int (nbitsize - lbitsize - lbitpos));
3825 if (! const_p)
3826 /* If not comparing with constant, just rework the comparison
3827 and return. */
3828 return fold_build2_loc (loc, code, compare_type,
3829 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3830 make_bit_field_ref (loc, linner,
3831 unsigned_type,
3832 nbitsize, nbitpos,
3833 1, lreversep),
3834 mask),
3835 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3836 make_bit_field_ref (loc, rinner,
3837 unsigned_type,
3838 nbitsize, nbitpos,
3839 1, rreversep),
3840 mask));
3842 /* Otherwise, we are handling the constant case. See if the constant is too
3843 big for the field. Warn and fold the comparison to a constant if so. We do
3844 this not only for its own sake, but to avoid having to test for this
3845 error case below. If we didn't, we might generate wrong code.
3847 For unsigned fields, the constant shifted right by the field length should
3848 be all zero. For signed fields, the high-order bits should agree with
3849 the sign bit. */
3851 if (lunsignedp)
3853 if (wi::lrshift (rhs, lbitsize) != 0)
3855 warning (0, "comparison is always %d due to width of bit-field",
3856 code == NE_EXPR);
3857 return constant_boolean_node (code == NE_EXPR, compare_type);
3860 else
3862 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3863 if (tem != 0 && tem != -1)
3865 warning (0, "comparison is always %d due to width of bit-field",
3866 code == NE_EXPR);
3867 return constant_boolean_node (code == NE_EXPR, compare_type);
3871 /* Single-bit compares should always be against zero. */
3872 if (lbitsize == 1 && ! integer_zerop (rhs))
3874 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3875 rhs = build_int_cst (type, 0);
3878 /* Make a new bit-field reference, shift the constant over the
3879 appropriate number of bits, and mask it with the computed mask
3880 (in case this was a signed field). */
3881 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3882 lreversep);
3884 rhs = const_binop (BIT_AND_EXPR,
3885 const_binop (LSHIFT_EXPR,
3886 fold_convert_loc (loc, unsigned_type, rhs),
3887 size_int (lbitpos)),
3888 mask);
3890 lhs = build2_loc (loc, code, compare_type,
3891 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3892 return lhs;
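/* Editorial illustration, not part of the original source: a worked
   instance of the constant case above, assuming a little-endian target
   with 8-bit bytes and a hypothetical

     struct s { unsigned a : 2; unsigned f : 3; } x;

   For the test "x.f == 5", F occupies bits 2..4 of its containing byte,
   so LBITPOS is 2 and the mask built above is (7 << 2) == 0x1c.  The
   comparison folds to a mask-and-compare on the whole byte,

     (byte & 0x1c) == (5 << 2), i.e. (byte & 0x1c) == 0x14,

   avoiding the shift implicit in extracting X.F.  */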
3895 /* Subroutine for fold_truth_andor_1: decode a field reference.
3897 If EXP is a comparison reference, we return the innermost reference.
3899 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3900 set to the starting bit number.
3902 If the innermost field can be completely contained in a mode-sized
3903 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3905 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3906 otherwise it is not changed.
3908 *PUNSIGNEDP is set to the signedness of the field.
3910 *PREVERSEP is set to the storage order of the field.
3912 *PMASK is set to the mask used. This is either contained in a
3913 BIT_AND_EXPR or derived from the width of the field.
3915 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3917 Return 0 if this is not a component reference or is one that we can't
3918 do anything with. */
3920 static tree
3921 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3922 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3923 int *punsignedp, int *preversep, int *pvolatilep,
3924 tree *pmask, tree *pand_mask)
3926 tree outer_type = 0;
3927 tree and_mask = 0;
3928 tree mask, inner, offset;
3929 tree unsigned_type;
3930 unsigned int precision;
3932 /* All the optimizations using this function assume integer fields.
3933 There are problems with FP fields since the type_for_size call
3934 below can fail for, e.g., XFmode. */
3935 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3936 return 0;
3938 /* We are interested in the bare arrangement of bits, so strip everything
3939 that doesn't affect the machine mode. However, record the type of the
3940 outermost expression if it may matter below. */
3941 if (CONVERT_EXPR_P (exp)
3942 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3943 outer_type = TREE_TYPE (exp);
3944 STRIP_NOPS (exp);
3946 if (TREE_CODE (exp) == BIT_AND_EXPR)
3948 and_mask = TREE_OPERAND (exp, 1);
3949 exp = TREE_OPERAND (exp, 0);
3950 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3951 if (TREE_CODE (and_mask) != INTEGER_CST)
3952 return 0;
3955 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3956 punsignedp, preversep, pvolatilep, false);
3957 if ((inner == exp && and_mask == 0)
3958 || *pbitsize < 0 || offset != 0
3959 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3960 return 0;
3962 /* If the number of bits in the reference is the same as the bitsize of
3963 the outer type, then the outer type gives the signedness. Otherwise
3964 (in case of a small bitfield) the signedness is unchanged. */
3965 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3966 *punsignedp = TYPE_UNSIGNED (outer_type);
3968 /* Compute the mask to access the bitfield. */
3969 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3970 precision = TYPE_PRECISION (unsigned_type);
3972 mask = build_int_cst_type (unsigned_type, -1);
3974 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3975 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3977 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3978 if (and_mask != 0)
3979 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3980 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3982 *pmask = mask;
3983 *pand_mask = and_mask;
3984 return inner;
3987 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3988 bit positions and MASK is SIGNED. */
3990 static int
3991 all_ones_mask_p (const_tree mask, unsigned int size)
3993 tree type = TREE_TYPE (mask);
3994 unsigned int precision = TYPE_PRECISION (type);
3996 /* If this function returns true when the type of the mask is
3997 UNSIGNED, then there will be errors. In particular see
3998 gcc.c-torture/execute/990326-1.c. There does not appear to be
3999 any documentation paper trail as to why this is so. But the pre
4000 wide-int worked with that restriction and it has been preserved
4001 here. */
4002 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4003 return false;
4005 return wi::mask (size, false, precision) == mask;
4008 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4009 represents the sign bit of EXP's type. If EXP represents a sign
4010 or zero extension, also test VAL against the unextended type.
4011 The return value is the (sub)expression whose sign bit is VAL,
4012 or NULL_TREE otherwise. */
4014 tree
4015 sign_bit_p (tree exp, const_tree val)
4017 int width;
4018 tree t;
4020 /* Tree EXP must have an integral type. */
4021 t = TREE_TYPE (exp);
4022 if (! INTEGRAL_TYPE_P (t))
4023 return NULL_TREE;
4025 /* Tree VAL must be an integer constant. */
4026 if (TREE_CODE (val) != INTEGER_CST
4027 || TREE_OVERFLOW (val))
4028 return NULL_TREE;
4030 width = TYPE_PRECISION (t);
4031 if (wi::only_sign_bit_p (val, width))
4032 return exp;
4034 /* Handle extension from a narrower type. */
4035 if (TREE_CODE (exp) == NOP_EXPR
4036 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4037 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4039 return NULL_TREE;
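/* Editorial illustration, not part of the original source: for a 32-bit
   int, sign_bit_p (exp, val) matches only VAL == 0x80000000, i.e. INT_MIN.
   For a widening conversion such as EXP = (int) s, with S a 16-bit short,
   the recursion above also accepts VAL == 0x8000, the sign bit of the
   narrower type, and returns S itself.  */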
4042 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4043 to be evaluated unconditionally. */
4045 static int
4046 simple_operand_p (const_tree exp)
4048 /* Strip any conversions that don't change the machine mode. */
4049 STRIP_NOPS (exp);
4051 return (CONSTANT_CLASS_P (exp)
4052 || TREE_CODE (exp) == SSA_NAME
4053 || (DECL_P (exp)
4054 && ! TREE_ADDRESSABLE (exp)
4055 && ! TREE_THIS_VOLATILE (exp)
4056 && ! DECL_NONLOCAL (exp)
4057 /* Don't regard global variables as simple. They may be
4058 allocated in ways unknown to the compiler (shared memory,
4059 #pragma weak, etc). */
4060 && ! TREE_PUBLIC (exp)
4061 && ! DECL_EXTERNAL (exp)
4062 /* Weakrefs are not safe to be read, since they can be NULL.
4063 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4064 have DECL_WEAK flag set. */
4065 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4066 /* Loading a static variable is unduly expensive, but global
4067 registers aren't expensive. */
4068 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4071 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4072 to be evaluated unconditionally.
4073 In addition to simple_operand_p, we assume that comparisons, conversions,
4074 and logic-not operations are simple, if their operands are simple, too. */
4076 static bool
4077 simple_operand_p_2 (tree exp)
4079 enum tree_code code;
4081 if (TREE_SIDE_EFFECTS (exp)
4082 || tree_could_trap_p (exp))
4083 return false;
4085 while (CONVERT_EXPR_P (exp))
4086 exp = TREE_OPERAND (exp, 0);
4088 code = TREE_CODE (exp);
4090 if (TREE_CODE_CLASS (code) == tcc_comparison)
4091 return (simple_operand_p (TREE_OPERAND (exp, 0))
4092 && simple_operand_p (TREE_OPERAND (exp, 1)));
4094 if (code == TRUTH_NOT_EXPR)
4095 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4097 return simple_operand_p (exp);
4101 /* The following functions are subroutines to fold_range_test and allow it to
4102 try to change a logical combination of comparisons into a range test.
4104 For example, both
4105 X == 2 || X == 3 || X == 4 || X == 5
4106 and
4107 X >= 2 && X <= 5
4108 are converted to
4109 (unsigned) (X - 2) <= 3
4111 We describe each set of comparisons as being either inside or outside
4112 a range, using a variable named like IN_P, and then describe the
4113 range with a lower and upper bound. If one of the bounds is omitted,
4114 it represents either the highest or lowest value of the type.
4116 In the comments below, we represent a range by two numbers in brackets
4117 preceded by a "+" to designate being inside that range, or a "-" to
4118 designate being outside that range, so the condition can be inverted by
4119 flipping the prefix. An omitted bound is represented by a "-". For
4120 example, "- [-, 10]" means being outside the range starting at the lowest
4121 possible value and ending at 10, in other words, being greater than 10.
4122 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4123 always false.
4125 We set up things so that the missing bounds are handled in a consistent
4126 manner so neither a missing bound nor "true" and "false" need to be
4127 handled using a special case. */
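/* Editorial illustration, not part of the original source: in this
   notation "X >= 2 && X <= 5" is "+ [2, 5]" and its negation
   "X < 2 || X > 5" is "- [2, 5]".  The chain in the example above is
   merged pairwise over successive folds: "+ [2, 2]" or "+ [3, 3]" gives
   "+ [2, 3]", then "+ [2, 3]" or "+ [4, 4]" gives "+ [2, 4]", and so on
   up to "+ [2, 5]".  */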
4129 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4130 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4131 and UPPER1_P are nonzero if the respective argument is an upper bound
4132 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4133 must be specified for a comparison. ARG1 will be converted to ARG0's
4134 type if both are specified. */
4136 static tree
4137 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4138 tree arg1, int upper1_p)
4140 tree tem;
4141 int result;
4142 int sgn0, sgn1;
4144 /* If neither arg represents infinity, do the normal operation.
4145 Else, if not a comparison, return infinity. Else handle the special
4146 comparison rules. Note that most of the cases below won't occur, but
4147 are handled for consistency. */
4149 if (arg0 != 0 && arg1 != 0)
4151 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4152 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4153 STRIP_NOPS (tem);
4154 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4157 if (TREE_CODE_CLASS (code) != tcc_comparison)
4158 return 0;
4160 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4161 for neither. In real mathematics, we could not assume open-ended
4162 ranges are the same. But this is computer arithmetic, where numbers are
4163 finite, so we may represent any missing bound by a single value Z, Z
4164 being greater than any representable number. This permits
4165 us to treat unbounded ranges as equal.
4166 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4167 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4168 switch (code)
4170 case EQ_EXPR:
4171 result = sgn0 == sgn1;
4172 break;
4173 case NE_EXPR:
4174 result = sgn0 != sgn1;
4175 break;
4176 case LT_EXPR:
4177 result = sgn0 < sgn1;
4178 break;
4179 case LE_EXPR:
4180 result = sgn0 <= sgn1;
4181 break;
4182 case GT_EXPR:
4183 result = sgn0 > sgn1;
4184 break;
4185 case GE_EXPR:
4186 result = sgn0 >= sgn1;
4187 break;
4188 default:
4189 gcc_unreachable ();
4192 return constant_boolean_node (result, type);
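/* Editorial illustration, not part of the original source: under the
   convention above, range_binop (EQ_EXPR, type, NULL, 1, NULL, 1)
   compares two omitted upper bounds; both get SGN == 1, standing for the
   same value Z, so the result is true.  An omitted lower bound
   (SGN == -1) is less than any omitted upper bound.  */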
4195 /* Helper routine for make_range. Perform one step for it, return
4196 new expression if the loop should continue or NULL_TREE if it should
4197 stop. */
4199 tree
4200 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4201 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4202 bool *strict_overflow_p)
4204 tree arg0_type = TREE_TYPE (arg0);
4205 tree n_low, n_high, low = *p_low, high = *p_high;
4206 int in_p = *p_in_p, n_in_p;
4208 switch (code)
4210 case TRUTH_NOT_EXPR:
4211 /* We can only do something if the range is testing for zero. */
4212 if (low == NULL_TREE || high == NULL_TREE
4213 || ! integer_zerop (low) || ! integer_zerop (high))
4214 return NULL_TREE;
4215 *p_in_p = ! in_p;
4216 return arg0;
4218 case EQ_EXPR: case NE_EXPR:
4219 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4220 /* We can only do something if the range is testing for zero
4221 and if the second operand is an integer constant. Note that
4222 saying something is "in" the range we make is done by
4223 complementing IN_P, since it is set in the initial case of
4224 being not equal to zero; "out" is leaving it alone. */
4225 if (low == NULL_TREE || high == NULL_TREE
4226 || ! integer_zerop (low) || ! integer_zerop (high)
4227 || TREE_CODE (arg1) != INTEGER_CST)
4228 return NULL_TREE;
4230 switch (code)
4232 case NE_EXPR: /* - [c, c] */
4233 low = high = arg1;
4234 break;
4235 case EQ_EXPR: /* + [c, c] */
4236 in_p = ! in_p, low = high = arg1;
4237 break;
4238 case GT_EXPR: /* - [-, c] */
4239 low = 0, high = arg1;
4240 break;
4241 case GE_EXPR: /* + [c, -] */
4242 in_p = ! in_p, low = arg1, high = 0;
4243 break;
4244 case LT_EXPR: /* - [c, -] */
4245 low = arg1, high = 0;
4246 break;
4247 case LE_EXPR: /* + [-, c] */
4248 in_p = ! in_p, low = 0, high = arg1;
4249 break;
4250 default:
4251 gcc_unreachable ();
4254 /* If this is an unsigned comparison, we also know that EXP is
4255 greater than or equal to zero. We base the range tests we make
4256 on that fact, so we record it here so we can parse existing
4257 range tests. We test arg0_type since often the return type
4258 of, e.g. EQ_EXPR, is boolean. */
4259 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4261 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4262 in_p, low, high, 1,
4263 build_int_cst (arg0_type, 0),
4264 NULL_TREE))
4265 return NULL_TREE;
4267 in_p = n_in_p, low = n_low, high = n_high;
4269 /* If the high bound is missing, but we have a nonzero low
4270 bound, reverse the range so it goes from zero to the low bound
4271 minus 1. */
4272 if (high == 0 && low && ! integer_zerop (low))
4274 in_p = ! in_p;
4275 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4276 build_int_cst (TREE_TYPE (low), 1), 0);
4277 low = build_int_cst (arg0_type, 0);
4281 *p_low = low;
4282 *p_high = high;
4283 *p_in_p = in_p;
4284 return arg0;
4286 case NEGATE_EXPR:
4287 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4288 low and high are non-NULL, then normalize will DTRT. */
4289 if (!TYPE_UNSIGNED (arg0_type)
4290 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4292 if (low == NULL_TREE)
4293 low = TYPE_MIN_VALUE (arg0_type);
4294 if (high == NULL_TREE)
4295 high = TYPE_MAX_VALUE (arg0_type);
4298 /* (-x) IN [a,b] -> x in [-b, -a] */
4299 n_low = range_binop (MINUS_EXPR, exp_type,
4300 build_int_cst (exp_type, 0),
4301 0, high, 1);
4302 n_high = range_binop (MINUS_EXPR, exp_type,
4303 build_int_cst (exp_type, 0),
4304 0, low, 0);
4305 if (n_high != 0 && TREE_OVERFLOW (n_high))
4306 return NULL_TREE;
4307 goto normalize;
4309 case BIT_NOT_EXPR:
4310 /* ~ X -> -X - 1 */
4311 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4312 build_int_cst (exp_type, 1));
4314 case PLUS_EXPR:
4315 case MINUS_EXPR:
4316 if (TREE_CODE (arg1) != INTEGER_CST)
4317 return NULL_TREE;
4319 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4320 move a constant to the other side. */
4321 if (!TYPE_UNSIGNED (arg0_type)
4322 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4323 return NULL_TREE;
4325 /* If EXP is signed, any overflow in the computation is undefined,
4326 so we don't worry about it so long as our computations on
4327 the bounds don't overflow. For unsigned, overflow is defined
4328 and this is exactly the right thing. */
4329 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4330 arg0_type, low, 0, arg1, 0);
4331 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4332 arg0_type, high, 1, arg1, 0);
4333 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4334 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4335 return NULL_TREE;
4337 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4338 *strict_overflow_p = true;
4340 normalize:
4341 /* Check for an unsigned range which has wrapped around the maximum
4342 value thus making n_high < n_low, and normalize it. */
4343 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4345 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4346 build_int_cst (TREE_TYPE (n_high), 1), 0);
4347 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4348 build_int_cst (TREE_TYPE (n_low), 1), 0);
4350 /* If the range is of the form +/- [ x+1, x ], we won't
4351 be able to normalize it. But then, it represents the
4352 whole range or the empty set, so make it
4353 +/- [ -, - ]. */
4354 if (tree_int_cst_equal (n_low, low)
4355 && tree_int_cst_equal (n_high, high))
4356 low = high = 0;
4357 else
4358 in_p = ! in_p;
4360 else
4361 low = n_low, high = n_high;
4363 *p_low = low;
4364 *p_high = high;
4365 *p_in_p = in_p;
4366 return arg0;
4368 CASE_CONVERT:
4369 case NON_LVALUE_EXPR:
4370 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4371 return NULL_TREE;
4373 if (! INTEGRAL_TYPE_P (arg0_type)
4374 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4375 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4376 return NULL_TREE;
4378 n_low = low, n_high = high;
4380 if (n_low != 0)
4381 n_low = fold_convert_loc (loc, arg0_type, n_low);
4383 if (n_high != 0)
4384 n_high = fold_convert_loc (loc, arg0_type, n_high);
4386 /* If we're converting arg0 from an unsigned type to exp's
4387 signed type, we will be doing the comparison as unsigned.
4388 The tests above have already verified that LOW and HIGH
4389 are both positive.
4391 So we have to ensure that we will handle large unsigned
4392 values the same way that the current signed bounds treat
4393 negative values. */
4395 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4397 tree high_positive;
4398 tree equiv_type;
4399 /* For fixed-point modes, we need to pass the saturating flag
4400 as the 2nd parameter. */
4401 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4402 equiv_type
4403 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4404 TYPE_SATURATING (arg0_type));
4405 else
4406 equiv_type
4407 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4409 /* A range without an upper bound is, naturally, unbounded.
4410 Since convert would have cropped a very large value, use
4411 the max value for the destination type. */
4412 high_positive
4413 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4414 : TYPE_MAX_VALUE (arg0_type);
4416 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4417 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4418 fold_convert_loc (loc, arg0_type,
4419 high_positive),
4420 build_int_cst (arg0_type, 1));
4422 /* If the low bound is specified, "and" the range with the
4423 range for which the original unsigned value will be
4424 positive. */
4425 if (low != 0)
4427 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4428 1, fold_convert_loc (loc, arg0_type,
4429 integer_zero_node),
4430 high_positive))
4431 return NULL_TREE;
4433 in_p = (n_in_p == in_p);
4435 else
4437 /* Otherwise, "or" the range with the range of the input
4438 that will be interpreted as negative. */
4439 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4440 1, fold_convert_loc (loc, arg0_type,
4441 integer_zero_node),
4442 high_positive))
4443 return NULL_TREE;
4445 in_p = (in_p != n_in_p);
4449 *p_low = n_low;
4450 *p_high = n_high;
4451 *p_in_p = in_p;
4452 return arg0;
4454 default:
4455 return NULL_TREE;
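/* Editorial illustration, not part of the original source: one PLUS_EXPR
   step, assuming signed arithmetic with undefined overflow.  Given
   ARG0 = X, ARG1 = 10 and the incoming range "+ [0, 5]" for X + 10, the
   constant moves across: the new bounds are 0 - 10 and 5 - 10, so the
   step returns X with "+ [-10, -5]" and sets *STRICT_OVERFLOW_P, because
   the rewrite is only valid when signed overflow is undefined.  */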
4459 /* Given EXP, a logical expression, set the range it is testing into
4460 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4461 actually being tested. *PLOW and *PHIGH will be made of the same
4462 type as the returned expression. If EXP is not a comparison, we
4463 will most likely not be returning a useful value and range. Set
4464 *STRICT_OVERFLOW_P to true if the return value is only valid
4465 because signed overflow is undefined; otherwise, do not change
4466 *STRICT_OVERFLOW_P. */
4468 tree
4469 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4470 bool *strict_overflow_p)
4472 enum tree_code code;
4473 tree arg0, arg1 = NULL_TREE;
4474 tree exp_type, nexp;
4475 int in_p;
4476 tree low, high;
4477 location_t loc = EXPR_LOCATION (exp);
4479 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4480 and see if we can refine the range. Some of the cases below may not
4481 happen, but it doesn't seem worth worrying about this. We "continue"
4482 the outer loop when we've changed something; otherwise we "break"
4483 the switch, which will "break" the while. */
4485 in_p = 0;
4486 low = high = build_int_cst (TREE_TYPE (exp), 0);
4488 while (1)
4490 code = TREE_CODE (exp);
4491 exp_type = TREE_TYPE (exp);
4492 arg0 = NULL_TREE;
4494 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4496 if (TREE_OPERAND_LENGTH (exp) > 0)
4497 arg0 = TREE_OPERAND (exp, 0);
4498 if (TREE_CODE_CLASS (code) == tcc_binary
4499 || TREE_CODE_CLASS (code) == tcc_comparison
4500 || (TREE_CODE_CLASS (code) == tcc_expression
4501 && TREE_OPERAND_LENGTH (exp) > 1))
4502 arg1 = TREE_OPERAND (exp, 1);
4504 if (arg0 == NULL_TREE)
4505 break;
4507 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4508 &high, &in_p, strict_overflow_p);
4509 if (nexp == NULL_TREE)
4510 break;
4511 exp = nexp;
4514 /* If EXP is a constant, we can evaluate whether this is true or false. */
4515 if (TREE_CODE (exp) == INTEGER_CST)
4517 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4518 exp, 0, low, 0))
4519 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4520 exp, 1, high, 1)));
4521 low = high = 0;
4522 exp = 0;
4525 *pin_p = in_p, *plow = low, *phigh = high;
4526 return exp;
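/* Editorial illustration, not part of the original source: for
   EXP = "x > 5" with X a signed int, the first step records the GT_EXPR
   as "- [-, 5]" and continues with X; X itself yields no further step,
   so make_range returns X with *PIN_P == 0, *PLOW == NULL_TREE and
   *PHIGH == 5, i.e. "X is outside [lowest, 5]".  */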
4529 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4530 type, TYPE, return an expression to test if EXP is in (or out of, depending
4531 on IN_P) the range. Return 0 if the test couldn't be created. */
4533 tree
4534 build_range_check (location_t loc, tree type, tree exp, int in_p,
4535 tree low, tree high)
4537 tree etype = TREE_TYPE (exp), value;
4539 /* Disable this optimization for function pointer expressions
4540 on targets that require function pointer canonicalization. */
4541 if (targetm.have_canonicalize_funcptr_for_compare ()
4542 && TREE_CODE (etype) == POINTER_TYPE
4543 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4544 return NULL_TREE;
4546 if (! in_p)
4548 value = build_range_check (loc, type, exp, 1, low, high);
4549 if (value != 0)
4550 return invert_truthvalue_loc (loc, value);
4552 return 0;
4555 if (low == 0 && high == 0)
4556 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4558 if (low == 0)
4559 return fold_build2_loc (loc, LE_EXPR, type, exp,
4560 fold_convert_loc (loc, etype, high));
4562 if (high == 0)
4563 return fold_build2_loc (loc, GE_EXPR, type, exp,
4564 fold_convert_loc (loc, etype, low));
4566 if (operand_equal_p (low, high, 0))
4567 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4568 fold_convert_loc (loc, etype, low));
4570 if (integer_zerop (low))
4572 if (! TYPE_UNSIGNED (etype))
4574 etype = unsigned_type_for (etype);
4575 high = fold_convert_loc (loc, etype, high);
4576 exp = fold_convert_loc (loc, etype, exp);
4578 return build_range_check (loc, type, exp, 1, 0, high);
4581 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4582 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4584 int prec = TYPE_PRECISION (etype);
4586 if (wi::mask (prec - 1, false, prec) == high)
4588 if (TYPE_UNSIGNED (etype))
4590 tree signed_etype = signed_type_for (etype);
4591 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4592 etype
4593 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4594 else
4595 etype = signed_etype;
4596 exp = fold_convert_loc (loc, etype, exp);
4598 return fold_build2_loc (loc, GT_EXPR, type, exp,
4599 build_int_cst (etype, 0));
4603 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4604 This requires wrap-around arithmetic for the type of the expression.
4605 First make sure that arithmetic in this type is valid, then make sure
4606 that it wraps around. */
4607 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4608 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4609 TYPE_UNSIGNED (etype));
4611 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4613 tree utype, minv, maxv;
4615 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4616 for the type in question, as we rely on this here. */
4617 utype = unsigned_type_for (etype);
4618 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4619 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4620 build_int_cst (TREE_TYPE (maxv), 1), 1);
4621 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4623 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4624 minv, 1, maxv, 1)))
4625 etype = utype;
4626 else
4627 return 0;
4630 high = fold_convert_loc (loc, etype, high);
4631 low = fold_convert_loc (loc, etype, low);
4632 exp = fold_convert_loc (loc, etype, exp);
4634 value = const_binop (MINUS_EXPR, high, low);
4637 if (POINTER_TYPE_P (etype))
4639 if (value != 0 && !TREE_OVERFLOW (value))
4641 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4642 return build_range_check (loc, type,
4643 fold_build_pointer_plus_loc (loc, exp, low),
4644 1, build_int_cst (etype, 0), value);
4646 return 0;
4649 if (value != 0 && !TREE_OVERFLOW (value))
4650 return build_range_check (loc, type,
4651 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4652 1, build_int_cst (etype, 0), value);
4654 return 0;
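/* Editorial illustration, not part of the original source: for a signed
   int X with IN_P == 1, LOW == 2 and HIGH == 5, none of the early cases
   apply.  Signed int does not wrap, so the code above first switches to
   the unsigned equivalent type, computes VALUE == 3, and recurses on
   X - 2 against [0, 3]; the zero-low case then reduces this to

     (unsigned) (x - 2) <= 3

   matching the example given before the range-test helpers.  */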
4657 /* Return the predecessor of VAL in its type, handling the infinite case. */
4659 static tree
4660 range_predecessor (tree val)
4662 tree type = TREE_TYPE (val);
4664 if (INTEGRAL_TYPE_P (type)
4665 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4666 return 0;
4667 else
4668 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4669 build_int_cst (TREE_TYPE (val), 1), 0);
4672 /* Return the successor of VAL in its type, handling the infinite case. */
4674 static tree
4675 range_successor (tree val)
4677 tree type = TREE_TYPE (val);
4679 if (INTEGRAL_TYPE_P (type)
4680 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4681 return 0;
4682 else
4683 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4684 build_int_cst (TREE_TYPE (val), 1), 0);
4687 /* Given two ranges, see if we can merge them into one. Return 1 if we
4688 can, 0 if we can't. Set the output range into the specified parameters. */
4690 bool
4691 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4692 tree high0, int in1_p, tree low1, tree high1)
4694 int no_overlap;
4695 int subset;
4696 int temp;
4697 tree tem;
4698 int in_p;
4699 tree low, high;
4700 int lowequal = ((low0 == 0 && low1 == 0)
4701 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4702 low0, 0, low1, 0)));
4703 int highequal = ((high0 == 0 && high1 == 0)
4704 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4705 high0, 1, high1, 1)));
4707 /* Make range 0 be the range that starts first, or ends last if they
4708 start at the same value. Swap them if necessary. */
4709 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4710 low0, 0, low1, 0))
4711 || (lowequal
4712 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4713 high1, 1, high0, 1))))
4715 temp = in0_p, in0_p = in1_p, in1_p = temp;
4716 tem = low0, low0 = low1, low1 = tem;
4717 tem = high0, high0 = high1, high1 = tem;
4720 /* Now flag two cases, whether the ranges are disjoint or whether the
4721 second range is totally subsumed in the first. Note that the tests
4722 below are simplified by the ones above. */
4723 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4724 high0, 1, low1, 0));
4725 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4726 high1, 1, high0, 1));
4728 /* We now have four cases, depending on whether we are including or
4729 excluding the two ranges. */
4730 if (in0_p && in1_p)
4732 /* If they don't overlap, the result is false. If the second range
4733 is a subset it is the result. Otherwise, the range is from the start
4734 of the second to the end of the first. */
4735 if (no_overlap)
4736 in_p = 0, low = high = 0;
4737 else if (subset)
4738 in_p = 1, low = low1, high = high1;
4739 else
4740 in_p = 1, low = low1, high = high0;
4743 else if (in0_p && ! in1_p)
4745 /* If they don't overlap, the result is the first range. If they are
4746 equal, the result is false. If the second range is a subset of the
4747 first, and the ranges begin at the same place, we go from just after
4748 the end of the second range to the end of the first. If the second
4749 range is not a subset of the first, or if it is a subset and both
4750 ranges end at the same place, the range starts at the start of the
4751 first range and ends just before the second range.
4752 Otherwise, we can't describe this as a single range. */
4753 if (no_overlap)
4754 in_p = 1, low = low0, high = high0;
4755 else if (lowequal && highequal)
4756 in_p = 0, low = high = 0;
4757 else if (subset && lowequal)
4759 low = range_successor (high1);
4760 high = high0;
4761 in_p = 1;
4762 if (low == 0)
4764 /* We are in the weird situation where high0 > high1 but
4765 high1 has no successor. Punt. */
4766 return 0;
4769 else if (! subset || highequal)
4771 low = low0;
4772 high = range_predecessor (low1);
4773 in_p = 1;
4774 if (high == 0)
4776 /* low0 < low1 but low1 has no predecessor. Punt. */
4777 return 0;
4780 else
4781 return 0;
4784 else if (! in0_p && in1_p)
4786 /* If they don't overlap, the result is the second range. If the second
4787 is a subset of the first, the result is false. Otherwise,
4788 the range starts just after the first range and ends at the
4789 end of the second. */
4790 if (no_overlap)
4791 in_p = 1, low = low1, high = high1;
4792 else if (subset || highequal)
4793 in_p = 0, low = high = 0;
4794 else
4796 low = range_successor (high0);
4797 high = high1;
4798 in_p = 1;
4799 if (low == 0)
4801 /* high1 > high0 but high0 has no successor. Punt. */
4802 return 0;
4807 else
4809 /* The case where we are excluding both ranges. Here the complex case
4810 is if they don't overlap. In that case, the only time we have a
4811 range is if they are adjacent. If the second is a subset of the
4812 first, the result is the first. Otherwise, the range to exclude
4813 starts at the beginning of the first range and ends at the end of the
4814 second. */
4815 if (no_overlap)
4817 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4818 range_successor (high0),
4819 1, low1, 0)))
4820 in_p = 0, low = low0, high = high1;
4821 else
4823 /* Canonicalize - [min, x] into - [-, x]. */
4824 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4825 switch (TREE_CODE (TREE_TYPE (low0)))
4827 case ENUMERAL_TYPE:
4828 if (TYPE_PRECISION (TREE_TYPE (low0))
4829 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4830 break;
4831 /* FALLTHROUGH */
4832 case INTEGER_TYPE:
4833 if (tree_int_cst_equal (low0,
4834 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4835 low0 = 0;
4836 break;
4837 case POINTER_TYPE:
4838 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4839 && integer_zerop (low0))
4840 low0 = 0;
4841 break;
4842 default:
4843 break;
4846 /* Canonicalize - [x, max] into - [x, -]. */
4847 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4848 switch (TREE_CODE (TREE_TYPE (high1)))
4850 case ENUMERAL_TYPE:
4851 if (TYPE_PRECISION (TREE_TYPE (high1))
4852 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4853 break;
4854 /* FALLTHROUGH */
4855 case INTEGER_TYPE:
4856 if (tree_int_cst_equal (high1,
4857 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4858 high1 = 0;
4859 break;
4860 case POINTER_TYPE:
4861 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4862 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4863 high1, 1,
4864 build_int_cst (TREE_TYPE (high1), 1),
4865 1)))
4866 high1 = 0;
4867 break;
4868 default:
4869 break;
4872 /* The ranges might also be adjacent between the maximum and
4873 minimum values of the given type. For
4874 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4875 return + [x + 1, y - 1]. */
4876 if (low0 == 0 && high1 == 0)
4878 low = range_successor (high0);
4879 high = range_predecessor (low1);
4880 if (low == 0 || high == 0)
4881 return 0;
4883 in_p = 1;
4885 else
4886 return 0;
4889 else if (subset)
4890 in_p = 0, low = low0, high = high0;
4891 else
4892 in_p = 0, low = low0, high = high1;
4895 *pin_p = in_p, *plow = low, *phigh = high;
4896 return 1;
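/* Editorial illustration, not part of the original source: merging the
   exclusions "- [2, 2]" and "- [3, 3]" (the inverted form of
   "X == 2 || X == 3") hits the adjacency case above, since the successor
   of 2 is 3, and yields "- [2, 3]"; fold_range_test then inverts the
   result back to "+ [2, 3]".  */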
4900 /* Subroutine of fold, looking inside expressions of the form
4901 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4902 of the COND_EXPR. This function is also used to optimize
4903 A op B ? C : A, by reversing the comparison first.
4905 Return a folded expression whose code is not a COND_EXPR
4906 anymore, or NULL_TREE if no folding opportunity is found. */
4908 static tree
4909 fold_cond_expr_with_comparison (location_t loc, tree type,
4910 tree arg0, tree arg1, tree arg2)
4912 enum tree_code comp_code = TREE_CODE (arg0);
4913 tree arg00 = TREE_OPERAND (arg0, 0);
4914 tree arg01 = TREE_OPERAND (arg0, 1);
4915 tree arg1_type = TREE_TYPE (arg1);
4916 tree tem;
4918 STRIP_NOPS (arg1);
4919 STRIP_NOPS (arg2);
4921 /* If we have A op 0 ? A : -A, consider applying the following
4922 transformations:
4924 A == 0? A : -A same as -A
4925 A != 0? A : -A same as A
4926 A >= 0? A : -A same as abs (A)
4927 A > 0? A : -A same as abs (A)
4928 A <= 0? A : -A same as -abs (A)
4929 A < 0? A : -A same as -abs (A)
4931 None of these transformations work for modes with signed
4932 zeros. If A is +/-0, the first two transformations will
4933 change the sign of the result (from +0 to -0, or vice
4934 versa). The last four will fix the sign of the result,
4935 even though the original expressions could be positive or
4936 negative, depending on the sign of A.
4938 Note that all these transformations are correct if A is
4939 NaN, since the two alternatives (A and -A) are also NaNs. */
4940 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4941 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4942 ? real_zerop (arg01)
4943 : integer_zerop (arg01))
4944 && ((TREE_CODE (arg2) == NEGATE_EXPR
4945 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4946 /* In the case that A is of the form X-Y, '-A' (arg2) may
4947 have already been folded to Y-X, check for that. */
4948 || (TREE_CODE (arg1) == MINUS_EXPR
4949 && TREE_CODE (arg2) == MINUS_EXPR
4950 && operand_equal_p (TREE_OPERAND (arg1, 0),
4951 TREE_OPERAND (arg2, 1), 0)
4952 && operand_equal_p (TREE_OPERAND (arg1, 1),
4953 TREE_OPERAND (arg2, 0), 0))))
4954 switch (comp_code)
4956 case EQ_EXPR:
4957 case UNEQ_EXPR:
4958 tem = fold_convert_loc (loc, arg1_type, arg1);
4959 return pedantic_non_lvalue_loc (loc,
4960 fold_convert_loc (loc, type,
4961 negate_expr (tem)));
4962 case NE_EXPR:
4963 case LTGT_EXPR:
4964 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4965 case UNGE_EXPR:
4966 case UNGT_EXPR:
4967 if (flag_trapping_math)
4968 break;
4969 /* Fall through. */
4970 case GE_EXPR:
4971 case GT_EXPR:
4972 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4973 break;
4974 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4975 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4976 case UNLE_EXPR:
4977 case UNLT_EXPR:
4978 if (flag_trapping_math)
4979 break;
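/* Fall through. */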
4980 case LE_EXPR:
4981 case LT_EXPR:
4982 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4983 break;
4984 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4985 return negate_expr (fold_convert_loc (loc, type, tem));
4986 default:
4987 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4988 break;
4991 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4992 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4993 both transformations are correct when A is NaN: A != 0
4994 is then true, and A == 0 is false. */
4996 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4997 && integer_zerop (arg01) && integer_zerop (arg2))
4999 if (comp_code == NE_EXPR)
5000 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5001 else if (comp_code == EQ_EXPR)
5002 return build_zero_cst (type);
5005 /* Try some transformations of A op B ? A : B.
5007 A == B? A : B same as B
5008 A != B? A : B same as A
5009 A >= B? A : B same as max (A, B)
5010 A > B? A : B same as max (B, A)
5011 A <= B? A : B same as min (A, B)
5012 A < B? A : B same as min (B, A)
5014 As above, these transformations don't work in the presence
5015 of signed zeros. For example, if A and B are zeros of
5016 opposite sign, the first two transformations will change
5017 the sign of the result. In the last four, the original
5018 expressions give different results for (A=+0, B=-0) and
5019 (A=-0, B=+0), but the transformed expressions do not.
5021 The first two transformations are correct if either A or B
5022 is a NaN. In the first transformation, the condition will
5023 be false, and B will indeed be chosen. In the case of the
5024 second transformation, the condition A != B will be true,
5025 and A will be chosen.
5027 The conversions to max() and min() are not correct if B is
5028 a number and A is not. The conditions in the original
5029 expressions will be false, so all four give B. The min()
5030 and max() versions would give a NaN instead. */
5031 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5032 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5033 /* Avoid these transformations if the COND_EXPR may be used
5034 as an lvalue in the C++ front-end. PR c++/19199. */
5035 && (in_gimple_form
5036 || VECTOR_TYPE_P (type)
5037 || (! lang_GNU_CXX ()
5038 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5039 || ! maybe_lvalue_p (arg1)
5040 || ! maybe_lvalue_p (arg2)))
5042 tree comp_op0 = arg00;
5043 tree comp_op1 = arg01;
5044 tree comp_type = TREE_TYPE (comp_op0);
5046 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5047 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5049 comp_type = type;
5050 comp_op0 = arg1;
5051 comp_op1 = arg2;
5054 switch (comp_code)
5056 case EQ_EXPR:
5057 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5058 case NE_EXPR:
5059 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5060 case LE_EXPR:
5061 case LT_EXPR:
5062 case UNLE_EXPR:
5063 case UNLT_EXPR:
5064 /* In C++ a ?: expression can be an lvalue, so put the
5065 operand which will be used if they are equal first
5066 so that we can convert this back to the
5067 corresponding COND_EXPR. */
5068 if (!HONOR_NANS (arg1))
5070 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5071 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5072 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5073 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5074 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5075 comp_op1, comp_op0);
5076 return pedantic_non_lvalue_loc (loc,
5077 fold_convert_loc (loc, type, tem));
5079 break;
5080 case GE_EXPR:
5081 case GT_EXPR:
5082 case UNGE_EXPR:
5083 case UNGT_EXPR:
5084 if (!HONOR_NANS (arg1))
5086 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5087 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5088 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5089 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5090 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5091 comp_op1, comp_op0);
5092 return pedantic_non_lvalue_loc (loc,
5093 fold_convert_loc (loc, type, tem));
5095 break;
5096 case UNEQ_EXPR:
5097 if (!HONOR_NANS (arg1))
5098 return pedantic_non_lvalue_loc (loc,
5099 fold_convert_loc (loc, type, arg2));
5100 break;
5101 case LTGT_EXPR:
5102 if (!HONOR_NANS (arg1))
5103 return pedantic_non_lvalue_loc (loc,
5104 fold_convert_loc (loc, type, arg1));
5105 break;
5106 default:
5107 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5108 break;
5112 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5113 we might still be able to simplify this. For example,
5114 if C1 is one less or one more than C2, this might have started
5115 out as a MIN or MAX and been transformed by this function.
5116 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5118 if (INTEGRAL_TYPE_P (type)
5119 && TREE_CODE (arg01) == INTEGER_CST
5120 && TREE_CODE (arg2) == INTEGER_CST)
5121 switch (comp_code)
5123 case EQ_EXPR:
5124 if (TREE_CODE (arg1) == INTEGER_CST)
5125 break;
5126 /* We can replace A with C1 in this case. */
5127 arg1 = fold_convert_loc (loc, type, arg01);
5128 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5130 case LT_EXPR:
5131 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5132 MIN_EXPR, to preserve the signedness of the comparison. */
5133 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5134 OEP_ONLY_CONST)
5135 && operand_equal_p (arg01,
5136 const_binop (PLUS_EXPR, arg2,
5137 build_int_cst (type, 1)),
5138 OEP_ONLY_CONST))
5140 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5141 fold_convert_loc (loc, TREE_TYPE (arg00),
5142 arg2));
5143 return pedantic_non_lvalue_loc (loc,
5144 fold_convert_loc (loc, type, tem));
5146 break;
5148 case LE_EXPR:
5149 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5150 as above. */
5151 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5152 OEP_ONLY_CONST)
5153 && operand_equal_p (arg01,
5154 const_binop (MINUS_EXPR, arg2,
5155 build_int_cst (type, 1)),
5156 OEP_ONLY_CONST))
5158 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5159 fold_convert_loc (loc, TREE_TYPE (arg00),
5160 arg2));
5161 return pedantic_non_lvalue_loc (loc,
5162 fold_convert_loc (loc, type, tem));
5164 break;
5166 case GT_EXPR:
5167 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5168 MAX_EXPR, to preserve the signedness of the comparison. */
5169 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5170 OEP_ONLY_CONST)
5171 && operand_equal_p (arg01,
5172 const_binop (MINUS_EXPR, arg2,
5173 build_int_cst (type, 1)),
5174 OEP_ONLY_CONST))
5176 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5177 fold_convert_loc (loc, TREE_TYPE (arg00),
5178 arg2));
5179 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5181 break;
5183 case GE_EXPR:
5184 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5185 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5186 OEP_ONLY_CONST)
5187 && operand_equal_p (arg01,
5188 const_binop (PLUS_EXPR, arg2,
5189 build_int_cst (type, 1)),
5190 OEP_ONLY_CONST))
5192 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5193 fold_convert_loc (loc, TREE_TYPE (arg00),
5194 arg2));
5195 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5197 break;
5198 case NE_EXPR:
5199 break;
5200 default:
5201 gcc_unreachable ();
5204 return NULL_TREE;
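/* Editorial illustration, not part of the original source: for
   "a < b ? a : b" on doubles compiled so that neither NaNs nor signed
   zeros are honored (e.g. -ffast-math), COMP_CODE is LT_EXPR and the
   switch above builds MIN_EXPR <b, a>; the operand chosen when the two
   compare equal is put first, so the result can be converted back to
   the corresponding COND_EXPR.  */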
5209 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5210 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5211 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5212 false) >= 2)
5213 #endif
5215 /* EXP is some logical combination of boolean tests. See if we can
5216 merge it into some range test. Return the new tree if so. */
5218 static tree
5219 fold_range_test (location_t loc, enum tree_code code, tree type,
5220 tree op0, tree op1)
5222 int or_op = (code == TRUTH_ORIF_EXPR
5223 || code == TRUTH_OR_EXPR);
5224 int in0_p, in1_p, in_p;
5225 tree low0, low1, low, high0, high1, high;
5226 bool strict_overflow_p = false;
5227 tree tem, lhs, rhs;
5228 const char * const warnmsg = G_("assuming signed overflow does not occur "
5229 "when simplifying range test");
5231 if (!INTEGRAL_TYPE_P (type))
5232 return 0;
5234 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5235 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5237 /* If this is an OR operation, invert both sides; we will invert
5238 again at the end. */
5239 if (or_op)
5240 in0_p = ! in0_p, in1_p = ! in1_p;
5242 /* If both expressions are the same, if we can merge the ranges, and we
5243 can build the range test, return it or it inverted. If one of the
5244 ranges is always true or always false, consider it to be the same
5245 expression as the other. */
5246 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5247 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5248 in1_p, low1, high1)
5249 && 0 != (tem = (build_range_check (loc, type,
5250 lhs != 0 ? lhs
5251 : rhs != 0 ? rhs : integer_zero_node,
5252 in_p, low, high))))
5254 if (strict_overflow_p)
5255 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5256 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5259 /* On machines where the branch cost is expensive, if this is a
5260 short-circuited branch and the underlying object on both sides
5261 is the same, make a non-short-circuit operation. */
5262 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5263 && lhs != 0 && rhs != 0
5264 && (code == TRUTH_ANDIF_EXPR
5265 || code == TRUTH_ORIF_EXPR)
5266 && operand_equal_p (lhs, rhs, 0))
5268 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5269 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5270 which cases we can't do this. */
5271 if (simple_operand_p (lhs))
5272 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5273 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5274 type, op0, op1);
5276 else if (!lang_hooks.decls.global_bindings_p ()
5277 && !CONTAINS_PLACEHOLDER_P (lhs))
5279 tree common = save_expr (lhs);
5281 if (0 != (lhs = build_range_check (loc, type, common,
5282 or_op ? ! in0_p : in0_p,
5283 low0, high0))
5284 && (0 != (rhs = build_range_check (loc, type, common,
5285 or_op ? ! in1_p : in1_p,
5286 low1, high1))))
5288 if (strict_overflow_p)
5289 fold_overflow_warning (warnmsg,
5290 WARN_STRICT_OVERFLOW_COMPARISON);
5291 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5292 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5293 type, lhs, rhs);
5298 return 0;
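/* Editorial sketch, not part of the original source: the source-level
   effect of this function on the classic digit test.  The helper names
   are hypothetical; the second form is what the range check above
   produces from the first.

     static inline int is_digit_before (int ch)
     { return ch >= '0' && ch <= '9'; }

     static inline int is_digit_after (int ch)
     { return (unsigned int) (ch - '0') <= 9u; }
*/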
5301 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5302 bit value. Arrange things so the extra bits will be set to zero if and
5303 only if C is sign-extended to its full width. If MASK is nonzero,
5304 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5306 static tree
5307 unextend (tree c, int p, int unsignedp, tree mask)
5309 tree type = TREE_TYPE (c);
5310 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5311 tree temp;
5313 if (p == modesize || unsignedp)
5314 return c;
5316 /* We work by getting just the sign bit into the low-order bit, then
5317 into the high-order bit, then sign-extend. We then XOR that value
5318 with C. */
5319 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5321 /* We must use a signed type in order to get an arithmetic right shift.
5322 However, we must also avoid introducing accidental overflows, so that
5323 a subsequent call to integer_zerop will work. Hence we must
5324 do the type conversion here. At this point, the constant is either
5325 zero or one, and the conversion to a signed type can never overflow.
5326 We could get an overflow if this conversion is done anywhere else. */
5327 if (TYPE_UNSIGNED (type))
5328 temp = fold_convert (signed_type_for (type), temp);
5330 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5331 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5332 if (mask != 0)
5333 temp = const_binop (BIT_AND_EXPR, temp,
5334 fold_convert (TREE_TYPE (c), mask));
5335 /* If necessary, convert the type back to match the type of C. */
5336 if (TYPE_UNSIGNED (type))
5337 temp = fold_convert (type, temp);
5339 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
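/* Editorial illustration, not part of the original source: for a 32-bit
   type, unextend (10, 4, 0, NULL) treats C == 0b1010 as a signed 4-bit
   value.  The sign bit (1) is shifted up to bit 31, arithmetically
   shifted back down to give 0xfffffff0, and XORed with C, producing
   0xfffffffa: the 4-bit value sign-extended to -6.  */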
5342 /* For an expression that has the form
5343 (A && B) || ~B
5344 or
5345 (A || B) && ~B,
5346 we can drop one of the inner expressions and simplify to
5347 A || ~B
5348 or
5349 A && ~B
5350 LOC is the location of the resulting expression. OP is the inner
5351 logical operation, the left-hand side in the examples above, while CMPOP
5352 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5353 removing a condition that guards another, as in
5354 (A != NULL && A->...) || A == NULL
5355 which we must not transform. If RHS_ONLY is true, only eliminate the
5356 right-most operand of the inner logical operation. */
5358 static tree
5359 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5360 bool rhs_only)
5362 tree type = TREE_TYPE (cmpop);
5363 enum tree_code code = TREE_CODE (cmpop);
5364 enum tree_code truthop_code = TREE_CODE (op);
5365 tree lhs = TREE_OPERAND (op, 0);
5366 tree rhs = TREE_OPERAND (op, 1);
5367 tree orig_lhs = lhs, orig_rhs = rhs;
5368 enum tree_code rhs_code = TREE_CODE (rhs);
5369 enum tree_code lhs_code = TREE_CODE (lhs);
5370 enum tree_code inv_code;
5372 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5373 return NULL_TREE;
5375 if (TREE_CODE_CLASS (code) != tcc_comparison)
5376 return NULL_TREE;
5378 if (rhs_code == truthop_code)
5380 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5381 if (newrhs != NULL_TREE)
5383 rhs = newrhs;
5384 rhs_code = TREE_CODE (rhs);
5387 if (lhs_code == truthop_code && !rhs_only)
5389 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5390 if (newlhs != NULL_TREE)
5392 lhs = newlhs;
5393 lhs_code = TREE_CODE (lhs);
5397 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5398 if (inv_code == rhs_code
5399 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5400 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5401 return lhs;
5402 if (!rhs_only && inv_code == lhs_code
5403 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5404 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5405 return rhs;
5406 if (rhs != orig_rhs || lhs != orig_lhs)
5407 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5408 lhs, rhs);
5409 return NULL_TREE;
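/* Editorial illustration, not part of the original source: for
   OP = "a < 0 && b != 0" and CMPOP = "b == 0", the inverse of CMPOP is
   NE_EXPR on the same operands, which matches the right arm of OP, so
   the function returns "a < 0" and the caller can build
   "a < 0 || b == 0".  With RHS_ONLY set, a guarding left arm such as
   "A != NULL" in the example above is never dropped.  */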
5412 /* Find ways of folding logical expressions of LHS and RHS:
5413 Try to merge two comparisons to the same innermost item.
5414 Look for range tests like "ch >= '0' && ch <= '9'".
5415 Look for combinations of simple terms on machines with expensive branches
5416 and evaluate the RHS unconditionally.
5418 For example, if we have p->a == 2 && p->b == 4 and we can make an
5419 object large enough to span both A and B, we can do this with a comparison
5420 against the object ANDed with a mask.
5422 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5423 operations to do this with one comparison.
5425 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5426 function and the one above.
5428 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5429 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5431 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5432 two operands.
5434 We return the simplified tree or 0 if no optimization is possible. */
5436 static tree
5437 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5438 tree lhs, tree rhs)
5440 /* If this is the "or" of two comparisons, we can do something if
5441 the comparisons are NE_EXPR. If this is the "and", we can do something
5442 if the comparisons are EQ_EXPR. I.e.,
5443 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5445 WANTED_CODE is this operation code. For single bit fields, we can
5446 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5447 comparison for one-bit fields. */
5449 enum tree_code wanted_code;
5450 enum tree_code lcode, rcode;
5451 tree ll_arg, lr_arg, rl_arg, rr_arg;
5452 tree ll_inner, lr_inner, rl_inner, rr_inner;
5453 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5454 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5455 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5456 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5457 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5458 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5459 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5460 machine_mode lnmode, rnmode;
5461 tree ll_mask, lr_mask, rl_mask, rr_mask;
5462 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5463 tree l_const, r_const;
5464 tree lntype, rntype, result;
5465 HOST_WIDE_INT first_bit, end_bit;
5466 int volatilep;
5468 /* Start by getting the comparison codes. Fail if anything is volatile.
5469 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5470 it were surrounded with a NE_EXPR. */
5472 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5473 return 0;
5475 lcode = TREE_CODE (lhs);
5476 rcode = TREE_CODE (rhs);
5478 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5480 lhs = build2 (NE_EXPR, truth_type, lhs,
5481 build_int_cst (TREE_TYPE (lhs), 0));
5482 lcode = NE_EXPR;
5485 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5487 rhs = build2 (NE_EXPR, truth_type, rhs,
5488 build_int_cst (TREE_TYPE (rhs), 0));
5489 rcode = NE_EXPR;
5492 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5493 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5494 return 0;
5496 ll_arg = TREE_OPERAND (lhs, 0);
5497 lr_arg = TREE_OPERAND (lhs, 1);
5498 rl_arg = TREE_OPERAND (rhs, 0);
5499 rr_arg = TREE_OPERAND (rhs, 1);
5501 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5502 if (simple_operand_p (ll_arg)
5503 && simple_operand_p (lr_arg))
5505 if (operand_equal_p (ll_arg, rl_arg, 0)
5506 && operand_equal_p (lr_arg, rr_arg, 0))
5508 result = combine_comparisons (loc, code, lcode, rcode,
5509 truth_type, ll_arg, lr_arg);
5510 if (result)
5511 return result;
5513 else if (operand_equal_p (ll_arg, rr_arg, 0)
5514 && operand_equal_p (lr_arg, rl_arg, 0))
5516 result = combine_comparisons (loc, code, lcode,
5517 swap_tree_comparison (rcode),
5518 truth_type, ll_arg, lr_arg);
5519 if (result)
5520 return result;
5524 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5525 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5527 /* If the RHS can be evaluated unconditionally and its operands are
5528 simple, it wins to evaluate the RHS unconditionally on machines
5529 with expensive branches. In this case, this isn't a comparison
5530 that can be merged. */
5532 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5533 false) >= 2
5534 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5535 && simple_operand_p (rl_arg)
5536 && simple_operand_p (rr_arg))
5538 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5539 if (code == TRUTH_OR_EXPR
5540 && lcode == NE_EXPR && integer_zerop (lr_arg)
5541 && rcode == NE_EXPR && integer_zerop (rr_arg)
5542 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5543 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5544 return build2_loc (loc, NE_EXPR, truth_type,
5545 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5546 ll_arg, rl_arg),
5547 build_int_cst (TREE_TYPE (ll_arg), 0));
5549 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5550 if (code == TRUTH_AND_EXPR
5551 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5552 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5553 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5554 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5555 return build2_loc (loc, EQ_EXPR, truth_type,
5556 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5557 ll_arg, rl_arg),
5558 build_int_cst (TREE_TYPE (ll_arg), 0));
5561 /* See if the comparisons can be merged. Then get all the parameters for
5562 each side. */
5564 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5565 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5566 return 0;
5568 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5569 volatilep = 0;
5570 ll_inner = decode_field_reference (loc, ll_arg,
5571 &ll_bitsize, &ll_bitpos, &ll_mode,
5572 &ll_unsignedp, &ll_reversep, &volatilep,
5573 &ll_mask, &ll_and_mask);
5574 lr_inner = decode_field_reference (loc, lr_arg,
5575 &lr_bitsize, &lr_bitpos, &lr_mode,
5576 &lr_unsignedp, &lr_reversep, &volatilep,
5577 &lr_mask, &lr_and_mask);
5578 rl_inner = decode_field_reference (loc, rl_arg,
5579 &rl_bitsize, &rl_bitpos, &rl_mode,
5580 &rl_unsignedp, &rl_reversep, &volatilep,
5581 &rl_mask, &rl_and_mask);
5582 rr_inner = decode_field_reference (loc, rr_arg,
5583 &rr_bitsize, &rr_bitpos, &rr_mode,
5584 &rr_unsignedp, &rr_reversep, &volatilep,
5585 &rr_mask, &rr_and_mask);
5587 /* The inner operation on the lhs of each comparison must be the
5588 same if we are to be able to do anything. Then see if we
5589 have constants. If not, the same must be true for
5590 the rhs's. */
5591 if (volatilep
5592 || ll_reversep != rl_reversep
5593 || ll_inner == 0 || rl_inner == 0
5594 || ! operand_equal_p (ll_inner, rl_inner, 0))
5595 return 0;
5597 if (TREE_CODE (lr_arg) == INTEGER_CST
5598 && TREE_CODE (rr_arg) == INTEGER_CST)
5600 l_const = lr_arg, r_const = rr_arg;
5601 lr_reversep = ll_reversep;
5603 else if (lr_reversep != rr_reversep
5604 || lr_inner == 0 || rr_inner == 0
5605 || ! operand_equal_p (lr_inner, rr_inner, 0))
5606 return 0;
5607 else
5608 l_const = r_const = 0;
5610 /* If either comparison code is not correct for our logical operation,
5611 fail. However, we can convert a one-bit comparison against zero into
5612 the opposite comparison against that bit being set in the field. */
5614 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5615 if (lcode != wanted_code)
5617 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5619 /* Make the left operand unsigned, since we are only interested
5620 in the value of one bit. Otherwise we are doing the wrong
5621 thing below. */
5622 ll_unsignedp = 1;
5623 l_const = ll_mask;
5625 else
5626 return 0;
5629 /* This is analogous to the code for l_const above. */
5630 if (rcode != wanted_code)
5632 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5634 rl_unsignedp = 1;
5635 r_const = rl_mask;
5637 else
5638 return 0;
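/* For illustration: a one-bit field masked with 0x8 is either 0 or 8,
   so the "wrong" comparison (x & 8) == 0 can be rewritten with the
   wanted code as the equivalent (x & 8) != 8 by using the mask itself
   as the constant, which is what the two blocks above do.  */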
5641 /* See if we can find a mode that contains both fields being compared on
5642 the left. If we can't, fail. Otherwise, update all constants and masks
5643 to be relative to a field of that size. */
5644 first_bit = MIN (ll_bitpos, rl_bitpos);
5645 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5646 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5647 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5648 volatilep);
5649 if (lnmode == VOIDmode)
5650 return 0;
5652 lnbitsize = GET_MODE_BITSIZE (lnmode);
5653 lnbitpos = first_bit & ~ (lnbitsize - 1);
5654 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5655 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5657 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5659 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5660 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5663 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5664 size_int (xll_bitpos));
5665 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5666 size_int (xrl_bitpos));
5668 if (l_const)
5670 l_const = fold_convert_loc (loc, lntype, l_const);
5671 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5672 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5673 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5674 fold_build1_loc (loc, BIT_NOT_EXPR,
5675 lntype, ll_mask))))
5677 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5679 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5682 if (r_const)
5684 r_const = fold_convert_loc (loc, lntype, r_const);
5685 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5686 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5687 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5688 fold_build1_loc (loc, BIT_NOT_EXPR,
5689 lntype, rl_mask))))
5691 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5693 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
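/* For illustration: a constant with bits outside the field's mask can
   never match the masked value, e.g. (x & 3) == 4 is always 0 and
   (x & 3) != 4 is always 1; the warnings above diagnose exactly that
   situation before folding to a constant.  */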
5697 /* If the right sides are not constant, do the same for them. Also,
5698 disallow this optimization if a size or signedness mismatch occurs
5699 between the left and right sides. */
5700 if (l_const == 0)
5702 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5703 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5704 /* Make sure the two fields on the right
5705 correspond to the left without being swapped. */
5706 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5707 return 0;
5709 first_bit = MIN (lr_bitpos, rr_bitpos);
5710 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5711 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5712 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5713 volatilep);
5714 if (rnmode == VOIDmode)
5715 return 0;
5717 rnbitsize = GET_MODE_BITSIZE (rnmode);
5718 rnbitpos = first_bit & ~ (rnbitsize - 1);
5719 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5720 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5722 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5724 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5725 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5728 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5729 rntype, lr_mask),
5730 size_int (xlr_bitpos));
5731 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5732 rntype, rr_mask),
5733 size_int (xrr_bitpos));
5735 /* Make a mask that corresponds to both fields being compared.
5736 Do this for both items being compared. If the operands are the
5737 same size and the bits being compared are in the same position
5738 then we can do this by masking both and comparing the masked
5739 results. */
5740 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5741 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5742 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5744 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5745 ll_unsignedp || rl_unsignedp, ll_reversep);
5746 if (! all_ones_mask_p (ll_mask, lnbitsize))
5747 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5749 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5750 lr_unsignedp || rr_unsignedp, lr_reversep);
5751 if (! all_ones_mask_p (lr_mask, rnbitsize))
5752 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5754 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
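/* A rough sketch of the result just built: for
     p->a == q->a && p->b == q->b
   with both fields loadable as one word per side, the comparison
   becomes, roughly,
     (word_of (p) & mask) == (word_of (q) & mask)
   where word_of stands in for the bit-field reference and mask
   covers both fields.  */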
5757 /* There is still another way we can do something: If both pairs of
5758 fields being compared are adjacent, we may be able to make a wider
5759 field containing them both.
5761 Note that we still must mask the lhs/rhs expressions. Furthermore,
5762 the mask must be shifted to account for the shift done by
5763 make_bit_field_ref. */
5764 if ((ll_bitsize + ll_bitpos == rl_bitpos
5765 && lr_bitsize + lr_bitpos == rr_bitpos)
5766 || (ll_bitpos == rl_bitpos + rl_bitsize
5767 && lr_bitpos == rr_bitpos + rr_bitsize))
5769 tree type;
5771 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5772 ll_bitsize + rl_bitsize,
5773 MIN (ll_bitpos, rl_bitpos),
5774 ll_unsignedp, ll_reversep);
5775 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5776 lr_bitsize + rr_bitsize,
5777 MIN (lr_bitpos, rr_bitpos),
5778 lr_unsignedp, lr_reversep);
5780 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5781 size_int (MIN (xll_bitpos, xrl_bitpos)));
5782 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5783 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5785 /* Convert to the smaller type before masking out unwanted bits. */
5786 type = lntype;
5787 if (lntype != rntype)
5789 if (lnbitsize > rnbitsize)
5791 lhs = fold_convert_loc (loc, rntype, lhs);
5792 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5793 type = rntype;
5795 else if (lnbitsize < rnbitsize)
5797 rhs = fold_convert_loc (loc, lntype, rhs);
5798 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5799 type = lntype;
5803 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5804 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5806 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5807 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5809 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5812 return 0;
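/* For illustration of the adjacent-field case above: comparing bit
   positions 0-3 and 4-7 of the same word on each side is done by
   fetching bits 0-7 at once per side, so a single comparison of the
   combined field remains.  */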
5815 /* Handle the case of comparisons with constants. If there is something in
5816 common between the masks, those bits of the constants must be the same.
5817 If not, the condition is always false. Test for this to avoid generating
5818 incorrect code below. */
5819 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5820 if (! integer_zerop (result)
5821 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5822 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5824 if (wanted_code == NE_EXPR)
5826 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5827 return constant_boolean_node (true, truth_type);
5829 else
5831 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5832 return constant_boolean_node (false, truth_type);
5836 /* Construct the expression we will return. First get the component
5837 reference we will make. Unless the mask is all ones the width of
5838 that field, perform the mask operation. Then compare with the
5839 merged constant. */
5840 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5841 ll_unsignedp || rl_unsignedp, ll_reversep);
5843 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5844 if (! all_ones_mask_p (ll_mask, lnbitsize))
5845 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5847 return build2_loc (loc, wanted_code, truth_type, result,
5848 const_binop (BIT_IOR_EXPR, l_const, r_const));
5851 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5852 constant. */
5854 static tree
5855 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5856 tree op0, tree op1)
5858 tree arg0 = op0;
5859 enum tree_code op_code;
5860 tree comp_const;
5861 tree minmax_const;
5862 int consts_equal, consts_lt;
5863 tree inner;
5865 STRIP_SIGN_NOPS (arg0);
5867 op_code = TREE_CODE (arg0);
5868 minmax_const = TREE_OPERAND (arg0, 1);
5869 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5870 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5871 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5872 inner = TREE_OPERAND (arg0, 0);
5874 /* If something does not permit us to optimize, return NULL_TREE. */
5875 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5876 || TREE_CODE (comp_const) != INTEGER_CST
5877 || TREE_OVERFLOW (comp_const)
5878 || TREE_CODE (minmax_const) != INTEGER_CST
5879 || TREE_OVERFLOW (minmax_const))
5880 return NULL_TREE;
5882 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5883 and GT_EXPR, doing the rest with recursive calls using logical
5884 simplifications. */
5885 switch (code)
5887 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5889 tree tem
5890 = optimize_minmax_comparison (loc,
5891 invert_tree_comparison (code, false),
5892 type, op0, op1);
5893 if (tem)
5894 return invert_truthvalue_loc (loc, tem);
5895 return NULL_TREE;
5898 case GE_EXPR:
5899 return
5900 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5901 optimize_minmax_comparison
5902 (loc, EQ_EXPR, type, arg0, comp_const),
5903 optimize_minmax_comparison
5904 (loc, GT_EXPR, type, arg0, comp_const));
5906 case EQ_EXPR:
5907 if (op_code == MAX_EXPR && consts_equal)
5908 /* MAX (X, 0) == 0 -> X <= 0 */
5909 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5911 else if (op_code == MAX_EXPR && consts_lt)
5912 /* MAX (X, 0) == 5 -> X == 5 */
5913 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5915 else if (op_code == MAX_EXPR)
5916 /* MAX (X, 0) == -1 -> false */
5917 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5919 else if (consts_equal)
5920 /* MIN (X, 0) == 0 -> X >= 0 */
5921 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5923 else if (consts_lt)
5924 /* MIN (X, 0) == 5 -> false */
5925 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5927 else
5928 /* MIN (X, 0) == -1 -> X == -1 */
5929 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5931 case GT_EXPR:
5932 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5933 /* MAX (X, 0) > 0 -> X > 0
5934 MAX (X, 0) > 5 -> X > 5 */
5935 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5937 else if (op_code == MAX_EXPR)
5938 /* MAX (X, 0) > -1 -> true */
5939 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5941 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5942 /* MIN (X, 0) > 0 -> false
5943 MIN (X, 0) > 5 -> false */
5944 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5946 else
5947 /* MIN (X, 0) > -1 -> X > -1 */
5948 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5950 default:
5951 return NULL_TREE;
5955 /* T is an integer expression that is being multiplied, divided, or taken a
5956 modulus (CODE says which and what kind of divide or modulus) by a
5957 constant C. See if we can eliminate that operation by folding it with
5958 other operations already in T. WIDE_TYPE, if non-null, is a type that
5959 should be used for the computation if wider than our type.
5961 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5962 (X * 2) + (Y * 4). We must, however, be assured that either the original
5963 expression would not overflow or that overflow is undefined for the type
5964 in the language in question.
5966 If we return a non-null expression, it is an equivalent form of the
5967 original computation, but need not be in the original type.
5969 We set *STRICT_OVERFLOW_P to true if the return value depends on
5970 signed overflow being undefined. Otherwise we do not change
5971 *STRICT_OVERFLOW_P. */
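/* For illustration: with signed x and overflow undefined, (x * 4) / 4
   simplifies to x; under -fwrapv the multiplication may wrap, so the
   simplification would be invalid and extract_muldiv_1 refuses it.
   When the fold is done, *STRICT_OVERFLOW_P records the reliance on
   undefined overflow.  */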
5973 static tree
5974 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5975 bool *strict_overflow_p)
5977 /* To avoid exponential search depth, refuse to allow recursion past
5978 three levels. Beyond that (1) it's highly unlikely that we'll find
5979 something interesting and (2) we've probably processed it before
5980 when we built the inner expression. */
5982 static int depth;
5983 tree ret;
5985 if (depth > 3)
5986 return NULL;
5988 depth++;
5989 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5990 depth--;
5992 return ret;
5995 static tree
5996 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5997 bool *strict_overflow_p)
5999 tree type = TREE_TYPE (t);
6000 enum tree_code tcode = TREE_CODE (t);
6001 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6002 > GET_MODE_SIZE (TYPE_MODE (type)))
6003 ? wide_type : type);
6004 tree t1, t2;
6005 int same_p = tcode == code;
6006 tree op0 = NULL_TREE, op1 = NULL_TREE;
6007 bool sub_strict_overflow_p;
6009 /* Don't deal with constants of zero here; they confuse the code below. */
6010 if (integer_zerop (c))
6011 return NULL_TREE;
6013 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6014 op0 = TREE_OPERAND (t, 0);
6016 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6017 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6019 /* Note that we need not handle conditional operations here since fold
6020 already handles those cases. So just do arithmetic here. */
6021 switch (tcode)
6023 case INTEGER_CST:
6024 /* For a constant, we can always simplify if we are a multiply
6025 or (for divide and modulus) if it is a multiple of our constant. */
6026 if (code == MULT_EXPR
6027 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6029 tree tem = const_binop (code, fold_convert (ctype, t),
6030 fold_convert (ctype, c));
6031 /* If the multiplication overflowed to INT_MIN then we lost sign
6032 information on it and a subsequent multiplication might
6033 spuriously overflow. See PR68142. */
6034 if (TREE_OVERFLOW (tem)
6035 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6036 return NULL_TREE;
6037 return tem;
6039 break;
6041 CASE_CONVERT: case NON_LVALUE_EXPR:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0)
6044 || UNARY_CLASS_P (op0)
6045 || BINARY_CLASS_P (op0)
6046 || VL_EXP_CLASS_P (op0)
6047 || EXPRESSION_CLASS_P (op0))
6048 /* ... and has wrapping overflow, and its type is smaller
6049 than ctype, then we cannot pass through as widening. */
6050 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6051 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6052 && (TYPE_PRECISION (ctype)
6053 > TYPE_PRECISION (TREE_TYPE (op0))))
6054 /* ... or this is a truncation (t is narrower than op0),
6055 then we cannot pass through this narrowing. */
6056 || (TYPE_PRECISION (type)
6057 < TYPE_PRECISION (TREE_TYPE (op0)))
6058 /* ... or signedness changes for division or modulus,
6059 then we cannot pass through this conversion. */
6060 || (code != MULT_EXPR
6061 && (TYPE_UNSIGNED (ctype)
6062 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6063 /* ... or has undefined overflow while the converted to
6064 type has not, we cannot do the operation in the inner type
6065 as that would introduce undefined overflow. */
6066 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6067 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6068 && !TYPE_OVERFLOW_UNDEFINED (type))))
6069 break;
6071 /* Pass the constant down and see if we can make a simplification. If
6072 we can, replace this expression with the inner simplification for
6073 possible later conversion to our or some other type. */
6074 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6075 && TREE_CODE (t2) == INTEGER_CST
6076 && !TREE_OVERFLOW (t2)
6077 && (0 != (t1 = extract_muldiv (op0, t2, code,
6078 code == MULT_EXPR
6079 ? ctype : NULL_TREE,
6080 strict_overflow_p))))
6081 return t1;
6082 break;
6084 case ABS_EXPR:
6085 /* If widening the type changes it from signed to unsigned, then we
6086 must avoid building ABS_EXPR itself as unsigned. */
6087 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6089 tree cstype = (*signed_type_for) (ctype);
6090 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6091 != 0)
6093 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6094 return fold_convert (ctype, t1);
6096 break;
6098 /* If the constant is negative, we cannot simplify this. */
6099 if (tree_int_cst_sgn (c) == -1)
6100 break;
6101 /* FALLTHROUGH */
6102 case NEGATE_EXPR:
6103 /* For division and modulus, type can't be unsigned, as e.g.
6104 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6105 For signed types, even with wrapping overflow, this is fine. */
6106 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6107 break;
6108 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6109 != 0)
6110 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6111 break;
6113 case MIN_EXPR: case MAX_EXPR:
6114 /* If widening the type changes the signedness, then we can't perform
6115 this optimization as that changes the result. */
6116 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6117 break;
6119 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6120 sub_strict_overflow_p = false;
6121 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6122 &sub_strict_overflow_p)) != 0
6123 && (t2 = extract_muldiv (op1, c, code, wide_type,
6124 &sub_strict_overflow_p)) != 0)
6126 if (tree_int_cst_sgn (c) < 0)
6127 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6128 if (sub_strict_overflow_p)
6129 *strict_overflow_p = true;
6130 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6131 fold_convert (ctype, t2));
6133 break;
6135 case LSHIFT_EXPR: case RSHIFT_EXPR:
6136 /* If the second operand is constant, this is a multiplication
6137 or floor division, by a power of two, so we can treat it that
6138 way unless the multiplier or divisor overflows. Signed
6139 left-shift overflow is implementation-defined rather than
6140 undefined in C90, so do not convert signed left shift into
6141 multiplication. */
6142 if (TREE_CODE (op1) == INTEGER_CST
6143 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6144 /* const_binop may not detect overflow correctly,
6145 so check for it explicitly here. */
6146 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6147 && 0 != (t1 = fold_convert (ctype,
6148 const_binop (LSHIFT_EXPR,
6149 size_one_node,
6150 op1)))
6151 && !TREE_OVERFLOW (t1))
6152 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6153 ? MULT_EXPR : FLOOR_DIV_EXPR,
6154 ctype,
6155 fold_convert (ctype, op0),
6156 t1),
6157 c, code, wide_type, strict_overflow_p);
6158 break;
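/* For illustration: for unsigned x, ((x << 3) * 2) arrives here with
   tcode == LSHIFT_EXPR, is rewritten as (x * 8) * 2, and the
   recursive call then combines the constants into x * 16.  */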
6160 case PLUS_EXPR: case MINUS_EXPR:
6161 /* See if we can eliminate the operation on both sides. If we can, we
6162 can return a new PLUS or MINUS. If we can't, the only remaining
6163 cases where we can do anything are if the second operand is a
6164 constant. */
6165 sub_strict_overflow_p = false;
6166 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6167 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6168 if (t1 != 0 && t2 != 0
6169 && (code == MULT_EXPR
6170 /* If not multiplication, we can only do this if both operands
6171 are divisible by c. */
6172 || (multiple_of_p (ctype, op0, c)
6173 && multiple_of_p (ctype, op1, c))))
6175 if (sub_strict_overflow_p)
6176 *strict_overflow_p = true;
6177 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6178 fold_convert (ctype, t2));
6181 /* If this was a subtraction, negate OP1 and set it to be an addition.
6182 This simplifies the logic below. */
6183 if (tcode == MINUS_EXPR)
6185 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6186 /* If OP1 was not easily negatable, the constant may be OP0. */
6187 if (TREE_CODE (op0) == INTEGER_CST)
6189 std::swap (op0, op1);
6190 std::swap (t1, t2);
6194 if (TREE_CODE (op1) != INTEGER_CST)
6195 break;
6197 /* If either OP1 or C are negative, this optimization is not safe for
6198 some of the division and remainder types while for others we need
6199 to change the code. */
6200 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6202 if (code == CEIL_DIV_EXPR)
6203 code = FLOOR_DIV_EXPR;
6204 else if (code == FLOOR_DIV_EXPR)
6205 code = CEIL_DIV_EXPR;
6206 else if (code != MULT_EXPR
6207 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6208 break;
6211 /* If it's a multiply or a division/modulus operation of a multiple
6212 of our constant, do the operation and verify it doesn't overflow. */
6213 if (code == MULT_EXPR
6214 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6216 op1 = const_binop (code, fold_convert (ctype, op1),
6217 fold_convert (ctype, c));
6218 /* We allow the constant to overflow with wrapping semantics. */
6219 if (op1 == 0
6220 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6221 break;
6223 else
6224 break;
6226 /* If we have an unsigned type, we cannot widen the operation since it
6227 will change the result if the original computation overflowed. */
6228 if (TYPE_UNSIGNED (ctype) && ctype != type)
6229 break;
6231 /* If we were able to eliminate our operation from the first side,
6232 apply our operation to the second side and reform the PLUS. */
6233 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6234 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6236 /* The last case is if we are a multiply. In that case, we can
6237 apply the distributive law to commute the multiply and addition
6238 if the multiplication of the constants doesn't overflow
6239 and overflow is defined. With undefined overflow
6240 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6241 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6242 return fold_build2 (tcode, ctype,
6243 fold_build2 (code, ctype,
6244 fold_convert (ctype, op0),
6245 fold_convert (ctype, c)),
6246 op1);
6248 break;
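/* For illustration: assuming signed overflow is undefined,
   (x * 4 + 8) / 4 folds to x + 2 since both addends are divisible
   by 4, whereas (x * 4 + 2) / 4 is left alone.  */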
6250 case MULT_EXPR:
6251 /* We have a special case here if we are doing something like
6252 (C * 8) % 4 since we know that's zero. */
6253 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6254 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6255 /* If the multiplication can overflow we cannot optimize this. */
6256 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6257 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6258 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6260 *strict_overflow_p = true;
6261 return omit_one_operand (type, integer_zero_node, op0);
6264 /* ... fall through ... */
6266 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6267 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6268 /* If we can extract our operation from the LHS, do so and return a
6269 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6270 do something only if the second operand is a constant. */
6271 if (same_p
6272 && (t1 = extract_muldiv (op0, c, code, wide_type,
6273 strict_overflow_p)) != 0)
6274 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6275 fold_convert (ctype, op1));
6276 else if (tcode == MULT_EXPR && code == MULT_EXPR
6277 && (t1 = extract_muldiv (op1, c, code, wide_type,
6278 strict_overflow_p)) != 0)
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6280 fold_convert (ctype, t1));
6281 else if (TREE_CODE (op1) != INTEGER_CST)
6282 return 0;
6284 /* If these are the same operation types, we can associate them
6285 assuming no overflow. */
6286 if (tcode == code)
6288 bool overflow_p = false;
6289 bool overflow_mul_p;
6290 signop sign = TYPE_SIGN (ctype);
6291 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6292 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6293 if (overflow_mul_p
6294 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6295 overflow_p = true;
6296 if (!overflow_p)
6298 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6299 TYPE_SIGN (TREE_TYPE (op1)));
6300 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6301 wide_int_to_tree (ctype, mul));
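/* For illustration: (x / 3) / 5 associates here to x / 15, provided
   the constant product 3 * 5 does not overflow CTYPE.  */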
6305 /* If these operations "cancel" each other, we have the main
6306 optimizations of this pass, which occur when either constant is a
6307 multiple of the other, in which case we replace this with either an
6308 operation of CODE or TCODE.
6310 If we have an unsigned type, we cannot do this since it will change
6311 the result if the original computation overflowed. */
6312 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6313 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6314 || (tcode == MULT_EXPR
6315 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6316 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6317 && code != MULT_EXPR)))
6319 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6321 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6322 *strict_overflow_p = true;
6323 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6324 fold_convert (ctype,
6325 const_binop (TRUNC_DIV_EXPR,
6326 op1, c)));
6328 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6330 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6331 *strict_overflow_p = true;
6332 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6333 fold_convert (ctype,
6334 const_binop (TRUNC_DIV_EXPR,
6335 c, op1)));
6338 break;
6340 default:
6341 break;
6344 return 0;
6347 /* Return a node which has the indicated constant VALUE (either 0 or
6348 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6349 and is of the indicated TYPE. */
6351 tree
6352 constant_boolean_node (bool value, tree type)
6354 if (type == integer_type_node)
6355 return value ? integer_one_node : integer_zero_node;
6356 else if (type == boolean_type_node)
6357 return value ? boolean_true_node : boolean_false_node;
6358 else if (TREE_CODE (type) == VECTOR_TYPE)
6359 return build_vector_from_val (type,
6360 build_int_cst (TREE_TYPE (type),
6361 value ? -1 : 0));
6362 else
6363 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6367 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6368 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6369 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6370 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6371 COND is the first argument to CODE; otherwise (as in the example
6372 given here), it is the second argument. TYPE is the type of the
6373 original expression. Return NULL_TREE if no simplification is
6374 possible. */
6376 static tree
6377 fold_binary_op_with_conditional_arg (location_t loc,
6378 enum tree_code code,
6379 tree type, tree op0, tree op1,
6380 tree cond, tree arg, int cond_first_p)
6382 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6383 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6384 tree test, true_value, false_value;
6385 tree lhs = NULL_TREE;
6386 tree rhs = NULL_TREE;
6387 enum tree_code cond_code = COND_EXPR;
6389 if (TREE_CODE (cond) == COND_EXPR
6390 || TREE_CODE (cond) == VEC_COND_EXPR)
6392 test = TREE_OPERAND (cond, 0);
6393 true_value = TREE_OPERAND (cond, 1);
6394 false_value = TREE_OPERAND (cond, 2);
6395 /* If this operand is a void expression (such as one that throws),
6396 then it does not make sense to try to perform a logical or
6397 arithmetic operation involving it. */
6398 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6399 lhs = true_value;
6400 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6401 rhs = false_value;
6403 else
6405 tree testtype = TREE_TYPE (cond);
6406 test = cond;
6407 true_value = constant_boolean_node (true, testtype);
6408 false_value = constant_boolean_node (false, testtype);
6411 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6412 cond_code = VEC_COND_EXPR;
6414 /* This transformation is only worthwhile if we don't have to wrap ARG
6415 in a SAVE_EXPR and the operation can be simplified without recursing
6416 on at least one of the branches once it's pushed inside the COND_EXPR. */
6417 if (!TREE_CONSTANT (arg)
6418 && (TREE_SIDE_EFFECTS (arg)
6419 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6420 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6421 return NULL_TREE;
6423 arg = fold_convert_loc (loc, arg_type, arg);
6424 if (lhs == 0)
6426 true_value = fold_convert_loc (loc, cond_type, true_value);
6427 if (cond_first_p)
6428 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6429 else
6430 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6432 if (rhs == 0)
6434 false_value = fold_convert_loc (loc, cond_type, false_value);
6435 if (cond_first_p)
6436 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6437 else
6438 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6441 /* Check that we have simplified at least one of the branches. */
6442 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6443 return NULL_TREE;
6445 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6449 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6451 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6452 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6453 ADDEND is the same as X.
6455 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6456 and finite. The problematic cases are when X is zero, and its mode
6457 has signed zeros. In the case of rounding towards -infinity,
6458 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6459 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6461 bool
6462 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6464 if (!real_zerop (addend))
6465 return false;
6467 /* Don't allow the fold with -fsignaling-nans. */
6468 if (HONOR_SNANS (element_mode (type)))
6469 return false;
6471 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6472 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6473 return true;
6475 /* In a vector or complex, we would need to check the sign of all zeros. */
6476 if (TREE_CODE (addend) != REAL_CST)
6477 return false;
6479 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6480 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6481 negate = !negate;
6483 /* The mode has signed zeros, and we have to honor their sign.
6484 In this situation, there is only one case we can return true for.
6485 X - 0 is the same as X unless rounding towards -infinity is
6486 supported. */
6487 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
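/* For illustration: when signed zeros are honored, x + 0.0 must not
   fold to x because (-0.0) + 0.0 is +0.0; x - 0.0 may fold, except
   under rounding towards -infinity, where (+0.0) - 0.0 yields
   -0.0.  */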
6490 /* Subroutine of fold() that optimizes comparisons of a division by
6491 a nonzero integer constant against an integer constant, i.e.
6492 X/C1 op C2.
6494 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6495 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6496 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6498 The function returns the constant folded tree if a simplification
6499 can be made, and NULL_TREE otherwise. */
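/* For illustration: for unsigned x, x / 4 == 2 holds exactly when x
   is in [8, 11], so the code below turns it into the range check
   8 <= x && x <= 11.  */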
6501 static tree
6502 fold_div_compare (location_t loc,
6503 enum tree_code code, tree type, tree arg0, tree arg1)
6505 tree prod, tmp, hi, lo;
6506 tree arg00 = TREE_OPERAND (arg0, 0);
6507 tree arg01 = TREE_OPERAND (arg0, 1);
6508 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6509 bool neg_overflow = false;
6510 bool overflow;
6512 /* We have to do this the hard way to detect unsigned overflow.
6513 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6514 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6515 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6516 neg_overflow = false;
6518 if (sign == UNSIGNED)
6520 tmp = int_const_binop (MINUS_EXPR, arg01,
6521 build_int_cst (TREE_TYPE (arg01), 1));
6522 lo = prod;
6524 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6525 val = wi::add (prod, tmp, sign, &overflow);
6526 hi = force_fit_type (TREE_TYPE (arg00), val,
6527 -1, overflow | TREE_OVERFLOW (prod));
6529 else if (tree_int_cst_sgn (arg01) >= 0)
6531 tmp = int_const_binop (MINUS_EXPR, arg01,
6532 build_int_cst (TREE_TYPE (arg01), 1));
6533 switch (tree_int_cst_sgn (arg1))
6535 case -1:
6536 neg_overflow = true;
6537 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6538 hi = prod;
6539 break;
6541 case 0:
6542 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6543 hi = tmp;
6544 break;
6546 case 1:
6547 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6548 lo = prod;
6549 break;
6551 default:
6552 gcc_unreachable ();
6555 else
6557 /* A negative divisor reverses the relational operators. */
6558 code = swap_tree_comparison (code);
6560 tmp = int_const_binop (PLUS_EXPR, arg01,
6561 build_int_cst (TREE_TYPE (arg01), 1));
6562 switch (tree_int_cst_sgn (arg1))
6564 case -1:
6565 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6566 lo = prod;
6567 break;
6569 case 0:
6570 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6571 lo = tmp;
6572 break;
6574 case 1:
6575 neg_overflow = true;
6576 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6577 hi = prod;
6578 break;
6580 default:
6581 gcc_unreachable ();
6585 switch (code)
6587 case EQ_EXPR:
6588 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6589 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6590 if (TREE_OVERFLOW (hi))
6591 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6592 if (TREE_OVERFLOW (lo))
6593 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6594 return build_range_check (loc, type, arg00, 1, lo, hi);
6596 case NE_EXPR:
6597 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6598 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6599 if (TREE_OVERFLOW (hi))
6600 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6601 if (TREE_OVERFLOW (lo))
6602 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6603 return build_range_check (loc, type, arg00, 0, lo, hi);
6605 case LT_EXPR:
6606 if (TREE_OVERFLOW (lo))
6608 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6609 return omit_one_operand_loc (loc, type, tmp, arg00);
6611 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6613 case LE_EXPR:
6614 if (TREE_OVERFLOW (hi))
6616 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6617 return omit_one_operand_loc (loc, type, tmp, arg00);
6619 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6621 case GT_EXPR:
6622 if (TREE_OVERFLOW (hi))
6624 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6625 return omit_one_operand_loc (loc, type, tmp, arg00);
6627 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6629 case GE_EXPR:
6630 if (TREE_OVERFLOW (lo))
6632 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6633 return omit_one_operand_loc (loc, type, tmp, arg00);
6635 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6637 default:
6638 break;
6641 return NULL_TREE;
6645 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6646 equality/inequality test, then return a simplified form of the test
6647 using a sign test. Otherwise return NULL. TYPE is the desired
6648 result type. */
6650 static tree
6651 fold_single_bit_test_into_sign_test (location_t loc,
6652 enum tree_code code, tree arg0, tree arg1,
6653 tree result_type)
6655 /* If this is testing a single bit, we can optimize the test. */
6656 if ((code == NE_EXPR || code == EQ_EXPR)
6657 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6658 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6660 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6661 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6662 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6664 if (arg00 != NULL_TREE
6665 /* This is only a win if casting to a signed type is cheap,
6666 i.e. when arg00's type is not a partial mode. */
6667 && TYPE_PRECISION (TREE_TYPE (arg00))
6668 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6670 tree stype = signed_type_for (TREE_TYPE (arg00));
6671 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6672 result_type,
6673 fold_convert_loc (loc, stype, arg00),
6674 build_int_cst (stype, 0));
6678 return NULL_TREE;
6681 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6682 equality/inequality test, then return a simplified form of
6683 the test using shifts and logical operations. Otherwise return
6684 NULL. TYPE is the desired result type. */
6686 tree
6687 fold_single_bit_test (location_t loc, enum tree_code code,
6688 tree arg0, tree arg1, tree result_type)
6690 /* If this is testing a single bit, we can optimize the test. */
6691 if ((code == NE_EXPR || code == EQ_EXPR)
6692 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6693 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6695 tree inner = TREE_OPERAND (arg0, 0);
6696 tree type = TREE_TYPE (arg0);
6697 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6698 machine_mode operand_mode = TYPE_MODE (type);
6699 int ops_unsigned;
6700 tree signed_type, unsigned_type, intermediate_type;
6701 tree tem, one;
6703 /* First, see if we can fold the single bit test into a sign-bit
6704 test. */
6705 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6706 result_type);
6707 if (tem)
6708 return tem;
6710 /* Otherwise we have (A & C) != 0 where C is a single bit,
6711 convert that into ((A >> C2) & 1), where C2 = log2(C).
6712 Similarly for (A & C) == 0. */
6714 /* If INNER is a right shift of a constant and it plus BITNUM does
6715 not overflow, adjust BITNUM and INNER. */
6716 if (TREE_CODE (inner) == RSHIFT_EXPR
6717 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6718 && bitnum < TYPE_PRECISION (type)
6719 && wi::ltu_p (TREE_OPERAND (inner, 1),
6720 TYPE_PRECISION (type) - bitnum))
6722 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6723 inner = TREE_OPERAND (inner, 0);
6726 /* If we are going to be able to omit the AND below, we must do our
6727 operations as unsigned. If we must use the AND, we have a choice.
6728 Normally unsigned is faster, but for some machines signed is. */
6729 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6730 && !flag_syntax_only) ? 0 : 1;
6732 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6733 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6734 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6735 inner = fold_convert_loc (loc, intermediate_type, inner);
6737 if (bitnum != 0)
6738 inner = build2 (RSHIFT_EXPR, intermediate_type,
6739 inner, size_int (bitnum));
6741 one = build_int_cst (intermediate_type, 1);
6743 if (code == EQ_EXPR)
6744 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6746 /* Put the AND last so it can combine with more things. */
6747 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6749 /* Make sure to return the proper type. */
6750 inner = fold_convert_loc (loc, result_type, inner);
6752 return inner;
6754 return NULL_TREE;
6757 /* Check whether we are allowed to reorder operands arg0 and arg1,
6758 such that the evaluation of arg1 occurs before arg0. */
6760 static bool
6761 reorder_operands_p (const_tree arg0, const_tree arg1)
6763 if (! flag_evaluation_order)
6764 return true;
6765 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6766 return true;
6767 return ! TREE_SIDE_EFFECTS (arg0)
6768 && ! TREE_SIDE_EFFECTS (arg1);
6771 /* Test whether it is preferable to swap two operands, ARG0 and
6772 ARG1, for example because ARG0 is an integer constant and ARG1
6773 isn't. If REORDER is true, only recommend swapping if we can
6774 evaluate the operands in reverse order. */
6776 bool
6777 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6779 if (CONSTANT_CLASS_P (arg1))
6780 return 0;
6781 if (CONSTANT_CLASS_P (arg0))
6782 return 1;
6784 STRIP_NOPS (arg0);
6785 STRIP_NOPS (arg1);
6787 if (TREE_CONSTANT (arg1))
6788 return 0;
6789 if (TREE_CONSTANT (arg0))
6790 return 1;
6792 if (reorder && flag_evaluation_order
6793 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6794 return 0;
6796 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6797 for commutative and comparison operators. Ensuring a canonical
6798 form allows the optimizers to find additional redundancies without
6799 having to explicitly check for both orderings. */
6800 if (TREE_CODE (arg0) == SSA_NAME
6801 && TREE_CODE (arg1) == SSA_NAME
6802 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6803 return 1;
6805 /* Put SSA_NAMEs last. */
6806 if (TREE_CODE (arg1) == SSA_NAME)
6807 return 0;
6808 if (TREE_CODE (arg0) == SSA_NAME)
6809 return 1;
6811 /* Put variables last. */
6812 if (DECL_P (arg1))
6813 return 0;
6814 if (DECL_P (arg0))
6815 return 1;
6817 return 0;
6821 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6822 means A >= Y && A != MAX, but in this case we know that
6823 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6825 static tree
6826 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6828 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6830 if (TREE_CODE (bound) == LT_EXPR)
6831 a = TREE_OPERAND (bound, 0);
6832 else if (TREE_CODE (bound) == GT_EXPR)
6833 a = TREE_OPERAND (bound, 1);
6834 else
6835 return NULL_TREE;
6837 typea = TREE_TYPE (a);
6838 if (!INTEGRAL_TYPE_P (typea)
6839 && !POINTER_TYPE_P (typea))
6840 return NULL_TREE;
6842 if (TREE_CODE (ineq) == LT_EXPR)
6844 a1 = TREE_OPERAND (ineq, 1);
6845 y = TREE_OPERAND (ineq, 0);
6847 else if (TREE_CODE (ineq) == GT_EXPR)
6849 a1 = TREE_OPERAND (ineq, 0);
6850 y = TREE_OPERAND (ineq, 1);
6852 else
6853 return NULL_TREE;
6855 if (TREE_TYPE (a1) != typea)
6856 return NULL_TREE;
6858 if (POINTER_TYPE_P (typea))
6860 /* Convert the pointer types into integer before taking the difference. */
6861 tree ta = fold_convert_loc (loc, ssizetype, a);
6862 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6863 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6865 else
6866 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6868 if (!diff || !integer_onep (diff))
6869 return NULL_TREE;
6871 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6874 /* Fold a sum or difference of at least one multiplication.
6875 Returns the folded tree or NULL if no simplification could be made. */
6877 static tree
6878 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6879 tree arg0, tree arg1)
6881 tree arg00, arg01, arg10, arg11;
6882 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6884 /* (A * C) +- (B * C) -> (A+-B) * C.
6885 (A * C) +- A -> A * (C+-1).
6886 We are most concerned about the case where C is a constant,
6887 but other combinations show up during loop reduction. Since
6888 it is not difficult, try all four possibilities. */
6890 if (TREE_CODE (arg0) == MULT_EXPR)
6892 arg00 = TREE_OPERAND (arg0, 0);
6893 arg01 = TREE_OPERAND (arg0, 1);
6895 else if (TREE_CODE (arg0) == INTEGER_CST)
6897 arg00 = build_one_cst (type);
6898 arg01 = arg0;
6900 else
6902 /* We cannot generate constant 1 for fract. */
6903 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6904 return NULL_TREE;
6905 arg00 = arg0;
6906 arg01 = build_one_cst (type);
6908 if (TREE_CODE (arg1) == MULT_EXPR)
6910 arg10 = TREE_OPERAND (arg1, 0);
6911 arg11 = TREE_OPERAND (arg1, 1);
6913 else if (TREE_CODE (arg1) == INTEGER_CST)
6915 arg10 = build_one_cst (type);
6916 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6917 the purpose of this canonicalization. */
6918 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6919 && negate_expr_p (arg1)
6920 && code == PLUS_EXPR)
6922 arg11 = negate_expr (arg1);
6923 code = MINUS_EXPR;
6925 else
6926 arg11 = arg1;
6928 else
6930 /* We cannot generate constant 1 for fract. */
6931 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6932 return NULL_TREE;
6933 arg10 = arg1;
6934 arg11 = build_one_cst (type);
6936 same = NULL_TREE;
6938 if (operand_equal_p (arg01, arg11, 0))
6939 same = arg01, alt0 = arg00, alt1 = arg10;
6940 else if (operand_equal_p (arg00, arg10, 0))
6941 same = arg00, alt0 = arg01, alt1 = arg11;
6942 else if (operand_equal_p (arg00, arg11, 0))
6943 same = arg00, alt0 = arg01, alt1 = arg10;
6944 else if (operand_equal_p (arg01, arg10, 0))
6945 same = arg01, alt0 = arg00, alt1 = arg11;
6947 /* No identical multiplicands; see if we can find a common
6948 power-of-two factor in non-power-of-two multiplies. This
6949 can help in multi-dimensional array access. */
6950 else if (tree_fits_shwi_p (arg01)
6951 && tree_fits_shwi_p (arg11))
6953 HOST_WIDE_INT int01, int11, tmp;
6954 bool swap = false;
6955 tree maybe_same;
6956 int01 = tree_to_shwi (arg01);
6957 int11 = tree_to_shwi (arg11);
6959 /* Move min of absolute values to int11. */
6960 if (absu_hwi (int01) < absu_hwi (int11))
6962 tmp = int01, int01 = int11, int11 = tmp;
6963 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6964 maybe_same = arg01;
6965 swap = true;
6967 else
6968 maybe_same = arg11;
6970 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6971 /* The remainder should not be a constant, otherwise we
6972 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6973 increased the number of multiplications necessary. */
6974 && TREE_CODE (arg10) != INTEGER_CST)
6976 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6977 build_int_cst (TREE_TYPE (arg00),
6978 int01 / int11));
6979 alt1 = arg10;
6980 same = maybe_same;
6981 if (swap)
6982 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
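/* For illustration: i * 4 + j * 2 has no identical multiplicand, but
   2 divides 4, so the block above rewrites it as (i * 2 + j) * 2,
   which helps multi-dimensional array indexing.  */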
6986 if (same)
6987 return fold_build2_loc (loc, MULT_EXPR, type,
6988 fold_build2_loc (loc, code, type,
6989 fold_convert_loc (loc, type, alt0),
6990 fold_convert_loc (loc, type, alt1)),
6991 fold_convert_loc (loc, type, same));
6993 return NULL_TREE;
6996 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6997 specified by EXPR into the buffer PTR of length LEN bytes.
6998 Return the number of bytes placed in the buffer, or zero
6999 upon failure. */
7001 static int
7002 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7004 tree type = TREE_TYPE (expr);
7005 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7006 int byte, offset, word, words;
7007 unsigned char value;
7009 if ((off == -1 && total_bytes > len)
7010 || off >= total_bytes)
7011 return 0;
7012 if (off == -1)
7013 off = 0;
7014 words = total_bytes / UNITS_PER_WORD;
7016 for (byte = 0; byte < total_bytes; byte++)
7018 int bitpos = byte * BITS_PER_UNIT;
7019 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7020 number of bytes. */
7021 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7023 if (total_bytes > UNITS_PER_WORD)
7025 word = byte / UNITS_PER_WORD;
7026 if (WORDS_BIG_ENDIAN)
7027 word = (words - 1) - word;
7028 offset = word * UNITS_PER_WORD;
7029 if (BYTES_BIG_ENDIAN)
7030 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7031 else
7032 offset += byte % UNITS_PER_WORD;
7034 else
7035 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7036 if (offset >= off
7037 && offset - off < len)
7038 ptr[offset - off] = value;
7040 return MIN (len, total_bytes - off);
7044 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7045 specified by EXPR into the buffer PTR of length LEN bytes.
7046 Return the number of bytes placed in the buffer, or zero
7047 upon failure. */
7049 static int
7050 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7052 tree type = TREE_TYPE (expr);
7053 machine_mode mode = TYPE_MODE (type);
7054 int total_bytes = GET_MODE_SIZE (mode);
7055 FIXED_VALUE_TYPE value;
7056 tree i_value, i_type;
7058 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7059 return 0;
7061 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7063 if (NULL_TREE == i_type
7064 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7065 return 0;
7067 value = TREE_FIXED_CST (expr);
7068 i_value = double_int_to_tree (i_type, value.data);
7070 return native_encode_int (i_value, ptr, len, off);
7074 /* Subroutine of native_encode_expr. Encode the REAL_CST
7075 specified by EXPR into the buffer PTR of length LEN bytes.
7076 Return the number of bytes placed in the buffer, or zero
7077 upon failure. */
7079 static int
7080 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7082 tree type = TREE_TYPE (expr);
7083 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7084 int byte, offset, word, words, bitpos;
7085 unsigned char value;
7087 /* There are always 32 bits in each long, no matter the size of
7088 the host's long. We handle floating point representations with
7089 up to 192 bits. */
7090 long tmp[6];
7092 if ((off == -1 && total_bytes > len)
7093 || off >= total_bytes)
7094 return 0;
7095 if (off == -1)
7096 off = 0;
7097 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7099 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7101 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7102 bitpos += BITS_PER_UNIT)
7104 byte = (bitpos / BITS_PER_UNIT) & 3;
7105 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7107 if (UNITS_PER_WORD < 4)
7109 word = byte / UNITS_PER_WORD;
7110 if (WORDS_BIG_ENDIAN)
7111 word = (words - 1) - word;
7112 offset = word * UNITS_PER_WORD;
7113 if (BYTES_BIG_ENDIAN)
7114 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7115 else
7116 offset += byte % UNITS_PER_WORD;
7118 else
7119 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7120 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7121 if (offset >= off
7122 && offset - off < len)
7123 ptr[offset - off] = value;
7125 return MIN (len, total_bytes - off);
7128 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7129 specified by EXPR into the buffer PTR of length LEN bytes.
7130 Return the number of bytes placed in the buffer, or zero
7131 upon failure. */
7133 static int
7134 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7136 int rsize, isize;
7137 tree part;
7139 part = TREE_REALPART (expr);
7140 rsize = native_encode_expr (part, ptr, len, off);
7141 if (off == -1
7142 && rsize == 0)
7143 return 0;
7144 part = TREE_IMAGPART (expr);
7145 if (off != -1)
7146 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7147 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7148 if (off == -1
7149 && isize != rsize)
7150 return 0;
7151 return rsize + isize;
7155 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7160 static int
7161 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7163 unsigned i, count;
7164 int size, offset;
7165 tree itype, elem;
7167 offset = 0;
7168 count = VECTOR_CST_NELTS (expr);
7169 itype = TREE_TYPE (TREE_TYPE (expr));
7170 size = GET_MODE_SIZE (TYPE_MODE (itype));
7171 for (i = 0; i < count; i++)
7173 if (off >= size)
7175 off -= size;
7176 continue;
7178 elem = VECTOR_CST_ELT (expr, i);
7179 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7180 if ((off == -1 && res != size)
7181 || res == 0)
7182 return 0;
7183 offset += res;
7184 if (offset >= len)
7185 return offset;
7186 if (off != -1)
7187 off = 0;
7189 return offset;
7193 /* Subroutine of native_encode_expr. Encode the STRING_CST
7194 specified by EXPR into the buffer PTR of length LEN bytes.
7195 Return the number of bytes placed in the buffer, or zero
7196 upon failure. */
7198 static int
7199 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7201 tree type = TREE_TYPE (expr);
7202 HOST_WIDE_INT total_bytes;
7204 if (TREE_CODE (type) != ARRAY_TYPE
7205 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7206 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7207 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7208 return 0;
7209 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7210 if ((off == -1 && total_bytes > len)
7211 || off >= total_bytes)
7212 return 0;
7213 if (off == -1)
7214 off = 0;
7215 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7217 int written = 0;
7218 if (off < TREE_STRING_LENGTH (expr))
7220 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7221 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7223 memset (ptr + written, 0,
7224 MIN (total_bytes - written, len - written));
7226 else
7227 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7228 return MIN (total_bytes - off, len);
7232 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7233 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7234 by EXPR into the buffer PTR of length LEN bytes. If OFF is not -1 then start
7235 the encoding at byte offset OFF and encode at most LEN bytes.
7236 Return the number of bytes placed in the buffer, or zero upon failure. */
7238 int
7239 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7241 /* We don't support starting at negative offset and -1 is special. */
7242 if (off < -1)
7243 return 0;
7245 switch (TREE_CODE (expr))
7247 case INTEGER_CST:
7248 return native_encode_int (expr, ptr, len, off);
7250 case REAL_CST:
7251 return native_encode_real (expr, ptr, len, off);
7253 case FIXED_CST:
7254 return native_encode_fixed (expr, ptr, len, off);
7256 case COMPLEX_CST:
7257 return native_encode_complex (expr, ptr, len, off);
7259 case VECTOR_CST:
7260 return native_encode_vector (expr, ptr, len, off);
7262 case STRING_CST:
7263 return native_encode_string (expr, ptr, len, off);
7265 default:
7266 return 0;
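/* A rough sketch of the intended round trip (the buffer size here is
   only an example; real callers size it for the type at hand):

     unsigned char buf[64];
     int n = native_encode_expr (expr, buf, sizeof buf, -1);
     tree folded = n != 0 ? native_interpret_expr (type, buf, n)
                          : NULL_TREE;

   fold_view_convert_expr pairs these two routines in roughly this
   way to reinterpret a constant in another type.  */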
7271 /* Subroutine of native_interpret_expr. Interpret the contents of
7272 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7273 If the buffer cannot be interpreted, return NULL_TREE. */
7275 static tree
7276 native_interpret_int (tree type, const unsigned char *ptr, int len)
7278 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7280 if (total_bytes > len
7281 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7282 return NULL_TREE;
7284 wide_int result = wi::from_buffer (ptr, total_bytes);
7286 return wide_int_to_tree (type, result);
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7294 static tree
7295 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 double_int result;
7299 FIXED_VALUE_TYPE fixed_value;
7301 if (total_bytes > len
7302 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7303 return NULL_TREE;
7305 result = double_int::from_buffer (ptr, total_bytes);
7306 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7308 return build_fixed (type, fixed_value);
7312 /* Subroutine of native_interpret_expr. Interpret the contents of
7313 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7314 If the buffer cannot be interpreted, return NULL_TREE. */
7316 static tree
7317 native_interpret_real (tree type, const unsigned char *ptr, int len)
7319 machine_mode mode = TYPE_MODE (type);
7320 int total_bytes = GET_MODE_SIZE (mode);
7321 unsigned char value;
7322 /* There are always 32 bits in each long, no matter the size of
7323 the host's long. We handle floating point representations with
7324 up to 192 bits. */
7325 REAL_VALUE_TYPE r;
7326 long tmp[6];
7329 if (total_bytes > len || total_bytes > 24)
7330 return NULL_TREE;
7331 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7333 memset (tmp, 0, sizeof (tmp));
7334 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7335 bitpos += BITS_PER_UNIT)
7337 /* Both OFFSET and BYTE index within a long;
7338 bitpos indexes the whole float. */
7339 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7340 if (UNITS_PER_WORD < 4)
7342 int word = byte / UNITS_PER_WORD;
7343 if (WORDS_BIG_ENDIAN)
7344 word = (words - 1) - word;
7345 offset = word * UNITS_PER_WORD;
7346 if (BYTES_BIG_ENDIAN)
7347 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7348 else
7349 offset += byte % UNITS_PER_WORD;
7351 else
7353 offset = byte;
7354 if (BYTES_BIG_ENDIAN)
7356 /* Reverse bytes within each long, or within the entire float
7357 if it's smaller than a long (for HFmode). */
7358 offset = MIN (3, total_bytes - 1) - offset;
7359 gcc_assert (offset >= 0);
7362 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7364 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7367 real_from_target (&r, tmp, mode);
7368 return build_real (type, r);
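/* For illustration (IEEE single precision, little-endian target): the
   four bytes 00 00 80 3f are gathered into tmp[0] == 0x3f800000, and
   real_from_target reconstructs the REAL_CST 1.0. */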
7372 /* Subroutine of native_interpret_expr. Interpret the contents of
7373 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7374 If the buffer cannot be interpreted, return NULL_TREE. */
7376 static tree
7377 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7379 tree etype, rpart, ipart;
7380 int size;
7382 etype = TREE_TYPE (type);
7383 size = GET_MODE_SIZE (TYPE_MODE (etype));
7384 if (size * 2 > len)
7385 return NULL_TREE;
7386 rpart = native_interpret_expr (etype, ptr, size);
7387 if (!rpart)
7388 return NULL_TREE;
7389 ipart = native_interpret_expr (etype, ptr+size, size);
7390 if (!ipart)
7391 return NULL_TREE;
7392 return build_complex (type, rpart, ipart);
7396 /* Subroutine of native_interpret_expr. Interpret the contents of
7397 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7398 If the buffer cannot be interpreted, return NULL_TREE. */
7400 static tree
7401 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7403 tree etype, elem;
7404 int i, size, count;
7405 tree *elements;
7407 etype = TREE_TYPE (type);
7408 size = GET_MODE_SIZE (TYPE_MODE (etype));
7409 count = TYPE_VECTOR_SUBPARTS (type);
7410 if (size * count > len)
7411 return NULL_TREE;
7413 elements = XALLOCAVEC (tree, count);
7414 for (i = count - 1; i >= 0; i--)
7416 elem = native_interpret_expr (etype, ptr+(i*size), size);
7417 if (!elem)
7418 return NULL_TREE;
7419 elements[i] = elem;
7421 return build_vector (type, elements);
7425 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a constant of type TYPE. For
7427 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7428 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7429 return NULL_TREE. */
7431 tree
7432 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7434 switch (TREE_CODE (type))
7436 case INTEGER_TYPE:
7437 case ENUMERAL_TYPE:
7438 case BOOLEAN_TYPE:
7439 case POINTER_TYPE:
7440 case REFERENCE_TYPE:
7441 return native_interpret_int (type, ptr, len);
7443 case REAL_TYPE:
7444 return native_interpret_real (type, ptr, len);
7446 case FIXED_POINT_TYPE:
7447 return native_interpret_fixed (type, ptr, len);
7449 case COMPLEX_TYPE:
7450 return native_interpret_complex (type, ptr, len);
7452 case VECTOR_TYPE:
7453 return native_interpret_vector (type, ptr, len);
7455 default:
7456 return NULL_TREE;
7460 /* Returns true if we can interpret the contents of a native encoding
7461 as TYPE. */
7463 static bool
7464 can_native_interpret_type_p (tree type)
7466 switch (TREE_CODE (type))
7468 case INTEGER_TYPE:
7469 case ENUMERAL_TYPE:
7470 case BOOLEAN_TYPE:
7471 case POINTER_TYPE:
7472 case REFERENCE_TYPE:
7473 case FIXED_POINT_TYPE:
7474 case REAL_TYPE:
7475 case COMPLEX_TYPE:
7476 case VECTOR_TYPE:
7477 return true;
7478 default:
7479 return false;
7483 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7484 TYPE at compile-time. If we're unable to perform the conversion
7485 return NULL_TREE. */
7487 static tree
7488 fold_view_convert_expr (tree type, tree expr)
7490 /* We support up to 512-bit values (for V8DFmode). */
7491 unsigned char buffer[64];
7492 int len;
7494 /* Check that the host and target are sane. */
7495 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7496 return NULL_TREE;
7498 len = native_encode_expr (expr, buffer, sizeof (buffer));
7499 if (len == 0)
7500 return NULL_TREE;
7502 return native_interpret_expr (type, buffer, len);
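/* A hypothetical use of the above (illustrative only; the variable
   names are made up):

     tree f = build_real (float_type_node, dconst1);
     tree i = fold_view_convert_expr (integer_type_node, f);

   On an IEEE single-precision target, i is the INTEGER_CST 1065353216
   (0x3f800000), the bit pattern of 1.0f read as a 32-bit int. */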
7505 /* Build an expression for the address of T. Folds away INDIRECT_REF
7506 to avoid confusing the gimplify process. */
7508 tree
7509 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7511 /* The size of the object is not relevant when talking about its address. */
7512 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7513 t = TREE_OPERAND (t, 0);
7515 if (TREE_CODE (t) == INDIRECT_REF)
7517 t = TREE_OPERAND (t, 0);
7519 if (TREE_TYPE (t) != ptrtype)
7520 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7522 else if (TREE_CODE (t) == MEM_REF
7523 && integer_zerop (TREE_OPERAND (t, 1)))
7524 return TREE_OPERAND (t, 0);
7525 else if (TREE_CODE (t) == MEM_REF
7526 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7527 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7528 TREE_OPERAND (t, 0),
7529 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7530 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7532 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7534 if (TREE_TYPE (t) != ptrtype)
7535 t = fold_convert_loc (loc, ptrtype, t);
7537 else
7538 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7540 return t;
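/* For illustration: &*p and &MEM[p, 0] both fold back to p (with a
   cast if the pointer types differ), while a MEM_REF off a constant
   base such as &MEM[(T *)4096, 8] folds to the POINTER_PLUS_EXPR
   4096 p+ 8. */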
7543 /* Build an expression for the address of T. */
7545 tree
7546 build_fold_addr_expr_loc (location_t loc, tree t)
7548 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7550 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7553 /* Fold a unary expression of code CODE and type TYPE with operand
7554 OP0. Return the folded expression if folding is successful.
7555 Otherwise, return NULL_TREE. */
7557 tree
7558 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7560 tree tem;
7561 tree arg0;
7562 enum tree_code_class kind = TREE_CODE_CLASS (code);
7564 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7565 && TREE_CODE_LENGTH (code) == 1);
7567 arg0 = op0;
7568 if (arg0)
7570 if (CONVERT_EXPR_CODE_P (code)
7571 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7573 /* Don't use STRIP_NOPS, because signedness of argument type
7574 matters. */
7575 STRIP_SIGN_NOPS (arg0);
7577 else
7579 /* Strip any conversions that don't change the mode. This
7580 is safe for every expression, except for a comparison
7581 expression because its signedness is derived from its
7582 operands.
7584 Note that this is done as an internal manipulation within
7585 the constant folder, in order to find the simplest
7586 representation of the arguments so that their form can be
7587 studied. In any case, the appropriate type conversions
7588 should be put back in the tree that will get out of the
7589 constant folder. */
7590 STRIP_NOPS (arg0);
7593 if (CONSTANT_CLASS_P (arg0))
7595 tree tem = const_unop (code, type, arg0);
7596 if (tem)
7598 if (TREE_TYPE (tem) != type)
7599 tem = fold_convert_loc (loc, type, tem);
7600 return tem;
7605 tem = generic_simplify (loc, code, type, op0);
7606 if (tem)
7607 return tem;
7609 if (TREE_CODE_CLASS (code) == tcc_unary)
7611 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7612 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7613 fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc, TREE_TYPE (op0),
7615 TREE_OPERAND (arg0, 1))));
7616 else if (TREE_CODE (arg0) == COND_EXPR)
7618 tree arg01 = TREE_OPERAND (arg0, 1);
7619 tree arg02 = TREE_OPERAND (arg0, 2);
7620 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7621 arg01 = fold_build1_loc (loc, code, type,
7622 fold_convert_loc (loc,
7623 TREE_TYPE (op0), arg01));
7624 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7625 arg02 = fold_build1_loc (loc, code, type,
7626 fold_convert_loc (loc,
7627 TREE_TYPE (op0), arg02));
7628 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7629 arg01, arg02);
7631 /* If this was a conversion, and all we did was to move it
7632 inside the COND_EXPR, bring it back out. But leave it if
7633 it is a conversion from integer to integer and the
7634 result precision is no wider than a word since such a
7635 conversion is cheap and may be optimized away by combine,
7636 while it couldn't if it were outside the COND_EXPR. Then return
7637 so we don't get into an infinite recursion loop taking the
7638 conversion out and then back in. */
7640 if ((CONVERT_EXPR_CODE_P (code)
7641 || code == NON_LVALUE_EXPR)
7642 && TREE_CODE (tem) == COND_EXPR
7643 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7644 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7646 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7647 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7648 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7649 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7650 && (INTEGRAL_TYPE_P
7651 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7652 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7653 || flag_syntax_only))
7654 tem = build1_loc (loc, code, type,
7655 build3 (COND_EXPR,
7656 TREE_TYPE (TREE_OPERAND
7657 (TREE_OPERAND (tem, 1), 0)),
7658 TREE_OPERAND (tem, 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7660 TREE_OPERAND (TREE_OPERAND (tem, 2),
7661 0)));
7662 return tem;
7666 switch (code)
7668 case NON_LVALUE_EXPR:
7669 if (!maybe_lvalue_p (op0))
7670 return fold_convert_loc (loc, type, op0);
7671 return NULL_TREE;
7673 CASE_CONVERT:
7674 case FLOAT_EXPR:
7675 case FIX_TRUNC_EXPR:
7676 if (COMPARISON_CLASS_P (op0))
7678 /* If we have (type) (a CMP b) and type is an integral type, return
7679 new expression involving the new type. Canonicalize
7680 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7681 non-integral type.
7682 Do not fold the result as that would not simplify further, also
7683 folding again results in recursions. */
7684 if (TREE_CODE (type) == BOOLEAN_TYPE)
7685 return build2_loc (loc, TREE_CODE (op0), type,
7686 TREE_OPERAND (op0, 0),
7687 TREE_OPERAND (op0, 1));
7688 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7689 && TREE_CODE (type) != VECTOR_TYPE)
7690 return build3_loc (loc, COND_EXPR, type, op0,
7691 constant_boolean_node (true, type),
7692 constant_boolean_node (false, type));
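/* For illustration: (double)(a < b) has a non-integral result type,
   so it becomes a < b ? 1.0 : 0.0 here, whereas for a boolean result
   type the comparison is simply rebuilt with that type. */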
7695 /* Handle (T *)&A.B.C for A being of type T and B and C
7696 living at offset zero. This occurs frequently in
7697 C++ upcasting and then accessing the base. */
7698 if (TREE_CODE (op0) == ADDR_EXPR
7699 && POINTER_TYPE_P (type)
7700 && handled_component_p (TREE_OPERAND (op0, 0)))
7702 HOST_WIDE_INT bitsize, bitpos;
7703 tree offset;
7704 machine_mode mode;
7705 int unsignedp, reversep, volatilep;
7706 tree base
7707 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7708 &offset, &mode, &unsignedp, &reversep,
7709 &volatilep, false);
7710 /* If the reference was to a (constant) zero offset, we can use
7711 the address of the base if it has the same base type
7712 as the result type and the pointer type is unqualified. */
7713 if (! offset && bitpos == 0
7714 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7715 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7716 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7717 return fold_convert_loc (loc, type,
7718 build_fold_addr_expr_loc (loc, base));
7721 if (TREE_CODE (op0) == MODIFY_EXPR
7722 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7723 /* Detect assigning a bitfield. */
7724 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7725 && DECL_BIT_FIELD
7726 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7728 /* Don't leave an assignment inside a conversion
7729 unless assigning a bitfield. */
7730 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7731 /* First do the assignment, then return converted constant. */
7732 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7733 TREE_NO_WARNING (tem) = 1;
7734 TREE_USED (tem) = 1;
7735 return tem;
7738 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7739 constants (if x has signed type, the sign bit cannot be set
7740 in c). This folds extension into the BIT_AND_EXPR.
7741 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7742 very likely don't have maximal range for their precision and this
7743 transformation effectively doesn't preserve non-maximal ranges. */
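/* For illustration (hypothetical case, 32-bit int, 64-bit long):
   (long) (x & 0xff) becomes (long) x & 0xff, since 0xff cannot have
   the sign bit of x set and the widening therefore folds into the
   BIT_AND_EXPR. */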
7744 if (TREE_CODE (type) == INTEGER_TYPE
7745 && TREE_CODE (op0) == BIT_AND_EXPR
7746 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7748 tree and_expr = op0;
7749 tree and0 = TREE_OPERAND (and_expr, 0);
7750 tree and1 = TREE_OPERAND (and_expr, 1);
7751 int change = 0;
7753 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7754 || (TYPE_PRECISION (type)
7755 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7756 change = 1;
7757 else if (TYPE_PRECISION (TREE_TYPE (and1))
7758 <= HOST_BITS_PER_WIDE_INT
7759 && tree_fits_uhwi_p (and1))
7761 unsigned HOST_WIDE_INT cst;
7763 cst = tree_to_uhwi (and1);
7764 cst &= HOST_WIDE_INT_M1U
7765 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7766 change = (cst == 0);
7767 if (change
7768 && !flag_syntax_only
7769 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7770 == ZERO_EXTEND))
7772 tree uns = unsigned_type_for (TREE_TYPE (and0));
7773 and0 = fold_convert_loc (loc, uns, and0);
7774 and1 = fold_convert_loc (loc, uns, and1);
7777 if (change)
7779 tem = force_fit_type (type, wi::to_widest (and1), 0,
7780 TREE_OVERFLOW (and1));
7781 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7782 fold_convert_loc (loc, type, and0), tem);
7786 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7787 cast (T1)X will fold away. We assume that this happens when X itself
7788 is a cast. */
7789 if (POINTER_TYPE_P (type)
7790 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7791 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7793 tree arg00 = TREE_OPERAND (arg0, 0);
7794 tree arg01 = TREE_OPERAND (arg0, 1);
7796 return fold_build_pointer_plus_loc
7797 (loc, fold_convert_loc (loc, type, arg00), arg01);
7800 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7801 of the same precision, and X has an integer type not narrower than
7802 T1 or T2, i.e. the cast (T2)X isn't an extension. */
7803 if (INTEGRAL_TYPE_P (type)
7804 && TREE_CODE (op0) == BIT_NOT_EXPR
7805 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7806 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7807 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7809 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7810 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7811 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7812 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7813 fold_convert_loc (loc, type, tem));
7816 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7817 type of X and Y (integer types only). */
7818 if (INTEGRAL_TYPE_P (type)
7819 && TREE_CODE (op0) == MULT_EXPR
7820 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7821 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7823 /* Be careful not to introduce new overflows. */
7824 tree mult_type;
7825 if (TYPE_OVERFLOW_WRAPS (type))
7826 mult_type = type;
7827 else
7828 mult_type = unsigned_type_for (type);
7830 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7832 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7833 fold_convert_loc (loc, mult_type,
7834 TREE_OPERAND (op0, 0)),
7835 fold_convert_loc (loc, mult_type,
7836 TREE_OPERAND (op0, 1)));
7837 return fold_convert_loc (loc, type, tem);
7841 return NULL_TREE;
7843 case VIEW_CONVERT_EXPR:
7844 if (TREE_CODE (op0) == MEM_REF)
7846 tem = fold_build2_loc (loc, MEM_REF, type,
7847 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7848 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7849 return tem;
7852 return NULL_TREE;
7854 case NEGATE_EXPR:
7855 tem = fold_negate_expr (loc, arg0);
7856 if (tem)
7857 return fold_convert_loc (loc, type, tem);
7858 return NULL_TREE;
7860 case ABS_EXPR:
7861 /* Convert fabs((double)float) into (double)fabsf(float). */
7862 if (TREE_CODE (arg0) == NOP_EXPR
7863 && TREE_CODE (type) == REAL_TYPE)
7865 tree targ0 = strip_float_extensions (arg0);
7866 if (targ0 != arg0)
7867 return fold_convert_loc (loc, type,
7868 fold_build1_loc (loc, ABS_EXPR,
7869 TREE_TYPE (targ0),
7870 targ0));
7872 return NULL_TREE;
7874 case BIT_NOT_EXPR:
7875 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7876 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7877 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7878 fold_convert_loc (loc, type,
7879 TREE_OPERAND (arg0, 0)))))
7880 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7881 fold_convert_loc (loc, type,
7882 TREE_OPERAND (arg0, 1)));
7883 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7884 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7885 fold_convert_loc (loc, type,
7886 TREE_OPERAND (arg0, 1)))))
7887 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7888 fold_convert_loc (loc, type,
7889 TREE_OPERAND (arg0, 0)), tem);
7891 return NULL_TREE;
7893 case TRUTH_NOT_EXPR:
7894 /* Note that the operand of this must be an int
7895 and its values must be 0 or 1.
7896 ("true" is a fixed value perhaps depending on the language,
7897 but we don't handle values other than 1 correctly yet.) */
7898 tem = fold_truth_not_expr (loc, arg0);
7899 if (!tem)
7900 return NULL_TREE;
7901 return fold_convert_loc (loc, type, tem);
7903 case INDIRECT_REF:
7904 /* Fold *&X to X if X is an lvalue. */
7905 if (TREE_CODE (op0) == ADDR_EXPR)
7907 tree op00 = TREE_OPERAND (op0, 0);
7908 if ((TREE_CODE (op00) == VAR_DECL
7909 || TREE_CODE (op00) == PARM_DECL
7910 || TREE_CODE (op00) == RESULT_DECL)
7911 && !TREE_READONLY (op00))
7912 return op00;
7914 return NULL_TREE;
7916 default:
7917 return NULL_TREE;
7918 } /* switch (code) */
7922 /* If the operation was a conversion do _not_ mark a resulting constant
7923 with TREE_OVERFLOW if the original constant was not. These conversions
7924 have implementation defined behavior and retaining the TREE_OVERFLOW
7925 flag here would confuse later passes such as VRP. */
7926 tree
7927 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7928 tree type, tree op0)
7930 tree res = fold_unary_loc (loc, code, type, op0);
7931 if (res
7932 && TREE_CODE (res) == INTEGER_CST
7933 && TREE_CODE (op0) == INTEGER_CST
7934 && CONVERT_EXPR_CODE_P (code))
7935 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7937 return res;
7940 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7941 operands OP0 and OP1. LOC is the location of the resulting expression.
7942 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7943 Return the folded expression if folding is successful. Otherwise,
7944 return NULL_TREE. */
7945 static tree
7946 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7947 tree arg0, tree arg1, tree op0, tree op1)
7949 tree tem;
7951 /* We only do these simplifications if we are optimizing. */
7952 if (!optimize)
7953 return NULL_TREE;
7955 /* Check for things like (A || B) && (A || C). We can convert this
7956 to A || (B && C). Note that either operator can be any of the four
7957 truth and/or operations and the transformation will still be
7958 valid. Also note that we only care about order for the
7959 ANDIF and ORIF operators. If B contains side effects, this
7960 might change the truth-value of A. */
7961 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7962 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7963 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7964 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7965 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7966 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7968 tree a00 = TREE_OPERAND (arg0, 0);
7969 tree a01 = TREE_OPERAND (arg0, 1);
7970 tree a10 = TREE_OPERAND (arg1, 0);
7971 tree a11 = TREE_OPERAND (arg1, 1);
7972 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7973 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7974 && (code == TRUTH_AND_EXPR
7975 || code == TRUTH_OR_EXPR));
7977 if (operand_equal_p (a00, a10, 0))
7978 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7979 fold_build2_loc (loc, code, type, a01, a11));
7980 else if (commutative && operand_equal_p (a00, a11, 0))
7981 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7982 fold_build2_loc (loc, code, type, a01, a10));
7983 else if (commutative && operand_equal_p (a01, a10, 0))
7984 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7985 fold_build2_loc (loc, code, type, a00, a11));
7987 /* This case is tricky because we must either have commutative
7988 operators or else A10 must not have side-effects. */
7990 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7991 && operand_equal_p (a01, a11, 0))
7992 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7993 fold_build2_loc (loc, code, type, a00, a10),
7994 a01);
7997 /* See if we can build a range comparison. */
7998 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7999 return tem;
8001 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8002 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8004 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8005 if (tem)
8006 return fold_build2_loc (loc, code, type, tem, arg1);
8009 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8010 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8012 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8013 if (tem)
8014 return fold_build2_loc (loc, code, type, arg0, tem);
8017 /* Check for the possibility of merging component references. If our
8018 lhs is another similar operation, try to merge its rhs with our
8019 rhs. Then try to merge our lhs and rhs. */
8020 if (TREE_CODE (arg0) == code
8021 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8022 TREE_OPERAND (arg0, 1), arg1)))
8023 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8025 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8026 return tem;
8028 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8029 && (code == TRUTH_AND_EXPR
8030 || code == TRUTH_ANDIF_EXPR
8031 || code == TRUTH_OR_EXPR
8032 || code == TRUTH_ORIF_EXPR))
8034 enum tree_code ncode, icode;
8036 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8037 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8038 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8040 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8041 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8042 We don't want to pack more than two leaves into a non-IF AND/OR
8043 expression.
8044 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8045 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8046 If the inner right-hand side of the left-hand operand has
8047 side-effects, or isn't simple, then we can't add to it,
8048 as otherwise we might destroy the if-sequence. */
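/* For illustration: with b and c simple and free of side effects,
   (a && b) && c, i.e. TRUTH_ANDIF (TRUTH_ANDIF (a, b), c), becomes
   TRUTH_ANDIF (a, TRUTH_AND (b, c)). */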
8049 if (TREE_CODE (arg0) == icode
8050 && simple_operand_p_2 (arg1)
8051 /* Needed for sequence points to handle trappings, and
8052 side-effects. */
8053 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8055 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8056 arg1);
8057 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8058 tem);
8060 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8061 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8062 else if (TREE_CODE (arg1) == icode
8063 && simple_operand_p_2 (arg0)
8064 /* Needed for sequence points to handle trappings, and
8065 side-effects. */
8066 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8068 tem = fold_build2_loc (loc, ncode, type,
8069 arg0, TREE_OPERAND (arg1, 0));
8070 return fold_build2_loc (loc, icode, type, tem,
8071 TREE_OPERAND (arg1, 1));
8073 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8074 into (A OR B).
8075 For sequence point consistency, we need to check for trapping,
8076 and side-effects. */
8077 else if (code == icode && simple_operand_p_2 (arg0)
8078 && simple_operand_p_2 (arg1))
8079 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8082 return NULL_TREE;
8085 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8086 by changing CODE to reduce the magnitude of constants involved in
8087 ARG0 of the comparison.
8088 Returns a canonicalized comparison tree if a simplification was
8089 possible, otherwise returns NULL_TREE.
8090 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8091 valid if signed overflow is undefined. */
8093 static tree
8094 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8095 tree arg0, tree arg1,
8096 bool *strict_overflow_p)
8098 enum tree_code code0 = TREE_CODE (arg0);
8099 tree t, cst0 = NULL_TREE;
8100 int sgn0;
8102 /* Match A +- CST code arg1. We can change this only if overflow
8103 is undefined. */
8104 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8105 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8106 /* In principle pointers also have undefined overflow behavior,
8107 but that causes problems elsewhere. */
8108 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8109 && (code0 == MINUS_EXPR
8110 || code0 == PLUS_EXPR)
8111 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8112 return NULL_TREE;
8114 /* Identify the constant in arg0 and its sign. */
8115 cst0 = TREE_OPERAND (arg0, 1);
8116 sgn0 = tree_int_cst_sgn (cst0);
8118 /* Overflowed constants and zero will cause problems. */
8119 if (integer_zerop (cst0)
8120 || TREE_OVERFLOW (cst0))
8121 return NULL_TREE;
8123 /* See if we can reduce the magnitude of the constant in
8124 arg0 by changing the comparison code. */
8125 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8126 if (code == LT_EXPR
8127 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8128 code = LE_EXPR;
8129 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8130 else if (code == GT_EXPR
8131 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8132 code = GE_EXPR;
8133 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8134 else if (code == LE_EXPR
8135 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8136 code = LT_EXPR;
8137 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8138 else if (code == GE_EXPR
8139 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8140 code = GT_EXPR;
8141 else
8142 return NULL_TREE;
8143 *strict_overflow_p = true;
8145 /* Now build the constant reduced in magnitude. But not if that
8146 would produce one outside of its type's range. */
8147 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8148 && ((sgn0 == 1
8149 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8150 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8151 || (sgn0 == -1
8152 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8153 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8154 return NULL_TREE;
8156 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8157 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8158 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8159 t = fold_convert (TREE_TYPE (arg1), t);
8161 return fold_build2_loc (loc, code, type, t, arg1);
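/* For illustration (hypothetical operands, signed overflow assumed
   undefined): X - 5 < Y is canonicalized to X - 4 <= Y, reducing the
   magnitude of the constant from 5 to 4. */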
8164 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8165 overflow further. Try to decrease the magnitude of constants involved
8166 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8167 and put sole constants at the second argument position.
8168 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8170 static tree
8171 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8172 tree arg0, tree arg1)
8174 tree t;
8175 bool strict_overflow_p;
8176 const char * const warnmsg = G_("assuming signed overflow does not occur "
8177 "when reducing constant in comparison");
8179 /* Try canonicalization by simplifying arg0. */
8180 strict_overflow_p = false;
8181 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8182 &strict_overflow_p);
8183 if (t)
8185 if (strict_overflow_p)
8186 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8187 return t;
8190 /* Try canonicalization by simplifying arg1 using the swapped
8191 comparison. */
8192 code = swap_tree_comparison (code);
8193 strict_overflow_p = false;
8194 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8195 &strict_overflow_p);
8196 if (t && strict_overflow_p)
8197 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8198 return t;
8201 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8202 space. This is used to avoid issuing overflow warnings for
8203 expressions like &p->x which cannot wrap. */
8205 static bool
8206 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8208 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8209 return true;
8211 if (bitpos < 0)
8212 return true;
8214 wide_int wi_offset;
8215 int precision = TYPE_PRECISION (TREE_TYPE (base));
8216 if (offset == NULL_TREE)
8217 wi_offset = wi::zero (precision);
8218 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8219 return true;
8220 else
8221 wi_offset = offset;
8223 bool overflow;
8224 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8225 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8226 if (overflow)
8227 return true;
8229 if (!wi::fits_uhwi_p (total))
8230 return true;
8232 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8233 if (size <= 0)
8234 return true;
8236 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8237 array. */
8238 if (TREE_CODE (base) == ADDR_EXPR)
8240 HOST_WIDE_INT base_size;
8242 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8243 if (base_size > 0 && size < base_size)
8244 size = base_size;
8247 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
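/* For illustration: for char a[16], the address &a p+ 20 gives
   TOTAL == 20 against SIZE == 16, so the function conservatively
   reports that the pointer may wrap. */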
8250 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8251 of sizetype kind. This makes sure to properly sign-extend the
8252 constant. */
8254 static HOST_WIDE_INT
8255 size_low_cst (const_tree t)
8257 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8258 int prec = TYPE_PRECISION (TREE_TYPE (t));
8259 if (prec < HOST_BITS_PER_WIDE_INT)
8260 return sext_hwi (w, prec);
8261 return w;
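/* For illustration (32-bit sizetype, 64-bit host): the constant
   0xfffffffc has precision 32 and is therefore sign-extended to the
   HOST_WIDE_INT -4. */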
8264 /* Subroutine of fold_binary. This routine performs all of the
8265 transformations that are common to the equality/inequality
8266 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8267 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8268 fold_binary should call fold_binary. Fold a comparison with
8269 tree code CODE and type TYPE with operands OP0 and OP1. Return
8270 the folded comparison or NULL_TREE. */
8272 static tree
8273 fold_comparison (location_t loc, enum tree_code code, tree type,
8274 tree op0, tree op1)
8276 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8277 tree arg0, arg1, tem;
8279 arg0 = op0;
8280 arg1 = op1;
8282 STRIP_SIGN_NOPS (arg0);
8283 STRIP_SIGN_NOPS (arg1);
8285 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
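/* For illustration (signed X, overflow assumed undefined):
   X + 10 < 30 becomes X < 20; if computing C2 -+ C1 overflows, the
   comparison is instead resolved to a constant below. */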
8286 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8287 && (equality_code
8288 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8289 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8291 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8292 && TREE_CODE (arg1) == INTEGER_CST
8293 && !TREE_OVERFLOW (arg1))
8295 const enum tree_code
8296 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8297 tree const1 = TREE_OPERAND (arg0, 1);
8298 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8299 tree variable = TREE_OPERAND (arg0, 0);
8300 tree new_const = int_const_binop (reverse_op, const2, const1);
8302 /* If the constant operation overflowed, this can be
8303 simplified as a comparison against INT_MAX/INT_MIN. */
8304 if (TREE_OVERFLOW (new_const)
8305 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8307 int const1_sgn = tree_int_cst_sgn (const1);
8308 enum tree_code code2 = code;
8310 /* Get the sign of the constant on the lhs if the
8311 operation were VARIABLE + CONST1. */
8312 if (TREE_CODE (arg0) == MINUS_EXPR)
8313 const1_sgn = -const1_sgn;
8315 /* The sign of the constant determines if we overflowed
8316 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8317 Canonicalize to the INT_MIN overflow by swapping the comparison
8318 if necessary. */
8319 if (const1_sgn == -1)
8320 code2 = swap_tree_comparison (code);
8322 /* We can now look at the canonicalized case
8323 VARIABLE + 1 CODE2 INT_MIN
8324 and decide on the result. */
8325 switch (code2)
8327 case EQ_EXPR:
8328 case LT_EXPR:
8329 case LE_EXPR:
8330 return
8331 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8333 case NE_EXPR:
8334 case GE_EXPR:
8335 case GT_EXPR:
8336 return
8337 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8339 default:
8340 gcc_unreachable ();
8343 else
8345 if (!equality_code)
8346 fold_overflow_warning ("assuming signed overflow does not occur "
8347 "when changing X +- C1 cmp C2 to "
8348 "X cmp C2 -+ C1",
8349 WARN_STRICT_OVERFLOW_COMPARISON);
8350 return fold_build2_loc (loc, code, type, variable, new_const);
8354 /* For comparisons of pointers we can decompose them into a compile-time
8355 comparison of the base objects and the offsets into the object.
8356 This requires at least one operand being an ADDR_EXPR or a
8357 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8358 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8359 && (TREE_CODE (arg0) == ADDR_EXPR
8360 || TREE_CODE (arg1) == ADDR_EXPR
8361 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8362 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8364 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8365 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8366 machine_mode mode;
8367 int volatilep, reversep, unsignedp;
8368 bool indirect_base0 = false, indirect_base1 = false;
8370 /* Get base and offset for the access. Strip ADDR_EXPR for
8371 get_inner_reference, but put it back by stripping INDIRECT_REF
8372 off the base object if possible. indirect_baseN will be true
8373 if baseN is not an address but refers to the object itself. */
8374 base0 = arg0;
8375 if (TREE_CODE (arg0) == ADDR_EXPR)
8377 base0
8378 = get_inner_reference (TREE_OPERAND (arg0, 0),
8379 &bitsize, &bitpos0, &offset0, &mode,
8380 &unsignedp, &reversep, &volatilep, false);
8381 if (TREE_CODE (base0) == INDIRECT_REF)
8382 base0 = TREE_OPERAND (base0, 0);
8383 else
8384 indirect_base0 = true;
8386 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8388 base0 = TREE_OPERAND (arg0, 0);
8389 STRIP_SIGN_NOPS (base0);
8390 if (TREE_CODE (base0) == ADDR_EXPR)
8392 base0 = TREE_OPERAND (base0, 0);
8393 indirect_base0 = true;
8395 offset0 = TREE_OPERAND (arg0, 1);
8396 if (tree_fits_shwi_p (offset0))
8398 HOST_WIDE_INT off = size_low_cst (offset0);
8399 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8400 * BITS_PER_UNIT)
8401 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8403 bitpos0 = off * BITS_PER_UNIT;
8404 offset0 = NULL_TREE;
8409 base1 = arg1;
8410 if (TREE_CODE (arg1) == ADDR_EXPR)
8412 base1
8413 = get_inner_reference (TREE_OPERAND (arg1, 0),
8414 &bitsize, &bitpos1, &offset1, &mode,
8415 &unsignedp, &reversep, &volatilep, false);
8416 if (TREE_CODE (base1) == INDIRECT_REF)
8417 base1 = TREE_OPERAND (base1, 0);
8418 else
8419 indirect_base1 = true;
8421 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8423 base1 = TREE_OPERAND (arg1, 0);
8424 STRIP_SIGN_NOPS (base1);
8425 if (TREE_CODE (base1) == ADDR_EXPR)
8427 base1 = TREE_OPERAND (base1, 0);
8428 indirect_base1 = true;
8430 offset1 = TREE_OPERAND (arg1, 1);
8431 if (tree_fits_shwi_p (offset1))
8433 HOST_WIDE_INT off = size_low_cst (offset1);
8434 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8435 * BITS_PER_UNIT)
8436 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8438 bitpos1 = off * BITS_PER_UNIT;
8439 offset1 = NULL_TREE;
8444 /* If we have equivalent bases we might be able to simplify. */
8445 if (indirect_base0 == indirect_base1
8446 && operand_equal_p (base0, base1,
8447 indirect_base0 ? OEP_ADDRESS_OF : 0))
8449 /* We can fold this expression to a constant if the non-constant
8450 offset parts are equal. */
8451 if ((offset0 == offset1
8452 || (offset0 && offset1
8453 && operand_equal_p (offset0, offset1, 0)))
8454 && (code == EQ_EXPR
8455 || code == NE_EXPR
8456 || (indirect_base0 && DECL_P (base0))
8457 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8460 if (!equality_code
8461 && bitpos0 != bitpos1
8462 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8463 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8464 fold_overflow_warning (("assuming pointer wraparound does not "
8465 "occur when comparing P +- C1 with "
8466 "P +- C2"),
8467 WARN_STRICT_OVERFLOW_CONDITIONAL);
8469 switch (code)
8471 case EQ_EXPR:
8472 return constant_boolean_node (bitpos0 == bitpos1, type);
8473 case NE_EXPR:
8474 return constant_boolean_node (bitpos0 != bitpos1, type);
8475 case LT_EXPR:
8476 return constant_boolean_node (bitpos0 < bitpos1, type);
8477 case LE_EXPR:
8478 return constant_boolean_node (bitpos0 <= bitpos1, type);
8479 case GE_EXPR:
8480 return constant_boolean_node (bitpos0 >= bitpos1, type);
8481 case GT_EXPR:
8482 return constant_boolean_node (bitpos0 > bitpos1, type);
8483 default:;
8486 /* We can simplify the comparison to a comparison of the variable
8487 offset parts if the constant offset parts are equal.
8488 Be careful to use signed sizetype here because otherwise we
8489 mess with array offsets in the wrong way. This is possible
8490 because pointer arithmetic is restricted to remain within an
8491 object and overflow on pointer differences is undefined as of
8492 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8493 else if (bitpos0 == bitpos1
8494 && (equality_code
8495 || (indirect_base0 && DECL_P (base0))
8496 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8498 /* By converting to signed sizetype we cover middle-end pointer
8499 arithmetic which operates on unsigned pointer types of size
8500 type size and ARRAY_REF offsets which are properly sign or
8501 zero extended from their type in case it is narrower than
8502 sizetype. */
8503 if (offset0 == NULL_TREE)
8504 offset0 = build_int_cst (ssizetype, 0);
8505 else
8506 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8507 if (offset1 == NULL_TREE)
8508 offset1 = build_int_cst (ssizetype, 0);
8509 else
8510 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8512 if (!equality_code
8513 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8514 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8515 fold_overflow_warning (("assuming pointer wraparound does not "
8516 "occur when comparing P +- C1 with "
8517 "P +- C2"),
8518 WARN_STRICT_OVERFLOW_COMPARISON);
8520 return fold_build2_loc (loc, code, type, offset0, offset1);
8523 /* For equal offsets we can simplify to a comparison of the
8524 base addresses. */
8525 else if (bitpos0 == bitpos1
8526 && (indirect_base0
8527 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8528 && (indirect_base1
8529 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8530 && ((offset0 == offset1)
8531 || (offset0 && offset1
8532 && operand_equal_p (offset0, offset1, 0))))
8534 if (indirect_base0)
8535 base0 = build_fold_addr_expr_loc (loc, base0);
8536 if (indirect_base1)
8537 base1 = build_fold_addr_expr_loc (loc, base1);
8538 return fold_build2_loc (loc, code, type, base0, base1);
8542 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8543 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8544 the resulting offset is smaller in absolute value than the
8545 original one and has the same sign. */
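/* For illustration (signed overflow assumed undefined):
   X + 7 < Y + 3 becomes X + 4 < Y, since the combined constant 4 is
   smaller in magnitude than 7 and has the same sign. */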
8546 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8547 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8548 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8549 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8550 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8551 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8552 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8553 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8555 tree const1 = TREE_OPERAND (arg0, 1);
8556 tree const2 = TREE_OPERAND (arg1, 1);
8557 tree variable1 = TREE_OPERAND (arg0, 0);
8558 tree variable2 = TREE_OPERAND (arg1, 0);
8559 tree cst;
8560 const char * const warnmsg = G_("assuming signed overflow does not "
8561 "occur when combining constants around "
8562 "a comparison");
8564 /* Put the constant on the side where it doesn't overflow and is
8565 of lower absolute value and of the same sign as before. */
8566 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8567 ? MINUS_EXPR : PLUS_EXPR,
8568 const2, const1);
8569 if (!TREE_OVERFLOW (cst)
8570 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8571 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8573 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8574 return fold_build2_loc (loc, code, type,
8575 variable1,
8576 fold_build2_loc (loc, TREE_CODE (arg1),
8577 TREE_TYPE (arg1),
8578 variable2, cst));
8581 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8582 ? MINUS_EXPR : PLUS_EXPR,
8583 const1, const2);
8584 if (!TREE_OVERFLOW (cst)
8585 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8586 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8588 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8589 return fold_build2_loc (loc, code, type,
8590 fold_build2_loc (loc, TREE_CODE (arg0),
8591 TREE_TYPE (arg0),
8592 variable1, cst),
8593 variable2);
8597 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8598 if (tem)
8599 return tem;
8601 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8602 constant, we can simplify it. */
8603 if (TREE_CODE (arg1) == INTEGER_CST
8604 && (TREE_CODE (arg0) == MIN_EXPR
8605 || TREE_CODE (arg0) == MAX_EXPR)
8606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8608 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8609 if (tem)
8610 return tem;
8613 /* If we are comparing an expression that just has comparisons
8614 of two integer values, arithmetic expressions of those comparisons,
8615 and constants, we can simplify it. There are only three cases
8616 to check: the two values can either be equal, the first can be
8617 greater, or the second can be greater. Fold the expression for
8618 those three values. Since each value must be 0 or 1, we have
8619 eight possibilities, each of which corresponds to the constant 0
8620 or 1 or one of the six possible comparisons.
8622 This handles common cases like (a > b) == 0 but also handles
8623 expressions like ((x > y) - (y > x)) > 0, which supposedly
8624 occur in macroized code. */
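/* For illustration: for ((x > y) - (y > x)) > 0 the three trial
   substitutions give high_result 1, equal_result 0 and low_result 0,
   i.e. mask 4, so the whole expression folds to x > y. */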
8626 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8628 tree cval1 = 0, cval2 = 0;
8629 int save_p = 0;
8631 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8632 /* Don't handle degenerate cases here; they should already
8633 have been handled anyway. */
8634 && cval1 != 0 && cval2 != 0
8635 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8636 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8637 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8638 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8639 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8640 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8641 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8643 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8644 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8646 /* We can't just pass T to eval_subst in case cval1 or cval2
8647 was the same as ARG1. */
8649 tree high_result
8650 = fold_build2_loc (loc, code, type,
8651 eval_subst (loc, arg0, cval1, maxval,
8652 cval2, minval),
8653 arg1);
8654 tree equal_result
8655 = fold_build2_loc (loc, code, type,
8656 eval_subst (loc, arg0, cval1, maxval,
8657 cval2, maxval),
8658 arg1);
8659 tree low_result
8660 = fold_build2_loc (loc, code, type,
8661 eval_subst (loc, arg0, cval1, minval,
8662 cval2, maxval),
8663 arg1);
8665 /* All three of these results should be 0 or 1. Confirm they are.
8666 Then use those values to select the proper code to use. */
8668 if (TREE_CODE (high_result) == INTEGER_CST
8669 && TREE_CODE (equal_result) == INTEGER_CST
8670 && TREE_CODE (low_result) == INTEGER_CST)
8672 /* Make a 3-bit mask with the high-order bit being the
8673 value for `>', the next for `=', and the low for `<'. */
8674 switch ((integer_onep (high_result) * 4)
8675 + (integer_onep (equal_result) * 2)
8676 + integer_onep (low_result))
8678 case 0:
8679 /* Always false. */
8680 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8681 case 1:
8682 code = LT_EXPR;
8683 break;
8684 case 2:
8685 code = EQ_EXPR;
8686 break;
8687 case 3:
8688 code = LE_EXPR;
8689 break;
8690 case 4:
8691 code = GT_EXPR;
8692 break;
8693 case 5:
8694 code = NE_EXPR;
8695 break;
8696 case 6:
8697 code = GE_EXPR;
8698 break;
8699 case 7:
8700 /* Always true. */
8701 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8704 if (save_p)
8706 tem = save_expr (build2 (code, type, cval1, cval2));
8707 SET_EXPR_LOCATION (tem, loc);
8708 return tem;
8710 return fold_build2_loc (loc, code, type, cval1, cval2);
8715 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8716 into a single range test. */
8717 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8718 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8719 && TREE_CODE (arg1) == INTEGER_CST
8720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8721 && !integer_zerop (TREE_OPERAND (arg0, 1))
8722 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8723 && !TREE_OVERFLOW (arg1))
8725 tem = fold_div_compare (loc, code, type, arg0, arg1);
8726 if (tem != NULL_TREE)
8727 return tem;
8730 return NULL_TREE;
8734 /* Subroutine of fold_binary. Optimize complex multiplications of the
8735 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8736 argument EXPR represents the expression "z" of type TYPE. */
8738 static tree
8739 fold_mult_zconjz (location_t loc, tree type, tree expr)
8741 tree itype = TREE_TYPE (type);
8742 tree rpart, ipart, tem;
8744 if (TREE_CODE (expr) == COMPLEX_EXPR)
8746 rpart = TREE_OPERAND (expr, 0);
8747 ipart = TREE_OPERAND (expr, 1);
8749 else if (TREE_CODE (expr) == COMPLEX_CST)
8751 rpart = TREE_REALPART (expr);
8752 ipart = TREE_IMAGPART (expr);
8754 else
8756 expr = save_expr (expr);
8757 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8758 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8761 rpart = save_expr (rpart);
8762 ipart = save_expr (ipart);
8763 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8764 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8765 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8766 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8767 build_zero_cst (itype));
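/* For illustration: with z = 3 + 4i, z * conj(z) folds to
   3*3 + 4*4 = 25, i.e. the complex constant 25 + 0i. */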
8771 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8772 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8774 static bool
8775 vec_cst_ctor_to_array (tree arg, tree *elts)
8777 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8779 if (TREE_CODE (arg) == VECTOR_CST)
8781 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8782 elts[i] = VECTOR_CST_ELT (arg, i);
8784 else if (TREE_CODE (arg) == CONSTRUCTOR)
8786 constructor_elt *elt;
8788 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8789 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8790 return false;
8791 else
8792 elts[i] = elt->value;
8794 else
8795 return false;
8796 for (; i < nelts; i++)
8797 elts[i]
8798 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8799 return true;
8802 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8803 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8804 NULL_TREE otherwise. */
8806 static tree
8807 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8809 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8810 tree *elts;
8811 bool need_ctor = false;
8813 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8814 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8815 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8816 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8817 return NULL_TREE;
8819 elts = XALLOCAVEC (tree, nelts * 3);
8820 if (!vec_cst_ctor_to_array (arg0, elts)
8821 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8822 return NULL_TREE;
8824 for (i = 0; i < nelts; i++)
8826 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8827 need_ctor = true;
8828 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8831 if (need_ctor)
8833 vec<constructor_elt, va_gc> *v;
8834 vec_alloc (v, nelts);
8835 for (i = 0; i < nelts; i++)
8836 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8837 return build_constructor (type, v);
8839 else
8840 return build_vector (type, &elts[2 * nelts]);
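/* For illustration (hypothetical 4-element vectors): with
   ARG0 = { a, b, c, d }, ARG1 = { e, f, g, h } and SEL = { 0, 4, 1, 5 },
   the result is { a, e, b, f }; selector values below NELTS pick from
   ARG0 and the rest from ARG1. */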
8843 /* Try to fold a pointer difference of type TYPE between two address expressions of
8844 array references AREF0 and AREF1 using location LOC. Return a
8845 simplified expression for the difference or NULL_TREE. */
8847 static tree
8848 fold_addr_of_array_ref_difference (location_t loc, tree type,
8849 tree aref0, tree aref1)
8851 tree base0 = TREE_OPERAND (aref0, 0);
8852 tree base1 = TREE_OPERAND (aref1, 0);
8853 tree base_offset = build_int_cst (type, 0);
8855 /* If the bases are array references as well, recurse. If the bases
8856 are pointer indirections compute the difference of the pointers.
8857 If the bases are equal, we are set. */
8858 if ((TREE_CODE (base0) == ARRAY_REF
8859 && TREE_CODE (base1) == ARRAY_REF
8860 && (base_offset
8861 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8862 || (INDIRECT_REF_P (base0)
8863 && INDIRECT_REF_P (base1)
8864 && (base_offset
8865 = fold_binary_loc (loc, MINUS_EXPR, type,
8866 fold_convert (type, TREE_OPERAND (base0, 0)),
8867 fold_convert (type,
8868 TREE_OPERAND (base1, 0)))))
8869 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8871 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8872 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8873 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8874 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8875 return fold_build2_loc (loc, PLUS_EXPR, type,
8876 base_offset,
8877 fold_build2_loc (loc, MULT_EXPR, type,
8878 diff, esz));
8880 return NULL_TREE;
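/* For illustration: &a[i] - &a[j] over 4-byte elements folds to
   0 + (i - j) * 4; for nested references such as &a[i][k] - &a[j][l]
   the recursion on the outer ARRAY_REF bases supplies the base
   offset. */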
8883 /* If the real or vector real constant CST of type TYPE has an exact
8884 inverse, return it, else return NULL. */
8886 tree
8887 exact_inverse (tree type, tree cst)
8889 REAL_VALUE_TYPE r;
8890 tree unit_type, *elts;
8891 machine_mode mode;
8892 unsigned vec_nelts, i;
8894 switch (TREE_CODE (cst))
8896 case REAL_CST:
8897 r = TREE_REAL_CST (cst);
8899 if (exact_real_inverse (TYPE_MODE (type), &r))
8900 return build_real (type, r);
8902 return NULL_TREE;
8904 case VECTOR_CST:
8905 vec_nelts = VECTOR_CST_NELTS (cst);
8906 elts = XALLOCAVEC (tree, vec_nelts);
8907 unit_type = TREE_TYPE (type);
8908 mode = TYPE_MODE (unit_type);
8910 for (i = 0; i < vec_nelts; i++)
8912 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8913 if (!exact_real_inverse (mode, &r))
8914 return NULL_TREE;
8915 elts[i] = build_real (unit_type, r);
8918 return build_vector (type, elts);
8920 default:
8921 return NULL_TREE;
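/* For illustration: 0.25 has the exact inverse 4.0, while 0.3 does
   not (1/0.3 is not exactly representable), so NULL_TREE is returned
   for the latter; a VECTOR_CST needs an exact inverse for every
   element. */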
8925 /* Mask out the tz least significant bits of X of type TYPE where
8926 tz is the number of trailing zeroes in Y. */
8927 static wide_int
8928 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8930 int tz = wi::ctz (y);
8931 if (tz > 0)
8932 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8933 return x;
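/* For illustration: with Y == 24 (binary 11000, three trailing
   zeros), the three least significant bits of X are cleared, so
   X == 15 yields 8. */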
8936 /* Return true when T is an address and is known to be nonzero.
8937 For floating point we further ensure that T is not denormal.
8938 Similar logic is present in nonzero_address in rtlanal.h.
8940 If the return value is based on the assumption that signed overflow
8941 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8942 change *STRICT_OVERFLOW_P. */
8944 static bool
8945 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8947 tree type = TREE_TYPE (t);
8948 enum tree_code code;
8950 /* Doing something useful for floating point would need more work. */
8951 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8952 return false;
8954 code = TREE_CODE (t);
8955 switch (TREE_CODE_CLASS (code))
8957 case tcc_unary:
8958 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8959 strict_overflow_p);
8960 case tcc_binary:
8961 case tcc_comparison:
8962 return tree_binary_nonzero_warnv_p (code, type,
8963 TREE_OPERAND (t, 0),
8964 TREE_OPERAND (t, 1),
8965 strict_overflow_p);
8966 case tcc_constant:
8967 case tcc_declaration:
8968 case tcc_reference:
8969 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8971 default:
8972 break;
8975 switch (code)
8977 case TRUTH_NOT_EXPR:
8978 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8979 strict_overflow_p);
8981 case TRUTH_AND_EXPR:
8982 case TRUTH_OR_EXPR:
8983 case TRUTH_XOR_EXPR:
8984 return tree_binary_nonzero_warnv_p (code, type,
8985 TREE_OPERAND (t, 0),
8986 TREE_OPERAND (t, 1),
8987 strict_overflow_p);
8989 case COND_EXPR:
8990 case CONSTRUCTOR:
8991 case OBJ_TYPE_REF:
8992 case ASSERT_EXPR:
8993 case ADDR_EXPR:
8994 case WITH_SIZE_EXPR:
8995 case SSA_NAME:
8996 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8998 case COMPOUND_EXPR:
8999 case MODIFY_EXPR:
9000 case BIND_EXPR:
9001 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9002 strict_overflow_p);
9004 case SAVE_EXPR:
9005 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9006 strict_overflow_p);
9008 case CALL_EXPR:
9010 tree fndecl = get_callee_fndecl (t);
9011 if (!fndecl) return false;
9012 if (flag_delete_null_pointer_checks && !flag_check_new
9013 && DECL_IS_OPERATOR_NEW (fndecl)
9014 && !TREE_NOTHROW (fndecl))
9015 return true;
9016 if (flag_delete_null_pointer_checks
9017 && lookup_attribute ("returns_nonnull",
9018 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9019 return true;
9020 return alloca_call_p (t);
9023 default:
9024 break;
9026 return false;
9029 /* Return true when T is an address and is known to be nonzero.
9030 Handle warnings about undefined signed overflow. */
9032 static bool
9033 tree_expr_nonzero_p (tree t)
9035 bool ret, strict_overflow_p;
9037 strict_overflow_p = false;
9038 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9039 if (strict_overflow_p)
9040 fold_overflow_warning (("assuming signed overflow does not occur when "
9041 "determining that expression is always "
9042 "non-zero"),
9043 WARN_STRICT_OVERFLOW_MISC);
9044 return ret;
9047 /* Fold a binary expression of code CODE and type TYPE with operands
9048 OP0 and OP1. LOC is the location of the resulting expression.
9049 Return the folded expression if folding is successful. Otherwise,
9050 return NULL_TREE. */
9052 tree
9053 fold_binary_loc (location_t loc,
9054 enum tree_code code, tree type, tree op0, tree op1)
9056 enum tree_code_class kind = TREE_CODE_CLASS (code);
9057 tree arg0, arg1, tem;
9058 tree t1 = NULL_TREE;
9059 bool strict_overflow_p;
9060 unsigned int prec;
9062 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9063 && TREE_CODE_LENGTH (code) == 2
9064 && op0 != NULL_TREE
9065 && op1 != NULL_TREE);
9067 arg0 = op0;
9068 arg1 = op1;
9070 /* Strip any conversions that don't change the mode. This is
9071 safe for every expression, except for a comparison expression
9072 because its signedness is derived from its operands. So, in
9073 the latter case, only strip conversions that don't change the
9074 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9075 preserved.
9077 Note that this is done as an internal manipulation within the
9078 constant folder, in order to find the simplest representation
9079 of the arguments so that their form can be studied. In any
9080 cases, the appropriate type conversions should be put back in
9081 the tree that will get out of the constant folder. */
9083 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9085 STRIP_SIGN_NOPS (arg0);
9086 STRIP_SIGN_NOPS (arg1);
9088 else
9090 STRIP_NOPS (arg0);
9091 STRIP_NOPS (arg1);
9094 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9095 constant but we can't do arithmetic on them. */
9096 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9098 tem = const_binop (code, type, arg0, arg1);
9099 if (tem != NULL_TREE)
9101 if (TREE_TYPE (tem) != type)
9102 tem = fold_convert_loc (loc, type, tem);
9103 return tem;
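/* E.g. 3 + 4 is folded to 7 here, while the address of a static
   variable plus a constant is not: an ADDR_EXPR can be TREE_CONSTANT
   without being CONSTANT_CLASS_P (illustrative note).  */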
9107 /* If this is a commutative operation, and ARG0 is a constant, move it
9108 to ARG1 to reduce the number of tests below. */
9109 if (commutative_tree_code (code)
9110 && tree_swap_operands_p (arg0, arg1, true))
9111 return fold_build2_loc (loc, code, type, op1, op0);
9113 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9114 to ARG1 to reduce the number of tests below. */
9115 if (kind == tcc_comparison
9116 && tree_swap_operands_p (arg0, arg1, true))
9117 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9119 tem = generic_simplify (loc, code, type, op0, op1);
9120 if (tem)
9121 return tem;
9123 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9125 First check for cases where an arithmetic operation is applied to a
9126 compound, conditional, or comparison operation. Push the arithmetic
9127 operation inside the compound or conditional to see if any folding
9128 can then be done. Convert comparison to conditional for this purpose.
9129 This also optimizes non-constant cases that used to be done in
9130 expand_expr.
9132 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9133 one of the operands is a truth value and the other is either a truth
9134 value or a BIT_AND_EXPR with the constant 1. In that case, the
9135 code below would make the expression more complex. Change it to a
9136 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9137 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9139 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9140 || code == EQ_EXPR || code == NE_EXPR)
9141 && TREE_CODE (type) != VECTOR_TYPE
9142 && ((truth_value_p (TREE_CODE (arg0))
9143 && (truth_value_p (TREE_CODE (arg1))
9144 || (TREE_CODE (arg1) == BIT_AND_EXPR
9145 && integer_onep (TREE_OPERAND (arg1, 1)))))
9146 || (truth_value_p (TREE_CODE (arg1))
9147 && (truth_value_p (TREE_CODE (arg0))
9148 || (TREE_CODE (arg0) == BIT_AND_EXPR
9149 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9151 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9152 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9153 : TRUTH_XOR_EXPR,
9154 boolean_type_node,
9155 fold_convert_loc (loc, boolean_type_node, arg0),
9156 fold_convert_loc (loc, boolean_type_node, arg1));
9158 if (code == EQ_EXPR)
9159 tem = invert_truthvalue_loc (loc, tem);
9161 return fold_convert_loc (loc, type, tem);
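/* As an illustration (a sketch, for int a, b, c, d): the tree for
   (a < b) & (c < d) becomes TRUTH_AND_EXPR of the boolean-converted
   comparisons, and (a < b) == (c < d) becomes the inversion of their
   TRUTH_XOR_EXPR.  */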
9164 if (TREE_CODE_CLASS (code) == tcc_binary
9165 || TREE_CODE_CLASS (code) == tcc_comparison)
9167 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9169 tem = fold_build2_loc (loc, code, type,
9170 fold_convert_loc (loc, TREE_TYPE (op0),
9171 TREE_OPERAND (arg0, 1)), op1);
9172 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9173 tem);
9175 if (TREE_CODE (arg1) == COMPOUND_EXPR
9176 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9178 tem = fold_build2_loc (loc, code, type, op0,
9179 fold_convert_loc (loc, TREE_TYPE (op1),
9180 TREE_OPERAND (arg1, 1)));
9181 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9182 tem);
9185 if (TREE_CODE (arg0) == COND_EXPR
9186 || TREE_CODE (arg0) == VEC_COND_EXPR
9187 || COMPARISON_CLASS_P (arg0))
9189 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9190 arg0, arg1,
9191 /*cond_first_p=*/1);
9192 if (tem != NULL_TREE)
9193 return tem;
9196 if (TREE_CODE (arg1) == COND_EXPR
9197 || TREE_CODE (arg1) == VEC_COND_EXPR
9198 || COMPARISON_CLASS_P (arg1))
9200 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9201 arg1, arg0,
9202 /*cond_first_p=*/0);
9203 if (tem != NULL_TREE)
9204 return tem;
9208 switch (code)
9210 case MEM_REF:
9211 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9212 if (TREE_CODE (arg0) == ADDR_EXPR
9213 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9215 tree iref = TREE_OPERAND (arg0, 0);
9216 return fold_build2 (MEM_REF, type,
9217 TREE_OPERAND (iref, 0),
9218 int_const_binop (PLUS_EXPR, arg1,
9219 TREE_OPERAND (iref, 1)));
9222 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9223 if (TREE_CODE (arg0) == ADDR_EXPR
9224 && handled_component_p (TREE_OPERAND (arg0, 0)))
9226 tree base;
9227 HOST_WIDE_INT coffset;
9228 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9229 &coffset);
9230 if (!base)
9231 return NULL_TREE;
9232 return fold_build2 (MEM_REF, type,
9233 build_fold_addr_expr (base),
9234 int_const_binop (PLUS_EXPR, arg1,
9235 size_int (coffset)));
9238 return NULL_TREE;
9240 case POINTER_PLUS_EXPR:
9241 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9242 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9243 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9244 return fold_convert_loc (loc, type,
9245 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9246 fold_convert_loc (loc, sizetype,
9247 arg1),
9248 fold_convert_loc (loc, sizetype,
9249 arg0)));
9251 return NULL_TREE;
9253 case PLUS_EXPR:
9254 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9256 /* X + (X / CST) * -CST is X % CST. */
9257 if (TREE_CODE (arg1) == MULT_EXPR
9258 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9259 && operand_equal_p (arg0,
9260 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9262 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9263 tree cst1 = TREE_OPERAND (arg1, 1);
9264 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9265 cst1, cst0);
9266 if (sum && integer_zerop (sum))
9267 return fold_convert_loc (loc, type,
9268 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9269 TREE_TYPE (arg0), arg0,
9270 cst0));
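/* E.g. for signed int x, x + (x / 16) * -16 folds to x % 16, because
   the constants 16 and -16 sum to zero (illustrative).  */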
9274 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 the same, or one
9275 of them equal to 1. Make sure the type is not saturating and has the signedness of
9276 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9277 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9278 if ((TREE_CODE (arg0) == MULT_EXPR
9279 || TREE_CODE (arg1) == MULT_EXPR)
9280 && !TYPE_SATURATING (type)
9281 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9282 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9283 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9285 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9286 if (tem)
9287 return tem;
9290 if (! FLOAT_TYPE_P (type))
9292 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9293 (plus (plus (mult) (mult)) (foo)) so that we can
9294 take advantage of the factoring cases below. */
9295 if (ANY_INTEGRAL_TYPE_P (type)
9296 && TYPE_OVERFLOW_WRAPS (type)
9297 && (((TREE_CODE (arg0) == PLUS_EXPR
9298 || TREE_CODE (arg0) == MINUS_EXPR)
9299 && TREE_CODE (arg1) == MULT_EXPR)
9300 || ((TREE_CODE (arg1) == PLUS_EXPR
9301 || TREE_CODE (arg1) == MINUS_EXPR)
9302 && TREE_CODE (arg0) == MULT_EXPR)))
9304 tree parg0, parg1, parg, marg;
9305 enum tree_code pcode;
9307 if (TREE_CODE (arg1) == MULT_EXPR)
9308 parg = arg0, marg = arg1;
9309 else
9310 parg = arg1, marg = arg0;
9311 pcode = TREE_CODE (parg);
9312 parg0 = TREE_OPERAND (parg, 0);
9313 parg1 = TREE_OPERAND (parg, 1);
9314 STRIP_NOPS (parg0);
9315 STRIP_NOPS (parg1);
9317 if (TREE_CODE (parg0) == MULT_EXPR
9318 && TREE_CODE (parg1) != MULT_EXPR)
9319 return fold_build2_loc (loc, pcode, type,
9320 fold_build2_loc (loc, PLUS_EXPR, type,
9321 fold_convert_loc (loc, type,
9322 parg0),
9323 fold_convert_loc (loc, type,
9324 marg)),
9325 fold_convert_loc (loc, type, parg1));
9326 if (TREE_CODE (parg0) != MULT_EXPR
9327 && TREE_CODE (parg1) == MULT_EXPR)
9328 return
9329 fold_build2_loc (loc, PLUS_EXPR, type,
9330 fold_convert_loc (loc, type, parg0),
9331 fold_build2_loc (loc, pcode, type,
9332 fold_convert_loc (loc, type, marg),
9333 fold_convert_loc (loc, type,
9334 parg1)));
9337 else
9339 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9340 to __complex__ ( x, y ). This is not the same for SNaNs or
9341 if signed zeros are involved. */
9342 if (!HONOR_SNANS (element_mode (arg0))
9343 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9344 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9346 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9347 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9348 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9349 bool arg0rz = false, arg0iz = false;
9350 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9351 || (arg0i && (arg0iz = real_zerop (arg0i))))
9353 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9354 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9355 if (arg0rz && arg1i && real_zerop (arg1i))
9357 tree rp = arg1r ? arg1r
9358 : build1 (REALPART_EXPR, rtype, arg1);
9359 tree ip = arg0i ? arg0i
9360 : build1 (IMAGPART_EXPR, rtype, arg0);
9361 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9363 else if (arg0iz && arg1r && real_zerop (arg1r))
9365 tree rp = arg0r ? arg0r
9366 : build1 (REALPART_EXPR, rtype, arg0);
9367 tree ip = arg1i ? arg1i
9368 : build1 (IMAGPART_EXPR, rtype, arg1);
9369 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9374 if (flag_unsafe_math_optimizations
9375 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9376 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9377 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9378 return tem;
9380 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9381 We associate floats only if the user has specified
9382 -fassociative-math. */
9383 if (flag_associative_math
9384 && TREE_CODE (arg1) == PLUS_EXPR
9385 && TREE_CODE (arg0) != MULT_EXPR)
9387 tree tree10 = TREE_OPERAND (arg1, 0);
9388 tree tree11 = TREE_OPERAND (arg1, 1);
9389 if (TREE_CODE (tree11) == MULT_EXPR
9390 && TREE_CODE (tree10) == MULT_EXPR)
9392 tree tree0;
9393 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9394 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9397 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9398 We associate floats only if the user has specified
9399 -fassociative-math. */
9400 if (flag_associative_math
9401 && TREE_CODE (arg0) == PLUS_EXPR
9402 && TREE_CODE (arg1) != MULT_EXPR)
9404 tree tree00 = TREE_OPERAND (arg0, 0);
9405 tree tree01 = TREE_OPERAND (arg0, 1);
9406 if (TREE_CODE (tree01) == MULT_EXPR
9407 && TREE_CODE (tree00) == MULT_EXPR)
9409 tree tree0;
9410 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9411 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
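/* E.g. with -fassociative-math, (b*c + d*e) + a is rewritten as
   b*c + (d*e + a), keeping the multiply-add pairs visible to the
   distribution code above (illustrative).  */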
9416 bit_rotate:
9417 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9418 is a rotate of A by C1 bits. */
9419 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9420 is a rotate of A by B bits. */
9422 enum tree_code code0, code1;
9423 tree rtype;
9424 code0 = TREE_CODE (arg0);
9425 code1 = TREE_CODE (arg1);
9426 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9427 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9428 && operand_equal_p (TREE_OPERAND (arg0, 0),
9429 TREE_OPERAND (arg1, 0), 0)
9430 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9431 TYPE_UNSIGNED (rtype))
9432 /* Only create rotates in complete modes. Other cases are not
9433 expanded properly. */
9434 && (element_precision (rtype)
9435 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9437 tree tree01, tree11;
9438 enum tree_code code01, code11;
9440 tree01 = TREE_OPERAND (arg0, 1);
9441 tree11 = TREE_OPERAND (arg1, 1);
9442 STRIP_NOPS (tree01);
9443 STRIP_NOPS (tree11);
9444 code01 = TREE_CODE (tree01);
9445 code11 = TREE_CODE (tree11);
9446 if (code01 == INTEGER_CST
9447 && code11 == INTEGER_CST
9448 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9449 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9451 tem = build2_loc (loc, LROTATE_EXPR,
9452 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9453 TREE_OPERAND (arg0, 0),
9454 code0 == LSHIFT_EXPR
9455 ? TREE_OPERAND (arg0, 1)
9456 : TREE_OPERAND (arg1, 1));
9457 return fold_convert_loc (loc, type, tem);
9459 else if (code11 == MINUS_EXPR)
9461 tree tree110, tree111;
9462 tree110 = TREE_OPERAND (tree11, 0);
9463 tree111 = TREE_OPERAND (tree11, 1);
9464 STRIP_NOPS (tree110);
9465 STRIP_NOPS (tree111);
9466 if (TREE_CODE (tree110) == INTEGER_CST
9467 && 0 == compare_tree_int (tree110,
9468 element_precision
9469 (TREE_TYPE (TREE_OPERAND
9470 (arg0, 0))))
9471 && operand_equal_p (tree01, tree111, 0))
9472 return
9473 fold_convert_loc (loc, type,
9474 build2 ((code0 == LSHIFT_EXPR
9475 ? LROTATE_EXPR
9476 : RROTATE_EXPR),
9477 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9478 TREE_OPERAND (arg0, 0),
9479 TREE_OPERAND (arg0, 1)));
9481 else if (code01 == MINUS_EXPR)
9483 tree tree010, tree011;
9484 tree010 = TREE_OPERAND (tree01, 0);
9485 tree011 = TREE_OPERAND (tree01, 1);
9486 STRIP_NOPS (tree010);
9487 STRIP_NOPS (tree011);
9488 if (TREE_CODE (tree010) == INTEGER_CST
9489 && 0 == compare_tree_int (tree010,
9490 element_precision
9491 (TREE_TYPE (TREE_OPERAND
9492 (arg0, 0))))
9493 && operand_equal_p (tree11, tree011, 0))
9494 return fold_convert_loc
9495 (loc, type,
9496 build2 ((code0 != LSHIFT_EXPR
9497 ? LROTATE_EXPR
9498 : RROTATE_EXPR),
9499 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9500 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
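/* Concretely (a sketch, assuming 32-bit unsigned int x): both
   (x << 3) | (x >> 29) and (x << n) | (x >> (32 - n)) are recognized
   here and rewritten as left-rotates of x.  */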
9505 associate:
9506 /* In most languages, we can't associate operations on floats through
9507 parentheses. Rather than remember where the parentheses were, we
9508 don't associate floats at all, unless the user has specified
9509 -fassociative-math.
9510 And, we need to make sure type is not saturating. */
9512 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9513 && !TYPE_SATURATING (type))
9515 tree var0, con0, lit0, minus_lit0;
9516 tree var1, con1, lit1, minus_lit1;
9517 tree atype = type;
9518 bool ok = true;
9520 /* Split both trees into variables, constants, and literals. Then
9521 associate each group together, the constants with literals,
9522 then the result with variables. This increases the chances of
9523 literals being recombined later and of generating relocatable
9524 expressions for the sum of a constant and literal. */
9525 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9526 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9527 code == MINUS_EXPR);
9529 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9530 if (code == MINUS_EXPR)
9531 code = PLUS_EXPR;
9533 /* With undefined overflow prefer doing association in a type
9534 which wraps on overflow, if that is one of the operand types. */
9535 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9536 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9538 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9539 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9540 atype = TREE_TYPE (arg0);
9541 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9542 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9543 atype = TREE_TYPE (arg1);
9544 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9547 /* With undefined overflow we can only associate constants with one
9548 variable, and constants whose association doesn't overflow. */
9549 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9550 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9552 if (var0 && var1)
9554 tree tmp0 = var0;
9555 tree tmp1 = var1;
9556 bool one_neg = false;
9558 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9560 tmp0 = TREE_OPERAND (tmp0, 0);
9561 one_neg = !one_neg;
9563 if (CONVERT_EXPR_P (tmp0)
9564 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9565 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9566 <= TYPE_PRECISION (atype)))
9567 tmp0 = TREE_OPERAND (tmp0, 0);
9568 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9570 tmp1 = TREE_OPERAND (tmp1, 0);
9571 one_neg = !one_neg;
9573 if (CONVERT_EXPR_P (tmp1)
9574 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9575 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9576 <= TYPE_PRECISION (atype)))
9577 tmp1 = TREE_OPERAND (tmp1, 0);
9578 /* The only case we can still associate with two variables
9579 is if they cancel out. */
9580 if (!one_neg
9581 || !operand_equal_p (tmp0, tmp1, 0))
9582 ok = false;
9586 /* Only do something if we found more than two objects. Otherwise,
9587 nothing has changed and we risk infinite recursion. */
9588 if (ok
9589 && (2 < ((var0 != 0) + (var1 != 0)
9590 + (con0 != 0) + (con1 != 0)
9591 + (lit0 != 0) + (lit1 != 0)
9592 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9594 bool any_overflows = false;
9595 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9596 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9597 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9598 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9599 var0 = associate_trees (loc, var0, var1, code, atype);
9600 con0 = associate_trees (loc, con0, con1, code, atype);
9601 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9602 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9603 code, atype);
9605 /* Preserve the MINUS_EXPR if the negative part of the literal is
9606 greater than the positive part. Otherwise, the multiplicative
9607 folding code (i.e. extract_muldiv) may be fooled when
9608 unsigned constants are subtracted, as in the following
9609 example: ((X*2 + 4) - 8U)/2. */
9610 if (minus_lit0 && lit0)
9612 if (TREE_CODE (lit0) == INTEGER_CST
9613 && TREE_CODE (minus_lit0) == INTEGER_CST
9614 && tree_int_cst_lt (lit0, minus_lit0))
9616 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9617 MINUS_EXPR, atype);
9618 lit0 = 0;
9620 else
9622 lit0 = associate_trees (loc, lit0, minus_lit0,
9623 MINUS_EXPR, atype);
9624 minus_lit0 = 0;
9628 /* Don't introduce overflows through reassociation. */
9629 if (!any_overflows
9630 && ((lit0 && TREE_OVERFLOW_P (lit0))
9631 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9632 return NULL_TREE;
9634 if (minus_lit0)
9636 if (con0 == 0)
9637 return
9638 fold_convert_loc (loc, type,
9639 associate_trees (loc, var0, minus_lit0,
9640 MINUS_EXPR, atype));
9641 else
9643 con0 = associate_trees (loc, con0, minus_lit0,
9644 MINUS_EXPR, atype);
9645 return
9646 fold_convert_loc (loc, type,
9647 associate_trees (loc, var0, con0,
9648 PLUS_EXPR, atype));
9652 con0 = associate_trees (loc, con0, lit0, code, atype);
9653 return
9654 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9655 code, atype));
9659 return NULL_TREE;
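/* For instance (a hedged sketch): with unsigned x and y,
   (x + 4) + (y + 5) splits into variables x, y and literals 4, 5 and
   reassociates to (x + y) + 9; with undefined signed overflow the
   two-variable case is refused unless the variables cancel.  */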
9661 case MINUS_EXPR:
9662 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9663 if (TREE_CODE (arg0) == NEGATE_EXPR
9664 && negate_expr_p (arg1)
9665 && reorder_operands_p (arg0, arg1))
9666 return fold_build2_loc (loc, MINUS_EXPR, type,
9667 fold_convert_loc (loc, type,
9668 negate_expr (arg1)),
9669 fold_convert_loc (loc, type,
9670 TREE_OPERAND (arg0, 0)));
9672 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9673 __complex__ ( x, -y ). This is not the same for SNaNs or if
9674 signed zeros are involved. */
9675 if (!HONOR_SNANS (element_mode (arg0))
9676 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9677 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9679 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9680 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9681 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9682 bool arg0rz = false, arg0iz = false;
9683 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9684 || (arg0i && (arg0iz = real_zerop (arg0i))))
9686 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9687 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9688 if (arg0rz && arg1i && real_zerop (arg1i))
9690 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9691 arg1r ? arg1r
9692 : build1 (REALPART_EXPR, rtype, arg1));
9693 tree ip = arg0i ? arg0i
9694 : build1 (IMAGPART_EXPR, rtype, arg0);
9695 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9697 else if (arg0iz && arg1r && real_zerop (arg1r))
9699 tree rp = arg0r ? arg0r
9700 : build1 (REALPART_EXPR, rtype, arg0);
9701 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9702 arg1i ? arg1i
9703 : build1 (IMAGPART_EXPR, rtype, arg1));
9704 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9709 /* A - B -> A + (-B) if B is easily negatable. */
9710 if (negate_expr_p (arg1)
9711 && !TYPE_OVERFLOW_SANITIZED (type)
9712 && ((FLOAT_TYPE_P (type)
9713 /* Avoid this transformation if B is a positive REAL_CST. */
9714 && (TREE_CODE (arg1) != REAL_CST
9715 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9716 || INTEGRAL_TYPE_P (type)))
9717 return fold_build2_loc (loc, PLUS_EXPR, type,
9718 fold_convert_loc (loc, type, arg0),
9719 fold_convert_loc (loc, type,
9720 negate_expr (arg1)));
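/* E.g. for int a, a - 5 is canonicalized to a + -5; for doubles,
   a - 2.0 is deliberately left alone (2.0 is a positive REAL_CST),
   while a - -2.0 becomes a + 2.0 (illustrative).  */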
9722 /* Fold &a[i] - &a[j] to i-j. */
9723 if (TREE_CODE (arg0) == ADDR_EXPR
9724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9725 && TREE_CODE (arg1) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9728 tree tem = fold_addr_of_array_ref_difference (loc, type,
9729 TREE_OPERAND (arg0, 0),
9730 TREE_OPERAND (arg1, 0));
9731 if (tem)
9732 return tem;
9735 if (FLOAT_TYPE_P (type)
9736 && flag_unsafe_math_optimizations
9737 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9738 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9739 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9740 return tem;
9742 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 the same, or one
9743 of them equal to 1. Make sure the type is not saturating and has the signedness of
9744 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9745 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9746 if ((TREE_CODE (arg0) == MULT_EXPR
9747 || TREE_CODE (arg1) == MULT_EXPR)
9748 && !TYPE_SATURATING (type)
9749 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9750 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9751 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9753 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9754 if (tem)
9755 return tem;
9758 goto associate;
9760 case MULT_EXPR:
9761 if (! FLOAT_TYPE_P (type))
9763 /* Transform x * -C into -x * C if x is easily negatable. */
9764 if (TREE_CODE (arg1) == INTEGER_CST
9765 && tree_int_cst_sgn (arg1) == -1
9766 && negate_expr_p (arg0)
9767 && (tem = negate_expr (arg1)) != arg1
9768 && !TREE_OVERFLOW (tem))
9769 return fold_build2_loc (loc, MULT_EXPR, type,
9770 fold_convert_loc (loc, type,
9771 negate_expr (arg0)),
9772 tem);
9774 /* (A + A) * C -> A * 2 * C */
9775 if (TREE_CODE (arg0) == PLUS_EXPR
9776 && TREE_CODE (arg1) == INTEGER_CST
9777 && operand_equal_p (TREE_OPERAND (arg0, 0),
9778 TREE_OPERAND (arg0, 1), 0))
9779 return fold_build2_loc (loc, MULT_EXPR, type,
9780 omit_one_operand_loc (loc, type,
9781 TREE_OPERAND (arg0, 0),
9782 TREE_OPERAND (arg0, 1)),
9783 fold_build2_loc (loc, MULT_EXPR, type,
9784 build_int_cst (type, 2), arg1));
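/* E.g. (a + a) * 3 folds to a * 6 once the inner a * 2 is combined
   with the constant 3 (illustrative).  */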
9786 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9787 sign-changing only. */
9788 if (TREE_CODE (arg1) == INTEGER_CST
9789 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9790 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9791 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9793 strict_overflow_p = false;
9794 if (TREE_CODE (arg1) == INTEGER_CST
9795 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9796 &strict_overflow_p)))
9798 if (strict_overflow_p)
9799 fold_overflow_warning (("assuming signed overflow does not "
9800 "occur when simplifying "
9801 "multiplication"),
9802 WARN_STRICT_OVERFLOW_MISC);
9803 return fold_convert_loc (loc, type, tem);
9806 /* Optimize z * conj(z) for integer complex numbers. */
9807 if (TREE_CODE (arg0) == CONJ_EXPR
9808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9809 return fold_mult_zconjz (loc, type, arg1);
9810 if (TREE_CODE (arg1) == CONJ_EXPR
9811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9812 return fold_mult_zconjz (loc, type, arg0);
9814 else
9816 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9817 This is not the same for NaNs or if signed zeros are
9818 involved. */
9819 if (!HONOR_NANS (arg0)
9820 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9821 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9822 && TREE_CODE (arg1) == COMPLEX_CST
9823 && real_zerop (TREE_REALPART (arg1)))
9825 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9826 if (real_onep (TREE_IMAGPART (arg1)))
9827 return
9828 fold_build2_loc (loc, COMPLEX_EXPR, type,
9829 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9830 rtype, arg0)),
9831 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9832 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9833 return
9834 fold_build2_loc (loc, COMPLEX_EXPR, type,
9835 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9836 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9837 rtype, arg0)));
9840 /* Optimize z * conj(z) for floating point complex numbers.
9841 Guarded by flag_unsafe_math_optimizations as non-finite
9842 imaginary components don't produce scalar results. */
9843 if (flag_unsafe_math_optimizations
9844 && TREE_CODE (arg0) == CONJ_EXPR
9845 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9846 return fold_mult_zconjz (loc, type, arg1);
9847 if (flag_unsafe_math_optimizations
9848 && TREE_CODE (arg1) == CONJ_EXPR
9849 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9850 return fold_mult_zconjz (loc, type, arg0);
9852 if (flag_unsafe_math_optimizations)
9855 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9856 if (!in_gimple_form
9857 && optimize
9858 && operand_equal_p (arg0, arg1, 0))
9860 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9862 if (powfn)
9864 tree arg = build_real (type, dconst2);
9865 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9870 goto associate;
9872 case BIT_IOR_EXPR:
9873 /* Canonicalize (X & C1) | C2. */
9874 if (TREE_CODE (arg0) == BIT_AND_EXPR
9875 && TREE_CODE (arg1) == INTEGER_CST
9876 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9878 int width = TYPE_PRECISION (type), w;
9879 wide_int c1 = TREE_OPERAND (arg0, 1);
9880 wide_int c2 = arg1;
9882 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9883 if ((c1 & c2) == c1)
9884 return omit_one_operand_loc (loc, type, arg1,
9885 TREE_OPERAND (arg0, 0));
9887 wide_int msk = wi::mask (width, false,
9888 TYPE_PRECISION (TREE_TYPE (arg1)));
9890 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9891 if (msk.and_not (c1 | c2) == 0)
9892 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9893 TREE_OPERAND (arg0, 0), arg1);
9895 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9896 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9897 mode which allows further optimizations. */
9898 c1 &= msk;
9899 c2 &= msk;
9900 wide_int c3 = c1.and_not (c2);
9901 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9903 wide_int mask = wi::mask (w, false,
9904 TYPE_PRECISION (type));
9905 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9907 c3 = mask;
9908 break;
9912 if (c3 != c1)
9913 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9914 fold_build2_loc (loc, BIT_AND_EXPR, type,
9915 TREE_OPERAND (arg0, 0),
9916 wide_int_to_tree (type,
9917 c3)),
9918 arg1);
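/* Examples (illustrative, assuming 32-bit int x): (x & 0x03) | 0x0f
   folds to 0x0f since C1's bits all lie within C2, while
   (x & 0x34) | 0x0f becomes (x & 0x30) | 0x0f by dropping the C1
   bits that C2 already supplies.  */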
9921 /* See if this can be simplified into a rotate first. If that
9922 is unsuccessful, continue in the association code. */
9923 goto bit_rotate;
9925 case BIT_XOR_EXPR:
9926 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9927 if (TREE_CODE (arg0) == BIT_AND_EXPR
9928 && INTEGRAL_TYPE_P (type)
9929 && integer_onep (TREE_OPERAND (arg0, 1))
9930 && integer_onep (arg1))
9931 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9932 build_zero_cst (TREE_TYPE (arg0)));
9934 /* See if this can be simplified into a rotate first. If that
9935 is unsuccessful, continue in the association code. */
9936 goto bit_rotate;
9938 case BIT_AND_EXPR:
9939 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9940 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9941 && INTEGRAL_TYPE_P (type)
9942 && integer_onep (TREE_OPERAND (arg0, 1))
9943 && integer_onep (arg1))
9945 tree tem2;
9946 tem = TREE_OPERAND (arg0, 0);
9947 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9948 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9949 tem, tem2);
9950 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9951 build_zero_cst (TREE_TYPE (tem)));
9953 /* Fold ~X & 1 as (X & 1) == 0. */
9954 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9955 && INTEGRAL_TYPE_P (type)
9956 && integer_onep (arg1))
9958 tree tem2;
9959 tem = TREE_OPERAND (arg0, 0);
9960 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9961 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9962 tem, tem2);
9963 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9964 build_zero_cst (TREE_TYPE (tem)));
9966 /* Fold !X & 1 as X == 0. */
9967 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9968 && integer_onep (arg1))
9970 tem = TREE_OPERAND (arg0, 0);
9971 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9972 build_zero_cst (TREE_TYPE (tem)));
9975 /* Fold (X ^ Y) & Y as ~X & Y. */
9976 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9977 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9981 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9982 fold_convert_loc (loc, type, arg1));
9984 /* Fold (X ^ Y) & X as ~Y & X. */
9985 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9987 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9989 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9990 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9991 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9992 fold_convert_loc (loc, type, arg1));
9994 /* Fold X & (X ^ Y) as X & ~Y. */
9995 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9996 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9998 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9999 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10000 fold_convert_loc (loc, type, arg0),
10001 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10003 /* Fold X & (Y ^ X) as ~Y & X. */
10004 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10005 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10006 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10008 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10009 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10010 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10011 fold_convert_loc (loc, type, arg0));
10014 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10015 multiple of 1 << CST. */
10016 if (TREE_CODE (arg1) == INTEGER_CST)
10018 wide_int cst1 = arg1;
10019 wide_int ncst1 = -cst1;
10020 if ((cst1 & ncst1) == ncst1
10021 && multiple_of_p (type, arg0,
10022 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10023 return fold_convert_loc (loc, type, arg0);
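/* E.g. (x * 8) & -8 folds to x * 8: a multiple of 8 already has its
   three low bits clear (illustrative).  */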
10026 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10027 bits from CST2. */
10028 if (TREE_CODE (arg1) == INTEGER_CST
10029 && TREE_CODE (arg0) == MULT_EXPR
10030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10032 wide_int warg1 = arg1;
10033 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10035 if (masked == 0)
10036 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10037 arg0, arg1);
10038 else if (masked != warg1)
10040 /* Avoid the transform if arg1 is a mask of some
10041 mode which allows further optimizations. */
10042 int pop = wi::popcount (warg1);
10043 if (!(pop >= BITS_PER_UNIT
10044 && exact_log2 (pop) != -1
10045 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10046 return fold_build2_loc (loc, code, type, op0,
10047 wide_int_to_tree (type, masked));
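/* E.g. (x * 4) & 7 becomes (x * 4) & 4, since the two low bits of
   the product are known to be zero, and (x * 4) & 3 folds to 0
   outright (illustrative sketch).  */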
10051 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10052 ((A & N) + B) & M -> (A + B) & M
10053 Similarly if (N & M) == 0,
10054 ((A | N) + B) & M -> (A + B) & M
10055 and for - instead of + (or unary - instead of +)
10056 and/or ^ instead of |.
10057 If B is constant and (B & M) == 0, fold into A & M. */
10058 if (TREE_CODE (arg1) == INTEGER_CST)
10060 wide_int cst1 = arg1;
10061 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10062 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10063 && (TREE_CODE (arg0) == PLUS_EXPR
10064 || TREE_CODE (arg0) == MINUS_EXPR
10065 || TREE_CODE (arg0) == NEGATE_EXPR)
10066 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10067 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10069 tree pmop[2];
10070 int which = 0;
10071 wide_int cst0;
10073 /* Now we know that arg0 is (C + D) or (C - D) or
10074 -C and arg1 (M) equals (1LL << cst) - 1.
10075 Store C into PMOP[0] and D into PMOP[1]. */
10076 pmop[0] = TREE_OPERAND (arg0, 0);
10077 pmop[1] = NULL;
10078 if (TREE_CODE (arg0) != NEGATE_EXPR)
10080 pmop[1] = TREE_OPERAND (arg0, 1);
10081 which = 1;
10084 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10085 which = -1;
10087 for (; which >= 0; which--)
10088 switch (TREE_CODE (pmop[which]))
10090 case BIT_AND_EXPR:
10091 case BIT_IOR_EXPR:
10092 case BIT_XOR_EXPR:
10093 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10094 != INTEGER_CST)
10095 break;
10096 cst0 = TREE_OPERAND (pmop[which], 1);
10097 cst0 &= cst1;
10098 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10100 if (cst0 != cst1)
10101 break;
10103 else if (cst0 != 0)
10104 break;
10105 /* If C or D is of the form (A & N) where
10106 (N & M) == M, or of the form (A | N) or
10107 (A ^ N) where (N & M) == 0, replace it with A. */
10108 pmop[which] = TREE_OPERAND (pmop[which], 0);
10109 break;
10110 case INTEGER_CST:
10111 /* If C or D is an N where (N & M) == 0, it can be
10112 omitted (assumed 0). */
10113 if ((TREE_CODE (arg0) == PLUS_EXPR
10114 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10115 && (cst1 & pmop[which]) == 0)
10116 pmop[which] = NULL;
10117 break;
10118 default:
10119 break;
10122 /* Only build anything new if we optimized one or both arguments
10123 above. */
10124 if (pmop[0] != TREE_OPERAND (arg0, 0)
10125 || (TREE_CODE (arg0) != NEGATE_EXPR
10126 && pmop[1] != TREE_OPERAND (arg0, 1)))
10128 tree utype = TREE_TYPE (arg0);
10129 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10131 /* Perform the operations in a type that has defined
10132 overflow behavior. */
10133 utype = unsigned_type_for (TREE_TYPE (arg0));
10134 if (pmop[0] != NULL)
10135 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10136 if (pmop[1] != NULL)
10137 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10140 if (TREE_CODE (arg0) == NEGATE_EXPR)
10141 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10142 else if (TREE_CODE (arg0) == PLUS_EXPR)
10144 if (pmop[0] != NULL && pmop[1] != NULL)
10145 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10146 pmop[0], pmop[1]);
10147 else if (pmop[0] != NULL)
10148 tem = pmop[0];
10149 else if (pmop[1] != NULL)
10150 tem = pmop[1];
10151 else
10152 return build_int_cst (type, 0);
10154 else if (pmop[0] == NULL)
10155 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10156 else
10157 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10158 pmop[0], pmop[1]);
10159 /* TEM is now the new binary +, - or unary - replacement. */
10160 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10161 fold_convert_loc (loc, utype, arg1));
10162 return fold_convert_loc (loc, type, tem);
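/* E.g. with M == 15 (illustrative): ((a & 15) + b) & 15 becomes
   (a + b) & 15, and ((a | 16) - b) & 15 becomes (a - b) & 15, with
   the arithmetic done in an unsigned type when overflow would
   otherwise be undefined.  */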
10167 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10168 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10169 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10171 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10173 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10174 if (mask == -1)
10175 return
10176 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
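/* E.g. for unsigned char c, (int) c & 0377 is simply (int) c: the
   mask already covers the full precision of the inner operand
   (illustrative).  */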
10179 goto associate;
10181 case RDIV_EXPR:
10182 /* Don't touch a floating-point divide by zero unless the mode
10183 of the constant can represent infinity. */
10184 if (TREE_CODE (arg1) == REAL_CST
10185 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10186 && real_zerop (arg1))
10187 return NULL_TREE;
10189 /* (-A) / (-B) -> A / B */
10190 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10191 return fold_build2_loc (loc, RDIV_EXPR, type,
10192 TREE_OPERAND (arg0, 0),
10193 negate_expr (arg1));
10194 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10195 return fold_build2_loc (loc, RDIV_EXPR, type,
10196 negate_expr (arg0),
10197 TREE_OPERAND (arg1, 0));
10199 /* Convert A/B/C to A/(B*C). */
10200 if (flag_reciprocal_math
10201 && TREE_CODE (arg0) == RDIV_EXPR)
10202 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10203 fold_build2_loc (loc, MULT_EXPR, type,
10204 TREE_OPERAND (arg0, 1), arg1));
10206 /* Convert A/(B/C) to (A/B)*C. */
10207 if (flag_reciprocal_math
10208 && TREE_CODE (arg1) == RDIV_EXPR)
10209 return fold_build2_loc (loc, MULT_EXPR, type,
10210 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10211 TREE_OPERAND (arg1, 0)),
10212 TREE_OPERAND (arg1, 1));
10214 /* Convert C1/(X*C2) into (C1/C2)/X. */
10215 if (flag_reciprocal_math
10216 && TREE_CODE (arg1) == MULT_EXPR
10217 && TREE_CODE (arg0) == REAL_CST
10218 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10220 tree tem = const_binop (RDIV_EXPR, arg0,
10221 TREE_OPERAND (arg1, 1));
10222 if (tem)
10223 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10224 TREE_OPERAND (arg1, 0));
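/* E.g. with -freciprocal-math, 6.0 / (x * 2.0) becomes 3.0 / x once
   the constant division folds (illustrative).  */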
10227 return NULL_TREE;
10229 case TRUNC_DIV_EXPR:
10230 /* Optimize (X & (-A)) / A where A is a power of 2,
10231 to X >> log2(A) */
10232 if (TREE_CODE (arg0) == BIT_AND_EXPR
10233 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10234 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10236 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10237 arg1, TREE_OPERAND (arg0, 1));
10238 if (sum && integer_zerop (sum))
10239 tree pow2 = build_int_cst (integer_type_node,
10240 wi::exact_log2 (arg1));
10241 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10242 TREE_OPERAND (arg0, 0), pow2);
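/* E.g. for signed int x, (x & -16) / 16 becomes x >> 4: the AND
   clears the bits a truncating division would discard, so the
   arithmetic shift is exact (illustrative sketch).  */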
10246 /* Fall through */
10248 case FLOOR_DIV_EXPR:
10249 /* Simplify A / (B << N) where A and B are positive and B is
10250 a power of 2, to A >> (N + log2(B)). */
10251 strict_overflow_p = false;
10252 if (TREE_CODE (arg1) == LSHIFT_EXPR
10253 && (TYPE_UNSIGNED (type)
10254 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10256 tree sval = TREE_OPERAND (arg1, 0);
10257 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10259 tree sh_cnt = TREE_OPERAND (arg1, 1);
10260 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10261 wi::exact_log2 (sval));
10263 if (strict_overflow_p)
10264 fold_overflow_warning (("assuming signed overflow does not "
10265 "occur when simplifying A / (B << N)"),
10266 WARN_STRICT_OVERFLOW_MISC);
10268 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10269 sh_cnt, pow2);
10270 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10271 fold_convert_loc (loc, type, arg0), sh_cnt);
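/* E.g. for unsigned a and n, a / (4 << n) is rewritten as
   a >> (n + 2) (illustrative).  */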
10275 /* Fall through */
10277 case ROUND_DIV_EXPR:
10278 case CEIL_DIV_EXPR:
10279 case EXACT_DIV_EXPR:
10280 if (integer_zerop (arg1))
10281 return NULL_TREE;
10283 /* Convert -A / -B to A / B when the type is signed and overflow is
10284 undefined. */
10285 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10286 && TREE_CODE (arg0) == NEGATE_EXPR
10287 && negate_expr_p (arg1))
10289 if (INTEGRAL_TYPE_P (type))
10290 fold_overflow_warning (("assuming signed overflow does not occur "
10291 "when distributing negation across "
10292 "division"),
10293 WARN_STRICT_OVERFLOW_MISC);
10294 return fold_build2_loc (loc, code, type,
10295 fold_convert_loc (loc, type,
10296 TREE_OPERAND (arg0, 0)),
10297 fold_convert_loc (loc, type,
10298 negate_expr (arg1)));
10300 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10301 && TREE_CODE (arg1) == NEGATE_EXPR
10302 && negate_expr_p (arg0))
10304 if (INTEGRAL_TYPE_P (type))
10305 fold_overflow_warning (("assuming signed overflow does not occur "
10306 "when distributing negation across "
10307 "division"),
10308 WARN_STRICT_OVERFLOW_MISC);
10309 return fold_build2_loc (loc, code, type,
10310 fold_convert_loc (loc, type,
10311 negate_expr (arg0)),
10312 fold_convert_loc (loc, type,
10313 TREE_OPERAND (arg1, 0)));
10316 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10317 operation, EXACT_DIV_EXPR.
10319 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10320 At one time others generated faster code; it's not clear if they do
10321 after the last round of changes to the DIV code in expmed.c. */
10322 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10323 && multiple_of_p (type, arg0, arg1))
10324 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10325 fold_convert (type, arg0),
10326 fold_convert (type, arg1));
10328 strict_overflow_p = false;
10329 if (TREE_CODE (arg1) == INTEGER_CST
10330 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10331 &strict_overflow_p)))
10333 if (strict_overflow_p)
10334 fold_overflow_warning (("assuming signed overflow does not occur "
10335 "when simplifying division"),
10336 WARN_STRICT_OVERFLOW_MISC);
10337 return fold_convert_loc (loc, type, tem);
10340 return NULL_TREE;
10342 case CEIL_MOD_EXPR:
10343 case FLOOR_MOD_EXPR:
10344 case ROUND_MOD_EXPR:
10345 case TRUNC_MOD_EXPR:
10346 strict_overflow_p = false;
10347 if (TREE_CODE (arg1) == INTEGER_CST
10348 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10349 &strict_overflow_p)))
10351 if (strict_overflow_p)
10352 fold_overflow_warning (("assuming signed overflow does not occur "
10353 "when simplifying modulus"),
10354 WARN_STRICT_OVERFLOW_MISC);
10355 return fold_convert_loc (loc, type, tem);
10358 return NULL_TREE;
10360 case LROTATE_EXPR:
10361 case RROTATE_EXPR:
10362 case RSHIFT_EXPR:
10363 case LSHIFT_EXPR:
10364 /* Since a negative shift count is not well-defined,
10365 don't try to compute it in the compiler. */
10366 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10367 return NULL_TREE;
10369 prec = element_precision (type);
10371 /* If we have a rotate of a bit operation with the rotate count and
10372 the second operand of the bit operation both constant,
10373 permute the two operations. */
10374 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10375 && (TREE_CODE (arg0) == BIT_AND_EXPR
10376 || TREE_CODE (arg0) == BIT_IOR_EXPR
10377 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10378 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10379 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10380 fold_build2_loc (loc, code, type,
10381 TREE_OPERAND (arg0, 0), arg1),
10382 fold_build2_loc (loc, code, type,
10383 TREE_OPERAND (arg0, 1), arg1));
10385 /* Two consecutive rotates adding up to some integer
10386 multiple of the precision of the type can be ignored. */
10387 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10388 && TREE_CODE (arg0) == RROTATE_EXPR
10389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10390 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10391 prec) == 0)
10392 return TREE_OPERAND (arg0, 0);
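/* E.g. for 32-bit x, (x r>> 20) r>> 12 collapses to x, since
   20 + 12 is a whole multiple of the precision (illustrative,
   writing r>> for RROTATE_EXPR).  */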
10394 return NULL_TREE;
10396 case MIN_EXPR:
10397 case MAX_EXPR:
10398 goto associate;
10400 case TRUTH_ANDIF_EXPR:
10401 /* Note that the operands of this must be ints
10402 and their values must be 0 or 1.
10403 ("true" is a fixed value perhaps depending on the language.) */
10404 /* If first arg is constant zero, return it. */
10405 if (integer_zerop (arg0))
10406 return fold_convert_loc (loc, type, arg0);
10407 case TRUTH_AND_EXPR:
10408 /* If either arg is constant true, drop it. */
10409 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10410 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10411 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10412 /* Preserve sequence points. */
10413 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10414 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10415 /* If second arg is constant zero, result is zero, but first arg
10416 must be evaluated. */
10417 if (integer_zerop (arg1))
10418 return omit_one_operand_loc (loc, type, arg1, arg0);
10419 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10420 case will be handled here. */
10421 if (integer_zerop (arg0))
10422 return omit_one_operand_loc (loc, type, arg0, arg1);
10424 /* !X && X is always false. */
10425 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10426 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10427 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10428 /* X && !X is always false. */
10429 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10431 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10433 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10434 means A >= Y && A != MAX, but in this case we know that
10435 A < X <= MAX. */
10437 if (!TREE_SIDE_EFFECTS (arg0)
10438 && !TREE_SIDE_EFFECTS (arg1))
10440 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10441 if (tem && !operand_equal_p (tem, arg0, 0))
10442 return fold_build2_loc (loc, code, type, tem, arg1);
10444 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10445 if (tem && !operand_equal_p (tem, arg1, 0))
10446 return fold_build2_loc (loc, code, type, arg0, tem);
10449 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10450 != NULL_TREE)
10451 return tem;
10453 return NULL_TREE;
10455 case TRUTH_ORIF_EXPR:
10456 /* Note that the operands of this must be ints
10457 and their values must be 0 or true.
10458 ("true" is a fixed value perhaps depending on the language.) */
10459 /* If first arg is constant true, return it. */
10460 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10461 return fold_convert_loc (loc, type, arg0);
10462 case TRUTH_OR_EXPR:
10463 /* If either arg is constant zero, drop it. */
10464 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10465 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10466 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10467 /* Preserve sequence points. */
10468 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10469 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10470 /* If second arg is constant true, result is true, but we must
10471 evaluate first arg. */
10472 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10473 return omit_one_operand_loc (loc, type, arg1, arg0);
10474 /* Likewise for first arg, but note this only occurs here for
10475 TRUTH_OR_EXPR. */
10476 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10477 return omit_one_operand_loc (loc, type, arg0, arg1);
10479 /* !X || X is always true. */
10480 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10481 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10482 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10483 /* X || !X is always true. */
10484 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10485 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10486 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10488 /* (X && !Y) || (!X && Y) is X ^ Y */
10489 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10490 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10492 tree a0, a1, l0, l1, n0, n1;
10494 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10495 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10497 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10498 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10500 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10501 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10503 if ((operand_equal_p (n0, a0, 0)
10504 && operand_equal_p (n1, a1, 0))
10505 || (operand_equal_p (n0, a1, 0)
10506 && operand_equal_p (n1, a0, 0)))
10507 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
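/* E.g. once both arms have been simplified to TRUTH_AND_EXPR (no
   side effects), (p && !q) || (!p && q) folds to the truth-XOR of
   p and q (illustrative).  */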
10510 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10511 != NULL_TREE)
10512 return tem;
10514 return NULL_TREE;
10516 case TRUTH_XOR_EXPR:
10517 /* If the second arg is constant zero, drop it. */
10518 if (integer_zerop (arg1))
10519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10520 /* If the second arg is constant true, this is a logical inversion. */
10521 if (integer_onep (arg1))
10523 tem = invert_truthvalue_loc (loc, arg0);
10524 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10526 /* Identical arguments cancel to zero. */
10527 if (operand_equal_p (arg0, arg1, 0))
10528 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10530 /* !X ^ X is always true. */
10531 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10532 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10533 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10535 /* X ^ !X is always true. */
10536 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10537 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10538 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10540 return NULL_TREE;
10542 case EQ_EXPR:
10543 case NE_EXPR:
10544 STRIP_NOPS (arg0);
10545 STRIP_NOPS (arg1);
10547 tem = fold_comparison (loc, code, type, op0, op1);
10548 if (tem != NULL_TREE)
10549 return tem;
10551 /* bool_var != 1 becomes !bool_var. */
10552 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10553 && code == NE_EXPR)
10554 return fold_convert_loc (loc, type,
10555 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10556 TREE_TYPE (arg0), arg0));
10558 /* bool_var == 0 becomes !bool_var. */
10559 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10560 && code == EQ_EXPR)
10561 return fold_convert_loc (loc, type,
10562 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10563 TREE_TYPE (arg0), arg0));
10565 /* !exp != 0 becomes !exp */
10566 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10567 && code == NE_EXPR)
10568 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10570 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10571 if ((TREE_CODE (arg0) == PLUS_EXPR
10572 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10573 || TREE_CODE (arg0) == MINUS_EXPR)
10574 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10575 0)),
10576 arg1, 0)
10577 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10578 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10580 tree val = TREE_OPERAND (arg0, 1);
10581 return omit_two_operands_loc (loc, type,
10582 fold_build2_loc (loc, code, type,
10583 val,
10584 build_int_cst (TREE_TYPE (val),
10585 0)),
10586 TREE_OPERAND (arg0, 0), arg1);
10589 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10590 if (TREE_CODE (arg0) == MINUS_EXPR
10591 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10592 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10593 1)),
10594 arg1, 0)
10595 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10597 return omit_two_operands_loc (loc, type,
10598 code == NE_EXPR
10599 ? boolean_true_node : boolean_false_node,
10600 TREE_OPERAND (arg0, 1), arg1);
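/* E.g. 7 - x == x would require 2*x == 7, which no integer x
   satisfies, so it folds to false (and the != form to true); this is
   why the low bit of C must be 1 (illustrative reasoning).  */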
10603 /* If this is an EQ or NE comparison with zero and ARG0 is
10604 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10605 two operations, but the latter can be done in one less insn
10606 on machines that have only two-operand insns or on which a
10607 constant cannot be the first operand. */
10608 if (TREE_CODE (arg0) == BIT_AND_EXPR
10609 && integer_zerop (arg1))
10611 tree arg00 = TREE_OPERAND (arg0, 0);
10612 tree arg01 = TREE_OPERAND (arg0, 1);
10613 if (TREE_CODE (arg00) == LSHIFT_EXPR
10614 && integer_onep (TREE_OPERAND (arg00, 0)))
10616 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10617 arg01, TREE_OPERAND (arg00, 1));
10618 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10619 build_int_cst (TREE_TYPE (arg0), 1));
10620 return fold_build2_loc (loc, code, type,
10621 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10622 arg1);
10624 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10625 && integer_onep (TREE_OPERAND (arg01, 0)))
10627 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10628 arg00, TREE_OPERAND (arg01, 1));
10629 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10630 build_int_cst (TREE_TYPE (arg0), 1));
10631 return fold_build2_loc (loc, code, type,
10632 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10633 arg1);
10637 /* If this is an NE or EQ comparison of zero against the result of a
10638 signed MOD operation whose second operand is a power of 2, make
10639 the MOD operation unsigned since it is simpler and equivalent. */
10640 if (integer_zerop (arg1)
10641 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10642 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10643 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10644 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10645 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10646 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10648 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10649 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10650 fold_convert_loc (loc, newtype,
10651 TREE_OPERAND (arg0, 0)),
10652 fold_convert_loc (loc, newtype,
10653 TREE_OPERAND (arg0, 1)));
10655 return fold_build2_loc (loc, code, type, newmod,
10656 fold_convert_loc (loc, newtype, arg1));
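/* E.g. for signed int x, x % 16 == 0 becomes (unsigned) x % 16U == 0:
   for a power-of-two modulus both forms test the same low bits
   (illustrative).  */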
10659 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10660 C1 is a valid shift constant, and C2 is a power of two, i.e.
10661 a single bit. */
10662 if (TREE_CODE (arg0) == BIT_AND_EXPR
10663 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10664 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10665 == INTEGER_CST
10666 && integer_pow2p (TREE_OPERAND (arg0, 1))
10667 && integer_zerop (arg1))
10669 tree itype = TREE_TYPE (arg0);
10670 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10671 prec = TYPE_PRECISION (itype);
10673 /* Check for a valid shift count. */
10674 if (wi::ltu_p (arg001, prec))
10676 tree arg01 = TREE_OPERAND (arg0, 1);
10677 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10678 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10679 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10680 can be rewritten as (X & (C2 << C1)) != 0. */
10681 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10683 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10684 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10685 return fold_build2_loc (loc, code, type, tem,
10686 fold_convert_loc (loc, itype, arg1));
10688 /* Otherwise, for signed (arithmetic) shifts,
10689 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10690 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10691 else if (!TYPE_UNSIGNED (itype))
10692 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10693 arg000, build_int_cst (itype, 0));
10694 /* Otherwise, for unsigned (logical) shifts,
10695 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10696 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10697 else
10698 return omit_one_operand_loc (loc, type,
10699 code == EQ_EXPR ? integer_one_node
10700 : integer_zero_node,
10701 arg000);
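/* For example, with 32-bit unsigned int x, "((x >> 2) & 4) != 0"
   becomes "(x & 16) != 0", since 4 << 2 == 16 does not overflow.
   With 32-bit signed int x, "((x >> 28) & 16) != 0" can only test
   replicated sign bits and so becomes "x < 0".  */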
10705 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10706 Similarly for NE_EXPR. */
10707 if (TREE_CODE (arg0) == BIT_AND_EXPR
10708 && TREE_CODE (arg1) == INTEGER_CST
10709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10711 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10712 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10713 TREE_OPERAND (arg0, 1));
10714 tree dandnotc
10715 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10716 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10717 notc);
10718 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10719 if (integer_nonzerop (dandnotc))
10720 return omit_one_operand_loc (loc, type, rslt, arg0);
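/* For example, "(x & 3) == 4" folds to 0 (and "(x & 3) != 4" to 1),
   because 4 & ~3 is nonzero, so no value of x can satisfy it.  */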
10723 /* If this is a comparison of a field, we may be able to simplify it. */
10724 if ((TREE_CODE (arg0) == COMPONENT_REF
10725 || TREE_CODE (arg0) == BIT_FIELD_REF)
10726 /* Handle the constant case even without -O
10727 to make sure the warnings are given. */
10728 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10730 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10731 if (t1)
10732 return t1;
10735 /* Optimize comparisons of strlen vs zero to a compare of the
10736 first character of the string vs zero. To wit,
10737 strlen(ptr) == 0 => *ptr == 0
10738 strlen(ptr) != 0 => *ptr != 0
10739 Other cases should reduce to one of these two (or a constant)
10740 due to the return value of strlen being unsigned. */
10741 if (TREE_CODE (arg0) == CALL_EXPR
10742 && integer_zerop (arg1))
10744 tree fndecl = get_callee_fndecl (arg0);
10746 if (fndecl
10747 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10748 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10749 && call_expr_nargs (arg0) == 1
10750 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10752 tree iref = build_fold_indirect_ref_loc (loc,
10753 CALL_EXPR_ARG (arg0, 0));
10754 return fold_build2_loc (loc, code, type, iref,
10755 build_int_cst (TREE_TYPE (iref), 0));
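/* For example, "strlen (p) == 0" folds to "*p == 0", avoiding the
   library call when only emptiness of the string is tested.  */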
10759 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10760 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10761 if (TREE_CODE (arg0) == RSHIFT_EXPR
10762 && integer_zerop (arg1)
10763 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10765 tree arg00 = TREE_OPERAND (arg0, 0);
10766 tree arg01 = TREE_OPERAND (arg0, 1);
10767 tree itype = TREE_TYPE (arg00);
10768 if (wi::eq_p (arg01, element_precision (itype) - 1))
10770 if (TYPE_UNSIGNED (itype))
10772 itype = signed_type_for (itype);
10773 arg00 = fold_convert_loc (loc, itype, arg00);
10775 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10776 type, arg00, build_zero_cst (itype));
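/* For example, with 32-bit int x, "(x >> 31) != 0" folds to "x < 0";
   for unsigned x the operand is first converted to the corresponding
   signed type so that the sign-bit test is meaningful.  */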
10780 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10781 (X & C) == 0 when C is a single bit. */
10782 if (TREE_CODE (arg0) == BIT_AND_EXPR
10783 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10784 && integer_zerop (arg1)
10785 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10787 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10788 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10789 TREE_OPERAND (arg0, 1));
10790 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10791 type, tem,
10792 fold_convert_loc (loc, TREE_TYPE (arg0),
10793 arg1));
10796 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10797 constant C is a power of two, i.e. a single bit. */
10798 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10799 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10800 && integer_zerop (arg1)
10801 && integer_pow2p (TREE_OPERAND (arg0, 1))
10802 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10803 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10805 tree arg00 = TREE_OPERAND (arg0, 0);
10806 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10807 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10810 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10811 when C is a power of two, i.e. a single bit. */
10812 if (TREE_CODE (arg0) == BIT_AND_EXPR
10813 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10814 && integer_zerop (arg1)
10815 && integer_pow2p (TREE_OPERAND (arg0, 1))
10816 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10817 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10819 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10820 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10821 arg000, TREE_OPERAND (arg0, 1));
10822 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10823 tem, build_int_cst (TREE_TYPE (tem), 0));
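/* For example, with C == 8, both "((x & 8) ^ 8) == 0" and
   "((x ^ 8) & 8) == 0" fold to "(x & 8) != 0": XOR with the single
   bit merely inverts the sense of the bit test.  */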
10826 if (integer_zerop (arg1)
10827 && tree_expr_nonzero_p (arg0))
10829 tree res = constant_boolean_node (code==NE_EXPR, type);
10830 return omit_one_operand_loc (loc, type, res, arg0);
10833 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10834 if (TREE_CODE (arg0) == BIT_AND_EXPR
10835 && TREE_CODE (arg1) == BIT_AND_EXPR)
10837 tree arg00 = TREE_OPERAND (arg0, 0);
10838 tree arg01 = TREE_OPERAND (arg0, 1);
10839 tree arg10 = TREE_OPERAND (arg1, 0);
10840 tree arg11 = TREE_OPERAND (arg1, 1);
10841 tree itype = TREE_TYPE (arg0);
10843 if (operand_equal_p (arg01, arg11, 0))
10844 return fold_build2_loc (loc, code, type,
10845 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10846 fold_build2_loc (loc,
10847 BIT_XOR_EXPR, itype,
10848 arg00, arg10),
10849 arg01),
10850 build_zero_cst (itype));
10852 if (operand_equal_p (arg01, arg10, 0))
10853 return fold_build2_loc (loc, code, type,
10854 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10855 fold_build2_loc (loc,
10856 BIT_XOR_EXPR, itype,
10857 arg00, arg11),
10858 arg01),
10859 build_zero_cst (itype));
10861 if (operand_equal_p (arg00, arg11, 0))
10862 return fold_build2_loc (loc, code, type,
10863 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10864 fold_build2_loc (loc,
10865 BIT_XOR_EXPR, itype,
10866 arg01, arg10),
10867 arg00),
10868 build_zero_cst (itype));
10870 if (operand_equal_p (arg00, arg10, 0))
10871 return fold_build2_loc (loc, code, type,
10872 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10873 fold_build2_loc (loc,
10874 BIT_XOR_EXPR, itype,
10875 arg01, arg11),
10876 arg00),
10877 build_zero_cst (itype));
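/* For example, "(x & m) == (y & m)" folds to "((x ^ y) & m) == 0",
   which uses X and Y only once each and compares against a constant
   zero.  */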
10880 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10881 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10883 tree arg00 = TREE_OPERAND (arg0, 0);
10884 tree arg01 = TREE_OPERAND (arg0, 1);
10885 tree arg10 = TREE_OPERAND (arg1, 0);
10886 tree arg11 = TREE_OPERAND (arg1, 1);
10887 tree itype = TREE_TYPE (arg0);
10889 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10890 operand_equal_p guarantees no side-effects so we don't need
10891 to use omit_one_operand on Z. */
10892 if (operand_equal_p (arg01, arg11, 0))
10893 return fold_build2_loc (loc, code, type, arg00,
10894 fold_convert_loc (loc, TREE_TYPE (arg00),
10895 arg10));
10896 if (operand_equal_p (arg01, arg10, 0))
10897 return fold_build2_loc (loc, code, type, arg00,
10898 fold_convert_loc (loc, TREE_TYPE (arg00),
10899 arg11));
10900 if (operand_equal_p (arg00, arg11, 0))
10901 return fold_build2_loc (loc, code, type, arg01,
10902 fold_convert_loc (loc, TREE_TYPE (arg01),
10903 arg10));
10904 if (operand_equal_p (arg00, arg10, 0))
10905 return fold_build2_loc (loc, code, type, arg01,
10906 fold_convert_loc (loc, TREE_TYPE (arg01),
10907 arg11));
10909 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10910 if (TREE_CODE (arg01) == INTEGER_CST
10911 && TREE_CODE (arg11) == INTEGER_CST)
10913 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10914 fold_convert_loc (loc, itype, arg11));
10915 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10916 return fold_build2_loc (loc, code, type, tem,
10917 fold_convert_loc (loc, itype, arg10));
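/* For example, "(x ^ 5) == (y ^ 3)" folds to "(x ^ 6) == y",
   since 5 ^ 3 == 6 and XOR cancels.  */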
10921 /* Attempt to simplify equality/inequality comparisons of complex
10922 values. Only lower the comparison if the result is known or
10923 can be simplified to a single scalar comparison. */
10924 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10925 || TREE_CODE (arg0) == COMPLEX_CST)
10926 && (TREE_CODE (arg1) == COMPLEX_EXPR
10927 || TREE_CODE (arg1) == COMPLEX_CST))
10929 tree real0, imag0, real1, imag1;
10930 tree rcond, icond;
10932 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10934 real0 = TREE_OPERAND (arg0, 0);
10935 imag0 = TREE_OPERAND (arg0, 1);
10937 else
10939 real0 = TREE_REALPART (arg0);
10940 imag0 = TREE_IMAGPART (arg0);
10943 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10945 real1 = TREE_OPERAND (arg1, 0);
10946 imag1 = TREE_OPERAND (arg1, 1);
10948 else
10950 real1 = TREE_REALPART (arg1);
10951 imag1 = TREE_IMAGPART (arg1);
10954 rcond = fold_binary_loc (loc, code, type, real0, real1);
10955 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10957 if (integer_zerop (rcond))
10959 if (code == EQ_EXPR)
10960 return omit_two_operands_loc (loc, type, boolean_false_node,
10961 imag0, imag1);
10962 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10964 else
10966 if (code == NE_EXPR)
10967 return omit_two_operands_loc (loc, type, boolean_true_node,
10968 imag0, imag1);
10969 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10973 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10974 if (icond && TREE_CODE (icond) == INTEGER_CST)
10976 if (integer_zerop (icond))
10978 if (code == EQ_EXPR)
10979 return omit_two_operands_loc (loc, type, boolean_false_node,
10980 real0, real1);
10981 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10983 else
10985 if (code == NE_EXPR)
10986 return omit_two_operands_loc (loc, type, boolean_true_node,
10987 real0, real1);
10988 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
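/* For example, COMPLEX_EXPR <1, x> == COMPLEX_EXPR <2, y> folds to
   false outright, while COMPLEX_EXPR <1, x> == COMPLEX_EXPR <1, y>
   lowers to the single scalar comparison "x == y".  */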
10993 return NULL_TREE;
10995 case LT_EXPR:
10996 case GT_EXPR:
10997 case LE_EXPR:
10998 case GE_EXPR:
10999 tem = fold_comparison (loc, code, type, op0, op1);
11000 if (tem != NULL_TREE)
11001 return tem;
11003 /* Transform comparisons of the form X +- C CMP X. */
11004 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11005 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11006 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11007 && !HONOR_SNANS (arg0))
11008 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11009 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11011 tree arg01 = TREE_OPERAND (arg0, 1);
11012 enum tree_code code0 = TREE_CODE (arg0);
11013 int is_positive;
11015 if (TREE_CODE (arg01) == REAL_CST)
11016 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11017 else
11018 is_positive = tree_int_cst_sgn (arg01);
11020 /* (X - c) > X becomes false. */
11021 if (code == GT_EXPR
11022 && ((code0 == MINUS_EXPR && is_positive >= 0)
11023 || (code0 == PLUS_EXPR && is_positive <= 0)))
11025 if (TREE_CODE (arg01) == INTEGER_CST
11026 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11027 fold_overflow_warning (("assuming signed overflow does not "
11028 "occur when assuming that (X - c) > X "
11029 "is always false"),
11030 WARN_STRICT_OVERFLOW_ALL);
11031 return constant_boolean_node (0, type);
11034 /* Likewise (X + c) < X becomes false. */
11035 if (code == LT_EXPR
11036 && ((code0 == PLUS_EXPR && is_positive >= 0)
11037 || (code0 == MINUS_EXPR && is_positive <= 0)))
11039 if (TREE_CODE (arg01) == INTEGER_CST
11040 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11041 fold_overflow_warning (("assuming signed overflow does not "
11042 "occur when assuming that "
11043 "(X + c) < X is always false"),
11044 WARN_STRICT_OVERFLOW_ALL);
11045 return constant_boolean_node (0, type);
11048 /* Convert (X - c) <= X to true. */
11049 if (!HONOR_NANS (arg1)
11050 && code == LE_EXPR
11051 && ((code0 == MINUS_EXPR && is_positive >= 0)
11052 || (code0 == PLUS_EXPR && is_positive <= 0)))
11054 if (TREE_CODE (arg01) == INTEGER_CST
11055 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11056 fold_overflow_warning (("assuming signed overflow does not "
11057 "occur when assuming that "
11058 "(X - c) <= X is always true"),
11059 WARN_STRICT_OVERFLOW_ALL);
11060 return constant_boolean_node (1, type);
11063 /* Convert (X + c) >= X to true. */
11064 if (!HONOR_NANS (arg1)
11065 && code == GE_EXPR
11066 && ((code0 == PLUS_EXPR && is_positive >= 0)
11067 || (code0 == MINUS_EXPR && is_positive <= 0)))
11069 if (TREE_CODE (arg01) == INTEGER_CST
11070 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11071 fold_overflow_warning (("assuming signed overflow does not "
11072 "occur when assuming that "
11073 "(X + c) >= X is always true"),
11074 WARN_STRICT_OVERFLOW_ALL);
11075 return constant_boolean_node (1, type);
11078 if (TREE_CODE (arg01) == INTEGER_CST)
11080 /* Convert X + c > X and X - c < X to true for integers. */
11081 if (code == GT_EXPR
11082 && ((code0 == PLUS_EXPR && is_positive > 0)
11083 || (code0 == MINUS_EXPR && is_positive < 0)))
11085 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11086 fold_overflow_warning (("assuming signed overflow does "
11087 "not occur when assuming that "
11088 "(X + c) > X is always true"),
11089 WARN_STRICT_OVERFLOW_ALL);
11090 return constant_boolean_node (1, type);
11093 if (code == LT_EXPR
11094 && ((code0 == MINUS_EXPR && is_positive > 0)
11095 || (code0 == PLUS_EXPR && is_positive < 0)))
11097 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11098 fold_overflow_warning (("assuming signed overflow does "
11099 "not occur when assuming that "
11100 "(X - c) < X is always true"),
11101 WARN_STRICT_OVERFLOW_ALL);
11102 return constant_boolean_node (1, type);
11105 /* Convert X + c <= X and X - c >= X to false for integers. */
11106 if (code == LE_EXPR
11107 && ((code0 == PLUS_EXPR && is_positive > 0)
11108 || (code0 == MINUS_EXPR && is_positive < 0)))
11110 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11111 fold_overflow_warning (("assuming signed overflow does "
11112 "not occur when assuming that "
11113 "(X + c) <= X is always false"),
11114 WARN_STRICT_OVERFLOW_ALL);
11115 return constant_boolean_node (0, type);
11118 if (code == GE_EXPR
11119 && ((code0 == MINUS_EXPR && is_positive > 0)
11120 || (code0 == PLUS_EXPR && is_positive < 0)))
11122 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11123 fold_overflow_warning (("assuming signed overflow does "
11124 "not occur when assuming that "
11125 "(X - c) >= X is always false"),
11126 WARN_STRICT_OVERFLOW_ALL);
11127 return constant_boolean_node (0, type);
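/* For example, with signed int x under undefined-overflow semantics,
   "x + 1 > x" folds to 1 and "x + 1 <= x" folds to 0; each folding
   may emit a -Wstrict-overflow warning as above.  */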
11132 /* If we are comparing an ABS_EXPR with a constant, we can
11133 convert all the cases into explicit comparisons, but they may
11134 well not be faster than doing the ABS and one comparison.
11135 But ABS (X) <= C is a range comparison, which becomes a subtraction
11136 and a comparison, and is probably faster. */
11137 if (code == LE_EXPR
11138 && TREE_CODE (arg1) == INTEGER_CST
11139 && TREE_CODE (arg0) == ABS_EXPR
11140 && ! TREE_SIDE_EFFECTS (arg0)
11141 && (0 != (tem = negate_expr (arg1)))
11142 && TREE_CODE (tem) == INTEGER_CST
11143 && !TREE_OVERFLOW (tem))
11144 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11145 build2 (GE_EXPR, type,
11146 TREE_OPERAND (arg0, 0), tem),
11147 build2 (LE_EXPR, type,
11148 TREE_OPERAND (arg0, 0), arg1));
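/* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5"
   (a TRUTH_ANDIF_EXPR of the two range endpoints).  */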
11150 /* Convert ABS_EXPR<x> >= 0 to true. */
11151 strict_overflow_p = false;
11152 if (code == GE_EXPR
11153 && (integer_zerop (arg1)
11154 || (! HONOR_NANS (arg0)
11155 && real_zerop (arg1)))
11156 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11158 if (strict_overflow_p)
11159 fold_overflow_warning (("assuming signed overflow does not occur "
11160 "when simplifying comparison of "
11161 "absolute value and zero"),
11162 WARN_STRICT_OVERFLOW_CONDITIONAL);
11163 return omit_one_operand_loc (loc, type,
11164 constant_boolean_node (true, type),
11165 arg0);
11168 /* Convert ABS_EXPR<x> < 0 to false. */
11169 strict_overflow_p = false;
11170 if (code == LT_EXPR
11171 && (integer_zerop (arg1) || real_zerop (arg1))
11172 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11174 if (strict_overflow_p)
11175 fold_overflow_warning (("assuming signed overflow does not occur "
11176 "when simplifying comparison of "
11177 "absolute value and zero"),
11178 WARN_STRICT_OVERFLOW_CONDITIONAL);
11179 return omit_one_operand_loc (loc, type,
11180 constant_boolean_node (false, type),
11181 arg0);
11184 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11185 and similarly for >= into !=. */
11186 if ((code == LT_EXPR || code == GE_EXPR)
11187 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11188 && TREE_CODE (arg1) == LSHIFT_EXPR
11189 && integer_onep (TREE_OPERAND (arg1, 0)))
11190 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11191 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11192 TREE_OPERAND (arg1, 1)),
11193 build_zero_cst (TREE_TYPE (arg0)));
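/* For example, with unsigned int x, "x < 1U << y" folds to
   "(x >> y) == 0" and "x >= 1U << y" to "(x >> y) != 0".  */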
11195 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11196 otherwise Y might be >= # of bits in X's type and thus e.g.
11197 (unsigned char) (1 << Y) for Y == 15 might be 0.
11198 If the cast is widening, then 1 << Y should have unsigned type,
11199 otherwise if Y is the number of bits in the signed shift type minus 1,
11200 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11201 Y == 31 might be 0xffffffff80000000. */
11202 if ((code == LT_EXPR || code == GE_EXPR)
11203 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11204 && CONVERT_EXPR_P (arg1)
11205 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11206 && (element_precision (TREE_TYPE (arg1))
11207 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11208 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11209 || (element_precision (TREE_TYPE (arg1))
11210 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11211 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11213 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11214 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11215 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11216 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11217 build_zero_cst (TREE_TYPE (arg0)));
11220 return NULL_TREE;
11222 case UNORDERED_EXPR:
11223 case ORDERED_EXPR:
11224 case UNLT_EXPR:
11225 case UNLE_EXPR:
11226 case UNGT_EXPR:
11227 case UNGE_EXPR:
11228 case UNEQ_EXPR:
11229 case LTGT_EXPR:
11230 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11232 tree targ0 = strip_float_extensions (arg0);
11233 tree targ1 = strip_float_extensions (arg1);
11234 tree newtype = TREE_TYPE (targ0);
11236 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11237 newtype = TREE_TYPE (targ1);
11239 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11240 return fold_build2_loc (loc, code, type,
11241 fold_convert_loc (loc, newtype, targ0),
11242 fold_convert_loc (loc, newtype, targ1));
11245 return NULL_TREE;
11247 case COMPOUND_EXPR:
11248 /* When pedantic, a compound expression can be neither an lvalue
11249 nor an integer constant expression. */
11250 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11251 return NULL_TREE;
11252 /* Don't let (0, 0) be null pointer constant. */
11253 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11254 : fold_convert_loc (loc, type, arg1);
11255 return pedantic_non_lvalue_loc (loc, tem);
11257 case ASSERT_EXPR:
11258 /* An ASSERT_EXPR should never be passed to fold_binary. */
11259 gcc_unreachable ();
11261 default:
11262 return NULL_TREE;
11263 } /* switch (code) */
11266 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11267 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11268 of GOTO_EXPR. */
11270 static tree
11271 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11273 switch (TREE_CODE (*tp))
11275 case LABEL_EXPR:
11276 return *tp;
11278 case GOTO_EXPR:
11279 *walk_subtrees = 0;
11281 /* ... fall through ... */
11283 default:
11284 return NULL_TREE;
11288 /* Return whether the sub-tree ST contains a label which is accessible from
11289 outside the sub-tree. */
11291 static bool
11292 contains_label_p (tree st)
11294 return
11295 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11298 /* Fold a ternary expression of code CODE and type TYPE with operands
11299 OP0, OP1, and OP2. Return the folded expression if folding is
11300 successful. Otherwise, return NULL_TREE. */
11302 tree
11303 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11304 tree op0, tree op1, tree op2)
11306 tree tem;
11307 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11308 enum tree_code_class kind = TREE_CODE_CLASS (code);
11310 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11311 && TREE_CODE_LENGTH (code) == 3);
11313 /* If this is a commutative operation, and OP0 is a constant, move it
11314 to OP1 to reduce the number of tests below. */
11315 if (commutative_ternary_tree_code (code)
11316 && tree_swap_operands_p (op0, op1, true))
11317 return fold_build3_loc (loc, code, type, op1, op0, op2);
11319 tem = generic_simplify (loc, code, type, op0, op1, op2);
11320 if (tem)
11321 return tem;
11323 /* Strip any conversions that don't change the mode. This is safe
11324 for every expression, except for a comparison expression because
11325 its signedness is derived from its operands. So, in the latter
11326 case, only strip conversions that don't change the signedness.
11328 Note that this is done as an internal manipulation within the
11329 constant folder, in order to find the simplest representation of
11330 the arguments so that their form can be studied. In any case,
11331 the appropriate type conversions should be put back in the tree
11332 that will get out of the constant folder. */
11333 if (op0)
11335 arg0 = op0;
11336 STRIP_NOPS (arg0);
11339 if (op1)
11341 arg1 = op1;
11342 STRIP_NOPS (arg1);
11345 if (op2)
11347 arg2 = op2;
11348 STRIP_NOPS (arg2);
11351 switch (code)
11353 case COMPONENT_REF:
11354 if (TREE_CODE (arg0) == CONSTRUCTOR
11355 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11357 unsigned HOST_WIDE_INT idx;
11358 tree field, value;
11359 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11360 if (field == arg1)
11361 return value;
11363 return NULL_TREE;
11365 case COND_EXPR:
11366 case VEC_COND_EXPR:
11367 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11368 so all simple results must be passed through pedantic_non_lvalue. */
11369 if (TREE_CODE (arg0) == INTEGER_CST)
11371 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11372 tem = integer_zerop (arg0) ? op2 : op1;
11373 /* Only optimize constant conditions when the selected branch
11374 has the same type as the COND_EXPR. This avoids optimizing
11375 away "c ? x : throw", where the throw has a void type.
11376 Also avoid throwing away an operand that contains a label. */
11377 if ((!TREE_SIDE_EFFECTS (unused_op)
11378 || !contains_label_p (unused_op))
11379 && (! VOID_TYPE_P (TREE_TYPE (tem))
11380 || VOID_TYPE_P (type)))
11381 return pedantic_non_lvalue_loc (loc, tem);
11382 return NULL_TREE;
11384 else if (TREE_CODE (arg0) == VECTOR_CST)
11386 if ((TREE_CODE (arg1) == VECTOR_CST
11387 || TREE_CODE (arg1) == CONSTRUCTOR)
11388 && (TREE_CODE (arg2) == VECTOR_CST
11389 || TREE_CODE (arg2) == CONSTRUCTOR))
11391 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11392 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11393 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11394 for (i = 0; i < nelts; i++)
11396 tree val = VECTOR_CST_ELT (arg0, i);
11397 if (integer_all_onesp (val))
11398 sel[i] = i;
11399 else if (integer_zerop (val))
11400 sel[i] = nelts + i;
11401 else /* Currently unreachable. */
11402 return NULL_TREE;
11404 tree t = fold_vec_perm (type, arg1, arg2, sel);
11405 if (t != NULL_TREE)
11406 return t;
11410 /* If we have A op B ? A : C, we may be able to convert this to a
11411 simpler expression, depending on the operation and the values
11412 of B and C. Signed zeros prevent all of these transformations,
11413 for reasons given above each one.
11415 Also try swapping the arguments and inverting the conditional. */
11416 if (COMPARISON_CLASS_P (arg0)
11417 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11418 arg1, TREE_OPERAND (arg0, 1))
11419 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11421 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11422 if (tem)
11423 return tem;
11426 if (COMPARISON_CLASS_P (arg0)
11427 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11428 op2,
11429 TREE_OPERAND (arg0, 1))
11430 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11432 location_t loc0 = expr_location_or (arg0, loc);
11433 tem = fold_invert_truthvalue (loc0, arg0);
11434 if (tem && COMPARISON_CLASS_P (tem))
11436 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11437 if (tem)
11438 return tem;
11442 /* If the second operand is simpler than the third, swap them
11443 since that produces better jump optimization results. */
11444 if (truth_value_p (TREE_CODE (arg0))
11445 && tree_swap_operands_p (op1, op2, false))
11447 location_t loc0 = expr_location_or (arg0, loc);
11448 /* See if this can be inverted. If it can't, possibly because
11449 it was a floating-point inequality comparison, don't do
11450 anything. */
11451 tem = fold_invert_truthvalue (loc0, arg0);
11452 if (tem)
11453 return fold_build3_loc (loc, code, type, tem, op2, op1);
11456 /* Convert A ? 1 : 0 to simply A. */
11457 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11458 : (integer_onep (op1)
11459 && !VECTOR_TYPE_P (type)))
11460 && integer_zerop (op2)
11461 /* If we try to convert OP0 to our type, the
11462 call to fold will try to move the conversion inside
11463 a COND, which will recurse. In that case, the COND_EXPR
11464 is probably the best choice, so leave it alone. */
11465 && type == TREE_TYPE (arg0))
11466 return pedantic_non_lvalue_loc (loc, arg0);
11468 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11469 over COND_EXPR in cases such as floating point comparisons. */
11470 if (integer_zerop (op1)
11471 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11472 : (integer_onep (op2)
11473 && !VECTOR_TYPE_P (type)))
11474 && truth_value_p (TREE_CODE (arg0)))
11475 return pedantic_non_lvalue_loc (loc,
11476 fold_convert_loc (loc, type,
11477 invert_truthvalue_loc (loc,
11478 arg0)));
11480 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11481 if (TREE_CODE (arg0) == LT_EXPR
11482 && integer_zerop (TREE_OPERAND (arg0, 1))
11483 && integer_zerop (op2)
11484 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11486 /* sign_bit_p looks through both zero and sign extensions,
11487 but for this optimization only sign extensions are
11488 usable. */
11489 tree tem2 = TREE_OPERAND (arg0, 0);
11490 while (tem != tem2)
11492 if (TREE_CODE (tem2) != NOP_EXPR
11493 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11495 tem = NULL_TREE;
11496 break;
11498 tem2 = TREE_OPERAND (tem2, 0);
11500 /* sign_bit_p only checks ARG1 bits within A's precision.
11501 If <sign bit of A> has wider type than A, bits outside
11502 of A's precision in <sign bit of A> need to be checked.
11503 If they are all 0, this optimization needs to be done
11504 in unsigned A's type; if they are all 1, in signed A's type;
11505 otherwise this can't be done. */
11506 if (tem
11507 && TYPE_PRECISION (TREE_TYPE (tem))
11508 < TYPE_PRECISION (TREE_TYPE (arg1))
11509 && TYPE_PRECISION (TREE_TYPE (tem))
11510 < TYPE_PRECISION (type))
11512 int inner_width, outer_width;
11513 tree tem_type;
11515 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11516 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11517 if (outer_width > TYPE_PRECISION (type))
11518 outer_width = TYPE_PRECISION (type);
11520 wide_int mask = wi::shifted_mask
11521 (inner_width, outer_width - inner_width, false,
11522 TYPE_PRECISION (TREE_TYPE (arg1)));
11524 wide_int common = mask & arg1;
11525 if (common == mask)
11527 tem_type = signed_type_for (TREE_TYPE (tem));
11528 tem = fold_convert_loc (loc, tem_type, tem);
11530 else if (common == 0)
11532 tem_type = unsigned_type_for (TREE_TYPE (tem));
11533 tem = fold_convert_loc (loc, tem_type, tem);
11535 else
11536 tem = NULL;
11539 if (tem)
11540 return
11541 fold_convert_loc (loc, type,
11542 fold_build2_loc (loc, BIT_AND_EXPR,
11543 TREE_TYPE (tem), tem,
11544 fold_convert_loc (loc,
11545 TREE_TYPE (tem),
11546 arg1)));
11549 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11550 already handled above. */
11551 if (TREE_CODE (arg0) == BIT_AND_EXPR
11552 && integer_onep (TREE_OPERAND (arg0, 1))
11553 && integer_zerop (op2)
11554 && integer_pow2p (arg1))
11556 tree tem = TREE_OPERAND (arg0, 0);
11557 STRIP_NOPS (tem);
11558 if (TREE_CODE (tem) == RSHIFT_EXPR
11559 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11560 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11561 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11562 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11563 TREE_OPERAND (tem, 0), arg1);
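/* For example, "((a >> 3) & 1) ? 8 : 0" folds to "a & 8", since
   selecting 1 << 3 exactly when bit 3 of A is set is just masking
   that bit.  */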
11566 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11567 is probably obsolete because the first operand should be a
11568 truth value (that's why we have the two cases above), but let's
11569 leave it in until we can confirm this for all front-ends. */
11570 if (integer_zerop (op2)
11571 && TREE_CODE (arg0) == NE_EXPR
11572 && integer_zerop (TREE_OPERAND (arg0, 1))
11573 && integer_pow2p (arg1)
11574 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11575 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11576 arg1, OEP_ONLY_CONST))
11577 return pedantic_non_lvalue_loc (loc,
11578 fold_convert_loc (loc, type,
11579 TREE_OPERAND (arg0, 0)));
11581 /* Disable the transformations below for vectors, since
11582 fold_binary_op_with_conditional_arg may undo them immediately,
11583 yielding an infinite loop. */
11584 if (code == VEC_COND_EXPR)
11585 return NULL_TREE;
11587 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11588 if (integer_zerop (op2)
11589 && truth_value_p (TREE_CODE (arg0))
11590 && truth_value_p (TREE_CODE (arg1))
11591 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11592 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11593 : TRUTH_ANDIF_EXPR,
11594 type, fold_convert_loc (loc, type, arg0), arg1);
11596 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11597 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11598 && truth_value_p (TREE_CODE (arg0))
11599 && truth_value_p (TREE_CODE (arg1))
11600 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11602 location_t loc0 = expr_location_or (arg0, loc);
11603 /* Only perform transformation if ARG0 is easily inverted. */
11604 tem = fold_invert_truthvalue (loc0, arg0);
11605 if (tem)
11606 return fold_build2_loc (loc, code == VEC_COND_EXPR
11607 ? BIT_IOR_EXPR
11608 : TRUTH_ORIF_EXPR,
11609 type, fold_convert_loc (loc, type, tem),
11610 arg1);
11613 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11614 if (integer_zerop (arg1)
11615 && truth_value_p (TREE_CODE (arg0))
11616 && truth_value_p (TREE_CODE (op2))
11617 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11619 location_t loc0 = expr_location_or (arg0, loc);
11620 /* Only perform transformation if ARG0 is easily inverted. */
11621 tem = fold_invert_truthvalue (loc0, arg0);
11622 if (tem)
11623 return fold_build2_loc (loc, code == VEC_COND_EXPR
11624 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11625 type, fold_convert_loc (loc, type, tem),
11626 op2);
11629 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11630 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11631 && truth_value_p (TREE_CODE (arg0))
11632 && truth_value_p (TREE_CODE (op2))
11633 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11634 return fold_build2_loc (loc, code == VEC_COND_EXPR
11635 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11636 type, fold_convert_loc (loc, type, arg0), op2);
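/* Summarizing the four truth-value cases above: "a ? b : 0" folds to
   "a && b", "a ? b : 1" to "!a || b", "a ? 0 : b" to "!a && b", and
   "a ? 1 : b" to "a || b" (bitwise variants for VEC_COND_EXPR).  */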
11638 return NULL_TREE;
11640 case CALL_EXPR:
11641 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11642 of fold_ternary on them. */
11643 gcc_unreachable ();
11645 case BIT_FIELD_REF:
11646 if ((TREE_CODE (arg0) == VECTOR_CST
11647 || (TREE_CODE (arg0) == CONSTRUCTOR
11648 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11649 && (type == TREE_TYPE (TREE_TYPE (arg0))
11650 || (TREE_CODE (type) == VECTOR_TYPE
11651 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11653 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11654 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11655 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11656 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11658 if (n != 0
11659 && (idx % width) == 0
11660 && (n % width) == 0
11661 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11663 idx = idx / width;
11664 n = n / width;
11666 if (TREE_CODE (arg0) == VECTOR_CST)
11668 if (n == 1)
11669 return VECTOR_CST_ELT (arg0, idx);
11671 tree *vals = XALLOCAVEC (tree, n);
11672 for (unsigned i = 0; i < n; ++i)
11673 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11674 return build_vector (type, vals);
11677 /* Constructor elements can be subvectors. */
11678 unsigned HOST_WIDE_INT k = 1;
11679 if (CONSTRUCTOR_NELTS (arg0) != 0)
11681 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11682 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11683 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11686 /* We keep an exact subset of the constructor elements. */
11687 if ((idx % k) == 0 && (n % k) == 0)
11689 if (CONSTRUCTOR_NELTS (arg0) == 0)
11690 return build_constructor (type, NULL);
11691 idx /= k;
11692 n /= k;
11693 if (n == 1)
11695 if (idx < CONSTRUCTOR_NELTS (arg0))
11696 return CONSTRUCTOR_ELT (arg0, idx)->value;
11697 return build_zero_cst (type);
11700 vec<constructor_elt, va_gc> *vals;
11701 vec_alloc (vals, n);
11702 for (unsigned i = 0;
11703 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11704 ++i)
11705 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11706 CONSTRUCTOR_ELT
11707 (arg0, idx + i)->value);
11708 return build_constructor (type, vals);
11710 /* The bitfield references a single constructor element. */
11711 else if (idx + n <= (idx / k + 1) * k)
11713 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11714 return build_zero_cst (type);
11715 else if (n == k)
11716 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11717 else
11718 return fold_build3_loc (loc, code, type,
11719 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11720 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11725 /* A bit-field-ref that references the full argument can be stripped. */
11726 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11727 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11728 && integer_zerop (op2))
11729 return fold_convert_loc (loc, type, arg0);
11731 /* On constants we can use native encode/interpret to constant
11732 fold (nearly) all BIT_FIELD_REFs. */
11733 if (CONSTANT_CLASS_P (arg0)
11734 && can_native_interpret_type_p (type)
11735 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11736 /* This limitation should not be necessary; we just need to
11737 round this up to mode size. */
11738 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11739 /* Need bit-shifting of the buffer to relax the following. */
11740 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11742 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11743 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11744 unsigned HOST_WIDE_INT clen;
11745 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11746 /* ??? We cannot tell native_encode_expr to start at
11747 some random byte only. So limit us to a reasonable amount
11748 of work. */
11749 if (clen <= 4096)
11751 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11752 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11753 if (len > 0
11754 && len * BITS_PER_UNIT >= bitpos + bitsize)
11756 tree v = native_interpret_expr (type,
11757 b + bitpos / BITS_PER_UNIT,
11758 bitsize / BITS_PER_UNIT);
11759 if (v)
11760 return v;
11765 return NULL_TREE;
11767 case FMA_EXPR:
11768 /* For integers we can decompose the FMA if possible. */
11769 if (TREE_CODE (arg0) == INTEGER_CST
11770 && TREE_CODE (arg1) == INTEGER_CST)
11771 return fold_build2_loc (loc, PLUS_EXPR, type,
11772 const_binop (MULT_EXPR, arg0, arg1), arg2);
11773 if (integer_zerop (arg2))
11774 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11776 return fold_fma (loc, type, arg0, arg1, arg2);
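/* For example, FMA_EXPR <2, 3, 4> on an integer type folds to the
   constant 10 (2 * 3 + 4), and FMA_EXPR <a, b, 0> to "a * b".  */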
11778 case VEC_PERM_EXPR:
11779 if (TREE_CODE (arg2) == VECTOR_CST)
11781 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11782 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11783 unsigned char *sel2 = sel + nelts;
11784 bool need_mask_canon = false;
11785 bool need_mask_canon2 = false;
11786 bool all_in_vec0 = true;
11787 bool all_in_vec1 = true;
11788 bool maybe_identity = true;
11789 bool single_arg = (op0 == op1);
11790 bool changed = false;
11792 mask2 = 2 * nelts - 1;
11793 mask = single_arg ? (nelts - 1) : mask2;
11794 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11795 for (i = 0; i < nelts; i++)
11797 tree val = VECTOR_CST_ELT (arg2, i);
11798 if (TREE_CODE (val) != INTEGER_CST)
11799 return NULL_TREE;
11801 /* Make sure that the perm value is in an acceptable
11802 range. */
11803 wide_int t = val;
11804 need_mask_canon |= wi::gtu_p (t, mask);
11805 need_mask_canon2 |= wi::gtu_p (t, mask2);
11806 sel[i] = t.to_uhwi () & mask;
11807 sel2[i] = t.to_uhwi () & mask2;
11809 if (sel[i] < nelts)
11810 all_in_vec1 = false;
11811 else
11812 all_in_vec0 = false;
11814 if ((sel[i] & (nelts-1)) != i)
11815 maybe_identity = false;
11818 if (maybe_identity)
11820 if (all_in_vec0)
11821 return op0;
11822 if (all_in_vec1)
11823 return op1;
11826 if (all_in_vec0)
11827 op1 = op0;
11828 else if (all_in_vec1)
11830 op0 = op1;
11831 for (i = 0; i < nelts; i++)
11832 sel[i] -= nelts;
11833 need_mask_canon = true;
11836 if ((TREE_CODE (op0) == VECTOR_CST
11837 || TREE_CODE (op0) == CONSTRUCTOR)
11838 && (TREE_CODE (op1) == VECTOR_CST
11839 || TREE_CODE (op1) == CONSTRUCTOR))
11841 tree t = fold_vec_perm (type, op0, op1, sel);
11842 if (t != NULL_TREE)
11843 return t;
11846 if (op0 == op1 && !single_arg)
11847 changed = true;
11849 /* Some targets are deficient and fail to expand a single
11850 argument permutation while still allowing an equivalent
11851 2-argument version. */
11852 if (need_mask_canon && arg2 == op2
11853 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11854 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11856 need_mask_canon = need_mask_canon2;
11857 sel = sel2;
11860 if (need_mask_canon && arg2 == op2)
11862 tree *tsel = XALLOCAVEC (tree, nelts);
11863 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11864 for (i = 0; i < nelts; i++)
11865 tsel[i] = build_int_cst (eltype, sel[i]);
11866 op2 = build_vector (TREE_TYPE (arg2), tsel);
11867 changed = true;
11870 if (changed)
11871 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11873 return NULL_TREE;
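/* For example, a VEC_PERM_EXPR of two four-element vectors whose
   selector is {0, 1, 2, 3} is the identity on the first operand and
   folds to OP0; the selector {4, 5, 6, 7} likewise folds to OP1.  */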
11875 default:
11876 return NULL_TREE;
11877 } /* switch (code) */
11880 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11881 of an array (or vector). */
11883 tree
11884 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11886 tree index_type = NULL_TREE;
11887 offset_int low_bound = 0;
11889 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11891 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11892 if (domain_type && TYPE_MIN_VALUE (domain_type))
11894 /* Static constructors for variably sized objects make no sense. */
11895 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11896 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11897 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11901 if (index_type)
11902 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11903 TYPE_SIGN (index_type));
11905 offset_int index = low_bound - 1;
11906 if (index_type)
11907 index = wi::ext (index, TYPE_PRECISION (index_type),
11908 TYPE_SIGN (index_type));
11910 offset_int max_index;
11911 unsigned HOST_WIDE_INT cnt;
11912 tree cfield, cval;
11914 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11916 /* An array constructor might explicitly set the index, specify a
11917 range, or leave the index NULL, meaning it is the next index after
11918 the previous one. */
11919 if (cfield)
11921 if (TREE_CODE (cfield) == INTEGER_CST)
11922 max_index = index = wi::to_offset (cfield);
11923 else
11925 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11926 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11927 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11930 else
11932 index += 1;
11933 if (index_type)
11934 index = wi::ext (index, TYPE_PRECISION (index_type),
11935 TYPE_SIGN (index_type));
11936 max_index = index;
11939 /* Do we have a match? */
11940 if (wi::cmpu (access_index, index) >= 0
11941 && wi::cmpu (access_index, max_index) <= 0)
11942 return cval;
11944 return NULL_TREE;
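/* For example, given the CONSTRUCTOR for "int a[3] = {10, 20, 30}",
   ACCESS_INDEX 1 returns the INTEGER_CST 20; a RANGE_EXPR index such
   as "[2 ... 5] = v" matches any ACCESS_INDEX in that range.  */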
11947 /* Perform constant folding and related simplification of EXPR.
11948 The related simplifications include x*1 => x, x*0 => 0, etc.,
11949 and application of the associative law.
11950 NOP_EXPR conversions may be removed freely (as long as we
11951 are careful not to change the type of the overall expression).
11952 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11953 but we can constant-fold them if they have constant operands. */
11955 #ifdef ENABLE_FOLD_CHECKING
11956 # define fold(x) fold_1 (x)
11957 static tree fold_1 (tree);
11958 static
11959 #endif
11960 tree
11961 fold (tree expr)
11963 const tree t = expr;
11964 enum tree_code code = TREE_CODE (t);
11965 enum tree_code_class kind = TREE_CODE_CLASS (code);
11966 tree tem;
11967 location_t loc = EXPR_LOCATION (expr);
11969 /* Return right away if a constant. */
11970 if (kind == tcc_constant)
11971 return t;
11973 /* CALL_EXPR-like objects with variable numbers of operands are
11974 treated specially. */
11975 if (kind == tcc_vl_exp)
11977 if (code == CALL_EXPR)
11979 tem = fold_call_expr (loc, expr, false);
11980 return tem ? tem : expr;
11982 return expr;
11985 if (IS_EXPR_CODE_CLASS (kind))
11987 tree type = TREE_TYPE (t);
11988 tree op0, op1, op2;
11990 switch (TREE_CODE_LENGTH (code))
11992 case 1:
11993 op0 = TREE_OPERAND (t, 0);
11994 tem = fold_unary_loc (loc, code, type, op0);
11995 return tem ? tem : expr;
11996 case 2:
11997 op0 = TREE_OPERAND (t, 0);
11998 op1 = TREE_OPERAND (t, 1);
11999 tem = fold_binary_loc (loc, code, type, op0, op1);
12000 return tem ? tem : expr;
12001 case 3:
12002 op0 = TREE_OPERAND (t, 0);
12003 op1 = TREE_OPERAND (t, 1);
12004 op2 = TREE_OPERAND (t, 2);
12005 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12006 return tem ? tem : expr;
12007 default:
12008 break;
12012 switch (code)
12014 case ARRAY_REF:
12016 tree op0 = TREE_OPERAND (t, 0);
12017 tree op1 = TREE_OPERAND (t, 1);
12019 if (TREE_CODE (op1) == INTEGER_CST
12020 && TREE_CODE (op0) == CONSTRUCTOR
12021 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12023 tree val = get_array_ctor_element_at_index (op0,
12024 wi::to_offset (op1));
12025 if (val)
12026 return val;
12029 return t;
12032 /* Return a VECTOR_CST if possible. */
12033 case CONSTRUCTOR:
12035 tree type = TREE_TYPE (t);
12036 if (TREE_CODE (type) != VECTOR_TYPE)
12037 return t;
12039 unsigned i;
12040 tree val;
12041 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12042 if (! CONSTANT_CLASS_P (val))
12043 return t;
12045 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12048 case CONST_DECL:
12049 return fold (DECL_INITIAL (t));
12051 default:
12052 return t;
12053 } /* switch (code) */
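/* A sketch of typical behavior: fold applied to a PLUS_EXPR of two
   INTEGER_CSTs yields a single INTEGER_CST via fold_binary_loc, a
   CONSTRUCTOR whose elements are all constants becomes a VECTOR_CST,
   and a CONST_DECL returns its folded DECL_INITIAL.  */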
12056 #ifdef ENABLE_FOLD_CHECKING
12057 #undef fold
12059 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12060 hash_table<nofree_ptr_hash<const tree_node> > *);
12061 static void fold_check_failed (const_tree, const_tree);
12062 void print_fold_checksum (const_tree);
12064 /* When --enable-checking=fold, compute a digest of EXPR before
12065 and after the actual fold call to verify that fold did not
12066 accidentally change the original EXPR. */
12068 tree
12069 fold (tree expr)
12071 tree ret;
12072 struct md5_ctx ctx;
12073 unsigned char checksum_before[16], checksum_after[16];
12074 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12076 md5_init_ctx (&ctx);
12077 fold_checksum_tree (expr, &ctx, &ht);
12078 md5_finish_ctx (&ctx, checksum_before);
12079 ht.empty ();
12081 ret = fold_1 (expr);
12083 md5_init_ctx (&ctx);
12084 fold_checksum_tree (expr, &ctx, &ht);
12085 md5_finish_ctx (&ctx, checksum_after);
12087 if (memcmp (checksum_before, checksum_after, 16))
12088 fold_check_failed (expr, ret);
12090 return ret;
12093 void
12094 print_fold_checksum (const_tree expr)
12096 struct md5_ctx ctx;
12097 unsigned char checksum[16], cnt;
12098 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12100 md5_init_ctx (&ctx);
12101 fold_checksum_tree (expr, &ctx, &ht);
12102 md5_finish_ctx (&ctx, checksum);
12103 for (cnt = 0; cnt < 16; ++cnt)
12104 fprintf (stderr, "%02x", checksum[cnt]);
12105 putc ('\n', stderr);
12108 static void
12109 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12111 internal_error ("fold check: original tree changed by fold");
12114 static void
12115 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12116 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12118 const tree_node **slot;
12119 enum tree_code code;
12120 union tree_node buf;
12121 int i, len;
12123 recursive_label:
12124 if (expr == NULL)
12125 return;
12126 slot = ht->find_slot (expr, INSERT);
12127 if (*slot != NULL)
12128 return;
12129 *slot = expr;
12130 code = TREE_CODE (expr);
12131 if (TREE_CODE_CLASS (code) == tcc_declaration
12132 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12134 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12135 memcpy ((char *) &buf, expr, tree_size (expr));
12136 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12137 buf.decl_with_vis.symtab_node = NULL;
12138 expr = (tree) &buf;
12140 else if (TREE_CODE_CLASS (code) == tcc_type
12141 && (TYPE_POINTER_TO (expr)
12142 || TYPE_REFERENCE_TO (expr)
12143 || TYPE_CACHED_VALUES_P (expr)
12144 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12145 || TYPE_NEXT_VARIANT (expr)))
12147 /* Allow these fields to be modified. */
12148 tree tmp;
12149 memcpy ((char *) &buf, expr, tree_size (expr));
12150 expr = tmp = (tree) &buf;
12151 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12152 TYPE_POINTER_TO (tmp) = NULL;
12153 TYPE_REFERENCE_TO (tmp) = NULL;
12154 TYPE_NEXT_VARIANT (tmp) = NULL;
12155 if (TYPE_CACHED_VALUES_P (tmp))
12157 TYPE_CACHED_VALUES_P (tmp) = 0;
12158 TYPE_CACHED_VALUES (tmp) = NULL;
12161 md5_process_bytes (expr, tree_size (expr), ctx);
12162 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12163 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12164 if (TREE_CODE_CLASS (code) != tcc_type
12165 && TREE_CODE_CLASS (code) != tcc_declaration
12166 && code != TREE_LIST
12167 && code != SSA_NAME
12168 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12169 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12170 switch (TREE_CODE_CLASS (code))
12172 case tcc_constant:
12173 switch (code)
12175 case STRING_CST:
12176 md5_process_bytes (TREE_STRING_POINTER (expr),
12177 TREE_STRING_LENGTH (expr), ctx);
12178 break;
12179 case COMPLEX_CST:
12180 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12181 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12182 break;
12183 case VECTOR_CST:
12184 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12185 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12186 break;
12187 default:
12188 break;
12190 break;
12191 case tcc_exceptional:
12192 switch (code)
12194 case TREE_LIST:
12195 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12196 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12197 expr = TREE_CHAIN (expr);
12198 goto recursive_label;
12199 break;
12200 case TREE_VEC:
12201 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12202 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12203 break;
12204 default:
12205 break;
12207 break;
12208 case tcc_expression:
12209 case tcc_reference:
12210 case tcc_comparison:
12211 case tcc_unary:
12212 case tcc_binary:
12213 case tcc_statement:
12214 case tcc_vl_exp:
12215 len = TREE_OPERAND_LENGTH (expr);
12216 for (i = 0; i < len; ++i)
12217 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12218 break;
12219 case tcc_declaration:
12220 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12221 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12222 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12224 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12225 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12226 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12227 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12228 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12231 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12233 if (TREE_CODE (expr) == FUNCTION_DECL)
12235 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12236 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12238 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12240 break;
12241 case tcc_type:
12242 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12243 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12244 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12245 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12246 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12247 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12248 if (INTEGRAL_TYPE_P (expr)
12249 || SCALAR_FLOAT_TYPE_P (expr))
12251 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12252 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12254 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12255 if (TREE_CODE (expr) == RECORD_TYPE
12256 || TREE_CODE (expr) == UNION_TYPE
12257 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12258 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12259 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12260 break;
12261 default:
12262 break;
12266 /* Helper function for outputting the checksum of a tree T. When
12267 debugging with gdb, you can "define mynext" to be "next" followed
12268 by "call debug_fold_checksum (op0)", then just trace down till the
12269 outputs differ. */
12271 DEBUG_FUNCTION void
12272 debug_fold_checksum (const_tree t)
12274 int i;
12275 unsigned char checksum[16];
12276 struct md5_ctx ctx;
12277 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12279 md5_init_ctx (&ctx);
12280 fold_checksum_tree (t, &ctx, &ht);
12281 md5_finish_ctx (&ctx, checksum);
12282 ht.empty ();
12284 for (i = 0; i < 16; i++)
12285 fprintf (stderr, "%d ", checksum[i]);
12287 fprintf (stderr, "\n");
12290 #endif
12292 /* Fold a unary tree expression with code CODE of type TYPE with an
12293 operand OP0. LOC is the location of the resulting expression.
12294 Return a folded expression if successful. Otherwise, return a tree
12295 expression with code CODE of type TYPE with an operand OP0. */
12297 tree
12298 fold_build1_stat_loc (location_t loc,
12299 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12301 tree tem;
12302 #ifdef ENABLE_FOLD_CHECKING
12303 unsigned char checksum_before[16], checksum_after[16];
12304 struct md5_ctx ctx;
12305 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12307 md5_init_ctx (&ctx);
12308 fold_checksum_tree (op0, &ctx, &ht);
12309 md5_finish_ctx (&ctx, checksum_before);
12310 ht.empty ();
12311 #endif
12313 tem = fold_unary_loc (loc, code, type, op0);
12314 if (!tem)
12315 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12317 #ifdef ENABLE_FOLD_CHECKING
12318 md5_init_ctx (&ctx);
12319 fold_checksum_tree (op0, &ctx, &ht);
12320 md5_finish_ctx (&ctx, checksum_after);
12322 if (memcmp (checksum_before, checksum_after, 16))
12323 fold_check_failed (op0, tem);
12324 #endif
12325 return tem;
12328 /* Fold a binary tree expression with code CODE of type TYPE with
12329 operands OP0 and OP1. LOC is the location of the resulting
12330 expression. Return a folded expression if successful. Otherwise,
12331 return a tree expression with code CODE of type TYPE with operands
12332 OP0 and OP1. */
12334 tree
12335 fold_build2_stat_loc (location_t loc,
12336 enum tree_code code, tree type, tree op0, tree op1
12337 MEM_STAT_DECL)
12339 tree tem;
12340 #ifdef ENABLE_FOLD_CHECKING
12341 unsigned char checksum_before_op0[16],
12342 checksum_before_op1[16],
12343 checksum_after_op0[16],
12344 checksum_after_op1[16];
12345 struct md5_ctx ctx;
12346 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12348 md5_init_ctx (&ctx);
12349 fold_checksum_tree (op0, &ctx, &ht);
12350 md5_finish_ctx (&ctx, checksum_before_op0);
12351 ht.empty ();
12353 md5_init_ctx (&ctx);
12354 fold_checksum_tree (op1, &ctx, &ht);
12355 md5_finish_ctx (&ctx, checksum_before_op1);
12356 ht.empty ();
12357 #endif
12359 tem = fold_binary_loc (loc, code, type, op0, op1);
12360 if (!tem)
12361 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12363 #ifdef ENABLE_FOLD_CHECKING
12364 md5_init_ctx (&ctx);
12365 fold_checksum_tree (op0, &ctx, &ht);
12366 md5_finish_ctx (&ctx, checksum_after_op0);
12367 ht.empty ();
12369 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12370 fold_check_failed (op0, tem);
12372 md5_init_ctx (&ctx);
12373 fold_checksum_tree (op1, &ctx, &ht);
12374 md5_finish_ctx (&ctx, checksum_after_op1);
12376 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12377 fold_check_failed (op1, tem);
12378 #endif
12379 return tem;
12382 /* Fold a ternary tree expression with code CODE of type TYPE with
12383 operands OP0, OP1, and OP2. Return a folded expression if
12384 successful. Otherwise, return a tree expression with code CODE of
12385 type TYPE with operands OP0, OP1, and OP2. */
12387 tree
12388 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12389 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12391 tree tem;
12392 #ifdef ENABLE_FOLD_CHECKING
12393 unsigned char checksum_before_op0[16],
12394 checksum_before_op1[16],
12395 checksum_before_op2[16],
12396 checksum_after_op0[16],
12397 checksum_after_op1[16],
12398 checksum_after_op2[16];
12399 struct md5_ctx ctx;
12400 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12402 md5_init_ctx (&ctx);
12403 fold_checksum_tree (op0, &ctx, &ht);
12404 md5_finish_ctx (&ctx, checksum_before_op0);
12405 ht.empty ();
12407 md5_init_ctx (&ctx);
12408 fold_checksum_tree (op1, &ctx, &ht);
12409 md5_finish_ctx (&ctx, checksum_before_op1);
12410 ht.empty ();
12412 md5_init_ctx (&ctx);
12413 fold_checksum_tree (op2, &ctx, &ht);
12414 md5_finish_ctx (&ctx, checksum_before_op2);
12415 ht.empty ();
12416 #endif
12418 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12419 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12420 if (!tem)
12421 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12423 #ifdef ENABLE_FOLD_CHECKING
12424 md5_init_ctx (&ctx);
12425 fold_checksum_tree (op0, &ctx, &ht);
12426 md5_finish_ctx (&ctx, checksum_after_op0);
12427 ht.empty ();
12429 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12430 fold_check_failed (op0, tem);
12432 md5_init_ctx (&ctx);
12433 fold_checksum_tree (op1, &ctx, &ht);
12434 md5_finish_ctx (&ctx, checksum_after_op1);
12435 ht.empty ();
12437 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12438 fold_check_failed (op1, tem);
12440 md5_init_ctx (&ctx);
12441 fold_checksum_tree (op2, &ctx, &ht);
12442 md5_finish_ctx (&ctx, checksum_after_op2);
12444 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12445 fold_check_failed (op2, tem);
12446 #endif
12447 return tem;
12450 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12451 arguments in ARGARRAY, and a null static chain.
12452 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12453 of type TYPE from the given operands as constructed by build_call_array. */
12455 tree
12456 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12457 int nargs, tree *argarray)
12459 tree tem;
12460 #ifdef ENABLE_FOLD_CHECKING
12461 unsigned char checksum_before_fn[16],
12462 checksum_before_arglist[16],
12463 checksum_after_fn[16],
12464 checksum_after_arglist[16];
12465 struct md5_ctx ctx;
12466 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12467 int i;
12469 md5_init_ctx (&ctx);
12470 fold_checksum_tree (fn, &ctx, &ht);
12471 md5_finish_ctx (&ctx, checksum_before_fn);
12472 ht.empty ();
12474 md5_init_ctx (&ctx);
12475 for (i = 0; i < nargs; i++)
12476 fold_checksum_tree (argarray[i], &ctx, &ht);
12477 md5_finish_ctx (&ctx, checksum_before_arglist);
12478 ht.empty ();
12479 #endif
12481 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12482 if (!tem)
12483 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12485 #ifdef ENABLE_FOLD_CHECKING
12486 md5_init_ctx (&ctx);
12487 fold_checksum_tree (fn, &ctx, &ht);
12488 md5_finish_ctx (&ctx, checksum_after_fn);
12489 ht.empty ();
12491 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12492 fold_check_failed (fn, tem);
12494 md5_init_ctx (&ctx);
12495 for (i = 0; i < nargs; i++)
12496 fold_checksum_tree (argarray[i], &ctx, &ht);
12497 md5_finish_ctx (&ctx, checksum_after_arglist);
12499 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12500 fold_check_failed (NULL_TREE, tem);
12501 #endif
12502 return tem;
12505 /* Perform constant folding and related simplification of initializer
12506 expression EXPR. These behave identically to "fold_buildN" but ignore
12507 potential run-time traps and exceptions that fold must preserve. */
12509 #define START_FOLD_INIT \
12510 int saved_signaling_nans = flag_signaling_nans;\
12511 int saved_trapping_math = flag_trapping_math;\
12512 int saved_rounding_math = flag_rounding_math;\
12513 int saved_trapv = flag_trapv;\
12514 int saved_folding_initializer = folding_initializer;\
12515 flag_signaling_nans = 0;\
12516 flag_trapping_math = 0;\
12517 flag_rounding_math = 0;\
12518 flag_trapv = 0;\
12519 folding_initializer = 1;
12521 #define END_FOLD_INIT \
12522 flag_signaling_nans = saved_signaling_nans;\
12523 flag_trapping_math = saved_trapping_math;\
12524 flag_rounding_math = saved_rounding_math;\
12525 flag_trapv = saved_trapv;\
12526 folding_initializer = saved_folding_initializer;
12528 tree
12529 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12530 tree type, tree op)
12532 tree result;
12533 START_FOLD_INIT;
12535 result = fold_build1_loc (loc, code, type, op);
12537 END_FOLD_INIT;
12538 return result;
12541 tree
12542 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12543 tree type, tree op0, tree op1)
12545 tree result;
12546 START_FOLD_INIT;
12548 result = fold_build2_loc (loc, code, type, op0, op1);
12550 END_FOLD_INIT;
12551 return result;
12554 tree
12555 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12556 int nargs, tree *argarray)
12558 tree result;
12559 START_FOLD_INIT;
12561 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12563 END_FOLD_INIT;
12564 return result;
12567 #undef START_FOLD_INIT
12568 #undef END_FOLD_INIT
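/* A minimal illustration of why the initializer variants exist,
   assuming a static initializer like

     static const double d = 1.0 / 3.0;

   The result must be computed at compile time, yet with
   -frounding-math plain fold_build2_loc would refuse to fold the
   inexact division. fold_build2_initializer_loc temporarily clears
   flag_rounding_math and the trap-related flags via START_FOLD_INIT,
   folds, and then restores them with END_FOLD_INIT, so the constant
   is still produced.  */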
12570 /* Determine if first argument is a multiple of second argument. Return 0 if
12571 it is not, or we cannot easily determine it to be.
12573 An example of the sort of thing we care about (at this point; this routine
12574 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12575 fold cases do now) is discovering that
12577 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12579 is a multiple of
12581 SAVE_EXPR (J * 8)
12583 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12585 This code also handles discovering that
12587 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12589 is a multiple of 8 so we don't have to worry about dealing with a
12590 possible remainder.
12592 Note that we *look* inside a SAVE_EXPR only to determine how it was
12593 calculated; it is not safe for fold to do much of anything else with the
12594 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12595 at run time. For example, the latter example above *cannot* be implemented
12596 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12597 evaluation time of the original SAVE_EXPR is not necessarily the same at
12598 the time the new expression is evaluated. The only optimization of this
12599 sort that would be valid is changing
12601 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12603 divided by 8 to
12605 SAVE_EXPR (I) * SAVE_EXPR (J)
12607 (where the same SAVE_EXPR (J) is used in the original and the
12608 transformed version). */
12610 int
12611 multiple_of_p (tree type, const_tree top, const_tree bottom)
12613 if (operand_equal_p (top, bottom, 0))
12614 return 1;
12616 if (TREE_CODE (type) != INTEGER_TYPE)
12617 return 0;
12619 switch (TREE_CODE (top))
12621 case BIT_AND_EXPR:
12622 /* Bitwise and provides a power of two multiple. If the mask is
12623 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12624 if (!integer_pow2p (bottom))
12625 return 0;
12626 /* FALLTHRU */
12628 case MULT_EXPR:
12629 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12630 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12632 case PLUS_EXPR:
12633 case MINUS_EXPR:
12634 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12635 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12637 case LSHIFT_EXPR:
12638 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12640 tree op1, t1;
12642 op1 = TREE_OPERAND (top, 1);
12643 /* const_binop may not detect overflow correctly,
12644 so check for it explicitly here. */
12645 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12646 && 0 != (t1 = fold_convert (type,
12647 const_binop (LSHIFT_EXPR,
12648 size_one_node,
12649 op1)))
12650 && !TREE_OVERFLOW (t1))
12651 return multiple_of_p (type, t1, bottom);
12653 return 0;
12655 case NOP_EXPR:
12656 /* Can't handle conversions from non-integral or wider integral type. */
12657 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12658 || (TYPE_PRECISION (type)
12659 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12660 return 0;
12662 /* .. fall through ... */
12664 case SAVE_EXPR:
12665 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12667 case COND_EXPR:
12668 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12669 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12671 case INTEGER_CST:
12672 if (TREE_CODE (bottom) != INTEGER_CST
12673 || integer_zerop (bottom)
12674 || (TYPE_UNSIGNED (type)
12675 && (tree_int_cst_sgn (top) < 0
12676 || tree_int_cst_sgn (bottom) < 0)))
12677 return 0;
12678 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12679 SIGNED);
12681 default:
12682 return 0;
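/* Worked example, with I a hypothetical signed variable:

     multiple_of_p (type, i * 8 + 16, 8)

   The PLUS_EXPR case requires both operands to be multiples of 8: the
   MULT_EXPR case accepts i * 8 because its second factor equals
   BOTTOM, and the INTEGER_CST case accepts 16 via wi::multiple_of_p,
   so the call returns 1.  */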
12686 #define tree_expr_nonnegative_warnv_p(X, Y) \
12687 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12689 #define RECURSE(X) \
12690 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12692 /* Return true if CODE or TYPE is known to be non-negative. */
12694 static bool
12695 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12697 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12698 && truth_value_p (code))
12699 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12700 have a signed:1 type (where the values are -1 and 0). */
12701 return true;
12702 return false;
12705 /* Return true if (CODE OP0) is known to be non-negative. If the return
12706 value is based on the assumption that signed overflow is undefined,
12707 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12708 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12710 bool
12711 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12712 bool *strict_overflow_p, int depth)
12714 if (TYPE_UNSIGNED (type))
12715 return true;
12717 switch (code)
12719 case ABS_EXPR:
12720 /* We can't return 1 if flag_wrapv is set because
12721 ABS_EXPR<INT_MIN> = INT_MIN. */
12722 if (!ANY_INTEGRAL_TYPE_P (type))
12723 return true;
12724 if (TYPE_OVERFLOW_UNDEFINED (type))
12726 *strict_overflow_p = true;
12727 return true;
12729 break;
12731 case NON_LVALUE_EXPR:
12732 case FLOAT_EXPR:
12733 case FIX_TRUNC_EXPR:
12734 return RECURSE (op0);
12736 CASE_CONVERT:
12738 tree inner_type = TREE_TYPE (op0);
12739 tree outer_type = type;
12741 if (TREE_CODE (outer_type) == REAL_TYPE)
12743 if (TREE_CODE (inner_type) == REAL_TYPE)
12744 return RECURSE (op0);
12745 if (INTEGRAL_TYPE_P (inner_type))
12747 if (TYPE_UNSIGNED (inner_type))
12748 return true;
12749 return RECURSE (op0);
12752 else if (INTEGRAL_TYPE_P (outer_type))
12754 if (TREE_CODE (inner_type) == REAL_TYPE)
12755 return RECURSE (op0);
12756 if (INTEGRAL_TYPE_P (inner_type))
12757 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12758 && TYPE_UNSIGNED (inner_type);
12761 break;
12763 default:
12764 return tree_simple_nonnegative_warnv_p (code, type);
12767 /* We don't know sign of `t', so be conservative and return false. */
12768 return false;
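/* An example of the ABS_EXPR caveat above, assuming a signed int I:
   ABS_EXPR <I> is only known non-negative when signed overflow is
   undefined, because with -fwrapv ABS_EXPR <INT_MIN> wraps back to
   INT_MIN. In the undefined-overflow case the function answers true
   but records the assumption in *STRICT_OVERFLOW_P; for
   floating-point types it answers true unconditionally.  */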
12771 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12772 value is based on the assumption that signed overflow is undefined,
12773 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12774 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12776 bool
12777 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12778 tree op1, bool *strict_overflow_p,
12779 int depth)
12781 if (TYPE_UNSIGNED (type))
12782 return true;
12784 switch (code)
12786 case POINTER_PLUS_EXPR:
12787 case PLUS_EXPR:
12788 if (FLOAT_TYPE_P (type))
12789 return RECURSE (op0) && RECURSE (op1);
12791 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12792 both unsigned and at least 2 bits shorter than the result. */
12793 if (TREE_CODE (type) == INTEGER_TYPE
12794 && TREE_CODE (op0) == NOP_EXPR
12795 && TREE_CODE (op1) == NOP_EXPR)
12797 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12798 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12799 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12800 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12802 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12803 TYPE_PRECISION (inner2)) + 1;
12804 return prec < TYPE_PRECISION (type);
12807 break;
12809 case MULT_EXPR:
12810 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12812 /* x * x is always non-negative for floating point x,
12813 and for integers when overflow is undefined. */
12814 if (operand_equal_p (op0, op1, 0)
12815 || (RECURSE (op0) && RECURSE (op1)))
12817 if (ANY_INTEGRAL_TYPE_P (type)
12818 && TYPE_OVERFLOW_UNDEFINED (type))
12819 *strict_overflow_p = true;
12820 return true;
12824 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12825 both unsigned and their total precision is smaller than the result's. */
12826 if (TREE_CODE (type) == INTEGER_TYPE
12827 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12828 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12830 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12831 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12832 : TREE_TYPE (op0);
12833 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12834 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12835 : TREE_TYPE (op1);
12837 bool unsigned0 = TYPE_UNSIGNED (inner0);
12838 bool unsigned1 = TYPE_UNSIGNED (inner1);
12840 if (TREE_CODE (op0) == INTEGER_CST)
12841 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12843 if (TREE_CODE (op1) == INTEGER_CST)
12844 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12846 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12847 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12849 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12850 ? tree_int_cst_min_precision (op0, UNSIGNED)
12851 : TYPE_PRECISION (inner0);
12853 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12854 ? tree_int_cst_min_precision (op1, UNSIGNED)
12855 : TYPE_PRECISION (inner1);
12857 return precision0 + precision1 < TYPE_PRECISION (type);
12860 return false;
12862 case BIT_AND_EXPR:
12863 case MAX_EXPR:
12864 return RECURSE (op0) || RECURSE (op1);
12866 case BIT_IOR_EXPR:
12867 case BIT_XOR_EXPR:
12868 case MIN_EXPR:
12869 case RDIV_EXPR:
12870 case TRUNC_DIV_EXPR:
12871 case CEIL_DIV_EXPR:
12872 case FLOOR_DIV_EXPR:
12873 case ROUND_DIV_EXPR:
12874 return RECURSE (op0) && RECURSE (op1);
12876 case TRUNC_MOD_EXPR:
12877 return RECURSE (op0);
12879 case FLOOR_MOD_EXPR:
12880 return RECURSE (op1);
12882 case CEIL_MOD_EXPR:
12883 case ROUND_MOD_EXPR:
12884 default:
12885 return tree_simple_nonnegative_warnv_p (code, type);
12888 /* We don't know sign of `t', so be conservative and return false. */
12889 return false;
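/* Worked example for the zero_extend cases above: let A and B be
   hypothetical 8-bit unsigned values widened to 32-bit int. Then

     (int) a + (int) b

   needs at most MAX (8, 8) + 1 = 9 bits and 9 < 32, so the PLUS_EXPR
   case answers true, while (int) a * (int) b needs at most
   8 + 8 = 16 bits and 16 < 32, so the MULT_EXPR case answers true as
   well.  */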
12892 /* Return true if T is known to be non-negative. If the return
12893 value is based on the assumption that signed overflow is undefined,
12894 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12895 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12897 bool
12898 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12900 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12901 return true;
12903 switch (TREE_CODE (t))
12905 case INTEGER_CST:
12906 return tree_int_cst_sgn (t) >= 0;
12908 case REAL_CST:
12909 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12911 case FIXED_CST:
12912 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12914 case COND_EXPR:
12915 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12917 case SSA_NAME:
12918 /* Limit the depth of recursion to avoid quadratic behavior.
12919 This is expected to catch almost all occurrences in practice.
12920 If this code misses important cases that unbounded recursion
12921 would not, passes that need this information could be revised
12922 to provide it through dataflow propagation. */
12923 return (!name_registered_for_update_p (t)
12924 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12925 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12926 strict_overflow_p, depth));
12928 default:
12929 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12933 /* Return true if T is known to be non-negative. If the return
12934 value is based on the assumption that signed overflow is undefined,
12935 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12936 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12938 bool
12939 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
12940 bool *strict_overflow_p, int depth)
12942 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12943 switch (DECL_FUNCTION_CODE (fndecl))
12945 CASE_FLT_FN (BUILT_IN_ACOS):
12946 CASE_FLT_FN (BUILT_IN_ACOSH):
12947 CASE_FLT_FN (BUILT_IN_CABS):
12948 CASE_FLT_FN (BUILT_IN_COSH):
12949 CASE_FLT_FN (BUILT_IN_ERFC):
12950 CASE_FLT_FN (BUILT_IN_EXP):
12951 CASE_FLT_FN (BUILT_IN_EXP10):
12952 CASE_FLT_FN (BUILT_IN_EXP2):
12953 CASE_FLT_FN (BUILT_IN_FABS):
12954 CASE_FLT_FN (BUILT_IN_FDIM):
12955 CASE_FLT_FN (BUILT_IN_HYPOT):
12956 CASE_FLT_FN (BUILT_IN_POW10):
12957 CASE_INT_FN (BUILT_IN_FFS):
12958 CASE_INT_FN (BUILT_IN_PARITY):
12959 CASE_INT_FN (BUILT_IN_POPCOUNT):
12960 CASE_INT_FN (BUILT_IN_CLZ):
12961 CASE_INT_FN (BUILT_IN_CLRSB):
12962 case BUILT_IN_BSWAP32:
12963 case BUILT_IN_BSWAP64:
12964 /* Always true. */
12965 return true;
12967 CASE_FLT_FN (BUILT_IN_SQRT):
12968 /* sqrt(-0.0) is -0.0. */
12969 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12970 return true;
12971 return RECURSE (arg0);
12973 CASE_FLT_FN (BUILT_IN_ASINH):
12974 CASE_FLT_FN (BUILT_IN_ATAN):
12975 CASE_FLT_FN (BUILT_IN_ATANH):
12976 CASE_FLT_FN (BUILT_IN_CBRT):
12977 CASE_FLT_FN (BUILT_IN_CEIL):
12978 CASE_FLT_FN (BUILT_IN_ERF):
12979 CASE_FLT_FN (BUILT_IN_EXPM1):
12980 CASE_FLT_FN (BUILT_IN_FLOOR):
12981 CASE_FLT_FN (BUILT_IN_FMOD):
12982 CASE_FLT_FN (BUILT_IN_FREXP):
12983 CASE_FLT_FN (BUILT_IN_ICEIL):
12984 CASE_FLT_FN (BUILT_IN_IFLOOR):
12985 CASE_FLT_FN (BUILT_IN_IRINT):
12986 CASE_FLT_FN (BUILT_IN_IROUND):
12987 CASE_FLT_FN (BUILT_IN_LCEIL):
12988 CASE_FLT_FN (BUILT_IN_LDEXP):
12989 CASE_FLT_FN (BUILT_IN_LFLOOR):
12990 CASE_FLT_FN (BUILT_IN_LLCEIL):
12991 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12992 CASE_FLT_FN (BUILT_IN_LLRINT):
12993 CASE_FLT_FN (BUILT_IN_LLROUND):
12994 CASE_FLT_FN (BUILT_IN_LRINT):
12995 CASE_FLT_FN (BUILT_IN_LROUND):
12996 CASE_FLT_FN (BUILT_IN_MODF):
12997 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12998 CASE_FLT_FN (BUILT_IN_RINT):
12999 CASE_FLT_FN (BUILT_IN_ROUND):
13000 CASE_FLT_FN (BUILT_IN_SCALB):
13001 CASE_FLT_FN (BUILT_IN_SCALBLN):
13002 CASE_FLT_FN (BUILT_IN_SCALBN):
13003 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13004 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13005 CASE_FLT_FN (BUILT_IN_SINH):
13006 CASE_FLT_FN (BUILT_IN_TANH):
13007 CASE_FLT_FN (BUILT_IN_TRUNC):
13008 /* True if the 1st argument is nonnegative. */
13009 return RECURSE (arg0);
13011 CASE_FLT_FN (BUILT_IN_FMAX):
13012 /* True if the 1st OR 2nd arguments are nonnegative. */
13013 return RECURSE (arg0) || RECURSE (arg1);
13015 CASE_FLT_FN (BUILT_IN_FMIN):
13016 /* True if the 1st AND 2nd arguments are nonnegative. */
13017 return RECURSE (arg0) && RECURSE (arg1);
13019 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13020 /* True if the 2nd argument is nonnegative. */
13021 return RECURSE (arg1);
13023 CASE_FLT_FN (BUILT_IN_POWI):
13024 /* True if the 1st argument is nonnegative or the second
13025 argument is an even integer. */
13026 if (TREE_CODE (arg1) == INTEGER_CST
13027 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13028 return true;
13029 return RECURSE (arg0);
13031 CASE_FLT_FN (BUILT_IN_POW):
13032 /* True if the 1st argument is nonnegative or the second
13033 argument is an even integer valued real. */
13034 if (TREE_CODE (arg1) == REAL_CST)
13036 REAL_VALUE_TYPE c;
13037 HOST_WIDE_INT n;
13039 c = TREE_REAL_CST (arg1);
13040 n = real_to_integer (&c);
13041 if ((n & 1) == 0)
13043 REAL_VALUE_TYPE cint;
13044 real_from_integer (&cint, VOIDmode, n, SIGNED);
13045 if (real_identical (&c, &cint))
13046 return true;
13049 return RECURSE (arg0);
13051 default:
13052 break;
13054 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
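/* An example for the POW case above, with X hypothetical:
   pow (x, 2.0) is known non-negative for any x, because 2.0 is an
   even integer-valued REAL_CST: real_to_integer yields 2, its low
   bit is clear, and real_from_integer reproduces the constant
   exactly, so the function returns true without recursing into the
   first argument.  */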
13057 /* Return true if T is known to be non-negative. If the return
13058 value is based on the assumption that signed overflow is undefined,
13059 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13060 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13062 static bool
13063 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13065 enum tree_code code = TREE_CODE (t);
13066 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13067 return true;
13069 switch (code)
13071 case TARGET_EXPR:
13073 tree temp = TARGET_EXPR_SLOT (t);
13074 t = TARGET_EXPR_INITIAL (t);
13076 /* If the initializer is non-void, then it's a normal expression
13077 that will be assigned to the slot. */
13078 if (!VOID_TYPE_P (t))
13079 return RECURSE (t);
13081 /* Otherwise, the initializer sets the slot in some way. One common
13082 way is an assignment statement at the end of the initializer. */
13083 while (1)
13085 if (TREE_CODE (t) == BIND_EXPR)
13086 t = expr_last (BIND_EXPR_BODY (t));
13087 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13088 || TREE_CODE (t) == TRY_CATCH_EXPR)
13089 t = expr_last (TREE_OPERAND (t, 0));
13090 else if (TREE_CODE (t) == STATEMENT_LIST)
13091 t = expr_last (t);
13092 else
13093 break;
13095 if (TREE_CODE (t) == MODIFY_EXPR
13096 && TREE_OPERAND (t, 0) == temp)
13097 return RECURSE (TREE_OPERAND (t, 1));
13099 return false;
13102 case CALL_EXPR:
13104 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13105 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13107 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13108 get_callee_fndecl (t),
13109 arg0,
13110 arg1,
13111 strict_overflow_p, depth);
13113 case COMPOUND_EXPR:
13114 case MODIFY_EXPR:
13115 return RECURSE (TREE_OPERAND (t, 1));
13117 case BIND_EXPR:
13118 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13120 case SAVE_EXPR:
13121 return RECURSE (TREE_OPERAND (t, 0));
13123 default:
13124 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13128 #undef RECURSE
13129 #undef tree_expr_nonnegative_warnv_p
13131 /* Return true if T is known to be non-negative. If the return
13132 value is based on the assumption that signed overflow is undefined,
13133 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13134 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13136 bool
13137 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13139 enum tree_code code;
13140 if (t == error_mark_node)
13141 return false;
13143 code = TREE_CODE (t);
13144 switch (TREE_CODE_CLASS (code))
13146 case tcc_binary:
13147 case tcc_comparison:
13148 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13149 TREE_TYPE (t),
13150 TREE_OPERAND (t, 0),
13151 TREE_OPERAND (t, 1),
13152 strict_overflow_p, depth);
13154 case tcc_unary:
13155 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13156 TREE_TYPE (t),
13157 TREE_OPERAND (t, 0),
13158 strict_overflow_p, depth);
13160 case tcc_constant:
13161 case tcc_declaration:
13162 case tcc_reference:
13163 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13165 default:
13166 break;
13169 switch (code)
13171 case TRUTH_AND_EXPR:
13172 case TRUTH_OR_EXPR:
13173 case TRUTH_XOR_EXPR:
13174 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13175 TREE_TYPE (t),
13176 TREE_OPERAND (t, 0),
13177 TREE_OPERAND (t, 1),
13178 strict_overflow_p, depth);
13179 case TRUTH_NOT_EXPR:
13180 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13181 TREE_TYPE (t),
13182 TREE_OPERAND (t, 0),
13183 strict_overflow_p, depth);
13185 case COND_EXPR:
13186 case CONSTRUCTOR:
13187 case OBJ_TYPE_REF:
13188 case ASSERT_EXPR:
13189 case ADDR_EXPR:
13190 case WITH_SIZE_EXPR:
13191 case SSA_NAME:
13192 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13194 default:
13195 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13199 /* Return true if `t' is known to be non-negative. Handle warnings
13200 about undefined signed overflow. */
13202 bool
13203 tree_expr_nonnegative_p (tree t)
13205 bool ret, strict_overflow_p;
13207 strict_overflow_p = false;
13208 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13209 if (strict_overflow_p)
13210 fold_overflow_warning (("assuming signed overflow does not occur when "
13211 "determining that expression is always "
13212 "non-negative"),
13213 WARN_STRICT_OVERFLOW_MISC);
13214 return ret;
13218 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13219 For floating point we further ensure that T is not denormal.
13220 Similar logic is present in nonzero_address in rtlanal.h.
13222 If the return value is based on the assumption that signed overflow
13223 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13224 change *STRICT_OVERFLOW_P. */
13226 bool
13227 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13228 bool *strict_overflow_p)
13230 switch (code)
13232 case ABS_EXPR:
13233 return tree_expr_nonzero_warnv_p (op0,
13234 strict_overflow_p);
13236 case NOP_EXPR:
13238 tree inner_type = TREE_TYPE (op0);
13239 tree outer_type = type;
13241 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13242 && tree_expr_nonzero_warnv_p (op0,
13243 strict_overflow_p));
13245 break;
13247 case NON_LVALUE_EXPR:
13248 return tree_expr_nonzero_warnv_p (op0,
13249 strict_overflow_p);
13251 default:
13252 break;
13255 return false;
13258 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13259 For floating point we further ensure that T is not denormal.
13260 Similar logic is present in nonzero_address in rtlanal.h.
13262 If the return value is based on the assumption that signed overflow
13263 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13264 change *STRICT_OVERFLOW_P. */
13266 bool
13267 tree_binary_nonzero_warnv_p (enum tree_code code,
13268 tree type,
13269 tree op0,
13270 tree op1, bool *strict_overflow_p)
13272 bool sub_strict_overflow_p;
13273 switch (code)
13275 case POINTER_PLUS_EXPR:
13276 case PLUS_EXPR:
13277 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13279 /* In the presence of negative values it is hard
13280 to say anything definite. */
13281 sub_strict_overflow_p = false;
13282 if (!tree_expr_nonnegative_warnv_p (op0,
13283 &sub_strict_overflow_p)
13284 || !tree_expr_nonnegative_warnv_p (op1,
13285 &sub_strict_overflow_p))
13286 return false;
13287 /* One of the operands must be positive and the other non-negative. */
13288 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13289 overflows, on a twos-complement machine the sum of two
13290 nonnegative numbers can never be zero. */
13291 return (tree_expr_nonzero_warnv_p (op0,
13292 strict_overflow_p)
13293 || tree_expr_nonzero_warnv_p (op1,
13294 strict_overflow_p));
13296 break;
13298 case MULT_EXPR:
13299 if (TYPE_OVERFLOW_UNDEFINED (type))
13301 if (tree_expr_nonzero_warnv_p (op0,
13302 strict_overflow_p)
13303 && tree_expr_nonzero_warnv_p (op1,
13304 strict_overflow_p))
13306 *strict_overflow_p = true;
13307 return true;
13310 break;
13312 case MIN_EXPR:
13313 sub_strict_overflow_p = false;
13314 if (tree_expr_nonzero_warnv_p (op0,
13315 &sub_strict_overflow_p)
13316 && tree_expr_nonzero_warnv_p (op1,
13317 &sub_strict_overflow_p))
13319 if (sub_strict_overflow_p)
13320 *strict_overflow_p = true;
13322 break;
13324 case MAX_EXPR:
13325 sub_strict_overflow_p = false;
13326 if (tree_expr_nonzero_warnv_p (op0,
13327 &sub_strict_overflow_p))
13329 if (sub_strict_overflow_p)
13330 *strict_overflow_p = true;
13332 /* When both operands are nonzero, then MAX must be too. */
13333 if (tree_expr_nonzero_warnv_p (op1,
13334 strict_overflow_p))
13335 return true;
13337 /* MAX where operand 0 is positive is positive. */
13338 return tree_expr_nonnegative_warnv_p (op0,
13339 strict_overflow_p);
13341 /* MAX where operand 1 is positive is positive. */
13342 else if (tree_expr_nonzero_warnv_p (op1,
13343 &sub_strict_overflow_p)
13344 && tree_expr_nonnegative_warnv_p (op1,
13345 &sub_strict_overflow_p))
13347 if (sub_strict_overflow_p)
13348 *strict_overflow_p = true;
13349 return true;
13351 break;
13353 case BIT_IOR_EXPR:
13354 return (tree_expr_nonzero_warnv_p (op1,
13355 strict_overflow_p)
13356 || tree_expr_nonzero_warnv_p (op0,
13357 strict_overflow_p));
13359 default:
13360 break;
13363 return false;
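/* A sketch of the PLUS_EXPR case above, assuming signed ints X and Y
   whose type has undefined overflow: if both are known non-negative
   and at least one is known nonzero, then X + Y cannot be zero, so
   the sum is reported nonzero. Under -fwrapv the
   ANY_INTEGRAL_TYPE_P/TYPE_OVERFLOW_UNDEFINED guard fails and the
   function conservatively returns false.  */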
13366 /* Return true when T is an address and is known to be nonzero.
13367 For floating point we further ensure that T is not denormal.
13368 Similar logic is present in nonzero_address in rtlanal.h.
13370 If the return value is based on the assumption that signed overflow
13371 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13372 change *STRICT_OVERFLOW_P. */
13374 bool
13375 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13377 bool sub_strict_overflow_p;
13378 switch (TREE_CODE (t))
13380 case INTEGER_CST:
13381 return !integer_zerop (t);
13383 case ADDR_EXPR:
13385 tree base = TREE_OPERAND (t, 0);
13387 if (!DECL_P (base))
13388 base = get_base_address (base);
13390 if (!base)
13391 return false;
13393 /* For objects in the symbol table, check whether we know they are non-zero.
13394 Don't do anything for variables and functions before symtab is built;
13395 it is quite possible that they will be declared weak later. */
13396 if (DECL_P (base) && decl_in_symtab_p (base))
13398 struct symtab_node *symbol;
13400 symbol = symtab_node::get_create (base);
13401 if (symbol)
13402 return symbol->nonzero_address ();
13403 else
13404 return false;
13407 /* Function local objects are never NULL. */
13408 if (DECL_P (base)
13409 && (DECL_CONTEXT (base)
13410 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13411 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13412 return true;
13414 /* Constants are never weak. */
13415 if (CONSTANT_CLASS_P (base))
13416 return true;
13418 return false;
13421 case COND_EXPR:
13422 sub_strict_overflow_p = false;
13423 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13424 &sub_strict_overflow_p)
13425 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13426 &sub_strict_overflow_p))
13428 if (sub_strict_overflow_p)
13429 *strict_overflow_p = true;
13430 return true;
13432 break;
13434 default:
13435 break;
13437 return false;
13440 #define integer_valued_real_p(X) \
13441 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13443 #define RECURSE(X) \
13444 ((integer_valued_real_p) (X, depth + 1))
13446 /* Return true if the floating point result of (CODE OP0) has an
13447 integer value. We also allow +Inf, -Inf and NaN to be considered
13448 integer values.
13450 DEPTH is the current nesting depth of the query. */
13452 bool
13453 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13455 switch (code)
13457 case FLOAT_EXPR:
13458 return true;
13460 case ABS_EXPR:
13461 return RECURSE (op0);
13463 CASE_CONVERT:
13465 tree type = TREE_TYPE (op0);
13466 if (TREE_CODE (type) == INTEGER_TYPE)
13467 return true;
13468 if (TREE_CODE (type) == REAL_TYPE)
13469 return RECURSE (op0);
13470 break;
13473 default:
13474 break;
13476 return false;
13479 /* Return true if the floating point result of (CODE OP0 OP1) has an
13480 integer value. We also allow +Inf, -Inf and NaN to be considered
13481 integer values.
13483 DEPTH is the current nesting depth of the query. */
13485 bool
13486 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13488 switch (code)
13490 case PLUS_EXPR:
13491 case MINUS_EXPR:
13492 case MULT_EXPR:
13493 case MIN_EXPR:
13494 case MAX_EXPR:
13495 return RECURSE (op0) && RECURSE (op1);
13497 default:
13498 break;
13500 return false;
13503 /* Return true if the floating point result of calling FNDECL with arguments
13504 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13505 considered integer values. If FNDECL takes fewer than 2 arguments,
13506 the remaining ARGn are null.
13508 DEPTH is the current nesting depth of the query. */
13510 bool
13511 integer_valued_real_call_p (tree fndecl, tree arg0, tree arg1, int depth)
13513 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13514 switch (DECL_FUNCTION_CODE (fndecl))
13516 CASE_FLT_FN (BUILT_IN_CEIL):
13517 CASE_FLT_FN (BUILT_IN_FLOOR):
13518 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13519 CASE_FLT_FN (BUILT_IN_RINT):
13520 CASE_FLT_FN (BUILT_IN_ROUND):
13521 CASE_FLT_FN (BUILT_IN_TRUNC):
13522 return true;
13524 CASE_FLT_FN (BUILT_IN_FMIN):
13525 CASE_FLT_FN (BUILT_IN_FMAX):
13526 return RECURSE (arg0) && RECURSE (arg1);
13528 default:
13529 break;
13531 return false;
13534 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13535 has an integer value. We also allow +Inf, -Inf and NaN to be
13536 considered integer values.
13538 DEPTH is the current nesting depth of the query. */
13540 bool
13541 integer_valued_real_single_p (tree t, int depth)
13543 switch (TREE_CODE (t))
13545 case REAL_CST:
13546 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13548 case COND_EXPR:
13549 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13551 case SSA_NAME:
13552 /* Limit the depth of recursion to avoid quadratic behavior.
13553 This is expected to catch almost all occurrences in practice.
13554 If this code misses important cases that unbounded recursion
13555 would not, passes that need this information could be revised
13556 to provide it through dataflow propagation. */
13557 return (!name_registered_for_update_p (t)
13558 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13559 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13560 depth));
13562 default:
13563 break;
13565 return false;
13568 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13569 has an integer value. We also allow +Inf, -Inf and NaN to be
13570 considered integer values.
13572 DEPTH is the current nesting depth of the query. */
13574 static bool
13575 integer_valued_real_invalid_p (tree t, int depth)
13577 switch (TREE_CODE (t))
13579 case COMPOUND_EXPR:
13580 case MODIFY_EXPR:
13581 case BIND_EXPR:
13582 return RECURSE (TREE_OPERAND (t, 1));
13584 case SAVE_EXPR:
13585 return RECURSE (TREE_OPERAND (t, 0));
13587 default:
13588 break;
13590 return false;
13593 #undef RECURSE
13594 #undef integer_valued_real_p
13596 /* Return true if the floating point expression T has an integer value.
13597 We also allow +Inf, -Inf and NaN to be considered integer values.
13599 DEPTH is the current nesting depth of the query. */
13601 bool
13602 integer_valued_real_p (tree t, int depth)
13604 if (t == error_mark_node)
13605 return false;
13607 tree_code code = TREE_CODE (t);
13608 switch (TREE_CODE_CLASS (code))
13610 case tcc_binary:
13611 case tcc_comparison:
13612 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13613 TREE_OPERAND (t, 1), depth);
13615 case tcc_unary:
13616 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13618 case tcc_constant:
13619 case tcc_declaration:
13620 case tcc_reference:
13621 return integer_valued_real_single_p (t, depth);
13623 default:
13624 break;
13627 switch (code)
13629 case COND_EXPR:
13630 case SSA_NAME:
13631 return integer_valued_real_single_p (t, depth);
13633 case CALL_EXPR:
13635 tree arg0 = (call_expr_nargs (t) > 0
13636 ? CALL_EXPR_ARG (t, 0)
13637 : NULL_TREE);
13638 tree arg1 = (call_expr_nargs (t) > 1
13639 ? CALL_EXPR_ARG (t, 1)
13640 : NULL_TREE);
13641 return integer_valued_real_call_p (get_callee_fndecl (t),
13642 arg0, arg1, depth);
13645 default:
13646 return integer_valued_real_invalid_p (t, depth);
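/* An example of the dispatch above, for the hypothetical tree

     (double) i + trunc (x)

   The PLUS_EXPR reaches integer_valued_real_binary_p, which recurses
   into both operands; the FLOAT_EXPR conversion of I is integral by
   construction and the trunc call is recognized by
   integer_valued_real_call_p, so the whole expression is known to be
   integer valued (allowing +Inf, -Inf and NaN).  */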
13650 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13651 attempt to fold the expression to a constant without modifying TYPE,
13652 OP0 or OP1.
13654 If the expression could be simplified to a constant, then return
13655 the constant. If the expression would not be simplified to a
13656 constant, then return NULL_TREE. */
13658 tree
13659 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13661 tree tem = fold_binary (code, type, op0, op1);
13662 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13665 /* Given the components of a unary expression CODE, TYPE and OP0,
13666 attempt to fold the expression to a constant without modifying
13667 TYPE or OP0.
13669 If the expression could be simplified to a constant, then return
13670 the constant. If the expression would not be simplified to a
13671 constant, then return NULL_TREE. */
13673 tree
13674 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13676 tree tem = fold_unary (code, type, op0);
13677 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13680 /* If EXP represents referencing an element in a constant string
13681 (either via pointer arithmetic or array indexing), return the
13682 tree representing the value accessed, otherwise return NULL. */
13684 tree
13685 fold_read_from_constant_string (tree exp)
13687 if ((TREE_CODE (exp) == INDIRECT_REF
13688 || TREE_CODE (exp) == ARRAY_REF)
13689 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13691 tree exp1 = TREE_OPERAND (exp, 0);
13692 tree index;
13693 tree string;
13694 location_t loc = EXPR_LOCATION (exp);
13696 if (TREE_CODE (exp) == INDIRECT_REF)
13697 string = string_constant (exp1, &index);
13698 else
13700 tree low_bound = array_ref_low_bound (exp);
13701 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13703 /* Optimize the special case of a zero lower bound.
13705 We convert the low_bound to sizetype to avoid some problems
13706 with constant folding. (E.g. suppose the lower bound is 1,
13707 and its mode is QI. Without the conversion, (ARRAY
13708 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13709 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13710 if (! integer_zerop (low_bound))
13711 index = size_diffop_loc (loc, index,
13712 fold_convert_loc (loc, sizetype, low_bound));
13714 string = exp1;
13717 if (string
13718 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13719 && TREE_CODE (string) == STRING_CST
13720 && TREE_CODE (index) == INTEGER_CST
13721 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13722 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13723 == MODE_INT)
13724 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13725 return build_int_cst_type (TREE_TYPE (exp),
13726 (TREE_STRING_POINTER (string)
13727 [TREE_INT_CST_LOW (index)]));
13729 return NULL;
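/* Worked example: for the expression "abc"[1], the ARRAY_REF arm
   above finds STRING, the STRING_CST "abc" (TREE_STRING_LENGTH 4,
   counting the terminating NUL), with INDEX 1; the index is in range
   and the element mode is a one-byte integer mode, so the function
   returns build_int_cst_type (TREE_TYPE (exp), 'b').  */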
13732 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13733 an integer constant, real, or fixed-point constant.
13735 TYPE is the type of the result. */
13737 static tree
13738 fold_negate_const (tree arg0, tree type)
13740 tree t = NULL_TREE;
13742 switch (TREE_CODE (arg0))
13744 case INTEGER_CST:
13746 bool overflow;
13747 wide_int val = wi::neg (arg0, &overflow);
13748 t = force_fit_type (type, val, 1,
13749 (overflow | TREE_OVERFLOW (arg0))
13750 && !TYPE_UNSIGNED (type));
13751 break;
13754 case REAL_CST:
13755 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13756 break;
13758 case FIXED_CST:
13760 FIXED_VALUE_TYPE f;
13761 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13762 &(TREE_FIXED_CST (arg0)), NULL,
13763 TYPE_SATURATING (type));
13764 t = build_fixed (type, f);
13765 /* Propagate overflow flags. */
13766 if (overflow_p | TREE_OVERFLOW (arg0))
13767 TREE_OVERFLOW (t) = 1;
13768 break;
13771 default:
13772 gcc_unreachable ();
13775 return t;
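/* A caveat worth illustrating: fold_negate_const of the INTEGER_CST 5
   simply yields -5, but negating INT_MIN in a signed 32-bit type
   wraps back to INT_MIN, so wi::neg reports overflow and
   force_fit_type marks the resulting constant with TREE_OVERFLOW.  */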
13778 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13779 an integer constant or real constant.
13781 TYPE is the type of the result. */
13783 tree
13784 fold_abs_const (tree arg0, tree type)
13786 tree t = NULL_TREE;
13788 switch (TREE_CODE (arg0))
13790 case INTEGER_CST:
13792 /* If the value is unsigned or non-negative, then the absolute value
13793 is the same as the ordinary value. */
13794 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13795 t = arg0;
13797 /* If the value is negative, then the absolute value is
13798 its negation. */
13799 else
13801 bool overflow;
13802 wide_int val = wi::neg (arg0, &overflow);
13803 t = force_fit_type (type, val, -1,
13804 overflow | TREE_OVERFLOW (arg0));
13807 break;
13809 case REAL_CST:
13810 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13811 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13812 else
13813 t = arg0;
13814 break;
13816 default:
13817 gcc_unreachable ();
13820 return t;
13823 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13824 constant. TYPE is the type of the result. */
13826 static tree
13827 fold_not_const (const_tree arg0, tree type)
13829 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13831 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13834 /* Given CODE, a relational operator, the target type, TYPE and two
13835 constant operands OP0 and OP1, return the result of the
13836 relational operation. If the result is not a compile time
13837 constant, then return NULL_TREE. */
13839 static tree
13840 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13842 int result, invert;
13844 /* From here on, the only cases we handle are when the result is
13845 known to be a constant. */
13847 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13849 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13850 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13852 /* Handle the cases where either operand is a NaN. */
13853 if (real_isnan (c0) || real_isnan (c1))
13855 switch (code)
13857 case EQ_EXPR:
13858 case ORDERED_EXPR:
13859 result = 0;
13860 break;
13862 case NE_EXPR:
13863 case UNORDERED_EXPR:
13864 case UNLT_EXPR:
13865 case UNLE_EXPR:
13866 case UNGT_EXPR:
13867 case UNGE_EXPR:
13868 case UNEQ_EXPR:
13869 result = 1;
13870 break;
13872 case LT_EXPR:
13873 case LE_EXPR:
13874 case GT_EXPR:
13875 case GE_EXPR:
13876 case LTGT_EXPR:
13877 if (flag_trapping_math)
13878 return NULL_TREE;
13879 result = 0;
13880 break;
13882 default:
13883 gcc_unreachable ();
13886 return constant_boolean_node (result, type);
13889 return constant_boolean_node (real_compare (code, c0, c1), type);
13892 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13894 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13895 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13896 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13899 /* Handle equality/inequality of complex constants. */
13900 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13902 tree rcond = fold_relational_const (code, type,
13903 TREE_REALPART (op0),
13904 TREE_REALPART (op1));
13905 tree icond = fold_relational_const (code, type,
13906 TREE_IMAGPART (op0),
13907 TREE_IMAGPART (op1));
13908 if (code == EQ_EXPR)
13909 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13910 else if (code == NE_EXPR)
13911 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13912 else
13913 return NULL_TREE;
13916 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13918 unsigned count = VECTOR_CST_NELTS (op0);
13919 tree *elts = XALLOCAVEC (tree, count);
13920 gcc_assert (VECTOR_CST_NELTS (op1) == count
13921 && TYPE_VECTOR_SUBPARTS (type) == count);
13923 for (unsigned i = 0; i < count; i++)
13925 tree elem_type = TREE_TYPE (type);
13926 tree elem0 = VECTOR_CST_ELT (op0, i);
13927 tree elem1 = VECTOR_CST_ELT (op1, i);
13929 tree tem = fold_relational_const (code, elem_type,
13930 elem0, elem1);
13932 if (tem == NULL_TREE)
13933 return NULL_TREE;
13935 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13938 return build_vector (type, elts);
13941 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13943 To compute GT, swap the arguments and do LT.
13944 To compute GE, do LT and invert the result.
13945 To compute LE, swap the arguments, do LT and invert the result.
13946 To compute NE, do EQ and invert the result.
13948 Therefore, the code below must handle only EQ and LT. */
13950 if (code == LE_EXPR || code == GT_EXPR)
13952 std::swap (op0, op1);
13953 code = swap_tree_comparison (code);
13956 /* Note that it is safe to invert for real values here because we
13957 have already handled the one case that it matters. */
13959 invert = 0;
13960 if (code == NE_EXPR || code == GE_EXPR)
13962 invert = 1;
13963 code = invert_tree_comparison (code, false);
13966 /* Compute a result for LT or EQ if args permit;
13967 otherwise return NULL_TREE. */
13968 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13970 if (code == EQ_EXPR)
13971 result = tree_int_cst_equal (op0, op1);
13972 else
13973 result = tree_int_cst_lt (op0, op1);
13975 else
13976 return NULL_TREE;
13978 if (invert)
13979 result ^= 1;
13980 return constant_boolean_node (result, type);
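/* An example of the NaN handling above: when either operand is a NaN,

     EQ_EXPR        folds to boolean false,
     UNORDERED_EXPR folds to boolean true,
     LT_EXPR        folds to false only if !flag_trapping_math;

   otherwise NULL_TREE is returned for LT_EXPR, because folding the
   ordered comparison away would also discard the invalid-operand
   exception it must raise.  */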
13983 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13984 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13985 itself. */
13987 tree
13988 fold_build_cleanup_point_expr (tree type, tree expr)
13990 /* If the expression does not have side effects then we don't have to wrap
13991 it with a cleanup point expression. */
13992 if (!TREE_SIDE_EFFECTS (expr))
13993 return expr;
13995 /* If the expression is a return, check whether the expression inside the
13996 return, or the right-hand side of the modify expression inside the
13997 return, has side effects. If either of them has none, we don't need to
13998 wrap the expression in a cleanup point expression. Note we don't check the
13999 left-hand side of the modify because it should always be a return decl. */
14000 if (TREE_CODE (expr) == RETURN_EXPR)
14002 tree op = TREE_OPERAND (expr, 0);
14003 if (!op || !TREE_SIDE_EFFECTS (op))
14004 return expr;
14005 op = TREE_OPERAND (op, 1);
14006 if (!TREE_SIDE_EFFECTS (op))
14007 return expr;
14010 return build1 (CLEANUP_POINT_EXPR, type, expr);
14013 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14014 of an indirection through OP0, or NULL_TREE if no simplification is
14015 possible. */
14017 tree
14018 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14020 tree sub = op0;
14021 tree subtype;
14023 STRIP_NOPS (sub);
14024 subtype = TREE_TYPE (sub);
14025 if (!POINTER_TYPE_P (subtype))
14026 return NULL_TREE;
14028 if (TREE_CODE (sub) == ADDR_EXPR)
14030 tree op = TREE_OPERAND (sub, 0);
14031 tree optype = TREE_TYPE (op);
14032 /* *&CONST_DECL -> to the value of the const decl. */
14033 if (TREE_CODE (op) == CONST_DECL)
14034 return DECL_INITIAL (op);
14035 /* *&p => p; make sure to handle *&"str"[cst] here. */
14036 if (type == optype)
14038 tree fop = fold_read_from_constant_string (op);
14039 if (fop)
14040 return fop;
14041 else
14042 return op;
14044 /* *(foo *)&fooarray => fooarray[0] */
14045 else if (TREE_CODE (optype) == ARRAY_TYPE
14046 && type == TREE_TYPE (optype)
14047 && (!in_gimple_form
14048 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14050 tree type_domain = TYPE_DOMAIN (optype);
14051 tree min_val = size_zero_node;
14052 if (type_domain && TYPE_MIN_VALUE (type_domain))
14053 min_val = TYPE_MIN_VALUE (type_domain);
14054 if (in_gimple_form
14055 && TREE_CODE (min_val) != INTEGER_CST)
14056 return NULL_TREE;
14057 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14058 NULL_TREE, NULL_TREE);
14060 /* *(foo *)&complexfoo => __real__ complexfoo */
14061 else if (TREE_CODE (optype) == COMPLEX_TYPE
14062 && type == TREE_TYPE (optype))
14063 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14064 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14065 else if (TREE_CODE (optype) == VECTOR_TYPE
14066 && type == TREE_TYPE (optype))
14068 tree part_width = TYPE_SIZE (type);
14069 tree index = bitsize_int (0);
14070 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14074 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14075 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14077 tree op00 = TREE_OPERAND (sub, 0);
14078 tree op01 = TREE_OPERAND (sub, 1);
14080 STRIP_NOPS (op00);
14081 if (TREE_CODE (op00) == ADDR_EXPR)
14083 tree op00type;
14084 op00 = TREE_OPERAND (op00, 0);
14085 op00type = TREE_TYPE (op00);
14087 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14088 if (TREE_CODE (op00type) == VECTOR_TYPE
14089 && type == TREE_TYPE (op00type))
14091 HOST_WIDE_INT offset = tree_to_shwi (op01);
14092 tree part_width = TYPE_SIZE (type);
14093 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14094 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14095 tree index = bitsize_int (indexi);
14097 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14098 return fold_build3_loc (loc,
14099 BIT_FIELD_REF, type, op00,
14100 part_width, index);
14103 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14104 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14105 && type == TREE_TYPE (op00type))
14107 tree size = TYPE_SIZE_UNIT (type);
14108 if (tree_int_cst_equal (size, op01))
14109 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14111 /* ((foo *)&fooarray)[1] => fooarray[1] */
14112 else if (TREE_CODE (op00type) == ARRAY_TYPE
14113 && type == TREE_TYPE (op00type))
14115 tree type_domain = TYPE_DOMAIN (op00type);
14116 tree min_val = size_zero_node;
14117 if (type_domain && TYPE_MIN_VALUE (type_domain))
14118 min_val = TYPE_MIN_VALUE (type_domain);
14119 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14120 TYPE_SIZE_UNIT (type));
14121 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14122 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14123 NULL_TREE, NULL_TREE);
14128 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14129 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14130 && type == TREE_TYPE (TREE_TYPE (subtype))
14131 && (!in_gimple_form
14132 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14134 tree type_domain;
14135 tree min_val = size_zero_node;
14136 sub = build_fold_indirect_ref_loc (loc, sub);
14137 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14138 if (type_domain && TYPE_MIN_VALUE (type_domain))
14139 min_val = TYPE_MIN_VALUE (type_domain);
14140 if (in_gimple_form
14141 && TREE_CODE (min_val) != INTEGER_CST)
14142 return NULL_TREE;
14143 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14144 NULL_TREE);
14147 return NULL_TREE;
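/* Examples of the patterns above, with IARR a hypothetical int array
   and CFLOAT a hypothetical complex float:

     *(int *) &iarr     -> iarr[0]         (ARRAY_TYPE arm)
     *(float *) &cfloat -> __real__ cfloat (COMPLEX_TYPE arm)

   and dereferencing &cfloat offset by TYPE_SIZE_UNIT (float) bytes
   yields __imag__ cfloat via the POINTER_PLUS_EXPR arm. Each arm
   requires TYPE to match the element or component type, and NULL_TREE
   is returned when no pattern applies.  */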
14150 /* Builds an expression for an indirection through T, simplifying some
14151 cases. */
14153 tree
14154 build_fold_indirect_ref_loc (location_t loc, tree t)
14156 tree type = TREE_TYPE (TREE_TYPE (t));
14157 tree sub = fold_indirect_ref_1 (loc, type, t);
14159 if (sub)
14160 return sub;
14162 return build1_loc (loc, INDIRECT_REF, type, t);
14165 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14167 tree
14168 fold_indirect_ref_loc (location_t loc, tree t)
14170 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14172 if (sub)
14173 return sub;
14174 else
14175 return t;
14178 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14179 whose result is ignored. The type of the returned tree need not be
14180 the same as the original expression. */
14182 tree
14183 fold_ignored_result (tree t)
14185 if (!TREE_SIDE_EFFECTS (t))
14186 return integer_zero_node;
14188 for (;;)
14189 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14191 case tcc_unary:
14192 t = TREE_OPERAND (t, 0);
14193 break;
14195 case tcc_binary:
14196 case tcc_comparison:
14197 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14198 t = TREE_OPERAND (t, 0);
14199 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14200 t = TREE_OPERAND (t, 1);
14201 else
14202 return t;
14203 break;
14205 case tcc_expression:
14206 switch (TREE_CODE (t))
14208 case COMPOUND_EXPR:
14209 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14210 return t;
14211 t = TREE_OPERAND (t, 0);
14212 break;
14214 case COND_EXPR:
14215 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14216 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14217 return t;
14218 t = TREE_OPERAND (t, 0);
14219 break;
14221 default:
14222 return t;
14224 break;
14226 default:
14227 return t;
14231 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14233 tree
14234 round_up_loc (location_t loc, tree value, unsigned int divisor)
14236 tree div = NULL_TREE;
14238 if (divisor == 1)
14239 return value;
14241 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14242 have to do anything. Only do this check when VALUE is not a
14243 constant, because for a constant the check is more expensive than
14244 simply doing the rounding. */
14245 if (TREE_CODE (value) != INTEGER_CST)
14247 div = build_int_cst (TREE_TYPE (value), divisor);
14249 if (multiple_of_p (TREE_TYPE (value), value, div))
14250 return value;
14253 /* If divisor is a power of two, simplify this to bit manipulation. */
14254 if (divisor == (divisor & -divisor))
14256 if (TREE_CODE (value) == INTEGER_CST)
14258 wide_int val = value;
14259 bool overflow_p;
14261 if ((val & (divisor - 1)) == 0)
14262 return value;
14264 overflow_p = TREE_OVERFLOW (value);
14265 val += divisor - 1;
14266 val &= - (int) divisor;
14267 if (val == 0)
14268 overflow_p = true;
14270 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14272 else
14274 tree t;
14276 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14277 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14278 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14279 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14282 else
14284 if (!div)
14285 div = build_int_cst (TREE_TYPE (value), divisor);
14286 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14287 value = size_binop_loc (loc, MULT_EXPR, value, div);
14290 return value;
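/* Worked example of the power-of-two path above: rounding VALUE = 37
   up to DIVISOR = 8 computes (37 + 7) & -8 = 44 & ~7 = 40. Adding
   DIVISOR - 1 pushes any partial multiple past the next boundary and
   the mask -8 clears the remainder; a non-power-of-two divisor falls
   back to CEIL_DIV_EXPR followed by MULT_EXPR instead.  */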
14293 /* Likewise, but round down. */
14295 tree
14296 round_down_loc (location_t loc, tree value, int divisor)
14298 tree div = NULL_TREE;
14300 gcc_assert (divisor > 0);
14301 if (divisor == 1)
14302 return value;
14304 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14305 have to do anything. Only do this check when VALUE is not a
14306 constant, because for a constant the check is more expensive than
14307 simply doing the rounding. */
14308 if (TREE_CODE (value) != INTEGER_CST)
14310 div = build_int_cst (TREE_TYPE (value), divisor);
14312 if (multiple_of_p (TREE_TYPE (value), value, div))
14313 return value;
14316 /* If divisor is a power of two, simplify this to bit manipulation. */
14317 if (divisor == (divisor & -divisor))
14319 tree t;
14321 t = build_int_cst (TREE_TYPE (value), -divisor);
14322 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14324 else
14326 if (!div)
14327 div = build_int_cst (TREE_TYPE (value), divisor);
14328 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14329 value = size_binop_loc (loc, MULT_EXPR, value, div);
14332 return value;
14335 /* Returns the pointer to the base of the object addressed by EXP and
14336 extracts the information about the offset of the access, storing it
14337 in *PBITPOS and *POFFSET. */
14339 static tree
14340 split_address_to_core_and_offset (tree exp,
14341 HOST_WIDE_INT *pbitpos, tree *poffset)
14343 tree core;
14344 machine_mode mode;
14345 int unsignedp, reversep, volatilep;
14346 HOST_WIDE_INT bitsize;
14347 location_t loc = EXPR_LOCATION (exp);
14349 if (TREE_CODE (exp) == ADDR_EXPR)
14351 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14352 poffset, &mode, &unsignedp, &reversep,
14353 &volatilep, false);
14354 core = build_fold_addr_expr_loc (loc, core);
14356 else
14358 core = exp;
14359 *pbitpos = 0;
14360 *poffset = NULL_TREE;
14363 return core;
14366 /* Returns true if addresses of E1 and E2 differ by a constant, false
14367 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14369 bool
14370 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14372 tree core1, core2;
14373 HOST_WIDE_INT bitpos1, bitpos2;
14374 tree toffset1, toffset2, tdiff, type;
14376 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14377 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14379 if (bitpos1 % BITS_PER_UNIT != 0
14380 || bitpos2 % BITS_PER_UNIT != 0
14381 || !operand_equal_p (core1, core2, 0))
14382 return false;
14384 if (toffset1 && toffset2)
14386 type = TREE_TYPE (toffset1);
14387 if (type != TREE_TYPE (toffset2))
14388 toffset2 = fold_convert (type, toffset2);
14390 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14391 if (!cst_and_fits_in_hwi (tdiff))
14392 return false;
14394 *diff = int_cst_value (tdiff);
14396 else if (toffset1 || toffset2)
14398 /* If only one of the offsets is non-constant, the difference cannot
14399 be a constant. */
14400 return false;
14402 else
14403 *diff = 0;
14405 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14406 return true;
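/* An example, with A a hypothetical int array of 4-byte elements:

     ptr_difference_const (&a[3], &a[1], &diff)

   splits both addresses to the same core &a with constant bit
   positions 96 and 32, so *DIFF becomes (96 - 32) / 8 = 8 and the
   function returns true; if either offset were a run-time value it
   would return false.  */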
14409 /* Return OFF converted to a pointer offset type suitable as offset for
14410 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14411 tree
14412 convert_to_ptrofftype_loc (location_t loc, tree off)
14414 return fold_convert_loc (loc, sizetype, off);
14417 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14418 tree
14419 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14421 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14422 ptr, convert_to_ptrofftype_loc (loc, off));
14425 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14426 tree
14427 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14429 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14430 ptr, size_int (off));
14433 /* Return a char pointer for a C string if it is a string constant
14434 or a sum of a string constant and an integer constant. */
14436 const char *
14437 c_getstr (tree src)
14439 tree offset_node;
14441 src = string_constant (src, &offset_node);
14442 if (src == 0)
14443 return 0;
14445 if (offset_node == 0)
14446 return TREE_STRING_POINTER (src);
14447 else if (!tree_fits_uhwi_p (offset_node)
14448 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14449 return 0;
14451 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
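/* An illustrative call: c_getstr on the tree for "hello" + 2 obtains
   the STRING_CST from string_constant with an offset node of 2;
   since 2 <= TREE_STRING_LENGTH - 1 = 5, it returns a pointer to
   "llo". A variable offset or an out-of-range constant yields a null
   result.  */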