/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
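
/* Illustrative sketch (not part of the original source): with 32-bit
   signed operands, a = 0x7fffffff and b = 1 wrap to sum = 0x80000000.
   a and b agree in sign, so ~(a ^ b) has the sign bit set; a and sum
   disagree, so (a ^ sum) has it set too; the AND is negative and the
   macro reports the overflow.  For a = -1, b = 1, sum = 0 the signs of
   a and b differ, ~(a ^ b) clears the sign bit, and no overflow is
   reported.  */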
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
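
/* Usage sketch (an assumption, not code from this file): a caller
   turning a byte offset into an array index might ask

     tree idx = div_if_zero_remainder (EXACT_DIV_EXPR, offset, elt_size);

   and give up when NULL_TREE comes back because OFFSET is not a
   multiple of ELT_SIZE.  Both `offset' and `elt_size' here are
   hypothetical INTEGER_CST operands.  */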
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */
void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
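
/* Typical caller pattern (a sketch under our reading, not code from
   this file): a pass that folds speculatively brackets the work with

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   so the "assuming signed overflow does not occur" diagnostic is only
   emitted when the folded result is actually kept.  `type', `op0',
   `op1' and `stmt' are hypothetical values owned by the caller.  */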
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
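
/* Worked example (illustrative, not from the original source): for a
   32-bit signed `int', negating INT_MIN = -2147483648 has no
   representable result, and the final test catches exactly that case:
   val == (unsigned HOST_WIDE_INT) 1 << (prec - 1), i.e. 0x80000000,
   so the function returns false for INT_MIN and true for every other
   32-bit value.  */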
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
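
/* Worked example (illustrative, not from the original source):
   splitting IN = `x - 5' with CODE = PLUS_EXPR treats the MINUS_EXPR
   as `x + (-5)': the subtracted literal 5 goes to *MINUS_LITP, *LITP
   and *CONP stay null, and `x' is returned as the variable part,
   ready for associate_trees below to recombine.  */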
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
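
/* Usage sketch (an assumption, not code from this file): combining two
   INTEGER_CSTs with truncation enabled,

     tree t = int_const_binop (MULT_EXPR,
                               build_int_cst (integer_type_node, 6),
                               build_int_cst (integer_type_node, 7), 0);

   yields the constant 42 with no overflow flag, while a CODE this
   function does not handle simply produces NULL_TREE.  */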
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }

      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
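
/* Worked example (illustrative, not from the original source): with
   flag_complex_method == 0, dividing (3 + 4i) by (1 + 2i) computes
   magsquared = 1*1 + 2*2 = 5, t1 = 3*1 + 4*2 = 11 and
   t2 = 4*1 - 3*2 = -2, giving (11/5) + (-2/5)i, which matches
   (3 + 4i)(1 - 2i) / 5.  */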
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
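
/* Worked example (illustrative, not from the original source): with
   constant sizetype arguments arg0 = 4 and arg1 = 12, the final branch
   computes 0 - (ssizetype) (12 - 4), i.e. the ssizetype value -8,
   rather than the huge unsigned difference a direct subtraction in
   sizetype would produce.  */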
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
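
/* Worked example (illustrative, not from the original source):
   converting the REAL_CST 1.0e10 to a 32-bit signed type truncates to
   10000000000, which exceeds INT_MAX = 2147483647, so the result
   saturates to INT_MAX with TREE_OVERFLOW set; a NaN input would
   instead produce 0, again with the overflow flag set.  */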
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
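
/* Usage sketch (an assumption, not code from this file): for a
   four-element integer vector type, build_zero_vector folds the
   constant 0 to the element type and broadcasts it, so the call is
   equivalent to

     tree zeros = build_vector_from_val (v4si_type,
                                         build_int_cst (elt_type, 0));

   where `v4si_type' and `elt_type' are hypothetical type nodes owned
   by the caller.  */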
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1868 /* Convert expression ARG to type TYPE. Used by the middle-end for
1869 simple conversions in preference to calling the front-end's convert. */
1871 tree
1872 fold_convert_loc (location_t loc, tree type, tree arg)
1874 tree orig = TREE_TYPE (arg);
1875 tree tem;
1877 if (type == orig)
1878 return arg;
1880 if (TREE_CODE (arg) == ERROR_MARK
1881 || TREE_CODE (type) == ERROR_MARK
1882 || TREE_CODE (orig) == ERROR_MARK)
1883 return error_mark_node;
1885 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1886 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1888 switch (TREE_CODE (type))
1890 case POINTER_TYPE:
1891 case REFERENCE_TYPE:
1892 /* Handle conversions between pointers to different address spaces. */
1893 if (POINTER_TYPE_P (orig)
1894 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1895 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1896 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1897 /* fall through */
1899 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1900 case OFFSET_TYPE:
1901 if (TREE_CODE (arg) == INTEGER_CST)
1903 tem = fold_convert_const (NOP_EXPR, type, arg);
1904 if (tem != NULL_TREE)
1905 return tem;
1907 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1908 || TREE_CODE (orig) == OFFSET_TYPE)
1909 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1910 if (TREE_CODE (orig) == COMPLEX_TYPE)
1911 return fold_convert_loc (loc, type,
1912 fold_build1_loc (loc, REALPART_EXPR,
1913 TREE_TYPE (orig), arg));
1914 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1915 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1916 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1918 case REAL_TYPE:
1919 if (TREE_CODE (arg) == INTEGER_CST)
1921 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1922 if (tem != NULL_TREE)
1923 return tem;
1925 else if (TREE_CODE (arg) == REAL_CST)
1927 tem = fold_convert_const (NOP_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1931 else if (TREE_CODE (arg) == FIXED_CST)
1933 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1938 switch (TREE_CODE (orig))
1940 case INTEGER_TYPE:
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1945 case REAL_TYPE:
1946 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1948 case FIXED_POINT_TYPE:
1949 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1951 case COMPLEX_TYPE:
1952 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1953 return fold_convert_loc (loc, type, tem);
1955 default:
1956 gcc_unreachable ();
1959 case FIXED_POINT_TYPE:
1960 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1961 || TREE_CODE (arg) == REAL_CST)
1963 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1964 if (tem != NULL_TREE)
1965 goto fold_convert_exit;
1968 switch (TREE_CODE (orig))
1970 case FIXED_POINT_TYPE:
1971 case INTEGER_TYPE:
1972 case ENUMERAL_TYPE:
1973 case BOOLEAN_TYPE:
1974 case REAL_TYPE:
1975 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1977 case COMPLEX_TYPE:
1978 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1979 return fold_convert_loc (loc, type, tem);
1981 default:
1982 gcc_unreachable ();
1985 case COMPLEX_TYPE:
1986 switch (TREE_CODE (orig))
1988 case INTEGER_TYPE:
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1991 case REAL_TYPE:
1992 case FIXED_POINT_TYPE:
1993 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1994 fold_convert_loc (loc, TREE_TYPE (type), arg),
1995 fold_convert_loc (loc, TREE_TYPE (type),
1996 integer_zero_node));
1997 case COMPLEX_TYPE:
1999 tree rpart, ipart;
2001 if (TREE_CODE (arg) == COMPLEX_EXPR)
2003 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2004 TREE_OPERAND (arg, 0));
2005 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2006 TREE_OPERAND (arg, 1));
2007 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2010 arg = save_expr (arg);
2011 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2012 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2013 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2014 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2015 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2018 default:
2019 gcc_unreachable ();
2022 case VECTOR_TYPE:
2023 if (integer_zerop (arg))
2024 return build_zero_vector (type);
2025 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2026 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2027 || TREE_CODE (orig) == VECTOR_TYPE);
2028 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2030 case VOID_TYPE:
2031 tem = fold_ignored_result (arg);
2032 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2034 default:
2035 gcc_unreachable ();
2037 fold_convert_exit:
2038 protected_set_expr_location_unshare (tem, loc);
2039 return tem;
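/* Worked example (not part of this file): converting an "int" value I
   to "double _Complex" takes the COMPLEX_TYPE arm above and builds

     COMPLEX_EXPR <(double) I, 0.0>

   with each part itself run through fold_convert_loc to the complex
   element type.  */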
2042 /* Return false if expr can be assumed not to be an lvalue, true
2043 otherwise. */
2045 static bool
2046 maybe_lvalue_p (const_tree x)
2048 /* We only need to wrap lvalue tree codes. */
2049 switch (TREE_CODE (x))
2051 case VAR_DECL:
2052 case PARM_DECL:
2053 case RESULT_DECL:
2054 case LABEL_DECL:
2055 case FUNCTION_DECL:
2056 case SSA_NAME:
2058 case COMPONENT_REF:
2059 case MEM_REF:
2060 case INDIRECT_REF:
2061 case ARRAY_REF:
2062 case ARRAY_RANGE_REF:
2063 case BIT_FIELD_REF:
2064 case OBJ_TYPE_REF:
2066 case REALPART_EXPR:
2067 case IMAGPART_EXPR:
2068 case PREINCREMENT_EXPR:
2069 case PREDECREMENT_EXPR:
2070 case SAVE_EXPR:
2071 case TRY_CATCH_EXPR:
2072 case WITH_CLEANUP_EXPR:
2073 case COMPOUND_EXPR:
2074 case MODIFY_EXPR:
2075 case TARGET_EXPR:
2076 case COND_EXPR:
2077 case BIND_EXPR:
2078 break;
2080 default:
2081 /* Assume the worst for front-end tree codes. */
2082 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2083 break;
2084 return false;
2087 return true;
2090 /* Return an expr equal to X but certainly not valid as an lvalue. */
2092 tree
2093 non_lvalue_loc (location_t loc, tree x)
2095 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2096 us. */
2097 if (in_gimple_form)
2098 return x;
2100 if (! maybe_lvalue_p (x))
2101 return x;
2102 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2105 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2106 Zero means allow extended lvalues. */
2108 int pedantic_lvalues;
2110 /* When pedantic, return an expr equal to X but certainly not valid as a
2111 pedantic lvalue. Otherwise, return X. */
2113 static tree
2114 pedantic_non_lvalue_loc (location_t loc, tree x)
2116 if (pedantic_lvalues)
2117 return non_lvalue_loc (loc, x);
2119 return protected_set_expr_location_unshare (x, loc);
2122 /* Given a tree comparison code, return the code that is the logical inverse
2123 of the given code. It is not safe to do this for floating-point
2124 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2125 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2127 enum tree_code
2128 invert_tree_comparison (enum tree_code code, bool honor_nans)
2130 if (honor_nans && flag_trapping_math)
2131 return ERROR_MARK;
2133 switch (code)
2135 case EQ_EXPR:
2136 return NE_EXPR;
2137 case NE_EXPR:
2138 return EQ_EXPR;
2139 case GT_EXPR:
2140 return honor_nans ? UNLE_EXPR : LE_EXPR;
2141 case GE_EXPR:
2142 return honor_nans ? UNLT_EXPR : LT_EXPR;
2143 case LT_EXPR:
2144 return honor_nans ? UNGE_EXPR : GE_EXPR;
2145 case LE_EXPR:
2146 return honor_nans ? UNGT_EXPR : GT_EXPR;
2147 case LTGT_EXPR:
2148 return UNEQ_EXPR;
2149 case UNEQ_EXPR:
2150 return LTGT_EXPR;
2151 case UNGT_EXPR:
2152 return LE_EXPR;
2153 case UNGE_EXPR:
2154 return LT_EXPR;
2155 case UNLT_EXPR:
2156 return GE_EXPR;
2157 case UNLE_EXPR:
2158 return GT_EXPR;
2159 case ORDERED_EXPR:
2160 return UNORDERED_EXPR;
2161 case UNORDERED_EXPR:
2162 return ORDERED_EXPR;
2163 default:
2164 gcc_unreachable ();
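/* Illustrative sketch (hypothetical helper, not part of this file):
   why the inversion above must be NaN-aware.  For IEEE floats,
   !(a < b) is not equivalent to a >= b, because both "a < b" and
   "a >= b" are false when either operand is a NaN; the correct
   inverse of LT_EXPR is therefore UNGE_EXPR.  */
static int
demo_invert_lt (double a, double b)
{
  /* 1 for ordered operands, 0 when a or b is a NaN -- which is
     exactly why honor_nans forces the UN* codes above.  */
  return !(a < b) == (a >= b);
}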
2168 /* Similar, but return the comparison that results if the operands are
2169 swapped. This is safe for floating-point. */
2171 enum tree_code
2172 swap_tree_comparison (enum tree_code code)
2174 switch (code)
2176 case EQ_EXPR:
2177 case NE_EXPR:
2178 case ORDERED_EXPR:
2179 case UNORDERED_EXPR:
2180 case LTGT_EXPR:
2181 case UNEQ_EXPR:
2182 return code;
2183 case GT_EXPR:
2184 return LT_EXPR;
2185 case GE_EXPR:
2186 return LE_EXPR;
2187 case LT_EXPR:
2188 return GT_EXPR;
2189 case LE_EXPR:
2190 return GE_EXPR;
2191 case UNGT_EXPR:
2192 return UNLT_EXPR;
2193 case UNGE_EXPR:
2194 return UNLE_EXPR;
2195 case UNLT_EXPR:
2196 return UNGT_EXPR;
2197 case UNLE_EXPR:
2198 return UNGE_EXPR;
2199 default:
2200 gcc_unreachable ();
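/* Illustrative sketch (hypothetical helper, not part of this file):
   unlike inversion, swapping operands is always safe, even for
   floating point: "a < b" and "b > a" agree for ordered values and
   are both false when either operand is a NaN.  */
static int
demo_swap_lt (double a, double b)
{
  return (a < b) == (b > a);   /* Always 1, NaN or not.  */
}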
2205 /* Convert a comparison tree code from an enum tree_code representation
2206 into a compcode bit-based encoding. This function is the inverse of
2207 compcode_to_comparison. */
2209 static enum comparison_code
2210 comparison_to_compcode (enum tree_code code)
2212 switch (code)
2214 case LT_EXPR:
2215 return COMPCODE_LT;
2216 case EQ_EXPR:
2217 return COMPCODE_EQ;
2218 case LE_EXPR:
2219 return COMPCODE_LE;
2220 case GT_EXPR:
2221 return COMPCODE_GT;
2222 case NE_EXPR:
2223 return COMPCODE_NE;
2224 case GE_EXPR:
2225 return COMPCODE_GE;
2226 case ORDERED_EXPR:
2227 return COMPCODE_ORD;
2228 case UNORDERED_EXPR:
2229 return COMPCODE_UNORD;
2230 case UNLT_EXPR:
2231 return COMPCODE_UNLT;
2232 case UNEQ_EXPR:
2233 return COMPCODE_UNEQ;
2234 case UNLE_EXPR:
2235 return COMPCODE_UNLE;
2236 case UNGT_EXPR:
2237 return COMPCODE_UNGT;
2238 case LTGT_EXPR:
2239 return COMPCODE_LTGT;
2240 case UNGE_EXPR:
2241 return COMPCODE_UNGE;
2242 default:
2243 gcc_unreachable ();
2247 /* Convert a compcode bit-based encoding of a comparison operator back
2248 to GCC's enum tree_code representation. This function is the
2249 inverse of comparison_to_compcode. */
2251 static enum tree_code
2252 compcode_to_comparison (enum comparison_code code)
2254 switch (code)
2256 case COMPCODE_LT:
2257 return LT_EXPR;
2258 case COMPCODE_EQ:
2259 return EQ_EXPR;
2260 case COMPCODE_LE:
2261 return LE_EXPR;
2262 case COMPCODE_GT:
2263 return GT_EXPR;
2264 case COMPCODE_NE:
2265 return NE_EXPR;
2266 case COMPCODE_GE:
2267 return GE_EXPR;
2268 case COMPCODE_ORD:
2269 return ORDERED_EXPR;
2270 case COMPCODE_UNORD:
2271 return UNORDERED_EXPR;
2272 case COMPCODE_UNLT:
2273 return UNLT_EXPR;
2274 case COMPCODE_UNEQ:
2275 return UNEQ_EXPR;
2276 case COMPCODE_UNLE:
2277 return UNLE_EXPR;
2278 case COMPCODE_UNGT:
2279 return UNGT_EXPR;
2280 case COMPCODE_LTGT:
2281 return LTGT_EXPR;
2282 case COMPCODE_UNGE:
2283 return UNGE_EXPR;
2284 default:
2285 gcc_unreachable ();
2289 /* Return a tree for the comparison which is the combination of
2290 doing the AND or OR (depending on CODE) of the two operations LCODE
2291 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2292 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2293 if this makes the transformation invalid. */
2295 tree
2296 combine_comparisons (location_t loc,
2297 enum tree_code code, enum tree_code lcode,
2298 enum tree_code rcode, tree truth_type,
2299 tree ll_arg, tree lr_arg)
2301 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2302 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2303 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2304 int compcode;
2306 switch (code)
2308 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2309 compcode = lcompcode & rcompcode;
2310 break;
2312 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2313 compcode = lcompcode | rcompcode;
2314 break;
2316 default:
2317 return NULL_TREE;
2320 if (!honor_nans)
2322 /* Eliminate unordered comparisons, as well as LTGT and ORD
2323 which are not used unless the mode has NaNs. */
2324 compcode &= ~COMPCODE_UNORD;
2325 if (compcode == COMPCODE_LTGT)
2326 compcode = COMPCODE_NE;
2327 else if (compcode == COMPCODE_ORD)
2328 compcode = COMPCODE_TRUE;
2330 else if (flag_trapping_math)
2332 /* Check that the original operation and the optimized ones will trap
2333 under the same condition. */
2334 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2335 && (lcompcode != COMPCODE_EQ)
2336 && (lcompcode != COMPCODE_ORD);
2337 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2338 && (rcompcode != COMPCODE_EQ)
2339 && (rcompcode != COMPCODE_ORD);
2340 bool trap = (compcode & COMPCODE_UNORD) == 0
2341 && (compcode != COMPCODE_EQ)
2342 && (compcode != COMPCODE_ORD);
2344 /* In a short-circuited boolean expression the LHS might be
2345 such that the RHS, if evaluated, will never trap. For
2346 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2347 if neither x nor y is NaN. (This is a mixed blessing: for
2348 example, the expression above will never trap, hence
2349 optimizing it to x < y would be invalid). */
2350 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2351 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2352 rtrap = false;
2354 /* If the comparison was short-circuited, and only the RHS
2355 trapped, we may now generate a spurious trap. */
2356 if (rtrap && !ltrap
2357 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2358 return NULL_TREE;
2360 /* If we changed the conditions that cause a trap, we lose. */
2361 if ((ltrap || rtrap) != trap)
2362 return NULL_TREE;
2365 if (compcode == COMPCODE_TRUE)
2366 return constant_boolean_node (true, truth_type);
2367 else if (compcode == COMPCODE_FALSE)
2368 return constant_boolean_node (false, truth_type);
2369 else
2371 enum tree_code tcode;
2373 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2374 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
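/* Worked example (not part of this file): with the bit encoding of
   enum comparison_code (LT = 1, EQ = 2, GT = 4, UNORD = 8), the
   bitwise AND/OR above directly combines predicates on the same
   operands:

     (x < y) || (x == y):  COMPCODE_LT | COMPCODE_EQ = 1 | 2 = 3
			   = COMPCODE_LE,  i.e. x <= y
     (x <= y) && (x >= y): COMPCODE_LE & COMPCODE_GE = 3 & 6 = 2
			   = COMPCODE_EQ,  i.e. x == y

   The NaN and trap bookkeeping above then decides whether the
   combined code may actually be used.  */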
2378 /* Return nonzero if two operands (typically of the same tree node)
2379 are necessarily equal. If either argument has side-effects this
2380 function returns zero. FLAGS modifies behavior as follows:
2382 If OEP_ONLY_CONST is set, only return nonzero for constants.
2383 This function tests whether the operands are indistinguishable;
2384 it does not test whether they are equal using C's == operation.
2385 The distinction is important for IEEE floating point, because
2386 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2387 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2389 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2390 even though it may hold multiple values during a function.
2391 This is because a GCC tree node guarantees that nothing else is
2392 executed between the evaluation of its "operands" (which may often
2393 be evaluated in arbitrary order). Hence if the operands themselves
2394 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2395 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2396 unset means assuming isochronic (or instantaneous) tree equivalence.
2397 Unless comparing arbitrary expression trees, such as from different
2398 statements, this flag can usually be left unset.
2400 If OEP_PURE_SAME is set, then pure functions with identical arguments
2401 are considered the same. It is used when the caller has other ways
2402 to ensure that global memory is unchanged in between. */
2404 int
2405 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2407 /* If either is ERROR_MARK, they aren't equal. */
2408 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2409 || TREE_TYPE (arg0) == error_mark_node
2410 || TREE_TYPE (arg1) == error_mark_node)
2411 return 0;
2413 /* Similar, if either does not have a type (like a released SSA name),
2414 they aren't equal. */
2415 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2416 return 0;
2418 /* Check equality of integer constants before bailing out due to
2419 precision differences. */
2420 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2421 return tree_int_cst_equal (arg0, arg1);
2423 /* If both types don't have the same signedness, then we can't consider
2424 them equal. We must check this before the STRIP_NOPS calls
2425 because they may change the signedness of the arguments. As pointers
2426 strictly don't have a signedness, require either two pointers or
2427 two non-pointers as well. */
2428 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2429 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2430 return 0;
2432 /* We cannot consider pointers to different address spaces equal. */
2433 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2434 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2435 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2436 return 0;
2438 /* If both types don't have the same precision, then it is not safe
2439 to strip NOPs. */
2440 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2441 return 0;
2443 STRIP_NOPS (arg0);
2444 STRIP_NOPS (arg1);
2446 /* In case both args are comparisons but with different comparison
2447 code, try to swap the comparison operands of one arg to produce
2448 a match and compare that variant. */
2449 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2450 && COMPARISON_CLASS_P (arg0)
2451 && COMPARISON_CLASS_P (arg1))
2453 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2455 if (TREE_CODE (arg0) == swap_code)
2456 return operand_equal_p (TREE_OPERAND (arg0, 0),
2457 TREE_OPERAND (arg1, 1), flags)
2458 && operand_equal_p (TREE_OPERAND (arg0, 1),
2459 TREE_OPERAND (arg1, 0), flags);
2462 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2463 /* This is needed for conversions and for COMPONENT_REF.
2464 Might as well play it safe and always test this. */
2465 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2466 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2467 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2468 return 0;
2470 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2471 We don't care about side effects in that case because the SAVE_EXPR
2472 takes care of that for us. In all other cases, two expressions are
2473 equal if they have no side effects. If we have two identical
2474 expressions with side effects that should be treated the same due
2475 to the only side effects being identical SAVE_EXPR's, that will
2476 be detected in the recursive calls below.
2477 If we are taking an invariant address of two identical objects
2478 they are necessarily equal as well. */
2479 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2480 && (TREE_CODE (arg0) == SAVE_EXPR
2481 || (flags & OEP_CONSTANT_ADDRESS_OF)
2482 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2483 return 1;
2485 /* Next handle constant cases, those for which we can return 1 even
2486 if ONLY_CONST is set. */
2487 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2488 switch (TREE_CODE (arg0))
2490 case INTEGER_CST:
2491 return tree_int_cst_equal (arg0, arg1);
2493 case FIXED_CST:
2494 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2495 TREE_FIXED_CST (arg1));
2497 case REAL_CST:
2498 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2499 TREE_REAL_CST (arg1)))
2500 return 1;
2503 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2505 /* If we do not distinguish between signed and unsigned zero,
2506 consider them equal. */
2507 if (real_zerop (arg0) && real_zerop (arg1))
2508 return 1;
2510 return 0;
2512 case VECTOR_CST:
2514 tree v1, v2;
2516 v1 = TREE_VECTOR_CST_ELTS (arg0);
2517 v2 = TREE_VECTOR_CST_ELTS (arg1);
2518 while (v1 && v2)
2520 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2521 flags))
2522 return 0;
2523 v1 = TREE_CHAIN (v1);
2524 v2 = TREE_CHAIN (v2);
2527 return v1 == v2;
2530 case COMPLEX_CST:
2531 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2532 flags)
2533 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2534 flags));
2536 case STRING_CST:
2537 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2538 && ! memcmp (TREE_STRING_POINTER (arg0),
2539 TREE_STRING_POINTER (arg1),
2540 TREE_STRING_LENGTH (arg0)));
2542 case ADDR_EXPR:
2543 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2544 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2545 ? OEP_CONSTANT_ADDRESS_OF : 0);
2546 default:
2547 break;
2550 if (flags & OEP_ONLY_CONST)
2551 return 0;
2553 /* Define macros to test an operand from arg0 and arg1 for equality and a
2554 variant that allows null and views null as being different from any
2555 non-null value. In the latter case, if either is null, then both
2556 must be; otherwise, do the normal comparison. */
2557 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2558 TREE_OPERAND (arg1, N), flags)
2560 #define OP_SAME_WITH_NULL(N) \
2561 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2562 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2564 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2566 case tcc_unary:
2567 /* Two conversions are equal only if signedness and modes match. */
2568 switch (TREE_CODE (arg0))
2570 CASE_CONVERT:
2571 case FIX_TRUNC_EXPR:
2572 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2573 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2574 return 0;
2575 break;
2576 default:
2577 break;
2580 return OP_SAME (0);
2583 case tcc_comparison:
2584 case tcc_binary:
2585 if (OP_SAME (0) && OP_SAME (1))
2586 return 1;
2588 /* For commutative ops, allow the other order. */
2589 return (commutative_tree_code (TREE_CODE (arg0))
2590 && operand_equal_p (TREE_OPERAND (arg0, 0),
2591 TREE_OPERAND (arg1, 1), flags)
2592 && operand_equal_p (TREE_OPERAND (arg0, 1),
2593 TREE_OPERAND (arg1, 0), flags));
2595 case tcc_reference:
2596 /* If either of the pointer (or reference) expressions we are
2597 dereferencing contain a side effect, these cannot be equal. */
2598 if (TREE_SIDE_EFFECTS (arg0)
2599 || TREE_SIDE_EFFECTS (arg1))
2600 return 0;
2602 switch (TREE_CODE (arg0))
2604 case INDIRECT_REF:
2605 case REALPART_EXPR:
2606 case IMAGPART_EXPR:
2607 return OP_SAME (0);
2609 case MEM_REF:
2610 /* Require equal access sizes, and similar pointer types.
2611 We can have incomplete types for array references of
2612 variable-sized arrays from the Fortran frontend
2613 though. */
2614 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2615 || (TYPE_SIZE (TREE_TYPE (arg0))
2616 && TYPE_SIZE (TREE_TYPE (arg1))
2617 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2618 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2619 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2620 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2621 && OP_SAME (0) && OP_SAME (1));
2623 case ARRAY_REF:
2624 case ARRAY_RANGE_REF:
2625 /* Operands 2 and 3 may be null.
2626 Compare the array index by value if it is constant first as we
2627 may have different types but same value here. */
2628 return (OP_SAME (0)
2629 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2630 TREE_OPERAND (arg1, 1))
2631 || OP_SAME (1))
2632 && OP_SAME_WITH_NULL (2)
2633 && OP_SAME_WITH_NULL (3));
2635 case COMPONENT_REF:
2636 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2637 may be NULL when we're called to compare MEM_EXPRs. */
2638 return OP_SAME_WITH_NULL (0)
2639 && OP_SAME (1)
2640 && OP_SAME_WITH_NULL (2);
2642 case BIT_FIELD_REF:
2643 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2645 default:
2646 return 0;
2649 case tcc_expression:
2650 switch (TREE_CODE (arg0))
2652 case ADDR_EXPR:
2653 case TRUTH_NOT_EXPR:
2654 return OP_SAME (0);
2656 case TRUTH_ANDIF_EXPR:
2657 case TRUTH_ORIF_EXPR:
2658 return OP_SAME (0) && OP_SAME (1);
2660 case FMA_EXPR:
2661 case WIDEN_MULT_PLUS_EXPR:
2662 case WIDEN_MULT_MINUS_EXPR:
2663 if (!OP_SAME (2))
2664 return 0;
2665 /* The multiplication operands are commutative. */
2666 /* FALLTHRU */
2668 case TRUTH_AND_EXPR:
2669 case TRUTH_OR_EXPR:
2670 case TRUTH_XOR_EXPR:
2671 if (OP_SAME (0) && OP_SAME (1))
2672 return 1;
2674 /* Otherwise take into account this is a commutative operation. */
2675 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2676 TREE_OPERAND (arg1, 1), flags)
2677 && operand_equal_p (TREE_OPERAND (arg0, 1),
2678 TREE_OPERAND (arg1, 0), flags));
2680 case COND_EXPR:
2681 case VEC_COND_EXPR:
2682 case DOT_PROD_EXPR:
2683 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2685 default:
2686 return 0;
2689 case tcc_vl_exp:
2690 switch (TREE_CODE (arg0))
2692 case CALL_EXPR:
2693 /* If the CALL_EXPRs call different functions, then they
2694 clearly cannot be equal. */
2695 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2696 flags))
2697 return 0;
2700 unsigned int cef = call_expr_flags (arg0);
2701 if (flags & OEP_PURE_SAME)
2702 cef &= ECF_CONST | ECF_PURE;
2703 else
2704 cef &= ECF_CONST;
2705 if (!cef)
2706 return 0;
2709 /* Now see if all the arguments are the same. */
2711 const_call_expr_arg_iterator iter0, iter1;
2712 const_tree a0, a1;
2713 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2714 a1 = first_const_call_expr_arg (arg1, &iter1);
2715 a0 && a1;
2716 a0 = next_const_call_expr_arg (&iter0),
2717 a1 = next_const_call_expr_arg (&iter1))
2718 if (! operand_equal_p (a0, a1, flags))
2719 return 0;
2721 /* If we get here and both argument lists are exhausted
2722 then the CALL_EXPRs are equal. */
2723 return ! (a0 || a1);
2725 default:
2726 return 0;
2729 case tcc_declaration:
2730 /* Consider __builtin_sqrt equal to sqrt. */
2731 return (TREE_CODE (arg0) == FUNCTION_DECL
2732 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2733 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2734 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2736 default:
2737 return 0;
2740 #undef OP_SAME
2741 #undef OP_SAME_WITH_NULL
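/* Illustrative sketch (hypothetical helper, not part of this file):
   the IEEE caveat in the comment above, in plain C.  -0.0 and 0.0
   compare equal with ==, yet they are distinguishable operands, so
   operand_equal_p must not simply mimic ==.  */
static int
demo_signed_zero (void)
{
  volatile double pz = 0.0, nz = -0.0;
  /* pz == nz holds, but 1.0/pz is +inf while 1.0/nz is -inf.  */
  return pz == nz && 1.0 / pz != 1.0 / nz;   /* Returns 1.  */
}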
2744 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2745 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2747 When in doubt, return 0. */
2749 static int
2750 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2752 int unsignedp1, unsignedpo;
2753 tree primarg0, primarg1, primother;
2754 unsigned int correct_width;
2756 if (operand_equal_p (arg0, arg1, 0))
2757 return 1;
2759 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2760 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2761 return 0;
2763 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2764 and see if the inner values are the same. This removes any
2765 signedness comparison, which doesn't matter here. */
2766 primarg0 = arg0, primarg1 = arg1;
2767 STRIP_NOPS (primarg0);
2768 STRIP_NOPS (primarg1);
2769 if (operand_equal_p (primarg0, primarg1, 0))
2770 return 1;
2772 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2773 actual comparison operand, ARG0.
2775 First throw away any conversions to wider types
2776 already present in the operands. */
2778 primarg1 = get_narrower (arg1, &unsignedp1);
2779 primother = get_narrower (other, &unsignedpo);
2781 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2782 if (unsignedp1 == unsignedpo
2783 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2784 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2786 tree type = TREE_TYPE (arg0);
2788 /* Make sure shorter operand is extended the right way
2789 to match the longer operand. */
2790 primarg1 = fold_convert (signed_or_unsigned_type_for
2791 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2793 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2794 return 1;
2797 return 0;
2800 /* See if ARG is an expression that is either a comparison or is performing
2801 arithmetic on comparisons. The comparisons must only be comparing
2802 two different values, which will be stored in *CVAL1 and *CVAL2; if
2803 they are nonzero it means that some operands have already been found.
2804 No variables may be used anywhere else in the expression except in the
2805 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2806 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2808 If this is true, return 1. Otherwise, return zero. */
2810 static int
2811 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2813 enum tree_code code = TREE_CODE (arg);
2814 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2816 /* We can handle some of the tcc_expression cases here. */
2817 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2818 tclass = tcc_unary;
2819 else if (tclass == tcc_expression
2820 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2821 || code == COMPOUND_EXPR))
2822 tclass = tcc_binary;
2824 else if (tclass == tcc_expression && code == SAVE_EXPR
2825 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2827 /* If we've already found a CVAL1 or CVAL2, this expression is
2828 too complex to handle. */
2829 if (*cval1 || *cval2)
2830 return 0;
2832 tclass = tcc_unary;
2833 *save_p = 1;
2836 switch (tclass)
2838 case tcc_unary:
2839 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2841 case tcc_binary:
2842 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2843 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2844 cval1, cval2, save_p));
2846 case tcc_constant:
2847 return 1;
2849 case tcc_expression:
2850 if (code == COND_EXPR)
2851 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2852 cval1, cval2, save_p)
2853 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2854 cval1, cval2, save_p)
2855 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2856 cval1, cval2, save_p));
2857 return 0;
2859 case tcc_comparison:
2860 /* First see if we can handle the first operand, then the second. For
2861 the second operand, we know *CVAL1 can't be zero. It must be that
2862 one side of the comparison is each of the values; test for the
2863 case where this isn't true by failing if the two operands
2864 are the same. */
2866 if (operand_equal_p (TREE_OPERAND (arg, 0),
2867 TREE_OPERAND (arg, 1), 0))
2868 return 0;
2870 if (*cval1 == 0)
2871 *cval1 = TREE_OPERAND (arg, 0);
2872 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2873 ;
2874 else if (*cval2 == 0)
2875 *cval2 = TREE_OPERAND (arg, 0);
2876 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2877 ;
2878 else
2879 return 0;
2881 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2882 ;
2883 else if (*cval2 == 0)
2884 *cval2 = TREE_OPERAND (arg, 1);
2885 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2886 ;
2887 else
2888 return 0;
2890 return 1;
2892 default:
2893 return 0;
2897 /* ARG is a tree that is known to contain just arithmetic operations and
2898 comparisons. Evaluate the operations in the tree substituting NEW0 for
2899 any occurrence of OLD0 as an operand of a comparison and likewise for
2900 NEW1 and OLD1. */
2902 static tree
2903 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2904 tree old1, tree new1)
2906 tree type = TREE_TYPE (arg);
2907 enum tree_code code = TREE_CODE (arg);
2908 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2910 /* We can handle some of the tcc_expression cases here. */
2911 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2912 tclass = tcc_unary;
2913 else if (tclass == tcc_expression
2914 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2915 tclass = tcc_binary;
2917 switch (tclass)
2919 case tcc_unary:
2920 return fold_build1_loc (loc, code, type,
2921 eval_subst (loc, TREE_OPERAND (arg, 0),
2922 old0, new0, old1, new1));
2924 case tcc_binary:
2925 return fold_build2_loc (loc, code, type,
2926 eval_subst (loc, TREE_OPERAND (arg, 0),
2927 old0, new0, old1, new1),
2928 eval_subst (loc, TREE_OPERAND (arg, 1),
2929 old0, new0, old1, new1));
2931 case tcc_expression:
2932 switch (code)
2934 case SAVE_EXPR:
2935 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2936 old1, new1);
2938 case COMPOUND_EXPR:
2939 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2940 old1, new1);
2942 case COND_EXPR:
2943 return fold_build3_loc (loc, code, type,
2944 eval_subst (loc, TREE_OPERAND (arg, 0),
2945 old0, new0, old1, new1),
2946 eval_subst (loc, TREE_OPERAND (arg, 1),
2947 old0, new0, old1, new1),
2948 eval_subst (loc, TREE_OPERAND (arg, 2),
2949 old0, new0, old1, new1));
2950 default:
2951 break;
2953 /* Fall through - ??? */
2955 case tcc_comparison:
2957 tree arg0 = TREE_OPERAND (arg, 0);
2958 tree arg1 = TREE_OPERAND (arg, 1);
2960 /* We need to check both for exact equality and tree equality. The
2961 former will be true if the operand has a side-effect. In that
2962 case, we know the operand occurred exactly once. */
2964 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2965 arg0 = new0;
2966 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2967 arg0 = new1;
2969 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2970 arg1 = new0;
2971 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2972 arg1 = new1;
2974 return fold_build2_loc (loc, code, type, arg0, arg1);
2977 default:
2978 return arg;
2982 /* Return a tree for the case when the result of an expression is RESULT
2983 converted to TYPE and OMITTED was previously an operand of the expression
2984 but is now not needed (e.g., we folded OMITTED * 0).
2986 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2987 the conversion of RESULT to TYPE. */
2989 tree
2990 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2992 tree t = fold_convert_loc (loc, type, result);
2994 /* If the resulting operand is an empty statement, just return the omitted
2995 statement cast to void. */
2996 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2997 return build1_loc (loc, NOP_EXPR, void_type_node,
2998 fold_ignored_result (omitted));
3000 if (TREE_SIDE_EFFECTS (omitted))
3001 return build2_loc (loc, COMPOUND_EXPR, type,
3002 fold_ignored_result (omitted), t);
3004 return non_lvalue_loc (loc, t);
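/* Worked example (not part of this file): at the source level this
   corresponds to the C comma operator.  Folding "f () * 0" cannot
   drop the call when f has side effects; with OMITTED = "f ()" and
   RESULT = 0 the function builds the equivalent of

     (f (), 0)

   so f is still evaluated exactly once, while "x * 0" with a plain
   variable x folds straight to 0.  */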
3007 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3009 static tree
3010 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3011 tree omitted)
3013 tree t = fold_convert_loc (loc, type, result);
3015 /* If the resulting operand is an empty statement, just return the omitted
3016 statement cast to void. */
3017 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3018 return build1_loc (loc, NOP_EXPR, void_type_node,
3019 fold_ignored_result (omitted));
3021 if (TREE_SIDE_EFFECTS (omitted))
3022 return build2_loc (loc, COMPOUND_EXPR, type,
3023 fold_ignored_result (omitted), t);
3025 return pedantic_non_lvalue_loc (loc, t);
3028 /* Return a tree for the case when the result of an expression is RESULT
3029 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3030 of the expression but are now not needed.
3032 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3033 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3034 evaluated before OMITTED2. Otherwise, if neither has side effects,
3035 just do the conversion of RESULT to TYPE. */
3037 tree
3038 omit_two_operands_loc (location_t loc, tree type, tree result,
3039 tree omitted1, tree omitted2)
3041 tree t = fold_convert_loc (loc, type, result);
3043 if (TREE_SIDE_EFFECTS (omitted2))
3044 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3045 if (TREE_SIDE_EFFECTS (omitted1))
3046 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3048 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3052 /* Return a simplified tree node for the truth-negation of ARG. This
3053 never alters ARG itself. We assume that ARG is an operation that
3054 returns a truth value (0 or 1).
3056 FIXME: one would think we would fold the result, but it causes
3057 problems with the dominator optimizer. */
3059 tree
3060 fold_truth_not_expr (location_t loc, tree arg)
3062 tree type = TREE_TYPE (arg);
3063 enum tree_code code = TREE_CODE (arg);
3064 location_t loc1, loc2;
3066 /* If this is a comparison, we can simply invert it, except for
3067 floating-point non-equality comparisons, in which case we just
3068 enclose a TRUTH_NOT_EXPR around what we have. */
3070 if (TREE_CODE_CLASS (code) == tcc_comparison)
3072 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3073 if (FLOAT_TYPE_P (op_type)
3074 && flag_trapping_math
3075 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3076 && code != NE_EXPR && code != EQ_EXPR)
3077 return NULL_TREE;
3079 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3080 if (code == ERROR_MARK)
3081 return NULL_TREE;
3083 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3084 TREE_OPERAND (arg, 1));
3087 switch (code)
3089 case INTEGER_CST:
3090 return constant_boolean_node (integer_zerop (arg), type);
3092 case TRUTH_AND_EXPR:
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3095 return build2_loc (loc, TRUTH_OR_EXPR, type,
3096 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3097 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3099 case TRUTH_OR_EXPR:
3100 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3101 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3102 return build2_loc (loc, TRUTH_AND_EXPR, type,
3103 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3104 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3106 case TRUTH_XOR_EXPR:
3107 /* Here we can invert either operand. We invert the first operand
3108 unless the second operand is a TRUTH_NOT_EXPR in which case our
3109 result is the XOR of the first operand with the inside of the
3110 negation of the second operand. */
3112 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3113 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3114 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3115 else
3116 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3117 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3118 TREE_OPERAND (arg, 1));
3120 case TRUTH_ANDIF_EXPR:
3121 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3122 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3123 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3124 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3125 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3127 case TRUTH_ORIF_EXPR:
3128 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3129 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3130 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3131 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3132 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3134 case TRUTH_NOT_EXPR:
3135 return TREE_OPERAND (arg, 0);
3137 case COND_EXPR:
3139 tree arg1 = TREE_OPERAND (arg, 1);
3140 tree arg2 = TREE_OPERAND (arg, 2);
3142 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3143 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3145 /* A COND_EXPR may have a throw as one operand, which
3146 then has void type. Just leave void operands
3147 as they are. */
3148 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3149 VOID_TYPE_P (TREE_TYPE (arg1))
3150 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3151 VOID_TYPE_P (TREE_TYPE (arg2))
3152 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3155 case COMPOUND_EXPR:
3156 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3157 return build2_loc (loc, COMPOUND_EXPR, type,
3158 TREE_OPERAND (arg, 0),
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3161 case NON_LVALUE_EXPR:
3162 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3163 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3165 CASE_CONVERT:
3166 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3167 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3169 /* ... fall through ... */
3171 case FLOAT_EXPR:
3172 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3173 return build1_loc (loc, TREE_CODE (arg), type,
3174 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3176 case BIT_AND_EXPR:
3177 if (!integer_onep (TREE_OPERAND (arg, 1)))
3178 return NULL_TREE;
3179 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3181 case SAVE_EXPR:
3182 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3184 case CLEANUP_POINT_EXPR:
3185 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3186 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3187 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3189 default:
3190 return NULL_TREE;
3194 /* Return a simplified tree node for the truth-negation of ARG. This
3195 never alters ARG itself. We assume that ARG is an operation that
3196 returns a truth value (0 or 1).
3198 FIXME: one would think we would fold the result, but it causes
3199 problems with the dominator optimizer. */
3201 tree
3202 invert_truthvalue_loc (location_t loc, tree arg)
3204 tree tem;
3206 if (TREE_CODE (arg) == ERROR_MARK)
3207 return arg;
3209 tem = fold_truth_not_expr (loc, arg);
3210 if (!tem)
3211 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3213 return tem;
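/* Illustrative sketch (hypothetical helper, not part of this file):
   the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws, applied recursively so the negation sinks to the leaves
   instead of wrapping the whole expression in TRUTH_NOT_EXPR.  */
static int
demo_de_morgan (int a, int b)
{
  return (!(a && b) == (!a || !b))
	 && (!(a || b) == (!a && !b));   /* Always 1.  */
}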
3216 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3217 operands are another bit-wise operation with a common input. If so,
3218 distribute the bit operations to save an operation and possibly two if
3219 constants are involved. For example, convert
3220 (A | B) & (A | C) into A | (B & C)
3221 Further simplification will occur if B and C are constants.
3223 If this optimization cannot be done, 0 will be returned. */
3225 static tree
3226 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 tree common;
3230 tree left, right;
3232 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3233 || TREE_CODE (arg0) == code
3234 || (TREE_CODE (arg0) != BIT_AND_EXPR
3235 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3236 return 0;
3238 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3240 common = TREE_OPERAND (arg0, 0);
3241 left = TREE_OPERAND (arg0, 1);
3242 right = TREE_OPERAND (arg1, 1);
3244 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3246 common = TREE_OPERAND (arg0, 0);
3247 left = TREE_OPERAND (arg0, 1);
3248 right = TREE_OPERAND (arg1, 0);
3250 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3252 common = TREE_OPERAND (arg0, 1);
3253 left = TREE_OPERAND (arg0, 0);
3254 right = TREE_OPERAND (arg1, 1);
3256 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3258 common = TREE_OPERAND (arg0, 1);
3259 left = TREE_OPERAND (arg0, 0);
3260 right = TREE_OPERAND (arg1, 0);
3262 else
3263 return 0;
3265 common = fold_convert_loc (loc, type, common);
3266 left = fold_convert_loc (loc, type, left);
3267 right = fold_convert_loc (loc, type, right);
3268 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3269 fold_build2_loc (loc, code, type, left, right));
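/* Illustrative sketch (hypothetical helper, not part of this file):
   the rewrite relies on the bitwise distribution identities
   (a | b) & (a | c) == a | (b & c) and (a & b) | (a & c) ==
   a & (b | c), which trade two operations for one (and let B and C
   fold together when both are constants).  */
static int
demo_distribute (unsigned int a, unsigned int b, unsigned int c)
{
  return (((a | b) & (a | c)) == (a | (b & c)))
	 && (((a & b) | (a & c)) == (a & (b | c)));   /* Always 1.  */
}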
3272 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3273 with code CODE. This optimization is unsafe. */
3274 static tree
3275 distribute_real_division (location_t loc, enum tree_code code, tree type,
3276 tree arg0, tree arg1)
3278 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3279 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3281 /* (A / C) +- (B / C) -> (A +- B) / C. */
3282 if (mul0 == mul1
3283 && operand_equal_p (TREE_OPERAND (arg0, 1),
3284 TREE_OPERAND (arg1, 1), 0))
3285 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3286 fold_build2_loc (loc, code, type,
3287 TREE_OPERAND (arg0, 0),
3288 TREE_OPERAND (arg1, 0)),
3289 TREE_OPERAND (arg0, 1));
3291 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3292 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3293 TREE_OPERAND (arg1, 0), 0)
3294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3295 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3297 REAL_VALUE_TYPE r0, r1;
3298 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3299 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3300 if (!mul0)
3301 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3302 if (!mul1)
3303 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3304 real_arithmetic (&r0, code, &r0, &r1);
3305 return fold_build2_loc (loc, MULT_EXPR, type,
3306 TREE_OPERAND (arg0, 0),
3307 build_real (type, r0));
3310 return NULL_TREE;
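/* Worked example (not part of this file): the two rewrites above are

     A/C  + B/C  ->  (A + B) / C
     A/C1 + A/C2 ->  A * (1/C1 + 1/C2)

   Both are exact over the reals but can round differently under IEEE
   arithmetic, which is why the function is documented as unsafe; its
   callers are expected to guard it with the unsafe-math flags.  */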
3313 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3314 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3316 static tree
3317 make_bit_field_ref (location_t loc, tree inner, tree type,
3318 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3320 tree result, bftype;
3322 if (bitpos == 0)
3324 tree size = TYPE_SIZE (TREE_TYPE (inner));
3325 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3326 || POINTER_TYPE_P (TREE_TYPE (inner)))
3327 && host_integerp (size, 0)
3328 && tree_low_cst (size, 0) == bitsize)
3329 return fold_convert_loc (loc, type, inner);
3332 bftype = type;
3333 if (TYPE_PRECISION (bftype) != bitsize
3334 || TYPE_UNSIGNED (bftype) == !unsignedp)
3335 bftype = build_nonstandard_integer_type (bitsize, 0);
3337 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3338 size_int (bitsize), bitsize_int (bitpos));
3340 if (bftype != type)
3341 result = fold_convert_loc (loc, type, result);
3343 return result;
3346 /* Optimize a bit-field compare.
3348 There are two cases: First is a compare against a constant and the
3349 second is a comparison of two items where the fields are at the same
3350 bit position relative to the start of a chunk (byte, halfword, word)
3351 large enough to contain it. In these cases we can avoid the shift
3352 implicit in bitfield extractions.
3354 For constants, we emit a compare of the shifted constant with the
3355 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3356 compared. For two fields at the same position, we do the ANDs with the
3357 similar mask and compare the result of the ANDs.
3359 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3360 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3361 are the left and right operands of the comparison, respectively.
3363 If the optimization described above can be done, we return the resulting
3364 tree. Otherwise we return zero. */
3366 static tree
3367 optimize_bit_field_compare (location_t loc, enum tree_code code,
3368 tree compare_type, tree lhs, tree rhs)
3370 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3371 tree type = TREE_TYPE (lhs);
3372 tree signed_type, unsigned_type;
3373 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3374 enum machine_mode lmode, rmode, nmode;
3375 int lunsignedp, runsignedp;
3376 int lvolatilep = 0, rvolatilep = 0;
3377 tree linner, rinner = NULL_TREE;
3378 tree mask;
3379 tree offset;
3381 /* Get all the information about the extractions being done. If the bit size
3382 is the same as the size of the underlying object, we aren't doing an
3383 extraction at all and so can do nothing. We also don't want to
3384 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3385 then will no longer be able to replace it. */
3386 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3387 &lunsignedp, &lvolatilep, false);
3388 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3389 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3390 return 0;
3392 if (!const_p)
3394 /* If this is not a constant, we can only do something if bit positions,
3395 sizes, and signedness are the same. */
3396 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3397 &runsignedp, &rvolatilep, false);
3399 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3400 || lunsignedp != runsignedp || offset != 0
3401 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3402 return 0;
3405 /* See if we can find a mode to refer to this field. We should be able to,
3406 but fail if we can't. */
3407 if (lvolatilep
3408 && GET_MODE_BITSIZE (lmode) > 0
3409 && flag_strict_volatile_bitfields > 0)
3410 nmode = lmode;
3411 else
3412 nmode = get_best_mode (lbitsize, lbitpos,
3413 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3414 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3415 TYPE_ALIGN (TREE_TYPE (rinner))),
3416 word_mode, lvolatilep || rvolatilep);
3417 if (nmode == VOIDmode)
3418 return 0;
3420 /* Set signed and unsigned types of the precision of this mode for the
3421 shifts below. */
3422 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3423 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3425 /* Compute the bit position and size for the new reference and our offset
3426 within it. If the new reference is the same size as the original, we
3427 won't optimize anything, so return zero. */
3428 nbitsize = GET_MODE_BITSIZE (nmode);
3429 nbitpos = lbitpos & ~ (nbitsize - 1);
3430 lbitpos -= nbitpos;
3431 if (nbitsize == lbitsize)
3432 return 0;
3434 if (BYTES_BIG_ENDIAN)
3435 lbitpos = nbitsize - lbitsize - lbitpos;
3437 /* Make the mask to be used against the extracted field. */
3438 mask = build_int_cst_type (unsigned_type, -1);
3439 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3440 mask = const_binop (RSHIFT_EXPR, mask,
3441 size_int (nbitsize - lbitsize - lbitpos));
3443 if (! const_p)
3444 /* If not comparing with constant, just rework the comparison
3445 and return. */
3446 return fold_build2_loc (loc, code, compare_type,
3447 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3448 make_bit_field_ref (loc, linner,
3449 unsigned_type,
3450 nbitsize, nbitpos,
3451 1),
3452 mask),
3453 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3454 make_bit_field_ref (loc, rinner,
3455 unsigned_type,
3456 nbitsize, nbitpos,
3457 1),
3458 mask));
3460 /* Otherwise, we are handling the constant case. See if the constant is too
3461 big for the field. Warn and return a tree for 0 (false) if so. We do
3462 this not only for its own sake, but to avoid having to test for this
3463 error case below. If we didn't, we might generate wrong code.
3465 For unsigned fields, the constant shifted right by the field length should
3466 be all zero. For signed fields, the high-order bits should agree with
3467 the sign bit. */
3469 if (lunsignedp)
3471 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3472 fold_convert_loc (loc,
3473 unsigned_type, rhs),
3474 size_int (lbitsize))))
3476 warning (0, "comparison is always %d due to width of bit-field",
3477 code == NE_EXPR);
3478 return constant_boolean_node (code == NE_EXPR, compare_type);
3481 else
3483 tree tem = const_binop (RSHIFT_EXPR,
3484 fold_convert_loc (loc, signed_type, rhs),
3485 size_int (lbitsize - 1));
3486 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3488 warning (0, "comparison is always %d due to width of bit-field",
3489 code == NE_EXPR);
3490 return constant_boolean_node (code == NE_EXPR, compare_type);
3494 /* Single-bit compares should always be against zero. */
3495 if (lbitsize == 1 && ! integer_zerop (rhs))
3497 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3498 rhs = build_int_cst (type, 0);
3501 /* Make a new bitfield reference, shift the constant over the
3502 appropriate number of bits and mask it with the computed mask
3503 (in case this was a signed field). If we changed it, make a new one. */
3504 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3505 if (lvolatilep)
3507 TREE_SIDE_EFFECTS (lhs) = 1;
3508 TREE_THIS_VOLATILE (lhs) = 1;
3511 rhs = const_binop (BIT_AND_EXPR,
3512 const_binop (LSHIFT_EXPR,
3513 fold_convert_loc (loc, unsigned_type, rhs),
3514 size_int (lbitpos)),
3515 mask);
3517 lhs = build2_loc (loc, code, compare_type,
3518 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3519 return lhs;
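/* Illustrative sketch (hypothetical helper, not part of this file):
   the mask construction above in plain C, assuming a 32-bit word
   (nbitsize == 32) and a field that fits it (lbitsize + lbitpos
   <= 32).  Shifting an all-ones value left and then right leaves
   ones exactly over the field:  */
static unsigned int
demo_field_mask (int lbitsize, int lbitpos)
{
  unsigned int mask = ~0u;
  mask <<= 32 - lbitsize;             /* keep lbitsize high-order ones */
  mask >>= 32 - lbitsize - lbitpos;   /* slide them down to lbitpos */
  return mask;   /* e.g. lbitsize 4, lbitpos 8 gives 0xf00 */
}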
3522 /* Subroutine for fold_truthop: decode a field reference.
3524 If EXP is a comparison reference, we return the innermost reference.
3526 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3527 set to the starting bit number.
3529 If the innermost field can be completely contained in a mode-sized
3530 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3532 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3533 otherwise it is not changed.
3535 *PUNSIGNEDP is set to the signedness of the field.
3537 *PMASK is set to the mask used. This is either contained in a
3538 BIT_AND_EXPR or derived from the width of the field.
3540 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3542 Return 0 if this is not a component reference or is one that we can't
3543 do anything with. */
3545 static tree
3546 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3547 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3548 int *punsignedp, int *pvolatilep,
3549 tree *pmask, tree *pand_mask)
3551 tree outer_type = 0;
3552 tree and_mask = 0;
3553 tree mask, inner, offset;
3554 tree unsigned_type;
3555 unsigned int precision;
3557 /* All the optimizations using this function assume integer fields.
3558 There are problems with FP fields since the type_for_size call
3559 below can fail for, e.g., XFmode. */
3560 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3561 return 0;
3563 /* We are interested in the bare arrangement of bits, so strip everything
3564 that doesn't affect the machine mode. However, record the type of the
3565 outermost expression if it may matter below. */
3566 if (CONVERT_EXPR_P (exp)
3567 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3568 outer_type = TREE_TYPE (exp);
3569 STRIP_NOPS (exp);
3571 if (TREE_CODE (exp) == BIT_AND_EXPR)
3573 and_mask = TREE_OPERAND (exp, 1);
3574 exp = TREE_OPERAND (exp, 0);
3575 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3576 if (TREE_CODE (and_mask) != INTEGER_CST)
3577 return 0;
3580 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3581 punsignedp, pvolatilep, false);
3582 if ((inner == exp && and_mask == 0)
3583 || *pbitsize < 0 || offset != 0
3584 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3585 return 0;
3587 /* If the number of bits in the reference is the same as the bitsize of
3588 the outer type, then the outer type gives the signedness. Otherwise
3589 (in case of a small bitfield) the signedness is unchanged. */
3590 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3591 *punsignedp = TYPE_UNSIGNED (outer_type);
3593 /* Compute the mask to access the bitfield. */
3594 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3595 precision = TYPE_PRECISION (unsigned_type);
3597 mask = build_int_cst_type (unsigned_type, -1);
3599 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3600 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3602 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3603 if (and_mask != 0)
3604 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3605 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3607 *pmask = mask;
3608 *pand_mask = and_mask;
3609 return inner;
3612 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3613 bit positions. */
3615 static int
3616 all_ones_mask_p (const_tree mask, int size)
3618 tree type = TREE_TYPE (mask);
3619 unsigned int precision = TYPE_PRECISION (type);
3620 tree tmask;
3622 tmask = build_int_cst_type (signed_type_for (type), -1);
3624 return
3625 tree_int_cst_equal (mask,
3626 const_binop (RSHIFT_EXPR,
3627 const_binop (LSHIFT_EXPR, tmask,
3628 size_int (precision - size)),
3629 size_int (precision - size)));
3632 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3633 represents the sign bit of EXP's type. If EXP represents a sign
3634 or zero extension, also test VAL against the unextended type.
3635 The return value is the (sub)expression whose sign bit is VAL,
3636 or NULL_TREE otherwise. */
3638 static tree
3639 sign_bit_p (tree exp, const_tree val)
3641 unsigned HOST_WIDE_INT mask_lo, lo;
3642 HOST_WIDE_INT mask_hi, hi;
3643 int width;
3644 tree t;
3646 /* Tree EXP must have an integral type. */
3647 t = TREE_TYPE (exp);
3648 if (! INTEGRAL_TYPE_P (t))
3649 return NULL_TREE;
3651 /* Tree VAL must be an integer constant. */
3652 if (TREE_CODE (val) != INTEGER_CST
3653 || TREE_OVERFLOW (val))
3654 return NULL_TREE;
3656 width = TYPE_PRECISION (t);
3657 if (width > HOST_BITS_PER_WIDE_INT)
3659 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3660 lo = 0;
3662 mask_hi = ((unsigned HOST_WIDE_INT) -1
3663 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3664 mask_lo = -1;
3666 else
3668 hi = 0;
3669 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3671 mask_hi = 0;
3672 mask_lo = ((unsigned HOST_WIDE_INT) -1
3673 >> (HOST_BITS_PER_WIDE_INT - width));
3676 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3677 treat VAL as if it were unsigned. */
3678 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3679 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3680 return exp;
3682 /* Handle extension from a narrower type. */
3683 if (TREE_CODE (exp) == NOP_EXPR
3684 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3685 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3687 return NULL_TREE;
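/* Worked example (not part of this file): on a host with 32-bit
   HOST_WIDE_INT and a 40-bit type, the sign bit lands in the high
   word:  hi = 1 << (40 - 32 - 1) = 0x80,  lo = 0,  while the masks
   keep only the 40 significant bits:  mask_hi = -1 >> (64 - 40)
   = 0xff,  mask_lo = all ones.  For a plain 32-bit int the test
   reduces to VAL == 0x80000000.  */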
3690 /* Subroutine for fold_truthop: determine if an operand is simple enough
3691 to be evaluated unconditionally. */
3693 static int
3694 simple_operand_p (const_tree exp)
3696 /* Strip any conversions that don't change the machine mode. */
3697 STRIP_NOPS (exp);
3699 return (CONSTANT_CLASS_P (exp)
3700 || TREE_CODE (exp) == SSA_NAME
3701 || (DECL_P (exp)
3702 && ! TREE_ADDRESSABLE (exp)
3703 && ! TREE_THIS_VOLATILE (exp)
3704 && ! DECL_NONLOCAL (exp)
3705 /* Don't regard global variables as simple. They may be
3706 allocated in ways unknown to the compiler (shared memory,
3707 #pragma weak, etc). */
3708 && ! TREE_PUBLIC (exp)
3709 && ! DECL_EXTERNAL (exp)
3710 /* Loading a static variable is unduly expensive, but global
3711 registers aren't expensive. */
3712 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3715 /* The following functions are subroutines to fold_range_test and allow it to
3716 try to change a logical combination of comparisons into a range test.
3718 For example, both
3719 X == 2 || X == 3 || X == 4 || X == 5
3720 and
3721 X >= 2 && X <= 5
3722 are converted to
3723 (unsigned) (X - 2) <= 3
3725 We describe each set of comparisons as being either inside or outside
3726 a range, using a variable named like IN_P, and then describe the
3727 range with a lower and upper bound. If one of the bounds is omitted,
3728 it represents either the highest or lowest value of the type.
3730 In the comments below, we represent a range by two numbers in brackets
3731 preceded by a "+" to designate being inside that range, or a "-" to
3732 designate being outside that range, so the condition can be inverted by
3733 flipping the prefix. An omitted bound is represented by a "-". For
3734 example, "- [-, 10]" means being outside the range starting at the lowest
3735 possible value and ending at 10, in other words, being greater than 10.
3736 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3737 always false.
3739 We set up things so that the missing bounds are handled in a consistent
3740 manner so neither a missing bound nor "true" and "false" need to be
3741 handled using a special case. */
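/* A standalone check of the example above: casting before the
   subtraction (an adjustment to avoid signed overflow at INT_MIN)
   gives a single unsigned comparison that agrees with both original
   forms for every 32-bit X, because values below 2 wrap to huge
   unsigned numbers.  */
static int
range_test_example (int x)
{
  int chain = x == 2 || x == 3 || x == 4 || x == 5;
  int range = (unsigned int) x - 2u <= 3u;  /* (unsigned) (X - 2) <= 3 */
  return chain == range;                    /* 1 for every X */
}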
3743 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3744 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3745 and UPPER1_P are nonzero if the respective argument is an upper bound
3746 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3747 must be specified for a comparison. ARG1 will be converted to ARG0's
3748 type if both are specified. */
3750 static tree
3751 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3752 tree arg1, int upper1_p)
3754 tree tem;
3755 int result;
3756 int sgn0, sgn1;
3758 /* If neither arg represents infinity, do the normal operation.
3759 Else, if not a comparison, return infinity. Else handle the special
3760 comparison rules. Note that most of the cases below won't occur, but
3761 are handled for consistency. */
3763 if (arg0 != 0 && arg1 != 0)
3765 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3766 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3767 STRIP_NOPS (tem);
3768 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3771 if (TREE_CODE_CLASS (code) != tcc_comparison)
3772 return 0;
3774 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3775 for neither. In real mathematics, we cannot assume open-ended ranges are
3776 the same. But this is computer arithmetic, where numbers are finite.
3777 We can therefore identify an unbounded upper bound with a value Z
3778 greater than any representable number, and an unbounded lower bound
3779 with -Z. This permits us to treat unbounded ranges as equal. */
3780 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3781 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3782 switch (code)
3784 case EQ_EXPR:
3785 result = sgn0 == sgn1;
3786 break;
3787 case NE_EXPR:
3788 result = sgn0 != sgn1;
3789 break;
3790 case LT_EXPR:
3791 result = sgn0 < sgn1;
3792 break;
3793 case LE_EXPR:
3794 result = sgn0 <= sgn1;
3795 break;
3796 case GT_EXPR:
3797 result = sgn0 > sgn1;
3798 break;
3799 case GE_EXPR:
3800 result = sgn0 >= sgn1;
3801 break;
3802 default:
3803 gcc_unreachable ();
3806 return constant_boolean_node (result, type);
3809 /* Given EXP, a logical expression, set the range it is testing into
3810 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3811 actually being tested. *PLOW and *PHIGH will be made of the same
3812 type as the returned expression. If EXP is not a comparison, we
3813 will most likely not be returning a useful value and range. Set
3814 *STRICT_OVERFLOW_P to true if the return value is only valid
3815 because signed overflow is undefined; otherwise, do not change
3816 *STRICT_OVERFLOW_P. */
3818 tree
3819 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3820 bool *strict_overflow_p)
3822 enum tree_code code;
3823 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3824 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3825 int in_p, n_in_p;
3826 tree low, high, n_low, n_high;
3827 location_t loc = EXPR_LOCATION (exp);
3829 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3830 and see if we can refine the range. Some of the cases below may not
3831 happen, but it doesn't seem worth worrying about this. We "continue"
3832 the outer loop when we've changed something; otherwise we "break"
3833 the switch, which will "break" the while. */
3835 in_p = 0;
3836 low = high = build_int_cst (TREE_TYPE (exp), 0);
3838 while (1)
3840 code = TREE_CODE (exp);
3841 exp_type = TREE_TYPE (exp);
3843 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3845 if (TREE_OPERAND_LENGTH (exp) > 0)
3846 arg0 = TREE_OPERAND (exp, 0);
3847 if (TREE_CODE_CLASS (code) == tcc_comparison
3848 || TREE_CODE_CLASS (code) == tcc_unary
3849 || TREE_CODE_CLASS (code) == tcc_binary)
3850 arg0_type = TREE_TYPE (arg0);
3851 if (TREE_CODE_CLASS (code) == tcc_binary
3852 || TREE_CODE_CLASS (code) == tcc_comparison
3853 || (TREE_CODE_CLASS (code) == tcc_expression
3854 && TREE_OPERAND_LENGTH (exp) > 1))
3855 arg1 = TREE_OPERAND (exp, 1);
3858 switch (code)
3860 case TRUTH_NOT_EXPR:
3861 in_p = ! in_p, exp = arg0;
3862 continue;
3864 case EQ_EXPR: case NE_EXPR:
3865 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3866 /* We can only do something if the range is testing for zero
3867 and if the second operand is an integer constant. Note that
3868 saying something is "in" the range we make is done by
3869 complementing IN_P, since it is initially set for the case of
3870 being not equal to zero; "out" means leaving it alone. */
3871 if (low == 0 || high == 0
3872 || ! integer_zerop (low) || ! integer_zerop (high)
3873 || TREE_CODE (arg1) != INTEGER_CST)
3874 break;
3876 switch (code)
3878 case NE_EXPR: /* - [c, c] */
3879 low = high = arg1;
3880 break;
3881 case EQ_EXPR: /* + [c, c] */
3882 in_p = ! in_p, low = high = arg1;
3883 break;
3884 case GT_EXPR: /* - [-, c] */
3885 low = 0, high = arg1;
3886 break;
3887 case GE_EXPR: /* + [c, -] */
3888 in_p = ! in_p, low = arg1, high = 0;
3889 break;
3890 case LT_EXPR: /* - [c, -] */
3891 low = arg1, high = 0;
3892 break;
3893 case LE_EXPR: /* + [-, c] */
3894 in_p = ! in_p, low = 0, high = arg1;
3895 break;
3896 default:
3897 gcc_unreachable ();
3900 /* If this is an unsigned comparison, we also know that EXP is
3901 greater than or equal to zero. We base the range tests we make
3902 on that fact, so we record it here so we can parse existing
3903 range tests. We test arg0_type since often the return type
3904 of, e.g. EQ_EXPR, is boolean. */
3905 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3907 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3908 in_p, low, high, 1,
3909 build_int_cst (arg0_type, 0),
3910 NULL_TREE))
3911 break;
3913 in_p = n_in_p, low = n_low, high = n_high;
3915 /* If the high bound is missing, but we have a nonzero low
3916 bound, reverse the range so it goes from zero to the low bound
3917 minus 1. */
3918 if (high == 0 && low && ! integer_zerop (low))
3920 in_p = ! in_p;
3921 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3922 integer_one_node, 0);
3923 low = build_int_cst (arg0_type, 0);
3927 exp = arg0;
3928 continue;
3930 case NEGATE_EXPR:
3931 /* (-x) IN [a,b] -> x in [-b, -a] */
3932 n_low = range_binop (MINUS_EXPR, exp_type,
3933 build_int_cst (exp_type, 0),
3934 0, high, 1);
3935 n_high = range_binop (MINUS_EXPR, exp_type,
3936 build_int_cst (exp_type, 0),
3937 0, low, 0);
3938 if (n_high != 0 && TREE_OVERFLOW (n_high))
3939 break;
3940 goto normalize;
3942 case BIT_NOT_EXPR:
3943 /* ~ X -> -X - 1 */
3944 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3945 build_int_cst (exp_type, 1));
3946 continue;
3948 case PLUS_EXPR: case MINUS_EXPR:
3949 if (TREE_CODE (arg1) != INTEGER_CST)
3950 break;
3952 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3953 move a constant to the other side. */
3954 if (!TYPE_UNSIGNED (arg0_type)
3955 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 break;
3958 /* If EXP is signed, any overflow in the computation is undefined,
3959 so we don't worry about it so long as our computations on
3960 the bounds don't overflow. For unsigned, overflow is defined
3961 and this is exactly the right thing. */
3962 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3963 arg0_type, low, 0, arg1, 0);
3964 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3965 arg0_type, high, 1, arg1, 0);
3966 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3967 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3968 break;
3970 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3971 *strict_overflow_p = true;
3973 normalize:
3974 /* Check for an unsigned range which has wrapped around the maximum
3975 value thus making n_high < n_low, and normalize it. */
3976 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3978 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3979 integer_one_node, 0);
3980 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3981 integer_one_node, 0);
3983 /* If the range is of the form +/- [ x+1, x ], we won't
3984 be able to normalize it. But then, it represents the
3985 whole range or the empty set, so make it
3986 +/- [ -, - ]. */
3987 if (tree_int_cst_equal (n_low, low)
3988 && tree_int_cst_equal (n_high, high))
3989 low = high = 0;
3990 else
3991 in_p = ! in_p;
3993 else
3994 low = n_low, high = n_high;
3996 exp = arg0;
3997 continue;
3999 CASE_CONVERT: case NON_LVALUE_EXPR:
4000 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4001 break;
4003 if (! INTEGRAL_TYPE_P (arg0_type)
4004 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4005 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4006 break;
4008 n_low = low, n_high = high;
4010 if (n_low != 0)
4011 n_low = fold_convert_loc (loc, arg0_type, n_low);
4013 if (n_high != 0)
4014 n_high = fold_convert_loc (loc, arg0_type, n_high);
4017 /* If we're converting arg0 from an unsigned type to exp's
4018 signed type, we will be doing the comparison as unsigned.
4019 The tests above have already verified that LOW and HIGH
4020 are both positive.
4022 So we have to ensure that we will handle large unsigned
4023 values the same way that the current signed bounds treat
4024 negative values. */
4026 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4028 tree high_positive;
4029 tree equiv_type;
4030 /* For fixed-point modes, we need to pass the saturating flag
4031 as the 2nd parameter. */
4032 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4033 equiv_type = lang_hooks.types.type_for_mode
4034 (TYPE_MODE (arg0_type),
4035 TYPE_SATURATING (arg0_type));
4036 else
4037 equiv_type = lang_hooks.types.type_for_mode
4038 (TYPE_MODE (arg0_type), 1);
4040 /* A range without an upper bound is, naturally, unbounded.
4041 Since convert would have cropped a very large value, use
4042 the max value for the destination type. */
4043 high_positive
4044 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4045 : TYPE_MAX_VALUE (arg0_type);
4047 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4048 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4049 fold_convert_loc (loc, arg0_type,
4050 high_positive),
4051 build_int_cst (arg0_type, 1));
4053 /* If the low bound is specified, "and" the range with the
4054 range for which the original unsigned value will be
4055 positive. */
4056 if (low != 0)
4058 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4059 1, n_low, n_high, 1,
4060 fold_convert_loc (loc, arg0_type,
4061 integer_zero_node),
4062 high_positive))
4063 break;
4065 in_p = (n_in_p == in_p);
4067 else
4069 /* Otherwise, "or" the range with the range of the input
4070 that will be interpreted as negative. */
4071 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4072 0, n_low, n_high, 1,
4073 fold_convert_loc (loc, arg0_type,
4074 integer_zero_node),
4075 high_positive))
4076 break;
4078 in_p = (in_p != n_in_p);
4082 exp = arg0;
4083 low = n_low, high = n_high;
4084 continue;
4086 default:
4087 break;
4090 break;
4093 /* If EXP is a constant, we can evaluate whether this is true or false. */
4094 if (TREE_CODE (exp) == INTEGER_CST)
4096 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4097 exp, 0, low, 0))
4098 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4099 exp, 1, high, 1)));
4100 low = high = 0;
4101 exp = 0;
4104 *pin_p = in_p, *plow = low, *phigh = high;
4105 return exp;
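/* A standalone illustration of the PLUS_EXPR case and the "normalize"
   step above, on a hypothetical 8-bit unsigned X: the test
   (unsigned char) (X + 10) <= 15 is the range + [0, 15] of X + 10;
   moving the constant gives + [246, 5] of X, which has wrapped, so it
   is normalized to the outside test - [6, 245].  */
static int
make_range_example (unsigned char x)
{
  int direct = (unsigned char) (x + 10) <= 15;
  int normalized = x <= 5 || x >= 246;   /* - [6, 245] */
  return direct == normalized;           /* 1 for every X */
}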
4108 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4109 type, TYPE, return an expression to test if EXP is in (or out of, depending
4110 on IN_P) the range. Return 0 if the test couldn't be created. */
4112 tree
4113 build_range_check (location_t loc, tree type, tree exp, int in_p,
4114 tree low, tree high)
4116 tree etype = TREE_TYPE (exp), value;
4118 #ifdef HAVE_canonicalize_funcptr_for_compare
4119 /* Disable this optimization for function pointer expressions
4120 on targets that require function pointer canonicalization. */
4121 if (HAVE_canonicalize_funcptr_for_compare
4122 && TREE_CODE (etype) == POINTER_TYPE
4123 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4124 return NULL_TREE;
4125 #endif
4127 if (! in_p)
4129 value = build_range_check (loc, type, exp, 1, low, high);
4130 if (value != 0)
4131 return invert_truthvalue_loc (loc, value);
4133 return 0;
4136 if (low == 0 && high == 0)
4137 return build_int_cst (type, 1);
4139 if (low == 0)
4140 return fold_build2_loc (loc, LE_EXPR, type, exp,
4141 fold_convert_loc (loc, etype, high));
4143 if (high == 0)
4144 return fold_build2_loc (loc, GE_EXPR, type, exp,
4145 fold_convert_loc (loc, etype, low));
4147 if (operand_equal_p (low, high, 0))
4148 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4149 fold_convert_loc (loc, etype, low));
4151 if (integer_zerop (low))
4153 if (! TYPE_UNSIGNED (etype))
4155 etype = unsigned_type_for (etype);
4156 high = fold_convert_loc (loc, etype, high);
4157 exp = fold_convert_loc (loc, etype, exp);
4159 return build_range_check (loc, type, exp, 1, 0, high);
4162 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4163 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4165 unsigned HOST_WIDE_INT lo;
4166 HOST_WIDE_INT hi;
4167 int prec;
4169 prec = TYPE_PRECISION (etype);
4170 if (prec <= HOST_BITS_PER_WIDE_INT)
4172 hi = 0;
4173 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4175 else
4177 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4178 lo = (unsigned HOST_WIDE_INT) -1;
4181 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4183 if (TYPE_UNSIGNED (etype))
4185 tree signed_etype = signed_type_for (etype);
4186 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4187 etype
4188 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4189 else
4190 etype = signed_etype;
4191 exp = fold_convert_loc (loc, etype, exp);
4193 return fold_build2_loc (loc, GT_EXPR, type, exp,
4194 build_int_cst (etype, 0));
4198 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4199 This requires wrap-around arithmetic for the type of the expression.
4200 First make sure that arithmetic in this type is valid, then make sure
4201 that it wraps around. */
4202 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4203 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4204 TYPE_UNSIGNED (etype));
4206 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4208 tree utype, minv, maxv;
4210 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4211 for the type in question, as we rely on this here. */
4212 utype = unsigned_type_for (etype);
4213 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4214 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4215 integer_one_node, 1);
4216 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4218 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4219 minv, 1, maxv, 1)))
4220 etype = utype;
4221 else
4222 return 0;
4225 high = fold_convert_loc (loc, etype, high);
4226 low = fold_convert_loc (loc, etype, low);
4227 exp = fold_convert_loc (loc, etype, exp);
4229 value = const_binop (MINUS_EXPR, high, low);
4232 if (POINTER_TYPE_P (etype))
4234 if (value != 0 && !TREE_OVERFLOW (value))
4236 low = fold_convert_loc (loc, sizetype, low);
4237 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4238 return build_range_check (loc, type,
4239 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4240 etype, exp, low),
4241 1, build_int_cst (etype, 0), value);
4243 return 0;
4246 if (value != 0 && !TREE_OVERFLOW (value))
4247 return build_range_check (loc, type,
4248 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4249 1, build_int_cst (etype, 0), value);
4251 return 0;
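/* A standalone check of the (c >= 1 && c <= 127) => (signed char) c > 0
   special case above, assuming the usual two's-complement conversion
   to signed char: reinterpreting makes values 128..255 negative, so
   one signed comparison covers both bounds.  */
static int
build_range_check_example (unsigned char c)
{
  int direct = c >= 1 && c <= 127;
  int folded = (signed char) c > 0;
  return direct == folded;   /* 1 for every C */
}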
4254 /* Return the predecessor of VAL in its type, handling the infinite case. */
4256 static tree
4257 range_predecessor (tree val)
4259 tree type = TREE_TYPE (val);
4261 if (INTEGRAL_TYPE_P (type)
4262 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4263 return 0;
4264 else
4265 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4268 /* Return the successor of VAL in its type, handling the infinite case. */
4270 static tree
4271 range_successor (tree val)
4273 tree type = TREE_TYPE (val);
4275 if (INTEGRAL_TYPE_P (type)
4276 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4277 return 0;
4278 else
4279 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4282 /* Given two ranges, see if we can merge them into one. Return 1 if we
4283 can, 0 if we can't. Set the output range into the specified parameters. */
4285 bool
4286 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4287 tree high0, int in1_p, tree low1, tree high1)
4289 int no_overlap;
4290 int subset;
4291 int temp;
4292 tree tem;
4293 int in_p;
4294 tree low, high;
4295 int lowequal = ((low0 == 0 && low1 == 0)
4296 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4297 low0, 0, low1, 0)));
4298 int highequal = ((high0 == 0 && high1 == 0)
4299 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4300 high0, 1, high1, 1)));
4302 /* Make range 0 be the range that starts first, or ends last if they
4303 start at the same value. Swap them if that isn't the case. */
4304 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4305 low0, 0, low1, 0))
4306 || (lowequal
4307 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4308 high1, 1, high0, 1))))
4310 temp = in0_p, in0_p = in1_p, in1_p = temp;
4311 tem = low0, low0 = low1, low1 = tem;
4312 tem = high0, high0 = high1, high1 = tem;
4315 /* Now flag two cases, whether the ranges are disjoint or whether the
4316 second range is totally subsumed in the first. Note that the tests
4317 below are simplified by the ones above. */
4318 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4319 high0, 1, low1, 0));
4320 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4321 high1, 1, high0, 1));
4323 /* We now have four cases, depending on whether we are including or
4324 excluding the two ranges. */
4325 if (in0_p && in1_p)
4327 /* If they don't overlap, the result is false. If the second range
4328 is a subset it is the result. Otherwise, the range is from the start
4329 of the second to the end of the first. */
4330 if (no_overlap)
4331 in_p = 0, low = high = 0;
4332 else if (subset)
4333 in_p = 1, low = low1, high = high1;
4334 else
4335 in_p = 1, low = low1, high = high0;
4338 else if (in0_p && ! in1_p)
4340 /* If they don't overlap, the result is the first range. If they are
4341 equal, the result is false. If the second range is a subset of the
4342 first, and the ranges begin at the same place, we go from just after
4343 the end of the second range to the end of the first. If the second
4344 range is not a subset of the first, or if it is a subset and both
4345 ranges end at the same place, the range starts at the start of the
4346 first range and ends just before the second range.
4347 Otherwise, we can't describe this as a single range. */
4348 if (no_overlap)
4349 in_p = 1, low = low0, high = high0;
4350 else if (lowequal && highequal)
4351 in_p = 0, low = high = 0;
4352 else if (subset && lowequal)
4354 low = range_successor (high1);
4355 high = high0;
4356 in_p = 1;
4357 if (low == 0)
4359 /* We are in the weird situation where high0 > high1 but
4360 high1 has no successor. Punt. */
4361 return 0;
4364 else if (! subset || highequal)
4366 low = low0;
4367 high = range_predecessor (low1);
4368 in_p = 1;
4369 if (high == 0)
4371 /* low0 < low1 but low1 has no predecessor. Punt. */
4372 return 0;
4375 else
4376 return 0;
4379 else if (! in0_p && in1_p)
4381 /* If they don't overlap, the result is the second range. If the second
4382 is a subset of the first, the result is false. Otherwise,
4383 the range starts just after the first range and ends at the
4384 end of the second. */
4385 if (no_overlap)
4386 in_p = 1, low = low1, high = high1;
4387 else if (subset || highequal)
4388 in_p = 0, low = high = 0;
4389 else
4391 low = range_successor (high0);
4392 high = high1;
4393 in_p = 1;
4394 if (low == 0)
4396 /* high1 > high0 but high0 has no successor. Punt. */
4397 return 0;
4402 else
4404 /* The case where we are excluding both ranges. Here the complex case
4405 is if they don't overlap. In that case, the only time we have a
4406 range is if they are adjacent. If the second is a subset of the
4407 first, the result is the first. Otherwise, the range to exclude
4408 starts at the beginning of the first range and ends at the end of the
4409 second. */
4410 if (no_overlap)
4412 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4413 range_successor (high0),
4414 1, low1, 0)))
4415 in_p = 0, low = low0, high = high1;
4416 else
4418 /* Canonicalize - [min, x] into - [-, x]. */
4419 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4420 switch (TREE_CODE (TREE_TYPE (low0)))
4422 case ENUMERAL_TYPE:
4423 if (TYPE_PRECISION (TREE_TYPE (low0))
4424 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4425 break;
4426 /* FALLTHROUGH */
4427 case INTEGER_TYPE:
4428 if (tree_int_cst_equal (low0,
4429 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4430 low0 = 0;
4431 break;
4432 case POINTER_TYPE:
4433 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4434 && integer_zerop (low0))
4435 low0 = 0;
4436 break;
4437 default:
4438 break;
4441 /* Canonicalize - [x, max] into - [x, -]. */
4442 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4443 switch (TREE_CODE (TREE_TYPE (high1)))
4445 case ENUMERAL_TYPE:
4446 if (TYPE_PRECISION (TREE_TYPE (high1))
4447 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4448 break;
4449 /* FALLTHROUGH */
4450 case INTEGER_TYPE:
4451 if (tree_int_cst_equal (high1,
4452 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4453 high1 = 0;
4454 break;
4455 case POINTER_TYPE:
4456 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4457 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4458 high1, 1,
4459 integer_one_node, 1)))
4460 high1 = 0;
4461 break;
4462 default:
4463 break;
4466 /* The ranges might also be adjacent between the maximum and
4467 minimum values of the given type. For
4468 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4469 return + [x + 1, y - 1]. */
4470 if (low0 == 0 && high1 == 0)
4472 low = range_successor (high0);
4473 high = range_predecessor (low1);
4474 if (low == 0 || high == 0)
4475 return 0;
4477 in_p = 1;
4479 else
4480 return 0;
4483 else if (subset)
4484 in_p = 0, low = low0, high = high0;
4485 else
4486 in_p = 0, low = low0, high = high1;
4489 *pin_p = in_p, *plow = low, *phigh = high;
4490 return 1;
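/* A standalone illustration of the in0_p && in1_p case above:
   intersecting + [2, 9] with + [4, 12] (overlapping, neither a
   subset) runs from the start of the second range to the end of the
   first, + [4, 9].  */
static int
merge_ranges_example (int x)
{
  int separate = (x >= 2 && x <= 9) && (x >= 4 && x <= 12);
  int merged = x >= 4 && x <= 9;   /* + [4, 9] */
  return separate == merged;       /* 1 for every X */
}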
4494 /* Subroutine of fold, looking inside expressions of the form
4495 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4496 of the COND_EXPR. This function is being used also to optimize
4497 A op B ? C : A, by reversing the comparison first.
4499 Return a folded expression whose code is not a COND_EXPR
4500 anymore, or NULL_TREE if no folding opportunity is found. */
4502 static tree
4503 fold_cond_expr_with_comparison (location_t loc, tree type,
4504 tree arg0, tree arg1, tree arg2)
4506 enum tree_code comp_code = TREE_CODE (arg0);
4507 tree arg00 = TREE_OPERAND (arg0, 0);
4508 tree arg01 = TREE_OPERAND (arg0, 1);
4509 tree arg1_type = TREE_TYPE (arg1);
4510 tree tem;
4512 STRIP_NOPS (arg1);
4513 STRIP_NOPS (arg2);
4515 /* If we have A op 0 ? A : -A, consider applying the following
4516 transformations:
4518 A == 0? A : -A same as -A
4519 A != 0? A : -A same as A
4520 A >= 0? A : -A same as abs (A)
4521 A > 0? A : -A same as abs (A)
4522 A <= 0? A : -A same as -abs (A)
4523 A < 0? A : -A same as -abs (A)
4525 None of these transformations work for modes with signed
4526 zeros. If A is +/-0, the first two transformations will
4527 change the sign of the result (from +0 to -0, or vice
4528 versa). The last four will fix the sign of the result,
4529 even though the original expressions could be positive or
4530 negative, depending on the sign of A.
4532 Note that all these transformations are correct if A is
4533 NaN, since the two alternatives (A and -A) are also NaNs. */
4534 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4535 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4536 ? real_zerop (arg01)
4537 : integer_zerop (arg01))
4538 && ((TREE_CODE (arg2) == NEGATE_EXPR
4539 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4540 /* In the case that A is of the form X-Y, '-A' (arg2) may
4541 have already been folded to Y-X; check for that. */
4542 || (TREE_CODE (arg1) == MINUS_EXPR
4543 && TREE_CODE (arg2) == MINUS_EXPR
4544 && operand_equal_p (TREE_OPERAND (arg1, 0),
4545 TREE_OPERAND (arg2, 1), 0)
4546 && operand_equal_p (TREE_OPERAND (arg1, 1),
4547 TREE_OPERAND (arg2, 0), 0))))
4548 switch (comp_code)
4550 case EQ_EXPR:
4551 case UNEQ_EXPR:
4552 tem = fold_convert_loc (loc, arg1_type, arg1);
4553 return pedantic_non_lvalue_loc (loc,
4554 fold_convert_loc (loc, type,
4555 negate_expr (tem)));
4556 case NE_EXPR:
4557 case LTGT_EXPR:
4558 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4559 case UNGE_EXPR:
4560 case UNGT_EXPR:
4561 if (flag_trapping_math)
4562 break;
4563 /* Fall through. */
4564 case GE_EXPR:
4565 case GT_EXPR:
4566 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4567 arg1 = fold_convert_loc (loc, signed_type_for
4568 (TREE_TYPE (arg1)), arg1);
4569 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4570 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4571 case UNLE_EXPR:
4572 case UNLT_EXPR:
4573 if (flag_trapping_math)
4574 break;
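/* Fall through. */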
4575 case LE_EXPR:
4576 case LT_EXPR:
4577 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4578 arg1 = fold_convert_loc (loc, signed_type_for
4579 (TREE_TYPE (arg1)), arg1);
4580 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4581 return negate_expr (fold_convert_loc (loc, type, tem));
4582 default:
4583 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4584 break;
4587 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4588 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4589 both transformations are correct when A is NaN: A != 0
4590 is then true, and A == 0 is false. */
4592 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4593 && integer_zerop (arg01) && integer_zerop (arg2))
4595 if (comp_code == NE_EXPR)
4596 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4597 else if (comp_code == EQ_EXPR)
4598 return build_int_cst (type, 0);
4601 /* Try some transformations of A op B ? A : B.
4603 A == B? A : B same as B
4604 A != B? A : B same as A
4605 A >= B? A : B same as max (A, B)
4606 A > B? A : B same as max (B, A)
4607 A <= B? A : B same as min (A, B)
4608 A < B? A : B same as min (B, A)
4610 As above, these transformations don't work in the presence
4611 of signed zeros. For example, if A and B are zeros of
4612 opposite sign, the first two transformations will change
4613 the sign of the result. In the last four, the original
4614 expressions give different results for (A=+0, B=-0) and
4615 (A=-0, B=+0), but the transformed expressions do not.
4617 The first two transformations are correct if either A or B
4618 is a NaN. In the first transformation, the condition will
4619 be false, and B will indeed be chosen. In the case of the
4620 second transformation, the condition A != B will be true,
4621 and A will be chosen.
4623 The conversions to max() and min() are not correct if B is
4624 a number and A is not. The conditions in the original
4625 expressions will be false, so all four give B. The min()
4626 and max() versions would give a NaN instead. */
4627 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4628 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4629 /* Avoid these transformations if the COND_EXPR may be used
4630 as an lvalue in the C++ front-end. PR c++/19199. */
4631 && (in_gimple_form
4632 || (strcmp (lang_hooks.name, "GNU C++") != 0
4633 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4634 || ! maybe_lvalue_p (arg1)
4635 || ! maybe_lvalue_p (arg2)))
4637 tree comp_op0 = arg00;
4638 tree comp_op1 = arg01;
4639 tree comp_type = TREE_TYPE (comp_op0);
4641 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4642 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4644 comp_type = type;
4645 comp_op0 = arg1;
4646 comp_op1 = arg2;
4649 switch (comp_code)
4651 case EQ_EXPR:
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4653 case NE_EXPR:
4654 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4655 case LE_EXPR:
4656 case LT_EXPR:
4657 case UNLE_EXPR:
4658 case UNLT_EXPR:
4659 /* In C++ a ?: expression can be an lvalue, so put the
4660 operand which will be used if they are equal first
4661 so that we can convert this back to the
4662 corresponding COND_EXPR. */
4663 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4665 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4666 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4667 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4668 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4669 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4670 comp_op1, comp_op0);
4671 return pedantic_non_lvalue_loc (loc,
4672 fold_convert_loc (loc, type, tem));
4674 break;
4675 case GE_EXPR:
4676 case GT_EXPR:
4677 case UNGE_EXPR:
4678 case UNGT_EXPR:
4679 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4681 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4682 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4683 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4684 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4685 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4686 comp_op1, comp_op0);
4687 return pedantic_non_lvalue_loc (loc,
4688 fold_convert_loc (loc, type, tem));
4690 break;
4691 case UNEQ_EXPR:
4692 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4693 return pedantic_non_lvalue_loc (loc,
4694 fold_convert_loc (loc, type, arg2));
4695 break;
4696 case LTGT_EXPR:
4697 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4698 return pedantic_non_lvalue_loc (loc,
4699 fold_convert_loc (loc, type, arg1));
4700 break;
4701 default:
4702 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4703 break;
4707 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4708 we might still be able to simplify this. For example,
4709 if C1 is one less or one more than C2, this might have started
4710 out as a MIN or MAX and been transformed by this function.
4711 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4713 if (INTEGRAL_TYPE_P (type)
4714 && TREE_CODE (arg01) == INTEGER_CST
4715 && TREE_CODE (arg2) == INTEGER_CST)
4716 switch (comp_code)
4718 case EQ_EXPR:
4719 if (TREE_CODE (arg1) == INTEGER_CST)
4720 break;
4721 /* We can replace A with C1 in this case. */
4722 arg1 = fold_convert_loc (loc, type, arg01);
4723 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4725 case LT_EXPR:
4726 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4727 MIN_EXPR, to preserve the signedness of the comparison. */
4728 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4729 OEP_ONLY_CONST)
4730 && operand_equal_p (arg01,
4731 const_binop (PLUS_EXPR, arg2,
4732 build_int_cst (type, 1)),
4733 OEP_ONLY_CONST))
4735 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4736 fold_convert_loc (loc, TREE_TYPE (arg00),
4737 arg2));
4738 return pedantic_non_lvalue_loc (loc,
4739 fold_convert_loc (loc, type, tem));
4741 break;
4743 case LE_EXPR:
4744 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4745 as above. */
4746 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4747 OEP_ONLY_CONST)
4748 && operand_equal_p (arg01,
4749 const_binop (MINUS_EXPR, arg2,
4750 build_int_cst (type, 1)),
4751 OEP_ONLY_CONST))
4753 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4754 fold_convert_loc (loc, TREE_TYPE (arg00),
4755 arg2));
4756 return pedantic_non_lvalue_loc (loc,
4757 fold_convert_loc (loc, type, tem));
4759 break;
4761 case GT_EXPR:
4762 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4763 MAX_EXPR, to preserve the signedness of the comparison. */
4764 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4765 OEP_ONLY_CONST)
4766 && operand_equal_p (arg01,
4767 const_binop (MINUS_EXPR, arg2,
4768 build_int_cst (type, 1)),
4769 OEP_ONLY_CONST))
4771 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4772 fold_convert_loc (loc, TREE_TYPE (arg00),
4773 arg2));
4774 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4776 break;
4778 case GE_EXPR:
4779 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4780 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4781 OEP_ONLY_CONST)
4782 && operand_equal_p (arg01,
4783 const_binop (PLUS_EXPR, arg2,
4784 build_int_cst (type, 1)),
4785 OEP_ONLY_CONST))
4787 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4788 fold_convert_loc (loc, TREE_TYPE (arg00),
4789 arg2));
4790 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4792 break;
4793 case NE_EXPR:
4794 break;
4795 default:
4796 gcc_unreachable ();
4799 return NULL_TREE;
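/* A standalone check of one row of the A op 0 ? A : -A table above:
   the GT_EXPR row folds to abs (A).  Integers have no signed zeros,
   so the caveat in the comment only constrains floating types; note
   both forms overflow for INT_MIN, which this sketch ignores.  */
static int
cond_abs_example (int a)
{
  int direct = a > 0 ? a : -a;
  int folded = a < 0 ? -a : a;   /* abs (a) spelled out */
  return direct == folded;       /* 1 for every A except INT_MIN */
}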
4804 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4805 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4806 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4807 false) >= 2)
4808 #endif
4810 /* EXP is some logical combination of boolean tests. See if we can
4811 merge it into some range test. Return the new tree if so. */
4813 static tree
4814 fold_range_test (location_t loc, enum tree_code code, tree type,
4815 tree op0, tree op1)
4817 int or_op = (code == TRUTH_ORIF_EXPR
4818 || code == TRUTH_OR_EXPR);
4819 int in0_p, in1_p, in_p;
4820 tree low0, low1, low, high0, high1, high;
4821 bool strict_overflow_p = false;
4822 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4823 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4824 tree tem;
4825 const char * const warnmsg = G_("assuming signed overflow does not occur "
4826 "when simplifying range test");
4828 /* If this is an OR operation, invert both sides; we will invert
4829 again at the end. */
4830 if (or_op)
4831 in0_p = ! in0_p, in1_p = ! in1_p;
4833 /* If both expressions are the same, if we can merge the ranges, and we
4834 can build the range test, return it or it inverted. If one of the
4835 ranges is always true or always false, consider it to be the same
4836 expression as the other. */
4837 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4838 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4839 in1_p, low1, high1)
4840 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4841 lhs != 0 ? lhs
4842 : rhs != 0 ? rhs : integer_zero_node,
4843 in_p, low, high))))
4845 if (strict_overflow_p)
4846 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4847 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4850 /* On machines where the branch cost is expensive, if this is a
4851 short-circuited branch and the underlying object on both sides
4852 is the same, make a non-short-circuit operation. */
4853 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4854 && lhs != 0 && rhs != 0
4855 && (code == TRUTH_ANDIF_EXPR
4856 || code == TRUTH_ORIF_EXPR)
4857 && operand_equal_p (lhs, rhs, 0))
4859 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4860 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4861 which cases we can't do this. */
4862 if (simple_operand_p (lhs))
4863 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4864 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4865 type, op0, op1);
4867 else if (lang_hooks.decls.global_bindings_p () == 0
4868 && ! CONTAINS_PLACEHOLDER_P (lhs))
4870 tree common = save_expr (lhs);
4872 if (0 != (lhs = build_range_check (loc, type, common,
4873 or_op ? ! in0_p : in0_p,
4874 low0, high0))
4875 && (0 != (rhs = build_range_check (loc, type, common,
4876 or_op ? ! in1_p : in1_p,
4877 low1, high1))))
4879 if (strict_overflow_p)
4880 fold_overflow_warning (warnmsg,
4881 WARN_STRICT_OVERFLOW_COMPARISON);
4882 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4883 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4884 type, lhs, rhs);
4889 return 0;
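/* A standalone sketch of the range test this function builds for the
   classic digit check: both comparisons test the same operand, so the
   two ranges merge into one unsigned comparison (the cast happens
   before the subtraction to avoid signed overflow).  */
static int
fold_range_test_example (int ch)
{
  int short_circuit = ch >= '0' && ch <= '9';
  int merged = (unsigned int) ch - '0' <= 9u;
  return short_circuit == merged;   /* 1 for every CH */
}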
4892 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4893 bit value. Arrange things so the extra bits will be set to zero if and
4894 only if C is sign-extended to its full width. If MASK is nonzero,
4895 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4897 static tree
4898 unextend (tree c, int p, int unsignedp, tree mask)
4900 tree type = TREE_TYPE (c);
4901 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4902 tree temp;
4904 if (p == modesize || unsignedp)
4905 return c;
4907 /* We work by getting just the sign bit into the low-order bit, then
4908 into the high-order bit, then sign-extend. We then XOR that value
4909 with C. */
4910 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4911 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4913 /* We must use a signed type in order to get an arithmetic right shift.
4914 However, we must also avoid introducing accidental overflows, so that
4915 a subsequent call to integer_zerop will work. Hence we must
4916 do the type conversion here. At this point, the constant is either
4917 zero or one, and the conversion to a signed type can never overflow.
4918 We could get an overflow if this conversion is done anywhere else. */
4919 if (TYPE_UNSIGNED (type))
4920 temp = fold_convert (signed_type_for (type), temp);
4922 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4923 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4924 if (mask != 0)
4925 temp = const_binop (BIT_AND_EXPR, temp,
4926 fold_convert (TREE_TYPE (c), mask));
4927 /* If necessary, convert the type back to match the type of C. */
4928 if (TYPE_UNSIGNED (type))
4929 temp = fold_convert (type, temp);
4931 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
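/* A standalone illustration of the shift/XOR dance above for a P-bit
   field in a 32-bit word, 0 < P < 32, assuming two's complement and
   an arithmetic right shift on int32_t: the field's sign bit is
   pulled down to bit 0, parked in the top bit, smeared across bits
   P..31 by the signed shift, and XORed away.  */
#include <stdint.h>

static uint32_t
unextend_example (uint32_t c, int p)
{
  uint32_t temp = (c >> (p - 1)) & 1;          /* the field's sign bit */
  int32_t parked = (int32_t) (temp << 31);     /* into the high-order bit */
  temp = (uint32_t) (parked >> (32 - p - 1));  /* ones in bits P..31, or zero */
  return c ^ temp;   /* extra bits now zero iff C was sign-extended */
}
/* unextend_example (0xffffff80, 8) == 0x80; unextend_example (0x7f, 8) == 0x7f.  */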
4934 /* For an expression that has the form
4935 (A && B) || ~B
4936 or
4937 (A || B) && ~B,
4938 we can drop one of the inner expressions and simplify to
4939 A || ~B
4940 or
4941 A && ~B
4942 LOC is the location of the resulting expression. OP is the inner
4943 logical operation; the left-hand side in the examples above, while CMPOP
4944 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4945 removing a condition that guards another, as in
4946 (A != NULL && A->...) || A == NULL
4947 which we must not transform. If RHS_ONLY is true, only eliminate the
4948 right-most operand of the inner logical operation. */
4950 static tree
4951 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4952 bool rhs_only)
4954 tree type = TREE_TYPE (cmpop);
4955 enum tree_code code = TREE_CODE (cmpop);
4956 enum tree_code truthop_code = TREE_CODE (op);
4957 tree lhs = TREE_OPERAND (op, 0);
4958 tree rhs = TREE_OPERAND (op, 1);
4959 tree orig_lhs = lhs, orig_rhs = rhs;
4960 enum tree_code rhs_code = TREE_CODE (rhs);
4961 enum tree_code lhs_code = TREE_CODE (lhs);
4962 enum tree_code inv_code;
4964 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4965 return NULL_TREE;
4967 if (TREE_CODE_CLASS (code) != tcc_comparison)
4968 return NULL_TREE;
4970 if (rhs_code == truthop_code)
4972 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4973 if (newrhs != NULL_TREE)
4975 rhs = newrhs;
4976 rhs_code = TREE_CODE (rhs);
4979 if (lhs_code == truthop_code && !rhs_only)
4981 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4982 if (newlhs != NULL_TREE)
4984 lhs = newlhs;
4985 lhs_code = TREE_CODE (lhs);
4989 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4990 if (inv_code == rhs_code
4991 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4992 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4993 return lhs;
4994 if (!rhs_only && inv_code == lhs_code
4995 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4996 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4997 return rhs;
4998 if (rhs != orig_rhs || lhs != orig_lhs)
4999 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5000 lhs, rhs);
5001 return NULL_TREE;
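/* A standalone truth-table check of the simplification above:
   dropping B from (A && B) || !B is sound because whenever !B fails,
   B is true and the inner conjunct is redundant.  The RHS_ONLY guard
   exists because in (A != NULL && A->x) || A == NULL the left arm
   must keep its NULL test.  */
static int
merge_opposite_arm_example (int a, int b)
{
  int original = (a && b) || !b;
  int simplified = a || !b;
  return original == simplified;   /* 1 for all four A/B combinations */
}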
5004 /* Find ways of folding logical expressions of LHS and RHS:
5005 Try to merge two comparisons to the same innermost item.
5006 Look for range tests like "ch >= '0' && ch <= '9'".
5007 Look for combinations of simple terms on machines with expensive branches
5008 and evaluate the RHS unconditionally.
5010 For example, if we have p->a == 2 && p->b == 4 and we can make an
5011 object large enough to span both A and B, we can do this with a comparison
5012 against the object ANDed with a mask.
5014 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5015 operations to do this with one comparison.
5017 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5018 function and the one above.
5020 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5021 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5023 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5024 two operands.
5026 We return the simplified tree or 0 if no optimization is possible. */
5028 static tree
5029 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5030 tree lhs, tree rhs)
5032 /* If this is the "or" of two comparisons, we can do something if
5033 the comparisons are NE_EXPR. If this is the "and", we can do something
5034 if the comparisons are EQ_EXPR. I.e.,
5035 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5037 WANTED_CODE is this operation code. For single bit fields, we can
5038 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5039 comparison for one-bit fields. */
5041 enum tree_code wanted_code;
5042 enum tree_code lcode, rcode;
5043 tree ll_arg, lr_arg, rl_arg, rr_arg;
5044 tree ll_inner, lr_inner, rl_inner, rr_inner;
5045 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5046 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5047 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5048 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5049 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5050 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5051 enum machine_mode lnmode, rnmode;
5052 tree ll_mask, lr_mask, rl_mask, rr_mask;
5053 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5054 tree l_const, r_const;
5055 tree lntype, rntype, result;
5056 HOST_WIDE_INT first_bit, end_bit;
5057 int volatilep;
5058 tree orig_lhs = lhs, orig_rhs = rhs;
5059 enum tree_code orig_code = code;
5061 /* Start by getting the comparison codes. Fail if anything is volatile.
5062 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5063 it were surrounded with a NE_EXPR. */
5065 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5066 return 0;
5068 lcode = TREE_CODE (lhs);
5069 rcode = TREE_CODE (rhs);
5071 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5073 lhs = build2 (NE_EXPR, truth_type, lhs,
5074 build_int_cst (TREE_TYPE (lhs), 0));
5075 lcode = NE_EXPR;
5078 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5080 rhs = build2 (NE_EXPR, truth_type, rhs,
5081 build_int_cst (TREE_TYPE (rhs), 0));
5082 rcode = NE_EXPR;
5085 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5086 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5087 return 0;
5089 ll_arg = TREE_OPERAND (lhs, 0);
5090 lr_arg = TREE_OPERAND (lhs, 1);
5091 rl_arg = TREE_OPERAND (rhs, 0);
5092 rr_arg = TREE_OPERAND (rhs, 1);
5094 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5095 if (simple_operand_p (ll_arg)
5096 && simple_operand_p (lr_arg))
5098 if (operand_equal_p (ll_arg, rl_arg, 0)
5099 && operand_equal_p (lr_arg, rr_arg, 0))
5101 result = combine_comparisons (loc, code, lcode, rcode,
5102 truth_type, ll_arg, lr_arg);
5103 if (result)
5104 return result;
5106 else if (operand_equal_p (ll_arg, rr_arg, 0)
5107 && operand_equal_p (lr_arg, rl_arg, 0))
5109 result = combine_comparisons (loc, code, lcode,
5110 swap_tree_comparison (rcode),
5111 truth_type, ll_arg, lr_arg);
5112 if (result)
5113 return result;
5117 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5118 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5120 /* If the RHS can be evaluated unconditionally and its operands are
5121 simple, it wins to evaluate the RHS unconditionally on machines
5122 with expensive branches. In this case, this isn't a comparison
5123 that can be merged. Avoid doing this if the RHS is a floating-point
5124 comparison since those can trap. */
5126 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5127 false) >= 2
5128 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5129 && simple_operand_p (rl_arg)
5130 && simple_operand_p (rr_arg))
5132 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5133 if (code == TRUTH_OR_EXPR
5134 && lcode == NE_EXPR && integer_zerop (lr_arg)
5135 && rcode == NE_EXPR && integer_zerop (rr_arg)
5136 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5137 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5138 return build2_loc (loc, NE_EXPR, truth_type,
5139 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5140 ll_arg, rl_arg),
5141 build_int_cst (TREE_TYPE (ll_arg), 0));
5143 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5144 if (code == TRUTH_AND_EXPR
5145 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5146 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5147 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5148 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5149 return build2_loc (loc, EQ_EXPR, truth_type,
5150 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5151 ll_arg, rl_arg),
5152 build_int_cst (TREE_TYPE (ll_arg), 0));
5154 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5156 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5157 return build2_loc (loc, code, truth_type, lhs, rhs);
5158 return NULL_TREE;
5162 /* See if the comparisons can be merged. Then get all the parameters for
5163 each side. */
5165 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5166 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5167 return 0;
5169 volatilep = 0;
5170 ll_inner = decode_field_reference (loc, ll_arg,
5171 &ll_bitsize, &ll_bitpos, &ll_mode,
5172 &ll_unsignedp, &volatilep, &ll_mask,
5173 &ll_and_mask);
5174 lr_inner = decode_field_reference (loc, lr_arg,
5175 &lr_bitsize, &lr_bitpos, &lr_mode,
5176 &lr_unsignedp, &volatilep, &lr_mask,
5177 &lr_and_mask);
5178 rl_inner = decode_field_reference (loc, rl_arg,
5179 &rl_bitsize, &rl_bitpos, &rl_mode,
5180 &rl_unsignedp, &volatilep, &rl_mask,
5181 &rl_and_mask);
5182 rr_inner = decode_field_reference (loc, rr_arg,
5183 &rr_bitsize, &rr_bitpos, &rr_mode,
5184 &rr_unsignedp, &volatilep, &rr_mask,
5185 &rr_and_mask);
5187 /* The inner operation on the lhs of each comparison must be the same
5188 if we are to be able to do anything.
5189 Then see if we have constants. If not, the same must be true for
5190 the rhs's. */
5191 if (volatilep || ll_inner == 0 || rl_inner == 0
5192 || ! operand_equal_p (ll_inner, rl_inner, 0))
5193 return 0;
5195 if (TREE_CODE (lr_arg) == INTEGER_CST
5196 && TREE_CODE (rr_arg) == INTEGER_CST)
5197 l_const = lr_arg, r_const = rr_arg;
5198 else if (lr_inner == 0 || rr_inner == 0
5199 || ! operand_equal_p (lr_inner, rr_inner, 0))
5200 return 0;
5201 else
5202 l_const = r_const = 0;
5204 /* If either comparison code is not correct for our logical operation,
5205 fail. However, we can convert a one-bit comparison against zero into
5206 the opposite comparison against that bit being set in the field. */
5208 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5209 if (lcode != wanted_code)
5211 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5213 /* Make the left operand unsigned, since we are only interested
5214 in the value of one bit. Otherwise we are doing the wrong
5215 thing below. */
5216 ll_unsignedp = 1;
5217 l_const = ll_mask;
5219 else
5220 return 0;
5223 /* This is analogous to the code for l_const above. */
5224 if (rcode != wanted_code)
5226 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5228 rl_unsignedp = 1;
5229 r_const = rl_mask;
5231 else
5232 return 0;
5235 /* See if we can find a mode that contains both fields being compared on
5236 the left. If we can't, fail. Otherwise, update all constants and masks
5237 to be relative to a field of that size. */
5238 first_bit = MIN (ll_bitpos, rl_bitpos);
5239 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5240 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5241 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5242 volatilep);
5243 if (lnmode == VOIDmode)
5244 return 0;
5246 lnbitsize = GET_MODE_BITSIZE (lnmode);
5247 lnbitpos = first_bit & ~ (lnbitsize - 1);
5248 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5249 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5251 if (BYTES_BIG_ENDIAN)
5253 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5254 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5257 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5258 size_int (xll_bitpos));
5259 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5260 size_int (xrl_bitpos));
5262 if (l_const)
5264 l_const = fold_convert_loc (loc, lntype, l_const);
5265 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5266 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5267 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5268 fold_build1_loc (loc, BIT_NOT_EXPR,
5269 lntype, ll_mask))))
5271 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5273 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5276 if (r_const)
5278 r_const = fold_convert_loc (loc, lntype, r_const);
5279 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5280 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5281 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5282 fold_build1_loc (loc, BIT_NOT_EXPR,
5283 lntype, rl_mask))))
5285 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5287 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5291 /* If the right sides are not constant, do the same for them. Also,
5292 disallow this optimization if a size or signedness mismatch occurs
5293 between the left and right sides. */
5294 if (l_const == 0)
5296 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5297 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5298 /* Make sure the two fields on the right
5299 correspond to the left without being swapped. */
5300 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5301 return 0;
5303 first_bit = MIN (lr_bitpos, rr_bitpos);
5304 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5305 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5306 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5307 volatilep);
5308 if (rnmode == VOIDmode)
5309 return 0;
5311 rnbitsize = GET_MODE_BITSIZE (rnmode);
5312 rnbitpos = first_bit & ~ (rnbitsize - 1);
5313 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5314 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5316 if (BYTES_BIG_ENDIAN)
5318 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5319 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5322 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5323 rntype, lr_mask),
5324 size_int (xlr_bitpos));
5325 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5326 rntype, rr_mask),
5327 size_int (xrr_bitpos));
5329 /* Make a mask that corresponds to both fields being compared.
5330 Do this for both items being compared. If the operands are the
5331 same size and the bits being compared are in the same position
5332 then we can do this by masking both and comparing the masked
5333 results. */
5334 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5335 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5336 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5338 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5339 ll_unsignedp || rl_unsignedp);
5340 if (! all_ones_mask_p (ll_mask, lnbitsize))
5341 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5343 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5344 lr_unsignedp || rr_unsignedp);
5345 if (! all_ones_mask_p (lr_mask, rnbitsize))
5346 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5348 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5351 /* There is still another way we can do something: If both pairs of
5352 fields being compared are adjacent, we may be able to make a wider
5353 field containing them both.
5355 Note that we still must mask the lhs/rhs expressions. Furthermore,
5356 the mask must be shifted to account for the shift done by
5357 make_bit_field_ref. */
5358 if ((ll_bitsize + ll_bitpos == rl_bitpos
5359 && lr_bitsize + lr_bitpos == rr_bitpos)
5360 || (ll_bitpos == rl_bitpos + rl_bitsize
5361 && lr_bitpos == rr_bitpos + rr_bitsize))
5363 tree type;
5365 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5366 ll_bitsize + rl_bitsize,
5367 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5368 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5369 lr_bitsize + rr_bitsize,
5370 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5372 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5373 size_int (MIN (xll_bitpos, xrl_bitpos)));
5374 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5375 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5377 /* Convert to the smaller type before masking out unwanted bits. */
5378 type = lntype;
5379 if (lntype != rntype)
5381 if (lnbitsize > rnbitsize)
5383 lhs = fold_convert_loc (loc, rntype, lhs);
5384 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5385 type = rntype;
5387 else if (lnbitsize < rnbitsize)
5389 rhs = fold_convert_loc (loc, lntype, rhs);
5390 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5391 type = lntype;
5395 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5396 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5398 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5399 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5401 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5404 return 0;
5407 /* Handle the case of comparisons with constants. If there is something in
5408 common between the masks, those bits of the constants must be the same.
5409 If not, the condition is always false. Test for this to avoid generating
5410 incorrect code below. */
5411 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5412 if (! integer_zerop (result)
5413 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5414 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5416 if (wanted_code == NE_EXPR)
5418 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5419 return constant_boolean_node (true, truth_type);
5421 else
5423 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5424 return constant_boolean_node (false, truth_type);
5428 /* Construct the expression we will return. First get the component
5429 reference we will make. Unless the mask is all ones the width of
5430 that field, perform the mask operation. Then compare with the
5431 merged constant. */
5432 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5433 ll_unsignedp || rl_unsignedp);
5435 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5436 if (! all_ones_mask_p (ll_mask, lnbitsize))
5437 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5439 return build2_loc (loc, wanted_code, truth_type, result,
5440 const_binop (BIT_IOR_EXPR, l_const, r_const));
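/* A standalone sketch of the comparison this function merges for
   p->a == 2 && p->b == 4 when A and B are adjacent 8-bit fields: one
   16-bit load and one compare against the merged constant.  The
   struct, the memcpy load, and the byte order (little-endian host)
   are assumptions for illustration; the mask drops out here because
   both fields are fully compared.  */
#include <stdint.h>
#include <string.h>

struct two_bytes { uint8_t a; uint8_t b; };

static int
fold_truthop_example (const struct two_bytes *p)
{
  uint16_t word;
  memcpy (&word, p, sizeof word);            /* the "bit field ref" */
  return word == (uint16_t) (2 | 4 << 8);    /* p->a == 2 && p->b == 4 */
}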
5443 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5444 constant. */
5446 static tree
5447 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5448 tree op0, tree op1)
5450 tree arg0 = op0;
5451 enum tree_code op_code;
5452 tree comp_const;
5453 tree minmax_const;
5454 int consts_equal, consts_lt;
5455 tree inner;
5457 STRIP_SIGN_NOPS (arg0);
5459 op_code = TREE_CODE (arg0);
5460 minmax_const = TREE_OPERAND (arg0, 1);
5461 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5462 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5463 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5464 inner = TREE_OPERAND (arg0, 0);
5466 /* If something does not permit us to optimize, return NULL_TREE. */
5467 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5468 || TREE_CODE (comp_const) != INTEGER_CST
5469 || TREE_OVERFLOW (comp_const)
5470 || TREE_CODE (minmax_const) != INTEGER_CST
5471 || TREE_OVERFLOW (minmax_const))
5472 return NULL_TREE;
5474 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5475 and GT_EXPR, doing the rest with recursive calls using logical
5476 simplifications. */
5477 switch (code)
5479 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5481 tree tem
5482 = optimize_minmax_comparison (loc,
5483 invert_tree_comparison (code, false),
5484 type, op0, op1);
5485 if (tem)
5486 return invert_truthvalue_loc (loc, tem);
5487 return NULL_TREE;
5490 case GE_EXPR:
5491 return
5492 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5493 optimize_minmax_comparison
5494 (loc, EQ_EXPR, type, arg0, comp_const),
5495 optimize_minmax_comparison
5496 (loc, GT_EXPR, type, arg0, comp_const));
5498 case EQ_EXPR:
5499 if (op_code == MAX_EXPR && consts_equal)
5500 /* MAX (X, 0) == 0 -> X <= 0 */
5501 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5503 else if (op_code == MAX_EXPR && consts_lt)
5504 /* MAX (X, 0) == 5 -> X == 5 */
5505 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5507 else if (op_code == MAX_EXPR)
5508 /* MAX (X, 0) == -1 -> false */
5509 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5511 else if (consts_equal)
5512 /* MIN (X, 0) == 0 -> X >= 0 */
5513 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5515 else if (consts_lt)
5516 /* MIN (X, 0) == 5 -> false */
5517 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5519 else
5520 /* MIN (X, 0) == -1 -> X == -1 */
5521 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5523 case GT_EXPR:
5524 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5525 /* MAX (X, 0) > 0 -> X > 0
5526 MAX (X, 0) > 5 -> X > 5 */
5527 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5529 else if (op_code == MAX_EXPR)
5530 /* MAX (X, 0) > -1 -> true */
5531 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5533 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5534 /* MIN (X, 0) > 0 -> false
5535 MIN (X, 0) > 5 -> false */
5536 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5538 else
5539 /* MIN (X, 0) > -1 -> X > -1 */
5540 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5542 default:
5543 return NULL_TREE;
5547 /* T is an integer expression that is being multiplied, divided, or
5548 reduced modulo a constant C (CODE says which operation and what kind
5549 of divide or modulus). See if we can eliminate that operation by folding it with
5550 other operations already in T. WIDE_TYPE, if non-null, is a type that
5551 should be used for the computation if wider than our type.
5553 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5554 (X * 2) + (Y * 4). We must, however, be assured that either the original
5555 expression would not overflow or that overflow is undefined for the type
5556 in the language in question.
5558 If we return a non-null expression, it is an equivalent form of the
5559 original computation, but need not be in the original type.
5561 We set *STRICT_OVERFLOW_P to true if the return value depends on
5562 signed overflow being undefined. Otherwise we do not change
5563 *STRICT_OVERFLOW_P. */
5565 static tree
5566 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5567 bool *strict_overflow_p)
5569 /* To avoid exponential search depth, refuse to allow recursion past
5570 three levels. Beyond that (1) it's highly unlikely that we'll find
5571 something interesting and (2) we've probably processed it before
5572 when we built the inner expression. */
5574 static int depth;
5575 tree ret;
5577 if (depth > 3)
5578 return NULL;
5580 depth++;
5581 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5582 depth--;
5584 return ret;
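/* Editor's sketch (not part of the original file): a source-level view
   of the example in the comment above.  Both helpers compute the same
   value whenever the intermediate products do not overflow (or the
   type makes overflow undefined, so it may be assumed away).  */
static inline long
extract_muldiv_demo_before (long x, long y)
{
  return (x * 8 + y * 16) / 4;
}

static inline long
extract_muldiv_demo_after (long x, long y)
{
  return x * 2 + y * 4;
}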
5587 static tree
5588 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5589 bool *strict_overflow_p)
5591 tree type = TREE_TYPE (t);
5592 enum tree_code tcode = TREE_CODE (t);
5593 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5594 > GET_MODE_SIZE (TYPE_MODE (type)))
5595 ? wide_type : type);
5596 tree t1, t2;
5597 int same_p = tcode == code;
5598 tree op0 = NULL_TREE, op1 = NULL_TREE;
5599 bool sub_strict_overflow_p;
5601 /* Don't deal with constants of zero here; they confuse the code below. */
5602 if (integer_zerop (c))
5603 return NULL_TREE;
5605 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5606 op0 = TREE_OPERAND (t, 0);
5608 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5609 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5611 /* Note that we need not handle conditional operations here since fold
5612 already handles those cases. So just do arithmetic here. */
5613 switch (tcode)
5615 case INTEGER_CST:
5616 /* For a constant, we can always simplify if we are a multiply
5617 or (for divide and modulus) if it is a multiple of our constant. */
5618 if (code == MULT_EXPR
5619 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5620 return const_binop (code, fold_convert (ctype, t),
5621 fold_convert (ctype, c));
5622 break;
5624 CASE_CONVERT: case NON_LVALUE_EXPR:
5625 /* If op0 is an expression ... */
5626 if ((COMPARISON_CLASS_P (op0)
5627 || UNARY_CLASS_P (op0)
5628 || BINARY_CLASS_P (op0)
5629 || VL_EXP_CLASS_P (op0)
5630 || EXPRESSION_CLASS_P (op0))
5631 /* ... and has wrapping overflow, and its type is smaller
5632 than ctype, then we cannot pass through as widening. */
5633 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5634 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5635 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5636 && (TYPE_PRECISION (ctype)
5637 > TYPE_PRECISION (TREE_TYPE (op0))))
5638 /* ... or this is a truncation (t is narrower than op0),
5639 then we cannot pass through this narrowing. */
5640 || (TYPE_PRECISION (type)
5641 < TYPE_PRECISION (TREE_TYPE (op0)))
5642 /* ... or signedness changes for division or modulus,
5643 then we cannot pass through this conversion. */
5644 || (code != MULT_EXPR
5645 && (TYPE_UNSIGNED (ctype)
5646 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5647 /* ... or has undefined overflow while the type being
5648 converted to has not, we cannot do the operation in the inner type
5649 as that would introduce undefined overflow. */
5650 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5651 && !TYPE_OVERFLOW_UNDEFINED (type))))
5652 break;
5654 /* Pass the constant down and see if we can make a simplification. If
5655 we can, replace this expression with the inner simplification for
5656 possible later conversion to our or some other type. */
5657 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5658 && TREE_CODE (t2) == INTEGER_CST
5659 && !TREE_OVERFLOW (t2)
5660 && (0 != (t1 = extract_muldiv (op0, t2, code,
5661 code == MULT_EXPR
5662 ? ctype : NULL_TREE,
5663 strict_overflow_p))))
5664 return t1;
5665 break;
5667 case ABS_EXPR:
5668 /* If widening the type changes it from signed to unsigned, then we
5669 must avoid building ABS_EXPR itself as unsigned. */
5670 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5672 tree cstype = (*signed_type_for) (ctype);
5673 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5674 != 0)
5676 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5677 return fold_convert (ctype, t1);
5679 break;
5681 /* If the constant is negative, we cannot simplify this. */
5682 if (tree_int_cst_sgn (c) == -1)
5683 break;
5684 /* FALLTHROUGH */
5685 case NEGATE_EXPR:
5686 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5687 != 0)
5688 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5689 break;
5691 case MIN_EXPR: case MAX_EXPR:
5692 /* If widening the type changes the signedness, then we can't perform
5693 this optimization as that changes the result. */
5694 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5695 break;
5697 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5698 sub_strict_overflow_p = false;
5699 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5700 &sub_strict_overflow_p)) != 0
5701 && (t2 = extract_muldiv (op1, c, code, wide_type,
5702 &sub_strict_overflow_p)) != 0)
5704 if (tree_int_cst_sgn (c) < 0)
5705 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5706 if (sub_strict_overflow_p)
5707 *strict_overflow_p = true;
5708 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5709 fold_convert (ctype, t2));
5711 break;
5713 case LSHIFT_EXPR: case RSHIFT_EXPR:
5714 /* If the second operand is constant, this is a multiplication
5715 or floor division by a power of two, so we can treat it that
5716 way unless the multiplier or divisor overflows. Signed
5717 left-shift overflow is implementation-defined rather than
5718 undefined in C90, so do not convert signed left shift into
5719 multiplication. */
5720 if (TREE_CODE (op1) == INTEGER_CST
5721 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5722 /* const_binop may not detect overflow correctly,
5723 so check for it explicitly here. */
5724 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5725 && TREE_INT_CST_HIGH (op1) == 0
5726 && 0 != (t1 = fold_convert (ctype,
5727 const_binop (LSHIFT_EXPR,
5728 size_one_node,
5729 op1)))
5730 && !TREE_OVERFLOW (t1))
5731 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5732 ? MULT_EXPR : FLOOR_DIV_EXPR,
5733 ctype,
5734 fold_convert (ctype, op0),
5735 t1),
5736 c, code, wide_type, strict_overflow_p);
5737 break;
5739 case PLUS_EXPR: case MINUS_EXPR:
5740 /* See if we can eliminate the operation on both sides. If we can, we
5741 can return a new PLUS or MINUS. If we can't, the only remaining
5742 cases where we can do anything are if the second operand is a
5743 constant. */
5744 sub_strict_overflow_p = false;
5745 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5746 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5747 if (t1 != 0 && t2 != 0
5748 && (code == MULT_EXPR
5749 /* If not multiplication, we can only do this if both operands
5750 are divisible by c. */
5751 || (multiple_of_p (ctype, op0, c)
5752 && multiple_of_p (ctype, op1, c))))
5754 if (sub_strict_overflow_p)
5755 *strict_overflow_p = true;
5756 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5757 fold_convert (ctype, t2));
5760 /* If this was a subtraction, negate OP1 and set it to be an addition.
5761 This simplifies the logic below. */
5762 if (tcode == MINUS_EXPR)
5764 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5765 /* If OP1 was not easily negatable, the constant may be OP0. */
5766 if (TREE_CODE (op0) == INTEGER_CST)
5768 tree tem = op0;
5769 op0 = op1;
5770 op1 = tem;
5771 tem = t1;
5772 t1 = t2;
5773 t2 = tem;
5777 if (TREE_CODE (op1) != INTEGER_CST)
5778 break;
5780 /* If either OP1 or C is negative, this optimization is not safe for
5781 some of the division and remainder types while for others we need
5782 to change the code. */
5783 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5785 if (code == CEIL_DIV_EXPR)
5786 code = FLOOR_DIV_EXPR;
5787 else if (code == FLOOR_DIV_EXPR)
5788 code = CEIL_DIV_EXPR;
5789 else if (code != MULT_EXPR
5790 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5791 break;
5794 /* If it's a multiply or a division/modulus operation of a multiple
5795 of our constant, do the operation and verify it doesn't overflow. */
5796 if (code == MULT_EXPR
5797 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5799 op1 = const_binop (code, fold_convert (ctype, op1),
5800 fold_convert (ctype, c));
5801 /* We allow the constant to overflow with wrapping semantics. */
5802 if (op1 == 0
5803 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5804 break;
5806 else
5807 break;
5809 /* If we have an unsigned type that is not a sizetype, we cannot widen
5810 the operation since it will change the result if the original
5811 computation overflowed. */
5812 if (TYPE_UNSIGNED (ctype)
5813 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5814 && ctype != type)
5815 break;
5817 /* If we were able to eliminate our operation from the first side,
5818 apply our operation to the second side and reform the PLUS. */
5819 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5820 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5822 /* The last case is when this is a multiply. In that case, we can
5823 apply the distributive law to commute the multiply and addition
5824 if the multiplication of the constants doesn't overflow. */
5825 if (code == MULT_EXPR)
5826 return fold_build2 (tcode, ctype,
5827 fold_build2 (code, ctype,
5828 fold_convert (ctype, op0),
5829 fold_convert (ctype, c)),
5830 op1);
5832 break;
5834 case MULT_EXPR:
5835 /* We have a special case here if we are doing something like
5836 (C * 8) % 4 since we know that's zero. */
5837 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5838 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5839 /* If the multiplication can overflow we cannot optimize this.
5840 ??? Until we can properly mark individual operations as
5841 not overflowing we need to treat sizetype specially here as
5842 stor-layout relies on this optimization to make
5843 DECL_FIELD_BIT_OFFSET always a constant. */
5844 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5845 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5846 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5847 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5848 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5850 *strict_overflow_p = true;
5851 return omit_one_operand (type, integer_zero_node, op0);
5854 /* ... fall through ... */
5856 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5857 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5858 /* If we can extract our operation from the LHS, do so and return a
5859 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5860 do something only if the second operand is a constant. */
5861 if (same_p
5862 && (t1 = extract_muldiv (op0, c, code, wide_type,
5863 strict_overflow_p)) != 0)
5864 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5865 fold_convert (ctype, op1));
5866 else if (tcode == MULT_EXPR && code == MULT_EXPR
5867 && (t1 = extract_muldiv (op1, c, code, wide_type,
5868 strict_overflow_p)) != 0)
5869 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5870 fold_convert (ctype, t1));
5871 else if (TREE_CODE (op1) != INTEGER_CST)
5872 return 0;
5874 /* If these are the same operation types, we can associate them
5875 assuming no overflow. */
5876 if (tcode == code
5877 && 0 != (t1 = int_const_binop (MULT_EXPR,
5878 fold_convert (ctype, op1),
5879 fold_convert (ctype, c), 1))
5880 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5881 (TYPE_UNSIGNED (ctype)
5882 && tcode != MULT_EXPR) ? -1 : 1,
5883 TREE_OVERFLOW (t1)))
5884 && !TREE_OVERFLOW (t1))
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5887 /* If these operations "cancel" each other, we have the main
5888 optimizations of this pass, which occur when either constant is a
5889 multiple of the other, in which case we replace this with an
5890 operation of either CODE or TCODE.
5892 If we have an unsigned type that is not a sizetype, we cannot do
5893 this since it will change the result if the original computation
5894 overflowed. */
5895 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5896 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5897 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5898 || (tcode == MULT_EXPR
5899 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5900 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5901 && code != MULT_EXPR)))
5903 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5910 op1, c)));
5912 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5914 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5915 *strict_overflow_p = true;
5916 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5917 fold_convert (ctype,
5918 const_binop (TRUNC_DIV_EXPR,
5919 c, op1)));
5922 break;
5924 default:
5925 break;
5928 return 0;
5931 /* Return a node which has the indicated constant VALUE (either 0 or
5932 1), and is of the indicated TYPE. */
5934 tree
5935 constant_boolean_node (int value, tree type)
5937 if (type == integer_type_node)
5938 return value ? integer_one_node : integer_zero_node;
5939 else if (type == boolean_type_node)
5940 return value ? boolean_true_node : boolean_false_node;
5941 else
5942 return build_int_cst (type, value);
5946 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5947 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5948 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5949 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5950 COND is the first argument to CODE; otherwise (as in the example
5951 given here), it is the second argument. TYPE is the type of the
5952 original expression. Return NULL_TREE if no simplification is
5953 possible. */
5955 static tree
5956 fold_binary_op_with_conditional_arg (location_t loc,
5957 enum tree_code code,
5958 tree type, tree op0, tree op1,
5959 tree cond, tree arg, int cond_first_p)
5961 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5962 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5963 tree test, true_value, false_value;
5964 tree lhs = NULL_TREE;
5965 tree rhs = NULL_TREE;
5967 if (TREE_CODE (cond) == COND_EXPR)
5969 test = TREE_OPERAND (cond, 0);
5970 true_value = TREE_OPERAND (cond, 1);
5971 false_value = TREE_OPERAND (cond, 2);
5972 /* If this operand throws an exception (and hence has void type),
5973 then it does not make sense to try to perform a logical or
5974 arithmetic operation involving it. */
5975 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5976 lhs = true_value;
5977 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5978 rhs = false_value;
5980 else
5982 tree testtype = TREE_TYPE (cond);
5983 test = cond;
5984 true_value = constant_boolean_node (true, testtype);
5985 false_value = constant_boolean_node (false, testtype);
5988 /* This transformation is only worthwhile if we don't have to wrap ARG
5989 in a SAVE_EXPR and the operation can be simplified on at least one
5990 of the branches once it is pushed inside the COND_EXPR. */
5991 if (!TREE_CONSTANT (arg)
5992 && (TREE_SIDE_EFFECTS (arg)
5993 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5994 return NULL_TREE;
5996 arg = fold_convert_loc (loc, arg_type, arg);
5997 if (lhs == 0)
5999 true_value = fold_convert_loc (loc, cond_type, true_value);
6000 if (cond_first_p)
6001 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6002 else
6003 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6005 if (rhs == 0)
6007 false_value = fold_convert_loc (loc, cond_type, false_value);
6008 if (cond_first_p)
6009 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6010 else
6011 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6014 /* Check that we have simplified at least one of the branches. */
6015 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6016 return NULL_TREE;
6018 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
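/* Editor's illustration (hypothetical operands): the transform above
   rewrites

       a + (b ? x : y)   into   b ? (a + x) : (a + y)

   which is only emitted when at least one new arm simplifies, e.g.
   when a and y are both constants.  */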
6022 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6024 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6025 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6026 ADDEND is the same as X.
6028 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6029 and finite. The problematic cases are when X is zero, and its mode
6030 has signed zeros. In the case of rounding towards -infinity,
6031 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6032 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6034 bool
6035 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6037 if (!real_zerop (addend))
6038 return false;
6040 /* Don't allow the fold with -fsignaling-nans. */
6041 if (HONOR_SNANS (TYPE_MODE (type)))
6042 return false;
6044 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6045 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6046 return true;
6048 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6049 if (TREE_CODE (addend) == REAL_CST
6050 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6051 negate = !negate;
6053 /* The mode has signed zeros, and we have to honor their sign.
6054 In this situation, there is only one case we can return true for.
6055 X - 0 is the same as X unless rounding towards -infinity is
6056 supported. */
6057 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
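/* Editor's illustration: when the mode has signed zeros that matter,

       x + 0.0 is not the same as x, because -0.0 + 0.0 is +0.0;
       x - 0.0 is the same as x, except when rounding towards
               -infinity may be in effect (0.0 - 0.0 is then -0.0).

   Hence only the subtraction (NEGATE) case can return true here.  */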
6060 /* Subroutine of fold() that checks comparisons of built-in math
6061 functions against real constants.
6063 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6064 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6065 is the type of the result and ARG0 and ARG1 are the operands of the
6066 comparison. ARG1 must be a TREE_REAL_CST.
6068 The function returns the constant folded tree if a simplification
6069 can be made, and NULL_TREE otherwise. */
6071 static tree
6072 fold_mathfn_compare (location_t loc,
6073 enum built_in_function fcode, enum tree_code code,
6074 tree type, tree arg0, tree arg1)
6076 REAL_VALUE_TYPE c;
6078 if (BUILTIN_SQRT_P (fcode))
6080 tree arg = CALL_EXPR_ARG (arg0, 0);
6081 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6083 c = TREE_REAL_CST (arg1);
6084 if (REAL_VALUE_NEGATIVE (c))
6086 /* sqrt(x) < y is always false, if y is negative. */
6087 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6088 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6090 /* sqrt(x) > y is always true, if y is negative and we
6091 don't care about NaNs, i.e. negative values of x. */
6092 if (code == NE_EXPR || !HONOR_NANS (mode))
6093 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6095 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6096 return fold_build2_loc (loc, GE_EXPR, type, arg,
6097 build_real (TREE_TYPE (arg), dconst0));
6099 else if (code == GT_EXPR || code == GE_EXPR)
6101 REAL_VALUE_TYPE c2;
6103 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6104 real_convert (&c2, mode, &c2);
6106 if (REAL_VALUE_ISINF (c2))
6108 /* sqrt(x) > y is x == +Inf, when y is very large. */
6109 if (HONOR_INFINITIES (mode))
6110 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6111 build_real (TREE_TYPE (arg), c2));
6113 /* sqrt(x) > y is always false, when y is very large
6114 and we don't care about infinities. */
6115 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6118 /* sqrt(x) > c is the same as x > c*c. */
6119 return fold_build2_loc (loc, code, type, arg,
6120 build_real (TREE_TYPE (arg), c2));
6122 else if (code == LT_EXPR || code == LE_EXPR)
6124 REAL_VALUE_TYPE c2;
6126 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6127 real_convert (&c2, mode, &c2);
6129 if (REAL_VALUE_ISINF (c2))
6131 /* sqrt(x) < y is always true, when y is a very large
6132 value and we don't care about NaNs or Infinities. */
6133 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6134 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6136 /* sqrt(x) < y is x != +Inf when y is very large and we
6137 don't care about NaNs. */
6138 if (! HONOR_NANS (mode))
6139 return fold_build2_loc (loc, NE_EXPR, type, arg,
6140 build_real (TREE_TYPE (arg), c2));
6142 /* sqrt(x) < y is x >= 0 when y is very large and we
6143 don't care about Infinities. */
6144 if (! HONOR_INFINITIES (mode))
6145 return fold_build2_loc (loc, GE_EXPR, type, arg,
6146 build_real (TREE_TYPE (arg), dconst0));
6148 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6149 if (lang_hooks.decls.global_bindings_p () != 0
6150 || CONTAINS_PLACEHOLDER_P (arg))
6151 return NULL_TREE;
6153 arg = save_expr (arg);
6154 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6155 fold_build2_loc (loc, GE_EXPR, type, arg,
6156 build_real (TREE_TYPE (arg),
6157 dconst0)),
6158 fold_build2_loc (loc, NE_EXPR, type, arg,
6159 build_real (TREE_TYPE (arg),
6160 c2)));
6163 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6164 if (! HONOR_NANS (mode))
6165 return fold_build2_loc (loc, code, type, arg,
6166 build_real (TREE_TYPE (arg), c2));
6168 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6169 if (lang_hooks.decls.global_bindings_p () == 0
6170 && ! CONTAINS_PLACEHOLDER_P (arg))
6172 arg = save_expr (arg);
6173 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6174 fold_build2_loc (loc, GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg),
6176 dconst0)),
6177 fold_build2_loc (loc, code, type, arg,
6178 build_real (TREE_TYPE (arg),
6179 c2)));
6184 return NULL_TREE;
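/* Editor's illustration (hypothetical operands), following the cases
   above for a double argument:

       sqrt (x) >  2.0    ->   x > 4.0
       sqrt (x) <  2.0    ->   x >= 0.0 && x < 4.0   (NaNs honored)
                          ->   x < 4.0               (NaNs ignored)
       sqrt (x) <  -1.0   ->   false          (sqrt is never negative)
*/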
6187 /* Subroutine of fold() that optimizes comparisons against Infinities,
6188 either +Inf or -Inf.
6190 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6191 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6192 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6194 The function returns the constant folded tree if a simplification
6195 can be made, and NULL_TREE otherwise. */
6197 static tree
6198 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6199 tree arg0, tree arg1)
6201 enum machine_mode mode;
6202 REAL_VALUE_TYPE max;
6203 tree temp;
6204 bool neg;
6206 mode = TYPE_MODE (TREE_TYPE (arg0));
6208 /* For negative infinity swap the sense of the comparison. */
6209 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6210 if (neg)
6211 code = swap_tree_comparison (code);
6213 switch (code)
6215 case GT_EXPR:
6216 /* x > +Inf is always false, if we ignore sNaNs. */
6217 if (HONOR_SNANS (mode))
6218 return NULL_TREE;
6219 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6221 case LE_EXPR:
6222 /* x <= +Inf is always true, if we don't care about NaNs. */
6223 if (! HONOR_NANS (mode))
6224 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6226 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6227 if (lang_hooks.decls.global_bindings_p () == 0
6228 && ! CONTAINS_PLACEHOLDER_P (arg0))
6230 arg0 = save_expr (arg0);
6231 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6233 break;
6235 case EQ_EXPR:
6236 case GE_EXPR:
6237 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6238 real_maxval (&max, neg, mode);
6239 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6240 arg0, build_real (TREE_TYPE (arg0), max));
6242 case LT_EXPR:
6243 /* x < +Inf is always equal to x <= DBL_MAX. */
6244 real_maxval (&max, neg, mode);
6245 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6246 arg0, build_real (TREE_TYPE (arg0), max));
6248 case NE_EXPR:
6249 /* x != +Inf is always equal to !(x > DBL_MAX). */
6250 real_maxval (&max, neg, mode);
6251 if (! HONOR_NANS (mode))
6252 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6253 arg0, build_real (TREE_TYPE (arg0), max));
6255 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6257 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6259 default:
6260 break;
6263 return NULL_TREE;
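/* Editor's illustration (IEEE double, hypothetical operands):

       x >  +Inf   ->   false              (unless sNaNs are honored)
       x <  +Inf   ->   x <= DBL_MAX
       x >= +Inf   ->   x >  DBL_MAX
       x <= +Inf   ->   true               (NaNs ignored)
                   ->   x == x             (NaNs honored, i.e. !isnan (x))

   For -Inf the sense of the comparison is swapped first.  */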
6266 /* Subroutine of fold() that optimizes comparisons of a division by
6267 a nonzero integer constant against an integer constant, i.e.
6268 X/C1 op C2.
6270 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6271 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6272 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6274 The function returns the constant folded tree if a simplification
6275 can be made, and NULL_TREE otherwise. */
6277 static tree
6278 fold_div_compare (location_t loc,
6279 enum tree_code code, tree type, tree arg0, tree arg1)
6281 tree prod, tmp, hi, lo;
6282 tree arg00 = TREE_OPERAND (arg0, 0);
6283 tree arg01 = TREE_OPERAND (arg0, 1);
6284 double_int val;
6285 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6286 bool neg_overflow;
6287 int overflow;
6289 /* We have to do this the hard way to detect unsigned overflow.
6290 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6291 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6292 TREE_INT_CST_HIGH (arg01),
6293 TREE_INT_CST_LOW (arg1),
6294 TREE_INT_CST_HIGH (arg1),
6295 &val.low, &val.high, unsigned_p);
6296 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6297 neg_overflow = false;
6299 if (unsigned_p)
6301 tmp = int_const_binop (MINUS_EXPR, arg01,
6302 build_int_cst (TREE_TYPE (arg01), 1), 0);
6303 lo = prod;
6305 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6306 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6307 TREE_INT_CST_HIGH (prod),
6308 TREE_INT_CST_LOW (tmp),
6309 TREE_INT_CST_HIGH (tmp),
6310 &val.low, &val.high, unsigned_p);
6311 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6312 -1, overflow | TREE_OVERFLOW (prod));
6314 else if (tree_int_cst_sgn (arg01) >= 0)
6316 tmp = int_const_binop (MINUS_EXPR, arg01,
6317 build_int_cst (TREE_TYPE (arg01), 1), 0);
6318 switch (tree_int_cst_sgn (arg1))
6320 case -1:
6321 neg_overflow = true;
6322 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6323 hi = prod;
6324 break;
6326 case 0:
6327 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6328 hi = tmp;
6329 break;
6331 case 1:
6332 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6333 lo = prod;
6334 break;
6336 default:
6337 gcc_unreachable ();
6340 else
6342 /* A negative divisor reverses the relational operators. */
6343 code = swap_tree_comparison (code);
6345 tmp = int_const_binop (PLUS_EXPR, arg01,
6346 build_int_cst (TREE_TYPE (arg01), 1), 0);
6347 switch (tree_int_cst_sgn (arg1))
6349 case -1:
6350 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6351 lo = prod;
6352 break;
6354 case 0:
6355 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6356 lo = tmp;
6357 break;
6359 case 1:
6360 neg_overflow = true;
6361 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6362 hi = prod;
6363 break;
6365 default:
6366 gcc_unreachable ();
6370 switch (code)
6372 case EQ_EXPR:
6373 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6374 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6375 if (TREE_OVERFLOW (hi))
6376 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6377 if (TREE_OVERFLOW (lo))
6378 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6379 return build_range_check (loc, type, arg00, 1, lo, hi);
6381 case NE_EXPR:
6382 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6383 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6384 if (TREE_OVERFLOW (hi))
6385 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6386 if (TREE_OVERFLOW (lo))
6387 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6388 return build_range_check (loc, type, arg00, 0, lo, hi);
6390 case LT_EXPR:
6391 if (TREE_OVERFLOW (lo))
6393 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6394 return omit_one_operand_loc (loc, type, tmp, arg00);
6396 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6398 case LE_EXPR:
6399 if (TREE_OVERFLOW (hi))
6401 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6402 return omit_one_operand_loc (loc, type, tmp, arg00);
6404 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6406 case GT_EXPR:
6407 if (TREE_OVERFLOW (hi))
6409 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6410 return omit_one_operand_loc (loc, type, tmp, arg00);
6412 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6414 case GE_EXPR:
6415 if (TREE_OVERFLOW (lo))
6417 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6418 return omit_one_operand_loc (loc, type, tmp, arg00);
6420 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6422 default:
6423 break;
6426 return NULL_TREE;
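/* Editor's illustration (signed int, truncating division): with
   C1 == 4 and C2 == 3 the bounds computed above are LO == 12 and
   HI == 15, giving

       x / 4 == 3   ->   12 <= x && x <= 15   (a range check)
       x / 4 >  3   ->   x > 15
       x / 4 <  3   ->   x < 12
*/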
6430 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6431 equality/inequality test, then return a simplified form of the test
6432 using a sign test. Otherwise return NULL. TYPE is the desired
6433 result type. */
6435 static tree
6436 fold_single_bit_test_into_sign_test (location_t loc,
6437 enum tree_code code, tree arg0, tree arg1,
6438 tree result_type)
6440 /* If this is testing a single bit, we can optimize the test. */
6441 if ((code == NE_EXPR || code == EQ_EXPR)
6442 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6443 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6445 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6446 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6447 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6449 if (arg00 != NULL_TREE
6450 /* This is only a win if casting to a signed type is cheap,
6451 i.e. when arg00's type is not a partial mode. */
6452 && TYPE_PRECISION (TREE_TYPE (arg00))
6453 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6455 tree stype = signed_type_for (TREE_TYPE (arg00));
6456 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6457 result_type,
6458 fold_convert_loc (loc, stype, arg00),
6459 build_int_cst (stype, 0));
6463 return NULL_TREE;
6466 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6467 equality/inequality test, then return a simplified form of
6468 the test using shifts and logical operations. Otherwise return
6469 NULL. TYPE is the desired result type. */
6471 tree
6472 fold_single_bit_test (location_t loc, enum tree_code code,
6473 tree arg0, tree arg1, tree result_type)
6475 /* If this is testing a single bit, we can optimize the test. */
6476 if ((code == NE_EXPR || code == EQ_EXPR)
6477 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6478 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6480 tree inner = TREE_OPERAND (arg0, 0);
6481 tree type = TREE_TYPE (arg0);
6482 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6483 enum machine_mode operand_mode = TYPE_MODE (type);
6484 int ops_unsigned;
6485 tree signed_type, unsigned_type, intermediate_type;
6486 tree tem, one;
6488 /* First, see if we can fold the single bit test into a sign-bit
6489 test. */
6490 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6491 result_type);
6492 if (tem)
6493 return tem;
6495 /* Otherwise we have (A & C) != 0 where C is a single bit,
6496 convert that into ((A >> C2) & 1), where C2 = log2(C).
6497 Similarly for (A & C) == 0. */
6499 /* If INNER is a right shift of a constant and it plus BITNUM does
6500 not overflow, adjust BITNUM and INNER. */
6501 if (TREE_CODE (inner) == RSHIFT_EXPR
6502 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6503 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6504 && bitnum < TYPE_PRECISION (type)
6505 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6506 bitnum - TYPE_PRECISION (type)))
6508 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6509 inner = TREE_OPERAND (inner, 0);
6512 /* If we are going to be able to omit the AND below, we must do our
6513 operations as unsigned. If we must use the AND, we have a choice.
6514 Normally unsigned is faster, but for some machines signed is. */
6515 #ifdef LOAD_EXTEND_OP
6516 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6517 && !flag_syntax_only) ? 0 : 1;
6518 #else
6519 ops_unsigned = 1;
6520 #endif
6522 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6523 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6524 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6525 inner = fold_convert_loc (loc, intermediate_type, inner);
6527 if (bitnum != 0)
6528 inner = build2 (RSHIFT_EXPR, intermediate_type,
6529 inner, size_int (bitnum));
6531 one = build_int_cst (intermediate_type, 1);
6533 if (code == EQ_EXPR)
6534 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6536 /* Put the AND last so it can combine with more things. */
6537 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6539 /* Make sure to return the proper type. */
6540 inner = fold_convert_loc (loc, result_type, inner);
6542 return inner;
6544 return NULL_TREE;
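/* Editor's sketch (not part of the original file): the shift/AND
   rewrite above at the C level, for a bit that is not the sign bit.
   Both helpers return the same value for every A.  */
static inline int
single_bit_test_demo_before (unsigned int a)
{
  return (a & 8) != 0;      /* (A & C) != 0 with C == 1 << 3 */
}

static inline int
single_bit_test_demo_after (unsigned int a)
{
  return (a >> 3) & 1;      /* (A >> log2 (C)) & 1 */
}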
6547 /* Check whether we are allowed to reorder operands arg0 and arg1,
6548 such that the evaluation of arg1 occurs before arg0. */
6550 static bool
6551 reorder_operands_p (const_tree arg0, const_tree arg1)
6553 if (! flag_evaluation_order)
6554 return true;
6555 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6556 return true;
6557 return ! TREE_SIDE_EFFECTS (arg0)
6558 && ! TREE_SIDE_EFFECTS (arg1);
6561 /* Test whether it is preferable to swap two operands, ARG0 and
6562 ARG1, for example because ARG0 is an integer constant and ARG1
6563 isn't. If REORDER is true, only recommend swapping if we can
6564 evaluate the operands in reverse order. */
6566 bool
6567 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6569 STRIP_SIGN_NOPS (arg0);
6570 STRIP_SIGN_NOPS (arg1);
6572 if (TREE_CODE (arg1) == INTEGER_CST)
6573 return 0;
6574 if (TREE_CODE (arg0) == INTEGER_CST)
6575 return 1;
6577 if (TREE_CODE (arg1) == REAL_CST)
6578 return 0;
6579 if (TREE_CODE (arg0) == REAL_CST)
6580 return 1;
6582 if (TREE_CODE (arg1) == FIXED_CST)
6583 return 0;
6584 if (TREE_CODE (arg0) == FIXED_CST)
6585 return 1;
6587 if (TREE_CODE (arg1) == COMPLEX_CST)
6588 return 0;
6589 if (TREE_CODE (arg0) == COMPLEX_CST)
6590 return 1;
6592 if (TREE_CONSTANT (arg1))
6593 return 0;
6594 if (TREE_CONSTANT (arg0))
6595 return 1;
6597 if (optimize_function_for_size_p (cfun))
6598 return 0;
6600 if (reorder && flag_evaluation_order
6601 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6602 return 0;
6604 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6605 for commutative and comparison operators. Ensuring a canonical
6606 form allows the optimizers to find additional redundancies without
6607 having to explicitly check for both orderings. */
6608 if (TREE_CODE (arg0) == SSA_NAME
6609 && TREE_CODE (arg1) == SSA_NAME
6610 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6611 return 1;
6613 /* Put SSA_NAMEs last. */
6614 if (TREE_CODE (arg1) == SSA_NAME)
6615 return 0;
6616 if (TREE_CODE (arg0) == SSA_NAME)
6617 return 1;
6619 /* Put variables last. */
6620 if (DECL_P (arg1))
6621 return 0;
6622 if (DECL_P (arg0))
6623 return 1;
6625 return 0;
6628 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6629 ARG0 is extended to a wider type. */
6631 static tree
6632 fold_widened_comparison (location_t loc, enum tree_code code,
6633 tree type, tree arg0, tree arg1)
6635 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6636 tree arg1_unw;
6637 tree shorter_type, outer_type;
6638 tree min, max;
6639 bool above, below;
6641 if (arg0_unw == arg0)
6642 return NULL_TREE;
6643 shorter_type = TREE_TYPE (arg0_unw);
6645 #ifdef HAVE_canonicalize_funcptr_for_compare
6646 /* Disable this optimization if we're casting a function pointer
6647 type on targets that require function pointer canonicalization. */
6648 if (HAVE_canonicalize_funcptr_for_compare
6649 && TREE_CODE (shorter_type) == POINTER_TYPE
6650 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6651 return NULL_TREE;
6652 #endif
6654 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6655 return NULL_TREE;
6657 arg1_unw = get_unwidened (arg1, NULL_TREE);
6659 /* If possible, express the comparison in the shorter mode. */
6660 if ((code == EQ_EXPR || code == NE_EXPR
6661 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6662 && (TREE_TYPE (arg1_unw) == shorter_type
6663 || ((TYPE_PRECISION (shorter_type)
6664 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6665 && (TYPE_UNSIGNED (shorter_type)
6666 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6667 || (TREE_CODE (arg1_unw) == INTEGER_CST
6668 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6669 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6670 && int_fits_type_p (arg1_unw, shorter_type))))
6671 return fold_build2_loc (loc, code, type, arg0_unw,
6672 fold_convert_loc (loc, shorter_type, arg1_unw));
6674 if (TREE_CODE (arg1_unw) != INTEGER_CST
6675 || TREE_CODE (shorter_type) != INTEGER_TYPE
6676 || !int_fits_type_p (arg1_unw, shorter_type))
6677 return NULL_TREE;
6679 /* If we are comparing with an integer that does not fit into the range
6680 of the shorter type, the result is known. */
6681 outer_type = TREE_TYPE (arg1_unw);
6682 min = lower_bound_in_type (outer_type, shorter_type);
6683 max = upper_bound_in_type (outer_type, shorter_type);
6685 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6686 max, arg1_unw));
6687 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6688 arg1_unw, min));
6690 switch (code)
6692 case EQ_EXPR:
6693 if (above || below)
6694 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6695 break;
6697 case NE_EXPR:
6698 if (above || below)
6699 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6700 break;
6702 case LT_EXPR:
6703 case LE_EXPR:
6704 if (above)
6705 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6706 else if (below)
6707 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6709 case GT_EXPR:
6710 case GE_EXPR:
6711 if (above)
6712 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6713 else if (below)
6714 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6716 default:
6717 break;
6720 return NULL_TREE;
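/* Editor's illustration: for "unsigned short us" the comparison

       (int) us < 70000

   compares against a constant outside [0, 65535], the range of the
   shorter type, so it folds to constant true by the LT_EXPR case
   above.  */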
6723 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6724 ARG0 just the signedness is changed. */
6726 static tree
6727 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6728 tree arg0, tree arg1)
6730 tree arg0_inner;
6731 tree inner_type, outer_type;
6733 if (!CONVERT_EXPR_P (arg0))
6734 return NULL_TREE;
6736 outer_type = TREE_TYPE (arg0);
6737 arg0_inner = TREE_OPERAND (arg0, 0);
6738 inner_type = TREE_TYPE (arg0_inner);
6740 #ifdef HAVE_canonicalize_funcptr_for_compare
6741 /* Disable this optimization if we're casting a function pointer
6742 type on targets that require function pointer canonicalization. */
6743 if (HAVE_canonicalize_funcptr_for_compare
6744 && TREE_CODE (inner_type) == POINTER_TYPE
6745 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6746 return NULL_TREE;
6747 #endif
6749 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6750 return NULL_TREE;
6752 if (TREE_CODE (arg1) != INTEGER_CST
6753 && !(CONVERT_EXPR_P (arg1)
6754 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6755 return NULL_TREE;
6757 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6758 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6759 && code != NE_EXPR
6760 && code != EQ_EXPR)
6761 return NULL_TREE;
6763 if (TREE_CODE (arg1) == INTEGER_CST)
6764 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6765 0, TREE_OVERFLOW (arg1));
6766 else
6767 arg1 = fold_convert_loc (loc, inner_type, arg1);
6769 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6772 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6773 the step of the array. Reconstructs s and delta in the case of s *
6774 delta being an integer constant (and thus already folded). ADDR is
6775 the address. OP1 is the multiplicative expression. If the
6776 function succeeds, the new address expression is returned.
6777 Otherwise NULL_TREE is returned. LOC is the location of the
6778 resulting expression. */
6780 static tree
6781 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6783 tree s, delta, step;
6784 tree ref = TREE_OPERAND (addr, 0), pref;
6785 tree ret, pos;
6786 tree itype;
6787 bool mdim = false;
6789 /* Strip the nops that might be added when converting op1 to sizetype. */
6790 STRIP_NOPS (op1);
6792 /* Canonicalize op1 into a possibly non-constant delta
6793 and an INTEGER_CST s. */
6794 if (TREE_CODE (op1) == MULT_EXPR)
6796 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6798 STRIP_NOPS (arg0);
6799 STRIP_NOPS (arg1);
6801 if (TREE_CODE (arg0) == INTEGER_CST)
6803 s = arg0;
6804 delta = arg1;
6806 else if (TREE_CODE (arg1) == INTEGER_CST)
6808 s = arg1;
6809 delta = arg0;
6811 else
6812 return NULL_TREE;
6814 else if (TREE_CODE (op1) == INTEGER_CST)
6816 delta = op1;
6817 s = NULL_TREE;
6819 else
6821 /* Treat op1 as delta * 1. */
6822 delta = op1;
6823 s = integer_one_node;
6826 for (;; ref = TREE_OPERAND (ref, 0))
6828 if (TREE_CODE (ref) == ARRAY_REF)
6830 tree domain;
6832 /* Remember if this was a multi-dimensional array. */
6833 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6834 mdim = true;
6836 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6837 if (! domain)
6838 continue;
6839 itype = TREE_TYPE (domain);
6841 step = array_ref_element_size (ref);
6842 if (TREE_CODE (step) != INTEGER_CST)
6843 continue;
6845 if (s)
6847 if (! tree_int_cst_equal (step, s))
6848 continue;
6850 else
6852 /* Check whether delta is a multiple of step. */
6853 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6854 if (! tmp)
6855 continue;
6856 delta = tmp;
6859 /* Only fold here if we can verify we do not overflow one
6860 dimension of a multi-dimensional array. */
6861 if (mdim)
6863 tree tmp;
6865 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6866 || !TYPE_MAX_VALUE (domain)
6867 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6868 continue;
6870 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6871 fold_convert_loc (loc, itype,
6872 TREE_OPERAND (ref, 1)),
6873 fold_convert_loc (loc, itype, delta));
6874 if (!tmp
6875 || TREE_CODE (tmp) != INTEGER_CST
6876 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6877 continue;
6880 break;
6882 else
6883 mdim = false;
6885 if (!handled_component_p (ref))
6886 return NULL_TREE;
6889 /* We found a suitable array reference. Copy everything up to it,
6890 and replace the index. */
6892 pref = TREE_OPERAND (addr, 0);
6893 ret = copy_node (pref);
6894 SET_EXPR_LOCATION (ret, loc);
6895 pos = ret;
6897 while (pref != ref)
6899 pref = TREE_OPERAND (pref, 0);
6900 TREE_OPERAND (pos, 0) = copy_node (pref);
6901 pos = TREE_OPERAND (pos, 0);
6904 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6905 fold_convert_loc (loc, itype,
6906 TREE_OPERAND (pos, 1)),
6907 fold_convert_loc (loc, itype, delta));
6909 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
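/* Editor's illustration (assuming 4-byte int): for "int a[10]" the
   address computation

       &a[2] p+ 12             (12 == 3 * sizeof (int))

   is rewritten to &a[5]; for a multi-dimensional array the new index
   is additionally checked against the domain bound above.  */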
6913 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6914 means A >= Y && A != MAX, but in this case we know that
6915 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6917 static tree
6918 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6920 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6922 if (TREE_CODE (bound) == LT_EXPR)
6923 a = TREE_OPERAND (bound, 0);
6924 else if (TREE_CODE (bound) == GT_EXPR)
6925 a = TREE_OPERAND (bound, 1);
6926 else
6927 return NULL_TREE;
6929 typea = TREE_TYPE (a);
6930 if (!INTEGRAL_TYPE_P (typea)
6931 && !POINTER_TYPE_P (typea))
6932 return NULL_TREE;
6934 if (TREE_CODE (ineq) == LT_EXPR)
6936 a1 = TREE_OPERAND (ineq, 1);
6937 y = TREE_OPERAND (ineq, 0);
6939 else if (TREE_CODE (ineq) == GT_EXPR)
6941 a1 = TREE_OPERAND (ineq, 0);
6942 y = TREE_OPERAND (ineq, 1);
6944 else
6945 return NULL_TREE;
6947 if (TREE_TYPE (a1) != typea)
6948 return NULL_TREE;
6950 if (POINTER_TYPE_P (typea))
6952 /* Convert the pointers into integers before taking the difference. */
6953 tree ta = fold_convert_loc (loc, ssizetype, a);
6954 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6955 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6957 else
6958 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6960 if (!diff || !integer_onep (diff))
6961 return NULL_TREE;
6963 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6966 /* Fold a sum or difference of at least one multiplication.
6967 Returns the folded tree or NULL if no simplification could be made. */
6969 static tree
6970 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6971 tree arg0, tree arg1)
6973 tree arg00, arg01, arg10, arg11;
6974 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6976 /* (A * C) +- (B * C) -> (A+-B) * C.
6977 (A * C) +- A -> A * (C+-1).
6978 We are most concerned about the case where C is a constant,
6979 but other combinations show up during loop reduction. Since
6980 it is not difficult, try all four possibilities. */
6982 if (TREE_CODE (arg0) == MULT_EXPR)
6984 arg00 = TREE_OPERAND (arg0, 0);
6985 arg01 = TREE_OPERAND (arg0, 1);
6987 else if (TREE_CODE (arg0) == INTEGER_CST)
6989 arg00 = build_one_cst (type);
6990 arg01 = arg0;
6992 else
6994 /* We cannot generate constant 1 for fract. */
6995 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6996 return NULL_TREE;
6997 arg00 = arg0;
6998 arg01 = build_one_cst (type);
7000 if (TREE_CODE (arg1) == MULT_EXPR)
7002 arg10 = TREE_OPERAND (arg1, 0);
7003 arg11 = TREE_OPERAND (arg1, 1);
7005 else if (TREE_CODE (arg1) == INTEGER_CST)
7007 arg10 = build_one_cst (type);
7008 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7009 the purpose of this transformation. */
7010 if (TREE_INT_CST_HIGH (arg1) == -1
7011 && negate_expr_p (arg1)
7012 && code == PLUS_EXPR)
7014 arg11 = negate_expr (arg1);
7015 code = MINUS_EXPR;
7017 else
7018 arg11 = arg1;
7020 else
7022 /* We cannot generate constant 1 for fract. */
7023 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7024 return NULL_TREE;
7025 arg10 = arg1;
7026 arg11 = build_one_cst (type);
7028 same = NULL_TREE;
7030 if (operand_equal_p (arg01, arg11, 0))
7031 same = arg01, alt0 = arg00, alt1 = arg10;
7032 else if (operand_equal_p (arg00, arg10, 0))
7033 same = arg00, alt0 = arg01, alt1 = arg11;
7034 else if (operand_equal_p (arg00, arg11, 0))
7035 same = arg00, alt0 = arg01, alt1 = arg10;
7036 else if (operand_equal_p (arg01, arg10, 0))
7037 same = arg01, alt0 = arg00, alt1 = arg11;
7039 /* No identical multiplicands; see if we can find a common
7040 power-of-two factor in non-power-of-two multiplies. This
7041 can help in multi-dimensional array access. */
7042 else if (host_integerp (arg01, 0)
7043 && host_integerp (arg11, 0))
7045 HOST_WIDE_INT int01, int11, tmp;
7046 bool swap = false;
7047 tree maybe_same;
7048 int01 = TREE_INT_CST_LOW (arg01);
7049 int11 = TREE_INT_CST_LOW (arg11);
7051 /* Move min of absolute values to int11. */
7052 if ((int01 >= 0 ? int01 : -int01)
7053 < (int11 >= 0 ? int11 : -int11))
7055 tmp = int01, int01 = int11, int11 = tmp;
7056 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7057 maybe_same = arg01;
7058 swap = true;
7060 else
7061 maybe_same = arg11;
7063 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7064 /* The remainder should not be a constant, otherwise we
7065 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7066 increase the number of multiplications necessary. */
7067 && TREE_CODE (arg10) != INTEGER_CST)
7069 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7070 build_int_cst (TREE_TYPE (arg00),
7071 int01 / int11));
7072 alt1 = arg10;
7073 same = maybe_same;
7074 if (swap)
7075 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7079 if (same)
7080 return fold_build2_loc (loc, MULT_EXPR, type,
7081 fold_build2_loc (loc, code, type,
7082 fold_convert_loc (loc, type, alt0),
7083 fold_convert_loc (loc, type, alt1)),
7084 fold_convert_loc (loc, type, same));
7086 return NULL_TREE;
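/* Editor's illustration of the power-of-two path above: with no
   identical multiplicand,

       x * 12 + y * 4   ->   (x * 3 + y) * 4

   saves one multiplication, while i * 4 + 2 is deliberately left
   alone (see the INTEGER_CST guard on the remainder above).  */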
7089 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7090 specified by EXPR into the buffer PTR of length LEN bytes.
7091 Return the number of bytes placed in the buffer, or zero
7092 upon failure. */
7094 static int
7095 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7097 tree type = TREE_TYPE (expr);
7098 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7099 int byte, offset, word, words;
7100 unsigned char value;
7102 if (total_bytes > len)
7103 return 0;
7104 words = total_bytes / UNITS_PER_WORD;
7106 for (byte = 0; byte < total_bytes; byte++)
7108 int bitpos = byte * BITS_PER_UNIT;
7109 if (bitpos < HOST_BITS_PER_WIDE_INT)
7110 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7111 else
7112 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7113 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7115 if (total_bytes > UNITS_PER_WORD)
7117 word = byte / UNITS_PER_WORD;
7118 if (WORDS_BIG_ENDIAN)
7119 word = (words - 1) - word;
7120 offset = word * UNITS_PER_WORD;
7121 if (BYTES_BIG_ENDIAN)
7122 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7123 else
7124 offset += byte % UNITS_PER_WORD;
7126 else
7127 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7128 ptr[offset] = value;
7130 return total_bytes;
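/* Editor's illustration (assuming 32-bit int, 8-bit units): encoding
   the INTEGER_CST 0x01020304 stores, in target byte order,

       little-endian:  04 03 02 01
       big-endian:     01 02 03 04
*/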
7134 /* Subroutine of native_encode_expr. Encode the REAL_CST
7135 specified by EXPR into the buffer PTR of length LEN bytes.
7136 Return the number of bytes placed in the buffer, or zero
7137 upon failure. */
7139 static int
7140 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7142 tree type = TREE_TYPE (expr);
7143 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7144 int byte, offset, word, words, bitpos;
7145 unsigned char value;
7147 /* There are always 32 bits in each long, no matter the size of
7148 the host's long. We handle floating-point representations with
7149 up to 192 bits. */
7150 long tmp[6];
7152 if (total_bytes > len)
7153 return 0;
7154 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7156 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7158 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7159 bitpos += BITS_PER_UNIT)
7161 byte = (bitpos / BITS_PER_UNIT) & 3;
7162 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7164 if (UNITS_PER_WORD < 4)
7166 word = byte / UNITS_PER_WORD;
7167 if (WORDS_BIG_ENDIAN)
7168 word = (words - 1) - word;
7169 offset = word * UNITS_PER_WORD;
7170 if (BYTES_BIG_ENDIAN)
7171 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7172 else
7173 offset += byte % UNITS_PER_WORD;
7175 else
7176 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7177 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7179 return total_bytes;
7182 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7183 specified by EXPR into the buffer PTR of length LEN bytes.
7184 Return the number of bytes placed in the buffer, or zero
7185 upon failure. */
7187 static int
7188 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7190 int rsize, isize;
7191 tree part;
7193 part = TREE_REALPART (expr);
7194 rsize = native_encode_expr (part, ptr, len);
7195 if (rsize == 0)
7196 return 0;
7197 part = TREE_IMAGPART (expr);
7198 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7199 if (isize != rsize)
7200 return 0;
7201 return rsize + isize;
7205 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7206 specified by EXPR into the buffer PTR of length LEN bytes.
7207 Return the number of bytes placed in the buffer, or zero
7208 upon failure. */
7210 static int
7211 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7213 int i, size, offset, count;
7214 tree itype, elem, elements;
7216 offset = 0;
7217 elements = TREE_VECTOR_CST_ELTS (expr);
7218 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7219 itype = TREE_TYPE (TREE_TYPE (expr));
7220 size = GET_MODE_SIZE (TYPE_MODE (itype));
7221 for (i = 0; i < count; i++)
7223 if (elements)
7225 elem = TREE_VALUE (elements);
7226 elements = TREE_CHAIN (elements);
7228 else
7229 elem = NULL_TREE;
7231 if (elem)
7233 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7234 return 0;
7236 else
7238 if (offset + size > len)
7239 return 0;
7240 memset (ptr+offset, 0, size);
7242 offset += size;
7244 return offset;
7248 /* Subroutine of native_encode_expr. Encode the STRING_CST
7249 specified by EXPR into the buffer PTR of length LEN bytes.
7250 Return the number of bytes placed in the buffer, or zero
7251 upon failure. */
7253 static int
7254 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7256 tree type = TREE_TYPE (expr);
7257 HOST_WIDE_INT total_bytes;
7259 if (TREE_CODE (type) != ARRAY_TYPE
7260 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7261 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7262 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7263 return 0;
7264 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7265 if (total_bytes > len)
7266 return 0;
7267 if (TREE_STRING_LENGTH (expr) < total_bytes)
7269 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7270 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7271 total_bytes - TREE_STRING_LENGTH (expr));
7273 else
7274 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7275 return total_bytes;
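/* [Editorial note]  The declared array size wins over the literal length:
   the STRING_CST "ab" of type char[5] encodes as 'a' 'b' 0 0 0.  In host
   terms (demo_* is illustrative):  */

#include <string.h>

static int
demo_encode_string (const char *str, int str_len, int array_size,
                    unsigned char *ptr, int len)
{
  if (array_size > len)
    return 0;
  if (str_len < array_size)
    {
      memcpy (ptr, str, str_len);
      memset (ptr + str_len, 0, array_size - str_len);  /* zero-fill tail */
    }
  else
    memcpy (ptr, str, array_size);
  return array_size;
}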
7279 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7280 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7281 buffer PTR of length LEN bytes. Return the number of bytes
7282 placed in the buffer, or zero upon failure. */
7284 int
7285 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7287 switch (TREE_CODE (expr))
7289 case INTEGER_CST:
7290 return native_encode_int (expr, ptr, len);
7292 case REAL_CST:
7293 return native_encode_real (expr, ptr, len);
7295 case COMPLEX_CST:
7296 return native_encode_complex (expr, ptr, len);
7298 case VECTOR_CST:
7299 return native_encode_vector (expr, ptr, len);
7301 case STRING_CST:
7302 return native_encode_string (expr, ptr, len);
7304 default:
7305 return 0;
7310 /* Subroutine of native_interpret_expr. Interpret the contents of
7311 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7312 If the buffer cannot be interpreted, return NULL_TREE. */
7314 static tree
7315 native_interpret_int (tree type, const unsigned char *ptr, int len)
7317 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7318 int byte, offset, word, words;
7319 unsigned char value;
7320 double_int result;
7322 if (total_bytes > len)
7323 return NULL_TREE;
7324 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7325 return NULL_TREE;
7327 result = double_int_zero;
7328 words = total_bytes / UNITS_PER_WORD;
7330 for (byte = 0; byte < total_bytes; byte++)
7332 int bitpos = byte * BITS_PER_UNIT;
7333 if (total_bytes > UNITS_PER_WORD)
7335 word = byte / UNITS_PER_WORD;
7336 if (WORDS_BIG_ENDIAN)
7337 word = (words - 1) - word;
7338 offset = word * UNITS_PER_WORD;
7339 if (BYTES_BIG_ENDIAN)
7340 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7341 else
7342 offset += byte % UNITS_PER_WORD;
7344 else
7345 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7346 value = ptr[offset];
7348 if (bitpos < HOST_BITS_PER_WIDE_INT)
7349 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7350 else
7351 result.high |= (unsigned HOST_WIDE_INT) value
7352 << (bitpos - HOST_BITS_PER_WIDE_INT);
7355 return double_int_to_tree (type, result);
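/* [Editorial sketch]  The loop above is the inverse of native_encode_int:
   each target byte is read from its mapped buffer offset and OR-ed in at
   bit position 8*K, where K is the logical byte index.  For a purely
   little-endian layout this collapses to the familiar idiom below
   (demo_* is illustrative, host-side only):  */

#include <stdint.h>

static uint64_t
demo_interpret_le (const unsigned char *ptr, int total_bytes)
{
  uint64_t result = 0;
  int byte;

  for (byte = 0; byte < total_bytes; byte++)
    result |= (uint64_t) ptr[byte] << (byte * 8);  /* byte K at bit 8*K */
  return result;
}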
7359 /* Subroutine of native_interpret_expr. Interpret the contents of
7360 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7361 If the buffer cannot be interpreted, return NULL_TREE. */
7363 static tree
7364 native_interpret_real (tree type, const unsigned char *ptr, int len)
7366 enum machine_mode mode = TYPE_MODE (type);
7367 int total_bytes = GET_MODE_SIZE (mode);
7368 int byte, offset, word, words, bitpos;
7369 unsigned char value;
7370 /* Exactly 32 bits of the image go into each element of TMP, no
7371 matter the size of the host's long. We handle floating point
7372 representations with up to 192 bits. */
7373 REAL_VALUE_TYPE r;
7374 long tmp[6];
7376 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7377 if (total_bytes > len || total_bytes > 24)
7378 return NULL_TREE;
7379 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7381 memset (tmp, 0, sizeof (tmp));
7382 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7383 bitpos += BITS_PER_UNIT)
7385 byte = (bitpos / BITS_PER_UNIT) & 3;
7386 if (UNITS_PER_WORD < 4)
7388 word = byte / UNITS_PER_WORD;
7389 if (WORDS_BIG_ENDIAN)
7390 word = (words - 1) - word;
7391 offset = word * UNITS_PER_WORD;
7392 if (BYTES_BIG_ENDIAN)
7393 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7394 else
7395 offset += byte % UNITS_PER_WORD;
7397 else
7398 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7399 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7401 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7404 real_from_target (&r, tmp, mode);
7405 return build_real (type, r);
7409 /* Subroutine of native_interpret_expr. Interpret the contents of
7410 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7411 If the buffer cannot be interpreted, return NULL_TREE. */
7413 static tree
7414 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7416 tree etype, rpart, ipart;
7417 int size;
7419 etype = TREE_TYPE (type);
7420 size = GET_MODE_SIZE (TYPE_MODE (etype));
7421 if (size * 2 > len)
7422 return NULL_TREE;
7423 rpart = native_interpret_expr (etype, ptr, size);
7424 if (!rpart)
7425 return NULL_TREE;
7426 ipart = native_interpret_expr (etype, ptr+size, size);
7427 if (!ipart)
7428 return NULL_TREE;
7429 return build_complex (type, rpart, ipart);
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7437 static tree
7438 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7440 tree etype, elem, elements;
7441 int i, size, count;
7443 etype = TREE_TYPE (type);
7444 size = GET_MODE_SIZE (TYPE_MODE (etype));
7445 count = TYPE_VECTOR_SUBPARTS (type);
7446 if (size * count > len)
7447 return NULL_TREE;
7449 elements = NULL_TREE;
7450 for (i = count - 1; i >= 0; i--)
7452 elem = native_interpret_expr (etype, ptr+(i*size), size);
7453 if (!elem)
7454 return NULL_TREE;
7455 elements = tree_cons (NULL_TREE, elem, elements);
7457 return build_vector (type, elements);
7461 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7462 the buffer PTR of length LEN as a constant of type TYPE. For
7463 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7464 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7465 return NULL_TREE. */
7467 tree
7468 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7470 switch (TREE_CODE (type))
7472 case INTEGER_TYPE:
7473 case ENUMERAL_TYPE:
7474 case BOOLEAN_TYPE:
7475 return native_interpret_int (type, ptr, len);
7477 case REAL_TYPE:
7478 return native_interpret_real (type, ptr, len);
7480 case COMPLEX_TYPE:
7481 return native_interpret_complex (type, ptr, len);
7483 case VECTOR_TYPE:
7484 return native_interpret_vector (type, ptr, len);
7486 default:
7487 return NULL_TREE;
7492 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7493 TYPE at compile-time. If we're unable to perform the conversion
7494 return NULL_TREE. */
7496 static tree
7497 fold_view_convert_expr (tree type, tree expr)
7499 /* We support up to 512-bit values (for V8DFmode). */
7500 unsigned char buffer[64];
7501 int len;
7503 /* Check that the host and target are sane. */
7504 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7505 return NULL_TREE;
7507 len = native_encode_expr (expr, buffer, sizeof (buffer));
7508 if (len == 0)
7509 return NULL_TREE;
7511 return native_interpret_expr (type, buffer, len);
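/* [Editorial note]  The net effect is a bit-level reinterpretation: encode
   the constant into target bytes, then read those bytes back as the new
   type.  E.g. VIEW_CONVERT_EXPR<int>(1.0f) folds to 0x3f800000 on an
   IEEE-754 target.  A host-side analogue (illustrative only):  */

#include <stdint.h>
#include <string.h>

static uint32_t
demo_view_convert_float_bits (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);  /* reinterpret, do not convert */
  return u;
}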
7514 /* Build an expression for the address of T. Folds away INDIRECT_REF
7515 to avoid confusing the gimplify process. */
7517 tree
7518 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7520 /* The size of the object is not relevant when talking about its address. */
7521 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7522 t = TREE_OPERAND (t, 0);
7524 if (TREE_CODE (t) == INDIRECT_REF)
7526 t = TREE_OPERAND (t, 0);
7528 if (TREE_TYPE (t) != ptrtype)
7529 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7531 else if (TREE_CODE (t) == MEM_REF
7532 && integer_zerop (TREE_OPERAND (t, 1)))
7533 return TREE_OPERAND (t, 0);
7534 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7536 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7538 if (TREE_TYPE (t) != ptrtype)
7539 t = fold_convert_loc (loc, ptrtype, t);
7541 else
7542 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7544 return t;
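/* [Editorial example]  The INDIRECT_REF case corresponds to the
   source-level identity &*p == p, so no dereference survives folding
   (demo_* is illustrative):  */

static int *
demo_addr_of_deref (int *p)
{
  return &*p;  /* folds to plain P */
}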
7547 /* Build an expression for the address of T. */
7549 tree
7550 build_fold_addr_expr_loc (location_t loc, tree t)
7552 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7554 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7557 /* Fold a unary expression of code CODE and type TYPE with operand
7558 OP0. Return the folded expression if folding is successful.
7559 Otherwise, return NULL_TREE. */
7561 tree
7562 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7564 tree tem;
7565 tree arg0;
7566 enum tree_code_class kind = TREE_CODE_CLASS (code);
7568 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7569 && TREE_CODE_LENGTH (code) == 1);
7571 arg0 = op0;
7572 if (arg0)
7574 if (CONVERT_EXPR_CODE_P (code)
7575 || code == FLOAT_EXPR || code == ABS_EXPR)
7577 /* Don't use STRIP_NOPS, because signedness of argument type
7578 matters. */
7579 STRIP_SIGN_NOPS (arg0);
7581 else
7583 /* Strip any conversions that don't change the mode. This
7584 is safe for every expression, except for a comparison
7585 expression because its signedness is derived from its
7586 operands.
7588 Note that this is done as an internal manipulation within
7589 the constant folder, in order to find the simplest
7590 representation of the arguments so that their form can be
7591 studied. In any case, the appropriate type conversions
7592 should be put back in the tree that is returned from the
7593 constant folder. */
7594 STRIP_NOPS (arg0);
7598 if (TREE_CODE_CLASS (code) == tcc_unary)
7600 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7601 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7602 fold_build1_loc (loc, code, type,
7603 fold_convert_loc (loc, TREE_TYPE (op0),
7604 TREE_OPERAND (arg0, 1))));
7605 else if (TREE_CODE (arg0) == COND_EXPR)
7607 tree arg01 = TREE_OPERAND (arg0, 1);
7608 tree arg02 = TREE_OPERAND (arg0, 2);
7609 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7610 arg01 = fold_build1_loc (loc, code, type,
7611 fold_convert_loc (loc,
7612 TREE_TYPE (op0), arg01));
7613 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7614 arg02 = fold_build1_loc (loc, code, type,
7615 fold_convert_loc (loc,
7616 TREE_TYPE (op0), arg02));
7617 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7618 arg01, arg02);
7620 /* If this was a conversion, and all we did was to move it
7621 inside the COND_EXPR, bring it back out. But leave it if
7622 it is a conversion from integer to integer and the
7623 result precision is no wider than a word since such a
7624 conversion is cheap and may be optimized away by combine,
7625 while it couldn't if it were outside the COND_EXPR. Then return
7626 so we don't get into an infinite recursion loop taking the
7627 conversion out and then back in. */
7629 if ((CONVERT_EXPR_CODE_P (code)
7630 || code == NON_LVALUE_EXPR)
7631 && TREE_CODE (tem) == COND_EXPR
7632 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7633 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7634 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7635 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7636 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7637 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7638 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7639 && (INTEGRAL_TYPE_P
7640 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7641 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7642 || flag_syntax_only))
7643 tem = build1_loc (loc, code, type,
7644 build3 (COND_EXPR,
7645 TREE_TYPE (TREE_OPERAND
7646 (TREE_OPERAND (tem, 1), 0)),
7647 TREE_OPERAND (tem, 0),
7648 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7649 TREE_OPERAND (TREE_OPERAND (tem, 2),
7650 0)));
7651 return tem;
7653 else if (COMPARISON_CLASS_P (arg0))
7655 if (TREE_CODE (type) == BOOLEAN_TYPE)
7657 arg0 = copy_node (arg0);
7658 TREE_TYPE (arg0) = type;
7659 return arg0;
7661 else if (TREE_CODE (type) != INTEGER_TYPE)
7662 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7663 fold_build1_loc (loc, code, type,
7664 integer_one_node),
7665 fold_build1_loc (loc, code, type,
7666 integer_zero_node));
7670 switch (code)
7672 case PAREN_EXPR:
7673 /* Re-association barriers around constants and other re-association
7674 barriers can be removed. */
7675 if (CONSTANT_CLASS_P (op0)
7676 || TREE_CODE (op0) == PAREN_EXPR)
7677 return fold_convert_loc (loc, type, op0);
7678 return NULL_TREE;
7680 CASE_CONVERT:
7681 case FLOAT_EXPR:
7682 case FIX_TRUNC_EXPR:
7683 if (TREE_TYPE (op0) == type)
7684 return op0;
7686 /* If we have (type) (a CMP b) and type is an integral type, return
7687 new expression involving the new type. */
7688 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7689 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7690 TREE_OPERAND (op0, 1));
7692 /* Handle cases of two conversions in a row. */
7693 if (CONVERT_EXPR_P (op0))
7695 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7696 tree inter_type = TREE_TYPE (op0);
7697 int inside_int = INTEGRAL_TYPE_P (inside_type);
7698 int inside_ptr = POINTER_TYPE_P (inside_type);
7699 int inside_float = FLOAT_TYPE_P (inside_type);
7700 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7701 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7702 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7703 int inter_int = INTEGRAL_TYPE_P (inter_type);
7704 int inter_ptr = POINTER_TYPE_P (inter_type);
7705 int inter_float = FLOAT_TYPE_P (inter_type);
7706 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7707 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7708 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7709 int final_int = INTEGRAL_TYPE_P (type);
7710 int final_ptr = POINTER_TYPE_P (type);
7711 int final_float = FLOAT_TYPE_P (type);
7712 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7713 unsigned int final_prec = TYPE_PRECISION (type);
7714 int final_unsignedp = TYPE_UNSIGNED (type);
7716 /* In addition to the cases of two conversions in a row
7717 handled below, if we are converting something to its own
7718 type via an object of identical or wider precision, neither
7719 conversion is needed. */
7720 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7721 && (((inter_int || inter_ptr) && final_int)
7722 || (inter_float && final_float))
7723 && inter_prec >= final_prec)
7724 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7726 /* Likewise, if the intermediate and initial types are either both
7727 float or both integer, we don't need the middle conversion if the
7728 former is wider than the latter and doesn't change the signedness
7729 (for integers). Avoid this if the final type is a pointer since
7730 then we sometimes need the middle conversion. Likewise if the
7731 final type has a precision not equal to the size of its mode. */
7732 if (((inter_int && inside_int)
7733 || (inter_float && inside_float)
7734 || (inter_vec && inside_vec))
7735 && inter_prec >= inside_prec
7736 && (inter_float || inter_vec
7737 || inter_unsignedp == inside_unsignedp)
7738 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7739 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7740 && ! final_ptr
7741 && (! final_vec || inter_prec == inside_prec))
7742 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7744 /* If we have a sign-extension of a zero-extended value, we can
7745 replace that by a single zero-extension. */
7746 if (inside_int && inter_int && final_int
7747 && inside_prec < inter_prec && inter_prec < final_prec
7748 && inside_unsignedp && !inter_unsignedp)
7749 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7751 /* Two conversions in a row are not needed unless:
7752 - some conversion is floating-point (overstrict for now), or
7753 - some conversion is a vector (overstrict for now), or
7754 - the intermediate type is narrower than both initial and
7755 final, or
7756 - the intermediate type and innermost type differ in signedness,
7757 and the outermost type is wider than the intermediate, or
7758 - the initial type is a pointer type and the precisions of the
7759 intermediate and final types differ, or
7760 - the final type is a pointer type and the precisions of the
7761 initial and intermediate types differ. */
7762 if (! inside_float && ! inter_float && ! final_float
7763 && ! inside_vec && ! inter_vec && ! final_vec
7764 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7765 && ! (inside_int && inter_int
7766 && inter_unsignedp != inside_unsignedp
7767 && inter_prec < final_prec)
7768 && ((inter_unsignedp && inter_prec > inside_prec)
7769 == (final_unsignedp && final_prec > inter_prec))
7770 && ! (inside_ptr && inter_prec != final_prec)
7771 && ! (final_ptr && inside_prec != inter_prec)
7772 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7773 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7774 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
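/* [Editorial examples, assuming 16-bit short and 32-bit int, with
   x of type int and c of type unsigned char]
     (int) (long) x   -> x; converting to the own type via a wider
                        type needs no conversion at all;
     (int) (short) c  -> (int) c; a sign-extension of a zero-extended
                        value becomes a single zero-extension;
     (int) (short) x  -> kept; the intermediate truncation is real.  */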
7777 /* Handle (T *)&A.B.C for A being of type T and B and C
7778 living at offset zero. This occurs frequently in
7779 C++ upcasting and then accessing the base. */
7780 if (TREE_CODE (op0) == ADDR_EXPR
7781 && POINTER_TYPE_P (type)
7782 && handled_component_p (TREE_OPERAND (op0, 0)))
7784 HOST_WIDE_INT bitsize, bitpos;
7785 tree offset;
7786 enum machine_mode mode;
7787 int unsignedp, volatilep;
7788 tree base = TREE_OPERAND (op0, 0);
7789 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7790 &mode, &unsignedp, &volatilep, false);
7791 /* If the reference was to a (constant) zero offset, we can use
7792 the address of the base if it has the same base type
7793 as the result type and the pointer type is unqualified. */
7794 if (! offset && bitpos == 0
7795 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7796 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7797 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7798 return fold_convert_loc (loc, type,
7799 build_fold_addr_expr_loc (loc, base));
7802 if (TREE_CODE (op0) == MODIFY_EXPR
7803 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7804 /* Detect assigning a bitfield. */
7805 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7806 && DECL_BIT_FIELD
7807 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7809 /* Don't leave an assignment inside a conversion
7810 unless assigning a bitfield. */
7811 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7812 /* First do the assignment, then return converted constant. */
7813 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7814 TREE_NO_WARNING (tem) = 1;
7815 TREE_USED (tem) = 1;
7816 return tem;
7819 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7820 constants (if x has signed type, the sign bit cannot be set
7821 in c). This folds extension into the BIT_AND_EXPR.
7822 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7823 very likely don't have maximal range for their precision and this
7824 transformation effectively doesn't preserve non-maximal ranges. */
7825 if (TREE_CODE (type) == INTEGER_TYPE
7826 && TREE_CODE (op0) == BIT_AND_EXPR
7827 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7829 tree and_expr = op0;
7830 tree and0 = TREE_OPERAND (and_expr, 0);
7831 tree and1 = TREE_OPERAND (and_expr, 1);
7832 int change = 0;
7834 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7835 || (TYPE_PRECISION (type)
7836 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7837 change = 1;
7838 else if (TYPE_PRECISION (TREE_TYPE (and1))
7839 <= HOST_BITS_PER_WIDE_INT
7840 && host_integerp (and1, 1))
7842 unsigned HOST_WIDE_INT cst;
7844 cst = tree_low_cst (and1, 1);
7845 cst &= (HOST_WIDE_INT) -1
7846 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7847 change = (cst == 0);
7848 #ifdef LOAD_EXTEND_OP
7849 if (change
7850 && !flag_syntax_only
7851 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7852 == ZERO_EXTEND))
7854 tree uns = unsigned_type_for (TREE_TYPE (and0));
7855 and0 = fold_convert_loc (loc, uns, and0);
7856 and1 = fold_convert_loc (loc, uns, and1);
7858 #endif
7860 if (change)
7862 tem = force_fit_type_double (type, tree_to_double_int (and1),
7863 0, TREE_OVERFLOW (and1));
7864 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7865 fold_convert_loc (loc, type, and0), tem);
7869 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7870 when one of the new casts will fold away. Conservatively we assume
7871 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7872 if (POINTER_TYPE_P (type)
7873 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7874 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7875 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7876 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7878 tree arg00 = TREE_OPERAND (arg0, 0);
7879 tree arg01 = TREE_OPERAND (arg0, 1);
7881 return fold_build2_loc (loc,
7882 TREE_CODE (arg0), type,
7883 fold_convert_loc (loc, type, arg00),
7884 fold_convert_loc (loc, sizetype, arg01));
7887 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7888 of the same precision, and X is an integer type not narrower than
7889 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7890 if (INTEGRAL_TYPE_P (type)
7891 && TREE_CODE (op0) == BIT_NOT_EXPR
7892 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7893 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7894 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7896 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7897 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7898 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7899 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7900 fold_convert_loc (loc, type, tem));
7903 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7904 type of X and Y (integer types only). */
7905 if (INTEGRAL_TYPE_P (type)
7906 && TREE_CODE (op0) == MULT_EXPR
7907 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7908 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7910 /* Be careful not to introduce new overflows. */
7911 tree mult_type;
7912 if (TYPE_OVERFLOW_WRAPS (type))
7913 mult_type = type;
7914 else
7915 mult_type = unsigned_type_for (type);
7917 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7919 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7920 fold_convert_loc (loc, mult_type,
7921 TREE_OPERAND (op0, 0)),
7922 fold_convert_loc (loc, mult_type,
7923 TREE_OPERAND (op0, 1)));
7924 return fold_convert_loc (loc, type, tem);
7928 tem = fold_convert_const (code, type, op0);
7929 return tem ? tem : NULL_TREE;
7931 case ADDR_SPACE_CONVERT_EXPR:
7932 if (integer_zerop (arg0))
7933 return fold_convert_const (code, type, arg0);
7934 return NULL_TREE;
7936 case FIXED_CONVERT_EXPR:
7937 tem = fold_convert_const (code, type, arg0);
7938 return tem ? tem : NULL_TREE;
7940 case VIEW_CONVERT_EXPR:
7941 if (TREE_TYPE (op0) == type)
7942 return op0;
7943 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7944 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7945 type, TREE_OPERAND (op0, 0));
7946 if (TREE_CODE (op0) == MEM_REF)
7947 return fold_build2_loc (loc, MEM_REF, type,
7948 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7950 /* For integral conversions with the same precision or pointer
7951 conversions use a NOP_EXPR instead. */
7952 if ((INTEGRAL_TYPE_P (type)
7953 || POINTER_TYPE_P (type))
7954 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7955 || POINTER_TYPE_P (TREE_TYPE (op0)))
7956 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7957 return fold_convert_loc (loc, type, op0);
7959 /* Strip inner integral conversions that do not change the precision. */
7960 if (CONVERT_EXPR_P (op0)
7961 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7962 || POINTER_TYPE_P (TREE_TYPE (op0)))
7963 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7964 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7965 && (TYPE_PRECISION (TREE_TYPE (op0))
7966 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7967 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7968 type, TREE_OPERAND (op0, 0));
7970 return fold_view_convert_expr (type, op0);
7972 case NEGATE_EXPR:
7973 tem = fold_negate_expr (loc, arg0);
7974 if (tem)
7975 return fold_convert_loc (loc, type, tem);
7976 return NULL_TREE;
7978 case ABS_EXPR:
7979 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7980 return fold_abs_const (arg0, type);
7981 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7982 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7983 /* Convert fabs((double)float) into (double)fabsf(float). */
7984 else if (TREE_CODE (arg0) == NOP_EXPR
7985 && TREE_CODE (type) == REAL_TYPE)
7987 tree targ0 = strip_float_extensions (arg0);
7988 if (targ0 != arg0)
7989 return fold_convert_loc (loc, type,
7990 fold_build1_loc (loc, ABS_EXPR,
7991 TREE_TYPE (targ0),
7992 targ0));
7994 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7995 else if (TREE_CODE (arg0) == ABS_EXPR)
7996 return arg0;
7997 else if (tree_expr_nonnegative_p (arg0))
7998 return arg0;
8000 /* Strip sign ops from argument. */
8001 if (TREE_CODE (type) == REAL_TYPE)
8003 tem = fold_strip_sign_ops (arg0);
8004 if (tem)
8005 return fold_build1_loc (loc, ABS_EXPR, type,
8006 fold_convert_loc (loc, type, tem));
8008 return NULL_TREE;
8010 case CONJ_EXPR:
8011 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8012 return fold_convert_loc (loc, type, arg0);
8013 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8015 tree itype = TREE_TYPE (type);
8016 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8017 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8018 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8019 negate_expr (ipart));
8021 if (TREE_CODE (arg0) == COMPLEX_CST)
8023 tree itype = TREE_TYPE (type);
8024 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8025 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8026 return build_complex (type, rpart, negate_expr (ipart));
8028 if (TREE_CODE (arg0) == CONJ_EXPR)
8029 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8030 return NULL_TREE;
8032 case BIT_NOT_EXPR:
8033 if (TREE_CODE (arg0) == INTEGER_CST)
8034 return fold_not_const (arg0, type);
8035 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8036 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8037 /* Convert ~ (-A) to A - 1. */
8038 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8039 return fold_build2_loc (loc, MINUS_EXPR, type,
8040 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8041 build_int_cst (type, 1));
8042 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8043 else if (INTEGRAL_TYPE_P (type)
8044 && ((TREE_CODE (arg0) == MINUS_EXPR
8045 && integer_onep (TREE_OPERAND (arg0, 1)))
8046 || (TREE_CODE (arg0) == PLUS_EXPR
8047 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8048 return fold_build1_loc (loc, NEGATE_EXPR, type,
8049 fold_convert_loc (loc, type,
8050 TREE_OPERAND (arg0, 0)));
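/* [Editorial note]  These two folds are the two's complement identity
   ~X == -X - 1 in different guises:
     ~(-A)     == A - 1     e.g. ~(-5) == 4
     ~(A - 1)  == -A        e.g. ~(5 - 1) == ~4 == -5
     ~(A + -1) == -A  */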
8051 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8052 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8053 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8054 fold_convert_loc (loc, type,
8055 TREE_OPERAND (arg0, 0)))))
8056 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8057 fold_convert_loc (loc, type,
8058 TREE_OPERAND (arg0, 1)));
8059 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8060 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8061 fold_convert_loc (loc, type,
8062 TREE_OPERAND (arg0, 1)))))
8063 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8064 fold_convert_loc (loc, type,
8065 TREE_OPERAND (arg0, 0)), tem);
8066 /* Perform BIT_NOT_EXPR on each element individually. */
8067 else if (TREE_CODE (arg0) == VECTOR_CST)
8069 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8070 int count = TYPE_VECTOR_SUBPARTS (type), i;
8072 for (i = 0; i < count; i++)
8074 if (elements)
8076 elem = TREE_VALUE (elements);
8077 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8078 if (elem == NULL_TREE)
8079 break;
8080 elements = TREE_CHAIN (elements);
8082 else
8083 elem = build_int_cst (TREE_TYPE (type), -1);
8084 list = tree_cons (NULL_TREE, elem, list);
8086 if (i == count)
8087 return build_vector (type, nreverse (list));
8090 return NULL_TREE;
8092 case TRUTH_NOT_EXPR:
8093 /* The argument to invert_truthvalue must have Boolean type. */
8094 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8095 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8097 /* Note that the operand of this must be an int
8098 and its values must be 0 or 1.
8099 ("true" is a fixed value perhaps depending on the language,
8100 but we don't handle values other than 1 correctly yet.) */
8101 tem = fold_truth_not_expr (loc, arg0);
8102 if (!tem)
8103 return NULL_TREE;
8104 return fold_convert_loc (loc, type, tem);
8106 case REALPART_EXPR:
8107 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8108 return fold_convert_loc (loc, type, arg0);
8109 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8110 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8111 TREE_OPERAND (arg0, 1));
8112 if (TREE_CODE (arg0) == COMPLEX_CST)
8113 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8114 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8116 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8117 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8118 fold_build1_loc (loc, REALPART_EXPR, itype,
8119 TREE_OPERAND (arg0, 0)),
8120 fold_build1_loc (loc, REALPART_EXPR, itype,
8121 TREE_OPERAND (arg0, 1)));
8122 return fold_convert_loc (loc, type, tem);
8124 if (TREE_CODE (arg0) == CONJ_EXPR)
8126 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8127 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8128 TREE_OPERAND (arg0, 0));
8129 return fold_convert_loc (loc, type, tem);
8131 if (TREE_CODE (arg0) == CALL_EXPR)
8133 tree fn = get_callee_fndecl (arg0);
8134 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8135 switch (DECL_FUNCTION_CODE (fn))
8137 CASE_FLT_FN (BUILT_IN_CEXPI):
8138 fn = mathfn_built_in (type, BUILT_IN_COS);
8139 if (fn)
8140 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8141 break;
8143 default:
8144 break;
8147 return NULL_TREE;
8149 case IMAGPART_EXPR:
8150 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8151 return build_zero_cst (type);
8152 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8153 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8154 TREE_OPERAND (arg0, 0));
8155 if (TREE_CODE (arg0) == COMPLEX_CST)
8156 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8157 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8159 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8160 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8161 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8162 TREE_OPERAND (arg0, 0)),
8163 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8164 TREE_OPERAND (arg0, 1)));
8165 return fold_convert_loc (loc, type, tem);
8167 if (TREE_CODE (arg0) == CONJ_EXPR)
8169 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8170 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8171 return fold_convert_loc (loc, type, negate_expr (tem));
8173 if (TREE_CODE (arg0) == CALL_EXPR)
8175 tree fn = get_callee_fndecl (arg0);
8176 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8177 switch (DECL_FUNCTION_CODE (fn))
8179 CASE_FLT_FN (BUILT_IN_CEXPI):
8180 fn = mathfn_built_in (type, BUILT_IN_SIN);
8181 if (fn)
8182 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8183 break;
8185 default:
8186 break;
8189 return NULL_TREE;
8191 case INDIRECT_REF:
8192 /* Fold *&X to X if X is an lvalue. */
8193 if (TREE_CODE (op0) == ADDR_EXPR)
8195 tree op00 = TREE_OPERAND (op0, 0);
8196 if ((TREE_CODE (op00) == VAR_DECL
8197 || TREE_CODE (op00) == PARM_DECL
8198 || TREE_CODE (op00) == RESULT_DECL)
8199 && !TREE_READONLY (op00))
8200 return op00;
8202 return NULL_TREE;
8204 default:
8205 return NULL_TREE;
8206 } /* switch (code) */
8210 /* If the operation was a conversion, do _not_ mark a resulting constant
8211 with TREE_OVERFLOW if the original constant was not. These conversions
8212 have implementation defined behavior and retaining the TREE_OVERFLOW
8213 flag here would confuse later passes such as VRP. */
8214 tree
8215 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8216 tree type, tree op0)
8218 tree res = fold_unary_loc (loc, code, type, op0);
8219 if (res
8220 && TREE_CODE (res) == INTEGER_CST
8221 && TREE_CODE (op0) == INTEGER_CST
8222 && CONVERT_EXPR_CODE_P (code))
8223 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8225 return res;
8228 /* Fold a binary expression of code CODE and type TYPE with operands
8229 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8230 Return the folded expression if folding is successful. Otherwise,
8231 return NULL_TREE. */
8233 static tree
8234 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8236 enum tree_code compl_code;
8238 if (code == MIN_EXPR)
8239 compl_code = MAX_EXPR;
8240 else if (code == MAX_EXPR)
8241 compl_code = MIN_EXPR;
8242 else
8243 gcc_unreachable ();
8245 /* MIN (MAX (a, b), b) == b. */
8246 if (TREE_CODE (op0) == compl_code
8247 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8248 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8250 /* MIN (MAX (b, a), b) == b. */
8251 if (TREE_CODE (op0) == compl_code
8252 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8253 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8254 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8256 /* MIN (a, MAX (a, b)) == a. */
8257 if (TREE_CODE (op1) == compl_code
8258 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8259 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8260 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8262 /* MIN (a, MAX (b, a)) == a. */
8263 if (TREE_CODE (op1) == compl_code
8264 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8265 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8266 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8268 return NULL_TREE;
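/* [Editorial check]  The four identities folded above, verified on host
   integers (demo_* is illustrative):  */

#include <assert.h>

static int demo_min (int a, int b) { return a < b ? a : b; }
static int demo_max (int a, int b) { return a > b ? a : b; }

static void
demo_minmax_identities (int a, int b)
{
  assert (demo_min (demo_max (a, b), b) == b);  /* MIN (MAX (a, b), b) */
  assert (demo_min (demo_max (b, a), b) == b);  /* MIN (MAX (b, a), b) */
  assert (demo_min (a, demo_max (a, b)) == a);  /* MIN (a, MAX (a, b)) */
  assert (demo_min (a, demo_max (b, a)) == a);  /* MIN (a, MAX (b, a)) */
}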
8271 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8272 by changing CODE to reduce the magnitude of constants involved in
8273 ARG0 of the comparison.
8274 Returns a canonicalized comparison tree if a simplification was
8275 possible, otherwise returns NULL_TREE.
8276 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8277 valid if signed overflow is undefined. */
8279 static tree
8280 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8281 tree arg0, tree arg1,
8282 bool *strict_overflow_p)
8284 enum tree_code code0 = TREE_CODE (arg0);
8285 tree t, cst0 = NULL_TREE;
8286 int sgn0;
8287 bool swap = false;
8289 /* Match A +- CST code arg1 and CST code arg1. We can change the
8290 first form only if overflow is undefined. */
8291 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8292 /* In principle pointers also have undefined overflow behavior,
8293 but that causes problems elsewhere. */
8294 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8295 && (code0 == MINUS_EXPR
8296 || code0 == PLUS_EXPR)
8297 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8298 || code0 == INTEGER_CST))
8299 return NULL_TREE;
8301 /* Identify the constant in arg0 and its sign. */
8302 if (code0 == INTEGER_CST)
8303 cst0 = arg0;
8304 else
8305 cst0 = TREE_OPERAND (arg0, 1);
8306 sgn0 = tree_int_cst_sgn (cst0);
8308 /* Overflowed constants and zero will cause problems. */
8309 if (integer_zerop (cst0)
8310 || TREE_OVERFLOW (cst0))
8311 return NULL_TREE;
8313 /* See if we can reduce the magnitude of the constant in
8314 arg0 by changing the comparison code. */
8315 if (code0 == INTEGER_CST)
8317 /* CST <= arg1 -> CST-1 < arg1. */
8318 if (code == LE_EXPR && sgn0 == 1)
8319 code = LT_EXPR;
8320 /* -CST < arg1 -> -CST-1 <= arg1. */
8321 else if (code == LT_EXPR && sgn0 == -1)
8322 code = LE_EXPR;
8323 /* CST > arg1 -> CST-1 >= arg1. */
8324 else if (code == GT_EXPR && sgn0 == 1)
8325 code = GE_EXPR;
8326 /* -CST >= arg1 -> -CST-1 > arg1. */
8327 else if (code == GE_EXPR && sgn0 == -1)
8328 code = GT_EXPR;
8329 else
8330 return NULL_TREE;
8331 /* arg1 code' CST' might be more canonical. */
8332 swap = true;
8334 else
8336 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8337 if (code == LT_EXPR
8338 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8339 code = LE_EXPR;
8340 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8341 else if (code == GT_EXPR
8342 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8343 code = GE_EXPR;
8344 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8345 else if (code == LE_EXPR
8346 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8347 code = LT_EXPR;
8348 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8349 else if (code == GE_EXPR
8350 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8351 code = GT_EXPR;
8352 else
8353 return NULL_TREE;
8354 *strict_overflow_p = true;
8357 /* Now build the constant reduced in magnitude. But not if that
8358 would produce one outside of its type's range. */
8359 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8360 && ((sgn0 == 1
8361 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8362 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8363 || (sgn0 == -1
8364 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8365 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8366 /* We cannot swap the comparison here as that would cause us to
8367 endlessly recurse. */
8368 return NULL_TREE;
8370 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8371 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8372 if (code0 != INTEGER_CST)
8373 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8375 /* If swapping might yield a more canonical form, do so. */
8376 if (swap)
8377 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8378 else
8379 return fold_build2_loc (loc, code, type, t, arg1);
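/* [Editorial check]  For integers the rewrites above are the usual
   strict/non-strict boundary shifts; they rely on signed overflow being
   undefined, and the checks below likewise assume values far from
   INT_MIN/INT_MAX so the host arithmetic itself cannot overflow
   (demo_* is illustrative):  */

#include <assert.h>

static void
demo_canonicalized_compares (int x, int y)
{
  assert ((5 <= x) == (4 < x));          /* CST <= arg1 -> CST-1 < arg1 */
  assert ((x - 8 >= y) == (x - 7 > y));  /* A - CST >= arg1 -> A - CST-1 > arg1 */
  assert ((x + 8 > y) == (x + 7 >= y));  /* A + CST > arg1 -> A + CST-1 >= arg1 */
}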
8382 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8383 overflow further. Try to decrease the magnitude of constants involved
8384 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8385 and put sole constants at the second argument position.
8386 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8388 static tree
8389 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8390 tree arg0, tree arg1)
8392 tree t;
8393 bool strict_overflow_p;
8394 const char * const warnmsg = G_("assuming signed overflow does not occur "
8395 "when reducing constant in comparison");
8397 /* Try canonicalization by simplifying arg0. */
8398 strict_overflow_p = false;
8399 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8400 &strict_overflow_p);
8401 if (t)
8403 if (strict_overflow_p)
8404 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8405 return t;
8408 /* Try canonicalization by simplifying arg1 using the swapped
8409 comparison. */
8410 code = swap_tree_comparison (code);
8411 strict_overflow_p = false;
8412 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8413 &strict_overflow_p);
8414 if (t && strict_overflow_p)
8415 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8416 return t;
8419 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8420 space. This is used to avoid issuing overflow warnings for
8421 expressions like &p->x which cannot wrap. */
8423 static bool
8424 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8426 unsigned HOST_WIDE_INT offset_low, total_low;
8427 HOST_WIDE_INT size, offset_high, total_high;
8429 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8430 return true;
8432 if (bitpos < 0)
8433 return true;
8435 if (offset == NULL_TREE)
8437 offset_low = 0;
8438 offset_high = 0;
8440 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8441 return true;
8442 else
8444 offset_low = TREE_INT_CST_LOW (offset);
8445 offset_high = TREE_INT_CST_HIGH (offset);
8448 if (add_double_with_sign (offset_low, offset_high,
8449 bitpos / BITS_PER_UNIT, 0,
8450 &total_low, &total_high,
8451 true))
8452 return true;
8454 if (total_high != 0)
8455 return true;
8457 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8458 if (size <= 0)
8459 return true;
8461 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8462 array. */
8463 if (TREE_CODE (base) == ADDR_EXPR)
8465 HOST_WIDE_INT base_size;
8467 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8468 if (base_size > 0 && size < base_size)
8469 size = base_size;
8472 return total_low > (unsigned HOST_WIDE_INT) size;
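/* [Editorial sketch]  Stripped of the double-word arithmetic, the test
   asks: does the constant byte offset land beyond the pointed-to object?
   E.g. for char a[16], the address &a[8] (offset 8 <= 16) cannot wrap,
   while &a[20] conservatively may.  A condensed host-side restatement
   (demo_* is illustrative):  */

#include <stdbool.h>

static bool
demo_pointer_may_wrap (unsigned long byte_offset, long bitpos,
                       long object_size)
{
  if (bitpos < 0 || object_size <= 0)
    return true;                       /* be conservative */
  return byte_offset + (unsigned long) (bitpos / 8)
         > (unsigned long) object_size;
}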
8475 /* Subroutine of fold_binary. This routine performs all of the
8476 transformations that are common to the equality/inequality
8477 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8478 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8479 fold_binary should call fold_binary. Fold a comparison with
8480 tree code CODE and type TYPE with operands OP0 and OP1. Return
8481 the folded comparison or NULL_TREE. */
8483 static tree
8484 fold_comparison (location_t loc, enum tree_code code, tree type,
8485 tree op0, tree op1)
8487 tree arg0, arg1, tem;
8489 arg0 = op0;
8490 arg1 = op1;
8492 STRIP_SIGN_NOPS (arg0);
8493 STRIP_SIGN_NOPS (arg1);
8495 tem = fold_relational_const (code, type, arg0, arg1);
8496 if (tem != NULL_TREE)
8497 return tem;
8499 /* If one arg is a real or integer constant, put it last. */
8500 if (tree_swap_operands_p (arg0, arg1, true))
8501 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8503 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8504 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8505 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8506 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8507 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8508 && (TREE_CODE (arg1) == INTEGER_CST
8509 && !TREE_OVERFLOW (arg1)))
8511 tree const1 = TREE_OPERAND (arg0, 1);
8512 tree const2 = arg1;
8513 tree variable = TREE_OPERAND (arg0, 0);
8514 tree lhs;
8515 int lhs_add;
8516 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8518 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8519 TREE_TYPE (arg1), const2, const1);
8521 /* If the constant operation overflowed this can be
8522 simplified as a comparison against INT_MAX/INT_MIN. */
8523 if (TREE_CODE (lhs) == INTEGER_CST
8524 && TREE_OVERFLOW (lhs))
8526 int const1_sgn = tree_int_cst_sgn (const1);
8527 enum tree_code code2 = code;
8529 /* Get the sign of the constant on the lhs if the
8530 operation were VARIABLE + CONST1. */
8531 if (TREE_CODE (arg0) == MINUS_EXPR)
8532 const1_sgn = -const1_sgn;
8534 /* The sign of the constant determines if we overflowed
8535 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8536 Canonicalize to the INT_MIN overflow by swapping the comparison
8537 if necessary. */
8538 if (const1_sgn == -1)
8539 code2 = swap_tree_comparison (code);
8541 /* We now can look at the canonicalized case
8542 VARIABLE + 1 CODE2 INT_MIN
8543 and decide on the result. */
8544 if (code2 == LT_EXPR
8545 || code2 == LE_EXPR
8546 || code2 == EQ_EXPR)
8547 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8548 else if (code2 == NE_EXPR
8549 || code2 == GE_EXPR
8550 || code2 == GT_EXPR)
8551 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8554 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8555 && (TREE_CODE (lhs) != INTEGER_CST
8556 || !TREE_OVERFLOW (lhs)))
8558 if (code != EQ_EXPR && code != NE_EXPR)
8559 fold_overflow_warning ("assuming signed overflow does not occur "
8560 "when changing X +- C1 cmp C2 to "
8561 "X cmp C1 +- C2",
8562 WARN_STRICT_OVERFLOW_COMPARISON);
8563 return fold_build2_loc (loc, code, type, variable, lhs);
8567 /* A comparison of pointers can be decomposed into a compile-time
8568 comparison of the base objects and the offsets into the object.
8569 This requires at least one operand being an ADDR_EXPR or a
8570 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8571 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8572 && (TREE_CODE (arg0) == ADDR_EXPR
8573 || TREE_CODE (arg1) == ADDR_EXPR
8574 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8575 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8577 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8578 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8579 enum machine_mode mode;
8580 int volatilep, unsignedp;
8581 bool indirect_base0 = false, indirect_base1 = false;
8583 /* Get base and offset for the access. Strip ADDR_EXPR for
8584 get_inner_reference, but put it back by stripping INDIRECT_REF
8585 off the base object if possible. indirect_baseN will be true
8586 if baseN is not an address but refers to the object itself. */
8587 base0 = arg0;
8588 if (TREE_CODE (arg0) == ADDR_EXPR)
8590 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8591 &bitsize, &bitpos0, &offset0, &mode,
8592 &unsignedp, &volatilep, false);
8593 if (TREE_CODE (base0) == INDIRECT_REF)
8594 base0 = TREE_OPERAND (base0, 0);
8595 else
8596 indirect_base0 = true;
8598 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8600 base0 = TREE_OPERAND (arg0, 0);
8601 STRIP_SIGN_NOPS (base0);
8602 if (TREE_CODE (base0) == ADDR_EXPR)
8604 base0 = TREE_OPERAND (base0, 0);
8605 indirect_base0 = true;
8607 offset0 = TREE_OPERAND (arg0, 1);
8610 base1 = arg1;
8611 if (TREE_CODE (arg1) == ADDR_EXPR)
8613 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8614 &bitsize, &bitpos1, &offset1, &mode,
8615 &unsignedp, &volatilep, false);
8616 if (TREE_CODE (base1) == INDIRECT_REF)
8617 base1 = TREE_OPERAND (base1, 0);
8618 else
8619 indirect_base1 = true;
8621 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8623 base1 = TREE_OPERAND (arg1, 0);
8624 STRIP_SIGN_NOPS (base1);
8625 if (TREE_CODE (base1) == ADDR_EXPR)
8627 base1 = TREE_OPERAND (base1, 0);
8628 indirect_base1 = true;
8630 offset1 = TREE_OPERAND (arg1, 1);
8633 /* A local variable can never be pointed to by
8634 the default SSA name of an incoming parameter. */
8635 if ((TREE_CODE (arg0) == ADDR_EXPR
8636 && indirect_base0
8637 && TREE_CODE (base0) == VAR_DECL
8638 && auto_var_in_fn_p (base0, current_function_decl)
8639 && !indirect_base1
8640 && TREE_CODE (base1) == SSA_NAME
8641 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8642 && SSA_NAME_IS_DEFAULT_DEF (base1))
8643 || (TREE_CODE (arg1) == ADDR_EXPR
8644 && indirect_base1
8645 && TREE_CODE (base1) == VAR_DECL
8646 && auto_var_in_fn_p (base1, current_function_decl)
8647 && !indirect_base0
8648 && TREE_CODE (base0) == SSA_NAME
8649 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8650 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8652 if (code == NE_EXPR)
8653 return constant_boolean_node (1, type);
8654 else if (code == EQ_EXPR)
8655 return constant_boolean_node (0, type);
8657 /* If we have equivalent bases we might be able to simplify. */
8658 else if (indirect_base0 == indirect_base1
8659 && operand_equal_p (base0, base1, 0))
8661 /* We can fold this expression to a constant if the non-constant
8662 offset parts are equal. */
8663 if ((offset0 == offset1
8664 || (offset0 && offset1
8665 && operand_equal_p (offset0, offset1, 0)))
8666 && (code == EQ_EXPR
8667 || code == NE_EXPR
8668 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8671 if (code != EQ_EXPR
8672 && code != NE_EXPR
8673 && bitpos0 != bitpos1
8674 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8675 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8676 fold_overflow_warning (("assuming pointer wraparound does not "
8677 "occur when comparing P +- C1 with "
8678 "P +- C2"),
8679 WARN_STRICT_OVERFLOW_CONDITIONAL);
8681 switch (code)
8683 case EQ_EXPR:
8684 return constant_boolean_node (bitpos0 == bitpos1, type);
8685 case NE_EXPR:
8686 return constant_boolean_node (bitpos0 != bitpos1, type);
8687 case LT_EXPR:
8688 return constant_boolean_node (bitpos0 < bitpos1, type);
8689 case LE_EXPR:
8690 return constant_boolean_node (bitpos0 <= bitpos1, type);
8691 case GE_EXPR:
8692 return constant_boolean_node (bitpos0 >= bitpos1, type);
8693 case GT_EXPR:
8694 return constant_boolean_node (bitpos0 > bitpos1, type);
8695 default:;
8698 /* We can simplify the comparison to a comparison of the variable
8699 offset parts if the constant offset parts are equal.
8700 Be careful to use signed size type here because otherwise we
8701 mess with array offsets in the wrong way. This is possible
8702 because pointer arithmetic is restricted to remain within an
8703 object and overflow on pointer differences is undefined as of
8704 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8705 else if (bitpos0 == bitpos1
8706 && ((code == EQ_EXPR || code == NE_EXPR)
8707 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8709 /* By converting to signed size type we cover middle-end pointer
8710 arithmetic, which operates on unsigned pointer types of size-type
8711 width, and ARRAY_REF offsets, which are properly sign- or
8712 zero-extended from their type in case it is narrower than the
8713 size type. */
8714 if (offset0 == NULL_TREE)
8715 offset0 = build_int_cst (ssizetype, 0);
8716 else
8717 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8718 if (offset1 == NULL_TREE)
8719 offset1 = build_int_cst (ssizetype, 0);
8720 else
8721 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8723 if (code != EQ_EXPR
8724 && code != NE_EXPR
8725 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8726 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8727 fold_overflow_warning (("assuming pointer wraparound does not "
8728 "occur when comparing P +- C1 with "
8729 "P +- C2"),
8730 WARN_STRICT_OVERFLOW_COMPARISON);
8732 return fold_build2_loc (loc, code, type, offset0, offset1);
8735 /* For non-equal bases we can simplify if they are addresses
8736 of local binding decls or constants. */
8737 else if (indirect_base0 && indirect_base1
8738 /* We know that !operand_equal_p (base0, base1, 0)
8739 because the if condition was false. But make
8740 sure two decls are not the same. */
8741 && base0 != base1
8742 && TREE_CODE (arg0) == ADDR_EXPR
8743 && TREE_CODE (arg1) == ADDR_EXPR
8744 && (((TREE_CODE (base0) == VAR_DECL
8745 || TREE_CODE (base0) == PARM_DECL)
8746 && (targetm.binds_local_p (base0)
8747 || CONSTANT_CLASS_P (base1)))
8748 || CONSTANT_CLASS_P (base0))
8749 && (((TREE_CODE (base1) == VAR_DECL
8750 || TREE_CODE (base1) == PARM_DECL)
8751 && (targetm.binds_local_p (base1)
8752 || CONSTANT_CLASS_P (base0)))
8753 || CONSTANT_CLASS_P (base1)))
8755 if (code == EQ_EXPR)
8756 return omit_two_operands_loc (loc, type, boolean_false_node,
8757 arg0, arg1);
8758 else if (code == NE_EXPR)
8759 return omit_two_operands_loc (loc, type, boolean_true_node,
8760 arg0, arg1);
8762 /* For equal offsets we can simplify to a comparison of the
8763 base addresses. */
8764 else if (bitpos0 == bitpos1
8765 && (indirect_base0
8766 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8767 && (indirect_base1
8768 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8769 && ((offset0 == offset1)
8770 || (offset0 && offset1
8771 && operand_equal_p (offset0, offset1, 0))))
8773 if (indirect_base0)
8774 base0 = build_fold_addr_expr_loc (loc, base0);
8775 if (indirect_base1)
8776 base1 = build_fold_addr_expr_loc (loc, base1);
8777 return fold_build2_loc (loc, code, type, base0, base1);
8781 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8782 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8783 the resulting offset is smaller in absolute value than the
8784 original one. */
8785 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8786 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8787 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8788 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8789 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8790 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8791 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8793 tree const1 = TREE_OPERAND (arg0, 1);
8794 tree const2 = TREE_OPERAND (arg1, 1);
8795 tree variable1 = TREE_OPERAND (arg0, 0);
8796 tree variable2 = TREE_OPERAND (arg1, 0);
8797 tree cst;
8798 const char * const warnmsg = G_("assuming signed overflow does not "
8799 "occur when combining constants around "
8800 "a comparison");
8802 /* Put the constant on the side where it doesn't overflow and is
8803 of lower absolute value than before. */
8804 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8805 ? MINUS_EXPR : PLUS_EXPR,
8806 const2, const1, 0);
8807 if (!TREE_OVERFLOW (cst)
8808 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8810 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8811 return fold_build2_loc (loc, code, type,
8812 variable1,
8813 fold_build2_loc (loc,
8814 TREE_CODE (arg1), TREE_TYPE (arg1),
8815 variable2, cst));
8818 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8819 ? MINUS_EXPR : PLUS_EXPR,
8820 const1, const2, 0);
8821 if (!TREE_OVERFLOW (cst)
8822 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8824 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8825 return fold_build2_loc (loc, code, type,
8826 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8827 variable1, cst),
8828 variable2);
8832 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8833 signed arithmetic case. That form is created by the compiler
8834 often enough for folding it to be of value. One example is in
8835 computing loop trip counts after Operator Strength Reduction. */
8836 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8837 && TREE_CODE (arg0) == MULT_EXPR
8838 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8839 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8840 && integer_zerop (arg1))
8842 tree const1 = TREE_OPERAND (arg0, 1);
8843 tree const2 = arg1; /* zero */
8844 tree variable1 = TREE_OPERAND (arg0, 0);
8845 enum tree_code cmp_code = code;
8847 /* Handle unfolded multiplication by zero. */
8848 if (integer_zerop (const1))
8849 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8851 fold_overflow_warning (("assuming signed overflow does not occur when "
8852 "eliminating multiplication in comparison "
8853 "with zero"),
8854 WARN_STRICT_OVERFLOW_COMPARISON);
8856 /* If const1 is negative we swap the sense of the comparison. */
8857 if (tree_int_cst_sgn (const1) < 0)
8858 cmp_code = swap_tree_comparison (cmp_code);
8860 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8863 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8864 if (tem)
8865 return tem;
8867 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8869 tree targ0 = strip_float_extensions (arg0);
8870 tree targ1 = strip_float_extensions (arg1);
8871 tree newtype = TREE_TYPE (targ0);
8873 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8874 newtype = TREE_TYPE (targ1);
8876 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8877 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8878 return fold_build2_loc (loc, code, type,
8879 fold_convert_loc (loc, newtype, targ0),
8880 fold_convert_loc (loc, newtype, targ1));
8882 /* (-a) CMP (-b) -> b CMP a */
8883 if (TREE_CODE (arg0) == NEGATE_EXPR
8884 && TREE_CODE (arg1) == NEGATE_EXPR)
8885 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8886 TREE_OPERAND (arg0, 0));
8888 if (TREE_CODE (arg1) == REAL_CST)
8890 REAL_VALUE_TYPE cst;
8891 cst = TREE_REAL_CST (arg1);
8893 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8894 if (TREE_CODE (arg0) == NEGATE_EXPR)
8895 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8896 TREE_OPERAND (arg0, 0),
8897 build_real (TREE_TYPE (arg1),
8898 real_value_negate (&cst)));
8900 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8901 /* a CMP (-0) -> a CMP 0 */
8902 if (REAL_VALUE_MINUS_ZERO (cst))
8903 return fold_build2_loc (loc, code, type, arg0,
8904 build_real (TREE_TYPE (arg1), dconst0));
8906 /* x != NaN is always true, other ops are always false. */
8907 if (REAL_VALUE_ISNAN (cst)
8908 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8910 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8911 return omit_one_operand_loc (loc, type, tem, arg0);
8914 /* Fold comparisons against infinity. */
8915 if (REAL_VALUE_ISINF (cst)
8916 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8918 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8919 if (tem != NULL_TREE)
8920 return tem;
8924 /* If this is a comparison of a real constant with a PLUS_EXPR
8925 or a MINUS_EXPR of a real constant, we can convert it into a
8926 comparison with a revised real constant as long as no overflow
8927 occurs when unsafe_math_optimizations are enabled. */
8928 if (flag_unsafe_math_optimizations
8929 && TREE_CODE (arg1) == REAL_CST
8930 && (TREE_CODE (arg0) == PLUS_EXPR
8931 || TREE_CODE (arg0) == MINUS_EXPR)
8932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8933 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8934 ? MINUS_EXPR : PLUS_EXPR,
8935 arg1, TREE_OPERAND (arg0, 1)))
8936 && !TREE_OVERFLOW (tem))
8937 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
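/* Illustrative note (requires -funsafe-math-optimizations):
   x + 1.0 < 3.0 becomes x < 2.0, provided computing 3.0 - 1.0
   does not overflow.  */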
8939 /* Likewise, we can simplify a comparison of a real constant with
8940 a MINUS_EXPR whose first operand is also a real constant, i.e.
8941 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8942 floating-point types only if -fassociative-math is set. */
8943 if (flag_associative_math
8944 && TREE_CODE (arg1) == REAL_CST
8945 && TREE_CODE (arg0) == MINUS_EXPR
8946 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8947 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8948 arg1))
8949 && !TREE_OVERFLOW (tem))
8950 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8951 TREE_OPERAND (arg0, 1), tem);
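/* Illustrative note (requires -fassociative-math):
   (10.0 - x) < 4.0 becomes x > 6.0.  */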
8953 /* Fold comparisons against built-in math functions. */
8954 if (TREE_CODE (arg1) == REAL_CST
8955 && flag_unsafe_math_optimizations
8956 && ! flag_errno_math)
8958 enum built_in_function fcode = builtin_mathfn_code (arg0);
8960 if (fcode != END_BUILTINS)
8962 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8963 if (tem != NULL_TREE)
8964 return tem;
8969 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8970 && CONVERT_EXPR_P (arg0))
8972 /* If we are widening one operand of an integer comparison,
8973 see if the other operand is similarly being widened. Perhaps we
8974 can do the comparison in the narrower type. */
8975 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8976 if (tem)
8977 return tem;
8979 /* Or if we are changing signedness. */
8980 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8981 if (tem)
8982 return tem;
8985 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8986 constant, we can simplify it. */
8987 if (TREE_CODE (arg1) == INTEGER_CST
8988 && (TREE_CODE (arg0) == MIN_EXPR
8989 || TREE_CODE (arg0) == MAX_EXPR)
8990 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8992 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8993 if (tem)
8994 return tem;
8997 /* Simplify comparison of something with itself. (For IEEE
8998 floating-point, we can only do some of these simplifications.) */
8999 if (operand_equal_p (arg0, arg1, 0))
9001 switch (code)
9003 case EQ_EXPR:
9004 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9005 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9006 return constant_boolean_node (1, type);
9007 break;
9009 case GE_EXPR:
9010 case LE_EXPR:
9011 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9012 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9013 return constant_boolean_node (1, type);
9014 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9016 case NE_EXPR:
9017 /* For NE, we can only do this simplification if integer
9018 or we don't honor IEEE floating point NaNs. */
9019 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9020 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9021 break;
9022 /* ... fall through ... */
9023 case GT_EXPR:
9024 case LT_EXPR:
9025 return constant_boolean_node (0, type);
9026 default:
9027 gcc_unreachable ();
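/* Illustrative note: for integral x, x <= x folds to true; for IEEE
   floats it folds to x == x (false when x is NaN), while x < x folds
   to false in either case.  */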
9031 /* If we are comparing an expression that just has comparisons
9032 of two integer values, arithmetic expressions of those comparisons,
9033 and constants, we can simplify it. There are only three cases
9034 to check: the two values can either be equal, the first can be
9035 greater, or the second can be greater. Fold the expression for
9036 those three values. Since each value must be 0 or 1, we have
9037 eight possibilities, each of which corresponds to the constant 0
9038 or 1 or one of the six possible comparisons.
9040 This handles common cases like (a > b) == 0 but also handles
9041 expressions like ((x > y) - (y > x)) > 0, which supposedly
9042 occur in macroized code. */
9044 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9046 tree cval1 = 0, cval2 = 0;
9047 int save_p = 0;
9049 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9050 /* Don't handle degenerate cases here; they should already
9051 have been handled anyway. */
9052 && cval1 != 0 && cval2 != 0
9053 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9054 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9055 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9056 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9057 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9058 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9059 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9061 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9062 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9064 /* We can't just pass T to eval_subst in case cval1 or cval2
9065 was the same as ARG1. */
9067 tree high_result
9068 = fold_build2_loc (loc, code, type,
9069 eval_subst (loc, arg0, cval1, maxval,
9070 cval2, minval),
9071 arg1);
9072 tree equal_result
9073 = fold_build2_loc (loc, code, type,
9074 eval_subst (loc, arg0, cval1, maxval,
9075 cval2, maxval),
9076 arg1);
9077 tree low_result
9078 = fold_build2_loc (loc, code, type,
9079 eval_subst (loc, arg0, cval1, minval,
9080 cval2, maxval),
9081 arg1);
9083 /* All three of these results should be 0 or 1. Confirm they are.
9084 Then use those values to select the proper code to use. */
9086 if (TREE_CODE (high_result) == INTEGER_CST
9087 && TREE_CODE (equal_result) == INTEGER_CST
9088 && TREE_CODE (low_result) == INTEGER_CST)
9090 /* Make a 3-bit mask with the high-order bit being the
9091 value for `>', the next for '=', and the low for '<'. */
9092 switch ((integer_onep (high_result) * 4)
9093 + (integer_onep (equal_result) * 2)
9094 + integer_onep (low_result))
9096 case 0:
9097 /* Always false. */
9098 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9099 case 1:
9100 code = LT_EXPR;
9101 break;
9102 case 2:
9103 code = EQ_EXPR;
9104 break;
9105 case 3:
9106 code = LE_EXPR;
9107 break;
9108 case 4:
9109 code = GT_EXPR;
9110 break;
9111 case 5:
9112 code = NE_EXPR;
9113 break;
9114 case 6:
9115 code = GE_EXPR;
9116 break;
9117 case 7:
9118 /* Always true. */
9119 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9122 if (save_p)
9124 tem = save_expr (build2 (code, type, cval1, cval2));
9125 SET_EXPR_LOCATION (tem, loc);
9126 return tem;
9128 return fold_build2_loc (loc, code, type, cval1, cval2);
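/* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the three
   substitutions yield high_result = 1, equal_result = 0 and
   low_result = 0, so the mask is 4 and the whole expression folds
   to x > y.  */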
9133 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9134 into a single range test. */
9135 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9136 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9137 && TREE_CODE (arg1) == INTEGER_CST
9138 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9139 && !integer_zerop (TREE_OPERAND (arg0, 1))
9140 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9141 && !TREE_OVERFLOW (arg1))
9143 tem = fold_div_compare (loc, code, type, arg0, arg1);
9144 if (tem != NULL_TREE)
9145 return tem;
9148 /* Fold ~X op ~Y as Y op X. */
9149 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9150 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9152 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9153 return fold_build2_loc (loc, code, type,
9154 fold_convert_loc (loc, cmp_type,
9155 TREE_OPERAND (arg1, 0)),
9156 TREE_OPERAND (arg0, 0));
9159 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9160 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9161 && TREE_CODE (arg1) == INTEGER_CST)
9163 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9164 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9165 TREE_OPERAND (arg0, 0),
9166 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9167 fold_convert_loc (loc, cmp_type, arg1)));
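/* Illustrative note: ~x < ~y rewrites to y < x, and ~x == 5 becomes
   x == ~5, i.e. x == -6 for two's-complement int.  */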
9170 return NULL_TREE;
9174 /* Subroutine of fold_binary. Optimize complex multiplications of the
9175 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9176 argument EXPR represents the expression "z" of type TYPE. */
9178 static tree
9179 fold_mult_zconjz (location_t loc, tree type, tree expr)
9181 tree itype = TREE_TYPE (type);
9182 tree rpart, ipart, tem;
9184 if (TREE_CODE (expr) == COMPLEX_EXPR)
9186 rpart = TREE_OPERAND (expr, 0);
9187 ipart = TREE_OPERAND (expr, 1);
9189 else if (TREE_CODE (expr) == COMPLEX_CST)
9191 rpart = TREE_REALPART (expr);
9192 ipart = TREE_IMAGPART (expr);
9194 else
9196 expr = save_expr (expr);
9197 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9198 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9201 rpart = save_expr (rpart);
9202 ipart = save_expr (ipart);
9203 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9204 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9205 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9206 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9207 build_zero_cst (itype));
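/* Illustrative note: for _Complex int z = a + b*I, folding
   z * conj (z) through this routine yields
   COMPLEX_EXPR <a*a + b*b, 0>.  */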
9211 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9212 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9213 guarantees that P and N have the same least significant log2(M) bits.
9214 N is not otherwise constrained. In particular, N is not normalized to
9215 0 <= N < M as is common. In general, the precise value of P is unknown.
9216 M is chosen as large as possible such that constant N can be determined.
9218 Returns M and sets *RESIDUE to N.
9220 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9221 account. This is not always possible due to PR 35705.
9224 static unsigned HOST_WIDE_INT
9225 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9226 bool allow_func_align)
9228 enum tree_code code;
9230 *residue = 0;
9232 code = TREE_CODE (expr);
9233 if (code == ADDR_EXPR)
9235 unsigned int bitalign;
9236 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9237 *residue /= BITS_PER_UNIT;
9238 return bitalign / BITS_PER_UNIT;
9240 else if (code == POINTER_PLUS_EXPR)
9242 tree op0, op1;
9243 unsigned HOST_WIDE_INT modulus;
9244 enum tree_code inner_code;
9246 op0 = TREE_OPERAND (expr, 0);
9247 STRIP_NOPS (op0);
9248 modulus = get_pointer_modulus_and_residue (op0, residue,
9249 allow_func_align);
9251 op1 = TREE_OPERAND (expr, 1);
9252 STRIP_NOPS (op1);
9253 inner_code = TREE_CODE (op1);
9254 if (inner_code == INTEGER_CST)
9256 *residue += TREE_INT_CST_LOW (op1);
9257 return modulus;
9259 else if (inner_code == MULT_EXPR)
9261 op1 = TREE_OPERAND (op1, 1);
9262 if (TREE_CODE (op1) == INTEGER_CST)
9264 unsigned HOST_WIDE_INT align;
9266 /* Compute the greatest power-of-2 divisor of op1. */
9267 align = TREE_INT_CST_LOW (op1);
9268 align &= -align;
9270 /* If align is non-zero and less than *modulus, replace
9271 *modulus with align.  If align is 0, then either op1 is 0
9272 or the greatest power-of-2 divisor of op1 doesn't fit in an
9273 unsigned HOST_WIDE_INT. In either case, no additional
9274 constraint is imposed. */
9275 if (align)
9276 modulus = MIN (modulus, align);
9278 return modulus;
9283 /* If we get here, we were unable to determine anything useful about the
9284 expression. */
9285 return 1;
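/* Illustrative example: for &buf[3] with
     static char buf[16] __attribute__ ((aligned (16)));
   the ADDR_EXPR case above returns modulus 16 and sets *residue to 3,
   i.e. the pointer value is known to equal 3 modulo 16.  */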
9289 /* Fold a binary expression of code CODE and type TYPE with operands
9290 OP0 and OP1. LOC is the location of the resulting expression.
9291 Return the folded expression if folding is successful. Otherwise,
9292 return NULL_TREE. */
9294 tree
9295 fold_binary_loc (location_t loc,
9296 enum tree_code code, tree type, tree op0, tree op1)
9298 enum tree_code_class kind = TREE_CODE_CLASS (code);
9299 tree arg0, arg1, tem;
9300 tree t1 = NULL_TREE;
9301 bool strict_overflow_p;
9303 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9304 && TREE_CODE_LENGTH (code) == 2
9305 && op0 != NULL_TREE
9306 && op1 != NULL_TREE);
9308 arg0 = op0;
9309 arg1 = op1;
9311 /* Strip any conversions that don't change the mode. This is
9312 safe for every expression, except for a comparison expression
9313 because its signedness is derived from its operands. So, in
9314 the latter case, only strip conversions that don't change the
9315 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9316 preserved.
9318 Note that this is done as an internal manipulation within the
9319 constant folder, in order to find the simplest representation
9320 of the arguments so that their form can be studied. In any
9321 cases, the appropriate type conversions should be put back in
9322 the tree that will get out of the constant folder. */
9324 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9326 STRIP_SIGN_NOPS (arg0);
9327 STRIP_SIGN_NOPS (arg1);
9329 else
9331 STRIP_NOPS (arg0);
9332 STRIP_NOPS (arg1);
9335 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9336 constant but we can't do arithmetic on them. */
9337 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9338 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9339 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9340 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9341 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9342 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9344 if (kind == tcc_binary)
9346 /* Make sure type and arg0 have the same saturating flag. */
9347 gcc_assert (TYPE_SATURATING (type)
9348 == TYPE_SATURATING (TREE_TYPE (arg0)));
9349 tem = const_binop (code, arg0, arg1);
9351 else if (kind == tcc_comparison)
9352 tem = fold_relational_const (code, type, arg0, arg1);
9353 else
9354 tem = NULL_TREE;
9356 if (tem != NULL_TREE)
9358 if (TREE_TYPE (tem) != type)
9359 tem = fold_convert_loc (loc, type, tem);
9360 return tem;
9364 /* If this is a commutative operation, and ARG0 is a constant, move it
9365 to ARG1 to reduce the number of tests below. */
9366 if (commutative_tree_code (code)
9367 && tree_swap_operands_p (arg0, arg1, true))
9368 return fold_build2_loc (loc, code, type, op1, op0);
9370 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9372 First check for cases where an arithmetic operation is applied to a
9373 compound, conditional, or comparison operation. Push the arithmetic
9374 operation inside the compound or conditional to see if any folding
9375 can then be done. Convert comparison to conditional for this purpose.
9376 This also optimizes non-constant cases that used to be done in
9377 expand_expr.
9379 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9380 where one of the operands is a truth value and the other is a truth
9381 value or a BIT_AND_EXPR with the constant 1. In that case, the
9382 code below would make the expression more complex. Change it to a
9383 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9384 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9386 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9387 || code == EQ_EXPR || code == NE_EXPR)
9388 && ((truth_value_p (TREE_CODE (arg0))
9389 && (truth_value_p (TREE_CODE (arg1))
9390 || (TREE_CODE (arg1) == BIT_AND_EXPR
9391 && integer_onep (TREE_OPERAND (arg1, 1)))))
9392 || (truth_value_p (TREE_CODE (arg1))
9393 && (truth_value_p (TREE_CODE (arg0))
9394 || (TREE_CODE (arg0) == BIT_AND_EXPR
9395 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9397 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9398 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9399 : TRUTH_XOR_EXPR,
9400 boolean_type_node,
9401 fold_convert_loc (loc, boolean_type_node, arg0),
9402 fold_convert_loc (loc, boolean_type_node, arg1));
9404 if (code == EQ_EXPR)
9405 tem = invert_truthvalue_loc (loc, tem);
9407 return fold_convert_loc (loc, type, tem);
9410 if (TREE_CODE_CLASS (code) == tcc_binary
9411 || TREE_CODE_CLASS (code) == tcc_comparison)
9413 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9415 tem = fold_build2_loc (loc, code, type,
9416 fold_convert_loc (loc, TREE_TYPE (op0),
9417 TREE_OPERAND (arg0, 1)), op1);
9418 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9419 tem);
9421 if (TREE_CODE (arg1) == COMPOUND_EXPR
9422 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9424 tem = fold_build2_loc (loc, code, type, op0,
9425 fold_convert_loc (loc, TREE_TYPE (op1),
9426 TREE_OPERAND (arg1, 1)));
9427 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9428 tem);
9431 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9433 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9434 arg0, arg1,
9435 /*cond_first_p=*/1);
9436 if (tem != NULL_TREE)
9437 return tem;
9440 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9442 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9443 arg1, arg0,
9444 /*cond_first_p=*/0);
9445 if (tem != NULL_TREE)
9446 return tem;
9450 switch (code)
9452 case MEM_REF:
9453 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9454 if (TREE_CODE (arg0) == ADDR_EXPR
9455 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9457 tree iref = TREE_OPERAND (arg0, 0);
9458 return fold_build2 (MEM_REF, type,
9459 TREE_OPERAND (iref, 0),
9460 int_const_binop (PLUS_EXPR, arg1,
9461 TREE_OPERAND (iref, 1), 0));
9464 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9465 if (TREE_CODE (arg0) == ADDR_EXPR
9466 && handled_component_p (TREE_OPERAND (arg0, 0)))
9468 tree base;
9469 HOST_WIDE_INT coffset;
9470 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9471 &coffset);
9472 if (!base)
9473 return NULL_TREE;
9474 return fold_build2 (MEM_REF, type,
9475 build_fold_addr_expr (base),
9476 int_const_binop (PLUS_EXPR, arg1,
9477 size_int (coffset), 0));
9480 return NULL_TREE;
9482 case POINTER_PLUS_EXPR:
9483 /* 0 +p index -> (type)index */
9484 if (integer_zerop (arg0))
9485 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9487 /* PTR +p 0 -> PTR */
9488 if (integer_zerop (arg1))
9489 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9491 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9492 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9493 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9494 return fold_convert_loc (loc, type,
9495 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9496 fold_convert_loc (loc, sizetype,
9497 arg1),
9498 fold_convert_loc (loc, sizetype,
9499 arg0)));
9501 /* index +p PTR -> PTR +p index */
9502 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9503 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9504 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9505 fold_convert_loc (loc, type, arg1),
9506 fold_convert_loc (loc, sizetype, arg0));
9508 /* (PTR +p B) +p A -> PTR +p (B + A) */
9509 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9511 tree inner;
9512 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9513 tree arg00 = TREE_OPERAND (arg0, 0);
9514 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9515 arg01, fold_convert_loc (loc, sizetype, arg1));
9516 return fold_convert_loc (loc, type,
9517 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9518 TREE_TYPE (arg00),
9519 arg00, inner));
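/* Illustrative note: (p p+ 4) p+ 8 re-associates to p p+ 12 through
   the transformation above.  */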
9522 /* PTR_CST +p CST -> CST1 */
9523 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9524 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9525 fold_convert_loc (loc, type, arg1));
9527 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9528 of the array. The loop optimizer sometimes produces this kind of
9529 expression. */
9530 if (TREE_CODE (arg0) == ADDR_EXPR)
9532 tem = try_move_mult_to_index (loc, arg0,
9533 fold_convert_loc (loc, sizetype, arg1));
9534 if (tem)
9535 return fold_convert_loc (loc, type, tem);
9538 return NULL_TREE;
9540 case PLUS_EXPR:
9541 /* A + (-B) -> A - B */
9542 if (TREE_CODE (arg1) == NEGATE_EXPR)
9543 return fold_build2_loc (loc, MINUS_EXPR, type,
9544 fold_convert_loc (loc, type, arg0),
9545 fold_convert_loc (loc, type,
9546 TREE_OPERAND (arg1, 0)));
9547 /* (-A) + B -> B - A */
9548 if (TREE_CODE (arg0) == NEGATE_EXPR
9549 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9550 return fold_build2_loc (loc, MINUS_EXPR, type,
9551 fold_convert_loc (loc, type, arg1),
9552 fold_convert_loc (loc, type,
9553 TREE_OPERAND (arg0, 0)));
9555 if (INTEGRAL_TYPE_P (type))
9557 /* Convert ~A + 1 to -A. */
9558 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9559 && integer_onep (arg1))
9560 return fold_build1_loc (loc, NEGATE_EXPR, type,
9561 fold_convert_loc (loc, type,
9562 TREE_OPERAND (arg0, 0)));
9564 /* ~X + X is -1. */
9565 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9566 && !TYPE_OVERFLOW_TRAPS (type))
9568 tree tem = TREE_OPERAND (arg0, 0);
9570 STRIP_NOPS (tem);
9571 if (operand_equal_p (tem, arg1, 0))
9573 t1 = build_int_cst_type (type, -1);
9574 return omit_one_operand_loc (loc, type, t1, arg1);
9578 /* X + ~X is -1. */
9579 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9580 && !TYPE_OVERFLOW_TRAPS (type))
9582 tree tem = TREE_OPERAND (arg1, 0);
9584 STRIP_NOPS (tem);
9585 if (operand_equal_p (arg0, tem, 0))
9587 t1 = build_int_cst_type (type, -1);
9588 return omit_one_operand_loc (loc, type, t1, arg0);
9592 /* X + (X / CST) * -CST is X % CST. */
9593 if (TREE_CODE (arg1) == MULT_EXPR
9594 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9595 && operand_equal_p (arg0,
9596 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9598 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9599 tree cst1 = TREE_OPERAND (arg1, 1);
9600 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9601 cst1, cst0);
9602 if (sum && integer_zerop (sum))
9603 return fold_convert_loc (loc, type,
9604 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9605 TREE_TYPE (arg0), arg0,
9606 cst0));
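/* Illustrative note: for integral types, x + (x / 16) * -16 folds
   to x % 16 here.  */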
9610 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9611 same or one. Make sure type is not saturating.
9612 fold_plusminus_mult_expr will re-associate. */
9613 if ((TREE_CODE (arg0) == MULT_EXPR
9614 || TREE_CODE (arg1) == MULT_EXPR)
9615 && !TYPE_SATURATING (type)
9616 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9618 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9619 if (tem)
9620 return tem;
9623 if (! FLOAT_TYPE_P (type))
9625 if (integer_zerop (arg1))
9626 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9628 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9629 with a constant, and the two constants have no bits in common,
9630 we should treat this as a BIT_IOR_EXPR since this may produce more
9631 simplifications. */
9632 if (TREE_CODE (arg0) == BIT_AND_EXPR
9633 && TREE_CODE (arg1) == BIT_AND_EXPR
9634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9635 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9636 && integer_zerop (const_binop (BIT_AND_EXPR,
9637 TREE_OPERAND (arg0, 1),
9638 TREE_OPERAND (arg1, 1))))
9640 code = BIT_IOR_EXPR;
9641 goto bit_ior;
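/* Illustrative note: (x & 0xF0) + (y & 0x0F) is retried as
   (x & 0xF0) | (y & 0x0F) because the two masks share no bits.  */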
9644 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9645 (plus (plus (mult) (mult)) (foo)) so that we can
9646 take advantage of the factoring cases below. */
9647 if (((TREE_CODE (arg0) == PLUS_EXPR
9648 || TREE_CODE (arg0) == MINUS_EXPR)
9649 && TREE_CODE (arg1) == MULT_EXPR)
9650 || ((TREE_CODE (arg1) == PLUS_EXPR
9651 || TREE_CODE (arg1) == MINUS_EXPR)
9652 && TREE_CODE (arg0) == MULT_EXPR))
9654 tree parg0, parg1, parg, marg;
9655 enum tree_code pcode;
9657 if (TREE_CODE (arg1) == MULT_EXPR)
9658 parg = arg0, marg = arg1;
9659 else
9660 parg = arg1, marg = arg0;
9661 pcode = TREE_CODE (parg);
9662 parg0 = TREE_OPERAND (parg, 0);
9663 parg1 = TREE_OPERAND (parg, 1);
9664 STRIP_NOPS (parg0);
9665 STRIP_NOPS (parg1);
9667 if (TREE_CODE (parg0) == MULT_EXPR
9668 && TREE_CODE (parg1) != MULT_EXPR)
9669 return fold_build2_loc (loc, pcode, type,
9670 fold_build2_loc (loc, PLUS_EXPR, type,
9671 fold_convert_loc (loc, type,
9672 parg0),
9673 fold_convert_loc (loc, type,
9674 marg)),
9675 fold_convert_loc (loc, type, parg1));
9676 if (TREE_CODE (parg0) != MULT_EXPR
9677 && TREE_CODE (parg1) == MULT_EXPR)
9678 return
9679 fold_build2_loc (loc, PLUS_EXPR, type,
9680 fold_convert_loc (loc, type, parg0),
9681 fold_build2_loc (loc, pcode, type,
9682 fold_convert_loc (loc, type, marg),
9683 fold_convert_loc (loc, type,
9684 parg1)));
9687 else
9689 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9690 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9691 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9693 /* Likewise if the operands are reversed. */
9694 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9695 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9697 /* Convert X + -C into X - C. */
9698 if (TREE_CODE (arg1) == REAL_CST
9699 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9701 tem = fold_negate_const (arg1, type);
9702 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9703 return fold_build2_loc (loc, MINUS_EXPR, type,
9704 fold_convert_loc (loc, type, arg0),
9705 fold_convert_loc (loc, type, tem));
9708 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9709 to __complex__ ( x, y ). This is not the same for SNaNs or
9710 if signed zeros are involved. */
9711 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9712 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9713 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9715 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9716 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9717 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9718 bool arg0rz = false, arg0iz = false;
9719 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9720 || (arg0i && (arg0iz = real_zerop (arg0i))))
9722 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9723 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9724 if (arg0rz && arg1i && real_zerop (arg1i))
9726 tree rp = arg1r ? arg1r
9727 : build1 (REALPART_EXPR, rtype, arg1);
9728 tree ip = arg0i ? arg0i
9729 : build1 (IMAGPART_EXPR, rtype, arg0);
9730 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9732 else if (arg0iz && arg1r && real_zerop (arg1r))
9734 tree rp = arg0r ? arg0r
9735 : build1 (REALPART_EXPR, rtype, arg0);
9736 tree ip = arg1i ? arg1i
9737 : build1 (IMAGPART_EXPR, rtype, arg1);
9738 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9743 if (flag_unsafe_math_optimizations
9744 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9745 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9746 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9747 return tem;
9749 /* Convert x+x into x*2.0. */
9750 if (operand_equal_p (arg0, arg1, 0)
9751 && SCALAR_FLOAT_TYPE_P (type))
9752 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9753 build_real (type, dconst2));
9755 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9756 We associate floats only if the user has specified
9757 -fassociative-math. */
9758 if (flag_associative_math
9759 && TREE_CODE (arg1) == PLUS_EXPR
9760 && TREE_CODE (arg0) != MULT_EXPR)
9762 tree tree10 = TREE_OPERAND (arg1, 0);
9763 tree tree11 = TREE_OPERAND (arg1, 1);
9764 if (TREE_CODE (tree11) == MULT_EXPR
9765 && TREE_CODE (tree10) == MULT_EXPR)
9767 tree tree0;
9768 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9769 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9772 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9773 We associate floats only if the user has specified
9774 -fassociative-math. */
9775 if (flag_associative_math
9776 && TREE_CODE (arg0) == PLUS_EXPR
9777 && TREE_CODE (arg1) != MULT_EXPR)
9779 tree tree00 = TREE_OPERAND (arg0, 0);
9780 tree tree01 = TREE_OPERAND (arg0, 1);
9781 if (TREE_CODE (tree01) == MULT_EXPR
9782 && TREE_CODE (tree00) == MULT_EXPR)
9784 tree tree0;
9785 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9786 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9791 bit_rotate:
9792 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9793 is a rotate of A by C1 bits. */
9794 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9795 is a rotate of A by B bits. */
9797 enum tree_code code0, code1;
9798 tree rtype;
9799 code0 = TREE_CODE (arg0);
9800 code1 = TREE_CODE (arg1);
9801 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9802 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9803 && operand_equal_p (TREE_OPERAND (arg0, 0),
9804 TREE_OPERAND (arg1, 0), 0)
9805 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9806 TYPE_UNSIGNED (rtype))
9807 /* Only create rotates in complete modes. Other cases are not
9808 expanded properly. */
9809 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9811 tree tree01, tree11;
9812 enum tree_code code01, code11;
9814 tree01 = TREE_OPERAND (arg0, 1);
9815 tree11 = TREE_OPERAND (arg1, 1);
9816 STRIP_NOPS (tree01);
9817 STRIP_NOPS (tree11);
9818 code01 = TREE_CODE (tree01);
9819 code11 = TREE_CODE (tree11);
9820 if (code01 == INTEGER_CST
9821 && code11 == INTEGER_CST
9822 && TREE_INT_CST_HIGH (tree01) == 0
9823 && TREE_INT_CST_HIGH (tree11) == 0
9824 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9825 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9827 tem = build2_loc (loc, LROTATE_EXPR,
9828 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9829 TREE_OPERAND (arg0, 0),
9830 code0 == LSHIFT_EXPR ? tree01 : tree11);
9831 return fold_convert_loc (loc, type, tem);
9833 else if (code11 == MINUS_EXPR)
9835 tree tree110, tree111;
9836 tree110 = TREE_OPERAND (tree11, 0);
9837 tree111 = TREE_OPERAND (tree11, 1);
9838 STRIP_NOPS (tree110);
9839 STRIP_NOPS (tree111);
9840 if (TREE_CODE (tree110) == INTEGER_CST
9841 && 0 == compare_tree_int (tree110,
9842 TYPE_PRECISION
9843 (TREE_TYPE (TREE_OPERAND
9844 (arg0, 0))))
9845 && operand_equal_p (tree01, tree111, 0))
9846 return
9847 fold_convert_loc (loc, type,
9848 build2 ((code0 == LSHIFT_EXPR
9849 ? LROTATE_EXPR
9850 : RROTATE_EXPR),
9851 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9852 TREE_OPERAND (arg0, 0), tree01));
9854 else if (code01 == MINUS_EXPR)
9856 tree tree010, tree011;
9857 tree010 = TREE_OPERAND (tree01, 0);
9858 tree011 = TREE_OPERAND (tree01, 1);
9859 STRIP_NOPS (tree010);
9860 STRIP_NOPS (tree011);
9861 if (TREE_CODE (tree010) == INTEGER_CST
9862 && 0 == compare_tree_int (tree010,
9863 TYPE_PRECISION
9864 (TREE_TYPE (TREE_OPERAND
9865 (arg0, 0))))
9866 && operand_equal_p (tree11, tree011, 0))
9867 return fold_convert_loc
9868 (loc, type,
9869 build2 ((code0 != LSHIFT_EXPR
9870 ? LROTATE_EXPR
9871 : RROTATE_EXPR),
9872 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9873 TREE_OPERAND (arg0, 0), tree11));
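/* Illustrative note: for unsigned int x in a 32-bit mode, both
   (x << 3) + (x >> 29) and (x << b) + (x >> (32 - b)) are
   recognized above as left rotates of x.  */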
9878 associate:
9879 /* In most languages, we can't associate operations on floats through
9880 parentheses. Rather than remember where the parentheses were, we
9881 don't associate floats at all, unless the user has specified
9882 -fassociative-math.
9883 And, we need to make sure type is not saturating. */
9885 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9886 && !TYPE_SATURATING (type))
9888 tree var0, con0, lit0, minus_lit0;
9889 tree var1, con1, lit1, minus_lit1;
9890 bool ok = true;
9892 /* Split both trees into variables, constants, and literals. Then
9893 associate each group together, the constants with literals,
9894 then the result with variables. This increases the chances of
9895 literals being recombined later and of generating relocatable
9896 expressions for the sum of a constant and literal. */
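/* Illustrative note: for unsigned x, y, (x + 1) + (y + 2)
   re-associates to (x + y) + 3 through this path; with signed types
   and undefined overflow, two distinct variables are not combined,
   per the check below.  */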
9897 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9898 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9899 code == MINUS_EXPR);
9901 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9902 if (code == MINUS_EXPR)
9903 code = PLUS_EXPR;
9905 /* With undefined overflow we can only associate constants with one
9906 variable, and constants whose association doesn't overflow. */
9907 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9908 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9910 if (var0 && var1)
9912 tree tmp0 = var0;
9913 tree tmp1 = var1;
9915 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9916 tmp0 = TREE_OPERAND (tmp0, 0);
9917 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9918 tmp1 = TREE_OPERAND (tmp1, 0);
9919 /* The only case we can still associate with two variables
9920 is if they are the same, modulo negation. */
9921 if (!operand_equal_p (tmp0, tmp1, 0))
9922 ok = false;
9925 if (ok && lit0 && lit1)
9927 tree tmp0 = fold_convert (type, lit0);
9928 tree tmp1 = fold_convert (type, lit1);
9930 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9931 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9932 ok = false;
9936 /* Only do something if we found more than two objects. Otherwise,
9937 nothing has changed and we risk infinite recursion. */
9938 if (ok
9939 && (2 < ((var0 != 0) + (var1 != 0)
9940 + (con0 != 0) + (con1 != 0)
9941 + (lit0 != 0) + (lit1 != 0)
9942 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9944 var0 = associate_trees (loc, var0, var1, code, type);
9945 con0 = associate_trees (loc, con0, con1, code, type);
9946 lit0 = associate_trees (loc, lit0, lit1, code, type);
9947 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9949 /* Preserve the MINUS_EXPR if the negative part of the literal is
9950 greater than the positive part. Otherwise, the multiplicative
9951 folding code (i.e. extract_muldiv) may be fooled when
9952 unsigned constants are subtracted, as in the following
9953 example: ((X*2 + 4) - 8U)/2. */
9954 if (minus_lit0 && lit0)
9956 if (TREE_CODE (lit0) == INTEGER_CST
9957 && TREE_CODE (minus_lit0) == INTEGER_CST
9958 && tree_int_cst_lt (lit0, minus_lit0))
9960 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9961 MINUS_EXPR, type);
9962 lit0 = 0;
9964 else
9966 lit0 = associate_trees (loc, lit0, minus_lit0,
9967 MINUS_EXPR, type);
9968 minus_lit0 = 0;
9971 if (minus_lit0)
9973 if (con0 == 0)
9974 return
9975 fold_convert_loc (loc, type,
9976 associate_trees (loc, var0, minus_lit0,
9977 MINUS_EXPR, type));
9978 else
9980 con0 = associate_trees (loc, con0, minus_lit0,
9981 MINUS_EXPR, type);
9982 return
9983 fold_convert_loc (loc, type,
9984 associate_trees (loc, var0, con0,
9985 PLUS_EXPR, type));
9989 con0 = associate_trees (loc, con0, lit0, code, type);
9990 return
9991 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9992 code, type));
9996 return NULL_TREE;
9998 case MINUS_EXPR:
9999 /* Pointer simplifications for subtraction, simple reassociations. */
10000 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10002 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10003 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10004 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10006 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10007 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10008 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10009 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10010 return fold_build2_loc (loc, PLUS_EXPR, type,
10011 fold_build2_loc (loc, MINUS_EXPR, type,
10012 arg00, arg10),
10013 fold_build2_loc (loc, MINUS_EXPR, type,
10014 arg01, arg11));
10016 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10017 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10019 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10020 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10021 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10022 fold_convert_loc (loc, type, arg1));
10023 if (tmp)
10024 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
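/* Illustrative note: (p p+ a) - (p p+ b) simplifies to a - b by the
   rule above, since p - p folds to zero.  */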
10027 /* A - (-B) -> A + B */
10028 if (TREE_CODE (arg1) == NEGATE_EXPR)
10029 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10030 fold_convert_loc (loc, type,
10031 TREE_OPERAND (arg1, 0)));
10032 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10033 if (TREE_CODE (arg0) == NEGATE_EXPR
10034 && (FLOAT_TYPE_P (type)
10035 || INTEGRAL_TYPE_P (type))
10036 && negate_expr_p (arg1)
10037 && reorder_operands_p (arg0, arg1))
10038 return fold_build2_loc (loc, MINUS_EXPR, type,
10039 fold_convert_loc (loc, type,
10040 negate_expr (arg1)),
10041 fold_convert_loc (loc, type,
10042 TREE_OPERAND (arg0, 0)));
10043 /* Convert -A - 1 to ~A. */
10044 if (INTEGRAL_TYPE_P (type)
10045 && TREE_CODE (arg0) == NEGATE_EXPR
10046 && integer_onep (arg1)
10047 && !TYPE_OVERFLOW_TRAPS (type))
10048 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10049 fold_convert_loc (loc, type,
10050 TREE_OPERAND (arg0, 0)));
10052 /* Convert -1 - A to ~A. */
10053 if (INTEGRAL_TYPE_P (type)
10054 && integer_all_onesp (arg0))
10055 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10058 /* X - (X / CST) * CST is X % CST. */
10059 if (INTEGRAL_TYPE_P (type)
10060 && TREE_CODE (arg1) == MULT_EXPR
10061 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10062 && operand_equal_p (arg0,
10063 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10064 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10065 TREE_OPERAND (arg1, 1), 0))
10066 return
10067 fold_convert_loc (loc, type,
10068 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10069 arg0, TREE_OPERAND (arg1, 1)));
10071 if (! FLOAT_TYPE_P (type))
10073 if (integer_zerop (arg0))
10074 return negate_expr (fold_convert_loc (loc, type, arg1));
10075 if (integer_zerop (arg1))
10076 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10078 /* Fold A - (A & B) into ~B & A. */
10079 if (!TREE_SIDE_EFFECTS (arg0)
10080 && TREE_CODE (arg1) == BIT_AND_EXPR)
10082 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10084 tree arg10 = fold_convert_loc (loc, type,
10085 TREE_OPERAND (arg1, 0));
10086 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10087 fold_build1_loc (loc, BIT_NOT_EXPR,
10088 type, arg10),
10089 fold_convert_loc (loc, type, arg0));
10091 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10093 tree arg11 = fold_convert_loc (loc,
10094 type, TREE_OPERAND (arg1, 1));
10095 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10096 fold_build1_loc (loc, BIT_NOT_EXPR,
10097 type, arg11),
10098 fold_convert_loc (loc, type, arg0));
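/* Illustrative note: a - (a & b) and a - (b & a) both fold to
   ~b & a by the two cases above.  */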
10102 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10103 any power of 2 minus 1. */
10104 if (TREE_CODE (arg0) == BIT_AND_EXPR
10105 && TREE_CODE (arg1) == BIT_AND_EXPR
10106 && operand_equal_p (TREE_OPERAND (arg0, 0),
10107 TREE_OPERAND (arg1, 0), 0))
10109 tree mask0 = TREE_OPERAND (arg0, 1);
10110 tree mask1 = TREE_OPERAND (arg1, 1);
10111 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10113 if (operand_equal_p (tem, mask1, 0))
10115 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10116 TREE_OPERAND (arg0, 0), mask1);
10117 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10122 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10123 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10124 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10126 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10127 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10128 (-ARG1 + ARG0) reduces to -ARG1. */
10129 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10130 return negate_expr (fold_convert_loc (loc, type, arg1));
10132 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10133 __complex__ ( x, -y ). This is not the same for SNaNs or if
10134 signed zeros are involved. */
10135 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10136 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10137 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10139 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10140 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10141 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10142 bool arg0rz = false, arg0iz = false;
10143 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10144 || (arg0i && (arg0iz = real_zerop (arg0i))))
10146 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10147 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10148 if (arg0rz && arg1i && real_zerop (arg1i))
10150 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10151 arg1r ? arg1r
10152 : build1 (REALPART_EXPR, rtype, arg1));
10153 tree ip = arg0i ? arg0i
10154 : build1 (IMAGPART_EXPR, rtype, arg0);
10155 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10157 else if (arg0iz && arg1r && real_zerop (arg1r))
10159 tree rp = arg0r ? arg0r
10160 : build1 (REALPART_EXPR, rtype, arg0);
10161 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10162 arg1i ? arg1i
10163 : build1 (IMAGPART_EXPR, rtype, arg1));
10164 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10169 /* Fold &x - &x. This can happen from &x.foo - &x.
10170 This is unsafe for certain floats even in non-IEEE formats.
10171 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10172 Also note that operand_equal_p is always false if an operand
10173 is volatile. */
10175 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10176 && operand_equal_p (arg0, arg1, 0))
10177 return build_zero_cst (type);
10179 /* A - B -> A + (-B) if B is easily negatable. */
10180 if (negate_expr_p (arg1)
10181 && ((FLOAT_TYPE_P (type)
10182 /* Avoid this transformation if B is a positive REAL_CST. */
10183 && (TREE_CODE (arg1) != REAL_CST
10184 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10185 || INTEGRAL_TYPE_P (type)))
10186 return fold_build2_loc (loc, PLUS_EXPR, type,
10187 fold_convert_loc (loc, type, arg0),
10188 fold_convert_loc (loc, type,
10189 negate_expr (arg1)));
10191 /* Try folding difference of addresses. */
10193 HOST_WIDE_INT diff;
10195 if ((TREE_CODE (arg0) == ADDR_EXPR
10196 || TREE_CODE (arg1) == ADDR_EXPR)
10197 && ptr_difference_const (arg0, arg1, &diff))
10198 return build_int_cst_type (type, diff);
10201 /* Fold &a[i] - &a[j] to i-j. */
10202 if (TREE_CODE (arg0) == ADDR_EXPR
10203 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10204 && TREE_CODE (arg1) == ADDR_EXPR
10205 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10207 tree aref0 = TREE_OPERAND (arg0, 0);
10208 tree aref1 = TREE_OPERAND (arg1, 0);
10209 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10210 TREE_OPERAND (aref1, 0), 0))
10212 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10213 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10214 tree esz = array_ref_element_size (aref0);
10215 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10216 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10217 fold_convert_loc (loc, type, esz));
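/* Illustrative note: the byte difference &a[i] - &a[j] folds to
   (i - j) * sizeof (a[0]) here.  */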
10222 if (FLOAT_TYPE_P (type)
10223 && flag_unsafe_math_optimizations
10224 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10225 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10226 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10227 return tem;
10229 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10230 same or one. Make sure type is not saturating.
10231 fold_plusminus_mult_expr will re-associate. */
10232 if ((TREE_CODE (arg0) == MULT_EXPR
10233 || TREE_CODE (arg1) == MULT_EXPR)
10234 && !TYPE_SATURATING (type)
10235 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10237 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10238 if (tem)
10239 return tem;
10242 goto associate;
10244 case MULT_EXPR:
10245 /* (-A) * (-B) -> A * B */
10246 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10247 return fold_build2_loc (loc, MULT_EXPR, type,
10248 fold_convert_loc (loc, type,
10249 TREE_OPERAND (arg0, 0)),
10250 fold_convert_loc (loc, type,
10251 negate_expr (arg1)));
10252 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10253 return fold_build2_loc (loc, MULT_EXPR, type,
10254 fold_convert_loc (loc, type,
10255 negate_expr (arg0)),
10256 fold_convert_loc (loc, type,
10257 TREE_OPERAND (arg1, 0)));
10259 if (! FLOAT_TYPE_P (type))
10261 if (integer_zerop (arg1))
10262 return omit_one_operand_loc (loc, type, arg1, arg0);
10263 if (integer_onep (arg1))
10264 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10265 /* Transform x * -1 into -x. Make sure to do the negation
10266 on the original operand with conversions not stripped
10267 because we can only strip non-sign-changing conversions. */
10268 if (integer_all_onesp (arg1))
10269 return fold_convert_loc (loc, type, negate_expr (op0));
10270 /* Transform x * -C into -x * C if x is easily negatable. */
10271 if (TREE_CODE (arg1) == INTEGER_CST
10272 && tree_int_cst_sgn (arg1) == -1
10273 && negate_expr_p (arg0)
10274 && (tem = negate_expr (arg1)) != arg1
10275 && !TREE_OVERFLOW (tem))
10276 return fold_build2_loc (loc, MULT_EXPR, type,
10277 fold_convert_loc (loc, type,
10278 negate_expr (arg0)),
10279 tem);
10281 /* (a * (1 << b)) is (a << b) */
10282 if (TREE_CODE (arg1) == LSHIFT_EXPR
10283 && integer_onep (TREE_OPERAND (arg1, 0)))
10284 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10285 TREE_OPERAND (arg1, 1));
10286 if (TREE_CODE (arg0) == LSHIFT_EXPR
10287 && integer_onep (TREE_OPERAND (arg0, 0)))
10288 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10289 TREE_OPERAND (arg0, 1));
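/* Illustrative note: a * (1 << b) and (1 << b) * a both become
   a << b.  */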
10291 /* (A + A) * C -> A * 2 * C */
10292 if (TREE_CODE (arg0) == PLUS_EXPR
10293 && TREE_CODE (arg1) == INTEGER_CST
10294 && operand_equal_p (TREE_OPERAND (arg0, 0),
10295 TREE_OPERAND (arg0, 1), 0))
10296 return fold_build2_loc (loc, MULT_EXPR, type,
10297 omit_one_operand_loc (loc, type,
10298 TREE_OPERAND (arg0, 0),
10299 TREE_OPERAND (arg0, 1)),
10300 fold_build2_loc (loc, MULT_EXPR, type,
10301 build_int_cst (type, 2), arg1));
10303 strict_overflow_p = false;
10304 if (TREE_CODE (arg1) == INTEGER_CST
10305 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10306 &strict_overflow_p)))
10308 if (strict_overflow_p)
10309 fold_overflow_warning (("assuming signed overflow does not "
10310 "occur when simplifying "
10311 "multiplication"),
10312 WARN_STRICT_OVERFLOW_MISC);
10313 return fold_convert_loc (loc, type, tem);
10316 /* Optimize z * conj(z) for integer complex numbers. */
10317 if (TREE_CODE (arg0) == CONJ_EXPR
10318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10319 return fold_mult_zconjz (loc, type, arg1);
10320 if (TREE_CODE (arg1) == CONJ_EXPR
10321 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10322 return fold_mult_zconjz (loc, type, arg0);
10324 else
10326 /* Maybe fold x * 0 to 0. The expressions aren't the same
10327 when x is NaN, since x * 0 is also NaN. Nor are they the
10328 same in modes with signed zeros, since multiplying a
10329 negative value by 0 gives -0, not +0. */
10330 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10331 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10332 && real_zerop (arg1))
10333 return omit_one_operand_loc (loc, type, arg1, arg0);
10334 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10335 Likewise for complex arithmetic with signed zeros. */
10336 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10337 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10338 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10339 && real_onep (arg1))
10340 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10342 /* Transform x * -1.0 into -x. */
10343 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10344 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10345 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10346 && real_minus_onep (arg1))
10347 return fold_convert_loc (loc, type, negate_expr (arg0));
10349 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10350 the result for floating-point types due to rounding, so it is applied
10351 only if -fassociative-math was specified. */
10352 if (flag_associative_math
10353 && TREE_CODE (arg0) == RDIV_EXPR
10354 && TREE_CODE (arg1) == REAL_CST
10355 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10357 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10358 arg1);
10359 if (tem)
10360 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10361 TREE_OPERAND (arg0, 1));
10364 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10365 if (operand_equal_p (arg0, arg1, 0))
10367 tree tem = fold_strip_sign_ops (arg0);
10368 if (tem != NULL_TREE)
10370 tem = fold_convert_loc (loc, type, tem);
10371 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10375 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10376 This is not the same for NaNs or if signed zeros are
10377 involved. */
10378 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10379 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10380 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10381 && TREE_CODE (arg1) == COMPLEX_CST
10382 && real_zerop (TREE_REALPART (arg1)))
10384 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10385 if (real_onep (TREE_IMAGPART (arg1)))
10386 return
10387 fold_build2_loc (loc, COMPLEX_EXPR, type,
10388 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10389 rtype, arg0)),
10390 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10391 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10392 return
10393 fold_build2_loc (loc, COMPLEX_EXPR, type,
10394 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10395 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10396 rtype, arg0)));
10399 /* Optimize z * conj(z) for floating point complex numbers.
10400 Guarded by flag_unsafe_math_optimizations as non-finite
10401 imaginary components don't produce scalar results. */
10402 if (flag_unsafe_math_optimizations
10403 && TREE_CODE (arg0) == CONJ_EXPR
10404 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10405 return fold_mult_zconjz (loc, type, arg1);
10406 if (flag_unsafe_math_optimizations
10407 && TREE_CODE (arg1) == CONJ_EXPR
10408 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10409 return fold_mult_zconjz (loc, type, arg0);
10411 if (flag_unsafe_math_optimizations)
10413 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10414 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10416 /* Optimizations of root(...)*root(...). */
10417 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10419 tree rootfn, arg;
10420 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10421 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10423 /* Optimize sqrt(x)*sqrt(x) as x. */
10424 if (BUILTIN_SQRT_P (fcode0)
10425 && operand_equal_p (arg00, arg10, 0)
10426 && ! HONOR_SNANS (TYPE_MODE (type)))
10427 return arg00;
10429 /* Optimize root(x)*root(y) as root(x*y). */
10430 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10431 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10432 return build_call_expr_loc (loc, rootfn, 1, arg);
10435 /* Optimize expN(x)*expN(y) as expN(x+y). */
10436 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10438 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10439 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10440 CALL_EXPR_ARG (arg0, 0),
10441 CALL_EXPR_ARG (arg1, 0));
10442 return build_call_expr_loc (loc, expfn, 1, arg);
10445 /* Optimizations of pow(...)*pow(...). */
10446 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10447 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10448 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10450 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10451 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10452 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10453 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10455 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10456 if (operand_equal_p (arg01, arg11, 0))
10458 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10459 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10460 arg00, arg10);
10461 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10464 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10465 if (operand_equal_p (arg00, arg10, 0))
10467 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10468 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10469 arg01, arg11);
10470 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10474 /* Optimize tan(x)*cos(x) as sin(x). */
10475 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10476 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10477 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10478 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10479 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10480 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10481 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10482 CALL_EXPR_ARG (arg1, 0), 0))
10484 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10486 if (sinfn != NULL_TREE)
10487 return build_call_expr_loc (loc, sinfn, 1,
10488 CALL_EXPR_ARG (arg0, 0));
10491 /* Optimize x*pow(x,c) as pow(x,c+1). */
10492 if (fcode1 == BUILT_IN_POW
10493 || fcode1 == BUILT_IN_POWF
10494 || fcode1 == BUILT_IN_POWL)
10496 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10497 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10498 if (TREE_CODE (arg11) == REAL_CST
10499 && !TREE_OVERFLOW (arg11)
10500 && operand_equal_p (arg0, arg10, 0))
10502 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10503 REAL_VALUE_TYPE c;
10504 tree arg;
10506 c = TREE_REAL_CST (arg11);
10507 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10508 arg = build_real (type, c);
10509 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10513 /* Optimize pow(x,c)*x as pow(x,c+1). */
10514 if (fcode0 == BUILT_IN_POW
10515 || fcode0 == BUILT_IN_POWF
10516 || fcode0 == BUILT_IN_POWL)
10518 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10519 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10520 if (TREE_CODE (arg01) == REAL_CST
10521 && !TREE_OVERFLOW (arg01)
10522 && operand_equal_p (arg1, arg00, 0))
10524 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10525 REAL_VALUE_TYPE c;
10526 tree arg;
10528 c = TREE_REAL_CST (arg01);
10529 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10530 arg = build_real (type, c);
10531 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10535 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10536 if (optimize_function_for_speed_p (cfun)
10537 && operand_equal_p (arg0, arg1, 0))
10539 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10541 if (powfn)
10543 tree arg = build_real (type, dconst2);
10544 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
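/* Summary of the unsafe-math rewrites above (illustrative):
     sqrt (x) * sqrt (y)  -> sqrt (x * y)
     exp (x) * exp (y)    -> exp (x + y)
     pow (x, y) * pow (x, z) -> pow (x, y + z)
     tan (x) * cos (x)    -> sin (x)
     x * x -> pow (x, 2.0) when optimizing for speed.  */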
10549 goto associate;
10551 case BIT_IOR_EXPR:
10552 bit_ior:
10553 if (integer_all_onesp (arg1))
10554 return omit_one_operand_loc (loc, type, arg1, arg0);
10555 if (integer_zerop (arg1))
10556 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10557 if (operand_equal_p (arg0, arg1, 0))
10558 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10560 /* ~X | X is -1. */
10561 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10562 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10564 t1 = build_zero_cst (type);
10565 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10566 return omit_one_operand_loc (loc, type, t1, arg1);
10569 /* X | ~X is -1. */
10570 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10571 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10573 t1 = build_zero_cst (type);
10574 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10575 return omit_one_operand_loc (loc, type, t1, arg0);
10578 /* Canonicalize (X & C1) | C2. */
10579 if (TREE_CODE (arg0) == BIT_AND_EXPR
10580 && TREE_CODE (arg1) == INTEGER_CST
10581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10583 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10584 int width = TYPE_PRECISION (type), w;
10585 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10586 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10587 hi2 = TREE_INT_CST_HIGH (arg1);
10588 lo2 = TREE_INT_CST_LOW (arg1);
10590 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10591 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10592 return omit_one_operand_loc (loc, type, arg1,
10593 TREE_OPERAND (arg0, 0));
10595 if (width > HOST_BITS_PER_WIDE_INT)
10597 mhi = (unsigned HOST_WIDE_INT) -1
10598 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10599 mlo = -1;
10601 else
10603 mhi = 0;
10604 mlo = (unsigned HOST_WIDE_INT) -1
10605 >> (HOST_BITS_PER_WIDE_INT - width);
10608 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10609 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10610 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10611 TREE_OPERAND (arg0, 0), arg1);
10613 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10614 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10615 mode which allows further optimizations. */
10616 hi1 &= mhi;
10617 lo1 &= mlo;
10618 hi2 &= mhi;
10619 lo2 &= mlo;
10620 hi3 = hi1 & ~hi2;
10621 lo3 = lo1 & ~lo2;
10622 for (w = BITS_PER_UNIT;
10623 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10624 w <<= 1)
10626 unsigned HOST_WIDE_INT mask
10627 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10628 if (((lo1 | lo2) & mask) == mask
10629 && (lo1 & ~mask) == 0 && hi1 == 0)
10631 hi3 = 0;
10632 lo3 = mask;
10633 break;
10636 if (hi3 != hi1 || lo3 != lo1)
10637 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10638 fold_build2_loc (loc, BIT_AND_EXPR, type,
10639 TREE_OPERAND (arg0, 0),
10640 build_int_cst_wide (type,
10641 lo3, hi3)),
10642 arg1);
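/* Worked example (a sketch, not in the original source): for 32-bit x,
   (x & 0xf0) | 0x0f is rewritten as (x & 0xff) | 0x0f, since the loop
   above finds that 0xf0 | 0x0f fills the 8-bit mask 0xff, a mode mask
   that may enable further narrowing optimizations.  */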
10645 /* (X & Y) | Y is (X, Y). */
10646 if (TREE_CODE (arg0) == BIT_AND_EXPR
10647 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10648 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10649 /* (X & Y) | X is (Y, X). */
10650 if (TREE_CODE (arg0) == BIT_AND_EXPR
10651 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10652 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10653 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10654 /* X | (X & Y) is (Y, X). */
10655 if (TREE_CODE (arg1) == BIT_AND_EXPR
10656 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10657 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10658 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10659 /* X | (Y & X) is (Y, X). */
10660 if (TREE_CODE (arg1) == BIT_AND_EXPR
10661 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10662 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10663 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10665 /* (X & ~Y) | (~X & Y) is X ^ Y */
10666 if (TREE_CODE (arg0) == BIT_AND_EXPR
10667 && TREE_CODE (arg1) == BIT_AND_EXPR)
10669 tree a0, a1, l0, l1, n0, n1;
10671 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10672 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10674 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10675 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10677 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10678 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10680 if ((operand_equal_p (n0, a0, 0)
10681 && operand_equal_p (n1, a1, 0))
10682 || (operand_equal_p (n0, a1, 0)
10683 && operand_equal_p (n1, a0, 0)))
10684 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
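/* Reading aid (not in the original source): the same return also covers
   the complementary shape (X & Y) | (~X & ~Y), because there l1 is Y
   and n1 is ~Y, so l0 ^ n1 yields X ^ ~Y, i.e. ~(X ^ Y), as required.  */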
10687 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10688 if (t1 != NULL_TREE)
10689 return t1;
10691 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10693 This results in more efficient code for machines without a NAND
10694 instruction. Combine will canonicalize to the first form
10695 which will allow use of NAND instructions provided by the
10696 backend if they exist. */
10697 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10698 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10700 return
10701 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10702 build2 (BIT_AND_EXPR, type,
10703 fold_convert_loc (loc, type,
10704 TREE_OPERAND (arg0, 0)),
10705 fold_convert_loc (loc, type,
10706 TREE_OPERAND (arg1, 0))));
10709 /* See if this can be simplified into a rotate first. If that
10710 is unsuccessful continue in the association code. */
10711 goto bit_rotate;
10713 case BIT_XOR_EXPR:
10714 if (integer_zerop (arg1))
10715 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10716 if (integer_all_onesp (arg1))
10717 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10718 if (operand_equal_p (arg0, arg1, 0))
10719 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10721 /* ~X ^ X is -1. */
10722 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10723 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10725 t1 = build_zero_cst (type);
10726 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10727 return omit_one_operand_loc (loc, type, t1, arg1);
10730 /* X ^ ~X is -1. */
10731 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10732 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10734 t1 = build_zero_cst (type);
10735 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10736 return omit_one_operand_loc (loc, type, t1, arg0);
10739 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10740 with a constant, and the two constants have no bits in common,
10741 we should treat this as a BIT_IOR_EXPR since this may produce more
10742 simplifications. */
10743 if (TREE_CODE (arg0) == BIT_AND_EXPR
10744 && TREE_CODE (arg1) == BIT_AND_EXPR
10745 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10746 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10747 && integer_zerop (const_binop (BIT_AND_EXPR,
10748 TREE_OPERAND (arg0, 1),
10749 TREE_OPERAND (arg1, 1))))
10751 code = BIT_IOR_EXPR;
10752 goto bit_ior;
10755 /* (X | Y) ^ X -> Y & ~X. */
10756 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10757 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10759 tree t2 = TREE_OPERAND (arg0, 1);
10760 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10761 arg1);
10762 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10763 fold_convert_loc (loc, type, t2),
10764 fold_convert_loc (loc, type, t1));
10765 return t1;
10768 /* (Y | X) ^ X -> Y & ~X. */
10769 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10770 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10772 tree t2 = TREE_OPERAND (arg0, 0);
10773 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10774 arg1);
10775 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10776 fold_convert_loc (loc, type, t2),
10777 fold_convert_loc (loc, type, t1));
10778 return t1;
10781 /* X ^ (X | Y) -> Y & ~X. */
10782 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10783 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10785 tree t2 = TREE_OPERAND (arg1, 1);
10786 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10787 arg0);
10788 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10789 fold_convert_loc (loc, type, t2),
10790 fold_convert_loc (loc, type, t1));
10791 return t1;
10794 /* X ^ (Y | X) -> Y & ~X. */
10795 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10796 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10798 tree t2 = TREE_OPERAND (arg1, 0);
10799 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10800 arg0);
10801 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10802 fold_convert_loc (loc, type, t2),
10803 fold_convert_loc (loc, type, t1));
10804 return t1;
10807 /* Convert ~X ^ ~Y to X ^ Y. */
10808 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10809 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10810 return fold_build2_loc (loc, code, type,
10811 fold_convert_loc (loc, type,
10812 TREE_OPERAND (arg0, 0)),
10813 fold_convert_loc (loc, type,
10814 TREE_OPERAND (arg1, 0)));
10816 /* Convert ~X ^ C to X ^ ~C. */
10817 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10818 && TREE_CODE (arg1) == INTEGER_CST)
10819 return fold_build2_loc (loc, code, type,
10820 fold_convert_loc (loc, type,
10821 TREE_OPERAND (arg0, 0)),
10822 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10824 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10825 if (TREE_CODE (arg0) == BIT_AND_EXPR
10826 && integer_onep (TREE_OPERAND (arg0, 1))
10827 && integer_onep (arg1))
10828 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10829 build_int_cst (TREE_TYPE (arg0), 0));
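/* Illustrative sketch (not part of the original source): for int x,
   (x & 1) ^ 1 becomes (x & 1) == 0, i.e. an evenness test, which
   typically lowers to a cheaper compare sequence.  */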
10831 /* Fold (X & Y) ^ Y as ~X & Y. */
10832 if (TREE_CODE (arg0) == BIT_AND_EXPR
10833 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10835 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10836 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10837 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10838 fold_convert_loc (loc, type, arg1));
10840 /* Fold (X & Y) ^ X as ~Y & X. */
10841 if (TREE_CODE (arg0) == BIT_AND_EXPR
10842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10843 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10845 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10846 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10847 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10848 fold_convert_loc (loc, type, arg1));
10850 /* Fold X ^ (X & Y) as X & ~Y. */
10851 if (TREE_CODE (arg1) == BIT_AND_EXPR
10852 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10854 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10855 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10856 fold_convert_loc (loc, type, arg0),
10857 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10859 /* Fold X ^ (Y & X) as ~Y & X. */
10860 if (TREE_CODE (arg1) == BIT_AND_EXPR
10861 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10862 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10864 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10865 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10866 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10867 fold_convert_loc (loc, type, arg0));
10870 /* See if this can be simplified into a rotate first. If that
10871 is unsuccessful continue in the association code. */
10872 goto bit_rotate;
10874 case BIT_AND_EXPR:
10875 if (integer_all_onesp (arg1))
10876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10877 if (integer_zerop (arg1))
10878 return omit_one_operand_loc (loc, type, arg1, arg0);
10879 if (operand_equal_p (arg0, arg1, 0))
10880 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10882 /* ~X & X is always zero. */
10883 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10884 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10885 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10887 /* X & ~X is always zero. */
10888 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10889 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10890 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10892 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10893 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10894 && TREE_CODE (arg1) == INTEGER_CST
10895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10897 tree tmp1 = fold_convert_loc (loc, type, arg1);
10898 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10899 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10900 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10901 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10902 return
10903 fold_convert_loc (loc, type,
10904 fold_build2_loc (loc, BIT_IOR_EXPR,
10905 type, tmp2, tmp3));
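/* Worked example (a sketch, not in the original source):
   (x | 0x30) & 0xf3 becomes (x & 0xf3) | 0x30: the constant part
   C1 & C2 folds to 0x30 and only the variable part keeps the AND.  */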
10908 /* (X | Y) & Y is (X, Y). */
10909 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10910 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10911 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10912 /* (X | Y) & X is (Y, X). */
10913 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10914 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10915 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10916 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10917 /* X & (X | Y) is (Y, X). */
10918 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10919 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10920 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10921 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10922 /* X & (Y | X) is (Y, X). */
10923 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10924 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10925 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10926 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10928 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10929 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10930 && integer_onep (TREE_OPERAND (arg0, 1))
10931 && integer_onep (arg1))
10933 tem = TREE_OPERAND (arg0, 0);
10934 return fold_build2_loc (loc, EQ_EXPR, type,
10935 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10936 build_int_cst (TREE_TYPE (tem), 1)),
10937 build_int_cst (TREE_TYPE (tem), 0));
10939 /* Fold ~X & 1 as (X & 1) == 0. */
10940 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10941 && integer_onep (arg1))
10943 tem = TREE_OPERAND (arg0, 0);
10944 return fold_build2_loc (loc, EQ_EXPR, type,
10945 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10946 build_int_cst (TREE_TYPE (tem), 1)),
10947 build_int_cst (TREE_TYPE (tem), 0));
10950 /* Fold (X ^ Y) & Y as ~X & Y. */
10951 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10952 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10954 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10955 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10956 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10957 fold_convert_loc (loc, type, arg1));
10959 /* Fold (X ^ Y) & X as ~Y & X. */
10960 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10962 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10964 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10965 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10966 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10967 fold_convert_loc (loc, type, arg1));
10969 /* Fold X & (X ^ Y) as X & ~Y. */
10970 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10973 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10974 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10975 fold_convert_loc (loc, type, arg0),
10976 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10978 /* Fold X & (Y ^ X) as ~Y & X. */
10979 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10980 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10981 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10983 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10984 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10985 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10986 fold_convert_loc (loc, type, arg0));
10989 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10990 ((A & N) + B) & M -> (A + B) & M
10991 Similarly if (N & M) == 0,
10992 ((A | N) + B) & M -> (A + B) & M
10993 and for - instead of + (or unary - instead of +)
10994 and/or ^ instead of |.
10995 If B is constant and (B & M) == 0, fold into A & M. */
10996 if (host_integerp (arg1, 1))
10998 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
10999 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11000 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11001 && (TREE_CODE (arg0) == PLUS_EXPR
11002 || TREE_CODE (arg0) == MINUS_EXPR
11003 || TREE_CODE (arg0) == NEGATE_EXPR)
11004 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11005 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11007 tree pmop[2];
11008 int which = 0;
11009 unsigned HOST_WIDE_INT cst0;
11011 /* Now we know that arg0 is (C + D) or (C - D) or
11012 -C and arg1 (M) is == (1LL << cst) - 1.
11013 Store C into PMOP[0] and D into PMOP[1]. */
11014 pmop[0] = TREE_OPERAND (arg0, 0);
11015 pmop[1] = NULL;
11016 if (TREE_CODE (arg0) != NEGATE_EXPR)
11018 pmop[1] = TREE_OPERAND (arg0, 1);
11019 which = 1;
11022 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11023 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11024 & cst1) != cst1)
11025 which = -1;
11027 for (; which >= 0; which--)
11028 switch (TREE_CODE (pmop[which]))
11030 case BIT_AND_EXPR:
11031 case BIT_IOR_EXPR:
11032 case BIT_XOR_EXPR:
11033 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11034 != INTEGER_CST)
11035 break;
11036 /* tree_low_cst not used, because we don't care about
11037 the upper bits. */
11038 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11039 cst0 &= cst1;
11040 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11042 if (cst0 != cst1)
11043 break;
11045 else if (cst0 != 0)
11046 break;
11047 /* If C or D is of the form (A & N) where
11048 (N & M) == M, or of the form (A | N) or
11049 (A ^ N) where (N & M) == 0, replace it with A. */
11050 pmop[which] = TREE_OPERAND (pmop[which], 0);
11051 break;
11052 case INTEGER_CST:
11053 /* If C or D is an N where (N & M) == 0, it can be
11054 omitted (assumed 0). */
11055 if ((TREE_CODE (arg0) == PLUS_EXPR
11056 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11057 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11058 pmop[which] = NULL;
11059 break;
11060 default:
11061 break;
11064 /* Only build anything new if we optimized one or both arguments
11065 above. */
11066 if (pmop[0] != TREE_OPERAND (arg0, 0)
11067 || (TREE_CODE (arg0) != NEGATE_EXPR
11068 && pmop[1] != TREE_OPERAND (arg0, 1)))
11070 tree utype = TREE_TYPE (arg0);
11071 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11073 /* Perform the operations in a type that has defined
11074 overflow behavior. */
11075 utype = unsigned_type_for (TREE_TYPE (arg0));
11076 if (pmop[0] != NULL)
11077 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11078 if (pmop[1] != NULL)
11079 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11082 if (TREE_CODE (arg0) == NEGATE_EXPR)
11083 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11084 else if (TREE_CODE (arg0) == PLUS_EXPR)
11086 if (pmop[0] != NULL && pmop[1] != NULL)
11087 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11088 pmop[0], pmop[1]);
11089 else if (pmop[0] != NULL)
11090 tem = pmop[0];
11091 else if (pmop[1] != NULL)
11092 tem = pmop[1];
11093 else
11094 return build_int_cst (type, 0);
11096 else if (pmop[0] == NULL)
11097 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11098 else
11099 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11100 pmop[0], pmop[1]);
11101 /* TEM is now the new binary +, - or unary - replacement. */
11102 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11103 fold_convert_loc (loc, utype, arg1));
11104 return fold_convert_loc (loc, type, tem);
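/* Worked example (a sketch, not in the original source): with M == 0xff,
   ((a | 0x100) + b) & 0xff becomes (a + b) & 0xff, since 0x100 & 0xff
   is 0; the addition is rebuilt in an unsigned type when needed so that
   dropping the inner operation cannot introduce signed overflow.  */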
11109 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11110 if (t1 != NULL_TREE)
11111 return t1;
11112 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11113 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11114 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11116 unsigned int prec
11117 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11119 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11120 && (~TREE_INT_CST_LOW (arg1)
11121 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11122 return
11123 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11126 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11128 This results in more efficient code for machines without a NOR
11129 instruction. Combine will canonicalize to the first form
11130 which will allow use of NOR instructions provided by the
11131 backend if they exist. */
11132 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11133 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11135 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11136 build2 (BIT_IOR_EXPR, type,
11137 fold_convert_loc (loc, type,
11138 TREE_OPERAND (arg0, 0)),
11139 fold_convert_loc (loc, type,
11140 TREE_OPERAND (arg1, 0))));
11143 /* If arg0 is derived from the address of an object or function, we may
11144 be able to fold this expression using the object or function's
11145 alignment. */
11146 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11148 unsigned HOST_WIDE_INT modulus, residue;
11149 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11151 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11152 integer_onep (arg1));
11154 /* This works because modulus is a power of 2. If this weren't the
11155 case, we'd have to replace it by its greatest power-of-2
11156 divisor: modulus & -modulus. */
11157 if (low < modulus)
11158 return build_int_cst (type, residue & low);
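/* Illustrative sketch (not part of the original source): if arg0 is
   known to be 16-byte aligned (modulus 16, residue 0), then masking it
   with 15 folds to the constant 0 here, because low (15) < modulus.  */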
11161 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11162 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11163 if the new mask might be further optimized. */
11164 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11165 || TREE_CODE (arg0) == RSHIFT_EXPR)
11166 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11167 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11168 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11169 < TYPE_PRECISION (TREE_TYPE (arg0))
11170 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11171 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11173 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11174 unsigned HOST_WIDE_INT mask
11175 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11176 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11177 tree shift_type = TREE_TYPE (arg0);
11179 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11180 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11181 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11182 && TYPE_PRECISION (TREE_TYPE (arg0))
11183 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11185 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11186 tree arg00 = TREE_OPERAND (arg0, 0);
11187 /* See if more bits can be proven as zero because of
11188 zero extension. */
11189 if (TREE_CODE (arg00) == NOP_EXPR
11190 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11192 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11193 if (TYPE_PRECISION (inner_type)
11194 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11195 && TYPE_PRECISION (inner_type) < prec)
11197 prec = TYPE_PRECISION (inner_type);
11198 /* See if we can shorten the right shift. */
11199 if (shiftc < prec)
11200 shift_type = inner_type;
11203 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11204 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11205 zerobits <<= prec - shiftc;
11206 /* For an arithmetic shift, if the sign bit could be set,
11207 zerobits can actually contain sign bits, so no transformation
11208 is possible unless MASK masks them all away. In that case
11209 the shift needs to be converted into a logical shift. */
11210 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11211 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11213 if ((mask & zerobits) == 0)
11214 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11215 else
11216 zerobits = 0;
11220 /* ((X << 16) & 0xff00) is (X, 0). */
11221 if ((mask & zerobits) == mask)
11222 return omit_one_operand_loc (loc, type,
11223 build_int_cst (type, 0), arg0);
11225 newmask = mask | zerobits;
11226 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11228 unsigned int prec;
11230 /* Only do the transformation if NEWMASK is some integer
11231 mode's mask. */
11232 for (prec = BITS_PER_UNIT;
11233 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11234 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11235 break;
11236 if (prec < HOST_BITS_PER_WIDE_INT
11237 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11239 tree newmaskt;
11241 if (shift_type != TREE_TYPE (arg0))
11243 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11244 fold_convert_loc (loc, shift_type,
11245 TREE_OPERAND (arg0, 0)),
11246 TREE_OPERAND (arg0, 1));
11247 tem = fold_convert_loc (loc, type, tem);
11249 else
11250 tem = op0;
11251 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11252 if (!tree_int_cst_equal (newmaskt, arg1))
11253 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
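/* Worked example (a sketch, not in the original source): for unsigned
   32-bit x, (x >> 24) & 0xff has zerobits == 0xffffff00, so newmask is
   all ones; the AND is rebuilt with the all-ones mask and then folds
   away entirely, leaving just x >> 24.  */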
11258 goto associate;
11260 case RDIV_EXPR:
11261 /* Don't touch a floating-point divide by zero unless the mode
11262 of the constant can represent infinity. */
11263 if (TREE_CODE (arg1) == REAL_CST
11264 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11265 && real_zerop (arg1))
11266 return NULL_TREE;
11268 /* Optimize A / A to 1.0 if we don't care about
11269 NaNs or Infinities. Skip the transformation
11270 for non-real operands. */
11271 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11272 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11273 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11274 && operand_equal_p (arg0, arg1, 0))
11276 tree r = build_real (TREE_TYPE (arg0), dconst1);
11278 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11281 /* The complex version of the above A / A optimization. */
11282 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11283 && operand_equal_p (arg0, arg1, 0))
11285 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11286 if (! HONOR_NANS (TYPE_MODE (elem_type))
11287 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11289 tree r = build_real (elem_type, dconst1);
11290 /* omit_two_operands will call fold_convert for us. */
11291 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11295 /* (-A) / (-B) -> A / B */
11296 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11297 return fold_build2_loc (loc, RDIV_EXPR, type,
11298 TREE_OPERAND (arg0, 0),
11299 negate_expr (arg1));
11300 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11301 return fold_build2_loc (loc, RDIV_EXPR, type,
11302 negate_expr (arg0),
11303 TREE_OPERAND (arg1, 0));
11305 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11306 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11307 && real_onep (arg1))
11308 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11310 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11311 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11312 && real_minus_onep (arg1))
11313 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11314 negate_expr (arg0)));
11316 /* If ARG1 is a constant, we can convert this to a multiply by the
11317 reciprocal. This does not have the same rounding properties,
11318 so only do this if -freciprocal-math. We can actually
11319 always safely do it if ARG1 is a power of two, but it's hard to
11320 tell if it is or not in a portable manner. */
11321 if (TREE_CODE (arg1) == REAL_CST)
11323 if (flag_reciprocal_math
11324 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11325 arg1)))
11326 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11327 /* Find the reciprocal if optimizing and the result is exact. */
11328 if (optimize)
11330 REAL_VALUE_TYPE r;
11331 r = TREE_REAL_CST (arg1);
11332 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11334 tem = build_real (type, r);
11335 return fold_build2_loc (loc, MULT_EXPR, type,
11336 fold_convert_loc (loc, type, arg0), tem);
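/* Illustrative sketch (not part of the original source): x / 4.0 is
   rewritten as x * 0.25 even without -freciprocal-math, because 0.25
   is an exact reciprocal and the product rounds identically.  */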
11340 /* Convert A/B/C to A/(B*C). */
11341 if (flag_reciprocal_math
11342 && TREE_CODE (arg0) == RDIV_EXPR)
11343 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11344 fold_build2_loc (loc, MULT_EXPR, type,
11345 TREE_OPERAND (arg0, 1), arg1));
11347 /* Convert A/(B/C) to (A/B)*C. */
11348 if (flag_reciprocal_math
11349 && TREE_CODE (arg1) == RDIV_EXPR)
11350 return fold_build2_loc (loc, MULT_EXPR, type,
11351 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11352 TREE_OPERAND (arg1, 0)),
11353 TREE_OPERAND (arg1, 1));
11355 /* Convert C1/(X*C2) into (C1/C2)/X. */
11356 if (flag_reciprocal_math
11357 && TREE_CODE (arg1) == MULT_EXPR
11358 && TREE_CODE (arg0) == REAL_CST
11359 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11361 tree tem = const_binop (RDIV_EXPR, arg0,
11362 TREE_OPERAND (arg1, 1));
11363 if (tem)
11364 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11365 TREE_OPERAND (arg1, 0));
11368 if (flag_unsafe_math_optimizations)
11370 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11371 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11373 /* Optimize sin(x)/cos(x) as tan(x). */
11374 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11375 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11376 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11377 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11378 CALL_EXPR_ARG (arg1, 0), 0))
11380 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11382 if (tanfn != NULL_TREE)
11383 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11386 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11387 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11388 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11389 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11390 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11391 CALL_EXPR_ARG (arg1, 0), 0))
11393 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11395 if (tanfn != NULL_TREE)
11397 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11398 CALL_EXPR_ARG (arg0, 0));
11399 return fold_build2_loc (loc, RDIV_EXPR, type,
11400 build_real (type, dconst1), tmp);
11404 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11405 NaNs or Infinities. */
11406 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11407 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11408 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11410 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11411 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11413 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11414 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11415 && operand_equal_p (arg00, arg01, 0))
11417 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11419 if (cosfn != NULL_TREE)
11420 return build_call_expr_loc (loc, cosfn, 1, arg00);
11424 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11425 NaNs or Infinities. */
11426 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11427 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11428 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11430 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11431 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11433 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11434 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11435 && operand_equal_p (arg00, arg01, 0))
11437 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11439 if (cosfn != NULL_TREE)
11441 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11442 return fold_build2_loc (loc, RDIV_EXPR, type,
11443 build_real (type, dconst1),
11444 tmp);
11449 /* Optimize pow(x,c)/x as pow(x,c-1). */
11450 if (fcode0 == BUILT_IN_POW
11451 || fcode0 == BUILT_IN_POWF
11452 || fcode0 == BUILT_IN_POWL)
11454 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11455 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11456 if (TREE_CODE (arg01) == REAL_CST
11457 && !TREE_OVERFLOW (arg01)
11458 && operand_equal_p (arg1, arg00, 0))
11460 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11461 REAL_VALUE_TYPE c;
11462 tree arg;
11464 c = TREE_REAL_CST (arg01);
11465 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11466 arg = build_real (type, c);
11467 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11471 /* Optimize a/root(b/c) into a*root(c/b). */
11472 if (BUILTIN_ROOT_P (fcode1))
11474 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11476 if (TREE_CODE (rootarg) == RDIV_EXPR)
11478 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11479 tree b = TREE_OPERAND (rootarg, 0);
11480 tree c = TREE_OPERAND (rootarg, 1);
11482 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11484 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11485 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11489 /* Optimize x/expN(y) into x*expN(-y). */
11490 if (BUILTIN_EXPONENT_P (fcode1))
11492 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11493 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11494 arg1 = build_call_expr_loc (loc,
11495 expfn, 1,
11496 fold_convert_loc (loc, type, arg));
11497 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11500 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11501 if (fcode1 == BUILT_IN_POW
11502 || fcode1 == BUILT_IN_POWF
11503 || fcode1 == BUILT_IN_POWL)
11505 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11506 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11507 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11508 tree neg11 = fold_convert_loc (loc, type,
11509 negate_expr (arg11));
11510 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11511 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11514 return NULL_TREE;
11516 case TRUNC_DIV_EXPR:
11517 /* Optimize (X & (-A)) / A where A is a power of 2,
11518 to X >> log2(A) */
11519 if (TREE_CODE (arg0) == BIT_AND_EXPR
11520 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11521 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11523 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11524 arg1, TREE_OPERAND (arg0, 1));
11525 if (sum && integer_zerop (sum)) {
11526 unsigned long pow2;
11528 if (TREE_INT_CST_LOW (arg1))
11529 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11530 else
11531 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11532 + HOST_BITS_PER_WIDE_INT;
11534 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11535 TREE_OPERAND (arg0, 0),
11536 build_int_cst (integer_type_node, pow2));
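/* Illustrative sketch (not part of the original source): for signed x,
   (x & -16) / 16 becomes x >> 4; the AND with -16 guarantees the
   dividend is already a multiple of 16, so the shift is exact.  */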
11540 /* Fall thru */
11542 case FLOOR_DIV_EXPR:
11543 /* Simplify A / (B << N) where A and B are positive and B is
11544 a power of 2, to A >> (N + log2(B)). */
11545 strict_overflow_p = false;
11546 if (TREE_CODE (arg1) == LSHIFT_EXPR
11547 && (TYPE_UNSIGNED (type)
11548 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11550 tree sval = TREE_OPERAND (arg1, 0);
11551 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11553 tree sh_cnt = TREE_OPERAND (arg1, 1);
11554 unsigned long pow2;
11556 if (TREE_INT_CST_LOW (sval))
11557 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11558 else
11559 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11560 + HOST_BITS_PER_WIDE_INT;
11562 if (strict_overflow_p)
11563 fold_overflow_warning (("assuming signed overflow does not "
11564 "occur when simplifying A / (B << N)"),
11565 WARN_STRICT_OVERFLOW_MISC);
11567 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11568 sh_cnt,
11569 build_int_cst (TREE_TYPE (sh_cnt),
11570 pow2));
11571 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11572 fold_convert_loc (loc, type, arg0), sh_cnt);
11576 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11577 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11578 if (INTEGRAL_TYPE_P (type)
11579 && TYPE_UNSIGNED (type)
11580 && code == FLOOR_DIV_EXPR)
11581 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11583 /* Fall thru */
11585 case ROUND_DIV_EXPR:
11586 case CEIL_DIV_EXPR:
11587 case EXACT_DIV_EXPR:
11588 if (integer_onep (arg1))
11589 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11590 if (integer_zerop (arg1))
11591 return NULL_TREE;
11592 /* X / -1 is -X. */
11593 if (!TYPE_UNSIGNED (type)
11594 && TREE_CODE (arg1) == INTEGER_CST
11595 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11596 && TREE_INT_CST_HIGH (arg1) == -1)
11597 return fold_convert_loc (loc, type, negate_expr (arg0));
11599 /* Convert -A / -B to A / B when the type is signed and overflow is
11600 undefined. */
11601 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11602 && TREE_CODE (arg0) == NEGATE_EXPR
11603 && negate_expr_p (arg1))
11605 if (INTEGRAL_TYPE_P (type))
11606 fold_overflow_warning (("assuming signed overflow does not occur "
11607 "when distributing negation across "
11608 "division"),
11609 WARN_STRICT_OVERFLOW_MISC);
11610 return fold_build2_loc (loc, code, type,
11611 fold_convert_loc (loc, type,
11612 TREE_OPERAND (arg0, 0)),
11613 fold_convert_loc (loc, type,
11614 negate_expr (arg1)));
11616 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11617 && TREE_CODE (arg1) == NEGATE_EXPR
11618 && negate_expr_p (arg0))
11620 if (INTEGRAL_TYPE_P (type))
11621 fold_overflow_warning (("assuming signed overflow does not occur "
11622 "when distributing negation across "
11623 "division"),
11624 WARN_STRICT_OVERFLOW_MISC);
11625 return fold_build2_loc (loc, code, type,
11626 fold_convert_loc (loc, type,
11627 negate_expr (arg0)),
11628 fold_convert_loc (loc, type,
11629 TREE_OPERAND (arg1, 0)));
11632 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11633 operation, EXACT_DIV_EXPR.
11635 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11636 At one time others generated faster code; it's not clear if they
11637 still do after the last round of changes to the DIV code in expmed.c. */
11638 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11639 && multiple_of_p (type, arg0, arg1))
11640 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11642 strict_overflow_p = false;
11643 if (TREE_CODE (arg1) == INTEGER_CST
11644 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11645 &strict_overflow_p)))
11647 if (strict_overflow_p)
11648 fold_overflow_warning (("assuming signed overflow does not occur "
11649 "when simplifying division"),
11650 WARN_STRICT_OVERFLOW_MISC);
11651 return fold_convert_loc (loc, type, tem);
11654 return NULL_TREE;
11656 case CEIL_MOD_EXPR:
11657 case FLOOR_MOD_EXPR:
11658 case ROUND_MOD_EXPR:
11659 case TRUNC_MOD_EXPR:
11660 /* X % 1 is always zero, but be sure to preserve any side
11661 effects in X. */
11662 if (integer_onep (arg1))
11663 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11665 /* For X % 0, return X % 0 unchanged so that we can get the
11666 proper warnings and errors. */
11667 if (integer_zerop (arg1))
11668 return NULL_TREE;
11670 /* 0 % X is always zero, but be sure to preserve any side
11671 effects in X. Place this after checking for X == 0. */
11672 if (integer_zerop (arg0))
11673 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11675 /* X % -1 is zero. */
11676 if (!TYPE_UNSIGNED (type)
11677 && TREE_CODE (arg1) == INTEGER_CST
11678 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11679 && TREE_INT_CST_HIGH (arg1) == -1)
11680 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11682 /* X % -C is the same as X % C. */
11683 if (code == TRUNC_MOD_EXPR
11684 && !TYPE_UNSIGNED (type)
11685 && TREE_CODE (arg1) == INTEGER_CST
11686 && !TREE_OVERFLOW (arg1)
11687 && TREE_INT_CST_HIGH (arg1) < 0
11688 && !TYPE_OVERFLOW_TRAPS (type)
11689 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11690 && !sign_bit_p (arg1, arg1))
11691 return fold_build2_loc (loc, code, type,
11692 fold_convert_loc (loc, type, arg0),
11693 fold_convert_loc (loc, type,
11694 negate_expr (arg1)));
11696 /* X % -Y is the same as X % Y. */
11697 if (code == TRUNC_MOD_EXPR
11698 && !TYPE_UNSIGNED (type)
11699 && TREE_CODE (arg1) == NEGATE_EXPR
11700 && !TYPE_OVERFLOW_TRAPS (type))
11701 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11702 fold_convert_loc (loc, type,
11703 TREE_OPERAND (arg1, 0)));
11705 strict_overflow_p = false;
11706 if (TREE_CODE (arg1) == INTEGER_CST
11707 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11708 &strict_overflow_p)))
11710 if (strict_overflow_p)
11711 fold_overflow_warning (("assuming signed overflow does not occur "
11712 "when simplifying modulus"),
11713 WARN_STRICT_OVERFLOW_MISC);
11714 return fold_convert_loc (loc, type, tem);
11717 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11718 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11719 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11720 && (TYPE_UNSIGNED (type)
11721 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11723 tree c = arg1;
11724 /* Also optimize A % (C << N) where C is a power of 2,
11725 to A & ((C << N) - 1). */
11726 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11727 c = TREE_OPERAND (arg1, 0);
11729 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11731 tree mask
11732 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11733 build_int_cst (TREE_TYPE (arg1), 1));
11734 if (strict_overflow_p)
11735 fold_overflow_warning (("assuming signed overflow does not "
11736 "occur when simplifying "
11737 "X % (power of two)"),
11738 WARN_STRICT_OVERFLOW_MISC);
11739 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11740 fold_convert_loc (loc, type, arg0),
11741 fold_convert_loc (loc, type, mask));
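/* Illustrative sketch (not part of the original source): for unsigned x,
   x % 8 becomes x & 7, and x % (4 << n) becomes x & ((4 << n) - 1),
   avoiding the division entirely.  */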
11745 return NULL_TREE;
11747 case LROTATE_EXPR:
11748 case RROTATE_EXPR:
11749 if (integer_all_onesp (arg0))
11750 return omit_one_operand_loc (loc, type, arg0, arg1);
11751 goto shift;
11753 case RSHIFT_EXPR:
11754 /* Optimize -1 >> x for arithmetic right shifts. */
11755 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11756 && tree_expr_nonnegative_p (arg1))
11757 return omit_one_operand_loc (loc, type, arg0, arg1);
11758 /* ... fall through ... */
11760 case LSHIFT_EXPR:
11761 shift:
11762 if (integer_zerop (arg1))
11763 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11764 if (integer_zerop (arg0))
11765 return omit_one_operand_loc (loc, type, arg0, arg1);
11767 /* Since negative shift count is not well-defined,
11768 don't try to compute it in the compiler. */
11769 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11770 return NULL_TREE;
11772 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11773 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11774 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11775 && host_integerp (TREE_OPERAND (arg0, 1), false)
11776 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11778 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11779 + TREE_INT_CST_LOW (arg1));
11781 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11782 being well defined. */
11783 if (low >= TYPE_PRECISION (type))
11785 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11786 low = low % TYPE_PRECISION (type);
11787 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11788 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11789 TREE_OPERAND (arg0, 0));
11790 else
11791 low = TYPE_PRECISION (type) - 1;
11794 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11795 build_int_cst (type, low));
11798 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11799 into x & ((unsigned)-1 >> c) for unsigned types. */
11800 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11801 || (TYPE_UNSIGNED (type)
11802 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11803 && host_integerp (arg1, false)
11804 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11805 && host_integerp (TREE_OPERAND (arg0, 1), false)
11806 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11808 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11809 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11810 tree lshift;
11811 tree arg00;
11813 if (low0 == low1)
11815 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11817 lshift = build_int_cst (type, -1);
11818 lshift = int_const_binop (code, lshift, arg1, 0);
11820 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
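/* Illustrative sketch (not part of the original source): for unsigned
   32-bit x, (x >> 4) << 4 becomes x & 0xfffffff0, replacing the pair of
   shifts with a single mask that clears the low four bits.  */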
11824 /* Rewrite an LROTATE_EXPR by a constant into an
11825 RROTATE_EXPR by a new constant. */
11826 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11828 tree tem = build_int_cst (TREE_TYPE (arg1),
11829 TYPE_PRECISION (type));
11830 tem = const_binop (MINUS_EXPR, tem, arg1);
11831 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11834 /* If we have a rotate of a bit operation with the rotate count and
11835 the second operand of the bit operation both constant,
11836 permute the two operations. */
11837 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11838 && (TREE_CODE (arg0) == BIT_AND_EXPR
11839 || TREE_CODE (arg0) == BIT_IOR_EXPR
11840 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11842 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11843 fold_build2_loc (loc, code, type,
11844 TREE_OPERAND (arg0, 0), arg1),
11845 fold_build2_loc (loc, code, type,
11846 TREE_OPERAND (arg0, 1), arg1));
11848 /* Two consecutive rotates adding up to the precision of the
11849 type can be ignored. */
11850 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11851 && TREE_CODE (arg0) == RROTATE_EXPR
11852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11853 && TREE_INT_CST_HIGH (arg1) == 0
11854 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11855 && ((TREE_INT_CST_LOW (arg1)
11856 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11857 == (unsigned int) TYPE_PRECISION (type)))
11858 return TREE_OPERAND (arg0, 0);
11860 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11861 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11862 if the latter can be further optimized. */
11863 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11864 && TREE_CODE (arg0) == BIT_AND_EXPR
11865 && TREE_CODE (arg1) == INTEGER_CST
11866 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11868 tree mask = fold_build2_loc (loc, code, type,
11869 fold_convert_loc (loc, type,
11870 TREE_OPERAND (arg0, 1)),
11871 arg1);
11872 tree shift = fold_build2_loc (loc, code, type,
11873 fold_convert_loc (loc, type,
11874 TREE_OPERAND (arg0, 0)),
11875 arg1);
11876 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11877 if (tem)
11878 return tem;
11881 return NULL_TREE;
11883 case MIN_EXPR:
11884 if (operand_equal_p (arg0, arg1, 0))
11885 return omit_one_operand_loc (loc, type, arg0, arg1);
11886 if (INTEGRAL_TYPE_P (type)
11887 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11888 return omit_one_operand_loc (loc, type, arg1, arg0);
11889 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11890 if (tem)
11891 return tem;
11892 goto associate;
11894 case MAX_EXPR:
11895 if (operand_equal_p (arg0, arg1, 0))
11896 return omit_one_operand_loc (loc, type, arg0, arg1);
11897 if (INTEGRAL_TYPE_P (type)
11898 && TYPE_MAX_VALUE (type)
11899 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11900 return omit_one_operand_loc (loc, type, arg1, arg0);
11901 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11902 if (tem)
11903 return tem;
11904 goto associate;
11906 case TRUTH_ANDIF_EXPR:
11907 /* Note that the operands of this must be ints
11908 and their values must be 0 or 1.
11909 ("true" is a fixed value perhaps depending on the language.) */
11910 /* If first arg is constant zero, return it. */
11911 if (integer_zerop (arg0))
11912 return fold_convert_loc (loc, type, arg0);
11913 case TRUTH_AND_EXPR:
11914 /* If either arg is constant true, drop it. */
11915 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11916 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11917 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11918 /* Preserve sequence points. */
11919 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11920 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11921 /* If second arg is constant zero, result is zero, but first arg
11922 must be evaluated. */
11923 if (integer_zerop (arg1))
11924 return omit_one_operand_loc (loc, type, arg1, arg0);
11925 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11926 case will be handled here. */
11927 if (integer_zerop (arg0))
11928 return omit_one_operand_loc (loc, type, arg0, arg1);
11930 /* !X && X is always false. */
11931 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11932 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11933 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11934 /* X && !X is always false. */
11935 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11936 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11937 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11939 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11940 means A >= Y && A != MAX, but in this case we know that
11941 A < X <= MAX. */
11943 if (!TREE_SIDE_EFFECTS (arg0)
11944 && !TREE_SIDE_EFFECTS (arg1))
11946 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11947 if (tem && !operand_equal_p (tem, arg0, 0))
11948 return fold_build2_loc (loc, code, type, tem, arg1);
11950 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11951 if (tem && !operand_equal_p (tem, arg1, 0))
11952 return fold_build2_loc (loc, code, type, arg0, tem);
11955 truth_andor:
11956 /* We only do these simplifications if we are optimizing. */
11957 if (!optimize)
11958 return NULL_TREE;
11960 /* Check for things like (A || B) && (A || C). We can convert this
11961 to A || (B && C). Note that either operator can be any of the four
11962 truth and/or operations and the transformation will still be
11963 valid. Also note that we only care about order for the
11964 ANDIF and ORIF operators. If B contains side effects, this
11965 might change the truth-value of A. */
11966 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11967 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11968 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11969 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11970 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11971 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11973 tree a00 = TREE_OPERAND (arg0, 0);
11974 tree a01 = TREE_OPERAND (arg0, 1);
11975 tree a10 = TREE_OPERAND (arg1, 0);
11976 tree a11 = TREE_OPERAND (arg1, 1);
11977 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11978 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11979 && (code == TRUTH_AND_EXPR
11980 || code == TRUTH_OR_EXPR));
11982 if (operand_equal_p (a00, a10, 0))
11983 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11984 fold_build2_loc (loc, code, type, a01, a11));
11985 else if (commutative && operand_equal_p (a00, a11, 0))
11986 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11987 fold_build2_loc (loc, code, type, a01, a10));
11988 else if (commutative && operand_equal_p (a01, a10, 0))
11989 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11990 fold_build2_loc (loc, code, type, a00, a11));
11992 /* This case is tricky because we must either have commutative
11993 operators or else A10 must not have side-effects. */
11995 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11996 && operand_equal_p (a01, a11, 0))
11997 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11998 fold_build2_loc (loc, code, type, a00, a10),
11999 a01);
12002 /* See if we can build a range comparison. */
12003 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12004 return tem;
12006 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12007 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12009 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12010 if (tem)
12011 return fold_build2_loc (loc, code, type, tem, arg1);
12014 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12015 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12017 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12018 if (tem)
12019 return fold_build2_loc (loc, code, type, arg0, tem);
12022 /* Check for the possibility of merging component references. If our
12023 lhs is another similar operation, try to merge its rhs with our
12024 rhs. Then try to merge our lhs and rhs. */
12025 if (TREE_CODE (arg0) == code
12026 && 0 != (tem = fold_truthop (loc, code, type,
12027 TREE_OPERAND (arg0, 1), arg1)))
12028 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12030 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12031 return tem;
12033 return NULL_TREE;
12035 case TRUTH_ORIF_EXPR:
12036 /* Note that the operands of this must be ints
12037 and their values must be 0 or true.
12038 ("true" is a fixed value perhaps depending on the language.) */
12039 /* If first arg is constant true, return it. */
12040 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12041 return fold_convert_loc (loc, type, arg0);
12042 case TRUTH_OR_EXPR:
12043 /* If either arg is constant zero, drop it. */
12044 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12045 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12046 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12047 /* Preserve sequence points. */
12048 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12049 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12050 /* If second arg is constant true, result is true, but we must
12051 evaluate first arg. */
12052 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12053 return omit_one_operand_loc (loc, type, arg1, arg0);
12054 /* Likewise for first arg, but note this only occurs here for
12055 TRUTH_OR_EXPR. */
12056 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12057 return omit_one_operand_loc (loc, type, arg0, arg1);
12059 /* !X || X is always true. */
12060 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12061 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12062 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12063 /* X || !X is always true. */
12064 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12065 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12066 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12068 /* (X && !Y) || (!X && Y) is X ^ Y */
12069 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12070 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12072 tree a0, a1, l0, l1, n0, n1;
12074 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12075 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12077 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12078 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12080 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12081 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12083 if ((operand_equal_p (n0, a0, 0)
12084 && operand_equal_p (n1, a1, 0))
12085 || (operand_equal_p (n0, a1, 0)
12086 && operand_equal_p (n1, a0, 0)))
12087 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12089 goto truth_andor;
12091 case TRUTH_XOR_EXPR:
12092 /* If the second arg is constant zero, drop it. */
12093 if (integer_zerop (arg1))
12094 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12095 /* If the second arg is constant true, this is a logical inversion. */
12096 if (integer_onep (arg1))
12098 /* Only call invert_truthvalue if operand is a truth value. */
12099 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12100 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12101 else
12102 tem = invert_truthvalue_loc (loc, arg0);
12103 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12105 /* Identical arguments cancel to zero. */
12106 if (operand_equal_p (arg0, arg1, 0))
12107 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12109 /* !X ^ X is always true. */
12110 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12111 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12112 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12114 /* X ^ !X is always true. */
12115 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12117 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12119 return NULL_TREE;
12121 case EQ_EXPR:
12122 case NE_EXPR:
12123 STRIP_NOPS (arg0);
12124 STRIP_NOPS (arg1);
12126 tem = fold_comparison (loc, code, type, op0, op1);
12127 if (tem != NULL_TREE)
12128 return tem;
12130 /* bool_var != 0 becomes bool_var. */
12131 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12132 && code == NE_EXPR)
12133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12135 /* bool_var == 1 becomes bool_var. */
12136 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12137 && code == EQ_EXPR)
12138 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12140 /* bool_var != 1 becomes !bool_var. */
12141 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12142 && code == NE_EXPR)
12143 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12144 fold_convert_loc (loc, type, arg0));
12146 /* bool_var == 0 becomes !bool_var. */
12147 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12148 && code == EQ_EXPR)
12149 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12150 fold_convert_loc (loc, type, arg0));
12152 /* !exp != 0 becomes !exp */
12153 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12154 && code == NE_EXPR)
12155 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12157 /* If this is an equality comparison of the address of two non-weak,
12158 unaliased symbols neither of which are extern (since we do not
12159 have access to attributes for externs), then we know the result. */
12160 if (TREE_CODE (arg0) == ADDR_EXPR
12161 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12162 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12163 && ! lookup_attribute ("alias",
12164 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12165 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12166 && TREE_CODE (arg1) == ADDR_EXPR
12167 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12168 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12169 && ! lookup_attribute ("alias",
12170 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12171 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12173 /* We know that we're looking at the address of two
12174 non-weak, unaliased, static _DECL nodes.
12176 It is both wasteful and incorrect to call operand_equal_p
12177 to compare the two ADDR_EXPR nodes. It is wasteful in that
12178 all we need to do is test pointer equality for the arguments
12179 to the two ADDR_EXPR nodes. It is incorrect to use
12180 operand_equal_p as that function is NOT equivalent to a
12181 C equality test. It can in fact return false for two
12182 objects which would test as equal using the C equality
12183 operator. */
12184 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12185 return constant_boolean_node (equal
12186 ? code == EQ_EXPR : code != EQ_EXPR,
12187 type);
12190 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12191 a MINUS_EXPR of a constant, we can convert it into a comparison with
12192 a revised constant as long as no overflow occurs. */
12193 if (TREE_CODE (arg1) == INTEGER_CST
12194 && (TREE_CODE (arg0) == PLUS_EXPR
12195 || TREE_CODE (arg0) == MINUS_EXPR)
12196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12197 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12198 ? MINUS_EXPR : PLUS_EXPR,
12199 fold_convert_loc (loc, TREE_TYPE (arg0),
12200 arg1),
12201 TREE_OPERAND (arg0, 1)))
12202 && !TREE_OVERFLOW (tem))
12203 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
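/* For example, assuming the adjusted constant does not overflow:
   X + 9 == 12 folds to X == 3, and X - 4 != 10 folds to X != 14. */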
12205 /* Similarly for a NEGATE_EXPR. */
12206 if (TREE_CODE (arg0) == NEGATE_EXPR
12207 && TREE_CODE (arg1) == INTEGER_CST
12208 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12209 arg1)))
12210 && TREE_CODE (tem) == INTEGER_CST
12211 && !TREE_OVERFLOW (tem))
12212 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12214 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12215 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12216 && TREE_CODE (arg1) == INTEGER_CST
12217 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12218 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12219 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12220 fold_convert_loc (loc,
12221 TREE_TYPE (arg0),
12222 arg1),
12223 TREE_OPERAND (arg0, 1)));
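/* E.g. X ^ 4 == 6 folds to X == 2, since 4 ^ 6 == 2. */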
12225 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12226 if ((TREE_CODE (arg0) == PLUS_EXPR
12227 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12228 || TREE_CODE (arg0) == MINUS_EXPR)
12229 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12230 0)),
12231 arg1, 0)
12232 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12233 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12235 tree val = TREE_OPERAND (arg0, 1);
12236 return omit_two_operands_loc (loc, type,
12237 fold_build2_loc (loc, code, type,
12238 val,
12239 build_int_cst (TREE_TYPE (val),
12240 0)),
12241 TREE_OPERAND (arg0, 0), arg1);
12244 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12245 if (TREE_CODE (arg0) == MINUS_EXPR
12246 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12247 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12248 1)),
12249 arg1, 0)
12250 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12252 return omit_two_operands_loc (loc, type,
12253 code == NE_EXPR
12254 ? boolean_true_node : boolean_false_node,
12255 TREE_OPERAND (arg0, 1), arg1);
12258 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12259 for !=. Don't do this for ordered comparisons due to overflow. */
12260 if (TREE_CODE (arg0) == MINUS_EXPR
12261 && integer_zerop (arg1))
12262 return fold_build2_loc (loc, code, type,
12263 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12265 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12266 if (TREE_CODE (arg0) == ABS_EXPR
12267 && (integer_zerop (arg1) || real_zerop (arg1)))
12268 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12270 /* If this is an EQ or NE comparison with zero and ARG0 is
12271 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12272 two operations, but the latter can be done in one less insn
12273 on machines that have only two-operand insns or on which a
12274 constant cannot be the first operand. */
12275 if (TREE_CODE (arg0) == BIT_AND_EXPR
12276 && integer_zerop (arg1))
12278 tree arg00 = TREE_OPERAND (arg0, 0);
12279 tree arg01 = TREE_OPERAND (arg0, 1);
12280 if (TREE_CODE (arg00) == LSHIFT_EXPR
12281 && integer_onep (TREE_OPERAND (arg00, 0)))
12283 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12284 arg01, TREE_OPERAND (arg00, 1));
12285 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12286 build_int_cst (TREE_TYPE (arg0), 1));
12287 return fold_build2_loc (loc, code, type,
12288 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12289 arg1);
12291 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12292 && integer_onep (TREE_OPERAND (arg01, 0)))
12294 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12295 arg00, TREE_OPERAND (arg01, 1));
12296 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12297 build_int_cst (TREE_TYPE (arg0), 1));
12298 return fold_build2_loc (loc, code, type,
12299 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12300 arg1);
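/* E.g. ((1 << N) & B) != 0 folds to ((B >> N) & 1) != 0, avoiding
   having to materialize the shifted constant one first. */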
12304 /* If this is an NE or EQ comparison of zero against the result of a
12305 signed MOD operation whose second operand is a power of 2, make
12306 the MOD operation unsigned since it is simpler and equivalent. */
12307 if (integer_zerop (arg1)
12308 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12309 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12310 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12311 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12312 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12313 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12315 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12316 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12317 fold_convert_loc (loc, newtype,
12318 TREE_OPERAND (arg0, 0)),
12319 fold_convert_loc (loc, newtype,
12320 TREE_OPERAND (arg0, 1)));
12322 return fold_build2_loc (loc, code, type, newmod,
12323 fold_convert_loc (loc, newtype, arg1));
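/* E.g. for signed X, X % 8 == 0 folds to (unsigned) X % 8 == 0;
   divisibility by a power of two does not depend on the sign, and
   the unsigned remainder is a simple bit mask. */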
12326 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12327 C1 is a valid shift constant, and C2 is a power of two, i.e.
12328 a single bit. */
12329 if (TREE_CODE (arg0) == BIT_AND_EXPR
12330 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12331 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12332 == INTEGER_CST
12333 && integer_pow2p (TREE_OPERAND (arg0, 1))
12334 && integer_zerop (arg1))
12336 tree itype = TREE_TYPE (arg0);
12337 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12338 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12340 /* Check for a valid shift count. */
12341 if (TREE_INT_CST_HIGH (arg001) == 0
12342 && TREE_INT_CST_LOW (arg001) < prec)
12344 tree arg01 = TREE_OPERAND (arg0, 1);
12345 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12346 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12347 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12348 can be rewritten as (X & (C2 << C1)) != 0. */
12349 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12351 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12352 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12353 return fold_build2_loc (loc, code, type, tem,
12354 fold_convert_loc (loc, itype, arg1));
12356 /* Otherwise, for signed (arithmetic) shifts,
12357 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12358 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12359 else if (!TYPE_UNSIGNED (itype))
12360 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12361 arg000, build_int_cst (itype, 0));
12362 /* Otherwise, for unsigned (logical) shifts,
12363 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12364 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12365 else
12366 return omit_one_operand_loc (loc, type,
12367 code == EQ_EXPR ? integer_one_node
12368 : integer_zero_node,
12369 arg000);
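/* E.g. for a 32-bit X, ((X >> 3) & 16) != 0 folds to (X & 128) != 0,
   since 16 << 3 still fits in the precision. If the shifted bit fell
   outside the precision, the test would fold to X < 0 for signed X
   and to constant false for unsigned X. */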
12373 /* If this is an NE comparison of zero with an AND of one, remove the
12374 comparison since the AND will give the correct value. */
12375 if (code == NE_EXPR
12376 && integer_zerop (arg1)
12377 && TREE_CODE (arg0) == BIT_AND_EXPR
12378 && integer_onep (TREE_OPERAND (arg0, 1)))
12379 return fold_convert_loc (loc, type, arg0);
12381 /* If we have (A & C) == C where C is a power of 2, convert this into
12382 (A & C) != 0. Similarly for NE_EXPR. */
12383 if (TREE_CODE (arg0) == BIT_AND_EXPR
12384 && integer_pow2p (TREE_OPERAND (arg0, 1))
12385 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12386 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12387 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12388 integer_zero_node));
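/* E.g. (A & 8) == 8 folds to (A & 8) != 0. */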
12390 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12391 bit, then fold the expression into A < 0 or A >= 0. */
12392 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12393 if (tem)
12394 return tem;
12396 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12397 Similarly for NE_EXPR. */
12398 if (TREE_CODE (arg0) == BIT_AND_EXPR
12399 && TREE_CODE (arg1) == INTEGER_CST
12400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12402 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12403 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12404 TREE_OPERAND (arg0, 1));
12405 tree dandnotc
12406 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12407 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12408 notc);
12409 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12410 if (integer_nonzerop (dandnotc))
12411 return omit_one_operand_loc (loc, type, rslt, arg0);
12414 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12415 Similarly for NE_EXPR. */
12416 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12417 && TREE_CODE (arg1) == INTEGER_CST
12418 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12420 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12421 tree candnotd
12422 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12423 TREE_OPERAND (arg0, 1),
12424 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12425 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12426 if (integer_nonzerop (candnotd))
12427 return omit_one_operand_loc (loc, type, rslt, arg0);
12430 /* If this is a comparison of a field, we may be able to simplify it. */
12431 if ((TREE_CODE (arg0) == COMPONENT_REF
12432 || TREE_CODE (arg0) == BIT_FIELD_REF)
12433 /* Handle the constant case even without -O
12434 to make sure the warnings are given. */
12435 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12437 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12438 if (t1)
12439 return t1;
12442 /* Optimize comparisons of strlen vs zero to a compare of the
12443 first character of the string vs zero. To wit,
12444 strlen(ptr) == 0 => *ptr == 0
12445 strlen(ptr) != 0 => *ptr != 0
12446 Other cases should reduce to one of these two (or a constant)
12447 due to the return value of strlen being unsigned. */
12448 if (TREE_CODE (arg0) == CALL_EXPR
12449 && integer_zerop (arg1))
12451 tree fndecl = get_callee_fndecl (arg0);
12453 if (fndecl
12454 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12455 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12456 && call_expr_nargs (arg0) == 1
12457 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12459 tree iref = build_fold_indirect_ref_loc (loc,
12460 CALL_EXPR_ARG (arg0, 0));
12461 return fold_build2_loc (loc, code, type, iref,
12462 build_int_cst (TREE_TYPE (iref), 0));
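/* E.g. strlen (s) == 0 folds to *s == 0, avoiding the library
   call altogether. */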
12466 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12467 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12468 if (TREE_CODE (arg0) == RSHIFT_EXPR
12469 && integer_zerop (arg1)
12470 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12472 tree arg00 = TREE_OPERAND (arg0, 0);
12473 tree arg01 = TREE_OPERAND (arg0, 1);
12474 tree itype = TREE_TYPE (arg00);
12475 if (TREE_INT_CST_HIGH (arg01) == 0
12476 && TREE_INT_CST_LOW (arg01)
12477 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12479 if (TYPE_UNSIGNED (itype))
12481 itype = signed_type_for (itype);
12482 arg00 = fold_convert_loc (loc, itype, arg00);
12484 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12485 type, arg00, build_int_cst (itype, 0));
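/* E.g. for a 32-bit unsigned X, (X >> 31) != 0 folds to
   (int) X < 0, a plain sign test. */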
12489 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12490 if (integer_zerop (arg1)
12491 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12492 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12493 TREE_OPERAND (arg0, 1));
12495 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12496 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12497 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12498 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12499 build_int_cst (TREE_TYPE (arg0), 0));
12500 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12501 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12502 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12503 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12504 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12505 build_int_cst (TREE_TYPE (arg0), 0));
12507 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12508 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12509 && TREE_CODE (arg1) == INTEGER_CST
12510 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12511 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12512 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12513 TREE_OPERAND (arg0, 1), arg1));
12515 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12516 (X & C) == 0 when C is a single bit. */
12517 if (TREE_CODE (arg0) == BIT_AND_EXPR
12518 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12519 && integer_zerop (arg1)
12520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12522 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12523 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12524 TREE_OPERAND (arg0, 1));
12525 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12526 type, tem,
12527 fold_convert_loc (loc, TREE_TYPE (arg0),
12528 arg1));
12531 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12532 constant C is a power of two, i.e. a single bit. */
12533 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12534 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12535 && integer_zerop (arg1)
12536 && integer_pow2p (TREE_OPERAND (arg0, 1))
12537 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12538 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12540 tree arg00 = TREE_OPERAND (arg0, 0);
12541 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12542 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12545 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12546 when C is a power of two, i.e. a single bit. */
12547 if (TREE_CODE (arg0) == BIT_AND_EXPR
12548 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12549 && integer_zerop (arg1)
12550 && integer_pow2p (TREE_OPERAND (arg0, 1))
12551 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12552 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12554 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12555 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12556 arg000, TREE_OPERAND (arg0, 1));
12557 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12558 tem, build_int_cst (TREE_TYPE (tem), 0));
12561 if (integer_zerop (arg1)
12562 && tree_expr_nonzero_p (arg0))
12564 tree res = constant_boolean_node (code == NE_EXPR, type);
12565 return omit_one_operand_loc (loc, type, res, arg0);
12568 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12569 if (TREE_CODE (arg0) == NEGATE_EXPR
12570 && TREE_CODE (arg1) == NEGATE_EXPR)
12571 return fold_build2_loc (loc, code, type,
12572 TREE_OPERAND (arg0, 0),
12573 fold_convert_loc (loc, TREE_TYPE (arg0),
12574 TREE_OPERAND (arg1, 0)));
12576 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12577 if (TREE_CODE (arg0) == BIT_AND_EXPR
12578 && TREE_CODE (arg1) == BIT_AND_EXPR)
12580 tree arg00 = TREE_OPERAND (arg0, 0);
12581 tree arg01 = TREE_OPERAND (arg0, 1);
12582 tree arg10 = TREE_OPERAND (arg1, 0);
12583 tree arg11 = TREE_OPERAND (arg1, 1);
12584 tree itype = TREE_TYPE (arg0);
12586 if (operand_equal_p (arg01, arg11, 0))
12587 return fold_build2_loc (loc, code, type,
12588 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12589 fold_build2_loc (loc,
12590 BIT_XOR_EXPR, itype,
12591 arg00, arg10),
12592 arg01),
12593 build_int_cst (itype, 0));
12595 if (operand_equal_p (arg01, arg10, 0))
12596 return fold_build2_loc (loc, code, type,
12597 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12598 fold_build2_loc (loc,
12599 BIT_XOR_EXPR, itype,
12600 arg00, arg11),
12601 arg01),
12602 build_int_cst (itype, 0));
12604 if (operand_equal_p (arg00, arg11, 0))
12605 return fold_build2_loc (loc, code, type,
12606 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12607 fold_build2_loc (loc,
12608 BIT_XOR_EXPR, itype,
12609 arg01, arg10),
12610 arg00),
12611 build_int_cst (itype, 0));
12613 if (operand_equal_p (arg00, arg10, 0))
12614 return fold_build2_loc (loc, code, type,
12615 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12616 fold_build2_loc (loc,
12617 BIT_XOR_EXPR, itype,
12618 arg01, arg11),
12619 arg00),
12620 build_int_cst (itype, 0));
12623 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12624 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12626 tree arg00 = TREE_OPERAND (arg0, 0);
12627 tree arg01 = TREE_OPERAND (arg0, 1);
12628 tree arg10 = TREE_OPERAND (arg1, 0);
12629 tree arg11 = TREE_OPERAND (arg1, 1);
12630 tree itype = TREE_TYPE (arg0);
12632 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12633 operand_equal_p guarantees no side-effects so we don't need
12634 to use omit_one_operand on Z. */
12635 if (operand_equal_p (arg01, arg11, 0))
12636 return fold_build2_loc (loc, code, type, arg00,
12637 fold_convert_loc (loc, TREE_TYPE (arg00),
12638 arg10));
12639 if (operand_equal_p (arg01, arg10, 0))
12640 return fold_build2_loc (loc, code, type, arg00,
12641 fold_convert_loc (loc, TREE_TYPE (arg00),
12642 arg11));
12643 if (operand_equal_p (arg00, arg11, 0))
12644 return fold_build2_loc (loc, code, type, arg01,
12645 fold_convert_loc (loc, TREE_TYPE (arg01),
12646 arg10));
12647 if (operand_equal_p (arg00, arg10, 0))
12648 return fold_build2_loc (loc, code, type, arg01,
12649 fold_convert_loc (loc, TREE_TYPE (arg01),
12650 arg11));
12652 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12653 if (TREE_CODE (arg01) == INTEGER_CST
12654 && TREE_CODE (arg11) == INTEGER_CST)
12656 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12657 fold_convert_loc (loc, itype, arg11));
12658 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12659 return fold_build2_loc (loc, code, type, tem,
12660 fold_convert_loc (loc, itype, arg10));
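/* E.g. (X ^ 1) == (Y ^ 3) folds to (X ^ 2) == Y, since 1 ^ 3 == 2. */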
12664 /* Attempt to simplify equality/inequality comparisons of complex
12665 values. Only lower the comparison if the result is known or
12666 can be simplified to a single scalar comparison. */
12667 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12668 || TREE_CODE (arg0) == COMPLEX_CST)
12669 && (TREE_CODE (arg1) == COMPLEX_EXPR
12670 || TREE_CODE (arg1) == COMPLEX_CST))
12672 tree real0, imag0, real1, imag1;
12673 tree rcond, icond;
12675 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12677 real0 = TREE_OPERAND (arg0, 0);
12678 imag0 = TREE_OPERAND (arg0, 1);
12680 else
12682 real0 = TREE_REALPART (arg0);
12683 imag0 = TREE_IMAGPART (arg0);
12686 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12688 real1 = TREE_OPERAND (arg1, 0);
12689 imag1 = TREE_OPERAND (arg1, 1);
12691 else
12693 real1 = TREE_REALPART (arg1);
12694 imag1 = TREE_IMAGPART (arg1);
12697 rcond = fold_binary_loc (loc, code, type, real0, real1);
12698 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12700 if (integer_zerop (rcond))
12702 if (code == EQ_EXPR)
12703 return omit_two_operands_loc (loc, type, boolean_false_node,
12704 imag0, imag1);
12705 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12707 else
12709 if (code == NE_EXPR)
12710 return omit_two_operands_loc (loc, type, boolean_true_node,
12711 imag0, imag1);
12712 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12716 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12717 if (icond && TREE_CODE (icond) == INTEGER_CST)
12719 if (integer_zerop (icond))
12721 if (code == EQ_EXPR)
12722 return omit_two_operands_loc (loc, type, boolean_false_node,
12723 real0, real1);
12724 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12726 else
12728 if (code == NE_EXPR)
12729 return omit_two_operands_loc (loc, type, boolean_true_node,
12730 real0, real1);
12731 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12736 return NULL_TREE;
12738 case LT_EXPR:
12739 case GT_EXPR:
12740 case LE_EXPR:
12741 case GE_EXPR:
12742 tem = fold_comparison (loc, code, type, op0, op1);
12743 if (tem != NULL_TREE)
12744 return tem;
12746 /* Transform comparisons of the form X +- C CMP X. */
12747 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12748 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12749 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12750 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12751 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12752 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12754 tree arg01 = TREE_OPERAND (arg0, 1);
12755 enum tree_code code0 = TREE_CODE (arg0);
12756 int is_positive;
12758 if (TREE_CODE (arg01) == REAL_CST)
12759 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12760 else
12761 is_positive = tree_int_cst_sgn (arg01);
12763 /* (X - c) > X becomes false. */
12764 if (code == GT_EXPR
12765 && ((code0 == MINUS_EXPR && is_positive >= 0)
12766 || (code0 == PLUS_EXPR && is_positive <= 0)))
12768 if (TREE_CODE (arg01) == INTEGER_CST
12769 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12770 fold_overflow_warning (("assuming signed overflow does not "
12771 "occur when assuming that (X - c) > X "
12772 "is always false"),
12773 WARN_STRICT_OVERFLOW_ALL);
12774 return constant_boolean_node (0, type);
12777 /* Likewise (X + c) < X becomes false. */
12778 if (code == LT_EXPR
12779 && ((code0 == PLUS_EXPR && is_positive >= 0)
12780 || (code0 == MINUS_EXPR && is_positive <= 0)))
12782 if (TREE_CODE (arg01) == INTEGER_CST
12783 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12784 fold_overflow_warning (("assuming signed overflow does not "
12785 "occur when assuming that "
12786 "(X + c) < X is always false"),
12787 WARN_STRICT_OVERFLOW_ALL);
12788 return constant_boolean_node (0, type);
12791 /* Convert (X - c) <= X to true. */
12792 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12793 && code == LE_EXPR
12794 && ((code0 == MINUS_EXPR && is_positive >= 0)
12795 || (code0 == PLUS_EXPR && is_positive <= 0)))
12797 if (TREE_CODE (arg01) == INTEGER_CST
12798 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12799 fold_overflow_warning (("assuming signed overflow does not "
12800 "occur when assuming that "
12801 "(X - c) <= X is always true"),
12802 WARN_STRICT_OVERFLOW_ALL);
12803 return constant_boolean_node (1, type);
12806 /* Convert (X + c) >= X to true. */
12807 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12808 && code == GE_EXPR
12809 && ((code0 == PLUS_EXPR && is_positive >= 0)
12810 || (code0 == MINUS_EXPR && is_positive <= 0)))
12812 if (TREE_CODE (arg01) == INTEGER_CST
12813 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12814 fold_overflow_warning (("assuming signed overflow does not "
12815 "occur when assuming that "
12816 "(X + c) >= X is always true"),
12817 WARN_STRICT_OVERFLOW_ALL);
12818 return constant_boolean_node (1, type);
12821 if (TREE_CODE (arg01) == INTEGER_CST)
12823 /* Convert X + c > X and X - c < X to true for integers. */
12824 if (code == GT_EXPR
12825 && ((code0 == PLUS_EXPR && is_positive > 0)
12826 || (code0 == MINUS_EXPR && is_positive < 0)))
12828 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12829 fold_overflow_warning (("assuming signed overflow does "
12830 "not occur when assuming that "
12831 "(X + c) > X is always true"),
12832 WARN_STRICT_OVERFLOW_ALL);
12833 return constant_boolean_node (1, type);
12836 if (code == LT_EXPR
12837 && ((code0 == MINUS_EXPR && is_positive > 0)
12838 || (code0 == PLUS_EXPR && is_positive < 0)))
12840 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12841 fold_overflow_warning (("assuming signed overflow does "
12842 "not occur when assuming that "
12843 "(X - c) < X is always true"),
12844 WARN_STRICT_OVERFLOW_ALL);
12845 return constant_boolean_node (1, type);
12848 /* Convert X + c <= X and X - c >= X to false for integers. */
12849 if (code == LE_EXPR
12850 && ((code0 == PLUS_EXPR && is_positive > 0)
12851 || (code0 == MINUS_EXPR && is_positive < 0)))
12853 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12854 fold_overflow_warning (("assuming signed overflow does "
12855 "not occur when assuming that "
12856 "(X + c) <= X is always false"),
12857 WARN_STRICT_OVERFLOW_ALL);
12858 return constant_boolean_node (0, type);
12861 if (code == GE_EXPR
12862 && ((code0 == MINUS_EXPR && is_positive > 0)
12863 || (code0 == PLUS_EXPR && is_positive < 0)))
12865 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12866 fold_overflow_warning (("assuming signed overflow does "
12867 "not occur when assuming that "
12868 "(X - c) >= X is always false"),
12869 WARN_STRICT_OVERFLOW_ALL);
12870 return constant_boolean_node (0, type);
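/* E.g. for signed X with undefined overflow, X + 1 > X folds to
   true and X - 1 >= X folds to false; the warnings above record
   that the fold assumes signed overflow does not occur. */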
12875 /* Comparisons with the highest or lowest possible integer of
12876 the specified precision will have known values. */
12878 tree arg1_type = TREE_TYPE (arg1);
12879 unsigned int width = TYPE_PRECISION (arg1_type);
12881 if (TREE_CODE (arg1) == INTEGER_CST
12882 && width <= 2 * HOST_BITS_PER_WIDE_INT
12883 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12885 HOST_WIDE_INT signed_max_hi;
12886 unsigned HOST_WIDE_INT signed_max_lo;
12887 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12889 if (width <= HOST_BITS_PER_WIDE_INT)
12891 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12892 - 1;
12893 signed_max_hi = 0;
12894 max_hi = 0;
12896 if (TYPE_UNSIGNED (arg1_type))
12898 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12899 min_lo = 0;
12900 min_hi = 0;
12902 else
12904 max_lo = signed_max_lo;
12905 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12906 min_hi = -1;
12909 else
12911 width -= HOST_BITS_PER_WIDE_INT;
12912 signed_max_lo = -1;
12913 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12914 - 1;
12915 max_lo = -1;
12916 min_lo = 0;
12918 if (TYPE_UNSIGNED (arg1_type))
12920 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12921 min_hi = 0;
12923 else
12925 max_hi = signed_max_hi;
12926 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12930 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12931 && TREE_INT_CST_LOW (arg1) == max_lo)
12932 switch (code)
12934 case GT_EXPR:
12935 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12937 case GE_EXPR:
12938 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12940 case LE_EXPR:
12941 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12943 case LT_EXPR:
12944 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12946 /* The GE_EXPR and LT_EXPR cases above are not normally
12947 reached because of previous transformations. */
12949 default:
12950 break;
12952 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12953 == max_hi
12954 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12955 switch (code)
12957 case GT_EXPR:
12958 arg1 = const_binop (PLUS_EXPR, arg1,
12959 build_int_cst (TREE_TYPE (arg1), 1));
12960 return fold_build2_loc (loc, EQ_EXPR, type,
12961 fold_convert_loc (loc,
12962 TREE_TYPE (arg1), arg0),
12963 arg1);
12964 case LE_EXPR:
12965 arg1 = const_binop (PLUS_EXPR, arg1,
12966 build_int_cst (TREE_TYPE (arg1), 1));
12967 return fold_build2_loc (loc, NE_EXPR, type,
12968 fold_convert_loc (loc, TREE_TYPE (arg1),
12969 arg0),
12970 arg1);
12971 default:
12972 break;
12974 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12975 == min_hi
12976 && TREE_INT_CST_LOW (arg1) == min_lo)
12977 switch (code)
12979 case LT_EXPR:
12980 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12982 case LE_EXPR:
12983 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12985 case GE_EXPR:
12986 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12988 case GT_EXPR:
12989 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12991 default:
12992 break;
12994 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12995 == min_hi
12996 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12997 switch (code)
12999 case GE_EXPR:
13000 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13001 return fold_build2_loc (loc, NE_EXPR, type,
13002 fold_convert_loc (loc,
13003 TREE_TYPE (arg1), arg0),
13004 arg1);
13005 case LT_EXPR:
13006 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13007 return fold_build2_loc (loc, EQ_EXPR, type,
13008 fold_convert_loc (loc, TREE_TYPE (arg1),
13009 arg0),
13010 arg1);
13011 default:
13012 break;
13015 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13016 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13017 && TYPE_UNSIGNED (arg1_type)
13018 /* We will flip the signedness of the comparison operator
13019 associated with the mode of arg1, so the sign bit is
13020 specified by this mode. Check that arg1 is the signed
13021 max associated with this sign bit. */
13022 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13023 /* signed_type does not work on pointer types. */
13024 && INTEGRAL_TYPE_P (arg1_type))
13026 /* The following case also applies to X < signed_max+1
13027 and X >= signed_max+1 because of previous transformations. */
13028 if (code == LE_EXPR || code == GT_EXPR)
13030 tree st;
13031 st = signed_type_for (TREE_TYPE (arg1));
13032 return fold_build2_loc (loc,
13033 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13034 type, fold_convert_loc (loc, st, arg0),
13035 build_int_cst (st, 0));
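/* E.g. for a 32-bit unsigned X, X > 0x7fffffff folds to
   (int) X < 0 and X <= 0x7fffffff folds to (int) X >= 0. */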
13041 /* If we are comparing an ABS_EXPR with a constant, we can
13042 convert all the cases into explicit comparisons, but they may
13043 well not be faster than doing the ABS and one comparison.
13044 But ABS (X) <= C is a range comparison, which becomes a subtraction
13045 and a comparison, and is probably faster. */
13046 if (code == LE_EXPR
13047 && TREE_CODE (arg1) == INTEGER_CST
13048 && TREE_CODE (arg0) == ABS_EXPR
13049 && ! TREE_SIDE_EFFECTS (arg0)
13050 && (0 != (tem = negate_expr (arg1)))
13051 && TREE_CODE (tem) == INTEGER_CST
13052 && !TREE_OVERFLOW (tem))
13053 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13054 build2 (GE_EXPR, type,
13055 TREE_OPERAND (arg0, 0), tem),
13056 build2 (LE_EXPR, type,
13057 TREE_OPERAND (arg0, 0), arg1));
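/* E.g. ABS (X) <= 5 folds to X >= -5 && X <= 5, a range check
   that range folding can reduce to a single unsigned comparison
   such as (unsigned) (X + 5) <= 10. */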
13059 /* Convert ABS_EXPR<x> >= 0 to true. */
13060 strict_overflow_p = false;
13061 if (code == GE_EXPR
13062 && (integer_zerop (arg1)
13063 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13064 && real_zerop (arg1)))
13065 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13067 if (strict_overflow_p)
13068 fold_overflow_warning (("assuming signed overflow does not occur "
13069 "when simplifying comparison of "
13070 "absolute value and zero"),
13071 WARN_STRICT_OVERFLOW_CONDITIONAL);
13072 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13075 /* Convert ABS_EXPR<x> < 0 to false. */
13076 strict_overflow_p = false;
13077 if (code == LT_EXPR
13078 && (integer_zerop (arg1) || real_zerop (arg1))
13079 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13081 if (strict_overflow_p)
13082 fold_overflow_warning (("assuming signed overflow does not occur "
13083 "when simplifying comparison of "
13084 "absolute value and zero"),
13085 WARN_STRICT_OVERFLOW_CONDITIONAL);
13086 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13089 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13090 and similarly for >= into !=. */
13091 if ((code == LT_EXPR || code == GE_EXPR)
13092 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13093 && TREE_CODE (arg1) == LSHIFT_EXPR
13094 && integer_onep (TREE_OPERAND (arg1, 0)))
13095 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13096 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13097 TREE_OPERAND (arg1, 1)),
13098 build_int_cst (TREE_TYPE (arg0), 0));
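/* E.g. for unsigned X, X < (1 << Y) folds to (X >> Y) == 0 and
   X >= (1 << Y) folds to (X >> Y) != 0. */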
13100 if ((code == LT_EXPR || code == GE_EXPR)
13101 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13102 && CONVERT_EXPR_P (arg1)
13103 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13104 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13106 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13107 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13108 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13109 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13110 build_int_cst (TREE_TYPE (arg0), 0));
13113 return NULL_TREE;
13115 case UNORDERED_EXPR:
13116 case ORDERED_EXPR:
13117 case UNLT_EXPR:
13118 case UNLE_EXPR:
13119 case UNGT_EXPR:
13120 case UNGE_EXPR:
13121 case UNEQ_EXPR:
13122 case LTGT_EXPR:
13123 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13125 t1 = fold_relational_const (code, type, arg0, arg1);
13126 if (t1 != NULL_TREE)
13127 return t1;
13130 /* If the first operand is NaN, the result is constant. */
13131 if (TREE_CODE (arg0) == REAL_CST
13132 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13133 && (code != LTGT_EXPR || ! flag_trapping_math))
13135 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13136 ? integer_zero_node
13137 : integer_one_node;
13138 return omit_one_operand_loc (loc, type, t1, arg1);
13141 /* If the second operand is NaN, the result is constant. */
13142 if (TREE_CODE (arg1) == REAL_CST
13143 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13144 && (code != LTGT_EXPR || ! flag_trapping_math))
13146 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13147 ? integer_zero_node
13148 : integer_one_node;
13149 return omit_one_operand_loc (loc, type, t1, arg0);
13152 /* Simplify unordered comparison of something with itself. */
13153 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13154 && operand_equal_p (arg0, arg1, 0))
13155 return constant_boolean_node (1, type);
13157 if (code == LTGT_EXPR
13158 && !flag_trapping_math
13159 && operand_equal_p (arg0, arg1, 0))
13160 return constant_boolean_node (0, type);
13162 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13164 tree targ0 = strip_float_extensions (arg0);
13165 tree targ1 = strip_float_extensions (arg1);
13166 tree newtype = TREE_TYPE (targ0);
13168 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13169 newtype = TREE_TYPE (targ1);
13171 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13172 return fold_build2_loc (loc, code, type,
13173 fold_convert_loc (loc, newtype, targ0),
13174 fold_convert_loc (loc, newtype, targ1));
13177 return NULL_TREE;
13179 case COMPOUND_EXPR:
13180 /* When pedantic, a compound expression can be neither an lvalue
13181 nor an integer constant expression. */
13182 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13183 return NULL_TREE;
13184 /* Don't let (0, 0) be a null pointer constant. */
13185 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13186 : fold_convert_loc (loc, type, arg1);
13187 return pedantic_non_lvalue_loc (loc, tem);
13189 case COMPLEX_EXPR:
13190 if ((TREE_CODE (arg0) == REAL_CST
13191 && TREE_CODE (arg1) == REAL_CST)
13192 || (TREE_CODE (arg0) == INTEGER_CST
13193 && TREE_CODE (arg1) == INTEGER_CST))
13194 return build_complex (type, arg0, arg1);
13195 if (TREE_CODE (arg0) == REALPART_EXPR
13196 && TREE_CODE (arg1) == IMAGPART_EXPR
13197 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13198 == TYPE_MAIN_VARIANT (type))
13199 && operand_equal_p (TREE_OPERAND (arg0, 0),
13200 TREE_OPERAND (arg1, 0), 0))
13201 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13202 TREE_OPERAND (arg1, 0));
13203 return NULL_TREE;
13205 case ASSERT_EXPR:
13206 /* An ASSERT_EXPR should never be passed to fold_binary. */
13207 gcc_unreachable ();
13209 default:
13210 return NULL_TREE;
13211 } /* switch (code) */
13214 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13215 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13216 of GOTO_EXPR. */
13218 static tree
13219 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13221 switch (TREE_CODE (*tp))
13223 case LABEL_EXPR:
13224 return *tp;
13226 case GOTO_EXPR:
13227 *walk_subtrees = 0;
13229 /* ... fall through ... */
13231 default:
13232 return NULL_TREE;
13236 /* Return whether the sub-tree ST contains a label which is accessible from
13237 outside the sub-tree. */
13239 static bool
13240 contains_label_p (tree st)
13242 return
13243 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13246 /* Fold a ternary expression of code CODE and type TYPE with operands
13247 OP0, OP1, and OP2. Return the folded expression if folding is
13248 successful. Otherwise, return NULL_TREE. */
13250 tree
13251 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13252 tree op0, tree op1, tree op2)
13254 tree tem;
13255 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13256 enum tree_code_class kind = TREE_CODE_CLASS (code);
13258 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13259 && TREE_CODE_LENGTH (code) == 3);
13261 /* Strip any conversions that don't change the mode. This is safe
13262 for every expression, except for a comparison expression because
13263 its signedness is derived from its operands. So, in the latter
13264 case, only strip conversions that don't change the signedness.
13266 Note that this is done as an internal manipulation within the
13267 constant folder, in order to find the simplest representation of
13268 the arguments so that their form can be studied. In any case,
13269 the appropriate type conversions should be put back in the tree
13270 that will get out of the constant folder. */
13271 if (op0)
13273 arg0 = op0;
13274 STRIP_NOPS (arg0);
13277 if (op1)
13279 arg1 = op1;
13280 STRIP_NOPS (arg1);
13283 if (op2)
13285 arg2 = op2;
13286 STRIP_NOPS (arg2);
13289 switch (code)
13291 case COMPONENT_REF:
13292 if (TREE_CODE (arg0) == CONSTRUCTOR
13293 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13295 unsigned HOST_WIDE_INT idx;
13296 tree field, value;
13297 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13298 if (field == arg1)
13299 return value;
13301 return NULL_TREE;
13303 case COND_EXPR:
13304 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13305 so all simple results must be passed through pedantic_non_lvalue. */
13306 if (TREE_CODE (arg0) == INTEGER_CST)
13308 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13309 tem = integer_zerop (arg0) ? op2 : op1;
13310 /* Only optimize constant conditions when the selected branch
13311 has the same type as the COND_EXPR. This avoids optimizing
13312 away "c ? x : throw", where the throw has a void type.
13313 Avoid throwing away an operand that contains a label. */
13314 if ((!TREE_SIDE_EFFECTS (unused_op)
13315 || !contains_label_p (unused_op))
13316 && (! VOID_TYPE_P (TREE_TYPE (tem))
13317 || VOID_TYPE_P (type)))
13318 return pedantic_non_lvalue_loc (loc, tem);
13319 return NULL_TREE;
13321 if (operand_equal_p (arg1, op2, 0))
13322 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13324 /* If we have A op B ? A : C, we may be able to convert this to a
13325 simpler expression, depending on the operation and the values
13326 of B and C. Signed zeros prevent all of these transformations,
13327 for reasons given above each one.
13329 Also try swapping the arguments and inverting the conditional. */
13330 if (COMPARISON_CLASS_P (arg0)
13331 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13332 arg1, TREE_OPERAND (arg0, 1))
13333 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13335 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13336 if (tem)
13337 return tem;
13340 if (COMPARISON_CLASS_P (arg0)
13341 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13342 op2,
13343 TREE_OPERAND (arg0, 1))
13344 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13346 location_t loc0 = EXPR_LOCATION (arg0);
13347 if (loc0 == UNKNOWN_LOCATION)
13348 loc0 = loc;
13349 tem = fold_truth_not_expr (loc0, arg0);
13350 if (tem && COMPARISON_CLASS_P (tem))
13352 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13353 if (tem)
13354 return tem;
13358 /* If the second operand is simpler than the third, swap them
13359 since that produces better jump optimization results. */
13360 if (truth_value_p (TREE_CODE (arg0))
13361 && tree_swap_operands_p (op1, op2, false))
13363 location_t loc0 = EXPR_LOCATION (arg0);
13364 if (loc0 == UNKNOWN_LOCATION)
13365 loc0 = loc;
13366 /* See if this can be inverted. If it can't, possibly because
13367 it was a floating-point inequality comparison, don't do
13368 anything. */
13369 tem = fold_truth_not_expr (loc0, arg0);
13370 if (tem)
13371 return fold_build3_loc (loc, code, type, tem, op2, op1);
13374 /* Convert A ? 1 : 0 to simply A. */
13375 if (integer_onep (op1)
13376 && integer_zerop (op2)
13377 /* If we try to convert OP0 to our type, the
13378 call to fold will try to move the conversion inside
13379 a COND, which will recurse. In that case, the COND_EXPR
13380 is probably the best choice, so leave it alone. */
13381 && type == TREE_TYPE (arg0))
13382 return pedantic_non_lvalue_loc (loc, arg0);
13384 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13385 over COND_EXPR in cases such as floating point comparisons. */
13386 if (integer_zerop (op1)
13387 && integer_onep (op2)
13388 && truth_value_p (TREE_CODE (arg0)))
13389 return pedantic_non_lvalue_loc (loc,
13390 fold_convert_loc (loc, type,
13391 invert_truthvalue_loc (loc,
13392 arg0)));
13394 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13395 if (TREE_CODE (arg0) == LT_EXPR
13396 && integer_zerop (TREE_OPERAND (arg0, 1))
13397 && integer_zerop (op2)
13398 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13400 /* sign_bit_p only checks ARG1 bits within A's precision.
13401 If <sign bit of A> has wider type than A, bits outside
13402 of A's precision in <sign bit of A> need to be checked.
13403 If they are all 0, this optimization needs to be done
13404 in unsigned A's type; if they are all 1, in signed A's type;
13405 otherwise this can't be done. */
13406 if (TYPE_PRECISION (TREE_TYPE (tem))
13407 < TYPE_PRECISION (TREE_TYPE (arg1))
13408 && TYPE_PRECISION (TREE_TYPE (tem))
13409 < TYPE_PRECISION (type))
13411 unsigned HOST_WIDE_INT mask_lo;
13412 HOST_WIDE_INT mask_hi;
13413 int inner_width, outer_width;
13414 tree tem_type;
13416 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13417 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13418 if (outer_width > TYPE_PRECISION (type))
13419 outer_width = TYPE_PRECISION (type);
13421 if (outer_width > HOST_BITS_PER_WIDE_INT)
13423 mask_hi = ((unsigned HOST_WIDE_INT) -1
13424 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13425 mask_lo = -1;
13427 else
13429 mask_hi = 0;
13430 mask_lo = ((unsigned HOST_WIDE_INT) -1
13431 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13433 if (inner_width > HOST_BITS_PER_WIDE_INT)
13435 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13436 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13437 mask_lo = 0;
13439 else
13440 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13441 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13443 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13444 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13446 tem_type = signed_type_for (TREE_TYPE (tem));
13447 tem = fold_convert_loc (loc, tem_type, tem);
13449 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13450 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13452 tem_type = unsigned_type_for (TREE_TYPE (tem));
13453 tem = fold_convert_loc (loc, tem_type, tem);
13455 else
13456 tem = NULL;
13459 if (tem)
13460 return
13461 fold_convert_loc (loc, type,
13462 fold_build2_loc (loc, BIT_AND_EXPR,
13463 TREE_TYPE (tem), tem,
13464 fold_convert_loc (loc,
13465 TREE_TYPE (tem),
13466 arg1)));
13469 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13470 already handled above. */
13471 if (TREE_CODE (arg0) == BIT_AND_EXPR
13472 && integer_onep (TREE_OPERAND (arg0, 1))
13473 && integer_zerop (op2)
13474 && integer_pow2p (arg1))
13476 tree tem = TREE_OPERAND (arg0, 0);
13477 STRIP_NOPS (tem);
13478 if (TREE_CODE (tem) == RSHIFT_EXPR
13479 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13480 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13481 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13482 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13483 TREE_OPERAND (tem, 0), arg1);
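/* E.g. ((A >> 4) & 1) ? 16 : 0 folds to A & 16. */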
13486 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13487 is probably obsolete because the first operand should be a
13488 truth value (that's why we have the two cases above), but let's
13489 leave it in until we can confirm this for all front-ends. */
13490 if (integer_zerop (op2)
13491 && TREE_CODE (arg0) == NE_EXPR
13492 && integer_zerop (TREE_OPERAND (arg0, 1))
13493 && integer_pow2p (arg1)
13494 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13495 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13496 arg1, OEP_ONLY_CONST))
13497 return pedantic_non_lvalue_loc (loc,
13498 fold_convert_loc (loc, type,
13499 TREE_OPERAND (arg0, 0)));
13501 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13502 if (integer_zerop (op2)
13503 && truth_value_p (TREE_CODE (arg0))
13504 && truth_value_p (TREE_CODE (arg1)))
13505 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13506 fold_convert_loc (loc, type, arg0),
13507 arg1);
13509 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13510 if (integer_onep (op2)
13511 && truth_value_p (TREE_CODE (arg0))
13512 && truth_value_p (TREE_CODE (arg1)))
13514 location_t loc0 = EXPR_LOCATION (arg0);
13515 if (loc0 == UNKNOWN_LOCATION)
13516 loc0 = loc;
13517 /* Only perform transformation if ARG0 is easily inverted. */
13518 tem = fold_truth_not_expr (loc0, arg0);
13519 if (tem)
13520 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13521 fold_convert_loc (loc, type, tem),
13522 arg1);
13525 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13526 if (integer_zerop (arg1)
13527 && truth_value_p (TREE_CODE (arg0))
13528 && truth_value_p (TREE_CODE (op2)))
13530 location_t loc0 = EXPR_LOCATION (arg0);
13531 if (loc0 == UNKNOWN_LOCATION)
13532 loc0 = loc;
13533 /* Only perform transformation if ARG0 is easily inverted. */
13534 tem = fold_truth_not_expr (loc0, arg0);
13535 if (tem)
13536 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13537 fold_convert_loc (loc, type, tem),
13538 op2);
13541 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13542 if (integer_onep (arg1)
13543 && truth_value_p (TREE_CODE (arg0))
13544 && truth_value_p (TREE_CODE (op2)))
13545 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13546 fold_convert_loc (loc, type, arg0),
13547 op2);
13549 return NULL_TREE;
13551 case CALL_EXPR:
13552 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13553 of fold_ternary on them. */
13554 gcc_unreachable ();
13556 case BIT_FIELD_REF:
13557 if ((TREE_CODE (arg0) == VECTOR_CST
13558 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13559 && type == TREE_TYPE (TREE_TYPE (arg0)))
13561 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13562 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13564 if (width != 0
13565 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13566 && (idx % width) == 0
13567 && (idx = idx / width)
13568 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13570 tree elements = NULL_TREE;
13572 if (TREE_CODE (arg0) == VECTOR_CST)
13573 elements = TREE_VECTOR_CST_ELTS (arg0);
13574 else
13576 unsigned HOST_WIDE_INT idx;
13577 tree value;
13579 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13580 elements = tree_cons (NULL_TREE, value, elements);
13582 while (idx-- > 0 && elements)
13583 elements = TREE_CHAIN (elements);
13584 if (elements)
13585 return TREE_VALUE (elements);
13586 else
13587 return build_zero_cst (type);
13591 /* A bit-field-ref that referenced the full argument can be stripped. */
13592 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13593 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13594 && integer_zerop (op2))
13595 return fold_convert_loc (loc, type, arg0);
13597 return NULL_TREE;
13599 case FMA_EXPR:
13600 /* For integers we can decompose the FMA if possible. */
13601 if (TREE_CODE (arg0) == INTEGER_CST
13602 && TREE_CODE (arg1) == INTEGER_CST)
13603 return fold_build2_loc (loc, PLUS_EXPR, type,
13604 const_binop (MULT_EXPR, arg0, arg1), arg2);
13605 if (integer_zerop (arg2))
13606 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13608 return fold_fma (loc, type, arg0, arg1, arg2);
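/* E.g. FMA_EXPR <5, 7, Z> folds to 35 + Z, and FMA_EXPR <X, Y, 0>
   folds to X * Y. */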
13610 default:
13611 return NULL_TREE;
13612 } /* switch (code) */
13615 /* Perform constant folding and related simplification of EXPR.
13616 The related simplifications include x*1 => x, x*0 => 0, etc.,
13617 and application of the associative law.
13618 NOP_EXPR conversions may be removed freely (as long as we
13619 are careful not to change the type of the overall expression).
13620 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13621 but we can constant-fold them if they have constant operands. */
13623 #ifdef ENABLE_FOLD_CHECKING
13624 # define fold(x) fold_1 (x)
13625 static tree fold_1 (tree);
13626 static
13627 #endif
13628 tree
13629 fold (tree expr)
13631 const tree t = expr;
13632 enum tree_code code = TREE_CODE (t);
13633 enum tree_code_class kind = TREE_CODE_CLASS (code);
13634 tree tem;
13635 location_t loc = EXPR_LOCATION (expr);
13637 /* Return right away if a constant. */
13638 if (kind == tcc_constant)
13639 return t;
13641 /* CALL_EXPR-like objects with variable numbers of operands are
13642 treated specially. */
13643 if (kind == tcc_vl_exp)
13645 if (code == CALL_EXPR)
13647 tem = fold_call_expr (loc, expr, false);
13648 return tem ? tem : expr;
13650 return expr;
13653 if (IS_EXPR_CODE_CLASS (kind))
13655 tree type = TREE_TYPE (t);
13656 tree op0, op1, op2;
13658 switch (TREE_CODE_LENGTH (code))
13660 case 1:
13661 op0 = TREE_OPERAND (t, 0);
13662 tem = fold_unary_loc (loc, code, type, op0);
13663 return tem ? tem : expr;
13664 case 2:
13665 op0 = TREE_OPERAND (t, 0);
13666 op1 = TREE_OPERAND (t, 1);
13667 tem = fold_binary_loc (loc, code, type, op0, op1);
13668 return tem ? tem : expr;
13669 case 3:
13670 op0 = TREE_OPERAND (t, 0);
13671 op1 = TREE_OPERAND (t, 1);
13672 op2 = TREE_OPERAND (t, 2);
13673 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13674 return tem ? tem : expr;
13675 default:
13676 break;
13680 switch (code)
13682 case ARRAY_REF:
13684 tree op0 = TREE_OPERAND (t, 0);
13685 tree op1 = TREE_OPERAND (t, 1);
13687 if (TREE_CODE (op1) == INTEGER_CST
13688 && TREE_CODE (op0) == CONSTRUCTOR
13689 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13691 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13692 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13693 unsigned HOST_WIDE_INT begin = 0;
13695 /* Find a matching index by means of a binary search. */
13696 while (begin != end)
13698 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13699 tree index = VEC_index (constructor_elt, elts, middle)->index;
13701 if (TREE_CODE (index) == INTEGER_CST
13702 && tree_int_cst_lt (index, op1))
13703 begin = middle + 1;
13704 else if (TREE_CODE (index) == INTEGER_CST
13705 && tree_int_cst_lt (op1, index))
13706 end = middle;
13707 else if (TREE_CODE (index) == RANGE_EXPR
13708 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13709 begin = middle + 1;
13710 else if (TREE_CODE (index) == RANGE_EXPR
13711 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13712 end = middle;
13713 else
13714 return VEC_index (constructor_elt, elts, middle)->value;
13718 return t;
13721 case CONST_DECL:
13722 return fold (DECL_INITIAL (t));
13724 default:
13725 return t;
13726 } /* switch (code) */
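/* Usage sketch (illustrative only; callers normally prefer the
   fold_buildN wrappers, which fold as they build):

     tree one = build_int_cst (integer_type_node, 1);
     tree two = build_int_cst (integer_type_node, 2);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, one, two));

   after which SUM is an INTEGER_CST of value 3. */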
13729 #ifdef ENABLE_FOLD_CHECKING
13730 #undef fold
13732 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13733 static void fold_check_failed (const_tree, const_tree);
13734 void print_fold_checksum (const_tree);
13736 /* When --enable-checking=fold, compute a digest of expr before
13737 and after the actual fold call, to verify that fold did not
13738 accidentally change the original expr. */
13740 tree
13741 fold (tree expr)
13743 tree ret;
13744 struct md5_ctx ctx;
13745 unsigned char checksum_before[16], checksum_after[16];
13746 htab_t ht;
13748 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13749 md5_init_ctx (&ctx);
13750 fold_checksum_tree (expr, &ctx, ht);
13751 md5_finish_ctx (&ctx, checksum_before);
13752 htab_empty (ht);
13754 ret = fold_1 (expr);
13756 md5_init_ctx (&ctx);
13757 fold_checksum_tree (expr, &ctx, ht);
13758 md5_finish_ctx (&ctx, checksum_after);
13759 htab_delete (ht);
13761 if (memcmp (checksum_before, checksum_after, 16))
13762 fold_check_failed (expr, ret);
13764 return ret;
13767 void
13768 print_fold_checksum (const_tree expr)
13770 struct md5_ctx ctx;
13771 unsigned char checksum[16], cnt;
13772 htab_t ht;
13774 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13775 md5_init_ctx (&ctx);
13776 fold_checksum_tree (expr, &ctx, ht);
13777 md5_finish_ctx (&ctx, checksum);
13778 htab_delete (ht);
13779 for (cnt = 0; cnt < 16; ++cnt)
13780 fprintf (stderr, "%02x", checksum[cnt]);
13781 putc ('\n', stderr);
13784 static void
13785 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13787 internal_error ("fold check: original tree changed by fold");
13790 static void
13791 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13793 void **slot;
13794 enum tree_code code;
13795 union tree_node buf;
13796 int i, len;
13798 recursive_label:
13800 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13801 <= sizeof (struct tree_function_decl))
13802 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13803 if (expr == NULL)
13804 return;
13805 slot = (void **) htab_find_slot (ht, expr, INSERT);
13806 if (*slot != NULL)
13807 return;
13808 *slot = CONST_CAST_TREE (expr);
13809 code = TREE_CODE (expr);
13810 if (TREE_CODE_CLASS (code) == tcc_declaration
13811 && DECL_ASSEMBLER_NAME_SET_P (expr))
13813 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13814 memcpy ((char *) &buf, expr, tree_size (expr));
13815 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13816 expr = (tree) &buf;
13818 else if (TREE_CODE_CLASS (code) == tcc_type
13819 && (TYPE_POINTER_TO (expr)
13820 || TYPE_REFERENCE_TO (expr)
13821 || TYPE_CACHED_VALUES_P (expr)
13822 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13823 || TYPE_NEXT_VARIANT (expr)))
13825 /* Allow these fields to be modified. */
13826 tree tmp;
13827 memcpy ((char *) &buf, expr, tree_size (expr));
13828 expr = tmp = (tree) &buf;
13829 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13830 TYPE_POINTER_TO (tmp) = NULL;
13831 TYPE_REFERENCE_TO (tmp) = NULL;
13832 TYPE_NEXT_VARIANT (tmp) = NULL;
13833 if (TYPE_CACHED_VALUES_P (tmp))
13835 TYPE_CACHED_VALUES_P (tmp) = 0;
13836 TYPE_CACHED_VALUES (tmp) = NULL;
13839 md5_process_bytes (expr, tree_size (expr), ctx);
13840 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13841 if (TREE_CODE_CLASS (code) != tcc_type
13842 && TREE_CODE_CLASS (code) != tcc_declaration
13843 && code != TREE_LIST
13844 && code != SSA_NAME)
13845 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13846 switch (TREE_CODE_CLASS (code))
13848 case tcc_constant:
13849 switch (code)
13851 case STRING_CST:
13852 md5_process_bytes (TREE_STRING_POINTER (expr),
13853 TREE_STRING_LENGTH (expr), ctx);
13854 break;
13855 case COMPLEX_CST:
13856 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13857 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13858 break;
13859 case VECTOR_CST:
13860 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13861 break;
13862 default:
13863 break;
13865 break;
13866 case tcc_exceptional:
13867 switch (code)
13869 case TREE_LIST:
13870 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13871 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13872 expr = TREE_CHAIN (expr);
13873 goto recursive_label;
13874 break;
13875 case TREE_VEC:
13876 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13877 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13878 break;
13879 default:
13880 break;
13882 break;
13883 case tcc_expression:
13884 case tcc_reference:
13885 case tcc_comparison:
13886 case tcc_unary:
13887 case tcc_binary:
13888 case tcc_statement:
13889 case tcc_vl_exp:
13890 len = TREE_OPERAND_LENGTH (expr);
13891 for (i = 0; i < len; ++i)
13892 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13893 break;
13894 case tcc_declaration:
13895 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13896 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13897 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13899 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13900 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13901 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13902 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13903 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13905 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13906 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13908 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13910 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13911 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13912 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13914 break;
13915 case tcc_type:
13916 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13917 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13918 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13919 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13920 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13921 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13922 if (INTEGRAL_TYPE_P (expr)
13923 || SCALAR_FLOAT_TYPE_P (expr))
13925 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13926 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13928 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13929 if (TREE_CODE (expr) == RECORD_TYPE
13930 || TREE_CODE (expr) == UNION_TYPE
13931 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13932 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13933 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13934 break;
13935 default:
13936 break;
13940 /* Helper function for outputting the checksum of a tree T. When
13941 debugging with gdb, you can "define mynext" to be "next" followed
13942 by "call debug_fold_checksum (op0)", then just trace down till the
13943 outputs differ. */
13945 DEBUG_FUNCTION void
13946 debug_fold_checksum (const_tree t)
13948 int i;
13949 unsigned char checksum[16];
13950 struct md5_ctx ctx;
13951 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13953 md5_init_ctx (&ctx);
13954 fold_checksum_tree (t, &ctx, ht);
13955 md5_finish_ctx (&ctx, checksum);
13956 htab_empty (ht);
13958 for (i = 0; i < 16; i++)
13959 fprintf (stderr, "%d ", checksum[i]);
13961 fprintf (stderr, "\n");
13964 #endif
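/* Illustrative sketch (not GCC code): ENABLE_FOLD_CHECKING boils down to
   "hash every operand before and after folding and abort if the hashes
   differ", i.e. fold must build new trees rather than mutate its inputs.
   A standalone C analogue of the same pattern, with a toy FNV-1a hash
   standing in for MD5:

     #include <stdint.h>
     #include <stddef.h>
     #include <assert.h>

     static uint32_t fnv1a (const void *p, size_t n)
     {
       const unsigned char *s = (const unsigned char *) p;
       uint32_t h = 2166136261u;
       while (n--)
         h = (h ^ *s++) * 16777619u;
       return h;
     }

     // Call FN on IN and verify IN itself was left untouched.
     const char *checked_fold (const char *in, size_t n,
                               const char *(*fn) (const char *))
     {
       uint32_t before = fnv1a (in, n);
       const char *out = fn (in);
       assert (fnv1a (in, n) == before);
       return out;
     }

   fold_checksum_tree above additionally recurses through the tree and
   masks out fields (DECL_ASSEMBLER_NAME, TYPE_POINTER_TO, ...) that
   fold is explicitly allowed to modify.  */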
13966 /* Fold a unary tree expression with code CODE of type TYPE with an
13967 operand OP0. LOC is the location of the resulting expression.
13968 Return a folded expression if successful. Otherwise, return a tree
13969 expression with code CODE of type TYPE with an operand OP0. */
13971 tree
13972 fold_build1_stat_loc (location_t loc,
13973 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13975 tree tem;
13976 #ifdef ENABLE_FOLD_CHECKING
13977 unsigned char checksum_before[16], checksum_after[16];
13978 struct md5_ctx ctx;
13979 htab_t ht;
13981 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13982 md5_init_ctx (&ctx);
13983 fold_checksum_tree (op0, &ctx, ht);
13984 md5_finish_ctx (&ctx, checksum_before);
13985 htab_empty (ht);
13986 #endif
13988 tem = fold_unary_loc (loc, code, type, op0);
13989 if (!tem)
13990 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13992 #ifdef ENABLE_FOLD_CHECKING
13993 md5_init_ctx (&ctx);
13994 fold_checksum_tree (op0, &ctx, ht);
13995 md5_finish_ctx (&ctx, checksum_after);
13996 htab_delete (ht);
13998 if (memcmp (checksum_before, checksum_after, 16))
13999 fold_check_failed (op0, tem);
14000 #endif
14001 return tem;
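/* For example (illustrative only), a caller negating an operand writes

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, type, op0);

   and receives either a simplified tree (an INTEGER_CST when OP0 is
   constant, OP0 itself for NEGATE of NEGATE, ...) or, failing that, a
   freshly built NEGATE_EXPR node.  */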
14004 /* Fold a binary tree expression with code CODE of type TYPE with
14005 operands OP0 and OP1. LOC is the location of the resulting
14006 expression. Return a folded expression if successful. Otherwise,
14007 return a tree expression with code CODE of type TYPE with operands
14008 OP0 and OP1. */
14010 tree
14011 fold_build2_stat_loc (location_t loc,
14012 enum tree_code code, tree type, tree op0, tree op1
14013 MEM_STAT_DECL)
14015 tree tem;
14016 #ifdef ENABLE_FOLD_CHECKING
14017 unsigned char checksum_before_op0[16],
14018 checksum_before_op1[16],
14019 checksum_after_op0[16],
14020 checksum_after_op1[16];
14021 struct md5_ctx ctx;
14022 htab_t ht;
14024 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14025 md5_init_ctx (&ctx);
14026 fold_checksum_tree (op0, &ctx, ht);
14027 md5_finish_ctx (&ctx, checksum_before_op0);
14028 htab_empty (ht);
14030 md5_init_ctx (&ctx);
14031 fold_checksum_tree (op1, &ctx, ht);
14032 md5_finish_ctx (&ctx, checksum_before_op1);
14033 htab_empty (ht);
14034 #endif
14036 tem = fold_binary_loc (loc, code, type, op0, op1);
14037 if (!tem)
14038 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14040 #ifdef ENABLE_FOLD_CHECKING
14041 md5_init_ctx (&ctx);
14042 fold_checksum_tree (op0, &ctx, ht);
14043 md5_finish_ctx (&ctx, checksum_after_op0);
14044 htab_empty (ht);
14046 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14047 fold_check_failed (op0, tem);
14049 md5_init_ctx (&ctx);
14050 fold_checksum_tree (op1, &ctx, ht);
14051 md5_finish_ctx (&ctx, checksum_after_op1);
14052 htab_delete (ht);
14054 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14055 fold_check_failed (op1, tem);
14056 #endif
14057 return tem;
14060 /* Fold a ternary tree expression with code CODE of type TYPE with
14061 operands OP0, OP1, and OP2. Return a folded expression if
14062 successful. Otherwise, return a tree expression with code CODE of
14063 type TYPE with operands OP0, OP1, and OP2. */
14065 tree
14066 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14067 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14069 tree tem;
14070 #ifdef ENABLE_FOLD_CHECKING
14071 unsigned char checksum_before_op0[16],
14072 checksum_before_op1[16],
14073 checksum_before_op2[16],
14074 checksum_after_op0[16],
14075 checksum_after_op1[16],
14076 checksum_after_op2[16];
14077 struct md5_ctx ctx;
14078 htab_t ht;
14080 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14081 md5_init_ctx (&ctx);
14082 fold_checksum_tree (op0, &ctx, ht);
14083 md5_finish_ctx (&ctx, checksum_before_op0);
14084 htab_empty (ht);
14086 md5_init_ctx (&ctx);
14087 fold_checksum_tree (op1, &ctx, ht);
14088 md5_finish_ctx (&ctx, checksum_before_op1);
14089 htab_empty (ht);
14091 md5_init_ctx (&ctx);
14092 fold_checksum_tree (op2, &ctx, ht);
14093 md5_finish_ctx (&ctx, checksum_before_op2);
14094 htab_empty (ht);
14095 #endif
14097 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14098 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14099 if (!tem)
14100 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14102 #ifdef ENABLE_FOLD_CHECKING
14103 md5_init_ctx (&ctx);
14104 fold_checksum_tree (op0, &ctx, ht);
14105 md5_finish_ctx (&ctx, checksum_after_op0);
14106 htab_empty (ht);
14108 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14109 fold_check_failed (op0, tem);
14111 md5_init_ctx (&ctx);
14112 fold_checksum_tree (op1, &ctx, ht);
14113 md5_finish_ctx (&ctx, checksum_after_op1);
14114 htab_empty (ht);
14116 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14117 fold_check_failed (op1, tem);
14119 md5_init_ctx (&ctx);
14120 fold_checksum_tree (op2, &ctx, ht);
14121 md5_finish_ctx (&ctx, checksum_after_op2);
14122 htab_delete (ht);
14124 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14125 fold_check_failed (op2, tem);
14126 #endif
14127 return tem;
14130 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14131 the NARGS arguments in ARGARRAY, and a null static chain.
14132 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14133 of type TYPE from the given operands as constructed by build_call_array. */
14135 tree
14136 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14137 int nargs, tree *argarray)
14139 tree tem;
14140 #ifdef ENABLE_FOLD_CHECKING
14141 unsigned char checksum_before_fn[16],
14142 checksum_before_arglist[16],
14143 checksum_after_fn[16],
14144 checksum_after_arglist[16];
14145 struct md5_ctx ctx;
14146 htab_t ht;
14147 int i;
14149 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14150 md5_init_ctx (&ctx);
14151 fold_checksum_tree (fn, &ctx, ht);
14152 md5_finish_ctx (&ctx, checksum_before_fn);
14153 htab_empty (ht);
14155 md5_init_ctx (&ctx);
14156 for (i = 0; i < nargs; i++)
14157 fold_checksum_tree (argarray[i], &ctx, ht);
14158 md5_finish_ctx (&ctx, checksum_before_arglist);
14159 htab_empty (ht);
14160 #endif
14162 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14164 #ifdef ENABLE_FOLD_CHECKING
14165 md5_init_ctx (&ctx);
14166 fold_checksum_tree (fn, &ctx, ht);
14167 md5_finish_ctx (&ctx, checksum_after_fn);
14168 htab_empty (ht);
14170 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14171 fold_check_failed (fn, tem);
14173 md5_init_ctx (&ctx);
14174 for (i = 0; i < nargs; i++)
14175 fold_checksum_tree (argarray[i], &ctx, ht);
14176 md5_finish_ctx (&ctx, checksum_after_arglist);
14177 htab_delete (ht);
14179 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14180 fold_check_failed (NULL_TREE, tem);
14181 #endif
14182 return tem;
14185 /* Perform constant folding and related simplification of initializer
14186 expression EXPR. These behave identically to "fold_buildN" but ignore
14187 potential run-time traps and exceptions that fold must preserve. */
14189 #define START_FOLD_INIT \
14190 int saved_signaling_nans = flag_signaling_nans;\
14191 int saved_trapping_math = flag_trapping_math;\
14192 int saved_rounding_math = flag_rounding_math;\
14193 int saved_trapv = flag_trapv;\
14194 int saved_folding_initializer = folding_initializer;\
14195 flag_signaling_nans = 0;\
14196 flag_trapping_math = 0;\
14197 flag_rounding_math = 0;\
14198 flag_trapv = 0;\
14199 folding_initializer = 1;
14201 #define END_FOLD_INIT \
14202 flag_signaling_nans = saved_signaling_nans;\
14203 flag_trapping_math = saved_trapping_math;\
14204 flag_rounding_math = saved_rounding_math;\
14205 flag_trapv = saved_trapv;\
14206 folding_initializer = saved_folding_initializer;
14208 tree
14209 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14210 tree type, tree op)
14212 tree result;
14213 START_FOLD_INIT;
14215 result = fold_build1_loc (loc, code, type, op);
14217 END_FOLD_INIT;
14218 return result;
14221 tree
14222 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14223 tree type, tree op0, tree op1)
14225 tree result;
14226 START_FOLD_INIT;
14228 result = fold_build2_loc (loc, code, type, op0, op1);
14230 END_FOLD_INIT;
14231 return result;
14234 tree
14235 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14236 tree type, tree op0, tree op1, tree op2)
14238 tree result;
14239 START_FOLD_INIT;
14241 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14243 END_FOLD_INIT;
14244 return result;
14247 tree
14248 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14249 int nargs, tree *argarray)
14251 tree result;
14252 START_FOLD_INIT;
14254 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14256 END_FOLD_INIT;
14257 return result;
14260 #undef START_FOLD_INIT
14261 #undef END_FOLD_INIT
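/* A sketch of why these initializer variants exist (illustrative): a
   static initializer such as

     static double d = 1.0 / 3.0;

   must be evaluated at compile time even though, with -frounding-math
   or -ftrapping-math in effect, fold would normally refuse to fold the
   division away.  The *_initializer_loc entry points therefore clear
   those flags, set folding_initializer, delegate to the ordinary
   fold_buildN routines, and restore everything afterwards.  */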
14263 /* Determine if the first argument is a multiple of the second argument.
14264 Return 0 if it is not, or if we cannot easily determine it to be.
14266 An example of the sort of thing we care about (at this point; this routine
14267 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14268 fold cases do now) is discovering that
14270 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14272 is a multiple of
14274 SAVE_EXPR (J * 8)
14276 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14278 This code also handles discovering that
14280 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14282 is a multiple of 8 so we don't have to worry about dealing with a
14283 possible remainder.
14285 Note that we *look* inside a SAVE_EXPR only to determine how it was
14286 calculated; it is not safe for fold to do much of anything else with the
14287 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14288 at run time. For example, the latter example above *cannot* be implemented
14289 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14290 evaluation time of the original SAVE_EXPR is not necessarily the same at
14291 the time the new expression is evaluated. The only optimization of this
14292 sort that would be valid is changing
14294 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14296 divided by 8 to
14298 SAVE_EXPR (I) * SAVE_EXPR (J)
14300 (where the same SAVE_EXPR (J) is used in the original and the
14301 transformed version). */
14304 multiple_of_p (tree type, const_tree top, const_tree bottom)
14306 if (operand_equal_p (top, bottom, 0))
14307 return 1;
14309 if (TREE_CODE (type) != INTEGER_TYPE)
14310 return 0;
14312 switch (TREE_CODE (top))
14314 case BIT_AND_EXPR:
14315 /* Bitwise and provides a power of two multiple. If the mask is
14316 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14317 if (!integer_pow2p (bottom))
14318 return 0;
14319 /* FALLTHRU */
14321 case MULT_EXPR:
14322 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14323 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14325 case PLUS_EXPR:
14326 case MINUS_EXPR:
14327 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14328 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14330 case LSHIFT_EXPR:
14331 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14333 tree op1, t1;
14335 op1 = TREE_OPERAND (top, 1);
14336 /* const_binop may not detect overflow correctly,
14337 so check for it explicitly here. */
14338 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14339 > TREE_INT_CST_LOW (op1)
14340 && TREE_INT_CST_HIGH (op1) == 0
14341 && 0 != (t1 = fold_convert (type,
14342 const_binop (LSHIFT_EXPR,
14343 size_one_node,
14344 op1)))
14345 && !TREE_OVERFLOW (t1))
14346 return multiple_of_p (type, t1, bottom);
14348 return 0;
14350 case NOP_EXPR:
14351 /* Can't handle conversions from non-integral or wider integral type. */
14352 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14353 || (TYPE_PRECISION (type)
14354 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14355 return 0;
14357 /* ... fall through ... */
14359 case SAVE_EXPR:
14360 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14362 case COND_EXPR:
14363 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14364 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14366 case INTEGER_CST:
14367 if (TREE_CODE (bottom) != INTEGER_CST
14368 || integer_zerop (bottom)
14369 || (TYPE_UNSIGNED (type)
14370 && (tree_int_cst_sgn (top) < 0
14371 || tree_int_cst_sgn (bottom) < 0)))
14372 return 0;
14373 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14374 top, bottom, 0));
14376 default:
14377 return 0;
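/* Worked example (not GCC code): with TOP = i * 8 + 16 and BOTTOM = 8,
   the PLUS_EXPR case asks whether both "i * 8" and "16" are multiples
   of 8; the MULT_EXPR case succeeds because one factor equals BOTTOM,
   and the INTEGER_CST case checks 16 % 8 == 0, so the whole expression
   is recognized as a multiple of 8.  For constants alone the test
   degenerates to a standalone analogue such as

     int multiple_of (long top, long bottom)
     {
       return bottom != 0 && top % bottom == 0;
     }

   with the extra sign restrictions above for unsigned types.  */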
14381 /* Return true if an expression of code CODE and type TYPE is known to be non-negative. */
14383 static bool
14384 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14386 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14387 && truth_value_p (code))
14388 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14389 have a signed:1 type (where the values are -1 and 0). */
14390 return true;
14391 return false;
14394 /* Return true if (CODE OP0) is known to be non-negative. If the return
14395 value is based on the assumption that signed overflow is undefined,
14396 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14397 *STRICT_OVERFLOW_P. */
14399 bool
14400 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14401 bool *strict_overflow_p)
14403 if (TYPE_UNSIGNED (type))
14404 return true;
14406 switch (code)
14408 case ABS_EXPR:
14409 /* We can't return 1 if flag_wrapv is set because
14410 ABS_EXPR<INT_MIN> = INT_MIN. */
14411 if (!INTEGRAL_TYPE_P (type))
14412 return true;
14413 if (TYPE_OVERFLOW_UNDEFINED (type))
14415 *strict_overflow_p = true;
14416 return true;
14418 break;
14420 case NON_LVALUE_EXPR:
14421 case FLOAT_EXPR:
14422 case FIX_TRUNC_EXPR:
14423 return tree_expr_nonnegative_warnv_p (op0,
14424 strict_overflow_p);
14426 case NOP_EXPR:
14428 tree inner_type = TREE_TYPE (op0);
14429 tree outer_type = type;
14431 if (TREE_CODE (outer_type) == REAL_TYPE)
14433 if (TREE_CODE (inner_type) == REAL_TYPE)
14434 return tree_expr_nonnegative_warnv_p (op0,
14435 strict_overflow_p);
14436 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14438 if (TYPE_UNSIGNED (inner_type))
14439 return true;
14440 return tree_expr_nonnegative_warnv_p (op0,
14441 strict_overflow_p);
14444 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14446 if (TREE_CODE (inner_type) == REAL_TYPE)
14447 return tree_expr_nonnegative_warnv_p (op0,
14448 strict_overflow_p);
14449 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14450 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14451 && TYPE_UNSIGNED (inner_type);
14454 break;
14456 default:
14457 return tree_simple_nonnegative_warnv_p (code, type);
14460 /* We don't know sign of `t', so be conservative and return false. */
14461 return false;
14464 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14465 value is based on the assumption that signed overflow is undefined,
14466 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14467 *STRICT_OVERFLOW_P. */
14469 bool
14470 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14471 tree op1, bool *strict_overflow_p)
14473 if (TYPE_UNSIGNED (type))
14474 return true;
14476 switch (code)
14478 case POINTER_PLUS_EXPR:
14479 case PLUS_EXPR:
14480 if (FLOAT_TYPE_P (type))
14481 return (tree_expr_nonnegative_warnv_p (op0,
14482 strict_overflow_p)
14483 && tree_expr_nonnegative_warnv_p (op1,
14484 strict_overflow_p));
14486 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14487 both unsigned and at least 2 bits shorter than the result. */
14488 if (TREE_CODE (type) == INTEGER_TYPE
14489 && TREE_CODE (op0) == NOP_EXPR
14490 && TREE_CODE (op1) == NOP_EXPR)
14492 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14493 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14494 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14495 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14497 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14498 TYPE_PRECISION (inner2)) + 1;
14499 return prec < TYPE_PRECISION (type);
14502 break;
14504 case MULT_EXPR:
14505 if (FLOAT_TYPE_P (type))
14507 /* x * x for floating point x is always non-negative. */
14508 if (operand_equal_p (op0, op1, 0))
14509 return true;
14510 return (tree_expr_nonnegative_warnv_p (op0,
14511 strict_overflow_p)
14512 && tree_expr_nonnegative_warnv_p (op1,
14513 strict_overflow_p));
14516 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14517 both unsigned and their total precision is smaller than that of the result. */
14518 if (TREE_CODE (type) == INTEGER_TYPE
14519 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14520 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14522 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14523 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14524 : TREE_TYPE (op0);
14525 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14526 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14527 : TREE_TYPE (op1);
14529 bool unsigned0 = TYPE_UNSIGNED (inner0);
14530 bool unsigned1 = TYPE_UNSIGNED (inner1);
14532 if (TREE_CODE (op0) == INTEGER_CST)
14533 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14535 if (TREE_CODE (op1) == INTEGER_CST)
14536 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14538 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14539 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14541 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14542 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14543 : TYPE_PRECISION (inner0);
14545 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14546 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14547 : TYPE_PRECISION (inner1);
14549 return precision0 + precision1 < TYPE_PRECISION (type);
14552 return false;
14554 case BIT_AND_EXPR:
14555 case MAX_EXPR:
14556 return (tree_expr_nonnegative_warnv_p (op0,
14557 strict_overflow_p)
14558 || tree_expr_nonnegative_warnv_p (op1,
14559 strict_overflow_p));
14561 case BIT_IOR_EXPR:
14562 case BIT_XOR_EXPR:
14563 case MIN_EXPR:
14564 case RDIV_EXPR:
14565 case TRUNC_DIV_EXPR:
14566 case CEIL_DIV_EXPR:
14567 case FLOOR_DIV_EXPR:
14568 case ROUND_DIV_EXPR:
14569 return (tree_expr_nonnegative_warnv_p (op0,
14570 strict_overflow_p)
14571 && tree_expr_nonnegative_warnv_p (op1,
14572 strict_overflow_p));
14574 case TRUNC_MOD_EXPR:
14575 case CEIL_MOD_EXPR:
14576 case FLOOR_MOD_EXPR:
14577 case ROUND_MOD_EXPR:
14578 return tree_expr_nonnegative_warnv_p (op0,
14579 strict_overflow_p);
14580 default:
14581 return tree_simple_nonnegative_warnv_p (code, type);
14584 /* We don't know sign of `t', so be conservative and return false. */
14585 return false;
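/* The precision tests above are plain bounds arithmetic.  For
   PLUS_EXPR, two zero-extended values of precisions p1 and p2 are each
   at most 2^p - 1, so their sum is below 2^(MAX (p1, p2) + 1); when
   MAX (p1, p2) + 1 < TYPE_PRECISION (type) the sign bit of the result
   can never be set.  For MULT_EXPR, (2^p1 - 1) * (2^p2 - 1) is below
   2^(p1 + p2), so p1 + p2 < TYPE_PRECISION (type) suffices.  E.g.
   adding or multiplying two zero-extended 8-bit values in a 32-bit int
   always yields a non-negative result.  */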
14588 /* Return true if T is known to be non-negative. If the return
14589 value is based on the assumption that signed overflow is undefined,
14590 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14591 *STRICT_OVERFLOW_P. */
14593 bool
14594 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14596 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14597 return true;
14599 switch (TREE_CODE (t))
14601 case INTEGER_CST:
14602 return tree_int_cst_sgn (t) >= 0;
14604 case REAL_CST:
14605 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14607 case FIXED_CST:
14608 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14610 case COND_EXPR:
14611 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14612 strict_overflow_p)
14613 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14614 strict_overflow_p));
14615 default:
14616 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14617 TREE_TYPE (t));
14619 /* We don't know sign of `t', so be conservative and return false. */
14620 return false;
14623 /* Return true if T is known to be non-negative. If the return
14624 value is based on the assumption that signed overflow is undefined,
14625 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14626 *STRICT_OVERFLOW_P. */
14628 bool
14629 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14630 tree arg0, tree arg1, bool *strict_overflow_p)
14632 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14633 switch (DECL_FUNCTION_CODE (fndecl))
14635 CASE_FLT_FN (BUILT_IN_ACOS):
14636 CASE_FLT_FN (BUILT_IN_ACOSH):
14637 CASE_FLT_FN (BUILT_IN_CABS):
14638 CASE_FLT_FN (BUILT_IN_COSH):
14639 CASE_FLT_FN (BUILT_IN_ERFC):
14640 CASE_FLT_FN (BUILT_IN_EXP):
14641 CASE_FLT_FN (BUILT_IN_EXP10):
14642 CASE_FLT_FN (BUILT_IN_EXP2):
14643 CASE_FLT_FN (BUILT_IN_FABS):
14644 CASE_FLT_FN (BUILT_IN_FDIM):
14645 CASE_FLT_FN (BUILT_IN_HYPOT):
14646 CASE_FLT_FN (BUILT_IN_POW10):
14647 CASE_INT_FN (BUILT_IN_FFS):
14648 CASE_INT_FN (BUILT_IN_PARITY):
14649 CASE_INT_FN (BUILT_IN_POPCOUNT):
14650 case BUILT_IN_BSWAP32:
14651 case BUILT_IN_BSWAP64:
14652 /* Always true. */
14653 return true;
14655 CASE_FLT_FN (BUILT_IN_SQRT):
14656 /* sqrt(-0.0) is -0.0. */
14657 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14658 return true;
14659 return tree_expr_nonnegative_warnv_p (arg0,
14660 strict_overflow_p);
14662 CASE_FLT_FN (BUILT_IN_ASINH):
14663 CASE_FLT_FN (BUILT_IN_ATAN):
14664 CASE_FLT_FN (BUILT_IN_ATANH):
14665 CASE_FLT_FN (BUILT_IN_CBRT):
14666 CASE_FLT_FN (BUILT_IN_CEIL):
14667 CASE_FLT_FN (BUILT_IN_ERF):
14668 CASE_FLT_FN (BUILT_IN_EXPM1):
14669 CASE_FLT_FN (BUILT_IN_FLOOR):
14670 CASE_FLT_FN (BUILT_IN_FMOD):
14671 CASE_FLT_FN (BUILT_IN_FREXP):
14672 CASE_FLT_FN (BUILT_IN_LCEIL):
14673 CASE_FLT_FN (BUILT_IN_LDEXP):
14674 CASE_FLT_FN (BUILT_IN_LFLOOR):
14675 CASE_FLT_FN (BUILT_IN_LLCEIL):
14676 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14677 CASE_FLT_FN (BUILT_IN_LLRINT):
14678 CASE_FLT_FN (BUILT_IN_LLROUND):
14679 CASE_FLT_FN (BUILT_IN_LRINT):
14680 CASE_FLT_FN (BUILT_IN_LROUND):
14681 CASE_FLT_FN (BUILT_IN_MODF):
14682 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14683 CASE_FLT_FN (BUILT_IN_RINT):
14684 CASE_FLT_FN (BUILT_IN_ROUND):
14685 CASE_FLT_FN (BUILT_IN_SCALB):
14686 CASE_FLT_FN (BUILT_IN_SCALBLN):
14687 CASE_FLT_FN (BUILT_IN_SCALBN):
14688 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14689 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14690 CASE_FLT_FN (BUILT_IN_SINH):
14691 CASE_FLT_FN (BUILT_IN_TANH):
14692 CASE_FLT_FN (BUILT_IN_TRUNC):
14693 /* True if the 1st argument is nonnegative. */
14694 return tree_expr_nonnegative_warnv_p (arg0,
14695 strict_overflow_p);
14697 CASE_FLT_FN (BUILT_IN_FMAX):
14698 /* True if the 1st OR 2nd arguments are nonnegative. */
14699 return (tree_expr_nonnegative_warnv_p (arg0,
14700 strict_overflow_p)
14701 || (tree_expr_nonnegative_warnv_p (arg1,
14702 strict_overflow_p)));
14704 CASE_FLT_FN (BUILT_IN_FMIN):
14705 /* True if the 1st AND 2nd arguments are nonnegative. */
14706 return (tree_expr_nonnegative_warnv_p (arg0,
14707 strict_overflow_p)
14708 && (tree_expr_nonnegative_warnv_p (arg1,
14709 strict_overflow_p)));
14711 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14712 /* True if the 2nd argument is nonnegative. */
14713 return tree_expr_nonnegative_warnv_p (arg1,
14714 strict_overflow_p);
14716 CASE_FLT_FN (BUILT_IN_POWI):
14717 /* True if the 1st argument is nonnegative or the second
14718 argument is an even integer. */
14719 if (TREE_CODE (arg1) == INTEGER_CST
14720 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14721 return true;
14722 return tree_expr_nonnegative_warnv_p (arg0,
14723 strict_overflow_p);
14725 CASE_FLT_FN (BUILT_IN_POW):
14726 /* True if the 1st argument is nonnegative or the second
14727 argument is an even integer valued real. */
14728 if (TREE_CODE (arg1) == REAL_CST)
14730 REAL_VALUE_TYPE c;
14731 HOST_WIDE_INT n;
14733 c = TREE_REAL_CST (arg1);
14734 n = real_to_integer (&c);
14735 if ((n & 1) == 0)
14737 REAL_VALUE_TYPE cint;
14738 real_from_integer (&cint, VOIDmode, n,
14739 n < 0 ? -1 : 0, 0);
14740 if (real_identical (&c, &cint))
14741 return true;
14744 return tree_expr_nonnegative_warnv_p (arg0,
14745 strict_overflow_p);
14747 default:
14748 break;
14750 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14751 type);
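/* Example (illustrative): pow (x, 2.0) is recognized as non-negative
   because 2.0 is an even integer-valued REAL_CST: real_to_integer
   yields n == 2, (n & 1) == 0, and converting 2 back to a real
   compares identical to the original constant.  pow (x, 3.0) instead
   falls back to asking whether X itself is non-negative.  */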
14754 /* Return true if T is known to be non-negative. If the return
14755 value is based on the assumption that signed overflow is undefined,
14756 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14757 *STRICT_OVERFLOW_P. */
14759 bool
14760 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14762 enum tree_code code = TREE_CODE (t);
14763 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14764 return true;
14766 switch (code)
14768 case TARGET_EXPR:
14770 tree temp = TARGET_EXPR_SLOT (t);
14771 t = TARGET_EXPR_INITIAL (t);
14773 /* If the initializer is non-void, then it's a normal expression
14774 that will be assigned to the slot. */
14775 if (!VOID_TYPE_P (t))
14776 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14778 /* Otherwise, the initializer sets the slot in some way. One common
14779 way is an assignment statement at the end of the initializer. */
14780 while (1)
14782 if (TREE_CODE (t) == BIND_EXPR)
14783 t = expr_last (BIND_EXPR_BODY (t));
14784 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14785 || TREE_CODE (t) == TRY_CATCH_EXPR)
14786 t = expr_last (TREE_OPERAND (t, 0));
14787 else if (TREE_CODE (t) == STATEMENT_LIST)
14788 t = expr_last (t);
14789 else
14790 break;
14792 if (TREE_CODE (t) == MODIFY_EXPR
14793 && TREE_OPERAND (t, 0) == temp)
14794 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14795 strict_overflow_p);
14797 return false;
14800 case CALL_EXPR:
14802 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14803 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14805 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14806 get_callee_fndecl (t),
14807 arg0,
14808 arg1,
14809 strict_overflow_p);
14811 case COMPOUND_EXPR:
14812 case MODIFY_EXPR:
14813 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14814 strict_overflow_p);
14815 case BIND_EXPR:
14816 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14817 strict_overflow_p);
14818 case SAVE_EXPR:
14819 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14820 strict_overflow_p);
14822 default:
14823 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14824 TREE_TYPE (t));
14827 /* We don't know sign of `t', so be conservative and return false. */
14828 return false;
14831 /* Return true if T is known to be non-negative. If the return
14832 value is based on the assumption that signed overflow is undefined,
14833 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14834 *STRICT_OVERFLOW_P. */
14836 bool
14837 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14839 enum tree_code code;
14840 if (t == error_mark_node)
14841 return false;
14843 code = TREE_CODE (t);
14844 switch (TREE_CODE_CLASS (code))
14846 case tcc_binary:
14847 case tcc_comparison:
14848 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14849 TREE_TYPE (t),
14850 TREE_OPERAND (t, 0),
14851 TREE_OPERAND (t, 1),
14852 strict_overflow_p);
14854 case tcc_unary:
14855 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14856 TREE_TYPE (t),
14857 TREE_OPERAND (t, 0),
14858 strict_overflow_p);
14860 case tcc_constant:
14861 case tcc_declaration:
14862 case tcc_reference:
14863 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14865 default:
14866 break;
14869 switch (code)
14871 case TRUTH_AND_EXPR:
14872 case TRUTH_OR_EXPR:
14873 case TRUTH_XOR_EXPR:
14874 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14875 TREE_TYPE (t),
14876 TREE_OPERAND (t, 0),
14877 TREE_OPERAND (t, 1),
14878 strict_overflow_p);
14879 case TRUTH_NOT_EXPR:
14880 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14881 TREE_TYPE (t),
14882 TREE_OPERAND (t, 0),
14883 strict_overflow_p);
14885 case COND_EXPR:
14886 case CONSTRUCTOR:
14887 case OBJ_TYPE_REF:
14888 case ASSERT_EXPR:
14889 case ADDR_EXPR:
14890 case WITH_SIZE_EXPR:
14891 case SSA_NAME:
14892 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14894 default:
14895 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14899 /* Return true if `t' is known to be non-negative. Handle warnings
14900 about undefined signed overflow. */
14902 bool
14903 tree_expr_nonnegative_p (tree t)
14905 bool ret, strict_overflow_p;
14907 strict_overflow_p = false;
14908 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14909 if (strict_overflow_p)
14910 fold_overflow_warning (("assuming signed overflow does not occur when "
14911 "determining that expression is always "
14912 "non-negative"),
14913 WARN_STRICT_OVERFLOW_MISC);
14914 return ret;
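/* The _warnv_p/_p split above is the usual pattern in this file: the
   worker records in *STRICT_OVERFLOW_P that its answer relies on
   undefined signed overflow, and the plain predicate turns that flag
   into a single -Wstrict-overflow diagnostic.  A caller that wants the
   raw answer without the warning can use the worker directly, e.g.

     bool sop = false;
     if (tree_expr_nonnegative_warnv_p (t, &sop) && !sop)
       ...   // non-negative without assuming undefined overflow
*/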
14918 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14919 For floating point we further ensure that T is not denormal.
14920 Similar logic is present in nonzero_address in rtlanal.c.
14922 If the return value is based on the assumption that signed overflow
14923 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14924 change *STRICT_OVERFLOW_P. */
14926 bool
14927 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14928 bool *strict_overflow_p)
14930 switch (code)
14932 case ABS_EXPR:
14933 return tree_expr_nonzero_warnv_p (op0,
14934 strict_overflow_p);
14936 case NOP_EXPR:
14938 tree inner_type = TREE_TYPE (op0);
14939 tree outer_type = type;
14941 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14942 && tree_expr_nonzero_warnv_p (op0,
14943 strict_overflow_p));
14945 break;
14947 case NON_LVALUE_EXPR:
14948 return tree_expr_nonzero_warnv_p (op0,
14949 strict_overflow_p);
14951 default:
14952 break;
14955 return false;
14958 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14959 For floating point we further ensure that T is not denormal.
14960 Similar logic is present in nonzero_address in rtlanal.c.
14962 If the return value is based on the assumption that signed overflow
14963 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14964 change *STRICT_OVERFLOW_P. */
14966 bool
14967 tree_binary_nonzero_warnv_p (enum tree_code code,
14968 tree type,
14969 tree op0,
14970 tree op1, bool *strict_overflow_p)
14972 bool sub_strict_overflow_p;
14973 switch (code)
14975 case POINTER_PLUS_EXPR:
14976 case PLUS_EXPR:
14977 if (TYPE_OVERFLOW_UNDEFINED (type))
14979 /* In the presence of negative values it is hard
14980 to say anything definite. */
14981 sub_strict_overflow_p = false;
14982 if (!tree_expr_nonnegative_warnv_p (op0,
14983 &sub_strict_overflow_p)
14984 || !tree_expr_nonnegative_warnv_p (op1,
14985 &sub_strict_overflow_p))
14986 return false;
14987 /* One of the operands must be positive and the other non-negative. */
14988 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14989 overflows, on a twos-complement machine the sum of two
14990 nonnegative numbers can never be zero. */
14991 return (tree_expr_nonzero_warnv_p (op0,
14992 strict_overflow_p)
14993 || tree_expr_nonzero_warnv_p (op1,
14994 strict_overflow_p));
14996 break;
14998 case MULT_EXPR:
14999 if (TYPE_OVERFLOW_UNDEFINED (type))
15001 if (tree_expr_nonzero_warnv_p (op0,
15002 strict_overflow_p)
15003 && tree_expr_nonzero_warnv_p (op1,
15004 strict_overflow_p))
15006 *strict_overflow_p = true;
15007 return true;
15010 break;
15012 case MIN_EXPR:
15013 sub_strict_overflow_p = false;
15014 if (tree_expr_nonzero_warnv_p (op0,
15015 &sub_strict_overflow_p)
15016 && tree_expr_nonzero_warnv_p (op1,
15017 &sub_strict_overflow_p))
15019 if (sub_strict_overflow_p)
15020 *strict_overflow_p = true;
15022 break;
15024 case MAX_EXPR:
15025 sub_strict_overflow_p = false;
15026 if (tree_expr_nonzero_warnv_p (op0,
15027 &sub_strict_overflow_p))
15029 if (sub_strict_overflow_p)
15030 *strict_overflow_p = true;
15032 /* When both operands are nonzero, then MAX must be too. */
15033 if (tree_expr_nonzero_warnv_p (op1,
15034 strict_overflow_p))
15035 return true;
15037 /* MAX where operand 0 is positive is positive. */
15038 return tree_expr_nonnegative_warnv_p (op0,
15039 strict_overflow_p);
15041 /* MAX where operand 1 is positive is positive. */
15042 else if (tree_expr_nonzero_warnv_p (op1,
15043 &sub_strict_overflow_p)
15044 && tree_expr_nonnegative_warnv_p (op1,
15045 &sub_strict_overflow_p))
15047 if (sub_strict_overflow_p)
15048 *strict_overflow_p = true;
15049 return true;
15051 break;
15053 case BIT_IOR_EXPR:
15054 return (tree_expr_nonzero_warnv_p (op1,
15055 strict_overflow_p)
15056 || tree_expr_nonzero_warnv_p (op0,
15057 strict_overflow_p));
15059 default:
15060 break;
15063 return false;
15066 /* Return true when T is an address and is known to be nonzero.
15067 For floating point we further ensure that T is not denormal.
15068 Similar logic is present in nonzero_address in rtlanal.c.
15070 If the return value is based on the assumption that signed overflow
15071 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15072 change *STRICT_OVERFLOW_P. */
15074 bool
15075 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15077 bool sub_strict_overflow_p;
15078 switch (TREE_CODE (t))
15080 case INTEGER_CST:
15081 return !integer_zerop (t);
15083 case ADDR_EXPR:
15085 tree base = TREE_OPERAND (t, 0);
15086 if (!DECL_P (base))
15087 base = get_base_address (base);
15089 if (!base)
15090 return false;
15092 /* Weak declarations may link to NULL. Other things may also be NULL
15093 so protect with -fdelete-null-pointer-checks; but this does not
15094 apply to variables allocated on the stack. */
15095 if (DECL_P (base)
15096 && (flag_delete_null_pointer_checks
15097 || (DECL_CONTEXT (base)
15098 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15099 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15100 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15102 /* Constants are never weak. */
15103 if (CONSTANT_CLASS_P (base))
15104 return true;
15106 return false;
15109 case COND_EXPR:
15110 sub_strict_overflow_p = false;
15111 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15112 &sub_strict_overflow_p)
15113 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15114 &sub_strict_overflow_p))
15116 if (sub_strict_overflow_p)
15117 *strict_overflow_p = true;
15118 return true;
15120 break;
15122 default:
15123 break;
15125 return false;
15128 /* Return true when T is an address and is known to be nonzero.
15129 For floating point we further ensure that T is not denormal.
15130 Similar logic is present in nonzero_address in rtlanal.c.
15132 If the return value is based on the assumption that signed overflow
15133 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15134 change *STRICT_OVERFLOW_P. */
15136 bool
15137 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15139 tree type = TREE_TYPE (t);
15140 enum tree_code code;
15142 /* Doing something useful for floating point would need more work. */
15143 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15144 return false;
15146 code = TREE_CODE (t);
15147 switch (TREE_CODE_CLASS (code))
15149 case tcc_unary:
15150 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15151 strict_overflow_p);
15152 case tcc_binary:
15153 case tcc_comparison:
15154 return tree_binary_nonzero_warnv_p (code, type,
15155 TREE_OPERAND (t, 0),
15156 TREE_OPERAND (t, 1),
15157 strict_overflow_p);
15158 case tcc_constant:
15159 case tcc_declaration:
15160 case tcc_reference:
15161 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15163 default:
15164 break;
15167 switch (code)
15169 case TRUTH_NOT_EXPR:
15170 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15171 strict_overflow_p);
15173 case TRUTH_AND_EXPR:
15174 case TRUTH_OR_EXPR:
15175 case TRUTH_XOR_EXPR:
15176 return tree_binary_nonzero_warnv_p (code, type,
15177 TREE_OPERAND (t, 0),
15178 TREE_OPERAND (t, 1),
15179 strict_overflow_p);
15181 case COND_EXPR:
15182 case CONSTRUCTOR:
15183 case OBJ_TYPE_REF:
15184 case ASSERT_EXPR:
15185 case ADDR_EXPR:
15186 case WITH_SIZE_EXPR:
15187 case SSA_NAME:
15188 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15190 case COMPOUND_EXPR:
15191 case MODIFY_EXPR:
15192 case BIND_EXPR:
15193 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15194 strict_overflow_p);
15196 case SAVE_EXPR:
15197 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15198 strict_overflow_p);
15200 case CALL_EXPR:
15201 return alloca_call_p (t);
15203 default:
15204 break;
15206 return false;
15209 /* Return true when T is an address and is known to be nonzero.
15210 Handle warnings about undefined signed overflow. */
15212 bool
15213 tree_expr_nonzero_p (tree t)
15215 bool ret, strict_overflow_p;
15217 strict_overflow_p = false;
15218 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15219 if (strict_overflow_p)
15220 fold_overflow_warning (("assuming signed overflow does not occur when "
15221 "determining that expression is always "
15222 "non-zero"),
15223 WARN_STRICT_OVERFLOW_MISC);
15224 return ret;
15227 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15228 attempt to fold the expression to a constant without modifying TYPE,
15229 OP0 or OP1.
15231 If the expression could be simplified to a constant, then return
15232 the constant. If the expression would not be simplified to a
15233 constant, then return NULL_TREE. */
15235 tree
15236 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15238 tree tem = fold_binary (code, type, op0, op1);
15239 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15242 /* Given the components of a unary expression CODE, TYPE and OP0,
15243 attempt to fold the expression to a constant without modifying
15244 TYPE or OP0.
15246 If the expression could be simplified to a constant, then return
15247 the constant. If the expression would not be simplified to a
15248 constant, then return NULL_TREE. */
15250 tree
15251 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15253 tree tem = fold_unary (code, type, op0);
15254 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15257 /* If EXP represents referencing an element in a constant string
15258 (either via pointer arithmetic or array indexing), return the
15259 tree representing the value accessed, otherwise return NULL. */
15261 tree
15262 fold_read_from_constant_string (tree exp)
15264 if ((TREE_CODE (exp) == INDIRECT_REF
15265 || TREE_CODE (exp) == ARRAY_REF)
15266 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15268 tree exp1 = TREE_OPERAND (exp, 0);
15269 tree index;
15270 tree string;
15271 location_t loc = EXPR_LOCATION (exp);
15273 if (TREE_CODE (exp) == INDIRECT_REF)
15274 string = string_constant (exp1, &index);
15275 else
15277 tree low_bound = array_ref_low_bound (exp);
15278 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15280 /* Optimize the special-case of a zero lower bound.
15282 We convert the low_bound to sizetype to avoid some problems
15283 with constant folding. (E.g. suppose the lower bound is 1,
15284 and its mode is QI. Without the conversion, (ARRAY
15285 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15286 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15287 if (! integer_zerop (low_bound))
15288 index = size_diffop_loc (loc, index,
15289 fold_convert_loc (loc, sizetype, low_bound));
15291 string = exp1;
15294 if (string
15295 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15296 && TREE_CODE (string) == STRING_CST
15297 && TREE_CODE (index) == INTEGER_CST
15298 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15299 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15300 == MODE_INT)
15301 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15302 return build_int_cst_type (TREE_TYPE (exp),
15303 (TREE_STRING_POINTER (string)
15304 [TREE_INT_CST_LOW (index)]));
15306 return NULL;
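/* For example, the ARRAY_REF in

     char c = "abc"[1];

   satisfies every check above (constant string, constant in-range
   index, single-byte integer element type) and folds to the character
   constant 'b'.  An out-of-range or non-constant index, or a wider
   element type, makes the function return NULL and leaves the read
   alone.  */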
15309 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15310 an integer constant, real, or fixed-point constant.
15312 TYPE is the type of the result. */
15314 static tree
15315 fold_negate_const (tree arg0, tree type)
15317 tree t = NULL_TREE;
15319 switch (TREE_CODE (arg0))
15321 case INTEGER_CST:
15323 double_int val = tree_to_double_int (arg0);
15324 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15326 t = force_fit_type_double (type, val, 1,
15327 (overflow | TREE_OVERFLOW (arg0))
15328 && !TYPE_UNSIGNED (type));
15329 break;
15332 case REAL_CST:
15333 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15334 break;
15336 case FIXED_CST:
15338 FIXED_VALUE_TYPE f;
15339 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15340 &(TREE_FIXED_CST (arg0)), NULL,
15341 TYPE_SATURATING (type));
15342 t = build_fixed (type, f);
15343 /* Propagate overflow flags. */
15344 if (overflow_p | TREE_OVERFLOW (arg0))
15345 TREE_OVERFLOW (t) = 1;
15346 break;
15349 default:
15350 gcc_unreachable ();
15353 return t;
15356 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15357 an integer constant or real constant.
15359 TYPE is the type of the result. */
15361 tree
15362 fold_abs_const (tree arg0, tree type)
15364 tree t = NULL_TREE;
15366 switch (TREE_CODE (arg0))
15368 case INTEGER_CST:
15370 double_int val = tree_to_double_int (arg0);
15372 /* If the value is unsigned or non-negative, then the absolute value
15373 is the same as the ordinary value. */
15374 if (TYPE_UNSIGNED (type)
15375 || !double_int_negative_p (val))
15376 t = arg0;
15378 /* If the value is negative, then the absolute value is
15379 its negation. */
15380 else
15382 int overflow;
15384 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15385 t = force_fit_type_double (type, val, -1,
15386 overflow | TREE_OVERFLOW (arg0));
15389 break;
15391 case REAL_CST:
15392 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15393 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15394 else
15395 t = arg0;
15396 break;
15398 default:
15399 gcc_unreachable ();
15402 return t;
15405 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15406 constant. TYPE is the type of the result. */
15408 static tree
15409 fold_not_const (const_tree arg0, tree type)
15411 double_int val;
15413 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15415 val = double_int_not (tree_to_double_int (arg0));
15416 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15419 /* Given CODE, a relational operator, the target type, TYPE and two
15420 constant operands OP0 and OP1, return the result of the
15421 relational operation. If the result is not a compile time
15422 constant, then return NULL_TREE. */
15424 static tree
15425 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15427 int result, invert;
15429 /* From here on, the only cases we handle are when the result is
15430 known to be a constant. */
15432 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15434 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15435 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15437 /* Handle the cases where either operand is a NaN. */
15438 if (real_isnan (c0) || real_isnan (c1))
15440 switch (code)
15442 case EQ_EXPR:
15443 case ORDERED_EXPR:
15444 result = 0;
15445 break;
15447 case NE_EXPR:
15448 case UNORDERED_EXPR:
15449 case UNLT_EXPR:
15450 case UNLE_EXPR:
15451 case UNGT_EXPR:
15452 case UNGE_EXPR:
15453 case UNEQ_EXPR:
15454 result = 1;
15455 break;
15457 case LT_EXPR:
15458 case LE_EXPR:
15459 case GT_EXPR:
15460 case GE_EXPR:
15461 case LTGT_EXPR:
15462 if (flag_trapping_math)
15463 return NULL_TREE;
15464 result = 0;
15465 break;
15467 default:
15468 gcc_unreachable ();
15471 return constant_boolean_node (result, type);
15474 return constant_boolean_node (real_compare (code, c0, c1), type);
15477 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15479 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15480 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15481 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15484 /* Handle equality/inequality of complex constants. */
15485 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15487 tree rcond = fold_relational_const (code, type,
15488 TREE_REALPART (op0),
15489 TREE_REALPART (op1));
15490 tree icond = fold_relational_const (code, type,
15491 TREE_IMAGPART (op0),
15492 TREE_IMAGPART (op1));
15493 if (code == EQ_EXPR)
15494 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15495 else if (code == NE_EXPR)
15496 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15497 else
15498 return NULL_TREE;
15501 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15503 To compute GT, swap the arguments and do LT.
15504 To compute GE, do LT and invert the result.
15505 To compute LE, swap the arguments, do LT and invert the result.
15506 To compute NE, do EQ and invert the result.
15508 Therefore, the code below must handle only EQ and LT. */
15510 if (code == LE_EXPR || code == GT_EXPR)
15512 tree tem = op0;
15513 op0 = op1;
15514 op1 = tem;
15515 code = swap_tree_comparison (code);
15518 /* Note that it is safe to invert for real values here because we
15519 have already handled the one case that it matters. */
15521 invert = 0;
15522 if (code == NE_EXPR || code == GE_EXPR)
15524 invert = 1;
15525 code = invert_tree_comparison (code, false);
15528 /* Compute a result for LT or EQ if args permit;
15529 otherwise return NULL_TREE. */
15530 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15532 if (code == EQ_EXPR)
15533 result = tree_int_cst_equal (op0, op1);
15534 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15535 result = INT_CST_LT_UNSIGNED (op0, op1);
15536 else
15537 result = INT_CST_LT (op0, op1);
15539 else
15540 return NULL_TREE;
15542 if (invert)
15543 result ^= 1;
15544 return constant_boolean_node (result, type);
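/* A standalone sketch of the reduction above (assumed names, not GCC
   code): every integer comparison is expressed through "<" and "==" by
   swapping operands and/or inverting the result:

     static int lt (long a, long b) { return a < b; }
     static int eq (long a, long b) { return a == b; }

     static int gt (long a, long b) { return lt (b, a); }   // swap
     static int le (long a, long b) { return !lt (b, a); }  // swap + invert
     static int ge (long a, long b) { return !lt (a, b); }  // invert
     static int ne (long a, long b) { return !eq (a, b); }  // invert

   The NaN cases are handled first because this inversion step is not
   valid for unordered floating-point operands.  */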
15547 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15548 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15549 itself. */
15551 tree
15552 fold_build_cleanup_point_expr (tree type, tree expr)
15554 /* If the expression does not have side effects then we don't have to wrap
15555 it with a cleanup point expression. */
15556 if (!TREE_SIDE_EFFECTS (expr))
15557 return expr;
15559 /* If the expression is a return, check whether the expression inside the
15560 return, or the right-hand side of the modify expression inside the
15561 return, has side effects. If either of them has none, we don't need to
15562 wrap the expression in a cleanup point expression. Note we don't check
15563 the left-hand side of the modify because it should always be a return decl. */
15564 if (TREE_CODE (expr) == RETURN_EXPR)
15566 tree op = TREE_OPERAND (expr, 0);
15567 if (!op || !TREE_SIDE_EFFECTS (op))
15568 return expr;
15569 op = TREE_OPERAND (op, 1);
15570 if (!TREE_SIDE_EFFECTS (op))
15571 return expr;
15574 return build1 (CLEANUP_POINT_EXPR, type, expr);
15577 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15578 of an indirection through OP0, or NULL_TREE if no simplification is
15579 possible. */
15581 tree
15582 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15584 tree sub = op0;
15585 tree subtype;
15587 STRIP_NOPS (sub);
15588 subtype = TREE_TYPE (sub);
15589 if (!POINTER_TYPE_P (subtype))
15590 return NULL_TREE;
15592 if (TREE_CODE (sub) == ADDR_EXPR)
15594 tree op = TREE_OPERAND (sub, 0);
15595 tree optype = TREE_TYPE (op);
15596 /* *&CONST_DECL -> to the value of the const decl. */
15597 if (TREE_CODE (op) == CONST_DECL)
15598 return DECL_INITIAL (op);
15599 /* *&p => p; make sure to handle *&"str"[cst] here. */
15600 if (type == optype)
15602 tree fop = fold_read_from_constant_string (op);
15603 if (fop)
15604 return fop;
15605 else
15606 return op;
15608 /* *(foo *)&fooarray => fooarray[0] */
15609 else if (TREE_CODE (optype) == ARRAY_TYPE
15610 && type == TREE_TYPE (optype)
15611 && (!in_gimple_form
15612 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15614 tree type_domain = TYPE_DOMAIN (optype);
15615 tree min_val = size_zero_node;
15616 if (type_domain && TYPE_MIN_VALUE (type_domain))
15617 min_val = TYPE_MIN_VALUE (type_domain);
15618 if (in_gimple_form
15619 && TREE_CODE (min_val) != INTEGER_CST)
15620 return NULL_TREE;
15621 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15622 NULL_TREE, NULL_TREE);
15624 /* *(foo *)&complexfoo => __real__ complexfoo */
15625 else if (TREE_CODE (optype) == COMPLEX_TYPE
15626 && type == TREE_TYPE (optype))
15627 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15628 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15629 else if (TREE_CODE (optype) == VECTOR_TYPE
15630 && type == TREE_TYPE (optype))
15632 tree part_width = TYPE_SIZE (type);
15633 tree index = bitsize_int (0);
15634 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15638 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15639 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15641 tree op00 = TREE_OPERAND (sub, 0);
15642 tree op01 = TREE_OPERAND (sub, 1);
15644 STRIP_NOPS (op00);
15645 if (TREE_CODE (op00) == ADDR_EXPR)
15647 tree op00type;
15648 op00 = TREE_OPERAND (op00, 0);
15649 op00type = TREE_TYPE (op00);
15651 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15652 if (TREE_CODE (op00type) == VECTOR_TYPE
15653 && type == TREE_TYPE (op00type))
15655 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15656 tree part_width = TYPE_SIZE (type);
15657 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15658 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15659 tree index = bitsize_int (indexi);
15661 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
15662 return fold_build3_loc (loc,
15663 BIT_FIELD_REF, type, op00,
15664 part_width, index);
15667 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15668 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15669 && type == TREE_TYPE (op00type))
15671 tree size = TYPE_SIZE_UNIT (type);
15672 if (tree_int_cst_equal (size, op01))
15673 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15675 /* ((foo *)&fooarray)[1] => fooarray[1] */
15676 else if (TREE_CODE (op00type) == ARRAY_TYPE
15677 && type == TREE_TYPE (op00type))
15679 tree type_domain = TYPE_DOMAIN (op00type);
15680 tree min_val = size_zero_node;
15681 if (type_domain && TYPE_MIN_VALUE (type_domain))
15682 min_val = TYPE_MIN_VALUE (type_domain);
15683 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15684 TYPE_SIZE_UNIT (type));
15685 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15686 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15687 NULL_TREE, NULL_TREE);
15692 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15693 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15694 && type == TREE_TYPE (TREE_TYPE (subtype))
15695 && (!in_gimple_form
15696 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15698 tree type_domain;
15699 tree min_val = size_zero_node;
15700 sub = build_fold_indirect_ref_loc (loc, sub);
15701 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15702 if (type_domain && TYPE_MIN_VALUE (type_domain))
15703 min_val = TYPE_MIN_VALUE (type_domain);
15704 if (in_gimple_form
15705 && TREE_CODE (min_val) != INTEGER_CST)
15706 return NULL_TREE;
15707 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15708 NULL_TREE);
15711 return NULL_TREE;
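/* Source-level examples of the transformations above (illustrative):

     int a[4];               // *(int *)&a        => a[0]
     _Complex double z;      // *(double *)&z     => __real__ z
                             // ((double *)&z)[1] => __imag__ z
     int (*pa)[4] = &a;      // *(int *)pa        => (*pa)[0]

   In each case the pointed-to type must match the accessed type
   exactly; anything else returns NULL_TREE and the indirection is
   kept as written.  */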
15714 /* Builds an expression for an indirection through T, simplifying some
15715 cases. */
15717 tree
15718 build_fold_indirect_ref_loc (location_t loc, tree t)
15720 tree type = TREE_TYPE (TREE_TYPE (t));
15721 tree sub = fold_indirect_ref_1 (loc, type, t);
15723 if (sub)
15724 return sub;
15726 return build1_loc (loc, INDIRECT_REF, type, t);
15729 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15731 tree
15732 fold_indirect_ref_loc (location_t loc, tree t)
15734 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15736 if (sub)
15737 return sub;
15738 else
15739 return t;
15742 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15743 whose result is ignored. The type of the returned tree need not be
15744 the same as the original expression. */
15746 tree
15747 fold_ignored_result (tree t)
15749 if (!TREE_SIDE_EFFECTS (t))
15750 return integer_zero_node;
15752 for (;;)
15753 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15755 case tcc_unary:
15756 t = TREE_OPERAND (t, 0);
15757 break;
15759 case tcc_binary:
15760 case tcc_comparison:
15761 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15762 t = TREE_OPERAND (t, 0);
15763 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15764 t = TREE_OPERAND (t, 1);
15765 else
15766 return t;
15767 break;
15769 case tcc_expression:
15770 switch (TREE_CODE (t))
15772 case COMPOUND_EXPR:
15773 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15774 return t;
15775 t = TREE_OPERAND (t, 0);
15776 break;
15778 case COND_EXPR:
15779 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15780 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15781 return t;
15782 t = TREE_OPERAND (t, 0);
15783 break;
15785 default:
15786 return t;
15788 break;
15790 default:
15791 return t;
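/* Illustrative examples: with its result ignored, (x += 1, y + z)
   strips to x += 1 (operand 1 of the COMPOUND_EXPR has no side
   effects), -f (x) strips to f (x) (the tcc_unary case), and a < g (x)
   strips to g (x).  A COND_EXPR whose arms have side effects is kept
   whole, since dropping the condition would change which arm runs.  */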
15795 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15796 This can only be applied to objects of a sizetype. */
15798 tree
15799 round_up_loc (location_t loc, tree value, int divisor)
15801 tree div = NULL_TREE;
15803 gcc_assert (divisor > 0);
15804 if (divisor == 1)
15805 return value;
15807 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15808 have to do anything. Only do this when we are not given a constant,
15809 because in that case this check is more expensive than just
15810 doing the rounding itself. */
15811 if (TREE_CODE (value) != INTEGER_CST)
15813 div = build_int_cst (TREE_TYPE (value), divisor);
15815 if (multiple_of_p (TREE_TYPE (value), value, div))
15816 return value;
15819 /* If divisor is a power of two, simplify this to bit manipulation. */
15820 if (divisor == (divisor & -divisor))
15822 if (TREE_CODE (value) == INTEGER_CST)
15824 double_int val = tree_to_double_int (value);
15825 bool overflow_p;
15827 if ((val.low & (divisor - 1)) == 0)
15828 return value;
15830 overflow_p = TREE_OVERFLOW (value);
15831 val.low &= ~(divisor - 1);
15832 val.low += divisor;
15833 if (val.low == 0)
15835 val.high++;
15836 if (val.high == 0)
15837 overflow_p = true;
15840 return force_fit_type_double (TREE_TYPE (value), val,
15841 -1, overflow_p);
15843 else
15845 tree t;
15847 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15848 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15849 t = build_int_cst (TREE_TYPE (value), -divisor);
15850 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15853 else
15855 if (!div)
15856 div = build_int_cst (TREE_TYPE (value), divisor);
15857 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15858 value = size_binop_loc (loc, MULT_EXPR, value, div);
15861 return value;
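
/* Editor's illustration, not part of the original file: for a
   power-of-two DIVISOR the non-constant branch above is the classic
   add-then-mask trick.  A minimal host-side sketch:  */

static unsigned HOST_WIDE_INT
round_up_example (unsigned HOST_WIDE_INT value, unsigned HOST_WIDE_INT divisor)
{
  /* Assumes DIVISOR is a power of two, as guaranteed by the
     divisor == (divisor & -divisor) test above.
     E.g. round_up_example (21, 8) == 24 and round_up_example (24, 8) == 24.  */
  return (value + divisor - 1) & -divisor;
}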
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant; for a constant, the check is more expensive than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
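
/* Editor's illustration, not part of the original file: the rounding-down
   counterpart of the trick above is a single mask,

       value & -divisor        e.g. 21 & -8 == 16

   while a non-power-of-two divisor falls back to
   (value / divisor) * divisor via FLOOR_DIV_EXPR and MULT_EXPR.  */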
/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing the
   constant bit offset in *PBITPOS and the variable part in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
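
/* Editor's illustration, not part of the original file.  Assuming S is a
   struct whose member A is an int array, the decomposition behaves like:

       exp = &s.a[i]
	 =>  core     = &s
	     *pbitpos = the constant bit offset of A within S
	     *poffset = i * sizeof (int) in bytes (the variable part,
			or NULL_TREE when the offset is fully constant)

   For any EXP that is not an ADDR_EXPR, the else branch returns EXP
   unchanged with *pbitpos == 0 and *poffset == NULL_TREE.  */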
/* Return true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
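
/* Editor's illustration, not part of the original file: given

       double a[8];
       e1 = &a[5], e2 = &a[2],

   both addresses share the core &a and their constant parts differ by
   3 * sizeof (double) bytes, so *diff is set to 24 (with 8-byte doubles)
   and the function returns true.  For e1 = &a[i], e2 = &a[0], only one
   offset is non-constant, so it returns false.  */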
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign call and return its first argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
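
/* Editor's illustration, not part of the original file.  With
   sign-dependent rounding not honored, the cases above perform strips
   such as:

       -x * -y               =>  x * y
       ABS_EXPR <x> * y      =>  x * y
       copysign (x, y)       =>  x        (Y kept only for side effects)
       sin (-x)              =>  sin (x)  (negate_mathfn_p holds for sin)

   Callers invoke this only where the sign of the result cannot matter,
   e.g. when folding cos (X), whose value is independent of X's sign.  */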