/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
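
/* For illustration: the encoding uses one bit each for "less", "equal",
   "greater" and "unordered", so combining two comparisons is plain
   bitwise arithmetic, e.g.

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_GE & COMPCODE_LE == COMPCODE_EQ   (6 & 3 == 2)

   which is what makes folding (a < b || a == b) into a <= b cheap.  */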
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
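
/* Worked example (4-bit values, for illustration only): adding 6 and 6
   gives 0b1100, i.e. -4 after wraparound.  Here a ^ b is 0, so ~(a ^ b)
   has the sign bit set, and a ^ sum also has the sign bit set, making
   the whole expression negative -- overflow detected.  Adding 6 and -6
   leaves a ^ b with the sign bit set, so ~(a ^ b) clears it and the
   macro yields zero.  */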
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
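
/* For illustration: given INTEGER_CSTs for 12 and 4,
   div_if_zero_remainder (TRUNC_DIV_EXPR, twelve, four) yields the
   constant 3, while passing 13 and 4 yields NULL_TREE because the
   remainder is nonzero.  */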
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
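
/* Illustrative use of the deferral machinery above (hypothetical caller,
   for exposition only):

     fold_defer_overflow_warnings ();
     folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Passing 0 for CODE means "use whatever level was deferred".  */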
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
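
/* For illustration: in a signed 8-bit type the only value this rejects
   is -128 (0x80), since -(-128) is not representable; after masking,
   VAL equals 1 << 7 exactly for that input.  */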
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
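
/* For illustration: negate_expr_p returns true for (a - b) in a float
   type when -fno-signed-zeros is in effect, because fold_negate_expr
   can then rewrite -(a - b) as (b - a) without changing any result.  */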
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
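
/* For illustration: splitting (x - 4) with CODE == PLUS_EXPR and
   NEGATE_P == 0 returns x as the variable part, leaves *CONP null,
   and stores the constant 4 in *MINUS_LITP, since the literal was
   subtracted and so belongs in the "minus" slot.  */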
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
/* FIXME(crowl) Remove this code if the replacement works.
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
		  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
*/
      res = op1.add_with_sign (-op2, false, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
	 to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &res.low, &res.high,
				       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &tmp.low, &tmp.high,
				       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
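
/* For illustration: int_const_binop (PLUS_EXPR, <int 7>, <int 35>)
   yields the INTEGER_CST 42 in the type of the first operand; overflow
   in a signed type is recorded via TREE_OVERFLOW on the result rather
   than by returning NULL_TREE.  */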
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
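
/* For illustration: with sizetype constants 4 and 12, size_diffop_loc
   computes 12 - 4 in the unsigned sizetype first and only then converts,
   building the ssizetype result as 0 - (ssizetype) 8, i.e. -8, so no
   unsigned wraparound value ever leaks into the signed type.  */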
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If any fractional bits were nonzero, add 1 to TEMP to do so.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
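
/* For illustration: converting the fixed-point value -1.5 to int first
   arithmetic-shifts to -2 (the shift rounds toward negative infinity);
   the shifted-back value then differs from the original, so 1 is added
   and the result is -1, i.e. rounding toward zero.  */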
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating-point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating-point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  tem = protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2009 /* Return false if expr can be assumed not to be an lvalue, true
2010 otherwise. */
2012 static bool
2013 maybe_lvalue_p (const_tree x)
2015 /* We only need to wrap lvalue tree codes. */
2016 switch (TREE_CODE (x))
2018 case VAR_DECL:
2019 case PARM_DECL:
2020 case RESULT_DECL:
2021 case LABEL_DECL:
2022 case FUNCTION_DECL:
2023 case SSA_NAME:
2025 case COMPONENT_REF:
2026 case MEM_REF:
2027 case INDIRECT_REF:
2028 case ARRAY_REF:
2029 case ARRAY_RANGE_REF:
2030 case BIT_FIELD_REF:
2031 case OBJ_TYPE_REF:
2033 case REALPART_EXPR:
2034 case IMAGPART_EXPR:
2035 case PREINCREMENT_EXPR:
2036 case PREDECREMENT_EXPR:
2037 case SAVE_EXPR:
2038 case TRY_CATCH_EXPR:
2039 case WITH_CLEANUP_EXPR:
2040 case COMPOUND_EXPR:
2041 case MODIFY_EXPR:
2042 case TARGET_EXPR:
2043 case COND_EXPR:
2044 case BIND_EXPR:
2045 break;
2047 default:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2050 break;
2051 return false;
2054 return true;
2057 /* Return an expr equal to X but certainly not valid as an lvalue. */
2059 tree
2060 non_lvalue_loc (location_t loc, tree x)
2062 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2063 us. */
2064 if (in_gimple_form)
2065 return x;
2067 if (! maybe_lvalue_p (x))
2068 return x;
2069 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2072 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2073 Zero means allow extended lvalues. */
2075 int pedantic_lvalues;
2077 /* When pedantic, return an expr equal to X but certainly not valid as a
2078 pedantic lvalue. Otherwise, return X. */
2080 static tree
2081 pedantic_non_lvalue_loc (location_t loc, tree x)
2083 if (pedantic_lvalues)
2084 return non_lvalue_loc (loc, x);
2086 return protected_set_expr_location_unshare (x, loc);
2089 /* Given a tree comparison code, return the code that is the logical inverse.
2090 It is generally not safe to do this for floating-point comparisons, except
2091 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2092 ERROR_MARK in this case. */
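/* For instance, when NaNs are honored, LT_EXPR inverts to UNGE_EXPR
   rather than GE_EXPR: if either operand is NaN, x < y and x >= y are
   both false, so only the unordered form is a true negation.  EQ_EXPR
   and NE_EXPR remain safe to invert because each is exactly the
   other's negation even in the presence of NaNs.  */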
2094 enum tree_code
2095 invert_tree_comparison (enum tree_code code, bool honor_nans)
2097 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2098 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2099 return ERROR_MARK;
2101 switch (code)
2103 case EQ_EXPR:
2104 return NE_EXPR;
2105 case NE_EXPR:
2106 return EQ_EXPR;
2107 case GT_EXPR:
2108 return honor_nans ? UNLE_EXPR : LE_EXPR;
2109 case GE_EXPR:
2110 return honor_nans ? UNLT_EXPR : LT_EXPR;
2111 case LT_EXPR:
2112 return honor_nans ? UNGE_EXPR : GE_EXPR;
2113 case LE_EXPR:
2114 return honor_nans ? UNGT_EXPR : GT_EXPR;
2115 case LTGT_EXPR:
2116 return UNEQ_EXPR;
2117 case UNEQ_EXPR:
2118 return LTGT_EXPR;
2119 case UNGT_EXPR:
2120 return LE_EXPR;
2121 case UNGE_EXPR:
2122 return LT_EXPR;
2123 case UNLT_EXPR:
2124 return GE_EXPR;
2125 case UNLE_EXPR:
2126 return GT_EXPR;
2127 case ORDERED_EXPR:
2128 return UNORDERED_EXPR;
2129 case UNORDERED_EXPR:
2130 return ORDERED_EXPR;
2131 default:
2132 gcc_unreachable ();
2136 /* Similar, but return the comparison that results if the operands are
2137 swapped. This is safe for floating-point. */
2139 enum tree_code
2140 swap_tree_comparison (enum tree_code code)
2142 switch (code)
2144 case EQ_EXPR:
2145 case NE_EXPR:
2146 case ORDERED_EXPR:
2147 case UNORDERED_EXPR:
2148 case LTGT_EXPR:
2149 case UNEQ_EXPR:
2150 return code;
2151 case GT_EXPR:
2152 return LT_EXPR;
2153 case GE_EXPR:
2154 return LE_EXPR;
2155 case LT_EXPR:
2156 return GT_EXPR;
2157 case LE_EXPR:
2158 return GE_EXPR;
2159 case UNGT_EXPR:
2160 return UNLT_EXPR;
2161 case UNGE_EXPR:
2162 return UNLE_EXPR;
2163 case UNLT_EXPR:
2164 return UNGT_EXPR;
2165 case UNLE_EXPR:
2166 return UNGE_EXPR;
2167 default:
2168 gcc_unreachable ();
2173 /* Convert a comparison tree code from an enum tree_code representation
2174 into a compcode bit-based encoding. This function is the inverse of
2175 compcode_to_comparison. */
2177 static enum comparison_code
2178 comparison_to_compcode (enum tree_code code)
2180 switch (code)
2182 case LT_EXPR:
2183 return COMPCODE_LT;
2184 case EQ_EXPR:
2185 return COMPCODE_EQ;
2186 case LE_EXPR:
2187 return COMPCODE_LE;
2188 case GT_EXPR:
2189 return COMPCODE_GT;
2190 case NE_EXPR:
2191 return COMPCODE_NE;
2192 case GE_EXPR:
2193 return COMPCODE_GE;
2194 case ORDERED_EXPR:
2195 return COMPCODE_ORD;
2196 case UNORDERED_EXPR:
2197 return COMPCODE_UNORD;
2198 case UNLT_EXPR:
2199 return COMPCODE_UNLT;
2200 case UNEQ_EXPR:
2201 return COMPCODE_UNEQ;
2202 case UNLE_EXPR:
2203 return COMPCODE_UNLE;
2204 case UNGT_EXPR:
2205 return COMPCODE_UNGT;
2206 case LTGT_EXPR:
2207 return COMPCODE_LTGT;
2208 case UNGE_EXPR:
2209 return COMPCODE_UNGE;
2210 default:
2211 gcc_unreachable ();
2215 /* Convert a compcode bit-based encoding of a comparison operator back
2216 to GCC's enum tree_code representation. This function is the
2217 inverse of comparison_to_compcode. */
2219 static enum tree_code
2220 compcode_to_comparison (enum comparison_code code)
2222 switch (code)
2224 case COMPCODE_LT:
2225 return LT_EXPR;
2226 case COMPCODE_EQ:
2227 return EQ_EXPR;
2228 case COMPCODE_LE:
2229 return LE_EXPR;
2230 case COMPCODE_GT:
2231 return GT_EXPR;
2232 case COMPCODE_NE:
2233 return NE_EXPR;
2234 case COMPCODE_GE:
2235 return GE_EXPR;
2236 case COMPCODE_ORD:
2237 return ORDERED_EXPR;
2238 case COMPCODE_UNORD:
2239 return UNORDERED_EXPR;
2240 case COMPCODE_UNLT:
2241 return UNLT_EXPR;
2242 case COMPCODE_UNEQ:
2243 return UNEQ_EXPR;
2244 case COMPCODE_UNLE:
2245 return UNLE_EXPR;
2246 case COMPCODE_UNGT:
2247 return UNGT_EXPR;
2248 case COMPCODE_LTGT:
2249 return LTGT_EXPR;
2250 case COMPCODE_UNGE:
2251 return UNGE_EXPR;
2252 default:
2253 gcc_unreachable ();
2257 /* Return a tree for the comparison which is the combination of
2258 doing the AND or OR (depending on CODE) of the two operations LCODE
2259 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2260 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2261 if this makes the transformation invalid. */
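/* As a sketch of the bit encoding at work: COMPCODE_LT and
   COMPCODE_EQ are disjoint bit patterns, so for (x < y) && (x == y)
   the AND of the two compcodes is COMPCODE_FALSE and the result folds
   to constant false, while for (x < y) || (x == y) the OR yields
   COMPCODE_LE, i.e. x <= y.  */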
2263 tree
2264 combine_comparisons (location_t loc,
2265 enum tree_code code, enum tree_code lcode,
2266 enum tree_code rcode, tree truth_type,
2267 tree ll_arg, tree lr_arg)
2269 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2270 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2271 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2272 int compcode;
2274 switch (code)
2276 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2277 compcode = lcompcode & rcompcode;
2278 break;
2280 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2281 compcode = lcompcode | rcompcode;
2282 break;
2284 default:
2285 return NULL_TREE;
2288 if (!honor_nans)
2290 /* Eliminate unordered comparisons, as well as LTGT and ORD
2291 which are not used unless the mode has NaNs. */
2292 compcode &= ~COMPCODE_UNORD;
2293 if (compcode == COMPCODE_LTGT)
2294 compcode = COMPCODE_NE;
2295 else if (compcode == COMPCODE_ORD)
2296 compcode = COMPCODE_TRUE;
2298 else if (flag_trapping_math)
2300 /* Check that the original operation and the optimized ones will trap
2301 under the same condition. */
2302 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2303 && (lcompcode != COMPCODE_EQ)
2304 && (lcompcode != COMPCODE_ORD);
2305 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2306 && (rcompcode != COMPCODE_EQ)
2307 && (rcompcode != COMPCODE_ORD);
2308 bool trap = (compcode & COMPCODE_UNORD) == 0
2309 && (compcode != COMPCODE_EQ)
2310 && (compcode != COMPCODE_ORD);
2312 /* In a short-circuited boolean expression the LHS might be
2313 such that the RHS, if evaluated, will never trap. For
2314 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2315 if neither x nor y is NaN. (This is a mixed blessing: for
2316 example, the expression above will never trap, hence
2317 optimizing it to x < y would be invalid). */
2318 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2319 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2320 rtrap = false;
2322 /* If the comparison was short-circuited, and only the RHS
2323 trapped, we may now generate a spurious trap. */
2324 if (rtrap && !ltrap
2325 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2326 return NULL_TREE;
2328 /* If we changed the conditions that cause a trap, we lose. */
2329 if ((ltrap || rtrap) != trap)
2330 return NULL_TREE;
2333 if (compcode == COMPCODE_TRUE)
2334 return constant_boolean_node (true, truth_type);
2335 else if (compcode == COMPCODE_FALSE)
2336 return constant_boolean_node (false, truth_type);
2337 else
2339 enum tree_code tcode;
2341 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2342 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2346 /* Return nonzero if two operands (typically of the same tree node)
2347 are necessarily equal. If either argument has side-effects this
2348 function returns zero. FLAGS modifies behavior as follows:
2350 If OEP_ONLY_CONST is set, only return nonzero for constants.
2351 This function tests whether the operands are indistinguishable;
2352 it does not test whether they are equal using C's == operation.
2353 The distinction is important for IEEE floating point, because
2354 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2355 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2357 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2358 even though it may hold multiple values during a function.
2359 This is because a GCC tree node guarantees that nothing else is
2360 executed between the evaluation of its "operands" (which may often
2361 be evaluated in arbitrary order). Hence if the operands themselves
2362 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2363 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2364 unset means assuming isochronic (or instantaneous) tree equivalence.
2365 Unless comparing arbitrary expression trees, such as from different
2366 statements, this flag can usually be left unset.
2368 If OEP_PURE_SAME is set, then pure functions with identical arguments
2369 are considered the same. It is used when the caller has other ways
2370 to ensure that global memory is unchanged in between. */
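/* For instance, a + b and b + a compare equal here because
   commutativity is handled below, whereas -0.0 and 0.0 are treated as
   unequal whenever signed zeros are honored for their mode.  */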
2372 int
2373 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2375 /* If either is ERROR_MARK, they aren't equal. */
2376 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2377 || TREE_TYPE (arg0) == error_mark_node
2378 || TREE_TYPE (arg1) == error_mark_node)
2379 return 0;
2381 /* Similarly, if either does not have a type (like a released SSA name),
2382 they aren't equal. */
2383 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2384 return 0;
2386 /* Check equality of integer constants before bailing out due to
2387 precision differences. */
2388 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2389 return tree_int_cst_equal (arg0, arg1);
2391 /* If the two types don't have the same signedness, then we can't consider
2392 them equal. We must check this before the STRIP_NOPS calls
2393 because they may change the signedness of the arguments. As pointers
2394 strictly don't have a signedness, require either two pointers or
2395 two non-pointers as well. */
2396 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2397 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2398 return 0;
2400 /* We cannot consider pointers to different address spaces equal. */
2401 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2402 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2403 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2404 return 0;
2406 /* If the two types don't have the same precision, then it is not safe
2407 to strip NOPs. */
2408 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2409 return 0;
2411 STRIP_NOPS (arg0);
2412 STRIP_NOPS (arg1);
2414 /* In case both args are comparisons but with different comparison
2415 code, try to swap the comparison operands of one arg to produce
2416 a match and compare that variant. */
2417 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2418 && COMPARISON_CLASS_P (arg0)
2419 && COMPARISON_CLASS_P (arg1))
2421 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2423 if (TREE_CODE (arg0) == swap_code)
2424 return operand_equal_p (TREE_OPERAND (arg0, 0),
2425 TREE_OPERAND (arg1, 1), flags)
2426 && operand_equal_p (TREE_OPERAND (arg0, 1),
2427 TREE_OPERAND (arg1, 0), flags);
2430 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2431 /* This is needed for conversions and for COMPONENT_REF.
2432 Might as well play it safe and always test this. */
2433 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2434 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2435 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2436 return 0;
2438 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2439 We don't care about side effects in that case because the SAVE_EXPR
2440 takes care of that for us. In all other cases, two expressions are
2441 equal if they have no side effects. If we have two identical
2442 expressions with side effects that should be treated the same due
2443 to the only side effects being identical SAVE_EXPR's, that will
2444 be detected in the recursive calls below.
2445 If we are taking an invariant address of two identical objects
2446 they are necessarily equal as well. */
2447 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2448 && (TREE_CODE (arg0) == SAVE_EXPR
2449 || (flags & OEP_CONSTANT_ADDRESS_OF)
2450 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2451 return 1;
2453 /* Next handle constant cases, those for which we can return 1 even
2454 if ONLY_CONST is set. */
2455 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2456 switch (TREE_CODE (arg0))
2458 case INTEGER_CST:
2459 return tree_int_cst_equal (arg0, arg1);
2461 case FIXED_CST:
2462 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2463 TREE_FIXED_CST (arg1));
2465 case REAL_CST:
2466 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2467 TREE_REAL_CST (arg1)))
2468 return 1;
2471 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2473 /* If we do not distinguish between signed and unsigned zero,
2474 consider them equal. */
2475 if (real_zerop (arg0) && real_zerop (arg1))
2476 return 1;
2478 return 0;
2480 case VECTOR_CST:
2482 unsigned i;
2484 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2485 return 0;
2487 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2489 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2490 VECTOR_CST_ELT (arg1, i), flags))
2491 return 0;
2493 return 1;
2496 case COMPLEX_CST:
2497 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2498 flags)
2499 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2500 flags));
2502 case STRING_CST:
2503 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2504 && ! memcmp (TREE_STRING_POINTER (arg0),
2505 TREE_STRING_POINTER (arg1),
2506 TREE_STRING_LENGTH (arg0)));
2508 case ADDR_EXPR:
2509 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2510 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2511 ? OEP_CONSTANT_ADDRESS_OF : 0);
2512 default:
2513 break;
2516 if (flags & OEP_ONLY_CONST)
2517 return 0;
2519 /* Define macros to test an operand from arg0 and arg1 for equality and a
2520 variant that allows null and views null as being different from any
2521 non-null value. In the latter case, if either is null, then both
2522 must be; otherwise, do the normal comparison. */
2523 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2524 TREE_OPERAND (arg1, N), flags)
2526 #define OP_SAME_WITH_NULL(N) \
2527 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2528 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2530 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2532 case tcc_unary:
2533 /* Two conversions are equal only if signedness and modes match. */
2534 switch (TREE_CODE (arg0))
2536 CASE_CONVERT:
2537 case FIX_TRUNC_EXPR:
2538 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2539 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2540 return 0;
2541 break;
2542 default:
2543 break;
2546 return OP_SAME (0);
2549 case tcc_comparison:
2550 case tcc_binary:
2551 if (OP_SAME (0) && OP_SAME (1))
2552 return 1;
2554 /* For commutative ops, allow the other order. */
2555 return (commutative_tree_code (TREE_CODE (arg0))
2556 && operand_equal_p (TREE_OPERAND (arg0, 0),
2557 TREE_OPERAND (arg1, 1), flags)
2558 && operand_equal_p (TREE_OPERAND (arg0, 1),
2559 TREE_OPERAND (arg1, 0), flags));
2561 case tcc_reference:
2562 /* If either of the pointer (or reference) expressions we are
2563 dereferencing contains a side effect, these cannot be equal. */
2564 if (TREE_SIDE_EFFECTS (arg0)
2565 || TREE_SIDE_EFFECTS (arg1))
2566 return 0;
2568 switch (TREE_CODE (arg0))
2570 case INDIRECT_REF:
2571 case REALPART_EXPR:
2572 case IMAGPART_EXPR:
2573 return OP_SAME (0);
2575 case TARGET_MEM_REF:
2576 /* Require equal extra operands and then fall through to MEM_REF
2577 handling of the two common operands. */
2578 if (!OP_SAME_WITH_NULL (2)
2579 || !OP_SAME_WITH_NULL (3)
2580 || !OP_SAME_WITH_NULL (4))
2581 return 0;
2582 /* Fallthru. */
2583 case MEM_REF:
2584 /* Require equal access sizes, and similar pointer types.
2585 We can have incomplete types for array references of
2586 variable-sized arrays from the Fortran frontend
2587 though. */
2588 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2589 || (TYPE_SIZE (TREE_TYPE (arg0))
2590 && TYPE_SIZE (TREE_TYPE (arg1))
2591 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2592 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2593 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2594 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2595 && OP_SAME (0) && OP_SAME (1));
2597 case ARRAY_REF:
2598 case ARRAY_RANGE_REF:
2599 /* Operands 2 and 3 may be null.
2600 Compare the array index by value first if it is constant, as we
2601 may have the same value in different types here. */
2602 return (OP_SAME (0)
2603 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2604 TREE_OPERAND (arg1, 1))
2605 || OP_SAME (1))
2606 && OP_SAME_WITH_NULL (2)
2607 && OP_SAME_WITH_NULL (3));
2609 case COMPONENT_REF:
2610 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2611 may be NULL when we're called to compare MEM_EXPRs. */
2612 return OP_SAME_WITH_NULL (0)
2613 && OP_SAME (1)
2614 && OP_SAME_WITH_NULL (2);
2616 case BIT_FIELD_REF:
2617 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2619 default:
2620 return 0;
2623 case tcc_expression:
2624 switch (TREE_CODE (arg0))
2626 case ADDR_EXPR:
2627 case TRUTH_NOT_EXPR:
2628 return OP_SAME (0);
2630 case TRUTH_ANDIF_EXPR:
2631 case TRUTH_ORIF_EXPR:
2632 return OP_SAME (0) && OP_SAME (1);
2634 case FMA_EXPR:
2635 case WIDEN_MULT_PLUS_EXPR:
2636 case WIDEN_MULT_MINUS_EXPR:
2637 if (!OP_SAME (2))
2638 return 0;
2639 /* The multiplication operands are commutative. */
2640 /* FALLTHRU */
2642 case TRUTH_AND_EXPR:
2643 case TRUTH_OR_EXPR:
2644 case TRUTH_XOR_EXPR:
2645 if (OP_SAME (0) && OP_SAME (1))
2646 return 1;
2648 /* Otherwise take into account this is a commutative operation. */
2649 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2650 TREE_OPERAND (arg1, 1), flags)
2651 && operand_equal_p (TREE_OPERAND (arg0, 1),
2652 TREE_OPERAND (arg1, 0), flags));
2654 case COND_EXPR:
2655 case VEC_COND_EXPR:
2656 case DOT_PROD_EXPR:
2657 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2659 default:
2660 return 0;
2663 case tcc_vl_exp:
2664 switch (TREE_CODE (arg0))
2666 case CALL_EXPR:
2667 /* If the CALL_EXPRs call different functions, then they
2668 clearly cannot be equal. */
2669 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2670 flags))
2671 return 0;
2674 unsigned int cef = call_expr_flags (arg0);
2675 if (flags & OEP_PURE_SAME)
2676 cef &= ECF_CONST | ECF_PURE;
2677 else
2678 cef &= ECF_CONST;
2679 if (!cef)
2680 return 0;
2683 /* Now see if all the arguments are the same. */
2685 const_call_expr_arg_iterator iter0, iter1;
2686 const_tree a0, a1;
2687 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2688 a1 = first_const_call_expr_arg (arg1, &iter1);
2689 a0 && a1;
2690 a0 = next_const_call_expr_arg (&iter0),
2691 a1 = next_const_call_expr_arg (&iter1))
2692 if (! operand_equal_p (a0, a1, flags))
2693 return 0;
2695 /* If we get here and both argument lists are exhausted
2696 then the CALL_EXPRs are equal. */
2697 return ! (a0 || a1);
2699 default:
2700 return 0;
2703 case tcc_declaration:
2704 /* Consider __builtin_sqrt equal to sqrt. */
2705 return (TREE_CODE (arg0) == FUNCTION_DECL
2706 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2707 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2708 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2710 default:
2711 return 0;
2714 #undef OP_SAME
2715 #undef OP_SAME_WITH_NULL
2718 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2719 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2721 When in doubt, return 0. */
2723 static int
2724 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2726 int unsignedp1, unsignedpo;
2727 tree primarg0, primarg1, primother;
2728 unsigned int correct_width;
2730 if (operand_equal_p (arg0, arg1, 0))
2731 return 1;
2733 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2734 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2735 return 0;
2737 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2738 and see if the inner values are the same. This removes any
2739 signedness comparison, which doesn't matter here. */
2740 primarg0 = arg0, primarg1 = arg1;
2741 STRIP_NOPS (primarg0);
2742 STRIP_NOPS (primarg1);
2743 if (operand_equal_p (primarg0, primarg1, 0))
2744 return 1;
2746 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2747 actual comparison operand, ARG0.
2749 First throw away any conversions to wider types
2750 already present in the operands. */
2752 primarg1 = get_narrower (arg1, &unsignedp1);
2753 primother = get_narrower (other, &unsignedpo);
2755 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2756 if (unsignedp1 == unsignedpo
2757 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2758 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2760 tree type = TREE_TYPE (arg0);
2762 /* Make sure the shorter operand is extended the right way
2763 to match the longer operand. */
2764 primarg1 = fold_convert (signed_or_unsigned_type_for
2765 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2767 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2768 return 1;
2771 return 0;
2774 /* See if ARG is an expression that is either a comparison or is performing
2775 arithmetic on comparisons. The comparisons must only be comparing
2776 two different values, which will be stored in *CVAL1 and *CVAL2; if
2777 they are nonzero it means that some operands have already been found.
2778 No variables may be used anywhere else in the expression except in the
2779 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2780 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2782 If this is true, return 1. Otherwise, return zero. */
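/* For example, for (x < y) || (x == y) the two compared values are x
   and y, so x is stored in *CVAL1 and y in *CVAL2 and we return 1; an
   expression that also compared a third variable z would make us
   return 0.  */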
2784 static int
2785 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2787 enum tree_code code = TREE_CODE (arg);
2788 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2790 /* We can handle some of the tcc_expression cases here. */
2791 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2792 tclass = tcc_unary;
2793 else if (tclass == tcc_expression
2794 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2795 || code == COMPOUND_EXPR))
2796 tclass = tcc_binary;
2798 else if (tclass == tcc_expression && code == SAVE_EXPR
2799 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2801 /* If we've already found a CVAL1 or CVAL2, this expression is
2802 too complex to handle. */
2803 if (*cval1 || *cval2)
2804 return 0;
2806 tclass = tcc_unary;
2807 *save_p = 1;
2810 switch (tclass)
2812 case tcc_unary:
2813 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2815 case tcc_binary:
2816 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2817 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2818 cval1, cval2, save_p));
2820 case tcc_constant:
2821 return 1;
2823 case tcc_expression:
2824 if (code == COND_EXPR)
2825 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2826 cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2828 cval1, cval2, save_p)
2829 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2830 cval1, cval2, save_p));
2831 return 0;
2833 case tcc_comparison:
2834 /* First see if we can handle the first operand, then the second. For
2835 the second operand, we know *CVAL1 can't be zero. It must be that
2836 one side of the comparison is each of the values; test for the
2837 case where this isn't true by failing if the two operands
2838 are the same. */
2840 if (operand_equal_p (TREE_OPERAND (arg, 0),
2841 TREE_OPERAND (arg, 1), 0))
2842 return 0;
2844 if (*cval1 == 0)
2845 *cval1 = TREE_OPERAND (arg, 0);
2846 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2847 ;
2848 else if (*cval2 == 0)
2849 *cval2 = TREE_OPERAND (arg, 0);
2850 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2851 ;
2852 else
2853 return 0;
2855 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2856 ;
2857 else if (*cval2 == 0)
2858 *cval2 = TREE_OPERAND (arg, 1);
2859 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2860 ;
2861 else
2862 return 0;
2864 return 1;
2866 default:
2867 return 0;
2871 /* ARG is a tree that is known to contain just arithmetic operations and
2872 comparisons. Evaluate the operations in the tree substituting NEW0 for
2873 any occurrence of OLD0 as an operand of a comparison and likewise for
2874 NEW1 and OLD1. */
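/* For example, with OLD0 = x, NEW0 = 0, OLD1 = y and NEW1 = 1, the
   tree for (x < y) && (y == x) is rewritten as (0 < 1) && (1 == 0)
   and then folded.  */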
2876 static tree
2877 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2878 tree old1, tree new1)
2880 tree type = TREE_TYPE (arg);
2881 enum tree_code code = TREE_CODE (arg);
2882 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2884 /* We can handle some of the tcc_expression cases here. */
2885 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2886 tclass = tcc_unary;
2887 else if (tclass == tcc_expression
2888 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2889 tclass = tcc_binary;
2891 switch (tclass)
2893 case tcc_unary:
2894 return fold_build1_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1));
2898 case tcc_binary:
2899 return fold_build2_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1));
2905 case tcc_expression:
2906 switch (code)
2908 case SAVE_EXPR:
2909 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2910 old1, new1);
2912 case COMPOUND_EXPR:
2913 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2914 old1, new1);
2916 case COND_EXPR:
2917 return fold_build3_loc (loc, code, type,
2918 eval_subst (loc, TREE_OPERAND (arg, 0),
2919 old0, new0, old1, new1),
2920 eval_subst (loc, TREE_OPERAND (arg, 1),
2921 old0, new0, old1, new1),
2922 eval_subst (loc, TREE_OPERAND (arg, 2),
2923 old0, new0, old1, new1));
2924 default:
2925 break;
2927 /* Fall through - ??? */
2929 case tcc_comparison:
2931 tree arg0 = TREE_OPERAND (arg, 0);
2932 tree arg1 = TREE_OPERAND (arg, 1);
2934 /* We need to check both for exact equality and tree equality. The
2935 former will be true if the operand has a side-effect. In that
2936 case, we know the operand occurred exactly once. */
2938 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2939 arg0 = new0;
2940 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2941 arg0 = new1;
2943 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2944 arg1 = new0;
2945 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2946 arg1 = new1;
2948 return fold_build2_loc (loc, code, type, arg0, arg1);
2951 default:
2952 return arg;
2956 /* Return a tree for the case when the result of an expression is RESULT
2957 converted to TYPE and OMITTED was previously an operand of the expression
2958 but is now not needed (e.g., we folded OMITTED * 0).
2960 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2961 the conversion of RESULT to TYPE. */
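/* For example, when f () * 0 is folded, RESULT is 0 and OMITTED is
   the call f (): if the call has side effects we build the
   COMPOUND_EXPR (f (), 0), otherwise we simply return 0 converted to
   TYPE.  */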
2963 tree
2964 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2966 tree t = fold_convert_loc (loc, type, result);
2968 /* If the resulting operand is an empty statement, just return the omitted
2969 statement cast to void. */
2970 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2971 return build1_loc (loc, NOP_EXPR, void_type_node,
2972 fold_ignored_result (omitted));
2974 if (TREE_SIDE_EFFECTS (omitted))
2975 return build2_loc (loc, COMPOUND_EXPR, type,
2976 fold_ignored_result (omitted), t);
2978 return non_lvalue_loc (loc, t);
2981 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2983 static tree
2984 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2985 tree omitted)
2987 tree t = fold_convert_loc (loc, type, result);
2989 /* If the resulting operand is an empty statement, just return the omitted
2990 statement cast to void. */
2991 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2992 return build1_loc (loc, NOP_EXPR, void_type_node,
2993 fold_ignored_result (omitted));
2995 if (TREE_SIDE_EFFECTS (omitted))
2996 return build2_loc (loc, COMPOUND_EXPR, type,
2997 fold_ignored_result (omitted), t);
2999 return pedantic_non_lvalue_loc (loc, t);
3002 /* Return a tree for the case when the result of an expression is RESULT
3003 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3004 of the expression but are now not needed.
3006 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3007 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3008 evaluated before OMITTED2. Otherwise, if neither has side effects,
3009 just do the conversion of RESULT to TYPE. */
3011 tree
3012 omit_two_operands_loc (location_t loc, tree type, tree result,
3013 tree omitted1, tree omitted2)
3015 tree t = fold_convert_loc (loc, type, result);
3017 if (TREE_SIDE_EFFECTS (omitted2))
3018 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3019 if (TREE_SIDE_EFFECTS (omitted1))
3020 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3022 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3026 /* Return a simplified tree node for the truth-negation of ARG. This
3027 never alters ARG itself. We assume that ARG is an operation that
3028 returns a truth value (0 or 1).
3030 FIXME: one would think we would fold the result, but it causes
3031 problems with the dominator optimizer. */
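/* For instance, the cases below rewrite !(a && b) as !a || !b, and
   !(x < y) as x >= y when that comparison inversion is safe; if no
   case applies we return NULL_TREE and the caller wraps the argument
   in a TRUTH_NOT_EXPR instead.  */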
3033 tree
3034 fold_truth_not_expr (location_t loc, tree arg)
3036 tree type = TREE_TYPE (arg);
3037 enum tree_code code = TREE_CODE (arg);
3038 location_t loc1, loc2;
3040 /* If this is a comparison, we can simply invert it, except for
3041 floating-point non-equality comparisons, in which case we just
3042 enclose a TRUTH_NOT_EXPR around what we have. */
3044 if (TREE_CODE_CLASS (code) == tcc_comparison)
3046 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3047 if (FLOAT_TYPE_P (op_type)
3048 && flag_trapping_math
3049 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3050 && code != NE_EXPR && code != EQ_EXPR)
3051 return NULL_TREE;
3053 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3054 if (code == ERROR_MARK)
3055 return NULL_TREE;
3057 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3058 TREE_OPERAND (arg, 1));
3061 switch (code)
3063 case INTEGER_CST:
3064 return constant_boolean_node (integer_zerop (arg), type);
3066 case TRUTH_AND_EXPR:
3067 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3068 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3069 return build2_loc (loc, TRUTH_OR_EXPR, type,
3070 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3071 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3073 case TRUTH_OR_EXPR:
3074 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3075 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3076 return build2_loc (loc, TRUTH_AND_EXPR, type,
3077 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3078 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3080 case TRUTH_XOR_EXPR:
3081 /* Here we can invert either operand. We invert the first operand
3082 unless the second operand is a TRUTH_NOT_EXPR in which case our
3083 result is the XOR of the first operand with the inside of the
3084 negation of the second operand. */
3086 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3087 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3088 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3089 else
3090 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3091 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3092 TREE_OPERAND (arg, 1));
3094 case TRUTH_ANDIF_EXPR:
3095 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3096 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3097 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3098 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3099 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3101 case TRUTH_ORIF_EXPR:
3102 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3103 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3104 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3105 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3106 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3108 case TRUTH_NOT_EXPR:
3109 return TREE_OPERAND (arg, 0);
3111 case COND_EXPR:
3113 tree arg1 = TREE_OPERAND (arg, 1);
3114 tree arg2 = TREE_OPERAND (arg, 2);
3116 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3117 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3119 /* A COND_EXPR may have a throw as one operand, which
3120 then has void type. Just leave void operands
3121 as they are. */
3122 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3123 VOID_TYPE_P (TREE_TYPE (arg1))
3124 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3125 VOID_TYPE_P (TREE_TYPE (arg2))
3126 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3129 case COMPOUND_EXPR:
3130 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3131 return build2_loc (loc, COMPOUND_EXPR, type,
3132 TREE_OPERAND (arg, 0),
3133 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3135 case NON_LVALUE_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3139 CASE_CONVERT:
3140 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3141 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3143 /* ... fall through ... */
3145 case FLOAT_EXPR:
3146 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3147 return build1_loc (loc, TREE_CODE (arg), type,
3148 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3150 case BIT_AND_EXPR:
3151 if (!integer_onep (TREE_OPERAND (arg, 1)))
3152 return NULL_TREE;
3153 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3155 case SAVE_EXPR:
3156 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3158 case CLEANUP_POINT_EXPR:
3159 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3160 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3161 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3163 default:
3164 return NULL_TREE;
3168 /* Return a simplified tree node for the truth-negation of ARG. This
3169 never alters ARG itself. We assume that ARG is an operation that
3170 returns a truth value (0 or 1).
3172 FIXME: one would think we would fold the result, but it causes
3173 problems with the dominator optimizer. */
3175 tree
3176 invert_truthvalue_loc (location_t loc, tree arg)
3178 tree tem;
3180 if (TREE_CODE (arg) == ERROR_MARK)
3181 return arg;
3183 tem = fold_truth_not_expr (loc, arg);
3184 if (!tem)
3185 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3187 return tem;
3190 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3191 operands are another bit-wise operation with a common input. If so,
3192 distribute the bit operations to save an operation and possibly two if
3193 constants are involved. For example, convert
3194 (A | B) & (A | C) into A | (B & C)
3195 Further simplification will occur if B and C are constants.
3197 If this optimization cannot be done, 0 will be returned. */
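/* For instance, (A | 4) & (A | 1) becomes A | (4 & 1), which folds
   to A | 0 and finally to just A.  */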
3199 static tree
3200 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3201 tree arg0, tree arg1)
3203 tree common;
3204 tree left, right;
3206 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3207 || TREE_CODE (arg0) == code
3208 || (TREE_CODE (arg0) != BIT_AND_EXPR
3209 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3210 return 0;
3212 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3214 common = TREE_OPERAND (arg0, 0);
3215 left = TREE_OPERAND (arg0, 1);
3216 right = TREE_OPERAND (arg1, 1);
3218 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3220 common = TREE_OPERAND (arg0, 0);
3221 left = TREE_OPERAND (arg0, 1);
3222 right = TREE_OPERAND (arg1, 0);
3224 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3226 common = TREE_OPERAND (arg0, 1);
3227 left = TREE_OPERAND (arg0, 0);
3228 right = TREE_OPERAND (arg1, 1);
3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3232 common = TREE_OPERAND (arg0, 1);
3233 left = TREE_OPERAND (arg0, 0);
3234 right = TREE_OPERAND (arg1, 0);
3236 else
3237 return 0;
3239 common = fold_convert_loc (loc, type, common);
3240 left = fold_convert_loc (loc, type, left);
3241 right = fold_convert_loc (loc, type, right);
3242 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3243 fold_build2_loc (loc, code, type, left, right));
3246 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3247 with code CODE. This optimization is unsafe. */
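/* It is unsafe because, e.g., (A / C) + (B / C) rounds each quotient
   separately, while the rewritten (A + B) / C does not, so the two
   forms can differ in the low bits under IEEE arithmetic.  */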
3248 static tree
3249 distribute_real_division (location_t loc, enum tree_code code, tree type,
3250 tree arg0, tree arg1)
3252 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3253 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3255 /* (A / C) +- (B / C) -> (A +- B) / C. */
3256 if (mul0 == mul1
3257 && operand_equal_p (TREE_OPERAND (arg0, 1),
3258 TREE_OPERAND (arg1, 1), 0))
3259 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3260 fold_build2_loc (loc, code, type,
3261 TREE_OPERAND (arg0, 0),
3262 TREE_OPERAND (arg1, 0)),
3263 TREE_OPERAND (arg0, 1));
3265 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3266 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3267 TREE_OPERAND (arg1, 0), 0)
3268 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3269 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3271 REAL_VALUE_TYPE r0, r1;
3272 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3273 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3274 if (!mul0)
3275 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3276 if (!mul1)
3277 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3278 real_arithmetic (&r0, code, &r0, &r1);
3279 return fold_build2_loc (loc, MULT_EXPR, type,
3280 TREE_OPERAND (arg0, 0),
3281 build_real (type, r0));
3284 return NULL_TREE;
3287 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3288 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3290 static tree
3291 make_bit_field_ref (location_t loc, tree inner, tree type,
3292 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3294 tree result, bftype;
3296 if (bitpos == 0)
3298 tree size = TYPE_SIZE (TREE_TYPE (inner));
3299 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3300 || POINTER_TYPE_P (TREE_TYPE (inner)))
3301 && host_integerp (size, 0)
3302 && tree_low_cst (size, 0) == bitsize)
3303 return fold_convert_loc (loc, type, inner);
3306 bftype = type;
3307 if (TYPE_PRECISION (bftype) != bitsize
3308 || TYPE_UNSIGNED (bftype) == !unsignedp)
3309 bftype = build_nonstandard_integer_type (bitsize, 0);
3311 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3312 size_int (bitsize), bitsize_int (bitpos));
3314 if (bftype != type)
3315 result = fold_convert_loc (loc, type, result);
3317 return result;
3320 /* Optimize a bit-field compare.
3322 There are two cases: First is a compare against a constant and the
3323 second is a comparison of two items where the fields are at the same
3324 bit position relative to the start of a chunk (byte, halfword, word)
3325 large enough to contain it. In these cases we can avoid the shift
3326 implicit in bitfield extractions.
3328 For constants, we emit a compare of the shifted constant with the
3329 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3330 compared. For two fields at the same position, we do the ANDs with the
3331 similar mask and compare the result of the ANDs.
3333 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3334 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3335 are the left and right operands of the comparison, respectively.
3337 If the optimization described above can be done, we return the resulting
3338 tree. Otherwise we return zero. */
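/* A rough sketch of the constant case: given

     struct S { unsigned a : 3; unsigned b : 5; } s;
     ... s.b == 7 ...

   we can load a word containing the field, AND it with a mask
   selecting b's bits, and compare the result against 7 shifted into
   b's position, avoiding the shift that a plain field extraction
   would need.  */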
3340 static tree
3341 optimize_bit_field_compare (location_t loc, enum tree_code code,
3342 tree compare_type, tree lhs, tree rhs)
3344 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3345 tree type = TREE_TYPE (lhs);
3346 tree signed_type, unsigned_type;
3347 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3348 enum machine_mode lmode, rmode, nmode;
3349 int lunsignedp, runsignedp;
3350 int lvolatilep = 0, rvolatilep = 0;
3351 tree linner, rinner = NULL_TREE;
3352 tree mask;
3353 tree offset;
3355 /* In the strict volatile bitfields case, doing code changes here may prevent
3356 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3357 if (flag_strict_volatile_bitfields > 0)
3358 return 0;
3360 /* Get all the information about the extractions being done. If the bit size
3361 is the same as the size of the underlying object, we aren't doing an
3362 extraction at all and so can do nothing. We also don't want to
3363 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3364 then will no longer be able to replace it. */
3365 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3366 &lunsignedp, &lvolatilep, false);
3367 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3368 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3369 return 0;
3371 if (!const_p)
3373 /* If this is not a constant, we can only do something if bit positions,
3374 sizes, and signedness are the same. */
3375 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3376 &runsignedp, &rvolatilep, false);
3378 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3379 || lunsignedp != runsignedp || offset != 0
3380 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3381 return 0;
3384 /* See if we can find a mode to refer to this field. We should be able to,
3385 but fail if we can't. */
3386 if (lvolatilep
3387 && GET_MODE_BITSIZE (lmode) > 0
3388 && flag_strict_volatile_bitfields > 0)
3389 nmode = lmode;
3390 else
3391 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3392 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3393 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3394 TYPE_ALIGN (TREE_TYPE (rinner))),
3395 word_mode, lvolatilep || rvolatilep);
3396 if (nmode == VOIDmode)
3397 return 0;
3399 /* Set signed and unsigned types of the precision of this mode for the
3400 shifts below. */
3401 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3402 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3404 /* Compute the bit position and size for the new reference and our offset
3405 within it. If the new reference is the same size as the original, we
3406 won't optimize anything, so return zero. */
3407 nbitsize = GET_MODE_BITSIZE (nmode);
3408 nbitpos = lbitpos & ~ (nbitsize - 1);
3409 lbitpos -= nbitpos;
3410 if (nbitsize == lbitsize)
3411 return 0;
3413 if (BYTES_BIG_ENDIAN)
3414 lbitpos = nbitsize - lbitsize - lbitpos;
3416 /* Make the mask to be used against the extracted field. */
3417 mask = build_int_cst_type (unsigned_type, -1);
3418 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3419 mask = const_binop (RSHIFT_EXPR, mask,
3420 size_int (nbitsize - lbitsize - lbitpos));
3422 if (! const_p)
3423 /* If not comparing with a constant, just rework the comparison
3424 and return. */
3425 return fold_build2_loc (loc, code, compare_type,
3426 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3427 make_bit_field_ref (loc, linner,
3428 unsigned_type,
3429 nbitsize, nbitpos,
3430 1),
3431 mask),
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, rinner,
3434 unsigned_type,
3435 nbitsize, nbitpos,
3436 1),
3437 mask));
3439 /* Otherwise, we are handling the constant case. See if the constant is too
3440 big for the field. Warn and return a tree for 0 (false) if so. We do
3441 this not only for its own sake, but to avoid having to test for this
3442 error case below. If we didn't, we might generate wrong code.
3444 For unsigned fields, the constant shifted right by the field length should
3445 be all zero. For signed fields, the high-order bits should agree with
3446 the sign bit. */
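/* For example, with a 3-bit unsigned field F, the comparison F == 9
   can never be true: 9 shifted right by 3 is nonzero, so we warn and
   fold to constant false (and F != 9 folds to constant true).  */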
3448 if (lunsignedp)
3450 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3451 fold_convert_loc (loc,
3452 unsigned_type, rhs),
3453 size_int (lbitsize))))
3455 warning (0, "comparison is always %d due to width of bit-field",
3456 code == NE_EXPR);
3457 return constant_boolean_node (code == NE_EXPR, compare_type);
3460 else
3462 tree tem = const_binop (RSHIFT_EXPR,
3463 fold_convert_loc (loc, signed_type, rhs),
3464 size_int (lbitsize - 1));
3465 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3467 warning (0, "comparison is always %d due to width of bit-field",
3468 code == NE_EXPR);
3469 return constant_boolean_node (code == NE_EXPR, compare_type);
3473 /* Single-bit compares should always be against zero. */
3474 if (lbitsize == 1 && ! integer_zerop (rhs))
3476 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3477 rhs = build_int_cst (type, 0);
3480 /* Make a new bitfield reference, shift the constant over the
3481 appropriate number of bits and mask it with the computed mask
3482 (in case this was a signed field). If we changed it, make a new one. */
3483 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3484 if (lvolatilep)
3486 TREE_SIDE_EFFECTS (lhs) = 1;
3487 TREE_THIS_VOLATILE (lhs) = 1;
3490 rhs = const_binop (BIT_AND_EXPR,
3491 const_binop (LSHIFT_EXPR,
3492 fold_convert_loc (loc, unsigned_type, rhs),
3493 size_int (lbitpos)),
3494 mask);
3496 lhs = build2_loc (loc, code, compare_type,
3497 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3498 return lhs;
3501 /* Subroutine for fold_truth_andor_1: decode a field reference.
3503 If EXP is a comparison reference, we return the innermost reference.
3505 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3506 set to the starting bit number.
3508 If the innermost field can be completely contained in a mode-sized
3509 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3511 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3512 otherwise it is not changed.
3514 *PUNSIGNEDP is set to the signedness of the field.
3516 *PMASK is set to the mask used. This is either contained in a
3517 BIT_AND_EXPR or derived from the width of the field.
3519 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3521 Return 0 if this is not a component reference or is one that we can't
3522 do anything with. */
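/* For example, for an access such as s.f & 3, where s.f is an 8-bit
   field, the BIT_AND_EXPR constant 3 is stored in *PAND_MASK, *PMASK
   becomes that constant ANDed with the field's natural 8-bit mask,
   and the innermost reference to s is returned.  */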
3524 static tree
3525 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3526 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3527 int *punsignedp, int *pvolatilep,
3528 tree *pmask, tree *pand_mask)
3530 tree outer_type = 0;
3531 tree and_mask = 0;
3532 tree mask, inner, offset;
3533 tree unsigned_type;
3534 unsigned int precision;
3536 /* All the optimizations using this function assume integer fields.
3537 There are problems with FP fields since the type_for_size call
3538 below can fail for, e.g., XFmode. */
3539 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3540 return 0;
3542 /* We are interested in the bare arrangement of bits, so strip everything
3543 that doesn't affect the machine mode. However, record the type of the
3544 outermost expression if it may matter below. */
3545 if (CONVERT_EXPR_P (exp)
3546 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3547 outer_type = TREE_TYPE (exp);
3548 STRIP_NOPS (exp);
3550 if (TREE_CODE (exp) == BIT_AND_EXPR)
3552 and_mask = TREE_OPERAND (exp, 1);
3553 exp = TREE_OPERAND (exp, 0);
3554 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3555 if (TREE_CODE (and_mask) != INTEGER_CST)
3556 return 0;
3559 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3560 punsignedp, pvolatilep, false);
3561 if ((inner == exp && and_mask == 0)
3562 || *pbitsize < 0 || offset != 0
3563 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3564 return 0;
3566 /* If the number of bits in the reference is the same as the bitsize of
3567 the outer type, then the outer type gives the signedness. Otherwise
3568 (in case of a small bitfield) the signedness is unchanged. */
3569 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3570 *punsignedp = TYPE_UNSIGNED (outer_type);
3572 /* Compute the mask to access the bitfield. */
3573 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3574 precision = TYPE_PRECISION (unsigned_type);
3576 mask = build_int_cst_type (unsigned_type, -1);
3578 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3579 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3581 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3582 if (and_mask != 0)
3583 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3584 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3586 *pmask = mask;
3587 *pand_mask = and_mask;
3588 return inner;
3591 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3592 bit positions. */
3594 static int
3595 all_ones_mask_p (const_tree mask, int size)
3597 tree type = TREE_TYPE (mask);
3598 unsigned int precision = TYPE_PRECISION (type);
3599 tree tmask;
3601 tmask = build_int_cst_type (signed_type_for (type), -1);
3603 return
3604 tree_int_cst_equal (mask,
3605 const_binop (RSHIFT_EXPR,
3606 const_binop (LSHIFT_EXPR, tmask,
3607 size_int (precision - size)),
3608 size_int (precision - size)));
3611 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3612 represents the sign bit of EXP's type. If EXP represents a sign
3613 or zero extension, also test VAL against the unextended type.
3614 The return value is the (sub)expression whose sign bit is VAL,
3615 or NULL_TREE otherwise. */
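/* For a 32-bit int EXP, for instance, the only matching VAL is
   0x80000000; if EXP is an extension of a signed char, VAL == 0x80
   also matches and the narrower subexpression is returned.  */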
3617 static tree
3618 sign_bit_p (tree exp, const_tree val)
3620 unsigned HOST_WIDE_INT mask_lo, lo;
3621 HOST_WIDE_INT mask_hi, hi;
3622 int width;
3623 tree t;
3625 /* Tree EXP must have an integral type. */
3626 t = TREE_TYPE (exp);
3627 if (! INTEGRAL_TYPE_P (t))
3628 return NULL_TREE;
3630 /* Tree VAL must be an integer constant. */
3631 if (TREE_CODE (val) != INTEGER_CST
3632 || TREE_OVERFLOW (val))
3633 return NULL_TREE;
3635 width = TYPE_PRECISION (t);
3636 if (width > HOST_BITS_PER_WIDE_INT)
3638 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3639 lo = 0;
3641 mask_hi = ((unsigned HOST_WIDE_INT) -1
3642 >> (HOST_BITS_PER_DOUBLE_INT - width));
3643 mask_lo = -1;
3645 else
3647 hi = 0;
3648 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3650 mask_hi = 0;
3651 mask_lo = ((unsigned HOST_WIDE_INT) -1
3652 >> (HOST_BITS_PER_WIDE_INT - width));
3655 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3656 treat VAL as if it were unsigned. */
3657 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3658 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3659 return exp;
3661 /* Handle extension from a narrower type. */
3662 if (TREE_CODE (exp) == NOP_EXPR
3663 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3664 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3666 return NULL_TREE;
3669 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3670 to be evaluated unconditionally. */
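/* A local automatic variable or a constant is simple in this sense;
   a volatile, global or static object is not, since evaluating it
   unconditionally could be unsafe or unduly expensive.  */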
3672 static int
3673 simple_operand_p (const_tree exp)
3675 /* Strip any conversions that don't change the machine mode. */
3676 STRIP_NOPS (exp);
3678 return (CONSTANT_CLASS_P (exp)
3679 || TREE_CODE (exp) == SSA_NAME
3680 || (DECL_P (exp)
3681 && ! TREE_ADDRESSABLE (exp)
3682 && ! TREE_THIS_VOLATILE (exp)
3683 && ! DECL_NONLOCAL (exp)
3684 /* Don't regard global variables as simple. They may be
3685 allocated in ways unknown to the compiler (shared memory,
3686 #pragma weak, etc). */
3687 && ! TREE_PUBLIC (exp)
3688 && ! DECL_EXTERNAL (exp)
3689 /* Loading a static variable is unduly expensive, but global
3690 registers aren't expensive. */
3691 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3694 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3695 to be evaluated unconditionally.
3696 In addition to simple_operand_p, we assume that comparisons, conversions,
3697 and logic-not operations are simple, if their operands are simple, too. */
3699 static bool
3700 simple_operand_p_2 (tree exp)
3702 enum tree_code code;
3704 if (TREE_SIDE_EFFECTS (exp)
3705 || tree_could_trap_p (exp))
3706 return false;
3708 while (CONVERT_EXPR_P (exp))
3709 exp = TREE_OPERAND (exp, 0);
3711 code = TREE_CODE (exp);
3713 if (TREE_CODE_CLASS (code) == tcc_comparison)
3714 return (simple_operand_p (TREE_OPERAND (exp, 0))
3715 && simple_operand_p (TREE_OPERAND (exp, 1)));
3717 if (code == TRUTH_NOT_EXPR)
3718 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3720 return simple_operand_p (exp);
3724 /* The following functions are subroutines to fold_range_test and allow it to
3725 try to change a logical combination of comparisons into a range test.
3727 For example, both
3728 X == 2 || X == 3 || X == 4 || X == 5
3729 and
3730 X >= 2 && X <= 5
3731 are converted to
3732 (unsigned) (X - 2) <= 3
3734 We describe each set of comparisons as being either inside or outside
3735 a range, using a variable named like IN_P, and then describe the
3736 range with a lower and upper bound. If one of the bounds is omitted,
3737 it represents either the highest or lowest value of the type.
3739 In the comments below, we represent a range by two numbers in brackets
3740 preceded by a "+" to designate being inside that range, or a "-" to
3741 designate being outside that range, so the condition can be inverted by
3742 flipping the prefix. An omitted bound is represented by a "-". For
3743 example, "- [-, 10]" means being outside the range starting at the lowest
3744 possible value and ending at 10, in other words, being greater than 10.
3745 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3746 always false.
3748 We set up things so that the missing bounds are handled in a consistent
3749 manner so neither a missing bound nor "true" and "false" need to be
3750 handled using a special case. */
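/* A worked example: X == 2 || X == 3 || X == 4 || X == 5 is the
   union of + [2, 2], + [3, 3], + [4, 4] and + [5, 5], which
   merge_ranges combines into + [2, 5]; build_range_check then
   emits (unsigned) (X - 2) <= 3, a single subtract and compare.  */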
3752 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3753 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3754 and UPPER1_P are nonzero if the respective argument is an upper bound
3755 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3756 must be specified for a comparison. ARG1 will be converted to ARG0's
3757 type if both are specified. */
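/* For instance, an omitted upper bound compared LE against the
   constant 10 yields false (the missing bound acts like +Z), while
   two omitted upper bounds compare EQ as true, so two unbounded
   ranges can be recognized as equal.  */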
3759 static tree
3760 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3761 tree arg1, int upper1_p)
3763 tree tem;
3764 int result;
3765 int sgn0, sgn1;
3767 /* If neither arg represents infinity, do the normal operation.
3768 Else, if not a comparison, return infinity. Else handle the special
3769 comparison rules. Note that most of the cases below won't occur, but
3770 are handled for consistency. */
3772 if (arg0 != 0 && arg1 != 0)
3774 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3775 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3776 STRIP_NOPS (tem);
3777 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3780 if (TREE_CODE_CLASS (code) != tcc_comparison)
3781 return 0;
3783 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3784 omitted upper bound, and 0 if the bound is present. In real
3785 mathematics we could not compare open-ended ranges, but this is
3786 computer arithmetic, where numbers are finite. We can therefore
3787 model a missing bound as a value Z greater in magnitude than any
3788 representable number, which permits us to treat unbounded ranges as equal. */
3789 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3790 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3791 switch (code)
3793 case EQ_EXPR:
3794 result = sgn0 == sgn1;
3795 break;
3796 case NE_EXPR:
3797 result = sgn0 != sgn1;
3798 break;
3799 case LT_EXPR:
3800 result = sgn0 < sgn1;
3801 break;
3802 case LE_EXPR:
3803 result = sgn0 <= sgn1;
3804 break;
3805 case GT_EXPR:
3806 result = sgn0 > sgn1;
3807 break;
3808 case GE_EXPR:
3809 result = sgn0 >= sgn1;
3810 break;
3811 default:
3812 gcc_unreachable ();
3815 return constant_boolean_node (result, type);
3818 /* Helper routine for make_range. Perform one step for it, return
3819 new expression if the loop should continue or NULL_TREE if it should
3820 stop. */
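/* One step, for illustration: for code PLUS_EXPR with ARG0 X,
   ARG1 10 and the range + [5, 20], the bounds are adjusted to give
   X in + [-5, 10].  For signed X this relies on overflow being
   undefined, which is why *STRICT_OVERFLOW_P is set below.  */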
3822 tree
3823 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3824 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3825 bool *strict_overflow_p)
3827 tree arg0_type = TREE_TYPE (arg0);
3828 tree n_low, n_high, low = *p_low, high = *p_high;
3829 int in_p = *p_in_p, n_in_p;
3831 switch (code)
3833 case TRUTH_NOT_EXPR:
3834 *p_in_p = ! in_p;
3835 return arg0;
3837 case EQ_EXPR: case NE_EXPR:
3838 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3839 /* We can only do something if the range is testing for zero
3840 and if the second operand is an integer constant. Note that
3841 saying something is "in" the range we make is done by
3842 complementing IN_P, since the incoming range describes the case
3843 of being not equal to zero; "out" is leaving it alone. */
3844 if (low == NULL_TREE || high == NULL_TREE
3845 || ! integer_zerop (low) || ! integer_zerop (high)
3846 || TREE_CODE (arg1) != INTEGER_CST)
3847 return NULL_TREE;
3849 switch (code)
3851 case NE_EXPR: /* - [c, c] */
3852 low = high = arg1;
3853 break;
3854 case EQ_EXPR: /* + [c, c] */
3855 in_p = ! in_p, low = high = arg1;
3856 break;
3857 case GT_EXPR: /* - [-, c] */
3858 low = 0, high = arg1;
3859 break;
3860 case GE_EXPR: /* + [c, -] */
3861 in_p = ! in_p, low = arg1, high = 0;
3862 break;
3863 case LT_EXPR: /* - [c, -] */
3864 low = arg1, high = 0;
3865 break;
3866 case LE_EXPR: /* + [-, c] */
3867 in_p = ! in_p, low = 0, high = arg1;
3868 break;
3869 default:
3870 gcc_unreachable ();
3873 /* If this is an unsigned comparison, we also know that EXP is
3874 greater than or equal to zero. We base the range tests we make
3875 on that fact, so we record it here so we can parse existing
3876 range tests. We test arg0_type since often the return type
3877 of, e.g. EQ_EXPR, is boolean. */
3878 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3880 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3881 in_p, low, high, 1,
3882 build_int_cst (arg0_type, 0),
3883 NULL_TREE))
3884 return NULL_TREE;
3886 in_p = n_in_p, low = n_low, high = n_high;
3888 /* If the high bound is missing, but we have a nonzero low
3889 bound, reverse the range so it goes from zero to the low bound
3890 minus 1. */
3891 if (high == 0 && low && ! integer_zerop (low))
3893 in_p = ! in_p;
3894 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3895 integer_one_node, 0);
3896 low = build_int_cst (arg0_type, 0);
3900 *p_low = low;
3901 *p_high = high;
3902 *p_in_p = in_p;
3903 return arg0;
3905 case NEGATE_EXPR:
3906 /* (-x) IN [a,b] -> x in [-b, -a] */
3907 n_low = range_binop (MINUS_EXPR, exp_type,
3908 build_int_cst (exp_type, 0),
3909 0, high, 1);
3910 n_high = range_binop (MINUS_EXPR, exp_type,
3911 build_int_cst (exp_type, 0),
3912 0, low, 0);
3913 if (n_high != 0 && TREE_OVERFLOW (n_high))
3914 return NULL_TREE;
3915 goto normalize;
3917 case BIT_NOT_EXPR:
3918 /* ~ X -> -X - 1 */
3919 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3920 build_int_cst (exp_type, 1));
3922 case PLUS_EXPR:
3923 case MINUS_EXPR:
3924 if (TREE_CODE (arg1) != INTEGER_CST)
3925 return NULL_TREE;
3927 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3928 move a constant to the other side. */
3929 if (!TYPE_UNSIGNED (arg0_type)
3930 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3931 return NULL_TREE;
3933 /* If EXP is signed, any overflow in the computation is undefined,
3934 so we don't worry about it so long as our computations on
3935 the bounds don't overflow. For unsigned, overflow is defined
3936 and this is exactly the right thing. */
3937 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3938 arg0_type, low, 0, arg1, 0);
3939 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3940 arg0_type, high, 1, arg1, 0);
3941 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3942 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3943 return NULL_TREE;
3945 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3946 *strict_overflow_p = true;
3948 normalize:
3949 /* Check for an unsigned range which has wrapped around the maximum
3950 value thus making n_high < n_low, and normalize it. */
3951 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3953 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3954 integer_one_node, 0);
3955 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3956 integer_one_node, 0);
3958 /* If the range is of the form +/- [ x+1, x ], we won't
3959 be able to normalize it. But then, it represents the
3960 whole range or the empty set, so make it
3961 +/- [ -, - ]. */
3962 if (tree_int_cst_equal (n_low, low)
3963 && tree_int_cst_equal (n_high, high))
3964 low = high = 0;
3965 else
3966 in_p = ! in_p;
3968 else
3969 low = n_low, high = n_high;
3971 *p_low = low;
3972 *p_high = high;
3973 *p_in_p = in_p;
3974 return arg0;
3976 CASE_CONVERT:
3977 case NON_LVALUE_EXPR:
3978 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3979 return NULL_TREE;
3981 if (! INTEGRAL_TYPE_P (arg0_type)
3982 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3983 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3984 return NULL_TREE;
3986 n_low = low, n_high = high;
3988 if (n_low != 0)
3989 n_low = fold_convert_loc (loc, arg0_type, n_low);
3991 if (n_high != 0)
3992 n_high = fold_convert_loc (loc, arg0_type, n_high);
3994 /* If we're converting arg0, an unsigned value, to the signed
3995 type of exp, we will be doing the comparison as unsigned.
3996 The tests above have already verified that LOW and HIGH
3997 are both positive.
3999 So we have to ensure that we will handle large unsigned
4000 values the same way that the current signed bounds treat
4001 negative values. */
4003 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4005 tree high_positive;
4006 tree equiv_type;
4007 /* For fixed-point modes, we need to pass the saturating flag
4008 as the 2nd parameter. */
4009 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4010 equiv_type
4011 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4012 TYPE_SATURATING (arg0_type));
4013 else
4014 equiv_type
4015 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4017 /* A range without an upper bound is, naturally, unbounded.
4018 Since convert would have cropped a very large value, use
4019 the max value for the destination type. */
4020 high_positive
4021 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4022 : TYPE_MAX_VALUE (arg0_type);
4024 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4025 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4026 fold_convert_loc (loc, arg0_type,
4027 high_positive),
4028 build_int_cst (arg0_type, 1));
4030 /* If the low bound is specified, "and" the range with the
4031 range for which the original unsigned value will be
4032 positive. */
4033 if (low != 0)
4035 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4036 1, fold_convert_loc (loc, arg0_type,
4037 integer_zero_node),
4038 high_positive))
4039 return NULL_TREE;
4041 in_p = (n_in_p == in_p);
4043 else
4045 /* Otherwise, "or" the range with the range of the input
4046 that will be interpreted as negative. */
4047 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4048 1, fold_convert_loc (loc, arg0_type,
4049 integer_zero_node),
4050 high_positive))
4051 return NULL_TREE;
4053 in_p = (in_p != n_in_p);
4057 *p_low = n_low;
4058 *p_high = n_high;
4059 *p_in_p = in_p;
4060 return arg0;
4062 default:
4063 return NULL_TREE;
4067 /* Given EXP, a logical expression, set the range it is testing into
4068 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4069 actually being tested. *PLOW and *PHIGH will be made of the same
4070 type as the returned expression. If EXP is not a comparison, we
4071 will most likely not be returning a useful value and range. Set
4072 *STRICT_OVERFLOW_P to true if the return value is only valid
4073 because signed overflow is undefined; otherwise, do not change
4074 *STRICT_OVERFLOW_P. */
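/* As a small example, EXP = (X >= 2) returns X with *PIN_P = 1,
   *PLOW = 2 and *PHIGH = NULL_TREE, i.e. the range + [2, -];
   negations, conversions and arithmetic around X are peeled off
   one make_range_step at a time.  */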
4076 tree
4077 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4078 bool *strict_overflow_p)
4080 enum tree_code code;
4081 tree arg0, arg1 = NULL_TREE;
4082 tree exp_type, nexp;
4083 int in_p;
4084 tree low, high;
4085 location_t loc = EXPR_LOCATION (exp);
4087 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4088 and see if we can refine the range. Some of the cases below may not
4089 happen, but it doesn't seem worth worrying about this. We "continue"
4090 the outer loop when we've changed something; otherwise we "break"
4091 the switch, which will "break" the while. */
4093 in_p = 0;
4094 low = high = build_int_cst (TREE_TYPE (exp), 0);
4096 while (1)
4098 code = TREE_CODE (exp);
4099 exp_type = TREE_TYPE (exp);
4100 arg0 = NULL_TREE;
4102 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4104 if (TREE_OPERAND_LENGTH (exp) > 0)
4105 arg0 = TREE_OPERAND (exp, 0);
4106 if (TREE_CODE_CLASS (code) == tcc_binary
4107 || TREE_CODE_CLASS (code) == tcc_comparison
4108 || (TREE_CODE_CLASS (code) == tcc_expression
4109 && TREE_OPERAND_LENGTH (exp) > 1))
4110 arg1 = TREE_OPERAND (exp, 1);
4112 if (arg0 == NULL_TREE)
4113 break;
4115 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4116 &high, &in_p, strict_overflow_p);
4117 if (nexp == NULL_TREE)
4118 break;
4119 exp = nexp;
4122 /* If EXP is a constant, we can evaluate whether this is true or false. */
4123 if (TREE_CODE (exp) == INTEGER_CST)
4125 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4126 exp, 0, low, 0))
4127 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4128 exp, 1, high, 1)));
4129 low = high = 0;
4130 exp = 0;
4133 *pin_p = in_p, *plow = low, *phigh = high;
4134 return exp;
4137 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4138 type, TYPE, return an expression to test if EXP is in (or out of, depending
4139 on IN_P) the range. Return 0 if the test couldn't be created. */
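/* E.g. build_range_check (loc, type, X, 1, 2, 5) yields
   (unsigned) (X - 2) <= 3; the IN_P == 0 form is built by
   inverting the corresponding IN_P == 1 check.  */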
4141 tree
4142 build_range_check (location_t loc, tree type, tree exp, int in_p,
4143 tree low, tree high)
4145 tree etype = TREE_TYPE (exp), value;
4147 #ifdef HAVE_canonicalize_funcptr_for_compare
4148 /* Disable this optimization for function pointer expressions
4149 on targets that require function pointer canonicalization. */
4150 if (HAVE_canonicalize_funcptr_for_compare
4151 && TREE_CODE (etype) == POINTER_TYPE
4152 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4153 return NULL_TREE;
4154 #endif
4156 if (! in_p)
4158 value = build_range_check (loc, type, exp, 1, low, high);
4159 if (value != 0)
4160 return invert_truthvalue_loc (loc, value);
4162 return 0;
4165 if (low == 0 && high == 0)
4166 return build_int_cst (type, 1);
4168 if (low == 0)
4169 return fold_build2_loc (loc, LE_EXPR, type, exp,
4170 fold_convert_loc (loc, etype, high));
4172 if (high == 0)
4173 return fold_build2_loc (loc, GE_EXPR, type, exp,
4174 fold_convert_loc (loc, etype, low));
4176 if (operand_equal_p (low, high, 0))
4177 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4178 fold_convert_loc (loc, etype, low));
4180 if (integer_zerop (low))
4182 if (! TYPE_UNSIGNED (etype))
4184 etype = unsigned_type_for (etype);
4185 high = fold_convert_loc (loc, etype, high);
4186 exp = fold_convert_loc (loc, etype, exp);
4188 return build_range_check (loc, type, exp, 1, 0, high);
4191 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
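/* The LO/HI pair computed below encodes 2**(prec-1) - 1, the
   maximum of the corresponding signed type; HIGH must equal it
   for the signed (c > 0) rewrite to be valid.  */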
4192 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4194 unsigned HOST_WIDE_INT lo;
4195 HOST_WIDE_INT hi;
4196 int prec;
4198 prec = TYPE_PRECISION (etype);
4199 if (prec <= HOST_BITS_PER_WIDE_INT)
4201 hi = 0;
4202 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4204 else
4206 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4207 lo = (unsigned HOST_WIDE_INT) -1;
4210 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4212 if (TYPE_UNSIGNED (etype))
4214 tree signed_etype = signed_type_for (etype);
4215 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4216 etype
4217 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4218 else
4219 etype = signed_etype;
4220 exp = fold_convert_loc (loc, etype, exp);
4222 return fold_build2_loc (loc, GT_EXPR, type, exp,
4223 build_int_cst (etype, 0));
4227 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4228 This requires wrap-around arithmetic for the type of the expression.
4229 First make sure that arithmetic in this type is valid, then make sure
4230 that it wraps around. */
4231 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4232 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4233 TYPE_UNSIGNED (etype));
4235 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4237 tree utype, minv, maxv;
4239 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4240 for the type in question, as we rely on this here. */
4241 utype = unsigned_type_for (etype);
4242 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4243 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4244 integer_one_node, 1);
4245 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4247 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4248 minv, 1, maxv, 1)))
4249 etype = utype;
4250 else
4251 return 0;
4254 high = fold_convert_loc (loc, etype, high);
4255 low = fold_convert_loc (loc, etype, low);
4256 exp = fold_convert_loc (loc, etype, exp);
4258 value = const_binop (MINUS_EXPR, high, low);
4261 if (POINTER_TYPE_P (etype))
4263 if (value != 0 && !TREE_OVERFLOW (value))
4265 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4266 return build_range_check (loc, type,
4267 fold_build_pointer_plus_loc (loc, exp, low),
4268 1, build_int_cst (etype, 0), value);
4270 return 0;
4273 if (value != 0 && !TREE_OVERFLOW (value))
4274 return build_range_check (loc, type,
4275 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4276 1, build_int_cst (etype, 0), value);
4278 return 0;
4281 /* Return the predecessor of VAL in its type, handling the infinite case. */
4283 static tree
4284 range_predecessor (tree val)
4286 tree type = TREE_TYPE (val);
4288 if (INTEGRAL_TYPE_P (type)
4289 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4290 return 0;
4291 else
4292 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4295 /* Return the successor of VAL in its type, handling the infinite case. */
4297 static tree
4298 range_successor (tree val)
4300 tree type = TREE_TYPE (val);
4302 if (INTEGRAL_TYPE_P (type)
4303 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4304 return 0;
4305 else
4306 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4309 /* Given two ranges, see if we can merge them into one. Return 1 if we
4310 can, 0 if we can't. Set the output range into the specified parameters. */
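/* For example, merging + [2, 5] with + [4, 9] (both included, as
   for &&) yields + [4, 5]; merging + [2, 5] with - [4, 9] yields
   + [2, 3]; and disjoint + [2, 3] with + [7, 9], both included,
   cannot be described by one range, so the result is the
   always-false range - [-, -].  */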
4312 bool
4313 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4314 tree high0, int in1_p, tree low1, tree high1)
4316 int no_overlap;
4317 int subset;
4318 int temp;
4319 tree tem;
4320 int in_p;
4321 tree low, high;
4322 int lowequal = ((low0 == 0 && low1 == 0)
4323 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4324 low0, 0, low1, 0)));
4325 int highequal = ((high0 == 0 && high1 == 0)
4326 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4327 high0, 1, high1, 1)));
4329 /* Make range 0 be the range that starts first, or ends last if they
4330 start at the same value. Swap them if it isn't. */
4331 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4332 low0, 0, low1, 0))
4333 || (lowequal
4334 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4335 high1, 1, high0, 1))))
4337 temp = in0_p, in0_p = in1_p, in1_p = temp;
4338 tem = low0, low0 = low1, low1 = tem;
4339 tem = high0, high0 = high1, high1 = tem;
4342 /* Now flag two cases, whether the ranges are disjoint or whether the
4343 second range is totally subsumed in the first. Note that the tests
4344 below are simplified by the ones above. */
4345 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4346 high0, 1, low1, 0));
4347 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4348 high1, 1, high0, 1));
4350 /* We now have four cases, depending on whether we are including or
4351 excluding the two ranges. */
4352 if (in0_p && in1_p)
4354 /* If they don't overlap, the result is false. If the second range
4355 is a subset it is the result. Otherwise, the range is from the start
4356 of the second to the end of the first. */
4357 if (no_overlap)
4358 in_p = 0, low = high = 0;
4359 else if (subset)
4360 in_p = 1, low = low1, high = high1;
4361 else
4362 in_p = 1, low = low1, high = high0;
4365 else if (in0_p && ! in1_p)
4367 /* If they don't overlap, the result is the first range. If they are
4368 equal, the result is false. If the second range is a subset of the
4369 first, and the ranges begin at the same place, we go from just after
4370 the end of the second range to the end of the first. If the second
4371 range is not a subset of the first, or if it is a subset and both
4372 ranges end at the same place, the range starts at the start of the
4373 first range and ends just before the second range.
4374 Otherwise, we can't describe this as a single range. */
4375 if (no_overlap)
4376 in_p = 1, low = low0, high = high0;
4377 else if (lowequal && highequal)
4378 in_p = 0, low = high = 0;
4379 else if (subset && lowequal)
4381 low = range_successor (high1);
4382 high = high0;
4383 in_p = 1;
4384 if (low == 0)
4386 /* We are in the weird situation where high0 > high1 but
4387 high1 has no successor. Punt. */
4388 return 0;
4391 else if (! subset || highequal)
4393 low = low0;
4394 high = range_predecessor (low1);
4395 in_p = 1;
4396 if (high == 0)
4398 /* low0 < low1 but low1 has no predecessor. Punt. */
4399 return 0;
4402 else
4403 return 0;
4406 else if (! in0_p && in1_p)
4408 /* If they don't overlap, the result is the second range. If the second
4409 is a subset of the first, the result is false. Otherwise,
4410 the range starts just after the first range and ends at the
4411 end of the second. */
4412 if (no_overlap)
4413 in_p = 1, low = low1, high = high1;
4414 else if (subset || highequal)
4415 in_p = 0, low = high = 0;
4416 else
4418 low = range_successor (high0);
4419 high = high1;
4420 in_p = 1;
4421 if (low == 0)
4423 /* high1 > high0 but high0 has no successor. Punt. */
4424 return 0;
4429 else
4431 /* The case where we are excluding both ranges. Here the complex case
4432 is if they don't overlap. In that case, the only time we have a
4433 range is if they are adjacent. If the second is a subset of the
4434 first, the result is the first. Otherwise, the range to exclude
4435 starts at the beginning of the first range and ends at the end of the
4436 second. */
4437 if (no_overlap)
4439 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4440 range_successor (high0),
4441 1, low1, 0)))
4442 in_p = 0, low = low0, high = high1;
4443 else
4445 /* Canonicalize - [min, x] into - [-, x]. */
4446 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4447 switch (TREE_CODE (TREE_TYPE (low0)))
4449 case ENUMERAL_TYPE:
4450 if (TYPE_PRECISION (TREE_TYPE (low0))
4451 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4452 break;
4453 /* FALLTHROUGH */
4454 case INTEGER_TYPE:
4455 if (tree_int_cst_equal (low0,
4456 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4457 low0 = 0;
4458 break;
4459 case POINTER_TYPE:
4460 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4461 && integer_zerop (low0))
4462 low0 = 0;
4463 break;
4464 default:
4465 break;
4468 /* Canonicalize - [x, max] into - [x, -]. */
4469 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4470 switch (TREE_CODE (TREE_TYPE (high1)))
4472 case ENUMERAL_TYPE:
4473 if (TYPE_PRECISION (TREE_TYPE (high1))
4474 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4475 break;
4476 /* FALLTHROUGH */
4477 case INTEGER_TYPE:
4478 if (tree_int_cst_equal (high1,
4479 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4480 high1 = 0;
4481 break;
4482 case POINTER_TYPE:
4483 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4484 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4485 high1, 1,
4486 integer_one_node, 1)))
4487 high1 = 0;
4488 break;
4489 default:
4490 break;
4493 /* The ranges might be also adjacent between the maximum and
4494 minimum values of the given type. For
4495 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4496 return + [x + 1, y - 1]. */
4497 if (low0 == 0 && high1 == 0)
4499 low = range_successor (high0);
4500 high = range_predecessor (low1);
4501 if (low == 0 || high == 0)
4502 return 0;
4504 in_p = 1;
4506 else
4507 return 0;
4510 else if (subset)
4511 in_p = 0, low = low0, high = high0;
4512 else
4513 in_p = 0, low = low0, high = high1;
4516 *pin_p = in_p, *plow = low, *phigh = high;
4517 return 1;
4521 /* Subroutine of fold, looking inside expressions of the form
4522 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4523 of the COND_EXPR. This function is being used also to optimize
4524 A op B ? C : A, by reversing the comparison first.
4526 Return a folded expression whose code is not a COND_EXPR
4527 anymore, or NULL_TREE if no folding opportunity is found. */
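/* Typical folds performed here, sketched: X > 0 ? X : -X becomes
   ABS_EXPR <X>, X < Y ? X : Y becomes MIN_EXPR <X, Y>, and
   X != 0 ? X : 0 becomes plain X, each guarded by the signed-zero
   and NaN caveats spelled out below.  */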
4529 static tree
4530 fold_cond_expr_with_comparison (location_t loc, tree type,
4531 tree arg0, tree arg1, tree arg2)
4533 enum tree_code comp_code = TREE_CODE (arg0);
4534 tree arg00 = TREE_OPERAND (arg0, 0);
4535 tree arg01 = TREE_OPERAND (arg0, 1);
4536 tree arg1_type = TREE_TYPE (arg1);
4537 tree tem;
4539 STRIP_NOPS (arg1);
4540 STRIP_NOPS (arg2);
4542 /* If we have A op 0 ? A : -A, consider applying the following
4543 transformations:
4545 A == 0? A : -A same as -A
4546 A != 0? A : -A same as A
4547 A >= 0? A : -A same as abs (A)
4548 A > 0? A : -A same as abs (A)
4549 A <= 0? A : -A same as -abs (A)
4550 A < 0? A : -A same as -abs (A)
4552 None of these transformations work for modes with signed
4553 zeros. If A is +/-0, the first two transformations will
4554 change the sign of the result (from +0 to -0, or vice
4555 versa). The last four will fix the sign of the result,
4556 even though the original expressions could be positive or
4557 negative, depending on the sign of A.
4559 Note that all these transformations are correct if A is
4560 NaN, since the two alternatives (A and -A) are also NaNs. */
4561 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4562 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4563 ? real_zerop (arg01)
4564 : integer_zerop (arg01))
4565 && ((TREE_CODE (arg2) == NEGATE_EXPR
4566 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4567 /* In the case that A is of the form X-Y, '-A' (arg2) may
4568 have already been folded to Y-X, check for that. */
4569 || (TREE_CODE (arg1) == MINUS_EXPR
4570 && TREE_CODE (arg2) == MINUS_EXPR
4571 && operand_equal_p (TREE_OPERAND (arg1, 0),
4572 TREE_OPERAND (arg2, 1), 0)
4573 && operand_equal_p (TREE_OPERAND (arg1, 1),
4574 TREE_OPERAND (arg2, 0), 0))))
4575 switch (comp_code)
4577 case EQ_EXPR:
4578 case UNEQ_EXPR:
4579 tem = fold_convert_loc (loc, arg1_type, arg1);
4580 return pedantic_non_lvalue_loc (loc,
4581 fold_convert_loc (loc, type,
4582 negate_expr (tem)));
4583 case NE_EXPR:
4584 case LTGT_EXPR:
4585 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4586 case UNGE_EXPR:
4587 case UNGT_EXPR:
4588 if (flag_trapping_math)
4589 break;
4590 /* Fall through. */
4591 case GE_EXPR:
4592 case GT_EXPR:
4593 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4594 arg1 = fold_convert_loc (loc, signed_type_for
4595 (TREE_TYPE (arg1)), arg1);
4596 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4597 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4598 case UNLE_EXPR:
4599 case UNLT_EXPR:
4600 if (flag_trapping_math)
4601 break;
4602 case LE_EXPR:
4603 case LT_EXPR:
4604 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4605 arg1 = fold_convert_loc (loc, signed_type_for
4606 (TREE_TYPE (arg1)), arg1);
4607 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4608 return negate_expr (fold_convert_loc (loc, type, tem));
4609 default:
4610 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4611 break;
4614 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4615 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4616 both transformations are correct when A is NaN: A != 0
4617 is then true, and A == 0 is false. */
4619 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4620 && integer_zerop (arg01) && integer_zerop (arg2))
4622 if (comp_code == NE_EXPR)
4623 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4624 else if (comp_code == EQ_EXPR)
4625 return build_int_cst (type, 0);
4628 /* Try some transformations of A op B ? A : B.
4630 A == B? A : B same as B
4631 A != B? A : B same as A
4632 A >= B? A : B same as max (A, B)
4633 A > B? A : B same as max (B, A)
4634 A <= B? A : B same as min (A, B)
4635 A < B? A : B same as min (B, A)
4637 As above, these transformations don't work in the presence
4638 of signed zeros. For example, if A and B are zeros of
4639 opposite sign, the first two transformations will change
4640 the sign of the result. In the last four, the original
4641 expressions give different results for (A=+0, B=-0) and
4642 (A=-0, B=+0), but the transformed expressions do not.
4644 The first two transformations are correct if either A or B
4645 is a NaN. In the first transformation, the condition will
4646 be false, and B will indeed be chosen. In the case of the
4647 second transformation, the condition A != B will be true,
4648 and A will be chosen.
4650 The conversions to max() and min() are not correct if B is
4651 a number and A is not. The conditions in the original
4652 expressions will be false, so all four give B. The min()
4653 and max() versions would give a NaN instead. */
4654 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4655 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4656 /* Avoid these transformations if the COND_EXPR may be used
4657 as an lvalue in the C++ front-end. PR c++/19199. */
4658 && (in_gimple_form
4659 || (strcmp (lang_hooks.name, "GNU C++") != 0
4660 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4661 || ! maybe_lvalue_p (arg1)
4662 || ! maybe_lvalue_p (arg2)))
4664 tree comp_op0 = arg00;
4665 tree comp_op1 = arg01;
4666 tree comp_type = TREE_TYPE (comp_op0);
4668 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4669 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4671 comp_type = type;
4672 comp_op0 = arg1;
4673 comp_op1 = arg2;
4676 switch (comp_code)
4678 case EQ_EXPR:
4679 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4680 case NE_EXPR:
4681 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4682 case LE_EXPR:
4683 case LT_EXPR:
4684 case UNLE_EXPR:
4685 case UNLT_EXPR:
4686 /* In C++ a ?: expression can be an lvalue, so put the
4687 operand which will be used if they are equal first
4688 so that we can convert this back to the
4689 corresponding COND_EXPR. */
4690 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4692 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4693 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4694 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4695 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4696 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4697 comp_op1, comp_op0);
4698 return pedantic_non_lvalue_loc (loc,
4699 fold_convert_loc (loc, type, tem));
4701 break;
4702 case GE_EXPR:
4703 case GT_EXPR:
4704 case UNGE_EXPR:
4705 case UNGT_EXPR:
4706 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4708 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4709 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4710 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4711 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4712 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4713 comp_op1, comp_op0);
4714 return pedantic_non_lvalue_loc (loc,
4715 fold_convert_loc (loc, type, tem));
4717 break;
4718 case UNEQ_EXPR:
4719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4720 return pedantic_non_lvalue_loc (loc,
4721 fold_convert_loc (loc, type, arg2));
4722 break;
4723 case LTGT_EXPR:
4724 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4725 return pedantic_non_lvalue_loc (loc,
4726 fold_convert_loc (loc, type, arg1));
4727 break;
4728 default:
4729 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4730 break;
4734 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4735 we might still be able to simplify this. For example,
4736 if C1 is one less or one more than C2, this might have started
4737 out as a MIN or MAX and been transformed by this function.
4738 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
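/* For instance, X < 4 ? X : 3 has C1 == C2 + 1, so it is
   recognized below as MIN_EXPR <X, 3>.  */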
4740 if (INTEGRAL_TYPE_P (type)
4741 && TREE_CODE (arg01) == INTEGER_CST
4742 && TREE_CODE (arg2) == INTEGER_CST)
4743 switch (comp_code)
4745 case EQ_EXPR:
4746 if (TREE_CODE (arg1) == INTEGER_CST)
4747 break;
4748 /* We can replace A with C1 in this case. */
4749 arg1 = fold_convert_loc (loc, type, arg01);
4750 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4752 case LT_EXPR:
4753 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4754 MIN_EXPR, to preserve the signedness of the comparison. */
4755 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4756 OEP_ONLY_CONST)
4757 && operand_equal_p (arg01,
4758 const_binop (PLUS_EXPR, arg2,
4759 build_int_cst (type, 1)),
4760 OEP_ONLY_CONST))
4762 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4763 fold_convert_loc (loc, TREE_TYPE (arg00),
4764 arg2));
4765 return pedantic_non_lvalue_loc (loc,
4766 fold_convert_loc (loc, type, tem));
4768 break;
4770 case LE_EXPR:
4771 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4772 as above. */
4773 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4774 OEP_ONLY_CONST)
4775 && operand_equal_p (arg01,
4776 const_binop (MINUS_EXPR, arg2,
4777 build_int_cst (type, 1)),
4778 OEP_ONLY_CONST))
4780 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4781 fold_convert_loc (loc, TREE_TYPE (arg00),
4782 arg2));
4783 return pedantic_non_lvalue_loc (loc,
4784 fold_convert_loc (loc, type, tem));
4786 break;
4788 case GT_EXPR:
4789 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4790 MAX_EXPR, to preserve the signedness of the comparison. */
4791 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4792 OEP_ONLY_CONST)
4793 && operand_equal_p (arg01,
4794 const_binop (MINUS_EXPR, arg2,
4795 build_int_cst (type, 1)),
4796 OEP_ONLY_CONST))
4798 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4799 fold_convert_loc (loc, TREE_TYPE (arg00),
4800 arg2));
4801 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4803 break;
4805 case GE_EXPR:
4806 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4807 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4808 OEP_ONLY_CONST)
4809 && operand_equal_p (arg01,
4810 const_binop (PLUS_EXPR, arg2,
4811 build_int_cst (type, 1)),
4812 OEP_ONLY_CONST))
4814 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4815 fold_convert_loc (loc, TREE_TYPE (arg00),
4816 arg2));
4817 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4819 break;
4820 case NE_EXPR:
4821 break;
4822 default:
4823 gcc_unreachable ();
4826 return NULL_TREE;
4831 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4832 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4833 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4834 false) >= 2)
4835 #endif
4837 /* EXP is some logical combination of boolean tests. See if we can
4838 merge it into some range test. Return the new tree if so. */
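/* The classic case: ch >= '0' && ch <= '9' becomes the single
   range + ['0', '9'] and is rewritten as
   (unsigned) (ch - '0') <= 9, trading a branch for a subtract.  */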
4840 static tree
4841 fold_range_test (location_t loc, enum tree_code code, tree type,
4842 tree op0, tree op1)
4844 int or_op = (code == TRUTH_ORIF_EXPR
4845 || code == TRUTH_OR_EXPR);
4846 int in0_p, in1_p, in_p;
4847 tree low0, low1, low, high0, high1, high;
4848 bool strict_overflow_p = false;
4849 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4850 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4851 tree tem;
4852 const char * const warnmsg = G_("assuming signed overflow does not occur "
4853 "when simplifying range test");
4855 /* If this is an OR operation, invert both sides; we will invert
4856 again at the end. */
4857 if (or_op)
4858 in0_p = ! in0_p, in1_p = ! in1_p;
4860 /* If both expressions are the same, if we can merge the ranges, and we
4861 can build the range test, return it or it inverted. If one of the
4862 ranges is always true or always false, consider it to be the same
4863 expression as the other. */
4864 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4865 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4866 in1_p, low1, high1)
4867 && 0 != (tem = (build_range_check (loc, type,
4868 lhs != 0 ? lhs
4869 : rhs != 0 ? rhs : integer_zero_node,
4870 in_p, low, high))))
4872 if (strict_overflow_p)
4873 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4874 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4877 /* On machines where the branch cost is expensive, if this is a
4878 short-circuited branch and the underlying object on both sides
4879 is the same, make a non-short-circuit operation. */
4880 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4881 && lhs != 0 && rhs != 0
4882 && (code == TRUTH_ANDIF_EXPR
4883 || code == TRUTH_ORIF_EXPR)
4884 && operand_equal_p (lhs, rhs, 0))
4886 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4887 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4888 which cases we can't do this. */
4889 if (simple_operand_p (lhs))
4890 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4891 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4892 type, op0, op1);
4894 else if (!lang_hooks.decls.global_bindings_p ()
4895 && !CONTAINS_PLACEHOLDER_P (lhs))
4897 tree common = save_expr (lhs);
4899 if (0 != (lhs = build_range_check (loc, type, common,
4900 or_op ? ! in0_p : in0_p,
4901 low0, high0))
4902 && (0 != (rhs = build_range_check (loc, type, common,
4903 or_op ? ! in1_p : in1_p,
4904 low1, high1))))
4906 if (strict_overflow_p)
4907 fold_overflow_warning (warnmsg,
4908 WARN_STRICT_OVERFLOW_COMPARISON);
4909 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4910 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4911 type, lhs, rhs);
4916 return 0;
4919 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4920 bit value. Arrange things so the extra bits will be set to zero if and
4921 only if C is sign-extended to its full width. If MASK is nonzero,
4922 it is an INTEGER_CST that should be AND'ed with the extra bits. */
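/* A small illustration with P == 8 in a 32-bit type: C == -1,
   i.e. 0xffffffff, the sign extension of 0xff, comes back as
   0x000000ff with the extra bits clear, whereas C == 0x000000ff,
   which is not sign-extended, comes back as 0xffffffff with the
   extra bits set, ready for a later integer_zerop test.  */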
4924 static tree
4925 unextend (tree c, int p, int unsignedp, tree mask)
4927 tree type = TREE_TYPE (c);
4928 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4929 tree temp;
4931 if (p == modesize || unsignedp)
4932 return c;
4934 /* We work by getting just the sign bit into the low-order bit, then
4935 into the high-order bit, then sign-extend. We then XOR that value
4936 with C. */
4937 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4938 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4940 /* We must use a signed type in order to get an arithmetic right shift.
4941 However, we must also avoid introducing accidental overflows, so that
4942 a subsequent call to integer_zerop will work. Hence we must
4943 do the type conversion here. At this point, the constant is either
4944 zero or one, and the conversion to a signed type can never overflow.
4945 We could get an overflow if this conversion is done anywhere else. */
4946 if (TYPE_UNSIGNED (type))
4947 temp = fold_convert (signed_type_for (type), temp);
4949 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4950 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4951 if (mask != 0)
4952 temp = const_binop (BIT_AND_EXPR, temp,
4953 fold_convert (TREE_TYPE (c), mask));
4954 /* If necessary, convert the type back to match the type of C. */
4955 if (TYPE_UNSIGNED (type))
4956 temp = fold_convert (type, temp);
4958 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4961 /* For an expression that has the form
4962 (A && B) || ~B
4964 (A || B) && ~B,
4965 we can drop one of the inner expressions and simplify to
4966 A || ~B
4968 A && ~B
4969 LOC is the location of the resulting expression. OP is the inner
4970 logical operation; the left-hand side in the examples above, while CMPOP
4971 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4972 removing a condition that guards another, as in
4973 (A != NULL && A->...) || A == NULL
4974 which we must not transform. If RHS_ONLY is true, only eliminate the
4975 right-most operand of the inner logical operation. */
4977 static tree
4978 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4979 bool rhs_only)
4981 tree type = TREE_TYPE (cmpop);
4982 enum tree_code code = TREE_CODE (cmpop);
4983 enum tree_code truthop_code = TREE_CODE (op);
4984 tree lhs = TREE_OPERAND (op, 0);
4985 tree rhs = TREE_OPERAND (op, 1);
4986 tree orig_lhs = lhs, orig_rhs = rhs;
4987 enum tree_code rhs_code = TREE_CODE (rhs);
4988 enum tree_code lhs_code = TREE_CODE (lhs);
4989 enum tree_code inv_code;
4991 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4992 return NULL_TREE;
4994 if (TREE_CODE_CLASS (code) != tcc_comparison)
4995 return NULL_TREE;
4997 if (rhs_code == truthop_code)
4999 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5000 if (newrhs != NULL_TREE)
5002 rhs = newrhs;
5003 rhs_code = TREE_CODE (rhs);
5006 if (lhs_code == truthop_code && !rhs_only)
5008 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5009 if (newlhs != NULL_TREE)
5011 lhs = newlhs;
5012 lhs_code = TREE_CODE (lhs);
5016 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5017 if (inv_code == rhs_code
5018 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5019 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5020 return lhs;
5021 if (!rhs_only && inv_code == lhs_code
5022 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5023 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5024 return rhs;
5025 if (rhs != orig_rhs || lhs != orig_lhs)
5026 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5027 lhs, rhs);
5028 return NULL_TREE;
5031 /* Find ways of folding logical expressions of LHS and RHS:
5032 Try to merge two comparisons to the same innermost item.
5033 Look for range tests like "ch >= '0' && ch <= '9'".
5034 Look for combinations of simple terms on machines with expensive branches
5035 and evaluate the RHS unconditionally.
5037 For example, if we have p->a == 2 && p->b == 4 and we can make an
5038 object large enough to span both A and B, we can do this with a comparison
5039 against the object ANDed with the a mask.
5041 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5042 operations to do this with one comparison.
5044 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5045 function and the one above.
5047 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5048 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5050 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5051 two operands.
5053 We return the simplified tree or 0 if no optimization is possible. */
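/* Sketch of the payoff: given struct S { unsigned a : 8; unsigned b : 8; },
   s.a == 2 && s.b == 4 can become a single 16-bit load of both fields
   compared against one merged constant, with BYTES_BIG_ENDIAN deciding
   where each field's bits land inside that word.  */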
5055 static tree
5056 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5057 tree lhs, tree rhs)
5059 /* If this is the "or" of two comparisons, we can do something if
5060 the comparisons are NE_EXPR. If this is the "and", we can do something
5061 if the comparisons are EQ_EXPR. I.e.,
5062 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5064 WANTED_CODE is this operation code. For single bit fields, we can
5065 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5066 comparison for one-bit fields. */
5068 enum tree_code wanted_code;
5069 enum tree_code lcode, rcode;
5070 tree ll_arg, lr_arg, rl_arg, rr_arg;
5071 tree ll_inner, lr_inner, rl_inner, rr_inner;
5072 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5073 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5074 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5075 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5076 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5077 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5078 enum machine_mode lnmode, rnmode;
5079 tree ll_mask, lr_mask, rl_mask, rr_mask;
5080 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5081 tree l_const, r_const;
5082 tree lntype, rntype, result;
5083 HOST_WIDE_INT first_bit, end_bit;
5084 int volatilep;
5086 /* Start by getting the comparison codes. Fail if anything is volatile.
5087 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5088 it were surrounded with a NE_EXPR. */
5090 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5091 return 0;
5093 lcode = TREE_CODE (lhs);
5094 rcode = TREE_CODE (rhs);
5096 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5098 lhs = build2 (NE_EXPR, truth_type, lhs,
5099 build_int_cst (TREE_TYPE (lhs), 0));
5100 lcode = NE_EXPR;
5103 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5105 rhs = build2 (NE_EXPR, truth_type, rhs,
5106 build_int_cst (TREE_TYPE (rhs), 0));
5107 rcode = NE_EXPR;
5110 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5111 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5112 return 0;
5114 ll_arg = TREE_OPERAND (lhs, 0);
5115 lr_arg = TREE_OPERAND (lhs, 1);
5116 rl_arg = TREE_OPERAND (rhs, 0);
5117 rr_arg = TREE_OPERAND (rhs, 1);
5119 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5120 if (simple_operand_p (ll_arg)
5121 && simple_operand_p (lr_arg))
5123 if (operand_equal_p (ll_arg, rl_arg, 0)
5124 && operand_equal_p (lr_arg, rr_arg, 0))
5126 result = combine_comparisons (loc, code, lcode, rcode,
5127 truth_type, ll_arg, lr_arg);
5128 if (result)
5129 return result;
5131 else if (operand_equal_p (ll_arg, rr_arg, 0)
5132 && operand_equal_p (lr_arg, rl_arg, 0))
5134 result = combine_comparisons (loc, code, lcode,
5135 swap_tree_comparison (rcode),
5136 truth_type, ll_arg, lr_arg);
5137 if (result)
5138 return result;
5142 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5143 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5145 /* If the RHS can be evaluated unconditionally and its operands are
5146 simple, it wins to evaluate the RHS unconditionally on machines
5147 with expensive branches. In this case, this isn't a comparison
5148 that can be merged. */
5150 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5151 false) >= 2
5152 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5153 && simple_operand_p (rl_arg)
5154 && simple_operand_p (rr_arg))
5156 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5157 if (code == TRUTH_OR_EXPR
5158 && lcode == NE_EXPR && integer_zerop (lr_arg)
5159 && rcode == NE_EXPR && integer_zerop (rr_arg)
5160 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5161 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5162 return build2_loc (loc, NE_EXPR, truth_type,
5163 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5164 ll_arg, rl_arg),
5165 build_int_cst (TREE_TYPE (ll_arg), 0));
5167 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5168 if (code == TRUTH_AND_EXPR
5169 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5170 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5171 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5172 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5173 return build2_loc (loc, EQ_EXPR, truth_type,
5174 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5175 ll_arg, rl_arg),
5176 build_int_cst (TREE_TYPE (ll_arg), 0));
5179 /* See if the comparisons can be merged. Then get all the parameters for
5180 each side. */
5182 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5183 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5184 return 0;
5186 volatilep = 0;
5187 ll_inner = decode_field_reference (loc, ll_arg,
5188 &ll_bitsize, &ll_bitpos, &ll_mode,
5189 &ll_unsignedp, &volatilep, &ll_mask,
5190 &ll_and_mask);
5191 lr_inner = decode_field_reference (loc, lr_arg,
5192 &lr_bitsize, &lr_bitpos, &lr_mode,
5193 &lr_unsignedp, &volatilep, &lr_mask,
5194 &lr_and_mask);
5195 rl_inner = decode_field_reference (loc, rl_arg,
5196 &rl_bitsize, &rl_bitpos, &rl_mode,
5197 &rl_unsignedp, &volatilep, &rl_mask,
5198 &rl_and_mask);
5199 rr_inner = decode_field_reference (loc, rr_arg,
5200 &rr_bitsize, &rr_bitpos, &rr_mode,
5201 &rr_unsignedp, &volatilep, &rr_mask,
5202 &rr_and_mask);
5204 /* The inner operation on the lhs of each comparison must be the
5205 same if we are to be able to do anything.
5206 Then see if we have constants. If not, the same must be true for
5207 the rhs's. */
5208 if (volatilep || ll_inner == 0 || rl_inner == 0
5209 || ! operand_equal_p (ll_inner, rl_inner, 0))
5210 return 0;
5212 if (TREE_CODE (lr_arg) == INTEGER_CST
5213 && TREE_CODE (rr_arg) == INTEGER_CST)
5214 l_const = lr_arg, r_const = rr_arg;
5215 else if (lr_inner == 0 || rr_inner == 0
5216 || ! operand_equal_p (lr_inner, rr_inner, 0))
5217 return 0;
5218 else
5219 l_const = r_const = 0;
5221 /* If either comparison code is not correct for our logical operation,
5222 fail. However, we can convert a one-bit comparison against zero into
5223 the opposite comparison against that bit being set in the field. */
5225 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5226 if (lcode != wanted_code)
5228 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5230 /* Make the left operand unsigned, since we are only interested
5231 in the value of one bit. Otherwise we are doing the wrong
5232 thing below. */
5233 ll_unsignedp = 1;
5234 l_const = ll_mask;
5236 else
5237 return 0;
5240 /* This is analogous to the code for l_const above. */
5241 if (rcode != wanted_code)
5243 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5245 rl_unsignedp = 1;
5246 r_const = rl_mask;
5248 else
5249 return 0;
5252 /* See if we can find a mode that contains both fields being compared on
5253 the left. If we can't, fail. Otherwise, update all constants and masks
5254 to be relative to a field of that size. */
5255 first_bit = MIN (ll_bitpos, rl_bitpos);
5256 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5257 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5258 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5259 volatilep);
5260 if (lnmode == VOIDmode)
5261 return 0;
5263 lnbitsize = GET_MODE_BITSIZE (lnmode);
5264 lnbitpos = first_bit & ~ (lnbitsize - 1);
5265 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5266 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5268 if (BYTES_BIG_ENDIAN)
5270 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5271 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5274 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5275 size_int (xll_bitpos));
5276 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5277 size_int (xrl_bitpos));
5279 if (l_const)
5281 l_const = fold_convert_loc (loc, lntype, l_const);
5282 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5283 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5284 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5285 fold_build1_loc (loc, BIT_NOT_EXPR,
5286 lntype, ll_mask))))
5288 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5290 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5293 if (r_const)
5295 r_const = fold_convert_loc (loc, lntype, r_const);
5296 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5297 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5298 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5299 fold_build1_loc (loc, BIT_NOT_EXPR,
5300 lntype, rl_mask))))
5302 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5304 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5308 /* If the right sides are not constant, do the same for them. Also,
5309 disallow this optimization if a size or signedness mismatch occurs
5310 between the left and right sides. */
5311 if (l_const == 0)
5313 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5314 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5315 /* Make sure the two fields on the right
5316 correspond to the left without being swapped. */
5317 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5318 return 0;
5320 first_bit = MIN (lr_bitpos, rr_bitpos);
5321 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5322 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5323 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5324 volatilep);
5325 if (rnmode == VOIDmode)
5326 return 0;
5328 rnbitsize = GET_MODE_BITSIZE (rnmode);
5329 rnbitpos = first_bit & ~ (rnbitsize - 1);
5330 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5331 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5333 if (BYTES_BIG_ENDIAN)
5335 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5336 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5339 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5340 rntype, lr_mask),
5341 size_int (xlr_bitpos));
5342 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5343 rntype, rr_mask),
5344 size_int (xrr_bitpos));
5346 /* Make a mask that corresponds to both fields being compared.
5347 Do this for both items being compared. If the operands are the
5348 same size and the bits being compared are in the same position
5349 then we can do this by masking both and comparing the masked
5350 results. */
5351 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5352 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5353 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5355 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5356 ll_unsignedp || rl_unsignedp);
5357 if (! all_ones_mask_p (ll_mask, lnbitsize))
5358 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5360 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5361 lr_unsignedp || rr_unsignedp);
5362 if (! all_ones_mask_p (lr_mask, rnbitsize))
5363 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5365 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5368 /* There is still another way we can do something: If both pairs of
5369 fields being compared are adjacent, we may be able to make a wider
5370 field containing them both.
5372 Note that we still must mask the lhs/rhs expressions. Furthermore,
5373 the mask must be shifted to account for the shift done by
5374 make_bit_field_ref. */
5375 if ((ll_bitsize + ll_bitpos == rl_bitpos
5376 && lr_bitsize + lr_bitpos == rr_bitpos)
5377 || (ll_bitpos == rl_bitpos + rl_bitsize
5378 && lr_bitpos == rr_bitpos + rr_bitsize))
5380 tree type;
5382 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5383 ll_bitsize + rl_bitsize,
5384 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5385 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5386 lr_bitsize + rr_bitsize,
5387 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5389 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5390 size_int (MIN (xll_bitpos, xrl_bitpos)));
5391 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5392 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5394 /* Convert to the smaller type before masking out unwanted bits. */
5395 type = lntype;
5396 if (lntype != rntype)
5398 if (lnbitsize > rnbitsize)
5400 lhs = fold_convert_loc (loc, rntype, lhs);
5401 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5402 type = rntype;
5404 else if (lnbitsize < rnbitsize)
5406 rhs = fold_convert_loc (loc, lntype, rhs);
5407 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5408 type = lntype;
5412 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5413 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5415 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5416 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5418 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5421 return 0;
5424 /* Handle the case of comparisons with constants. If there is something in
5425 common between the masks, those bits of the constants must be the same.
5426 If not, the condition is always false. Test for this to avoid generating
5427 incorrect code below. */
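/* E.g. s.a == 1 && s.a == 3 makes the shared mask bits disagree
   between the two constants, so the test is folded to constant
   false (with a warning) rather than compiled.  */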
5428 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5429 if (! integer_zerop (result)
5430 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5431 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5433 if (wanted_code == NE_EXPR)
5435 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5436 return constant_boolean_node (true, truth_type);
5438 else
5440 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5441 return constant_boolean_node (false, truth_type);
5445 /* Construct the expression we will return. First get the component
5446 reference we will make. Unless the mask is all ones the width of
5447 that field, perform the mask operation. Then compare with the
5448 merged constant. */
5449 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5450 ll_unsignedp || rl_unsignedp);
5452 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5453 if (! all_ones_mask_p (ll_mask, lnbitsize))
5454 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5456 return build2_loc (loc, wanted_code, truth_type, result,
5457 const_binop (BIT_IOR_EXPR, l_const, r_const));
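/* Worked example (illustrative only): for

     struct s { unsigned f0 : 4; unsigned f1 : 4; } x;

   the test x.f0 == 3 && x.f1 == 5 merges into a single byte-wide
   compare of the containing word against the OR of the shifted
   constants, roughly (w & 0xff) == 0x53, with the exact bit layout
   depending on target endianness.  */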
5460 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5461 constant. */
5463 static tree
5464 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5465 tree op0, tree op1)
5467 tree arg0 = op0;
5468 enum tree_code op_code;
5469 tree comp_const;
5470 tree minmax_const;
5471 int consts_equal, consts_lt;
5472 tree inner;
5474 STRIP_SIGN_NOPS (arg0);
5476 op_code = TREE_CODE (arg0);
5477 minmax_const = TREE_OPERAND (arg0, 1);
5478 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5479 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5480 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5481 inner = TREE_OPERAND (arg0, 0);
5483 /* If something does not permit us to optimize, return NULL_TREE. */
5484 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5485 || TREE_CODE (comp_const) != INTEGER_CST
5486 || TREE_OVERFLOW (comp_const)
5487 || TREE_CODE (minmax_const) != INTEGER_CST
5488 || TREE_OVERFLOW (minmax_const))
5489 return NULL_TREE;
5491 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5492 and GT_EXPR, doing the rest with recursive calls using logical
5493 simplifications. */
5494 switch (code)
5496 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5498 tree tem
5499 = optimize_minmax_comparison (loc,
5500 invert_tree_comparison (code, false),
5501 type, op0, op1);
5502 if (tem)
5503 return invert_truthvalue_loc (loc, tem);
5504 return NULL_TREE;
5507 case GE_EXPR:
5508 return
5509 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5510 optimize_minmax_comparison
5511 (loc, EQ_EXPR, type, arg0, comp_const),
5512 optimize_minmax_comparison
5513 (loc, GT_EXPR, type, arg0, comp_const));
5515 case EQ_EXPR:
5516 if (op_code == MAX_EXPR && consts_equal)
5517 /* MAX (X, 0) == 0 -> X <= 0 */
5518 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5520 else if (op_code == MAX_EXPR && consts_lt)
5521 /* MAX (X, 0) == 5 -> X == 5 */
5522 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5524 else if (op_code == MAX_EXPR)
5525 /* MAX (X, 0) == -1 -> false */
5526 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5528 else if (consts_equal)
5529 /* MIN (X, 0) == 0 -> X >= 0 */
5530 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5532 else if (consts_lt)
5533 /* MIN (X, 0) == 5 -> false */
5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536 else
5537 /* MIN (X, 0) == -1 -> X == -1 */
5538 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5540 case GT_EXPR:
5541 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5542 /* MAX (X, 0) > 0 -> X > 0
5543 MAX (X, 0) > 5 -> X > 5 */
5544 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5546 else if (op_code == MAX_EXPR)
5547 /* MAX (X, 0) > -1 -> true */
5548 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5550 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5551 /* MIN (X, 0) > 0 -> false
5552 MIN (X, 0) > 5 -> false */
5553 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5555 else
5556 /* MIN (X, 0) > -1 -> X > -1 */
5557 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5559 default:
5560 return NULL_TREE;
5564 /* T is an integer expression that is being multiplied, divided, or taken a
5565 modulus (CODE says which and what kind of divide or modulus) by a
5566 constant C. See if we can eliminate that operation by folding it with
5567 other operations already in T. WIDE_TYPE, if non-null, is a type that
5568 should be used for the computation if wider than our type.
5570 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5571 (X * 2) + (Y * 4). We must, however, be assured that either the original
5572 expression would not overflow or that overflow is undefined for the type
5573 in the language in question.
5575 If we return a non-null expression, it is an equivalent form of the
5576 original computation, but need not be in the original type.
5578 We set *STRICT_OVERFLOW_P to true if the return value depends
5579 signed overflow being undefined. Otherwise we do not change
5580 *STRICT_OVERFLOW_P. */
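/* Caller-side sketch (hypothetical, for illustration only):

     bool strict = false;
     tree c = build_int_cst (TREE_TYPE (t), 4);
     tree r = extract_muldiv (t, c, TRUNC_DIV_EXPR, NULL_TREE, &strict);

   If T is X*8 + Y*16 and signed overflow is undefined, R comes back
   as X*2 + Y*4; when STRICT is set, the caller may only use R if it
   is allowed to assume undefined signed overflow.  */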
5582 static tree
5583 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5584 bool *strict_overflow_p)
5586 /* To avoid exponential search depth, refuse to allow recursion past
5587 three levels. Beyond that (1) it's highly unlikely that we'll find
5588 something interesting and (2) we've probably processed it before
5589 when we built the inner expression. */
5591 static int depth;
5592 tree ret;
5594 if (depth > 3)
5595 return NULL;
5597 depth++;
5598 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5599 depth--;
5601 return ret;
5604 static tree
5605 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5606 bool *strict_overflow_p)
5608 tree type = TREE_TYPE (t);
5609 enum tree_code tcode = TREE_CODE (t);
5610 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5611 > GET_MODE_SIZE (TYPE_MODE (type)))
5612 ? wide_type : type);
5613 tree t1, t2;
5614 int same_p = tcode == code;
5615 tree op0 = NULL_TREE, op1 = NULL_TREE;
5616 bool sub_strict_overflow_p;
5618 /* Don't deal with constants of zero here; they confuse the code below. */
5619 if (integer_zerop (c))
5620 return NULL_TREE;
5622 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5623 op0 = TREE_OPERAND (t, 0);
5625 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5626 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5628 /* Note that we need not handle conditional operations here since fold
5629 already handles those cases. So just do arithmetic here. */
5630 switch (tcode)
5632 case INTEGER_CST:
5633 /* For a constant, we can always simplify if we are a multiply
5634 or (for divide and modulus) if it is a multiple of our constant. */
5635 if (code == MULT_EXPR
5636 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5637 return const_binop (code, fold_convert (ctype, t),
5638 fold_convert (ctype, c));
5639 break;
5641 CASE_CONVERT: case NON_LVALUE_EXPR:
5642 /* If op0 is an expression ... */
5643 if ((COMPARISON_CLASS_P (op0)
5644 || UNARY_CLASS_P (op0)
5645 || BINARY_CLASS_P (op0)
5646 || VL_EXP_CLASS_P (op0)
5647 || EXPRESSION_CLASS_P (op0))
5648 /* ... and has wrapping overflow, and its type is smaller
5649 than ctype, then we cannot pass through as widening. */
5650 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5651 && (TYPE_PRECISION (ctype)
5652 > TYPE_PRECISION (TREE_TYPE (op0))))
5653 /* ... or this is a truncation (t is narrower than op0),
5654 then we cannot pass through this narrowing. */
5655 || (TYPE_PRECISION (type)
5656 < TYPE_PRECISION (TREE_TYPE (op0)))
5657 /* ... or signedness changes for division or modulus,
5658 then we cannot pass through this conversion. */
5659 || (code != MULT_EXPR
5660 && (TYPE_UNSIGNED (ctype)
5661 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5662 /* ... or has undefined overflow while the converted to
5663 type has not, we cannot do the operation in the inner type
5664 as that would introduce undefined overflow. */
5665 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5666 && !TYPE_OVERFLOW_UNDEFINED (type))))
5667 break;
5669 /* Pass the constant down and see if we can make a simplification. If
5670 we can, replace this expression with the inner simplification for
5671 possible later conversion to our type or some other type. */
5672 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5673 && TREE_CODE (t2) == INTEGER_CST
5674 && !TREE_OVERFLOW (t2)
5675 && (0 != (t1 = extract_muldiv (op0, t2, code,
5676 code == MULT_EXPR
5677 ? ctype : NULL_TREE,
5678 strict_overflow_p))))
5679 return t1;
5680 break;
5682 case ABS_EXPR:
5683 /* If widening the type changes it from signed to unsigned, then we
5684 must avoid building ABS_EXPR itself as unsigned. */
5685 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5687 tree cstype = (*signed_type_for) (ctype);
5688 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5689 != 0)
5691 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5692 return fold_convert (ctype, t1);
5694 break;
5696 /* If the constant is negative, we cannot simplify this. */
5697 if (tree_int_cst_sgn (c) == -1)
5698 break;
5699 /* FALLTHROUGH */
5700 case NEGATE_EXPR:
5701 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5702 != 0)
5703 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5704 break;
5706 case MIN_EXPR: case MAX_EXPR:
5707 /* If widening the type changes the signedness, then we can't perform
5708 this optimization as that changes the result. */
5709 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5710 break;
5712 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5713 sub_strict_overflow_p = false;
5714 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5715 &sub_strict_overflow_p)) != 0
5716 && (t2 = extract_muldiv (op1, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0)
5719 if (tree_int_cst_sgn (c) < 0)
5720 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5721 if (sub_strict_overflow_p)
5722 *strict_overflow_p = true;
5723 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5724 fold_convert (ctype, t2));
5726 break;
5728 case LSHIFT_EXPR: case RSHIFT_EXPR:
5729 /* If the second operand is constant, this is a multiplication
5730 or floor division by a power of two, so we can treat it that
5731 way unless the multiplier or divisor overflows. Signed
5732 left-shift overflow is implementation-defined rather than
5733 undefined in C90, so do not convert signed left shift into
5734 multiplication. */
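/* Example (illustrative): for unsigned X, (X << 3) / 4 is rewritten
   below as (X * 8) / 4 and recursion folds it to X * 2, while
   X >> 2 is treated as floor division of X by 4.  */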
5735 if (TREE_CODE (op1) == INTEGER_CST
5736 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5737 /* const_binop may not detect overflow correctly,
5738 so check for it explicitly here. */
5739 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5740 && TREE_INT_CST_HIGH (op1) == 0
5741 && 0 != (t1 = fold_convert (ctype,
5742 const_binop (LSHIFT_EXPR,
5743 size_one_node,
5744 op1)))
5745 && !TREE_OVERFLOW (t1))
5746 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5747 ? MULT_EXPR : FLOOR_DIV_EXPR,
5748 ctype,
5749 fold_convert (ctype, op0),
5750 t1),
5751 c, code, wide_type, strict_overflow_p);
5752 break;
5754 case PLUS_EXPR: case MINUS_EXPR:
5755 /* See if we can eliminate the operation on both sides. If we can, we
5756 can return a new PLUS or MINUS. If we can't, the only remaining
5757 cases where we can do anything are if the second operand is a
5758 constant. */
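/* Example (illustrative): (X*4 + Y*8) / 4 folds to X + Y*2 here,
   and (X*4 + 8) / 4 to X + 2 via the constant-operand path below,
   provided overflow is undefined or the operands are known
   multiples of C.  */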
5759 sub_strict_overflow_p = false;
5760 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5761 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5762 if (t1 != 0 && t2 != 0
5763 && (code == MULT_EXPR
5764 /* If not multiplication, we can only do this if both operands
5765 are divisible by c. */
5766 || (multiple_of_p (ctype, op0, c)
5767 && multiple_of_p (ctype, op1, c))))
5769 if (sub_strict_overflow_p)
5770 *strict_overflow_p = true;
5771 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5772 fold_convert (ctype, t2));
5775 /* If this was a subtraction, negate OP1 and set it to be an addition.
5776 This simplifies the logic below. */
5777 if (tcode == MINUS_EXPR)
5779 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5780 /* If OP1 was not easily negatable, the constant may be OP0. */
5781 if (TREE_CODE (op0) == INTEGER_CST)
5783 tree tem = op0;
5784 op0 = op1;
5785 op1 = tem;
5786 tem = t1;
5787 t1 = t2;
5788 t2 = tem;
5792 if (TREE_CODE (op1) != INTEGER_CST)
5793 break;
5795 /* If either OP1 or C is negative, this optimization is not safe for
5796 some of the division and remainder types while for others we need
5797 to change the code. */
5798 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5800 if (code == CEIL_DIV_EXPR)
5801 code = FLOOR_DIV_EXPR;
5802 else if (code == FLOOR_DIV_EXPR)
5803 code = CEIL_DIV_EXPR;
5804 else if (code != MULT_EXPR
5805 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5806 break;
5809 /* If it's a multiply or a division/modulus operation of a multiple
5810 of our constant, do the operation and verify it doesn't overflow. */
5811 if (code == MULT_EXPR
5812 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5814 op1 = const_binop (code, fold_convert (ctype, op1),
5815 fold_convert (ctype, c));
5816 /* We allow the constant to overflow with wrapping semantics. */
5817 if (op1 == 0
5818 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5819 break;
5821 else
5822 break;
5824 /* If we have an unsigned type that is not a sizetype, we cannot widen
5825 the operation since it will change the result if the original
5826 computation overflowed. */
5827 if (TYPE_UNSIGNED (ctype)
5828 && ctype != type)
5829 break;
5831 /* If we were able to eliminate our operation from the first side,
5832 apply our operation to the second side and reform the PLUS. */
5833 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5834 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5836 /* The last case is if we are a multiply. In that case, we can
5837 apply the distributive law to commute the multiply and addition
5838 if the multiplication of the constants doesn't overflow. */
5839 if (code == MULT_EXPR)
5840 return fold_build2 (tcode, ctype,
5841 fold_build2 (code, ctype,
5842 fold_convert (ctype, op0),
5843 fold_convert (ctype, c)),
5844 op1);
5846 break;
5848 case MULT_EXPR:
5849 /* We have a special case here if we are doing something like
5850 (C * 8) % 4 since we know that's zero. */
5851 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5852 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5853 /* If the multiplication can overflow we cannot optimize this. */
5854 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5855 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5856 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5858 *strict_overflow_p = true;
5859 return omit_one_operand (type, integer_zero_node, op0);
5862 /* ... fall through ... */
5864 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5865 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5866 /* If we can extract our operation from the LHS, do so and return a
5867 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5868 do something only if the second operand is a constant. */
5869 if (same_p
5870 && (t1 = extract_muldiv (op0, c, code, wide_type,
5871 strict_overflow_p)) != 0)
5872 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5873 fold_convert (ctype, op1));
5874 else if (tcode == MULT_EXPR && code == MULT_EXPR
5875 && (t1 = extract_muldiv (op1, c, code, wide_type,
5876 strict_overflow_p)) != 0)
5877 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5878 fold_convert (ctype, t1));
5879 else if (TREE_CODE (op1) != INTEGER_CST)
5880 return 0;
5882 /* If these are the same operation types, we can associate them
5883 assuming no overflow. */
5884 if (tcode == code)
5886 double_int mul;
5887 bool overflow_p;
5888 unsigned prec = TYPE_PRECISION (ctype);
5889 bool uns = TYPE_UNSIGNED (ctype);
5890 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5891 double_int dic = tree_to_double_int (c).ext (prec, uns);
5892 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5893 overflow_p = ((!uns && overflow_p)
5894 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5895 if (!double_int_fits_to_tree_p (ctype, mul)
5896 && ((uns && tcode != MULT_EXPR) || !uns))
5897 overflow_p = 1;
5898 if (!overflow_p)
5899 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5900 double_int_to_tree (ctype, mul));
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with an
5906 operation of either CODE or TCODE.
5908 If we have an unsigned type, we cannot do this since it will change
5909 the result if the original computation overflowed. */
5910 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5911 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5912 || (tcode == MULT_EXPR
5913 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5914 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5915 && code != MULT_EXPR)))
5917 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5919 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5920 *strict_overflow_p = true;
5921 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5922 fold_convert (ctype,
5923 const_binop (TRUNC_DIV_EXPR,
5924 op1, c)));
5926 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5928 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5929 *strict_overflow_p = true;
5930 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5931 fold_convert (ctype,
5932 const_binop (TRUNC_DIV_EXPR,
5933 c, op1)));
5936 break;
5938 default:
5939 break;
5942 return 0;
5945 /* Return a node which has the indicated constant VALUE (either 0 or
5946 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5947 and is of the indicated TYPE. */
5949 tree
5950 constant_boolean_node (bool value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5956 else if (TREE_CODE (type) == VECTOR_TYPE)
5957 return build_vector_from_val (type,
5958 build_int_cst (TREE_TYPE (type),
5959 value ? -1 : 0));
5960 else
5961 return fold_convert (type, value ? integer_one_node : integer_zero_node);
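/* Usage sketch (illustrative): constant_boolean_node (true,
   boolean_type_node) yields boolean_true_node, while for a vector
   comparison type it builds the all-ones vector { -1, -1, ... }
   that vector compares use as their "true" encoding.  */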
5965 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5966 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5967 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5968 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5969 COND is the first argument to CODE; otherwise (as in the example
5970 given here), it is the second argument. TYPE is the type of the
5971 original expression. Return NULL_TREE if no simplification is
5972 possible. */
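/* Example (illustrative): with constant A, A + (b ? 1 : 0) becomes
   b ? A + 1 : A + 0, and both arms fold to constants; for a
   non-constant A the checks below reject the transformation unless
   at least one arm still simplifies.  */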
5974 static tree
5975 fold_binary_op_with_conditional_arg (location_t loc,
5976 enum tree_code code,
5977 tree type, tree op0, tree op1,
5978 tree cond, tree arg, int cond_first_p)
5980 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5981 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5982 tree test, true_value, false_value;
5983 tree lhs = NULL_TREE;
5984 tree rhs = NULL_TREE;
5986 if (TREE_CODE (cond) == COND_EXPR)
5988 test = TREE_OPERAND (cond, 0);
5989 true_value = TREE_OPERAND (cond, 1);
5990 false_value = TREE_OPERAND (cond, 2);
5991 /* If this operand throws an exception, then it does not make
5992 sense to try to perform a logical or arithmetic operation
5993 involving it. */
5994 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5995 lhs = true_value;
5996 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5997 rhs = false_value;
5999 else
6001 tree testtype = TREE_TYPE (cond);
6002 test = cond;
6003 true_value = constant_boolean_node (true, testtype);
6004 false_value = constant_boolean_node (false, testtype);
6007 /* This transformation is only worthwhile if we don't have to wrap ARG
6008 in a SAVE_EXPR and the operation can be simplified on at least one
6009 of the branches once it's pushed inside the COND_EXPR. */
6010 if (!TREE_CONSTANT (arg)
6011 && (TREE_SIDE_EFFECTS (arg)
6012 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6013 return NULL_TREE;
6015 arg = fold_convert_loc (loc, arg_type, arg);
6016 if (lhs == 0)
6018 true_value = fold_convert_loc (loc, cond_type, true_value);
6019 if (cond_first_p)
6020 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6021 else
6022 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6024 if (rhs == 0)
6026 false_value = fold_convert_loc (loc, cond_type, false_value);
6027 if (cond_first_p)
6028 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6029 else
6030 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6033 /* Check that we have simplified at least one of the branches. */
6034 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6035 return NULL_TREE;
6037 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6041 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6043 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6044 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6045 ADDEND is the same as X.
6047 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6048 and finite. The problematic cases are when X is zero, and its mode
6049 has signed zeros. In the case of rounding towards -infinity,
6050 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6051 modes, X + 0 is not the same as X because -0 + 0 is 0. */
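/* Example (illustrative): when signed zeros are honored, X + 0.0
   cannot fold to X because X = -0.0 yields -0.0 + 0.0 == +0.0, but
   X - 0.0 still folds as long as rounding toward -infinity need
   not be honored.  */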
6053 bool
6054 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6056 if (!real_zerop (addend))
6057 return false;
6059 /* Don't allow the fold with -fsignaling-nans. */
6060 if (HONOR_SNANS (TYPE_MODE (type)))
6061 return false;
6063 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6064 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6065 return true;
6067 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6068 if (TREE_CODE (addend) == REAL_CST
6069 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6070 negate = !negate;
6072 /* The mode has signed zeros, and we have to honor their sign.
6073 In this situation, there is only one case we can return true for.
6074 X - 0 is the same as X unless rounding towards -infinity is
6075 supported. */
6076 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6079 /* Subroutine of fold() that checks comparisons of built-in math
6080 functions against real constants.
6082 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6083 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6084 is the type of the result and ARG0 and ARG1 are the operands of the
6085 comparison. ARG1 must be a TREE_REAL_CST.
6087 The function returns the constant folded tree if a simplification
6088 can be made, and NULL_TREE otherwise. */
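/* Example (illustrative): when NaNs can be ignored, sqrt(x) > 2.0
   folds to x > 4.0, and sqrt(x) < -1.0 folds to constant false,
   following the case analysis below.  */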
6090 static tree
6091 fold_mathfn_compare (location_t loc,
6092 enum built_in_function fcode, enum tree_code code,
6093 tree type, tree arg0, tree arg1)
6095 REAL_VALUE_TYPE c;
6097 if (BUILTIN_SQRT_P (fcode))
6099 tree arg = CALL_EXPR_ARG (arg0, 0);
6100 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6102 c = TREE_REAL_CST (arg1);
6103 if (REAL_VALUE_NEGATIVE (c))
6105 /* sqrt(x) < y is always false, if y is negative. */
6106 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6107 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6109 /* sqrt(x) > y is always true, if y is negative and we
6110 don't care about NaNs, i.e. negative values of x. */
6111 if (code == NE_EXPR || !HONOR_NANS (mode))
6112 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6114 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6115 return fold_build2_loc (loc, GE_EXPR, type, arg,
6116 build_real (TREE_TYPE (arg), dconst0));
6118 else if (code == GT_EXPR || code == GE_EXPR)
6120 REAL_VALUE_TYPE c2;
6122 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6123 real_convert (&c2, mode, &c2);
6125 if (REAL_VALUE_ISINF (c2))
6127 /* sqrt(x) > y is x == +Inf, when y is very large. */
6128 if (HONOR_INFINITIES (mode))
6129 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6130 build_real (TREE_TYPE (arg), c2));
6132 /* sqrt(x) > y is always false, when y is very large
6133 and we don't care about infinities. */
6134 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6137 /* sqrt(x) > c is the same as x > c*c. */
6138 return fold_build2_loc (loc, code, type, arg,
6139 build_real (TREE_TYPE (arg), c2));
6141 else if (code == LT_EXPR || code == LE_EXPR)
6143 REAL_VALUE_TYPE c2;
6145 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6146 real_convert (&c2, mode, &c2);
6148 if (REAL_VALUE_ISINF (c2))
6150 /* sqrt(x) < y is always true, when y is a very large
6151 value and we don't care about NaNs or Infinities. */
6152 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6153 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6155 /* sqrt(x) < y is x != +Inf when y is very large and we
6156 don't care about NaNs. */
6157 if (! HONOR_NANS (mode))
6158 return fold_build2_loc (loc, NE_EXPR, type, arg,
6159 build_real (TREE_TYPE (arg), c2));
6161 /* sqrt(x) < y is x >= 0 when y is very large and we
6162 don't care about Infinities. */
6163 if (! HONOR_INFINITIES (mode))
6164 return fold_build2_loc (loc, GE_EXPR, type, arg,
6165 build_real (TREE_TYPE (arg), dconst0));
6167 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6168 arg = save_expr (arg);
6169 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6170 fold_build2_loc (loc, GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg),
6172 dconst0)),
6173 fold_build2_loc (loc, NE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg),
6175 c2)));
6178 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6179 if (! HONOR_NANS (mode))
6180 return fold_build2_loc (loc, code, type, arg,
6181 build_real (TREE_TYPE (arg), c2));
6183 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6184 arg = save_expr (arg);
6185 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6186 fold_build2_loc (loc, GE_EXPR, type, arg,
6187 build_real (TREE_TYPE (arg),
6188 dconst0)),
6189 fold_build2_loc (loc, code, type, arg,
6190 build_real (TREE_TYPE (arg),
6191 c2)));
6195 return NULL_TREE;
6198 /* Subroutine of fold() that optimizes comparisons against Infinities,
6199 either +Inf or -Inf.
6201 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6202 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6203 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6205 The function returns the constant folded tree if a simplification
6206 can be made, and NULL_TREE otherwise. */
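/* Example (illustrative): for double x, x < +Inf folds to
   x <= DBL_MAX and x >= +Inf to x > DBL_MAX, with the comparison
   senses swapped when the constant is -Inf.  */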
6208 static tree
6209 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6210 tree arg0, tree arg1)
6212 enum machine_mode mode;
6213 REAL_VALUE_TYPE max;
6214 tree temp;
6215 bool neg;
6217 mode = TYPE_MODE (TREE_TYPE (arg0));
6219 /* For negative infinity swap the sense of the comparison. */
6220 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6221 if (neg)
6222 code = swap_tree_comparison (code);
6224 switch (code)
6226 case GT_EXPR:
6227 /* x > +Inf is always false, if we ignore sNaNs. */
6228 if (HONOR_SNANS (mode))
6229 return NULL_TREE;
6230 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6232 case LE_EXPR:
6233 /* x <= +Inf is always true, if we don't care about NaNs. */
6234 if (! HONOR_NANS (mode))
6235 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6237 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6238 arg0 = save_expr (arg0);
6239 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6241 case EQ_EXPR:
6242 case GE_EXPR:
6243 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6244 real_maxval (&max, neg, mode);
6245 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6246 arg0, build_real (TREE_TYPE (arg0), max));
6248 case LT_EXPR:
6249 /* x < +Inf is always equal to x <= DBL_MAX. */
6250 real_maxval (&max, neg, mode);
6251 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6252 arg0, build_real (TREE_TYPE (arg0), max));
6254 case NE_EXPR:
6255 /* x != +Inf is always equal to !(x > DBL_MAX). */
6256 real_maxval (&max, neg, mode);
6257 if (! HONOR_NANS (mode))
6258 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6259 arg0, build_real (TREE_TYPE (arg0), max));
6261 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6262 arg0, build_real (TREE_TYPE (arg0), max));
6263 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6265 default:
6266 break;
6269 return NULL_TREE;
6272 /* Subroutine of fold() that optimizes comparisons of a division by
6273 a nonzero integer constant against an integer constant, i.e.
6274 X/C1 op C2.
6276 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6277 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6278 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6280 The function returns the constant folded tree if a simplification
6281 can be made, and NULL_TREE otherwise. */
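/* Worked example (illustrative): for unsigned X, X / 4 == 2 holds
   exactly for X in [8, 11], i.e. [C1*C2, C1*C2 + C1 - 1], so the
   comparison becomes the range check built below, with overflow of
   the bounds tracked explicitly.  */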
6283 static tree
6284 fold_div_compare (location_t loc,
6285 enum tree_code code, tree type, tree arg0, tree arg1)
6287 tree prod, tmp, hi, lo;
6288 tree arg00 = TREE_OPERAND (arg0, 0);
6289 tree arg01 = TREE_OPERAND (arg0, 1);
6290 double_int val;
6291 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6292 bool neg_overflow;
6293 int overflow;
6295 /* We have to do this the hard way to detect unsigned overflow.
6296 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6297 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6298 TREE_INT_CST_HIGH (arg01),
6299 TREE_INT_CST_LOW (arg1),
6300 TREE_INT_CST_HIGH (arg1),
6301 &val.low, &val.high, unsigned_p);
6302 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6303 neg_overflow = false;
6305 if (unsigned_p)
6307 tmp = int_const_binop (MINUS_EXPR, arg01,
6308 build_int_cst (TREE_TYPE (arg01), 1));
6309 lo = prod;
6311 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6312 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6313 TREE_INT_CST_HIGH (prod),
6314 TREE_INT_CST_LOW (tmp),
6315 TREE_INT_CST_HIGH (tmp),
6316 &val.low, &val.high, unsigned_p);
6317 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6318 -1, overflow | TREE_OVERFLOW (prod));
6320 else if (tree_int_cst_sgn (arg01) >= 0)
6322 tmp = int_const_binop (MINUS_EXPR, arg01,
6323 build_int_cst (TREE_TYPE (arg01), 1));
6324 switch (tree_int_cst_sgn (arg1))
6326 case -1:
6327 neg_overflow = true;
6328 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6329 hi = prod;
6330 break;
6332 case 0:
6333 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6334 hi = tmp;
6335 break;
6337 case 1:
6338 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6339 lo = prod;
6340 break;
6342 default:
6343 gcc_unreachable ();
6346 else
6348 /* A negative divisor reverses the relational operators. */
6349 code = swap_tree_comparison (code);
6351 tmp = int_const_binop (PLUS_EXPR, arg01,
6352 build_int_cst (TREE_TYPE (arg01), 1));
6353 switch (tree_int_cst_sgn (arg1))
6355 case -1:
6356 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6357 lo = prod;
6358 break;
6360 case 0:
6361 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6362 lo = tmp;
6363 break;
6365 case 1:
6366 neg_overflow = true;
6367 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6368 hi = prod;
6369 break;
6371 default:
6372 gcc_unreachable ();
6376 switch (code)
6378 case EQ_EXPR:
6379 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6380 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6381 if (TREE_OVERFLOW (hi))
6382 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6383 if (TREE_OVERFLOW (lo))
6384 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6385 return build_range_check (loc, type, arg00, 1, lo, hi);
6387 case NE_EXPR:
6388 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6389 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6390 if (TREE_OVERFLOW (hi))
6391 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6392 if (TREE_OVERFLOW (lo))
6393 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6394 return build_range_check (loc, type, arg00, 0, lo, hi);
6396 case LT_EXPR:
6397 if (TREE_OVERFLOW (lo))
6399 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6400 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6404 case LE_EXPR:
6405 if (TREE_OVERFLOW (hi))
6407 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6412 case GT_EXPR:
6413 if (TREE_OVERFLOW (hi))
6415 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6420 case GE_EXPR:
6421 if (TREE_OVERFLOW (lo))
6423 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6424 return omit_one_operand_loc (loc, type, tmp, arg00);
6426 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6428 default:
6429 break;
6432 return NULL_TREE;
6436 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6437 equality/inequality test, then return a simplified form of the test
6438 using a sign test. Otherwise return NULL. TYPE is the desired
6439 result type. */
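/* Example (illustrative): for a 32-bit int A, (A & 0x80000000) != 0
   tests only the sign bit and so folds to A < 0, while the == 0
   form folds to A >= 0.  */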
6441 static tree
6442 fold_single_bit_test_into_sign_test (location_t loc,
6443 enum tree_code code, tree arg0, tree arg1,
6444 tree result_type)
6446 /* If this is testing a single bit, we can optimize the test. */
6447 if ((code == NE_EXPR || code == EQ_EXPR)
6448 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6449 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6451 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6452 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6453 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6455 if (arg00 != NULL_TREE
6456 /* This is only a win if casting to a signed type is cheap,
6457 i.e. when arg00's type is not a partial mode. */
6458 && TYPE_PRECISION (TREE_TYPE (arg00))
6459 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6461 tree stype = signed_type_for (TREE_TYPE (arg00));
6462 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6463 result_type,
6464 fold_convert_loc (loc, stype, arg00),
6465 build_int_cst (stype, 0));
6469 return NULL_TREE;
6472 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6473 equality/inequality test, then return a simplified form of
6474 the test using shifts and logical operations. Otherwise return
6475 NULL. TYPE is the desired result type. */
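/* Example (illustrative): (A & 8) != 0 becomes ((A >> 3) & 1) and
   (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), after first trying
   the sign-bit form above.  */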
6477 tree
6478 fold_single_bit_test (location_t loc, enum tree_code code,
6479 tree arg0, tree arg1, tree result_type)
6481 /* If this is testing a single bit, we can optimize the test. */
6482 if ((code == NE_EXPR || code == EQ_EXPR)
6483 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6484 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6486 tree inner = TREE_OPERAND (arg0, 0);
6487 tree type = TREE_TYPE (arg0);
6488 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6489 enum machine_mode operand_mode = TYPE_MODE (type);
6490 int ops_unsigned;
6491 tree signed_type, unsigned_type, intermediate_type;
6492 tree tem, one;
6494 /* First, see if we can fold the single bit test into a sign-bit
6495 test. */
6496 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6497 result_type);
6498 if (tem)
6499 return tem;
6501 /* Otherwise we have (A & C) != 0 where C is a single bit,
6502 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6503 Similarly for (A & C) == 0. */
6505 /* If INNER is a right shift of a constant and it plus BITNUM does
6506 not overflow, adjust BITNUM and INNER. */
6507 if (TREE_CODE (inner) == RSHIFT_EXPR
6508 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6509 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6510 && bitnum < TYPE_PRECISION (type)
6511 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6512 bitnum - TYPE_PRECISION (type)))
6514 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6515 inner = TREE_OPERAND (inner, 0);
6518 /* If we are going to be able to omit the AND below, we must do our
6519 operations as unsigned. If we must use the AND, we have a choice.
6520 Normally unsigned is faster, but for some machines signed is. */
6521 #ifdef LOAD_EXTEND_OP
6522 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6523 && !flag_syntax_only) ? 0 : 1;
6524 #else
6525 ops_unsigned = 1;
6526 #endif
6528 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6529 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6530 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6531 inner = fold_convert_loc (loc, intermediate_type, inner);
6533 if (bitnum != 0)
6534 inner = build2 (RSHIFT_EXPR, intermediate_type,
6535 inner, size_int (bitnum));
6537 one = build_int_cst (intermediate_type, 1);
6539 if (code == EQ_EXPR)
6540 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6542 /* Put the AND last so it can combine with more things. */
6543 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6545 /* Make sure to return the proper type. */
6546 inner = fold_convert_loc (loc, result_type, inner);
6548 return inner;
6550 return NULL_TREE;
6553 /* Check whether we are allowed to reorder operands arg0 and arg1,
6554 such that the evaluation of arg1 occurs before arg0. */
6556 static bool
6557 reorder_operands_p (const_tree arg0, const_tree arg1)
6559 if (! flag_evaluation_order)
6560 return true;
6561 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6562 return true;
6563 return ! TREE_SIDE_EFFECTS (arg0)
6564 && ! TREE_SIDE_EFFECTS (arg1);
6567 /* Test whether it is preferable to swap two operands, ARG0 and
6568 ARG1, for example because ARG0 is an integer constant and ARG1
6569 isn't. If REORDER is true, only recommend swapping if we can
6570 evaluate the operands in reverse order. */
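/* Example (illustrative): for 5 < x the INTEGER_CST is ARG0, so
   this returns true and callers canonicalize to x > 5, keeping
   constants as the second operand.  */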
6572 bool
6573 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6575 STRIP_SIGN_NOPS (arg0);
6576 STRIP_SIGN_NOPS (arg1);
6578 if (TREE_CODE (arg1) == INTEGER_CST)
6579 return 0;
6580 if (TREE_CODE (arg0) == INTEGER_CST)
6581 return 1;
6583 if (TREE_CODE (arg1) == REAL_CST)
6584 return 0;
6585 if (TREE_CODE (arg0) == REAL_CST)
6586 return 1;
6588 if (TREE_CODE (arg1) == FIXED_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == FIXED_CST)
6591 return 1;
6593 if (TREE_CODE (arg1) == COMPLEX_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == COMPLEX_CST)
6596 return 1;
6598 if (TREE_CONSTANT (arg1))
6599 return 0;
6600 if (TREE_CONSTANT (arg0))
6601 return 1;
6603 if (optimize_function_for_size_p (cfun))
6604 return 0;
6606 if (reorder && flag_evaluation_order
6607 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6608 return 0;
6610 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6611 for commutative and comparison operators. Ensuring a canonical
6612 form allows the optimizers to find additional redundancies without
6613 having to explicitly check for both orderings. */
6614 if (TREE_CODE (arg0) == SSA_NAME
6615 && TREE_CODE (arg1) == SSA_NAME
6616 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6617 return 1;
6619 /* Put SSA_NAMEs last. */
6620 if (TREE_CODE (arg1) == SSA_NAME)
6621 return 0;
6622 if (TREE_CODE (arg0) == SSA_NAME)
6623 return 1;
6625 /* Put variables last. */
6626 if (DECL_P (arg1))
6627 return 0;
6628 if (DECL_P (arg0))
6629 return 1;
6631 return 0;
6634 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6635 ARG0 is extended to a wider type. */
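/* Example (illustrative): for signed char c, (int) c == 1000 can
   never hold because 1000 is outside [-128, 127], so the test
   folds to constant false while preserving any side effects of
   evaluating c.  */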
6637 static tree
6638 fold_widened_comparison (location_t loc, enum tree_code code,
6639 tree type, tree arg0, tree arg1)
6641 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6642 tree arg1_unw;
6643 tree shorter_type, outer_type;
6644 tree min, max;
6645 bool above, below;
6647 if (arg0_unw == arg0)
6648 return NULL_TREE;
6649 shorter_type = TREE_TYPE (arg0_unw);
6651 #ifdef HAVE_canonicalize_funcptr_for_compare
6652 /* Disable this optimization if we're casting a function pointer
6653 type on targets that require function pointer canonicalization. */
6654 if (HAVE_canonicalize_funcptr_for_compare
6655 && TREE_CODE (shorter_type) == POINTER_TYPE
6656 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6657 return NULL_TREE;
6658 #endif
6660 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6661 return NULL_TREE;
6663 arg1_unw = get_unwidened (arg1, NULL_TREE);
6665 /* If possible, express the comparison in the shorter mode. */
6666 if ((code == EQ_EXPR || code == NE_EXPR
6667 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6668 && (TREE_TYPE (arg1_unw) == shorter_type
6669 || ((TYPE_PRECISION (shorter_type)
6670 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6671 && (TYPE_UNSIGNED (shorter_type)
6672 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6673 || (TREE_CODE (arg1_unw) == INTEGER_CST
6674 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6675 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6676 && int_fits_type_p (arg1_unw, shorter_type))))
6677 return fold_build2_loc (loc, code, type, arg0_unw,
6678 fold_convert_loc (loc, shorter_type, arg1_unw));
6680 if (TREE_CODE (arg1_unw) != INTEGER_CST
6681 || TREE_CODE (shorter_type) != INTEGER_TYPE
6682 || !int_fits_type_p (arg1_unw, shorter_type))
6683 return NULL_TREE;
6685 /* If we are comparing with an integer that does not fit into the range
6686 of the shorter type, the result is known. */
6687 outer_type = TREE_TYPE (arg1_unw);
6688 min = lower_bound_in_type (outer_type, shorter_type);
6689 max = upper_bound_in_type (outer_type, shorter_type);
6691 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6692 max, arg1_unw));
6693 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6694 arg1_unw, min));
6696 switch (code)
6698 case EQ_EXPR:
6699 if (above || below)
6700 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 break;
6703 case NE_EXPR:
6704 if (above || below)
6705 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6706 break;
6708 case LT_EXPR:
6709 case LE_EXPR:
6710 if (above)
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6712 else if (below)
6713 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6715 case GT_EXPR:
6716 case GE_EXPR:
6717 if (above)
6718 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6719 else if (below)
6720 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6722 default:
6723 break;
6726 return NULL_TREE;
6729 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6730 ARG0 just the signedness is changed. */
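/* Example (illustrative): for int x, (unsigned int) x == 5 folds
   to x == 5, since equality depends only on the bit pattern;
   ordered comparisons are rewritten only when the signedness of
   the two types actually matches.  */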
6732 static tree
6733 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6734 tree arg0, tree arg1)
6736 tree arg0_inner;
6737 tree inner_type, outer_type;
6739 if (!CONVERT_EXPR_P (arg0))
6740 return NULL_TREE;
6742 outer_type = TREE_TYPE (arg0);
6743 arg0_inner = TREE_OPERAND (arg0, 0);
6744 inner_type = TREE_TYPE (arg0_inner);
6746 #ifdef HAVE_canonicalize_funcptr_for_compare
6747 /* Disable this optimization if we're casting a function pointer
6748 type on targets that require function pointer canonicalization. */
6749 if (HAVE_canonicalize_funcptr_for_compare
6750 && TREE_CODE (inner_type) == POINTER_TYPE
6751 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6752 return NULL_TREE;
6753 #endif
6755 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6756 return NULL_TREE;
6758 if (TREE_CODE (arg1) != INTEGER_CST
6759 && !(CONVERT_EXPR_P (arg1)
6760 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6761 return NULL_TREE;
6763 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6764 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6765 && code != NE_EXPR
6766 && code != EQ_EXPR)
6767 return NULL_TREE;
6769 if (TREE_CODE (arg1) == INTEGER_CST)
6770 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6771 0, TREE_OVERFLOW (arg1));
6772 else
6773 arg1 = fold_convert_loc (loc, inner_type, arg1);
6775 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6778 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6779 the step of the array. Reconstructs s and delta in the case of s *
6780 delta being an integer constant (and thus already folded). ADDR is
6781 the address. OP1 is the multiplicative expression. If the
6782 function succeeds, the new address expression is returned.
6783 Otherwise NULL_TREE is returned. LOC is the location of the
6784 resulting expression. */
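/* Example (illustrative): with int a[10] and 4-byte elements, the
   address &a[i] p+ 4 * d re-folds to &a[i + d]; in the
   multi-dimensional case the fold is only done when the new index
   provably stays within its dimension.  */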
6786 static tree
6787 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6789 tree s, delta, step;
6790 tree ref = TREE_OPERAND (addr, 0), pref;
6791 tree ret, pos;
6792 tree itype;
6793 bool mdim = false;
6795 /* Strip the nops that might be added when converting op1 to sizetype. */
6796 STRIP_NOPS (op1);
6798 /* Canonicalize op1 into a possibly non-constant delta
6799 and an INTEGER_CST s. */
6800 if (TREE_CODE (op1) == MULT_EXPR)
6802 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6804 STRIP_NOPS (arg0);
6805 STRIP_NOPS (arg1);
6807 if (TREE_CODE (arg0) == INTEGER_CST)
6809 s = arg0;
6810 delta = arg1;
6812 else if (TREE_CODE (arg1) == INTEGER_CST)
6814 s = arg1;
6815 delta = arg0;
6817 else
6818 return NULL_TREE;
6820 else if (TREE_CODE (op1) == INTEGER_CST)
6822 delta = op1;
6823 s = NULL_TREE;
6825 else
6827 /* Treat OP1 as delta * 1. */
6828 delta = op1;
6829 s = integer_one_node;
6832 /* Handle &x.array the same as we would handle &x.array[0]. */
6833 if (TREE_CODE (ref) == COMPONENT_REF
6834 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6836 tree domain;
6838 /* Remember if this was a multi-dimensional array. */
6839 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6840 mdim = true;
6842 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6843 if (! domain)
6844 goto cont;
6845 itype = TREE_TYPE (domain);
6847 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6848 if (TREE_CODE (step) != INTEGER_CST)
6849 goto cont;
6851 if (s)
6853 if (! tree_int_cst_equal (step, s))
6854 goto cont;
6856 else
6858 /* Check whether delta is a multiple of step. */
6859 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6860 if (! tmp)
6861 goto cont;
6862 delta = tmp;
6865 /* Only fold here if we can verify we do not overflow one
6866 dimension of a multi-dimensional array. */
6867 if (mdim)
6869 tree tmp;
6871 if (!TYPE_MIN_VALUE (domain)
6872 || !TYPE_MAX_VALUE (domain)
6873 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6874 goto cont;
6876 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6877 fold_convert_loc (loc, itype,
6878 TYPE_MIN_VALUE (domain)),
6879 fold_convert_loc (loc, itype, delta));
6880 if (TREE_CODE (tmp) != INTEGER_CST
6881 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6882 goto cont;
6885 /* We found a suitable component reference. */
6887 pref = TREE_OPERAND (addr, 0);
6888 ret = copy_node (pref);
6889 SET_EXPR_LOCATION (ret, loc);
6891 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6892 fold_build2_loc
6893 (loc, PLUS_EXPR, itype,
6894 fold_convert_loc (loc, itype,
6895 TYPE_MIN_VALUE
6896 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6897 fold_convert_loc (loc, itype, delta)),
6898 NULL_TREE, NULL_TREE);
6899 return build_fold_addr_expr_loc (loc, ret);
6902 cont:
6904 for (;; ref = TREE_OPERAND (ref, 0))
6906 if (TREE_CODE (ref) == ARRAY_REF)
6908 tree domain;
6910 /* Remember if this was a multi-dimensional array. */
6911 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6912 mdim = true;
6914 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6915 if (! domain)
6916 continue;
6917 itype = TREE_TYPE (domain);
6919 step = array_ref_element_size (ref);
6920 if (TREE_CODE (step) != INTEGER_CST)
6921 continue;
6923 if (s)
6925 if (! tree_int_cst_equal (step, s))
6926 continue;
6928 else
6930 /* Check whether delta is a multiple of step. */
6931 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6932 if (! tmp)
6933 continue;
6934 delta = tmp;
6937 /* Only fold here if we can verify we do not overflow one
6938 dimension of a multi-dimensional array. */
6939 if (mdim)
6941 tree tmp;
6943 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6944 || !TYPE_MAX_VALUE (domain)
6945 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6946 continue;
6948 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6949 fold_convert_loc (loc, itype,
6950 TREE_OPERAND (ref, 1)),
6951 fold_convert_loc (loc, itype, delta));
6952 if (!tmp
6953 || TREE_CODE (tmp) != INTEGER_CST
6954 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6955 continue;
6958 break;
6960 else
6961 mdim = false;
6963 if (!handled_component_p (ref))
6964 return NULL_TREE;
6967 /* We found a suitable array reference. So copy everything up to it,
6968 and replace the index. */
6970 pref = TREE_OPERAND (addr, 0);
6971 ret = copy_node (pref);
6972 SET_EXPR_LOCATION (ret, loc);
6973 pos = ret;
6975 while (pref != ref)
6977 pref = TREE_OPERAND (pref, 0);
6978 TREE_OPERAND (pos, 0) = copy_node (pref);
6979 pos = TREE_OPERAND (pos, 0);
6982 TREE_OPERAND (pos, 1)
6983 = fold_build2_loc (loc, PLUS_EXPR, itype,
6984 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6985 fold_convert_loc (loc, itype, delta));
6986 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6990 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6991 means A >= Y && A != MAX, but in this case we know that
6992 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
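/* Example (illustrative): when combining i < n with i + 1 > j,
   the bound i < n rules out i == MAX, so the increment cannot
   wrap and i + 1 > j safely becomes i >= j.  */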
6994 static tree
6995 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6997 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6999 if (TREE_CODE (bound) == LT_EXPR)
7000 a = TREE_OPERAND (bound, 0);
7001 else if (TREE_CODE (bound) == GT_EXPR)
7002 a = TREE_OPERAND (bound, 1);
7003 else
7004 return NULL_TREE;
7006 typea = TREE_TYPE (a);
7007 if (!INTEGRAL_TYPE_P (typea)
7008 && !POINTER_TYPE_P (typea))
7009 return NULL_TREE;
7011 if (TREE_CODE (ineq) == LT_EXPR)
7013 a1 = TREE_OPERAND (ineq, 1);
7014 y = TREE_OPERAND (ineq, 0);
7016 else if (TREE_CODE (ineq) == GT_EXPR)
7018 a1 = TREE_OPERAND (ineq, 0);
7019 y = TREE_OPERAND (ineq, 1);
7021 else
7022 return NULL_TREE;
7024 if (TREE_TYPE (a1) != typea)
7025 return NULL_TREE;
7027 if (POINTER_TYPE_P (typea))
7029 /* Convert the pointer types into integers before taking the difference. */
7030 tree ta = fold_convert_loc (loc, ssizetype, a);
7031 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7032 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7034 else
7035 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7037 if (!diff || !integer_onep (diff))
7038 return NULL_TREE;
7040 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7043 /* Fold a sum or difference of at least one multiplication.
7044 Returns the folded tree or NULL if no simplification could be made. */
7046 static tree
7047 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7048 tree arg0, tree arg1)
7050 tree arg00, arg01, arg10, arg11;
7051 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7053 /* (A * C) +- (B * C) -> (A+-B) * C.
7054 (A * C) +- A -> A * (C+-1).
7055 We are most concerned about the case where C is a constant,
7056 but other combinations show up during loop reduction. Since
7057 it is not difficult, try all four possibilities. */
7059 if (TREE_CODE (arg0) == MULT_EXPR)
7061 arg00 = TREE_OPERAND (arg0, 0);
7062 arg01 = TREE_OPERAND (arg0, 1);
7064 else if (TREE_CODE (arg0) == INTEGER_CST)
7066 arg00 = build_one_cst (type);
7067 arg01 = arg0;
7069 else
7071 /* We cannot generate constant 1 for fract. */
7072 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7073 return NULL_TREE;
7074 arg00 = arg0;
7075 arg01 = build_one_cst (type);
7077 if (TREE_CODE (arg1) == MULT_EXPR)
7079 arg10 = TREE_OPERAND (arg1, 0);
7080 arg11 = TREE_OPERAND (arg1, 1);
7082 else if (TREE_CODE (arg1) == INTEGER_CST)
7084 arg10 = build_one_cst (type);
7085 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7086 the purpose of this canonicalization. */
7087 if (TREE_INT_CST_HIGH (arg1) == -1
7088 && negate_expr_p (arg1)
7089 && code == PLUS_EXPR)
7091 arg11 = negate_expr (arg1);
7092 code = MINUS_EXPR;
7094 else
7095 arg11 = arg1;
7097 else
7099 /* We cannot generate constant 1 for fract. */
7100 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7101 return NULL_TREE;
7102 arg10 = arg1;
7103 arg11 = build_one_cst (type);
7105 same = NULL_TREE;
7107 if (operand_equal_p (arg01, arg11, 0))
7108 same = arg01, alt0 = arg00, alt1 = arg10;
7109 else if (operand_equal_p (arg00, arg10, 0))
7110 same = arg00, alt0 = arg01, alt1 = arg11;
7111 else if (operand_equal_p (arg00, arg11, 0))
7112 same = arg00, alt0 = arg01, alt1 = arg10;
7113 else if (operand_equal_p (arg01, arg10, 0))
7114 same = arg01, alt0 = arg00, alt1 = arg11;
7116 /* No identical multiplicands; see if we can find a common
7117 power-of-two factor in non-power-of-two multiplies. This
7118 can help in multi-dimensional array access. */
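/* For instance (illustrative), i * 12 + j * 4 exposes the common
   factor 4 and becomes (i * 3 + j) * 4, a typical shape for
   multi-dimensional array indexing.  */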
7119 else if (host_integerp (arg01, 0)
7120 && host_integerp (arg11, 0))
7122 HOST_WIDE_INT int01, int11, tmp;
7123 bool swap = false;
7124 tree maybe_same;
7125 int01 = TREE_INT_CST_LOW (arg01);
7126 int11 = TREE_INT_CST_LOW (arg11);
7128 /* Move min of absolute values to int11. */
7129 if (absu_hwi (int01) < absu_hwi (int11))
7131 tmp = int01, int01 = int11, int11 = tmp;
7132 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7133 maybe_same = arg01;
7134 swap = true;
7136 else
7137 maybe_same = arg11;
7139 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7140 /* The remainder should not be a constant, otherwise we
7141 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which would
7142 increase the number of multiplications needed. */
7143 && TREE_CODE (arg10) != INTEGER_CST)
7145 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7146 build_int_cst (TREE_TYPE (arg00),
7147 int01 / int11));
7148 alt1 = arg10;
7149 same = maybe_same;
7150 if (swap)
7151 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7155 if (same)
7156 return fold_build2_loc (loc, MULT_EXPR, type,
7157 fold_build2_loc (loc, code, type,
7158 fold_convert_loc (loc, type, alt0),
7159 fold_convert_loc (loc, type, alt1)),
7160 fold_convert_loc (loc, type, same));
7162 return NULL_TREE;
7165 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7166 specified by EXPR into the buffer PTR of length LEN bytes.
7167 Return the number of bytes placed in the buffer, or zero
7168 upon failure. */
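/* Example (illustrative): encoding the 32-bit INTEGER_CST
   0x01020304 stores bytes 04 03 02 01 on a little-endian target
   and 01 02 03 04 on a big-endian one, per the word and byte
   shuffling below.  */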
7170 static int
7171 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7173 tree type = TREE_TYPE (expr);
7174 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7175 int byte, offset, word, words;
7176 unsigned char value;
7178 if (total_bytes > len)
7179 return 0;
7180 words = total_bytes / UNITS_PER_WORD;
7182 for (byte = 0; byte < total_bytes; byte++)
7184 int bitpos = byte * BITS_PER_UNIT;
7185 if (bitpos < HOST_BITS_PER_WIDE_INT)
7186 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7187 else
7188 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7189 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7191 if (total_bytes > UNITS_PER_WORD)
7193 word = byte / UNITS_PER_WORD;
7194 if (WORDS_BIG_ENDIAN)
7195 word = (words - 1) - word;
7196 offset = word * UNITS_PER_WORD;
7197 if (BYTES_BIG_ENDIAN)
7198 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7199 else
7200 offset += byte % UNITS_PER_WORD;
7202 else
7203 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7204 ptr[offset] = value;
7206 return total_bytes;
7210 /* Subroutine of native_encode_expr. Encode the REAL_CST
7211 specified by EXPR into the buffer PTR of length LEN bytes.
7212 Return the number of bytes placed in the buffer, or zero
7213 upon failure. */
7215 static int
7216 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7218 tree type = TREE_TYPE (expr);
7219 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7220 int byte, offset, word, words, bitpos;
7221 unsigned char value;
7223 /* There are always 32 bits in each long, no matter the size of
7224 the host's long. We handle floating point representations with
7225 up to 192 bits. */
7226 long tmp[6];
7228 if (total_bytes > len)
7229 return 0;
7230 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7232 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7234 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7235 bitpos += BITS_PER_UNIT)
7237 byte = (bitpos / BITS_PER_UNIT) & 3;
7238 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7240 if (UNITS_PER_WORD < 4)
7242 word = byte / UNITS_PER_WORD;
7243 if (WORDS_BIG_ENDIAN)
7244 word = (words - 1) - word;
7245 offset = word * UNITS_PER_WORD;
7246 if (BYTES_BIG_ENDIAN)
7247 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7248 else
7249 offset += byte % UNITS_PER_WORD;
7251 else
7252 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7253 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7255 return total_bytes;
7258 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7259 specified by EXPR into the buffer PTR of length LEN bytes.
7260 Return the number of bytes placed in the buffer, or zero
7261 upon failure. */
7263 static int
7264 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7266 int rsize, isize;
7267 tree part;
7269 part = TREE_REALPART (expr);
7270 rsize = native_encode_expr (part, ptr, len);
7271 if (rsize == 0)
7272 return 0;
7273 part = TREE_IMAGPART (expr);
7274 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7275 if (isize != rsize)
7276 return 0;
7277 return rsize + isize;
7281 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7282 specified by EXPR into the buffer PTR of length LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero
7284 upon failure. */
7286 static int
7287 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7289 unsigned i, count;
7290 int size, offset;
7291 tree itype, elem;
7293 offset = 0;
7294 count = VECTOR_CST_NELTS (expr);
7295 itype = TREE_TYPE (TREE_TYPE (expr));
7296 size = GET_MODE_SIZE (TYPE_MODE (itype));
7297 for (i = 0; i < count; i++)
7299 elem = VECTOR_CST_ELT (expr, i);
7300 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7301 return 0;
7302 offset += size;
7304 return offset;
7308 /* Subroutine of native_encode_expr. Encode the STRING_CST
7309 specified by EXPR into the buffer PTR of length LEN bytes.
7310 Return the number of bytes placed in the buffer, or zero
7311 upon failure. */
7313 static int
7314 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7316 tree type = TREE_TYPE (expr);
7317 HOST_WIDE_INT total_bytes;
7319 if (TREE_CODE (type) != ARRAY_TYPE
7320 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7321 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7322 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7323 return 0;
7324 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7325 if (total_bytes > len)
7326 return 0;
7327 if (TREE_STRING_LENGTH (expr) < total_bytes)
7329 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7330 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7331 total_bytes - TREE_STRING_LENGTH (expr));
7333 else
7334 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7335 return total_bytes;
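/* For instance (a sketch): a STRING_CST whose TREE_STRING_LENGTH is 4,
   stored in a char[8] array type, encodes as the 4 string bytes followed
   by 4 zero bytes of padding, matching the array's full in-memory image
   of TYPE_SIZE_UNIT bytes.  */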
7339 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7340 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7341 buffer PTR of length LEN bytes. Return the number of bytes
7342 placed in the buffer, or zero upon failure. */
7344 int
7345 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7347 switch (TREE_CODE (expr))
7349 case INTEGER_CST:
7350 return native_encode_int (expr, ptr, len);
7352 case REAL_CST:
7353 return native_encode_real (expr, ptr, len);
7355 case COMPLEX_CST:
7356 return native_encode_complex (expr, ptr, len);
7358 case VECTOR_CST:
7359 return native_encode_vector (expr, ptr, len);
7361 case STRING_CST:
7362 return native_encode_string (expr, ptr, len);
7364 default:
7365 return 0;
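/* A minimal usage sketch (hypothetical helper, assuming 8-bit host bytes):
   encode the INTEGER_CST 42 of integer_type_node; a zero return means the
   constant kind is unsupported or BUF is too small.  */
static int
example_encode_forty_two (unsigned char *buf, int len)
{
  tree cst = build_int_cst (integer_type_node, 42);
  return native_encode_expr (cst, buf, len);
}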
7370 /* Subroutine of native_interpret_expr. Interpret the contents of
7371 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7372 If the buffer cannot be interpreted, return NULL_TREE. */
7374 static tree
7375 native_interpret_int (tree type, const unsigned char *ptr, int len)
7377 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7378 int byte, offset, word, words;
7379 unsigned char value;
7380 double_int result;
7382 if (total_bytes > len)
7383 return NULL_TREE;
7384 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7385 return NULL_TREE;
7387 result = double_int_zero;
7388 words = total_bytes / UNITS_PER_WORD;
7390 for (byte = 0; byte < total_bytes; byte++)
7392 int bitpos = byte * BITS_PER_UNIT;
7393 if (total_bytes > UNITS_PER_WORD)
7395 word = byte / UNITS_PER_WORD;
7396 if (WORDS_BIG_ENDIAN)
7397 word = (words - 1) - word;
7398 offset = word * UNITS_PER_WORD;
7399 if (BYTES_BIG_ENDIAN)
7400 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7401 else
7402 offset += byte % UNITS_PER_WORD;
7404 else
7405 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7406 value = ptr[offset];
7408 if (bitpos < HOST_BITS_PER_WIDE_INT)
7409 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7410 else
7411 result.high |= (unsigned HOST_WIDE_INT) value
7412 << (bitpos - HOST_BITS_PER_WIDE_INT);
7415 return double_int_to_tree (type, result);
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7423 static tree
7424 native_interpret_real (tree type, const unsigned char *ptr, int len)
7426 enum machine_mode mode = TYPE_MODE (type);
7427 int total_bytes = GET_MODE_SIZE (mode);
7428 int byte, offset, word, words, bitpos;
7429 unsigned char value;
7430 /* There are always 32 bits in each long, no matter the size of
7431 the host's long. We handle floating point representations with
7432 up to 192 bits. */
7433 REAL_VALUE_TYPE r;
7434 long tmp[6];
7436 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7437 if (total_bytes > len || total_bytes > 24)
7438 return NULL_TREE;
7439 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7441 memset (tmp, 0, sizeof (tmp));
7442 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7443 bitpos += BITS_PER_UNIT)
7445 byte = (bitpos / BITS_PER_UNIT) & 3;
7446 if (UNITS_PER_WORD < 4)
7448 word = byte / UNITS_PER_WORD;
7449 if (WORDS_BIG_ENDIAN)
7450 word = (words - 1) - word;
7451 offset = word * UNITS_PER_WORD;
7452 if (BYTES_BIG_ENDIAN)
7453 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7454 else
7455 offset += byte % UNITS_PER_WORD;
7457 else
7458 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7459 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7461 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7464 real_from_target (&r, tmp, mode);
7465 return build_real (type, r);
7469 /* Subroutine of native_interpret_expr. Interpret the contents of
7470 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7471 If the buffer cannot be interpreted, return NULL_TREE. */
7473 static tree
7474 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7476 tree etype, rpart, ipart;
7477 int size;
7479 etype = TREE_TYPE (type);
7480 size = GET_MODE_SIZE (TYPE_MODE (etype));
7481 if (size * 2 > len)
7482 return NULL_TREE;
7483 rpart = native_interpret_expr (etype, ptr, size);
7484 if (!rpart)
7485 return NULL_TREE;
7486 ipart = native_interpret_expr (etype, ptr+size, size);
7487 if (!ipart)
7488 return NULL_TREE;
7489 return build_complex (type, rpart, ipart);
7493 /* Subroutine of native_interpret_expr. Interpret the contents of
7494 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7495 If the buffer cannot be interpreted, return NULL_TREE. */
7497 static tree
7498 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7500 tree etype, elem;
7501 int i, size, count;
7502 tree *elements;
7504 etype = TREE_TYPE (type);
7505 size = GET_MODE_SIZE (TYPE_MODE (etype));
7506 count = TYPE_VECTOR_SUBPARTS (type);
7507 if (size * count > len)
7508 return NULL_TREE;
7510 elements = XALLOCAVEC (tree, count);
7511 for (i = count - 1; i >= 0; i--)
7513 elem = native_interpret_expr (etype, ptr+(i*size), size);
7514 if (!elem)
7515 return NULL_TREE;
7516 elements[i] = elem;
7518 return build_vector (type, elements);
7522 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7523 the buffer PTR of length LEN as a constant of type TYPE. For
7524 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7525 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7526 return NULL_TREE. */
7528 tree
7529 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7531 switch (TREE_CODE (type))
7533 case INTEGER_TYPE:
7534 case ENUMERAL_TYPE:
7535 case BOOLEAN_TYPE:
7536 case POINTER_TYPE:
7537 case REFERENCE_TYPE:
7538 return native_interpret_int (type, ptr, len);
7540 case REAL_TYPE:
7541 return native_interpret_real (type, ptr, len);
7543 case COMPLEX_TYPE:
7544 return native_interpret_complex (type, ptr, len);
7546 case VECTOR_TYPE:
7547 return native_interpret_vector (type, ptr, len);
7549 default:
7550 return NULL_TREE;
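/* A round-trip sketch (hypothetical helper): a constant accepted by
   native_encode_expr whose type is also accepted by native_interpret_expr
   should re-interpret to an equivalent constant at the same type.  */
static tree
example_round_trip (tree expr)
{
  unsigned char buf[64];
  int len = native_encode_expr (expr, buf, sizeof (buf));
  if (len == 0)
    return NULL_TREE;
  return native_interpret_expr (TREE_TYPE (expr), buf, len);
}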
7554 /* Returns true if we can interpret the contents of a native encoding
7555 as TYPE. */
7557 static bool
7558 can_native_interpret_type_p (tree type)
7560 switch (TREE_CODE (type))
7562 case INTEGER_TYPE:
7563 case ENUMERAL_TYPE:
7564 case BOOLEAN_TYPE:
7565 case POINTER_TYPE:
7566 case REFERENCE_TYPE:
7567 case REAL_TYPE:
7568 case COMPLEX_TYPE:
7569 case VECTOR_TYPE:
7570 return true;
7571 default:
7572 return false;
7576 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7577 TYPE at compile-time. If we're unable to perform the conversion
7578 return NULL_TREE. */
7580 static tree
7581 fold_view_convert_expr (tree type, tree expr)
7583 /* We support up to 512-bit values (for V8DFmode). */
7584 unsigned char buffer[64];
7585 int len;
7587 /* Check that the host and target are sane. */
7588 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7589 return NULL_TREE;
7591 len = native_encode_expr (expr, buffer, sizeof (buffer));
7592 if (len == 0)
7593 return NULL_TREE;
7595 return native_interpret_expr (type, buffer, len);
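/* Worked instance (assuming a target with IEEE single precision floats):
   folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST to the bit
   pattern 0x3f800000 and re-interprets those bytes, yielding the
   INTEGER_CST 1065353216.  */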
7598 /* Build an expression for the address of T. Folds away INDIRECT_REF
7599 to avoid confusing the gimplify process. */
7601 tree
7602 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7604 /* The size of the object is not relevant when talking about its address. */
7605 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7606 t = TREE_OPERAND (t, 0);
7608 if (TREE_CODE (t) == INDIRECT_REF)
7610 t = TREE_OPERAND (t, 0);
7612 if (TREE_TYPE (t) != ptrtype)
7613 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7615 else if (TREE_CODE (t) == MEM_REF
7616 && integer_zerop (TREE_OPERAND (t, 1)))
7617 return TREE_OPERAND (t, 0);
7618 else if (TREE_CODE (t) == MEM_REF
7619 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7620 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7621 TREE_OPERAND (t, 0),
7622 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7623 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7625 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7627 if (TREE_TYPE (t) != ptrtype)
7628 t = fold_convert_loc (loc, ptrtype, t);
7630 else
7631 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7633 return t;
7636 /* Build an expression for the address of T. */
7638 tree
7639 build_fold_addr_expr_loc (location_t loc, tree t)
7641 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7643 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
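/* A minimal usage sketch (hypothetical helper): taking the address of a
   dereference folds back to the pointer itself, because the INDIRECT_REF
   case above strips the indirection instead of building &*p.  */
static tree
example_addr_of_deref (location_t loc, tree p)
{
  tree deref = build_fold_indirect_ref_loc (loc, p);
  return build_fold_addr_expr_loc (loc, deref);
}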
7646 static bool vec_cst_ctor_to_array (tree, tree *);
7648 /* Fold a unary expression of code CODE and type TYPE with operand
7649 OP0. Return the folded expression if folding is successful.
7650 Otherwise, return NULL_TREE. */
7652 tree
7653 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7655 tree tem;
7656 tree arg0;
7657 enum tree_code_class kind = TREE_CODE_CLASS (code);
7659 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7660 && TREE_CODE_LENGTH (code) == 1);
7662 arg0 = op0;
7663 if (arg0)
7665 if (CONVERT_EXPR_CODE_P (code)
7666 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7668 /* Don't use STRIP_NOPS, because signedness of argument type
7669 matters. */
7670 STRIP_SIGN_NOPS (arg0);
7672 else
7674 /* Strip any conversions that don't change the mode. This
7675 is safe for every expression, except for a comparison
7676 expression because its signedness is derived from its
7677 operands.
7679 Note that this is done as an internal manipulation within
7680 the constant folder, in order to find the simplest
7681 representation of the arguments so that their form can be
7682 studied. In any case, the appropriate type conversions
7683 should be put back in the tree that will get out of the
7684 constant folder. */
7685 STRIP_NOPS (arg0);
7689 if (TREE_CODE_CLASS (code) == tcc_unary)
7691 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7692 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7693 fold_build1_loc (loc, code, type,
7694 fold_convert_loc (loc, TREE_TYPE (op0),
7695 TREE_OPERAND (arg0, 1))));
7696 else if (TREE_CODE (arg0) == COND_EXPR)
7698 tree arg01 = TREE_OPERAND (arg0, 1);
7699 tree arg02 = TREE_OPERAND (arg0, 2);
7700 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7701 arg01 = fold_build1_loc (loc, code, type,
7702 fold_convert_loc (loc,
7703 TREE_TYPE (op0), arg01));
7704 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7705 arg02 = fold_build1_loc (loc, code, type,
7706 fold_convert_loc (loc,
7707 TREE_TYPE (op0), arg02));
7708 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7709 arg01, arg02);
7711 /* If this was a conversion, and all we did was to move it
7712 inside the COND_EXPR, bring it back out. But leave it if
7713 it is a conversion from integer to integer and the
7714 result precision is no wider than a word since such a
7715 conversion is cheap and may be optimized away by combine,
7716 while it couldn't if it were outside the COND_EXPR. Then return
7717 so we don't get into an infinite recursion loop taking the
7718 conversion out and then back in. */
7720 if ((CONVERT_EXPR_CODE_P (code)
7721 || code == NON_LVALUE_EXPR)
7722 && TREE_CODE (tem) == COND_EXPR
7723 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7724 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7725 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7726 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7727 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7728 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7729 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7730 && (INTEGRAL_TYPE_P
7731 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7732 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7733 || flag_syntax_only))
7734 tem = build1_loc (loc, code, type,
7735 build3 (COND_EXPR,
7736 TREE_TYPE (TREE_OPERAND
7737 (TREE_OPERAND (tem, 1), 0)),
7738 TREE_OPERAND (tem, 0),
7739 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7740 TREE_OPERAND (TREE_OPERAND (tem, 2),
7741 0)));
7742 return tem;
7746 switch (code)
7748 case PAREN_EXPR:
7749 /* Re-association barriers around constants and other re-association
7750 barriers can be removed. */
7751 if (CONSTANT_CLASS_P (op0)
7752 || TREE_CODE (op0) == PAREN_EXPR)
7753 return fold_convert_loc (loc, type, op0);
7754 return NULL_TREE;
7756 CASE_CONVERT:
7757 case FLOAT_EXPR:
7758 case FIX_TRUNC_EXPR:
7759 if (TREE_TYPE (op0) == type)
7760 return op0;
7762 if (COMPARISON_CLASS_P (op0))
7764 /* If we have (type) (a CMP b) and type is an integral type, return
7765 new expression involving the new type. Canonicalize
7766 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7767 non-integral type.
7768 Do not fold the result as that would not simplify further, also
7769 folding again results in recursions. */
7770 if (TREE_CODE (type) == BOOLEAN_TYPE)
7771 return build2_loc (loc, TREE_CODE (op0), type,
7772 TREE_OPERAND (op0, 0),
7773 TREE_OPERAND (op0, 1));
7774 else if (!INTEGRAL_TYPE_P (type) && TREE_CODE (type) != VECTOR_TYPE)
7775 return build3_loc (loc, COND_EXPR, type, op0,
7776 constant_boolean_node (true, type),
7777 constant_boolean_node (false, type));
7780 /* Handle cases of two conversions in a row. */
7781 if (CONVERT_EXPR_P (op0))
7783 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7784 tree inter_type = TREE_TYPE (op0);
7785 int inside_int = INTEGRAL_TYPE_P (inside_type);
7786 int inside_ptr = POINTER_TYPE_P (inside_type);
7787 int inside_float = FLOAT_TYPE_P (inside_type);
7788 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7789 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7790 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7791 int inter_int = INTEGRAL_TYPE_P (inter_type);
7792 int inter_ptr = POINTER_TYPE_P (inter_type);
7793 int inter_float = FLOAT_TYPE_P (inter_type);
7794 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7795 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7796 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7797 int final_int = INTEGRAL_TYPE_P (type);
7798 int final_ptr = POINTER_TYPE_P (type);
7799 int final_float = FLOAT_TYPE_P (type);
7800 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7801 unsigned int final_prec = TYPE_PRECISION (type);
7802 int final_unsignedp = TYPE_UNSIGNED (type);
7804 /* In addition to the cases of two conversions in a row
7805 handled below, if we are converting something to its own
7806 type via an object of identical or wider precision, neither
7807 conversion is needed. */
7808 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7809 && (((inter_int || inter_ptr) && final_int)
7810 || (inter_float && final_float))
7811 && inter_prec >= final_prec)
7812 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7814 /* Likewise, if the intermediate and initial types are either both
7815 float or both integer, we don't need the middle conversion if the
7816 former is wider than the latter and doesn't change the signedness
7817 (for integers). Avoid this if the final type is a pointer since
7818 then we sometimes need the middle conversion. Likewise if the
7819 final type has a precision not equal to the size of its mode. */
7820 if (((inter_int && inside_int)
7821 || (inter_float && inside_float)
7822 || (inter_vec && inside_vec))
7823 && inter_prec >= inside_prec
7824 && (inter_float || inter_vec
7825 || inter_unsignedp == inside_unsignedp)
7826 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7827 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7828 && ! final_ptr
7829 && (! final_vec || inter_prec == inside_prec))
7830 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7832 /* If we have a sign-extension of a zero-extended value, we can
7833 replace that by a single zero-extension. Likewise if the
7834 final conversion does not change precision we can drop the
7835 intermediate conversion. */
7836 if (inside_int && inter_int && final_int
7837 && ((inside_prec < inter_prec && inter_prec < final_prec
7838 && inside_unsignedp && !inter_unsignedp)
7839 || final_prec == inter_prec))
7840 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7842 /* Two conversions in a row are not needed unless:
7843 - some conversion is floating-point (overstrict for now), or
7844 - some conversion is a vector (overstrict for now), or
7845 - the intermediate type is narrower than both initial and
7846 final, or
7847 - the intermediate type and innermost type differ in signedness,
7848 and the outermost type is wider than the intermediate, or
7849 - the initial type is a pointer type and the precisions of the
7850 intermediate and final types differ, or
7851 - the final type is a pointer type and the precisions of the
7852 initial and intermediate types differ. */
7853 if (! inside_float && ! inter_float && ! final_float
7854 && ! inside_vec && ! inter_vec && ! final_vec
7855 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7856 && ! (inside_int && inter_int
7857 && inter_unsignedp != inside_unsignedp
7858 && inter_prec < final_prec)
7859 && ((inter_unsignedp && inter_prec > inside_prec)
7860 == (final_unsignedp && final_prec > inter_prec))
7861 && ! (inside_ptr && inter_prec != final_prec)
7862 && ! (final_ptr && inside_prec != inter_prec)
7863 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7864 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7865 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7868 /* Handle (T *)&A.B.C for A being of type T and B and C
7869 living at offset zero. This occurs frequently in
7870 C++ upcasting and then accessing the base. */
7871 if (TREE_CODE (op0) == ADDR_EXPR
7872 && POINTER_TYPE_P (type)
7873 && handled_component_p (TREE_OPERAND (op0, 0)))
7875 HOST_WIDE_INT bitsize, bitpos;
7876 tree offset;
7877 enum machine_mode mode;
7878 int unsignedp, volatilep;
7879 tree base = TREE_OPERAND (op0, 0);
7880 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7881 &mode, &unsignedp, &volatilep, false);
7882 /* If the reference was to a (constant) zero offset, we can use
7883 the address of the base if it has the same base type
7884 as the result type and the pointer type is unqualified. */
7885 if (! offset && bitpos == 0
7886 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7887 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7888 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7889 return fold_convert_loc (loc, type,
7890 build_fold_addr_expr_loc (loc, base));
7893 if (TREE_CODE (op0) == MODIFY_EXPR
7894 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7895 /* Detect assigning a bitfield. */
7896 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7897 && DECL_BIT_FIELD
7898 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7900 /* Don't leave an assignment inside a conversion
7901 unless assigning a bitfield. */
7902 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7903 /* First do the assignment, then return converted constant. */
7904 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7905 TREE_NO_WARNING (tem) = 1;
7906 TREE_USED (tem) = 1;
7907 return tem;
7910 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7911 constants (if x has signed type, the sign bit cannot be set
7912 in c). This folds extension into the BIT_AND_EXPR.
7913 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7914 very likely don't have maximal range for their precision and this
7915 transformation effectively doesn't preserve non-maximal ranges. */
7916 if (TREE_CODE (type) == INTEGER_TYPE
7917 && TREE_CODE (op0) == BIT_AND_EXPR
7918 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7920 tree and_expr = op0;
7921 tree and0 = TREE_OPERAND (and_expr, 0);
7922 tree and1 = TREE_OPERAND (and_expr, 1);
7923 int change = 0;
7925 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7926 || (TYPE_PRECISION (type)
7927 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7928 change = 1;
7929 else if (TYPE_PRECISION (TREE_TYPE (and1))
7930 <= HOST_BITS_PER_WIDE_INT
7931 && host_integerp (and1, 1))
7933 unsigned HOST_WIDE_INT cst;
7935 cst = tree_low_cst (and1, 1);
7936 cst &= (HOST_WIDE_INT) -1
7937 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7938 change = (cst == 0);
7939 #ifdef LOAD_EXTEND_OP
7940 if (change
7941 && !flag_syntax_only
7942 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7943 == ZERO_EXTEND))
7945 tree uns = unsigned_type_for (TREE_TYPE (and0));
7946 and0 = fold_convert_loc (loc, uns, and0);
7947 and1 = fold_convert_loc (loc, uns, and1);
7949 #endif
7951 if (change)
7953 tem = force_fit_type_double (type, tree_to_double_int (and1),
7954 0, TREE_OVERFLOW (and1));
7955 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7956 fold_convert_loc (loc, type, and0), tem);
7960 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7961 when one of the new casts will fold away. Conservatively we assume
7962 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7963 if (POINTER_TYPE_P (type)
7964 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7965 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7966 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7967 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7968 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7970 tree arg00 = TREE_OPERAND (arg0, 0);
7971 tree arg01 = TREE_OPERAND (arg0, 1);
7973 return fold_build_pointer_plus_loc
7974 (loc, fold_convert_loc (loc, type, arg00), arg01);
7977 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7978 of the same precision, and X has an integer type not narrower than
7979 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7980 if (INTEGRAL_TYPE_P (type)
7981 && TREE_CODE (op0) == BIT_NOT_EXPR
7982 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7983 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7984 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7986 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7987 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7988 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7989 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7990 fold_convert_loc (loc, type, tem));
7993 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7994 type of X and Y (integer types only). */
7995 if (INTEGRAL_TYPE_P (type)
7996 && TREE_CODE (op0) == MULT_EXPR
7997 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7998 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8000 /* Be careful not to introduce new overflows. */
8001 tree mult_type;
8002 if (TYPE_OVERFLOW_WRAPS (type))
8003 mult_type = type;
8004 else
8005 mult_type = unsigned_type_for (type);
8007 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8009 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8010 fold_convert_loc (loc, mult_type,
8011 TREE_OPERAND (op0, 0)),
8012 fold_convert_loc (loc, mult_type,
8013 TREE_OPERAND (op0, 1)));
8014 return fold_convert_loc (loc, type, tem);
8018 tem = fold_convert_const (code, type, op0);
8019 return tem ? tem : NULL_TREE;
8021 case ADDR_SPACE_CONVERT_EXPR:
8022 if (integer_zerop (arg0))
8023 return fold_convert_const (code, type, arg0);
8024 return NULL_TREE;
8026 case FIXED_CONVERT_EXPR:
8027 tem = fold_convert_const (code, type, arg0);
8028 return tem ? tem : NULL_TREE;
8030 case VIEW_CONVERT_EXPR:
8031 if (TREE_TYPE (op0) == type)
8032 return op0;
8033 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8034 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8035 type, TREE_OPERAND (op0, 0));
8036 if (TREE_CODE (op0) == MEM_REF)
8037 return fold_build2_loc (loc, MEM_REF, type,
8038 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8040 /* For integral conversions with the same precision or pointer
8041 conversions use a NOP_EXPR instead. */
8042 if ((INTEGRAL_TYPE_P (type)
8043 || POINTER_TYPE_P (type))
8044 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8045 || POINTER_TYPE_P (TREE_TYPE (op0)))
8046 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8047 return fold_convert_loc (loc, type, op0);
8049 /* Strip inner integral conversions that do not change the precision. */
8050 if (CONVERT_EXPR_P (op0)
8051 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8052 || POINTER_TYPE_P (TREE_TYPE (op0)))
8053 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8054 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8055 && (TYPE_PRECISION (TREE_TYPE (op0))
8056 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8057 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8058 type, TREE_OPERAND (op0, 0));
8060 return fold_view_convert_expr (type, op0);
8062 case NEGATE_EXPR:
8063 tem = fold_negate_expr (loc, arg0);
8064 if (tem)
8065 return fold_convert_loc (loc, type, tem);
8066 return NULL_TREE;
8068 case ABS_EXPR:
8069 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8070 return fold_abs_const (arg0, type);
8071 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8072 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8073 /* Convert fabs((double)float) into (double)fabsf(float). */
8074 else if (TREE_CODE (arg0) == NOP_EXPR
8075 && TREE_CODE (type) == REAL_TYPE)
8077 tree targ0 = strip_float_extensions (arg0);
8078 if (targ0 != arg0)
8079 return fold_convert_loc (loc, type,
8080 fold_build1_loc (loc, ABS_EXPR,
8081 TREE_TYPE (targ0),
8082 targ0));
8084 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8085 else if (TREE_CODE (arg0) == ABS_EXPR)
8086 return arg0;
8087 else if (tree_expr_nonnegative_p (arg0))
8088 return arg0;
8090 /* Strip sign ops from argument. */
8091 if (TREE_CODE (type) == REAL_TYPE)
8093 tem = fold_strip_sign_ops (arg0);
8094 if (tem)
8095 return fold_build1_loc (loc, ABS_EXPR, type,
8096 fold_convert_loc (loc, type, tem));
8098 return NULL_TREE;
8100 case CONJ_EXPR:
8101 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8102 return fold_convert_loc (loc, type, arg0);
8103 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8105 tree itype = TREE_TYPE (type);
8106 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8107 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8108 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8109 negate_expr (ipart));
8111 if (TREE_CODE (arg0) == COMPLEX_CST)
8113 tree itype = TREE_TYPE (type);
8114 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8115 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8116 return build_complex (type, rpart, negate_expr (ipart));
8118 if (TREE_CODE (arg0) == CONJ_EXPR)
8119 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8120 return NULL_TREE;
8122 case BIT_NOT_EXPR:
8123 if (TREE_CODE (arg0) == INTEGER_CST)
8124 return fold_not_const (arg0, type);
8125 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8126 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8127 /* Convert ~ (-A) to A - 1. */
8128 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8129 return fold_build2_loc (loc, MINUS_EXPR, type,
8130 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8131 build_int_cst (type, 1));
8132 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8133 else if (INTEGRAL_TYPE_P (type)
8134 && ((TREE_CODE (arg0) == MINUS_EXPR
8135 && integer_onep (TREE_OPERAND (arg0, 1)))
8136 || (TREE_CODE (arg0) == PLUS_EXPR
8137 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8138 return fold_build1_loc (loc, NEGATE_EXPR, type,
8139 fold_convert_loc (loc, type,
8140 TREE_OPERAND (arg0, 0)));
8141 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8142 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8143 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8144 fold_convert_loc (loc, type,
8145 TREE_OPERAND (arg0, 0)))))
8146 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 1)));
8149 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8150 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8151 fold_convert_loc (loc, type,
8152 TREE_OPERAND (arg0, 1)))))
8153 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8154 fold_convert_loc (loc, type,
8155 TREE_OPERAND (arg0, 0)), tem);
8156 /* Perform BIT_NOT_EXPR on each element individually. */
8157 else if (TREE_CODE (arg0) == VECTOR_CST)
8159 tree *elements;
8160 tree elem;
8161 unsigned count = VECTOR_CST_NELTS (arg0), i;
8163 elements = XALLOCAVEC (tree, count);
8164 for (i = 0; i < count; i++)
8166 elem = VECTOR_CST_ELT (arg0, i);
8167 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8168 if (elem == NULL_TREE)
8169 break;
8170 elements[i] = elem;
8172 if (i == count)
8173 return build_vector (type, elements);
8176 return NULL_TREE;
8178 case TRUTH_NOT_EXPR:
8179 /* The argument to invert_truthvalue must have Boolean type. */
8180 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8181 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8183 /* Note that the operand of this must be an int
8184 and its values must be 0 or 1.
8185 ("true" is a fixed value perhaps depending on the language,
8186 but we don't handle values other than 1 correctly yet.) */
8187 tem = fold_truth_not_expr (loc, arg0);
8188 if (!tem)
8189 return NULL_TREE;
8190 return fold_convert_loc (loc, type, tem);
8192 case REALPART_EXPR:
8193 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8194 return fold_convert_loc (loc, type, arg0);
8195 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8196 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8197 TREE_OPERAND (arg0, 1));
8198 if (TREE_CODE (arg0) == COMPLEX_CST)
8199 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8200 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8202 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8203 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8204 fold_build1_loc (loc, REALPART_EXPR, itype,
8205 TREE_OPERAND (arg0, 0)),
8206 fold_build1_loc (loc, REALPART_EXPR, itype,
8207 TREE_OPERAND (arg0, 1)));
8208 return fold_convert_loc (loc, type, tem);
8210 if (TREE_CODE (arg0) == CONJ_EXPR)
8212 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8213 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8214 TREE_OPERAND (arg0, 0));
8215 return fold_convert_loc (loc, type, tem);
8217 if (TREE_CODE (arg0) == CALL_EXPR)
8219 tree fn = get_callee_fndecl (arg0);
8220 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8221 switch (DECL_FUNCTION_CODE (fn))
8223 CASE_FLT_FN (BUILT_IN_CEXPI):
8224 fn = mathfn_built_in (type, BUILT_IN_COS);
8225 if (fn)
8226 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8227 break;
8229 default:
8230 break;
8233 return NULL_TREE;
8235 case IMAGPART_EXPR:
8236 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8237 return build_zero_cst (type);
8238 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8239 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8240 TREE_OPERAND (arg0, 0));
8241 if (TREE_CODE (arg0) == COMPLEX_CST)
8242 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8243 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8245 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8246 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8247 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8248 TREE_OPERAND (arg0, 0)),
8249 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8250 TREE_OPERAND (arg0, 1)));
8251 return fold_convert_loc (loc, type, tem);
8253 if (TREE_CODE (arg0) == CONJ_EXPR)
8255 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8256 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8257 return fold_convert_loc (loc, type, negate_expr (tem));
8259 if (TREE_CODE (arg0) == CALL_EXPR)
8261 tree fn = get_callee_fndecl (arg0);
8262 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8263 switch (DECL_FUNCTION_CODE (fn))
8265 CASE_FLT_FN (BUILT_IN_CEXPI):
8266 fn = mathfn_built_in (type, BUILT_IN_SIN);
8267 if (fn)
8268 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8269 break;
8271 default:
8272 break;
8275 return NULL_TREE;
8277 case INDIRECT_REF:
8278 /* Fold *&X to X if X is an lvalue. */
8279 if (TREE_CODE (op0) == ADDR_EXPR)
8281 tree op00 = TREE_OPERAND (op0, 0);
8282 if ((TREE_CODE (op00) == VAR_DECL
8283 || TREE_CODE (op00) == PARM_DECL
8284 || TREE_CODE (op00) == RESULT_DECL)
8285 && !TREE_READONLY (op00))
8286 return op00;
8288 return NULL_TREE;
8290 case VEC_UNPACK_LO_EXPR:
8291 case VEC_UNPACK_HI_EXPR:
8292 case VEC_UNPACK_FLOAT_LO_EXPR:
8293 case VEC_UNPACK_FLOAT_HI_EXPR:
8295 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8296 tree *elts;
8297 enum tree_code subcode;
8299 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8300 if (TREE_CODE (arg0) != VECTOR_CST)
8301 return NULL_TREE;
8303 elts = XALLOCAVEC (tree, nelts * 2);
8304 if (!vec_cst_ctor_to_array (arg0, elts))
8305 return NULL_TREE;
8307 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8308 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8309 elts += nelts;
8311 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8312 subcode = NOP_EXPR;
8313 else
8314 subcode = FLOAT_EXPR;
8316 for (i = 0; i < nelts; i++)
8318 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8319 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8320 return NULL_TREE;
8323 return build_vector (type, elts);
8326 default:
8327 return NULL_TREE;
8328 } /* switch (code) */
8332 /* If the operation was a conversion do _not_ mark a resulting constant
8333 with TREE_OVERFLOW if the original constant was not. These conversions
8334 have implementation defined behavior and retaining the TREE_OVERFLOW
8335 flag here would confuse later passes such as VRP. */
8336 tree
8337 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8338 tree type, tree op0)
8340 tree res = fold_unary_loc (loc, code, type, op0);
8341 if (res
8342 && TREE_CODE (res) == INTEGER_CST
8343 && TREE_CODE (op0) == INTEGER_CST
8344 && CONVERT_EXPR_CODE_P (code))
8345 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8347 return res;
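/* For instance (a sketch): narrowing 0x12345 to unsigned char yields 0x45;
   the truncation is implementation-defined, not an arithmetic overflow, so
   the resulting INTEGER_CST should not inherit a TREE_OVERFLOW flag that
   the original constant did not carry.  */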
8350 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8351 operands OP0 and OP1. LOC is the location of the resulting expression.
8352 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8353 Return the folded expression if folding is successful. Otherwise,
8354 return NULL_TREE. */
8355 static tree
8356 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8357 tree arg0, tree arg1, tree op0, tree op1)
8359 tree tem;
8361 /* We only do these simplifications if we are optimizing. */
8362 if (!optimize)
8363 return NULL_TREE;
8365 /* Check for things like (A || B) && (A || C). We can convert this
8366 to A || (B && C). Note that either operator can be any of the four
8367 truth and/or operations and the transformation will still be
8368 valid. Also note that we only care about order for the
8369 ANDIF and ORIF operators. If B contains side effects, this
8370 might change the truth-value of A. */
8371 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8372 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8373 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8374 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8375 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8376 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8378 tree a00 = TREE_OPERAND (arg0, 0);
8379 tree a01 = TREE_OPERAND (arg0, 1);
8380 tree a10 = TREE_OPERAND (arg1, 0);
8381 tree a11 = TREE_OPERAND (arg1, 1);
8382 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8383 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8384 && (code == TRUTH_AND_EXPR
8385 || code == TRUTH_OR_EXPR));
8387 if (operand_equal_p (a00, a10, 0))
8388 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8389 fold_build2_loc (loc, code, type, a01, a11));
8390 else if (commutative && operand_equal_p (a00, a11, 0))
8391 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8392 fold_build2_loc (loc, code, type, a01, a10));
8393 else if (commutative && operand_equal_p (a01, a10, 0))
8394 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8395 fold_build2_loc (loc, code, type, a00, a11));
8397 /* This case is tricky because we must either have commutative
8398 operators or else A10 must not have side-effects. */
8400 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8401 && operand_equal_p (a01, a11, 0))
8402 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8403 fold_build2_loc (loc, code, type, a00, a10),
8404 a01);
8407 /* See if we can build a range comparison. */
8408 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8409 return tem;
8411 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8412 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8414 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8415 if (tem)
8416 return fold_build2_loc (loc, code, type, tem, arg1);
8419 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8420 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8422 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8423 if (tem)
8424 return fold_build2_loc (loc, code, type, arg0, tem);
8427 /* Check for the possibility of merging component references. If our
8428 lhs is another similar operation, try to merge its rhs with our
8429 rhs. Then try to merge our lhs and rhs. */
8430 if (TREE_CODE (arg0) == code
8431 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8432 TREE_OPERAND (arg0, 1), arg1)))
8433 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8435 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8436 return tem;
8438 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8439 false) >= 2)
8440 && LOGICAL_OP_NON_SHORT_CIRCUIT
8441 && (code == TRUTH_AND_EXPR
8442 || code == TRUTH_ANDIF_EXPR
8443 || code == TRUTH_OR_EXPR
8444 || code == TRUTH_ORIF_EXPR))
8446 enum tree_code ncode, icode;
8448 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8449 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8450 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8452 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8453 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8454 We don't want to pack more than two leaves into a non-IF AND/OR
8455 expression.
8456 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8457 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8458 If the inner right-hand side of the left-hand operand has
8459 side-effects, or isn't simple, then we can't add to it,
8460 as otherwise we might destroy the if-sequence. */
8461 if (TREE_CODE (arg0) == icode
8462 && simple_operand_p_2 (arg1)
8463 /* Needed for sequence points to handle trappings, and
8464 side-effects. */
8465 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8467 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8468 arg1);
8469 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8470 tem);
8472 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8473 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8474 else if (TREE_CODE (arg1) == icode
8475 && simple_operand_p_2 (arg0)
8476 /* Needed for sequence points to handle trappings, and
8477 side-effects. */
8478 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8480 tem = fold_build2_loc (loc, ncode, type,
8481 arg0, TREE_OPERAND (arg1, 0));
8482 return fold_build2_loc (loc, icode, type, tem,
8483 TREE_OPERAND (arg1, 1));
8485 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8486 into (A OR B).
8487 For sequence point consistency, we need to check for trapping,
8488 and side-effects. */
8489 else if (code == icode && simple_operand_p_2 (arg0)
8490 && simple_operand_p_2 (arg1))
8491 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8494 return NULL_TREE;
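/* Worked instance of the distribution above (hypothetical operands):
     (a || b) && (a || c)   -->   a || (b && c)
   valid for the IF and non-IF variants alike, provided B is free of side
   effects that could change the truth value of A.  */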
8497 /* Fold a binary expression of code CODE and type TYPE with operands
8498 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8499 Return the folded expression if folding is successful. Otherwise,
8500 return NULL_TREE. */
8502 static tree
8503 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8505 enum tree_code compl_code;
8507 if (code == MIN_EXPR)
8508 compl_code = MAX_EXPR;
8509 else if (code == MAX_EXPR)
8510 compl_code = MIN_EXPR;
8511 else
8512 gcc_unreachable ();
8514 /* MIN (MAX (a, b), b) == b. */
8515 if (TREE_CODE (op0) == compl_code
8516 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8517 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8519 /* MIN (MAX (b, a), b) == b. */
8520 if (TREE_CODE (op0) == compl_code
8521 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8522 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8523 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8525 /* MIN (a, MAX (a, b)) == a. */
8526 if (TREE_CODE (op1) == compl_code
8527 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8528 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8529 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8531 /* MIN (a, MAX (b, a)) == a. */
8532 if (TREE_CODE (op1) == compl_code
8533 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8534 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8535 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8537 return NULL_TREE;
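/* A minimal usage sketch (hypothetical helper): MIN (MAX (a, b), b)
   folds to B, with A retained only for its side effects via
   omit_one_operand_loc.  */
static tree
example_min_of_max (location_t loc, tree type, tree a, tree b)
{
  tree max_ab = fold_build2_loc (loc, MAX_EXPR, type, a, b);
  return fold_build2_loc (loc, MIN_EXPR, type, max_ab, b);
}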
8540 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8541 by changing CODE to reduce the magnitude of constants involved in
8542 ARG0 of the comparison.
8543 Returns a canonicalized comparison tree if a simplification was
8544 possible, otherwise returns NULL_TREE.
8545 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8546 valid if signed overflow is undefined. */
8548 static tree
8549 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8550 tree arg0, tree arg1,
8551 bool *strict_overflow_p)
8553 enum tree_code code0 = TREE_CODE (arg0);
8554 tree t, cst0 = NULL_TREE;
8555 int sgn0;
8556 bool swap = false;
8558 /* Match A +- CST code arg1 and CST code arg1. We can change the
8559 first form only if overflow is undefined. */
8560 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8561 /* In principle pointers also have undefined overflow behavior,
8562 but that causes problems elsewhere. */
8563 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8564 && (code0 == MINUS_EXPR
8565 || code0 == PLUS_EXPR)
8566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8567 || code0 == INTEGER_CST))
8568 return NULL_TREE;
8570 /* Identify the constant in arg0 and its sign. */
8571 if (code0 == INTEGER_CST)
8572 cst0 = arg0;
8573 else
8574 cst0 = TREE_OPERAND (arg0, 1);
8575 sgn0 = tree_int_cst_sgn (cst0);
8577 /* Overflowed constants and zero will cause problems. */
8578 if (integer_zerop (cst0)
8579 || TREE_OVERFLOW (cst0))
8580 return NULL_TREE;
8582 /* See if we can reduce the magnitude of the constant in
8583 arg0 by changing the comparison code. */
8584 if (code0 == INTEGER_CST)
8586 /* CST <= arg1 -> CST-1 < arg1. */
8587 if (code == LE_EXPR && sgn0 == 1)
8588 code = LT_EXPR;
8589 /* -CST < arg1 -> -CST-1 <= arg1. */
8590 else if (code == LT_EXPR && sgn0 == -1)
8591 code = LE_EXPR;
8592 /* CST > arg1 -> CST-1 >= arg1. */
8593 else if (code == GT_EXPR && sgn0 == 1)
8594 code = GE_EXPR;
8595 /* -CST >= arg1 -> -CST-1 > arg1. */
8596 else if (code == GE_EXPR && sgn0 == -1)
8597 code = GT_EXPR;
8598 else
8599 return NULL_TREE;
8600 /* arg1 code' CST' might be more canonical. */
8601 swap = true;
8603 else
8605 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8606 if (code == LT_EXPR
8607 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8608 code = LE_EXPR;
8609 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8610 else if (code == GT_EXPR
8611 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8612 code = GE_EXPR;
8613 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8614 else if (code == LE_EXPR
8615 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8616 code = LT_EXPR;
8617 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8618 else if (code == GE_EXPR
8619 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8620 code = GT_EXPR;
8621 else
8622 return NULL_TREE;
8623 *strict_overflow_p = true;
8626 /* Now build the constant reduced in magnitude. But not if that
8627 would produce one outside of its type's range. */
8628 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8629 && ((sgn0 == 1
8630 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8631 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8632 || (sgn0 == -1
8633 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8634 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8635 /* We cannot swap the comparison here as that would cause us to
8636 endlessly recurse. */
8637 return NULL_TREE;
8639 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8640 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8641 if (code0 != INTEGER_CST)
8642 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8643 t = fold_convert (TREE_TYPE (arg1), t);
8645 /* If swapping might yield a more canonical form, do so. */
8646 if (swap)
8647 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8648 else
8649 return fold_build2_loc (loc, code, type, t, arg1);
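/* Worked instances (assuming signed types with undefined overflow):
     x - 5 <  y   becomes   x - 4 <= y
     x + 5 >  y   becomes   x + 4 >= y
   and a sole constant swaps to the right, e.g. 5 <= y becomes y > 4.  */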
8652 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8653 overflow further. Try to decrease the magnitude of constants involved
8654 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8655 and put sole constants at the second argument position.
8656 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8658 static tree
8659 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8660 tree arg0, tree arg1)
8662 tree t;
8663 bool strict_overflow_p;
8664 const char * const warnmsg = G_("assuming signed overflow does not occur "
8665 "when reducing constant in comparison");
8667 /* Try canonicalization by simplifying arg0. */
8668 strict_overflow_p = false;
8669 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8670 &strict_overflow_p);
8671 if (t)
8673 if (strict_overflow_p)
8674 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8675 return t;
8678 /* Try canonicalization by simplifying arg1 using the swapped
8679 comparison. */
8680 code = swap_tree_comparison (code);
8681 strict_overflow_p = false;
8682 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8683 &strict_overflow_p);
8684 if (t && strict_overflow_p)
8685 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8686 return t;
8689 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8690 space. This is used to avoid issuing overflow warnings for
8691 expressions like &p->x which cannot wrap. */
8693 static bool
8694 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8696 unsigned HOST_WIDE_INT offset_low, total_low;
8697 HOST_WIDE_INT size, offset_high, total_high;
8699 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8700 return true;
8702 if (bitpos < 0)
8703 return true;
8705 if (offset == NULL_TREE)
8707 offset_low = 0;
8708 offset_high = 0;
8710 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8711 return true;
8712 else
8714 offset_low = TREE_INT_CST_LOW (offset);
8715 offset_high = TREE_INT_CST_HIGH (offset);
8718 if (add_double_with_sign (offset_low, offset_high,
8719 bitpos / BITS_PER_UNIT, 0,
8720 &total_low, &total_high,
8721 true))
8722 return true;
8724 if (total_high != 0)
8725 return true;
8727 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8728 if (size <= 0)
8729 return true;
8731 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8732 array. */
8733 if (TREE_CODE (base) == ADDR_EXPR)
8735 HOST_WIDE_INT base_size;
8737 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8738 if (base_size > 0 && size < base_size)
8739 size = base_size;
8742 return total_low > (unsigned HOST_WIDE_INT) size;
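/* For instance (a sketch): for &p->x with a non-negative field offset that
   fits inside the pointed-to object's size, this returns false, so no
   bogus pointer-wraparound warning is issued for the comparison.  */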
8745 /* Subroutine of fold_binary. This routine performs all of the
8746 transformations that are common to the equality/inequality
8747 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8748 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8749 fold_binary should call fold_binary. Fold a comparison with
8750 tree code CODE and type TYPE with operands OP0 and OP1. Return
8751 the folded comparison or NULL_TREE. */
8753 static tree
8754 fold_comparison (location_t loc, enum tree_code code, tree type,
8755 tree op0, tree op1)
8757 tree arg0, arg1, tem;
8759 arg0 = op0;
8760 arg1 = op1;
8762 STRIP_SIGN_NOPS (arg0);
8763 STRIP_SIGN_NOPS (arg1);
8765 tem = fold_relational_const (code, type, arg0, arg1);
8766 if (tem != NULL_TREE)
8767 return tem;
8769 /* If one arg is a real or integer constant, put it last. */
8770 if (tree_swap_operands_p (arg0, arg1, true))
8771 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8773 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8774 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8775 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8776 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8777 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8778 && (TREE_CODE (arg1) == INTEGER_CST
8779 && !TREE_OVERFLOW (arg1)))
8781 tree const1 = TREE_OPERAND (arg0, 1);
8782 tree const2 = arg1;
8783 tree variable = TREE_OPERAND (arg0, 0);
8784 tree lhs;
8785 int lhs_add;
8786 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8788 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8789 TREE_TYPE (arg1), const2, const1);
8791 /* If the constant operation overflowed this can be
8792 simplified as a comparison against INT_MAX/INT_MIN. */
8793 if (TREE_CODE (lhs) == INTEGER_CST
8794 && TREE_OVERFLOW (lhs))
8796 int const1_sgn = tree_int_cst_sgn (const1);
8797 enum tree_code code2 = code;
8799 /* Get the sign of the constant on the lhs if the
8800 operation were VARIABLE + CONST1. */
8801 if (TREE_CODE (arg0) == MINUS_EXPR)
8802 const1_sgn = -const1_sgn;
8804 /* The sign of the constant determines if we overflowed
8805 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8806 Canonicalize to the INT_MIN overflow by swapping the comparison
8807 if necessary. */
8808 if (const1_sgn == -1)
8809 code2 = swap_tree_comparison (code);
8811 /* We now can look at the canonicalized case
8812 VARIABLE + 1 CODE2 INT_MIN
8813 and decide on the result. */
8814 if (code2 == LT_EXPR
8815 || code2 == LE_EXPR
8816 || code2 == EQ_EXPR)
8817 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8818 else if (code2 == NE_EXPR
8819 || code2 == GE_EXPR
8820 || code2 == GT_EXPR)
8821 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8824 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8825 && (TREE_CODE (lhs) != INTEGER_CST
8826 || !TREE_OVERFLOW (lhs)))
8828 if (code != EQ_EXPR && code != NE_EXPR)
8829 fold_overflow_warning ("assuming signed overflow does not occur "
8830 "when changing X +- C1 cmp C2 to "
8831 "X cmp C1 +- C2",
8832 WARN_STRICT_OVERFLOW_COMPARISON);
8833 return fold_build2_loc (loc, code, type, variable, lhs);
8837 /* For comparisons of pointers we can decompose them to a compile time
8838 comparison of the base objects and the offsets into the object.
8839 This requires at least one operand being an ADDR_EXPR or a
8840 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8841 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8842 && (TREE_CODE (arg0) == ADDR_EXPR
8843 || TREE_CODE (arg1) == ADDR_EXPR
8844 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8845 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8847 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8848 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8849 enum machine_mode mode;
8850 int volatilep, unsignedp;
8851 bool indirect_base0 = false, indirect_base1 = false;
8853 /* Get base and offset for the access. Strip ADDR_EXPR for
8854 get_inner_reference, but put it back by stripping INDIRECT_REF
8855 off the base object if possible. indirect_baseN will be true
8856 if baseN is not an address but refers to the object itself. */
8857 base0 = arg0;
8858 if (TREE_CODE (arg0) == ADDR_EXPR)
8860 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8861 &bitsize, &bitpos0, &offset0, &mode,
8862 &unsignedp, &volatilep, false);
8863 if (TREE_CODE (base0) == INDIRECT_REF)
8864 base0 = TREE_OPERAND (base0, 0);
8865 else
8866 indirect_base0 = true;
8868 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8870 base0 = TREE_OPERAND (arg0, 0);
8871 STRIP_SIGN_NOPS (base0);
8872 if (TREE_CODE (base0) == ADDR_EXPR)
8874 base0 = TREE_OPERAND (base0, 0);
8875 indirect_base0 = true;
8877 offset0 = TREE_OPERAND (arg0, 1);
8878 if (host_integerp (offset0, 0))
8880 HOST_WIDE_INT off = size_low_cst (offset0);
8881 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8882 * BITS_PER_UNIT)
8883 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8885 bitpos0 = off * BITS_PER_UNIT;
8886 offset0 = NULL_TREE;
8891 base1 = arg1;
8892 if (TREE_CODE (arg1) == ADDR_EXPR)
8894 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8895 &bitsize, &bitpos1, &offset1, &mode,
8896 &unsignedp, &volatilep, false);
8897 if (TREE_CODE (base1) == INDIRECT_REF)
8898 base1 = TREE_OPERAND (base1, 0);
8899 else
8900 indirect_base1 = true;
8902 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8904 base1 = TREE_OPERAND (arg1, 0);
8905 STRIP_SIGN_NOPS (base1);
8906 if (TREE_CODE (base1) == ADDR_EXPR)
8908 base1 = TREE_OPERAND (base1, 0);
8909 indirect_base1 = true;
8911 offset1 = TREE_OPERAND (arg1, 1);
8912 if (host_integerp (offset1, 0))
8914 HOST_WIDE_INT off = size_low_cst (offset1);
8915 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8916 * BITS_PER_UNIT)
8917 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8919 bitpos1 = off * BITS_PER_UNIT;
8920 offset1 = NULL_TREE;
8925 /* A local variable can never be pointed to by
8926 the default SSA name of an incoming parameter. */
8927 if ((TREE_CODE (arg0) == ADDR_EXPR
8928 && indirect_base0
8929 && TREE_CODE (base0) == VAR_DECL
8930 && auto_var_in_fn_p (base0, current_function_decl)
8931 && !indirect_base1
8932 && TREE_CODE (base1) == SSA_NAME
8933 && SSA_NAME_IS_DEFAULT_DEF (base1)
8934 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8935 || (TREE_CODE (arg1) == ADDR_EXPR
8936 && indirect_base1
8937 && TREE_CODE (base1) == VAR_DECL
8938 && auto_var_in_fn_p (base1, current_function_decl)
8939 && !indirect_base0
8940 && TREE_CODE (base0) == SSA_NAME
8941 && SSA_NAME_IS_DEFAULT_DEF (base0)
8942 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8944 if (code == NE_EXPR)
8945 return constant_boolean_node (1, type);
8946 else if (code == EQ_EXPR)
8947 return constant_boolean_node (0, type);
8949 /* If we have equivalent bases we might be able to simplify. */
8950 else if (indirect_base0 == indirect_base1
8951 && operand_equal_p (base0, base1, 0))
8953 /* We can fold this expression to a constant if the non-constant
8954 offset parts are equal. */
8955 if ((offset0 == offset1
8956 || (offset0 && offset1
8957 && operand_equal_p (offset0, offset1, 0)))
8958 && (code == EQ_EXPR
8959 || code == NE_EXPR
8960 || (indirect_base0 && DECL_P (base0))
8961 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8964 if (code != EQ_EXPR
8965 && code != NE_EXPR
8966 && bitpos0 != bitpos1
8967 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8968 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8969 fold_overflow_warning (("assuming pointer wraparound does not "
8970 "occur when comparing P +- C1 with "
8971 "P +- C2"),
8972 WARN_STRICT_OVERFLOW_CONDITIONAL);
8974 switch (code)
8976 case EQ_EXPR:
8977 return constant_boolean_node (bitpos0 == bitpos1, type);
8978 case NE_EXPR:
8979 return constant_boolean_node (bitpos0 != bitpos1, type);
8980 case LT_EXPR:
8981 return constant_boolean_node (bitpos0 < bitpos1, type);
8982 case LE_EXPR:
8983 return constant_boolean_node (bitpos0 <= bitpos1, type);
8984 case GE_EXPR:
8985 return constant_boolean_node (bitpos0 >= bitpos1, type);
8986 case GT_EXPR:
8987 return constant_boolean_node (bitpos0 > bitpos1, type);
8988 default:;
8991 /* We can simplify the comparison to a comparison of the variable
8992 offset parts if the constant offset parts are equal.
8993 Be careful to use signed size type here because otherwise we
8994 mess with array offsets in the wrong way. This is possible
8995 because pointer arithmetic is restricted to remain within an
8996 object and overflow on pointer differences is undefined as of
8997 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8998 else if (bitpos0 == bitpos1
8999 && ((code == EQ_EXPR || code == NE_EXPR)
9000 || (indirect_base0 && DECL_P (base0))
9001 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9003 /* By converting to signed size type we cover middle-end pointer
9004 arithmetic which operates on unsigned pointer types of size
9005 type size and ARRAY_REF offsets which are properly sign or
9006 zero extended from their type in case it is narrower than
9007 size type. */
9008 if (offset0 == NULL_TREE)
9009 offset0 = build_int_cst (ssizetype, 0);
9010 else
9011 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9012 if (offset1 == NULL_TREE)
9013 offset1 = build_int_cst (ssizetype, 0);
9014 else
9015 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9017 if (code != EQ_EXPR
9018 && code != NE_EXPR
9019 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9020 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9021 fold_overflow_warning (("assuming pointer wraparound does not "
9022 "occur when comparing P +- C1 with "
9023 "P +- C2"),
9024 WARN_STRICT_OVERFLOW_COMPARISON);
9026 return fold_build2_loc (loc, code, type, offset0, offset1);
9029 /* For non-equal bases we can simplify if they are addresses
9030 of local binding decls or constants. */
9031 else if (indirect_base0 && indirect_base1
9032 /* We know that !operand_equal_p (base0, base1, 0)
9033 because the if condition was false. But make
9034 sure two decls are not the same. */
9035 && base0 != base1
9036 && TREE_CODE (arg0) == ADDR_EXPR
9037 && TREE_CODE (arg1) == ADDR_EXPR
9038 && (((TREE_CODE (base0) == VAR_DECL
9039 || TREE_CODE (base0) == PARM_DECL)
9040 && (targetm.binds_local_p (base0)
9041 || CONSTANT_CLASS_P (base1)))
9042 || CONSTANT_CLASS_P (base0))
9043 && (((TREE_CODE (base1) == VAR_DECL
9044 || TREE_CODE (base1) == PARM_DECL)
9045 && (targetm.binds_local_p (base1)
9046 || CONSTANT_CLASS_P (base0)))
9047 || CONSTANT_CLASS_P (base1)))
9049 if (code == EQ_EXPR)
9050 return omit_two_operands_loc (loc, type, boolean_false_node,
9051 arg0, arg1);
9052 else if (code == NE_EXPR)
9053 return omit_two_operands_loc (loc, type, boolean_true_node,
9054 arg0, arg1);
9056 /* For equal offsets we can simplify to a comparison of the
9057 base addresses. */
9058 else if (bitpos0 == bitpos1
9059 && (indirect_base0
9060 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9061 && (indirect_base1
9062 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9063 && ((offset0 == offset1)
9064 || (offset0 && offset1
9065 && operand_equal_p (offset0, offset1, 0))))
9067 if (indirect_base0)
9068 base0 = build_fold_addr_expr_loc (loc, base0);
9069 if (indirect_base1)
9070 base1 = build_fold_addr_expr_loc (loc, base1);
9071 return fold_build2_loc (loc, code, type, base0, base1);
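/* Editor's note (illustrative example, not part of the original
   source): given `int a[8];', the decomposition above lets
   `&a[1] < &a[3]' fold to 1.  Both sides share base `a', so the
   comparison reduces to comparing the constant bit offsets
   (bitpos0 < bitpos1, i.e. 32 < 96 for 32-bit int), which is
   allowed here because base0 is a DECL.  */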
9075 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9076 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9077 the resulting offset is smaller in absolute value than the
9078 original one. */
9079 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9080 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9081 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9082 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9083 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9084 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9085 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9087 tree const1 = TREE_OPERAND (arg0, 1);
9088 tree const2 = TREE_OPERAND (arg1, 1);
9089 tree variable1 = TREE_OPERAND (arg0, 0);
9090 tree variable2 = TREE_OPERAND (arg1, 0);
9091 tree cst;
9092 const char * const warnmsg = G_("assuming signed overflow does not "
9093 "occur when combining constants around "
9094 "a comparison");
9096 /* Put the constant on the side where it doesn't overflow and is
9097 of lower absolute value than before. */
9098 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9099 ? MINUS_EXPR : PLUS_EXPR,
9100 const2, const1);
9101 if (!TREE_OVERFLOW (cst)
9102 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9104 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9105 return fold_build2_loc (loc, code, type,
9106 variable1,
9107 fold_build2_loc (loc,
9108 TREE_CODE (arg1), TREE_TYPE (arg1),
9109 variable2, cst));
9112 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9113 ? MINUS_EXPR : PLUS_EXPR,
9114 const1, const2);
9115 if (!TREE_OVERFLOW (cst)
9116 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9118 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9119 return fold_build2_loc (loc, code, type,
9120 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9121 variable1, cst),
9122 variable2);
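/* Editor's note (illustrative): for signed x and y,
   `x + 7 < y + 9' becomes `x < y + 2' here, since 9 - 7 = 2 does
   not overflow and |2| < |9|.  This relies on signed overflow
   being undefined, hence the warning emitted above.  */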
9126 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9127 signed arithmetic case. That form is created by the compiler
9128 often enough for folding it to be of value. One example is in
9129 computing loop trip counts after Operator Strength Reduction. */
9130 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9131 && TREE_CODE (arg0) == MULT_EXPR
9132 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9133 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9134 && integer_zerop (arg1))
9136 tree const1 = TREE_OPERAND (arg0, 1);
9137 tree const2 = arg1; /* zero */
9138 tree variable1 = TREE_OPERAND (arg0, 0);
9139 enum tree_code cmp_code = code;
9141 /* Handle unfolded multiplication by zero. */
9142 if (integer_zerop (const1))
9143 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9145 fold_overflow_warning (("assuming signed overflow does not occur when "
9146 "eliminating multiplication in comparison "
9147 "with zero"),
9148 WARN_STRICT_OVERFLOW_COMPARISON);
9150 /* If const1 is negative we swap the sense of the comparison. */
9151 if (tree_int_cst_sgn (const1) < 0)
9152 cmp_code = swap_tree_comparison (cmp_code);
9154 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
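/* Editor's note (illustrative): under the same undefined-overflow
   assumption, `x * 4 > 0' becomes `x > 0', while `x * -4 > 0'
   becomes `x < 0' because a negative multiplier swaps the sense of
   the comparison.  */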
9157 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9158 if (tem)
9159 return tem;
9161 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9163 tree targ0 = strip_float_extensions (arg0);
9164 tree targ1 = strip_float_extensions (arg1);
9165 tree newtype = TREE_TYPE (targ0);
9167 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9168 newtype = TREE_TYPE (targ1);
9170 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9171 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9172 return fold_build2_loc (loc, code, type,
9173 fold_convert_loc (loc, newtype, targ0),
9174 fold_convert_loc (loc, newtype, targ1));
9176 /* (-a) CMP (-b) -> b CMP a */
9177 if (TREE_CODE (arg0) == NEGATE_EXPR
9178 && TREE_CODE (arg1) == NEGATE_EXPR)
9179 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9180 TREE_OPERAND (arg0, 0));
9182 if (TREE_CODE (arg1) == REAL_CST)
9184 REAL_VALUE_TYPE cst;
9185 cst = TREE_REAL_CST (arg1);
9187 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9188 if (TREE_CODE (arg0) == NEGATE_EXPR)
9189 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9190 TREE_OPERAND (arg0, 0),
9191 build_real (TREE_TYPE (arg1),
9192 real_value_negate (&cst)));
9194 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9195 /* a CMP (-0) -> a CMP 0 */
9196 if (REAL_VALUE_MINUS_ZERO (cst))
9197 return fold_build2_loc (loc, code, type, arg0,
9198 build_real (TREE_TYPE (arg1), dconst0));
9200 /* x != NaN is always true, other ops are always false. */
9201 if (REAL_VALUE_ISNAN (cst)
9202 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9204 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9205 return omit_one_operand_loc (loc, type, tem, arg0);
9208 /* Fold comparisons against infinity. */
9209 if (REAL_VALUE_ISINF (cst)
9210 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9212 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9213 if (tem != NULL_TREE)
9214 return tem;
9218 /* If this is a comparison of a real constant with a PLUS_EXPR
9219 or a MINUS_EXPR of a real constant, we can convert it into a
9220 comparison with a revised real constant, provided that
9221 unsafe_math_optimizations are enabled and no overflow occurs.
9222 if (flag_unsafe_math_optimizations
9223 && TREE_CODE (arg1) == REAL_CST
9224 && (TREE_CODE (arg0) == PLUS_EXPR
9225 || TREE_CODE (arg0) == MINUS_EXPR)
9226 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9227 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9228 ? MINUS_EXPR : PLUS_EXPR,
9229 arg1, TREE_OPERAND (arg0, 1)))
9230 && !TREE_OVERFLOW (tem))
9231 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9233 /* Likewise, we can simplify a comparison of a real constant with
9234 a MINUS_EXPR whose first operand is also a real constant, i.e.
9235 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9236 floating-point types only if -fassociative-math is set. */
9237 if (flag_associative_math
9238 && TREE_CODE (arg1) == REAL_CST
9239 && TREE_CODE (arg0) == MINUS_EXPR
9240 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9241 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9242 arg1))
9243 && !TREE_OVERFLOW (tem))
9244 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9245 TREE_OPERAND (arg0, 1), tem);
9247 /* Fold comparisons against built-in math functions. */
9248 if (TREE_CODE (arg1) == REAL_CST
9249 && flag_unsafe_math_optimizations
9250 && ! flag_errno_math)
9252 enum built_in_function fcode = builtin_mathfn_code (arg0);
9254 if (fcode != END_BUILTINS)
9256 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9257 if (tem != NULL_TREE)
9258 return tem;
9263 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9264 && CONVERT_EXPR_P (arg0))
9266 /* If we are widening one operand of an integer comparison,
9267 see if the other operand is similarly being widened. Perhaps we
9268 can do the comparison in the narrower type. */
9269 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9270 if (tem)
9271 return tem;
9273 /* Or if we are changing signedness. */
9274 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9275 if (tem)
9276 return tem;
9279 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9280 constant, we can simplify it. */
9281 if (TREE_CODE (arg1) == INTEGER_CST
9282 && (TREE_CODE (arg0) == MIN_EXPR
9283 || TREE_CODE (arg0) == MAX_EXPR)
9284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9286 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9287 if (tem)
9288 return tem;
9291 /* Simplify comparison of something with itself. (For IEEE
9292 floating-point, we can only do some of these simplifications.) */
9293 if (operand_equal_p (arg0, arg1, 0))
9295 switch (code)
9297 case EQ_EXPR:
9298 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9299 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9300 return constant_boolean_node (1, type);
9301 break;
9303 case GE_EXPR:
9304 case LE_EXPR:
9305 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9306 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9307 return constant_boolean_node (1, type);
9308 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9310 case NE_EXPR:
9311 /* For NE, we can only do this simplification if the operands are
9312 integral or we don't honor IEEE floating point NaNs. */
9313 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9314 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9315 break;
9316 /* ... fall through ... */
9317 case GT_EXPR:
9318 case LT_EXPR:
9319 return constant_boolean_node (0, type);
9320 default:
9321 gcc_unreachable ();
9325 /* If we are comparing an expression that just has comparisons
9326 of two integer values, arithmetic expressions of those comparisons,
9327 and constants, we can simplify it. There are only three cases
9328 to check: the two values can either be equal, the first can be
9329 greater, or the second can be greater. Fold the expression for
9330 those three values. Since each value must be 0 or 1, we have
9331 eight possibilities, each of which corresponds to the constant 0
9332 or 1 or one of the six possible comparisons.
9334 This handles common cases like (a > b) == 0 but also handles
9335 expressions like ((x > y) - (y > x)) > 0, which supposedly
9336 occur in macroized code. */
9338 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9340 tree cval1 = 0, cval2 = 0;
9341 int save_p = 0;
9343 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9344 /* Don't handle degenerate cases here; they should already
9345 have been handled anyway. */
9346 && cval1 != 0 && cval2 != 0
9347 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9348 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9349 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9350 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9351 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9352 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9353 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9355 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9356 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9358 /* We can't just pass T to eval_subst in case cval1 or cval2
9359 was the same as ARG1. */
9361 tree high_result
9362 = fold_build2_loc (loc, code, type,
9363 eval_subst (loc, arg0, cval1, maxval,
9364 cval2, minval),
9365 arg1);
9366 tree equal_result
9367 = fold_build2_loc (loc, code, type,
9368 eval_subst (loc, arg0, cval1, maxval,
9369 cval2, maxval),
9370 arg1);
9371 tree low_result
9372 = fold_build2_loc (loc, code, type,
9373 eval_subst (loc, arg0, cval1, minval,
9374 cval2, maxval),
9375 arg1);
9377 /* All three of these results should be 0 or 1. Confirm they are.
9378 Then use those values to select the proper code to use. */
9380 if (TREE_CODE (high_result) == INTEGER_CST
9381 && TREE_CODE (equal_result) == INTEGER_CST
9382 && TREE_CODE (low_result) == INTEGER_CST)
9384 /* Make a 3-bit mask with the high-order bit being the
9385 value for `>', the next for `=', and the low for `<'. */
9386 switch ((integer_onep (high_result) * 4)
9387 + (integer_onep (equal_result) * 2)
9388 + integer_onep (low_result))
9390 case 0:
9391 /* Always false. */
9392 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9393 case 1:
9394 code = LT_EXPR;
9395 break;
9396 case 2:
9397 code = EQ_EXPR;
9398 break;
9399 case 3:
9400 code = LE_EXPR;
9401 break;
9402 case 4:
9403 code = GT_EXPR;
9404 break;
9405 case 5:
9406 code = NE_EXPR;
9407 break;
9408 case 6:
9409 code = GE_EXPR;
9410 break;
9411 case 7:
9412 /* Always true. */
9413 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9416 if (save_p)
9418 tem = save_expr (build2 (code, type, cval1, cval2));
9419 SET_EXPR_LOCATION (tem, loc);
9420 return tem;
9422 return fold_build2_loc (loc, code, type, cval1, cval2);
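/* Editor's note (worked example, not part of the original source):
   for `(a > b) == 0' the three substitutions give high_result = 0,
   equal_result = 1 and low_result = 1, i.e. mask value 3 in the
   switch above, so the whole expression folds to `a <= b'.  */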
9427 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9428 into a single range test. */
9429 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9430 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9431 && TREE_CODE (arg1) == INTEGER_CST
9432 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9433 && !integer_zerop (TREE_OPERAND (arg0, 1))
9434 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9435 && !TREE_OVERFLOW (arg1))
9437 tem = fold_div_compare (loc, code, type, arg0, arg1);
9438 if (tem != NULL_TREE)
9439 return tem;
9442 /* Fold ~X op ~Y as Y op X. */
9443 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9444 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9446 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9447 return fold_build2_loc (loc, code, type,
9448 fold_convert_loc (loc, cmp_type,
9449 TREE_OPERAND (arg1, 0)),
9450 TREE_OPERAND (arg0, 0));
9453 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9454 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9455 && TREE_CODE (arg1) == INTEGER_CST)
9457 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9458 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9459 TREE_OPERAND (arg0, 0),
9460 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9461 fold_convert_loc (loc, cmp_type, arg1)));
9464 return NULL_TREE;
9468 /* Subroutine of fold_binary. Optimize complex multiplications of the
9469 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9470 argument EXPR represents the expression "z" of type TYPE. */
9472 static tree
9473 fold_mult_zconjz (location_t loc, tree type, tree expr)
9475 tree itype = TREE_TYPE (type);
9476 tree rpart, ipart, tem;
9478 if (TREE_CODE (expr) == COMPLEX_EXPR)
9480 rpart = TREE_OPERAND (expr, 0);
9481 ipart = TREE_OPERAND (expr, 1);
9483 else if (TREE_CODE (expr) == COMPLEX_CST)
9485 rpart = TREE_REALPART (expr);
9486 ipart = TREE_IMAGPART (expr);
9488 else
9490 expr = save_expr (expr);
9491 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9492 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9495 rpart = save_expr (rpart);
9496 ipart = save_expr (ipart);
9497 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9498 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9499 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9500 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9501 build_zero_cst (itype));
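/* Editor's sketch (hypothetical example, not part of GCC): the
   identity fold_mult_zconjz relies on, written out on a plain
   (re, im) pair.  (re + im*i) * (re - im*i) = re*re + im*im, with
   the imaginary part cancelling to zero, so the folded result is
   purely real.  */
static double
zconjz_norm_example (double re, double im)
{
  /* Real part of z * conj (z); the imaginary part is always 0.  */
  return re * re + im * im;
}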
9505 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9506 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9507 guarantees that P and N have the same least significant log2(M) bits.
9508 N is not otherwise constrained. In particular, N is not normalized to
9509 0 <= N < M as is common. In general, the precise value of P is unknown.
9510 M is chosen as large as possible such that constant N can be determined.
9512 Returns M and sets *RESIDUE to N.
9514 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9515 account. This is not always possible due to PR 35705.
9518 static unsigned HOST_WIDE_INT
9519 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9520 bool allow_func_align)
9522 enum tree_code code;
9524 *residue = 0;
9526 code = TREE_CODE (expr);
9527 if (code == ADDR_EXPR)
9529 unsigned int bitalign;
9530 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9531 *residue /= BITS_PER_UNIT;
9532 return bitalign / BITS_PER_UNIT;
9534 else if (code == POINTER_PLUS_EXPR)
9536 tree op0, op1;
9537 unsigned HOST_WIDE_INT modulus;
9538 enum tree_code inner_code;
9540 op0 = TREE_OPERAND (expr, 0);
9541 STRIP_NOPS (op0);
9542 modulus = get_pointer_modulus_and_residue (op0, residue,
9543 allow_func_align);
9545 op1 = TREE_OPERAND (expr, 1);
9546 STRIP_NOPS (op1);
9547 inner_code = TREE_CODE (op1);
9548 if (inner_code == INTEGER_CST)
9550 *residue += TREE_INT_CST_LOW (op1);
9551 return modulus;
9553 else if (inner_code == MULT_EXPR)
9555 op1 = TREE_OPERAND (op1, 1);
9556 if (TREE_CODE (op1) == INTEGER_CST)
9558 unsigned HOST_WIDE_INT align;
9560 /* Compute the greatest power-of-2 divisor of op1. */
9561 align = TREE_INT_CST_LOW (op1);
9562 align &= -align;
9564 /* If align is non-zero and less than modulus, replace
9565 modulus with align.  If align is 0, then either op1 is 0
9566 or the greatest power-of-2 divisor of op1 doesn't fit in an
9567 unsigned HOST_WIDE_INT. In either case, no additional
9568 constraint is imposed. */
9569 if (align)
9570 modulus = MIN (modulus, align);
9572 return modulus;
9577 /* If we get here, we were unable to determine anything useful about the
9578 expression. */
9579 return 1;
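/* Editor's note (worked example, not part of the original source):
   for `double d;' and `expr = ((char *) &d + n * 4) + 2', the outer
   POINTER_PLUS_EXPR adds 2 to the residue, the inner MULT_EXPR case
   clamps the modulus to the power-of-2 divisor 4 of the step, and
   the ADDR_EXPR case supplies the 8-byte base alignment of `d', so
   the result is M = 4 with *RESIDUE = 2: expr mod 4 == 2 for all n.  */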
9582 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9583 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9585 static bool
9586 vec_cst_ctor_to_array (tree arg, tree *elts)
9588 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9590 if (TREE_CODE (arg) == VECTOR_CST)
9592 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9593 elts[i] = VECTOR_CST_ELT (arg, i);
9595 else if (TREE_CODE (arg) == CONSTRUCTOR)
9597 constructor_elt *elt;
9599 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9600 if (i >= nelts)
9601 return false;
9602 else
9603 elts[i] = elt->value;
9605 else
9606 return false;
9607 for (; i < nelts; i++)
9608 elts[i]
9609 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9610 return true;
9613 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9614 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9615 NULL_TREE otherwise. */
9617 static tree
9618 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9620 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9621 tree *elts;
9622 bool need_ctor = false;
9624 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9625 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9626 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9627 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9628 return NULL_TREE;
9630 elts = XALLOCAVEC (tree, nelts * 3);
9631 if (!vec_cst_ctor_to_array (arg0, elts)
9632 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9633 return NULL_TREE;
9635 for (i = 0; i < nelts; i++)
9637 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9638 need_ctor = true;
9639 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9642 if (need_ctor)
9644 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9645 for (i = 0; i < nelts; i++)
9646 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9647 return build_constructor (type, v);
9649 else
9650 return build_vector (type, &elts[2 * nelts]);
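/* Editor's note (illustrative): with four-element vectors
   arg0 = {0, 1, 2, 3}, arg1 = {4, 5, 6, 7} and sel = {0, 4, 1, 5},
   the loop above indexes the 2*nelts concatenation and produces the
   VECTOR_CST {0, 4, 1, 5}; a CONSTRUCTOR is built instead only when
   some selected element is not a constant.  */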
9653 /* Try to fold a pointer difference of type TYPE between two address
9654 expressions of array references AREF0 and AREF1 using location LOC. Return a
9655 simplified expression for the difference or NULL_TREE. */
9657 static tree
9658 fold_addr_of_array_ref_difference (location_t loc, tree type,
9659 tree aref0, tree aref1)
9661 tree base0 = TREE_OPERAND (aref0, 0);
9662 tree base1 = TREE_OPERAND (aref1, 0);
9663 tree base_offset = build_int_cst (type, 0);
9665 /* If the bases are array references as well, recurse. If the bases
9666 are pointer indirections compute the difference of the pointers.
9667 If the bases are equal, we are set. */
9668 if ((TREE_CODE (base0) == ARRAY_REF
9669 && TREE_CODE (base1) == ARRAY_REF
9670 && (base_offset
9671 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9672 || (INDIRECT_REF_P (base0)
9673 && INDIRECT_REF_P (base1)
9674 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9675 TREE_OPERAND (base0, 0),
9676 TREE_OPERAND (base1, 0))))
9677 || operand_equal_p (base0, base1, 0))
9679 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9680 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9681 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9682 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9683 return fold_build2_loc (loc, PLUS_EXPR, type,
9684 base_offset,
9685 fold_build2_loc (loc, MULT_EXPR, type,
9686 diff, esz));
9688 return NULL_TREE;
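/* Editor's note (worked example, not part of the original source):
   for `int a[10];' with 4-byte int, the GENERIC difference
   `&a[7] - &a[2]' folds via the recursion above to
   0 + (7 - 2) * 4 = 20, the byte offset between the two addresses.  */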
9691 /* If the real or vector real constant CST of type TYPE has an exact
9692 inverse, return it, else return NULL. */
9694 static tree
9695 exact_inverse (tree type, tree cst)
9697 REAL_VALUE_TYPE r;
9698 tree unit_type, *elts;
9699 enum machine_mode mode;
9700 unsigned vec_nelts, i;
9702 switch (TREE_CODE (cst))
9704 case REAL_CST:
9705 r = TREE_REAL_CST (cst);
9707 if (exact_real_inverse (TYPE_MODE (type), &r))
9708 return build_real (type, r);
9710 return NULL_TREE;
9712 case VECTOR_CST:
9713 vec_nelts = VECTOR_CST_NELTS (cst);
9714 elts = XALLOCAVEC (tree, vec_nelts);
9715 unit_type = TREE_TYPE (type);
9716 mode = TYPE_MODE (unit_type);
9718 for (i = 0; i < vec_nelts; i++)
9720 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9721 if (!exact_real_inverse (mode, &r))
9722 return NULL_TREE;
9723 elts[i] = build_real (unit_type, r);
9726 return build_vector (type, elts);
9728 default:
9729 return NULL_TREE;
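/* Editor's sketch (hypothetical example, not part of GCC; assumes
   <math.h> for frexp and ignores exponent-range corner cases such
   as subnormals): a binary floating-point value has an exactly
   representable reciprocal only when it is a power of two, which is
   what exact_real_inverse verifies in the target format.  */
static int
has_exact_inverse_example (double c)
{
  int exp;
  double m = frexp (c, &exp);	/* c = m * 2^exp, 0.5 <= |m| < 1.  */
  /* Power of two iff the fraction is exactly +-0.5:
     4.0 -> 1 (0.25 is exact), 3.0 -> 0 (1/3 rounds).  */
  return c != 0.0 && (m == 0.5 || m == -0.5);
}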
9733 /* Fold a binary expression of code CODE and type TYPE with operands
9734 OP0 and OP1. LOC is the location of the resulting expression.
9735 Return the folded expression if folding is successful. Otherwise,
9736 return NULL_TREE. */
9738 tree
9739 fold_binary_loc (location_t loc,
9740 enum tree_code code, tree type, tree op0, tree op1)
9742 enum tree_code_class kind = TREE_CODE_CLASS (code);
9743 tree arg0, arg1, tem;
9744 tree t1 = NULL_TREE;
9745 bool strict_overflow_p;
9747 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9748 && TREE_CODE_LENGTH (code) == 2
9749 && op0 != NULL_TREE
9750 && op1 != NULL_TREE);
9752 arg0 = op0;
9753 arg1 = op1;
9755 /* Strip any conversions that don't change the mode. This is
9756 safe for every expression, except for a comparison expression
9757 because its signedness is derived from its operands. So, in
9758 the latter case, only strip conversions that don't change the
9759 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9760 preserved.
9762 Note that this is done as an internal manipulation within the
9763 constant folder, in order to find the simplest representation
9764 of the arguments so that their form can be studied. In any
9765 case, the appropriate type conversions should be put back in
9766 the tree that will get out of the constant folder. */
9768 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9770 STRIP_SIGN_NOPS (arg0);
9771 STRIP_SIGN_NOPS (arg1);
9773 else
9775 STRIP_NOPS (arg0);
9776 STRIP_NOPS (arg1);
9779 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9780 constant but we can't do arithmetic on them. */
9781 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9782 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9783 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9784 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9785 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9786 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9788 if (kind == tcc_binary)
9790 /* Make sure type and arg0 have the same saturating flag. */
9791 gcc_assert (TYPE_SATURATING (type)
9792 == TYPE_SATURATING (TREE_TYPE (arg0)));
9793 tem = const_binop (code, arg0, arg1);
9795 else if (kind == tcc_comparison)
9796 tem = fold_relational_const (code, type, arg0, arg1);
9797 else
9798 tem = NULL_TREE;
9800 if (tem != NULL_TREE)
9802 if (TREE_TYPE (tem) != type)
9803 tem = fold_convert_loc (loc, type, tem);
9804 return tem;
9808 /* If this is a commutative operation, and ARG0 is a constant, move it
9809 to ARG1 to reduce the number of tests below. */
9810 if (commutative_tree_code (code)
9811 && tree_swap_operands_p (arg0, arg1, true))
9812 return fold_build2_loc (loc, code, type, op1, op0);
9814 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9816 First check for cases where an arithmetic operation is applied to a
9817 compound, conditional, or comparison operation. Push the arithmetic
9818 operation inside the compound or conditional to see if any folding
9819 can then be done. Convert comparison to conditional for this purpose.
9820 This also optimizes non-constant cases that used to be done in
9821 expand_expr.
9823 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9824 where one of the operands is a comparison and the other is a comparison, a
9825 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9826 code below would make the expression more complex. Change it to a
9827 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9828 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9830 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9831 || code == EQ_EXPR || code == NE_EXPR)
9832 && TREE_CODE (type) != VECTOR_TYPE
9833 && ((truth_value_p (TREE_CODE (arg0))
9834 && (truth_value_p (TREE_CODE (arg1))
9835 || (TREE_CODE (arg1) == BIT_AND_EXPR
9836 && integer_onep (TREE_OPERAND (arg1, 1)))))
9837 || (truth_value_p (TREE_CODE (arg1))
9838 && (truth_value_p (TREE_CODE (arg0))
9839 || (TREE_CODE (arg0) == BIT_AND_EXPR
9840 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9842 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9843 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9844 : TRUTH_XOR_EXPR,
9845 boolean_type_node,
9846 fold_convert_loc (loc, boolean_type_node, arg0),
9847 fold_convert_loc (loc, boolean_type_node, arg1));
9849 if (code == EQ_EXPR)
9850 tem = invert_truthvalue_loc (loc, tem);
9852 return fold_convert_loc (loc, type, tem);
9855 if (TREE_CODE_CLASS (code) == tcc_binary
9856 || TREE_CODE_CLASS (code) == tcc_comparison)
9858 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9860 tem = fold_build2_loc (loc, code, type,
9861 fold_convert_loc (loc, TREE_TYPE (op0),
9862 TREE_OPERAND (arg0, 1)), op1);
9863 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9864 tem);
9866 if (TREE_CODE (arg1) == COMPOUND_EXPR
9867 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9869 tem = fold_build2_loc (loc, code, type, op0,
9870 fold_convert_loc (loc, TREE_TYPE (op1),
9871 TREE_OPERAND (arg1, 1)));
9872 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9873 tem);
9876 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9878 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9879 arg0, arg1,
9880 /*cond_first_p=*/1);
9881 if (tem != NULL_TREE)
9882 return tem;
9885 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9887 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9888 arg1, arg0,
9889 /*cond_first_p=*/0);
9890 if (tem != NULL_TREE)
9891 return tem;
9895 switch (code)
9897 case MEM_REF:
9898 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9899 if (TREE_CODE (arg0) == ADDR_EXPR
9900 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9902 tree iref = TREE_OPERAND (arg0, 0);
9903 return fold_build2 (MEM_REF, type,
9904 TREE_OPERAND (iref, 0),
9905 int_const_binop (PLUS_EXPR, arg1,
9906 TREE_OPERAND (iref, 1)));
9909 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9910 if (TREE_CODE (arg0) == ADDR_EXPR
9911 && handled_component_p (TREE_OPERAND (arg0, 0)))
9913 tree base;
9914 HOST_WIDE_INT coffset;
9915 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9916 &coffset);
9917 if (!base)
9918 return NULL_TREE;
9919 return fold_build2 (MEM_REF, type,
9920 build_fold_addr_expr (base),
9921 int_const_binop (PLUS_EXPR, arg1,
9922 size_int (coffset)));
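/* Editor's note (illustrative): the two transforms above turn
   MEM[&MEM[p, 4], 8] into MEM[p, 12] and, for `struct S s;',
   MEM[&s.f, 8] into MEM[&s, offsetof (S, f) + 8].  */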
9925 return NULL_TREE;
9927 case POINTER_PLUS_EXPR:
9928 /* 0 +p index -> (type)index */
9929 if (integer_zerop (arg0))
9930 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9932 /* PTR +p 0 -> PTR */
9933 if (integer_zerop (arg1))
9934 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9936 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9937 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9938 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9939 return fold_convert_loc (loc, type,
9940 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9941 fold_convert_loc (loc, sizetype,
9942 arg1),
9943 fold_convert_loc (loc, sizetype,
9944 arg0)));
9946 /* (PTR +p B) +p A -> PTR +p (B + A) */
9947 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9949 tree inner;
9950 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9951 tree arg00 = TREE_OPERAND (arg0, 0);
9952 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9953 arg01, fold_convert_loc (loc, sizetype, arg1));
9954 return fold_convert_loc (loc, type,
9955 fold_build_pointer_plus_loc (loc,
9956 arg00, inner));
9959 /* PTR_CST +p CST -> CST1 (the two constants fold to a single constant) */
9960 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9961 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9962 fold_convert_loc (loc, type, arg1));
9964 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9965 of the array. The loop optimizer sometimes produces this type of
9966 expression. */
9967 if (TREE_CODE (arg0) == ADDR_EXPR)
9969 tem = try_move_mult_to_index (loc, arg0,
9970 fold_convert_loc (loc,
9971 ssizetype, arg1));
9972 if (tem)
9973 return fold_convert_loc (loc, type, tem);
9976 return NULL_TREE;
9978 case PLUS_EXPR:
9979 /* A + (-B) -> A - B */
9980 if (TREE_CODE (arg1) == NEGATE_EXPR)
9981 return fold_build2_loc (loc, MINUS_EXPR, type,
9982 fold_convert_loc (loc, type, arg0),
9983 fold_convert_loc (loc, type,
9984 TREE_OPERAND (arg1, 0)));
9985 /* (-A) + B -> B - A */
9986 if (TREE_CODE (arg0) == NEGATE_EXPR
9987 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9988 return fold_build2_loc (loc, MINUS_EXPR, type,
9989 fold_convert_loc (loc, type, arg1),
9990 fold_convert_loc (loc, type,
9991 TREE_OPERAND (arg0, 0)));
9993 if (INTEGRAL_TYPE_P (type))
9995 /* Convert ~A + 1 to -A. */
9996 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9997 && integer_onep (arg1))
9998 return fold_build1_loc (loc, NEGATE_EXPR, type,
9999 fold_convert_loc (loc, type,
10000 TREE_OPERAND (arg0, 0)));
10002 /* ~X + X is -1. */
10003 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10004 && !TYPE_OVERFLOW_TRAPS (type))
10006 tree tem = TREE_OPERAND (arg0, 0);
10008 STRIP_NOPS (tem);
10009 if (operand_equal_p (tem, arg1, 0))
10011 t1 = build_int_cst_type (type, -1);
10012 return omit_one_operand_loc (loc, type, t1, arg1);
10016 /* X + ~X is -1. */
10017 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10018 && !TYPE_OVERFLOW_TRAPS (type))
10020 tree tem = TREE_OPERAND (arg1, 0);
10022 STRIP_NOPS (tem);
10023 if (operand_equal_p (arg0, tem, 0))
10025 t1 = build_int_cst_type (type, -1);
10026 return omit_one_operand_loc (loc, type, t1, arg0);
10030 /* X + (X / CST) * -CST is X % CST. */
10031 if (TREE_CODE (arg1) == MULT_EXPR
10032 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10033 && operand_equal_p (arg0,
10034 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10036 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10037 tree cst1 = TREE_OPERAND (arg1, 1);
10038 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10039 cst1, cst0);
10040 if (sum && integer_zerop (sum))
10041 return fold_convert_loc (loc, type,
10042 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10043 TREE_TYPE (arg0), arg0,
10044 cst0));
10048 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10049 one. Make sure the type is not saturating and has the signedness of
10050 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10051 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10052 if ((TREE_CODE (arg0) == MULT_EXPR
10053 || TREE_CODE (arg1) == MULT_EXPR)
10054 && !TYPE_SATURATING (type)
10055 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10056 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10057 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10059 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10060 if (tem)
10061 return tem;
10064 if (! FLOAT_TYPE_P (type))
10066 if (integer_zerop (arg1))
10067 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10069 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10070 with a constant, and the two constants have no bits in common,
10071 we should treat this as a BIT_IOR_EXPR since this may produce more
10072 simplifications. */
10073 if (TREE_CODE (arg0) == BIT_AND_EXPR
10074 && TREE_CODE (arg1) == BIT_AND_EXPR
10075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10076 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10077 && integer_zerop (const_binop (BIT_AND_EXPR,
10078 TREE_OPERAND (arg0, 1),
10079 TREE_OPERAND (arg1, 1))))
10081 code = BIT_IOR_EXPR;
10082 goto bit_ior;
10085 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10086 (plus (plus (mult) (mult)) (foo)) so that we can
10087 take advantage of the factoring cases below. */
10088 if (TYPE_OVERFLOW_WRAPS (type)
10089 && (((TREE_CODE (arg0) == PLUS_EXPR
10090 || TREE_CODE (arg0) == MINUS_EXPR)
10091 && TREE_CODE (arg1) == MULT_EXPR)
10092 || ((TREE_CODE (arg1) == PLUS_EXPR
10093 || TREE_CODE (arg1) == MINUS_EXPR)
10094 && TREE_CODE (arg0) == MULT_EXPR)))
10096 tree parg0, parg1, parg, marg;
10097 enum tree_code pcode;
10099 if (TREE_CODE (arg1) == MULT_EXPR)
10100 parg = arg0, marg = arg1;
10101 else
10102 parg = arg1, marg = arg0;
10103 pcode = TREE_CODE (parg);
10104 parg0 = TREE_OPERAND (parg, 0);
10105 parg1 = TREE_OPERAND (parg, 1);
10106 STRIP_NOPS (parg0);
10107 STRIP_NOPS (parg1);
10109 if (TREE_CODE (parg0) == MULT_EXPR
10110 && TREE_CODE (parg1) != MULT_EXPR)
10111 return fold_build2_loc (loc, pcode, type,
10112 fold_build2_loc (loc, PLUS_EXPR, type,
10113 fold_convert_loc (loc, type,
10114 parg0),
10115 fold_convert_loc (loc, type,
10116 marg)),
10117 fold_convert_loc (loc, type, parg1));
10118 if (TREE_CODE (parg0) != MULT_EXPR
10119 && TREE_CODE (parg1) == MULT_EXPR)
10120 return
10121 fold_build2_loc (loc, PLUS_EXPR, type,
10122 fold_convert_loc (loc, type, parg0),
10123 fold_build2_loc (loc, pcode, type,
10124 fold_convert_loc (loc, type, marg),
10125 fold_convert_loc (loc, type,
10126 parg1)));
10129 else
10131 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10132 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10135 /* Likewise if the operands are reversed. */
10136 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10139 /* Convert X + -C into X - C. */
10140 if (TREE_CODE (arg1) == REAL_CST
10141 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10143 tem = fold_negate_const (arg1, type);
10144 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10145 return fold_build2_loc (loc, MINUS_EXPR, type,
10146 fold_convert_loc (loc, type, arg0),
10147 fold_convert_loc (loc, type, tem));
10150 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10151 to __complex__ ( x, y ). This is not the same for SNaNs or
10152 if signed zeros are involved. */
10153 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10154 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10155 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10157 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10158 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10159 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10160 bool arg0rz = false, arg0iz = false;
10161 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10162 || (arg0i && (arg0iz = real_zerop (arg0i))))
10164 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10165 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10166 if (arg0rz && arg1i && real_zerop (arg1i))
10168 tree rp = arg1r ? arg1r
10169 : build1 (REALPART_EXPR, rtype, arg1);
10170 tree ip = arg0i ? arg0i
10171 : build1 (IMAGPART_EXPR, rtype, arg0);
10172 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10174 else if (arg0iz && arg1r && real_zerop (arg1r))
10176 tree rp = arg0r ? arg0r
10177 : build1 (REALPART_EXPR, rtype, arg0);
10178 tree ip = arg1i ? arg1i
10179 : build1 (IMAGPART_EXPR, rtype, arg1);
10180 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10185 if (flag_unsafe_math_optimizations
10186 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10187 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10188 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10189 return tem;
10191 /* Convert x+x into x*2.0. */
10192 if (operand_equal_p (arg0, arg1, 0)
10193 && SCALAR_FLOAT_TYPE_P (type))
10194 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10195 build_real (type, dconst2));
10197 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10198 We associate floats only if the user has specified
10199 -fassociative-math. */
10200 if (flag_associative_math
10201 && TREE_CODE (arg1) == PLUS_EXPR
10202 && TREE_CODE (arg0) != MULT_EXPR)
10204 tree tree10 = TREE_OPERAND (arg1, 0);
10205 tree tree11 = TREE_OPERAND (arg1, 1);
10206 if (TREE_CODE (tree11) == MULT_EXPR
10207 && TREE_CODE (tree10) == MULT_EXPR)
10209 tree tree0;
10210 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10211 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10214 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10215 We associate floats only if the user has specified
10216 -fassociative-math. */
10217 if (flag_associative_math
10218 && TREE_CODE (arg0) == PLUS_EXPR
10219 && TREE_CODE (arg1) != MULT_EXPR)
10221 tree tree00 = TREE_OPERAND (arg0, 0);
10222 tree tree01 = TREE_OPERAND (arg0, 1);
10223 if (TREE_CODE (tree01) == MULT_EXPR
10224 && TREE_CODE (tree00) == MULT_EXPR)
10226 tree tree0;
10227 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10228 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10233 bit_rotate:
10234 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10235 is a rotate of A by C1 bits. */
10236 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10237 is a rotate of A by B bits. */
10239 enum tree_code code0, code1;
10240 tree rtype;
10241 code0 = TREE_CODE (arg0);
10242 code1 = TREE_CODE (arg1);
10243 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10244 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10245 && operand_equal_p (TREE_OPERAND (arg0, 0),
10246 TREE_OPERAND (arg1, 0), 0)
10247 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10248 TYPE_UNSIGNED (rtype))
10249 /* Only create rotates in complete modes. Other cases are not
10250 expanded properly. */
10251 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10253 tree tree01, tree11;
10254 enum tree_code code01, code11;
10256 tree01 = TREE_OPERAND (arg0, 1);
10257 tree11 = TREE_OPERAND (arg1, 1);
10258 STRIP_NOPS (tree01);
10259 STRIP_NOPS (tree11);
10260 code01 = TREE_CODE (tree01);
10261 code11 = TREE_CODE (tree11);
10262 if (code01 == INTEGER_CST
10263 && code11 == INTEGER_CST
10264 && TREE_INT_CST_HIGH (tree01) == 0
10265 && TREE_INT_CST_HIGH (tree11) == 0
10266 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10267 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10269 tem = build2_loc (loc, LROTATE_EXPR,
10270 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10271 TREE_OPERAND (arg0, 0),
10272 code0 == LSHIFT_EXPR ? tree01 : tree11);
10273 return fold_convert_loc (loc, type, tem);
10275 else if (code11 == MINUS_EXPR)
10277 tree tree110, tree111;
10278 tree110 = TREE_OPERAND (tree11, 0);
10279 tree111 = TREE_OPERAND (tree11, 1);
10280 STRIP_NOPS (tree110);
10281 STRIP_NOPS (tree111);
10282 if (TREE_CODE (tree110) == INTEGER_CST
10283 && 0 == compare_tree_int (tree110,
10284 TYPE_PRECISION
10285 (TREE_TYPE (TREE_OPERAND
10286 (arg0, 0))))
10287 && operand_equal_p (tree01, tree111, 0))
10288 return
10289 fold_convert_loc (loc, type,
10290 build2 ((code0 == LSHIFT_EXPR
10291 ? LROTATE_EXPR
10292 : RROTATE_EXPR),
10293 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10294 TREE_OPERAND (arg0, 0), tree01));
10296 else if (code01 == MINUS_EXPR)
10298 tree tree010, tree011;
10299 tree010 = TREE_OPERAND (tree01, 0);
10300 tree011 = TREE_OPERAND (tree01, 1);
10301 STRIP_NOPS (tree010);
10302 STRIP_NOPS (tree011);
10303 if (TREE_CODE (tree010) == INTEGER_CST
10304 && 0 == compare_tree_int (tree010,
10305 TYPE_PRECISION
10306 (TREE_TYPE (TREE_OPERAND
10307 (arg0, 0))))
10308 && operand_equal_p (tree11, tree011, 0))
10309 return fold_convert_loc
10310 (loc, type,
10311 build2 ((code0 != LSHIFT_EXPR
10312 ? LROTATE_EXPR
10313 : RROTATE_EXPR),
10314 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10315 TREE_OPERAND (arg0, 0), tree11));
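/* Editor's note (illustrative): for unsigned 32-bit x, the cases
   above turn
     (x << 3) + (x >> 29)        into  x lrotate 3
     (x << b) + (x >> (32 - b))  into  x lrotate b
   (PLUS_EXPR behaves like BIT_IOR_EXPR here because the shifted
   bit ranges cannot overlap).  */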
10320 associate:
10321 /* In most languages, we can't associate operations on floats through
10322 parentheses. Rather than remember where the parentheses were, we
10323 don't associate floats at all, unless the user has specified
10324 -fassociative-math.
10325 And, we need to make sure type is not saturating. */
10327 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10328 && !TYPE_SATURATING (type))
10330 tree var0, con0, lit0, minus_lit0;
10331 tree var1, con1, lit1, minus_lit1;
10332 bool ok = true;
10334 /* Split both trees into variables, constants, and literals. Then
10335 associate each group together, the constants with literals,
10336 then the result with variables. This increases the chances of
10337 literals being recombined later and of generating relocatable
10338 expressions for the sum of a constant and literal. */
10339 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10340 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10341 code == MINUS_EXPR);
10343 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10344 if (code == MINUS_EXPR)
10345 code = PLUS_EXPR;
10347 /* With undefined overflow we can only associate constants with one
10348 variable, and constants whose association doesn't overflow. */
10349 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10350 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10352 if (var0 && var1)
10354 tree tmp0 = var0;
10355 tree tmp1 = var1;
10357 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10358 tmp0 = TREE_OPERAND (tmp0, 0);
10359 if (CONVERT_EXPR_P (tmp0)
10360 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10361 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10362 <= TYPE_PRECISION (type)))
10363 tmp0 = TREE_OPERAND (tmp0, 0);
10364 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10365 tmp1 = TREE_OPERAND (tmp1, 0);
10366 if (CONVERT_EXPR_P (tmp1)
10367 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10368 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10369 <= TYPE_PRECISION (type)))
10370 tmp1 = TREE_OPERAND (tmp1, 0);
10371 /* The only case we can still associate with two variables
10372 is if they are the same, modulo negation and bit-pattern
10373 preserving conversions. */
10374 if (!operand_equal_p (tmp0, tmp1, 0))
10375 ok = false;
10378 if (ok && lit0 && lit1)
10380 tree tmp0 = fold_convert (type, lit0);
10381 tree tmp1 = fold_convert (type, lit1);
10383 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10384 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10385 ok = false;
10389 /* Only do something if we found more than two objects. Otherwise,
10390 nothing has changed and we risk infinite recursion. */
10391 if (ok
10392 && (2 < ((var0 != 0) + (var1 != 0)
10393 + (con0 != 0) + (con1 != 0)
10394 + (lit0 != 0) + (lit1 != 0)
10395 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10397 var0 = associate_trees (loc, var0, var1, code, type);
10398 con0 = associate_trees (loc, con0, con1, code, type);
10399 lit0 = associate_trees (loc, lit0, lit1, code, type);
10400 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10402 /* Preserve the MINUS_EXPR if the negative part of the literal is
10403 greater than the positive part. Otherwise, the multiplicative
10404 folding code (i.e. extract_muldiv) may be fooled in case
10405 unsigned constants are subtracted, like in the following
10406 example: ((X*2 + 4) - 8U)/2. */
10407 if (minus_lit0 && lit0)
10409 if (TREE_CODE (lit0) == INTEGER_CST
10410 && TREE_CODE (minus_lit0) == INTEGER_CST
10411 && tree_int_cst_lt (lit0, minus_lit0))
10413 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10414 MINUS_EXPR, type);
10415 lit0 = 0;
10417 else
10419 lit0 = associate_trees (loc, lit0, minus_lit0,
10420 MINUS_EXPR, type);
10421 minus_lit0 = 0;
10424 if (minus_lit0)
10426 if (con0 == 0)
10427 return
10428 fold_convert_loc (loc, type,
10429 associate_trees (loc, var0, minus_lit0,
10430 MINUS_EXPR, type));
10431 else
10433 con0 = associate_trees (loc, con0, minus_lit0,
10434 MINUS_EXPR, type);
10435 return
10436 fold_convert_loc (loc, type,
10437 associate_trees (loc, var0, con0,
10438 PLUS_EXPR, type));
10442 con0 = associate_trees (loc, con0, lit0, code, type);
10443 return
10444 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10445 code, type));
10449 return NULL_TREE;
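/* Editor's note (illustrative): for unsigned (wrapping) x and y,
   the associate logic above rewrites `(x + 4) + (y + 8)' as
   `(x + y) + 12': split_tree yields var0 = x, lit0 = 4, var1 = y,
   lit1 = 8, and the literals are combined first.  With undefined
   overflow the checks above restrict this to provably safe cases.  */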
10451 case MINUS_EXPR:
10452 /* Pointer simplifications for subtraction, simple reassociations. */
10453 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10455 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10456 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10457 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10459 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10460 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10461 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10462 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10463 return fold_build2_loc (loc, PLUS_EXPR, type,
10464 fold_build2_loc (loc, MINUS_EXPR, type,
10465 arg00, arg10),
10466 fold_build2_loc (loc, MINUS_EXPR, type,
10467 arg01, arg11));
10469 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10470 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10472 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10473 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10474 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10475 fold_convert_loc (loc, type, arg1));
10476 if (tmp)
10477 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10480 /* A - (-B) -> A + B */
10481 if (TREE_CODE (arg1) == NEGATE_EXPR)
10482 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10483 fold_convert_loc (loc, type,
10484 TREE_OPERAND (arg1, 0)));
10485 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10486 if (TREE_CODE (arg0) == NEGATE_EXPR
10487 && (FLOAT_TYPE_P (type)
10488 || INTEGRAL_TYPE_P (type))
10489 && negate_expr_p (arg1)
10490 && reorder_operands_p (arg0, arg1))
10491 return fold_build2_loc (loc, MINUS_EXPR, type,
10492 fold_convert_loc (loc, type,
10493 negate_expr (arg1)),
10494 fold_convert_loc (loc, type,
10495 TREE_OPERAND (arg0, 0)));
10496 /* Convert -A - 1 to ~A. */
10497 if (INTEGRAL_TYPE_P (type)
10498 && TREE_CODE (arg0) == NEGATE_EXPR
10499 && integer_onep (arg1)
10500 && !TYPE_OVERFLOW_TRAPS (type))
10501 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10502 fold_convert_loc (loc, type,
10503 TREE_OPERAND (arg0, 0)));
10505 /* Convert -1 - A to ~A. */
10506 if (INTEGRAL_TYPE_P (type)
10507 && integer_all_onesp (arg0))
10508 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10511 /* X - (X / CST) * CST is X % CST. */
10512 if (INTEGRAL_TYPE_P (type)
10513 && TREE_CODE (arg1) == MULT_EXPR
10514 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10515 && operand_equal_p (arg0,
10516 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10517 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10518 TREE_OPERAND (arg1, 1), 0))
10519 return
10520 fold_convert_loc (loc, type,
10521 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10522 arg0, TREE_OPERAND (arg1, 1)));
10524 if (! FLOAT_TYPE_P (type))
10526 if (integer_zerop (arg0))
10527 return negate_expr (fold_convert_loc (loc, type, arg1));
10528 if (integer_zerop (arg1))
10529 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10531 /* Fold A - (A & B) into ~B & A. */
10532 if (!TREE_SIDE_EFFECTS (arg0)
10533 && TREE_CODE (arg1) == BIT_AND_EXPR)
10535 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10537 tree arg10 = fold_convert_loc (loc, type,
10538 TREE_OPERAND (arg1, 0));
10539 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10540 fold_build1_loc (loc, BIT_NOT_EXPR,
10541 type, arg10),
10542 fold_convert_loc (loc, type, arg0));
10544 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10546 tree arg11 = fold_convert_loc (loc,
10547 type, TREE_OPERAND (arg1, 1));
10548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10549 fold_build1_loc (loc, BIT_NOT_EXPR,
10550 type, arg11),
10551 fold_convert_loc (loc, type, arg0));
10555 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10556 any power of 2 minus 1. */
10557 if (TREE_CODE (arg0) == BIT_AND_EXPR
10558 && TREE_CODE (arg1) == BIT_AND_EXPR
10559 && operand_equal_p (TREE_OPERAND (arg0, 0),
10560 TREE_OPERAND (arg1, 0), 0))
10562 tree mask0 = TREE_OPERAND (arg0, 1);
10563 tree mask1 = TREE_OPERAND (arg1, 1);
10564 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10566 if (operand_equal_p (tem, mask1, 0))
10568 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10569 TREE_OPERAND (arg0, 0), mask1);
10570 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10575 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10576 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10579 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10580 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10581 (-ARG1 + ARG0) reduces to -ARG1. */
10582 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10583 return negate_expr (fold_convert_loc (loc, type, arg1));
10585 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10586 __complex__ ( x, -y ). This is not the same for SNaNs or if
10587 signed zeros are involved. */
10588 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10589 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10590 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10592 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10593 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10594 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10595 bool arg0rz = false, arg0iz = false;
10596 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10597 || (arg0i && (arg0iz = real_zerop (arg0i))))
10599 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10600 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10601 if (arg0rz && arg1i && real_zerop (arg1i))
10603 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10604 arg1r ? arg1r
10605 : build1 (REALPART_EXPR, rtype, arg1));
10606 tree ip = arg0i ? arg0i
10607 : build1 (IMAGPART_EXPR, rtype, arg0);
10608 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10610 else if (arg0iz && arg1r && real_zerop (arg1r))
10612 tree rp = arg0r ? arg0r
10613 : build1 (REALPART_EXPR, rtype, arg0);
10614 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10615 arg1i ? arg1i
10616 : build1 (IMAGPART_EXPR, rtype, arg1));
10617 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10622 /* Fold &x - &x. This can happen from &x.foo - &x.
10623 This is unsafe for certain floats even in non-IEEE formats.
10624 In IEEE, it is unsafe because it does wrong for NaNs.
10625 Also note that operand_equal_p is always false if an operand
10626 is volatile. */
10628 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10629 && operand_equal_p (arg0, arg1, 0))
10630 return build_zero_cst (type);
10632 /* A - B -> A + (-B) if B is easily negatable. */
10633 if (negate_expr_p (arg1)
10634 && ((FLOAT_TYPE_P (type)
10635 /* Avoid this transformation if B is a positive REAL_CST. */
10636 && (TREE_CODE (arg1) != REAL_CST
10637 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10638 || INTEGRAL_TYPE_P (type)))
10639 return fold_build2_loc (loc, PLUS_EXPR, type,
10640 fold_convert_loc (loc, type, arg0),
10641 fold_convert_loc (loc, type,
10642 negate_expr (arg1)));
10644 /* Try folding difference of addresses. */
10646 HOST_WIDE_INT diff;
10648 if ((TREE_CODE (arg0) == ADDR_EXPR
10649 || TREE_CODE (arg1) == ADDR_EXPR)
10650 && ptr_difference_const (arg0, arg1, &diff))
10651 return build_int_cst_type (type, diff);
10654 /* Fold &a[i] - &a[j] to i-j. */
10655 if (TREE_CODE (arg0) == ADDR_EXPR
10656 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10657 && TREE_CODE (arg1) == ADDR_EXPR
10658 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10660 tree tem = fold_addr_of_array_ref_difference (loc, type,
10661 TREE_OPERAND (arg0, 0),
10662 TREE_OPERAND (arg1, 0));
10663 if (tem)
10664 return tem;
10667 if (FLOAT_TYPE_P (type)
10668 && flag_unsafe_math_optimizations
10669 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10670 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10671 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10672 return tem;
10674 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10675 one. Make sure the type is not saturating and has the signedness of
10676 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10677 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10678 if ((TREE_CODE (arg0) == MULT_EXPR
10679 || TREE_CODE (arg1) == MULT_EXPR)
10680 && !TYPE_SATURATING (type)
10681 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10682 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10683 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10685 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10686 if (tem)
10687 return tem;
10690 goto associate;
10692 case MULT_EXPR:
10693 /* (-A) * (-B) -> A * B */
10694 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10695 return fold_build2_loc (loc, MULT_EXPR, type,
10696 fold_convert_loc (loc, type,
10697 TREE_OPERAND (arg0, 0)),
10698 fold_convert_loc (loc, type,
10699 negate_expr (arg1)));
10700 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10701 return fold_build2_loc (loc, MULT_EXPR, type,
10702 fold_convert_loc (loc, type,
10703 negate_expr (arg0)),
10704 fold_convert_loc (loc, type,
10705 TREE_OPERAND (arg1, 0)));
10707 if (! FLOAT_TYPE_P (type))
10709 if (integer_zerop (arg1))
10710 return omit_one_operand_loc (loc, type, arg1, arg0);
10711 if (integer_onep (arg1))
10712 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10713 /* Transform x * -1 into -x. Make sure to do the negation
10714 on the original operand with conversions not stripped
10715 because we can only strip non-sign-changing conversions. */
10716 if (integer_all_onesp (arg1))
10717 return fold_convert_loc (loc, type, negate_expr (op0));
10718 /* Transform x * -C into -x * C if x is easily negatable. */
10719 if (TREE_CODE (arg1) == INTEGER_CST
10720 && tree_int_cst_sgn (arg1) == -1
10721 && negate_expr_p (arg0)
10722 && (tem = negate_expr (arg1)) != arg1
10723 && !TREE_OVERFLOW (tem))
10724 return fold_build2_loc (loc, MULT_EXPR, type,
10725 fold_convert_loc (loc, type,
10726 negate_expr (arg0)),
10727 tem);
10729 /* (a * (1 << b)) is (a << b) */
10730 if (TREE_CODE (arg1) == LSHIFT_EXPR
10731 && integer_onep (TREE_OPERAND (arg1, 0)))
10732 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10733 TREE_OPERAND (arg1, 1));
10734 if (TREE_CODE (arg0) == LSHIFT_EXPR
10735 && integer_onep (TREE_OPERAND (arg0, 0)))
10736 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10737 TREE_OPERAND (arg0, 1));
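/* A standalone sketch, not part of this file, of the fold above:
   multiplying by 1 << b is exactly the left shift, as long as the
   shift count stays in range for the type.  */
#include <assert.h>

static void
check_mult_by_shifted_one (unsigned a, unsigned b)
{
  assert (b < 32);			/* Keep the shift well-defined.  */
  assert (a * (1u << b) == a << b);
}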
10739 /* (A + A) * C -> A * 2 * C */
10740 if (TREE_CODE (arg0) == PLUS_EXPR
10741 && TREE_CODE (arg1) == INTEGER_CST
10742 && operand_equal_p (TREE_OPERAND (arg0, 0),
10743 TREE_OPERAND (arg0, 1), 0))
10744 return fold_build2_loc (loc, MULT_EXPR, type,
10745 omit_one_operand_loc (loc, type,
10746 TREE_OPERAND (arg0, 0),
10747 TREE_OPERAND (arg0, 1)),
10748 fold_build2_loc (loc, MULT_EXPR, type,
10749 build_int_cst (type, 2) , arg1));
10751 strict_overflow_p = false;
10752 if (TREE_CODE (arg1) == INTEGER_CST
10753 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10754 &strict_overflow_p)))
10756 if (strict_overflow_p)
10757 fold_overflow_warning (("assuming signed overflow does not "
10758 "occur when simplifying "
10759 "multiplication"),
10760 WARN_STRICT_OVERFLOW_MISC);
10761 return fold_convert_loc (loc, type, tem);
10764 /* Optimize z * conj(z) for integer complex numbers. */
10765 if (TREE_CODE (arg0) == CONJ_EXPR
10766 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10767 return fold_mult_zconjz (loc, type, arg1);
10768 if (TREE_CODE (arg1) == CONJ_EXPR
10769 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10770 return fold_mult_zconjz (loc, type, arg0);
10772 else
10774 /* Maybe fold x * 0 to 0. The expressions aren't the same
10775 when x is NaN, since x * 0 is also NaN. Nor are they the
10776 same in modes with signed zeros, since multiplying a
10777 negative value by 0 gives -0, not +0. */
10778 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10780 && real_zerop (arg1))
10781 return omit_one_operand_loc (loc, type, arg1, arg0);
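/* A minimal standalone sketch, not part of this file, of why x * 0.0
   cannot be folded to 0.0 once NaNs or signed zeros are honored:
   the NaN survives the multiply, and a negative x produces -0.0.  */
#include <assert.h>
#include <math.h>

static void
show_mult_zero_hazards (void)
{
  assert (isnan (NAN * 0.0));		/* NaN * 0.0 is NaN, not 0.0.  */
  assert (signbit (-1.0 * 0.0));	/* -1.0 * 0.0 is -0.0, not +0.0.  */
}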
10782 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10783 Likewise for complex arithmetic with signed zeros. */
10784 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10785 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10786 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10787 && real_onep (arg1))
10788 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10790 /* Transform x * -1.0 into -x. */
10791 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10792 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10793 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10794 && real_minus_onep (arg1))
10795 return fold_convert_loc (loc, type, negate_expr (arg0));
10797 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10798 the result for floating point types due to rounding so it is applied
10799 only if -fassociative-math was specified. */
10800 if (flag_associative_math
10801 && TREE_CODE (arg0) == RDIV_EXPR
10802 && TREE_CODE (arg1) == REAL_CST
10803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10805 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10806 arg1);
10807 if (tem)
10808 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10809 TREE_OPERAND (arg0, 1));
10812 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10813 if (operand_equal_p (arg0, arg1, 0))
10815 tree tem = fold_strip_sign_ops (arg0);
10816 if (tem != NULL_TREE)
10818 tem = fold_convert_loc (loc, type, tem);
10819 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10823 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10824 This is not the same for NaNs or if signed zeros are
10825 involved. */
10826 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10827 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10828 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10829 && TREE_CODE (arg1) == COMPLEX_CST
10830 && real_zerop (TREE_REALPART (arg1)))
10832 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10833 if (real_onep (TREE_IMAGPART (arg1)))
10834 return
10835 fold_build2_loc (loc, COMPLEX_EXPR, type,
10836 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10837 rtype, arg0)),
10838 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10839 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10840 return
10841 fold_build2_loc (loc, COMPLEX_EXPR, type,
10842 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10843 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10844 rtype, arg0)));
10847 /* Optimize z * conj(z) for floating point complex numbers.
10848 Guarded by flag_unsafe_math_optimizations as non-finite
10849 imaginary components don't produce scalar results. */
10850 if (flag_unsafe_math_optimizations
10851 && TREE_CODE (arg0) == CONJ_EXPR
10852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10853 return fold_mult_zconjz (loc, type, arg1);
10854 if (flag_unsafe_math_optimizations
10855 && TREE_CODE (arg1) == CONJ_EXPR
10856 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10857 return fold_mult_zconjz (loc, type, arg0);
10859 if (flag_unsafe_math_optimizations)
10861 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10862 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10864 /* Optimizations of root(...)*root(...). */
10865 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10867 tree rootfn, arg;
10868 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10869 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10871 /* Optimize sqrt(x)*sqrt(x) as x. */
10872 if (BUILTIN_SQRT_P (fcode0)
10873 && operand_equal_p (arg00, arg10, 0)
10874 && ! HONOR_SNANS (TYPE_MODE (type)))
10875 return arg00;
10877 /* Optimize root(x)*root(y) as root(x*y). */
10878 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10879 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10880 return build_call_expr_loc (loc, rootfn, 1, arg);
10883 /* Optimize expN(x)*expN(y) as expN(x+y). */
10884 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10886 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10887 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10888 CALL_EXPR_ARG (arg0, 0),
10889 CALL_EXPR_ARG (arg1, 0));
10890 return build_call_expr_loc (loc, expfn, 1, arg);
10893 /* Optimizations of pow(...)*pow(...). */
10894 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10895 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10896 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10898 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10899 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10900 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10901 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10903 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10904 if (operand_equal_p (arg01, arg11, 0))
10906 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10907 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10908 arg00, arg10);
10909 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10912 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10913 if (operand_equal_p (arg00, arg10, 0))
10915 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10916 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10917 arg01, arg11);
10918 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
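/* A standalone numerical sketch, not part of this file, of the pow
   folds above, assuming x > 0 and moderate exponents: the identity
   holds mathematically but only approximately in floating point,
   which is why GCC guards it with unsafe-math optimizations.  */
#include <assert.h>
#include <math.h>

static void
check_pow_product (double x, double y, double z)
{
  double lhs = pow (x, y) * pow (x, z);
  double rhs = pow (x, y + z);
  assert (fabs (lhs - rhs) <= 1e-12 * fabs (rhs));	/* Loose tolerance.  */
}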
10922 /* Optimize tan(x)*cos(x) as sin(x). */
10923 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10924 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10925 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10926 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10927 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10928 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10929 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10930 CALL_EXPR_ARG (arg1, 0), 0))
10932 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10934 if (sinfn != NULL_TREE)
10935 return build_call_expr_loc (loc, sinfn, 1,
10936 CALL_EXPR_ARG (arg0, 0));
10939 /* Optimize x*pow(x,c) as pow(x,c+1). */
10940 if (fcode1 == BUILT_IN_POW
10941 || fcode1 == BUILT_IN_POWF
10942 || fcode1 == BUILT_IN_POWL)
10944 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10945 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10946 if (TREE_CODE (arg11) == REAL_CST
10947 && !TREE_OVERFLOW (arg11)
10948 && operand_equal_p (arg0, arg10, 0))
10950 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10951 REAL_VALUE_TYPE c;
10952 tree arg;
10954 c = TREE_REAL_CST (arg11);
10955 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10956 arg = build_real (type, c);
10957 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10961 /* Optimize pow(x,c)*x as pow(x,c+1). */
10962 if (fcode0 == BUILT_IN_POW
10963 || fcode0 == BUILT_IN_POWF
10964 || fcode0 == BUILT_IN_POWL)
10966 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10967 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10968 if (TREE_CODE (arg01) == REAL_CST
10969 && !TREE_OVERFLOW (arg01)
10970 && operand_equal_p (arg1, arg00, 0))
10972 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10973 REAL_VALUE_TYPE c;
10974 tree arg;
10976 c = TREE_REAL_CST (arg01);
10977 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10978 arg = build_real (type, c);
10979 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10983 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10984 if (!in_gimple_form
10985 && optimize
10986 && operand_equal_p (arg0, arg1, 0))
10988 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10990 if (powfn)
10992 tree arg = build_real (type, dconst2);
10993 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10998 goto associate;
11000 case BIT_IOR_EXPR:
11001 bit_ior:
11002 if (integer_all_onesp (arg1))
11003 return omit_one_operand_loc (loc, type, arg1, arg0);
11004 if (integer_zerop (arg1))
11005 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11006 if (operand_equal_p (arg0, arg1, 0))
11007 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11009 /* ~X | X is -1. */
11010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11011 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11013 t1 = build_zero_cst (type);
11014 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11015 return omit_one_operand_loc (loc, type, t1, arg1);
11018 /* X | ~X is -1. */
11019 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11020 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11022 t1 = build_zero_cst (type);
11023 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11024 return omit_one_operand_loc (loc, type, t1, arg0);
11027 /* Canonicalize (X & C1) | C2. */
11028 if (TREE_CODE (arg0) == BIT_AND_EXPR
11029 && TREE_CODE (arg1) == INTEGER_CST
11030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11032 double_int c1, c2, c3, msk;
11033 int width = TYPE_PRECISION (type), w;
11034 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11035 c2 = tree_to_double_int (arg1);
11037 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11038 if ((c1 & c2) == c1)
11039 return omit_one_operand_loc (loc, type, arg1,
11040 TREE_OPERAND (arg0, 0));
11042 msk = double_int::mask (width);
11044 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11045 if (msk.and_not (c1 | c2).is_zero ())
11046 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11047 TREE_OPERAND (arg0, 0), arg1);
11049 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11050 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11051 mode which allows further optimizations. */
11052 c1 &= msk;
11053 c2 &= msk;
11054 c3 = c1.and_not (c2);
11055 for (w = BITS_PER_UNIT;
11056 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11057 w <<= 1)
11059 unsigned HOST_WIDE_INT mask
11060 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11061 if (((c1.low | c2.low) & mask) == mask
11062 && (c1.low & ~mask) == 0 && c1.high == 0)
11064 c3 = double_int::from_uhwi (mask);
11065 break;
11068 if (c3 != c1)
11069 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11070 fold_build2_loc (loc, BIT_AND_EXPR, type,
11071 TREE_OPERAND (arg0, 0),
11072 double_int_to_tree (type,
11073 c3)),
11074 arg1);
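/* A standalone sketch, not part of this file, of the canonicalization
   above: mask bits of C1 that C2 already sets are redundant, so
   (X & C1) | C2 equals (X & (C1 & ~C2)) | C2.  */
#include <assert.h>

static void
check_and_ior_canonical (unsigned x, unsigned c1, unsigned c2)
{
  assert (((x & c1) | c2) == ((x & (c1 & ~c2)) | c2));
}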
11077 /* (X & Y) | Y is (X, Y). */
11078 if (TREE_CODE (arg0) == BIT_AND_EXPR
11079 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11080 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11081 /* (X & Y) | X is (Y, X). */
11082 if (TREE_CODE (arg0) == BIT_AND_EXPR
11083 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11084 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11085 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11086 /* X | (X & Y) is (Y, X). */
11087 if (TREE_CODE (arg1) == BIT_AND_EXPR
11088 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11089 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11090 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11091 /* X | (Y & X) is (Y, X). */
11092 if (TREE_CODE (arg1) == BIT_AND_EXPR
11093 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11094 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11095 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11097 /* (X & ~Y) | (~X & Y) is X ^ Y */
11098 if (TREE_CODE (arg0) == BIT_AND_EXPR
11099 && TREE_CODE (arg1) == BIT_AND_EXPR)
11101 tree a0, a1, l0, l1, n0, n1;
11103 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11104 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11106 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11107 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11109 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11110 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11112 if ((operand_equal_p (n0, a0, 0)
11113 && operand_equal_p (n1, a1, 0))
11114 || (operand_equal_p (n0, a1, 0)
11115 && operand_equal_p (n1, a0, 0)))
11116 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11119 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11120 if (t1 != NULL_TREE)
11121 return t1;
11123 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11125 This results in more efficient code for machines without a NAND
11126 instruction. Combine will canonicalize to the first form
11127 which will allow use of NAND instructions provided by the
11128 backend if they exist. */
11129 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11130 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11132 return
11133 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11134 build2 (BIT_AND_EXPR, type,
11135 fold_convert_loc (loc, type,
11136 TREE_OPERAND (arg0, 0)),
11137 fold_convert_loc (loc, type,
11138 TREE_OPERAND (arg1, 0))));
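/* A standalone sketch, not part of this file, of the De Morgan rewrite
   above: ~X | ~Y and ~(X & Y) are the same value bit for bit.  */
#include <assert.h>

static void
check_demorgan_ior (unsigned x, unsigned y)
{
  assert ((~x | ~y) == ~(x & y));
}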
11141 /* See if this can be simplified into a rotate first. If that
11142 is unsuccessful continue in the association code. */
11143 goto bit_rotate;
11145 case BIT_XOR_EXPR:
11146 if (integer_zerop (arg1))
11147 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11148 if (integer_all_onesp (arg1))
11149 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11150 if (operand_equal_p (arg0, arg1, 0))
11151 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11153 /* ~X ^ X is -1. */
11154 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11157 t1 = build_zero_cst (type);
11158 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11159 return omit_one_operand_loc (loc, type, t1, arg1);
11162 /* X ^ ~X is -1. */
11163 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11166 t1 = build_zero_cst (type);
11167 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11168 return omit_one_operand_loc (loc, type, t1, arg0);
11171 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11172 with a constant, and the two constants have no bits in common,
11173 we should treat this as a BIT_IOR_EXPR since this may produce more
11174 simplifications. */
11175 if (TREE_CODE (arg0) == BIT_AND_EXPR
11176 && TREE_CODE (arg1) == BIT_AND_EXPR
11177 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11178 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11179 && integer_zerop (const_binop (BIT_AND_EXPR,
11180 TREE_OPERAND (arg0, 1),
11181 TREE_OPERAND (arg1, 1))))
11183 code = BIT_IOR_EXPR;
11184 goto bit_ior;
11187 /* (X | Y) ^ X -> Y & ~X. */
11188 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11189 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11191 tree t2 = TREE_OPERAND (arg0, 1);
11192 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11193 arg1);
11194 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11195 fold_convert_loc (loc, type, t2),
11196 fold_convert_loc (loc, type, t1));
11197 return t1;
11200 /* (Y | X) ^ X -> Y & ~X. */
11201 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11202 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11204 tree t2 = TREE_OPERAND (arg0, 0);
11205 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11206 arg1);
11207 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11208 fold_convert_loc (loc, type, t2),
11209 fold_convert_loc (loc, type, t1));
11210 return t1;
11213 /* X ^ (X | Y) -> Y & ~X. */
11214 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11215 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11217 tree t2 = TREE_OPERAND (arg1, 1);
11218 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11219 arg0);
11220 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11221 fold_convert_loc (loc, type, t2),
11222 fold_convert_loc (loc, type, t1));
11223 return t1;
11226 /* X ^ (Y | X) -> Y & ~X. */
11227 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11228 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11230 tree t2 = TREE_OPERAND (arg1, 0);
11231 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11232 arg0);
11233 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11234 fold_convert_loc (loc, type, t2),
11235 fold_convert_loc (loc, type, t1));
11236 return t1;
11239 /* Convert ~X ^ ~Y to X ^ Y. */
11240 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11241 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11242 return fold_build2_loc (loc, code, type,
11243 fold_convert_loc (loc, type,
11244 TREE_OPERAND (arg0, 0)),
11245 fold_convert_loc (loc, type,
11246 TREE_OPERAND (arg1, 0)));
11248 /* Convert ~X ^ C to X ^ ~C. */
11249 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11250 && TREE_CODE (arg1) == INTEGER_CST)
11251 return fold_build2_loc (loc, code, type,
11252 fold_convert_loc (loc, type,
11253 TREE_OPERAND (arg0, 0)),
11254 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11256 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11257 if (TREE_CODE (arg0) == BIT_AND_EXPR
11258 && integer_onep (TREE_OPERAND (arg0, 1))
11259 && integer_onep (arg1))
11260 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11261 build_zero_cst (TREE_TYPE (arg0)));
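/* A standalone sketch, not part of this file, of the fold above: in C
   the == operator yields 0 or 1, so (X & 1) ^ 1 and (X & 1) == 0
   compute the same value.  */
#include <assert.h>

static void
check_low_bit_xor (unsigned x)
{
  assert (((x & 1) ^ 1) == (unsigned) ((x & 1) == 0));
}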
11263 /* Fold (X & Y) ^ Y as ~X & Y. */
11264 if (TREE_CODE (arg0) == BIT_AND_EXPR
11265 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11267 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11268 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11269 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11270 fold_convert_loc (loc, type, arg1));
11272 /* Fold (X & Y) ^ X as ~Y & X. */
11273 if (TREE_CODE (arg0) == BIT_AND_EXPR
11274 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11275 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11277 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11278 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11279 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11280 fold_convert_loc (loc, type, arg1));
11282 /* Fold X ^ (X & Y) as X & ~Y. */
11283 if (TREE_CODE (arg1) == BIT_AND_EXPR
11284 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11286 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11287 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11288 fold_convert_loc (loc, type, arg0),
11289 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11291 /* Fold X ^ (Y & X) as ~Y & X. */
11292 if (TREE_CODE (arg1) == BIT_AND_EXPR
11293 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11294 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11296 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11297 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11298 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11299 fold_convert_loc (loc, type, arg0));
11302 /* See if this can be simplified into a rotate first. If that
11303 is unsuccessful continue in the association code. */
11304 goto bit_rotate;
11306 case BIT_AND_EXPR:
11307 if (integer_all_onesp (arg1))
11308 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11309 if (integer_zerop (arg1))
11310 return omit_one_operand_loc (loc, type, arg1, arg0);
11311 if (operand_equal_p (arg0, arg1, 0))
11312 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11314 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11315 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11316 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11317 || (TREE_CODE (arg0) == EQ_EXPR
11318 && integer_zerop (TREE_OPERAND (arg0, 1))))
11319 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11320 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11322 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11323 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11324 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11325 || (TREE_CODE (arg1) == EQ_EXPR
11326 && integer_zerop (TREE_OPERAND (arg1, 1))))
11327 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11328 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11330 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11331 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11332 && TREE_CODE (arg1) == INTEGER_CST
11333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11335 tree tmp1 = fold_convert_loc (loc, type, arg1);
11336 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11337 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11338 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11339 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11340 return
11341 fold_convert_loc (loc, type,
11342 fold_build2_loc (loc, BIT_IOR_EXPR,
11343 type, tmp2, tmp3));
11346 /* (X | Y) & Y is (X, Y). */
11347 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11348 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11349 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11350 /* (X | Y) & X is (Y, X). */
11351 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11352 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11353 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11354 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11355 /* X & (X | Y) is (Y, X). */
11356 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11357 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11358 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11359 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11360 /* X & (Y | X) is (Y, X). */
11361 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11362 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11363 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11364 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11366 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11367 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11368 && integer_onep (TREE_OPERAND (arg0, 1))
11369 && integer_onep (arg1))
11371 tree tem2;
11372 tem = TREE_OPERAND (arg0, 0);
11373 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11374 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11375 tem, tem2);
11376 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11377 build_zero_cst (TREE_TYPE (tem)));
11379 /* Fold ~X & 1 as (X & 1) == 0. */
11380 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11381 && integer_onep (arg1))
11383 tree tem2;
11384 tem = TREE_OPERAND (arg0, 0);
11385 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11386 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11387 tem, tem2);
11388 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11389 build_zero_cst (TREE_TYPE (tem)));
11391 /* Fold !X & 1 as X == 0. */
11392 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11393 && integer_onep (arg1))
11395 tem = TREE_OPERAND (arg0, 0);
11396 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11397 build_zero_cst (TREE_TYPE (tem)));
11400 /* Fold (X ^ Y) & Y as ~X & Y. */
11401 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11402 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11404 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11405 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11406 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11407 fold_convert_loc (loc, type, arg1));
11409 /* Fold (X ^ Y) & X as ~Y & X. */
11410 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11411 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11412 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11414 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11415 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11416 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11417 fold_convert_loc (loc, type, arg1));
11419 /* Fold X & (X ^ Y) as X & ~Y. */
11420 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11421 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11423 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11424 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11425 fold_convert_loc (loc, type, arg0),
11426 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11428 /* Fold X & (Y ^ X) as ~Y & X. */
11429 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11431 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11433 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11434 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11435 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11436 fold_convert_loc (loc, type, arg0));
11439 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11440 multiple of 1 << CST. */
11441 if (TREE_CODE (arg1) == INTEGER_CST)
11443 double_int cst1 = tree_to_double_int (arg1);
11444 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11445 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11446 if ((cst1 & ncst1) == ncst1
11447 && multiple_of_p (type, arg0,
11448 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11449 return fold_convert_loc (loc, type, arg0);
11452 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11453 bits from CST2. */
11454 if (TREE_CODE (arg1) == INTEGER_CST
11455 && TREE_CODE (arg0) == MULT_EXPR
11456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11458 int arg1tz
11459 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11460 if (arg1tz > 0)
11462 double_int arg1mask, masked;
11463 arg1mask = ~double_int::mask (arg1tz);
11464 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11465 TYPE_UNSIGNED (type));
11466 masked = arg1mask & tree_to_double_int (arg1);
11467 if (masked.is_zero ())
11468 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11469 arg0, arg1);
11470 else if (masked != tree_to_double_int (arg1))
11471 return fold_build2_loc (loc, code, type, op0,
11472 double_int_to_tree (type, masked));
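/* A standalone sketch, not part of this file, of the fold above with
   CST1 == 4: the product always has its two low bits clear, so those
   bits of the mask are dead and can be dropped, or the whole AND can
   fold to zero.  */
#include <assert.h>

static void
check_mult_mask (unsigned x)
{
  assert (((x * 4u) & 3u) == 0u);
  assert (((x * 4u) & 7u) == ((x * 4u) & 4u));
}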
11476 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11477 ((A & N) + B) & M -> (A + B) & M
11478 Similarly if (N & M) == 0,
11479 ((A | N) + B) & M -> (A + B) & M
11480 and for - instead of + (or unary - instead of +)
11481 and/or ^ instead of |.
11482 If B is constant and (B & M) == 0, fold into A & M. */
11483 if (host_integerp (arg1, 1))
11485 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11486 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11487 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11488 && (TREE_CODE (arg0) == PLUS_EXPR
11489 || TREE_CODE (arg0) == MINUS_EXPR
11490 || TREE_CODE (arg0) == NEGATE_EXPR)
11491 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11492 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11494 tree pmop[2];
11495 int which = 0;
11496 unsigned HOST_WIDE_INT cst0;
11498 /* Now we know that arg0 is (C + D) or (C - D) or
11499 -C and arg1 (M) is == (1LL << cst) - 1.
11500 Store C into PMOP[0] and D into PMOP[1]. */
11501 pmop[0] = TREE_OPERAND (arg0, 0);
11502 pmop[1] = NULL;
11503 if (TREE_CODE (arg0) != NEGATE_EXPR)
11505 pmop[1] = TREE_OPERAND (arg0, 1);
11506 which = 1;
11509 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11510 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11511 & cst1) != cst1)
11512 which = -1;
11514 for (; which >= 0; which--)
11515 switch (TREE_CODE (pmop[which]))
11517 case BIT_AND_EXPR:
11518 case BIT_IOR_EXPR:
11519 case BIT_XOR_EXPR:
11520 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11521 != INTEGER_CST)
11522 break;
11523 /* tree_low_cst is not used, because we don't care about
11524 the upper bits. */
11525 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11526 cst0 &= cst1;
11527 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11529 if (cst0 != cst1)
11530 break;
11532 else if (cst0 != 0)
11533 break;
11534 /* If C or D is of the form (A & N) where
11535 (N & M) == M, or of the form (A | N) or
11536 (A ^ N) where (N & M) == 0, replace it with A. */
11537 pmop[which] = TREE_OPERAND (pmop[which], 0);
11538 break;
11539 case INTEGER_CST:
11540 /* If C or D is an N where (N & M) == 0, it can be
11541 omitted (treated as 0). */
11542 if ((TREE_CODE (arg0) == PLUS_EXPR
11543 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11544 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11545 pmop[which] = NULL;
11546 break;
11547 default:
11548 break;
11551 /* Only build anything new if we optimized one or both arguments
11552 above. */
11553 if (pmop[0] != TREE_OPERAND (arg0, 0)
11554 || (TREE_CODE (arg0) != NEGATE_EXPR
11555 && pmop[1] != TREE_OPERAND (arg0, 1)))
11557 tree utype = TREE_TYPE (arg0);
11558 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11560 /* Perform the operations in a type that has defined
11561 overflow behavior. */
11562 utype = unsigned_type_for (TREE_TYPE (arg0));
11563 if (pmop[0] != NULL)
11564 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11565 if (pmop[1] != NULL)
11566 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11569 if (TREE_CODE (arg0) == NEGATE_EXPR)
11570 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11571 else if (TREE_CODE (arg0) == PLUS_EXPR)
11573 if (pmop[0] != NULL && pmop[1] != NULL)
11574 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11575 pmop[0], pmop[1]);
11576 else if (pmop[0] != NULL)
11577 tem = pmop[0];
11578 else if (pmop[1] != NULL)
11579 tem = pmop[1];
11580 else
11581 return build_int_cst (type, 0);
11583 else if (pmop[0] == NULL)
11584 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11585 else
11586 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11587 pmop[0], pmop[1]);
11588 /* TEM is now the new binary +, - or unary - replacement. */
11589 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11590 fold_convert_loc (loc, utype, arg1));
11591 return fold_convert_loc (loc, type, tem);
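/* A standalone sketch, not part of this file, of the rule above with
   M == 0xff and N == 0x1ff, so (N & M) == M: the low byte of a sum
   never depends on the bits that A & N discards.  */
#include <assert.h>

static void
check_plus_and_mask (unsigned a, unsigned b)
{
  const unsigned m = 0xff;
  const unsigned n = 0x1ff;
  assert ((((a & n) + b) & m) == ((a + b) & m));
}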
11596 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11597 if (t1 != NULL_TREE)
11598 return t1;
11599 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11600 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11601 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11603 unsigned int prec
11604 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11606 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11607 && (~TREE_INT_CST_LOW (arg1)
11608 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11609 return
11610 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11613 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11615 This results in more efficient code for machines without a NOR
11616 instruction. Combine will canonicalize to the first form
11617 which will allow use of NOR instructions provided by the
11618 backend if they exist. */
11619 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11620 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11622 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11623 build2 (BIT_IOR_EXPR, type,
11624 fold_convert_loc (loc, type,
11625 TREE_OPERAND (arg0, 0)),
11626 fold_convert_loc (loc, type,
11627 TREE_OPERAND (arg1, 0))));
11630 /* If arg0 is derived from the address of an object or function, we may
11631 be able to fold this expression using the object or function's
11632 alignment. */
11633 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11635 unsigned HOST_WIDE_INT modulus, residue;
11636 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11638 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11639 integer_onep (arg1));
11641 /* This works because modulus is a power of 2. If this weren't the
11642 case, we'd have to replace it by its greatest power-of-2
11643 divisor: modulus & -modulus. */
11644 if (low < modulus)
11645 return build_int_cst (type, residue & low);
11648 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11649 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11650 if the new mask might be further optimized. */
11651 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11652 || TREE_CODE (arg0) == RSHIFT_EXPR)
11653 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11654 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11655 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11656 < TYPE_PRECISION (TREE_TYPE (arg0))
11657 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11658 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11660 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11661 unsigned HOST_WIDE_INT mask
11662 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11663 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11664 tree shift_type = TREE_TYPE (arg0);
11666 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11667 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11668 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11669 && TYPE_PRECISION (TREE_TYPE (arg0))
11670 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11672 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11673 tree arg00 = TREE_OPERAND (arg0, 0);
11674 /* See if more bits can be proven as zero because of
11675 zero extension. */
11676 if (TREE_CODE (arg00) == NOP_EXPR
11677 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11679 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11680 if (TYPE_PRECISION (inner_type)
11681 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11682 && TYPE_PRECISION (inner_type) < prec)
11684 prec = TYPE_PRECISION (inner_type);
11685 /* See if we can shorten the right shift. */
11686 if (shiftc < prec)
11687 shift_type = inner_type;
11690 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11691 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11692 zerobits <<= prec - shiftc;
11693 /* For an arithmetic shift, if the sign bit could be set, zerobits
11694 can actually contain sign bits, so no transformation is
11695 possible unless MASK masks them all away. In that
11696 case the shift needs to be converted into a logical shift. */
11697 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11698 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11700 if ((mask & zerobits) == 0)
11701 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11702 else
11703 zerobits = 0;
11707 /* ((X << 16) & 0xff00) is (X, 0). */
11708 if ((mask & zerobits) == mask)
11709 return omit_one_operand_loc (loc, type,
11710 build_int_cst (type, 0), arg0);
11712 newmask = mask | zerobits;
11713 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11715 unsigned int prec;
11717 /* Only do the transformation if NEWMASK is some integer
11718 mode's mask. */
11719 for (prec = BITS_PER_UNIT;
11720 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11721 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11722 break;
11723 if (prec < HOST_BITS_PER_WIDE_INT
11724 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11726 tree newmaskt;
11728 if (shift_type != TREE_TYPE (arg0))
11730 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11731 fold_convert_loc (loc, shift_type,
11732 TREE_OPERAND (arg0, 0)),
11733 TREE_OPERAND (arg0, 1));
11734 tem = fold_convert_loc (loc, type, tem);
11736 else
11737 tem = op0;
11738 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11739 if (!tree_int_cst_equal (newmaskt, arg1))
11740 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
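/* A standalone sketch, not part of this file, assuming 32-bit unsigned
   int: x << 16 has its low 16 bits clear, so a mask selecting only
   those bits yields zero, as in the ((X << 16) & 0xff00) case above.  */
#include <assert.h>

static void
check_shift_mask (unsigned x)
{
  assert (((x << 16) & 0xff00u) == 0u);
}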
11745 goto associate;
11747 case RDIV_EXPR:
11748 /* Don't touch a floating-point divide by zero unless the mode
11749 of the constant can represent infinity. */
11750 if (TREE_CODE (arg1) == REAL_CST
11751 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11752 && real_zerop (arg1))
11753 return NULL_TREE;
11755 /* Optimize A / A to 1.0 if we don't care about
11756 NaNs or Infinities. Skip the transformation
11757 for non-real operands. */
11758 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11759 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11760 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11761 && operand_equal_p (arg0, arg1, 0))
11763 tree r = build_real (TREE_TYPE (arg0), dconst1);
11765 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11768 /* The complex version of the above A / A optimization. */
11769 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11770 && operand_equal_p (arg0, arg1, 0))
11772 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11773 if (! HONOR_NANS (TYPE_MODE (elem_type))
11774 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11776 tree r = build_real (elem_type, dconst1);
11777 /* omit_two_operands will call fold_convert for us. */
11778 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11782 /* (-A) / (-B) -> A / B */
11783 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11784 return fold_build2_loc (loc, RDIV_EXPR, type,
11785 TREE_OPERAND (arg0, 0),
11786 negate_expr (arg1));
11787 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11788 return fold_build2_loc (loc, RDIV_EXPR, type,
11789 negate_expr (arg0),
11790 TREE_OPERAND (arg1, 0));
11792 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11793 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11794 && real_onep (arg1))
11795 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11797 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11798 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11799 && real_minus_onep (arg1))
11800 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11801 negate_expr (arg0)));
11803 /* If ARG1 is a constant, we can convert this to a multiply by the
11804 reciprocal. This does not have the same rounding properties,
11805 so only do this if -freciprocal-math. We can actually
11806 always safely do it if ARG1 is a power of two, but it's hard to
11807 tell if it is or not in a portable manner. */
11808 if (optimize
11809 && (TREE_CODE (arg1) == REAL_CST
11810 || (TREE_CODE (arg1) == COMPLEX_CST
11811 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11812 || (TREE_CODE (arg1) == VECTOR_CST
11813 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11815 if (flag_reciprocal_math
11816 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11817 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11818 /* Find the reciprocal if optimizing and the result is exact.
11819 TODO: Complex reciprocal not implemented. */
11820 if (TREE_CODE (arg1) != COMPLEX_CST)
11822 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11824 if (inverse)
11825 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
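/* A standalone sketch, not part of this file: a power-of-two divisor
   has an exact reciprocal, so x / 4.0 == x * 0.25 for every finite x;
   other constants can round differently, hence the -freciprocal-math
   guard above.  */
#include <assert.h>

static void
check_exact_reciprocal (double x)
{
  assert (x / 4.0 == x * 0.25);	/* Assumes x is finite; NaN compares unequal.  */
}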
11828 /* Convert A/B/C to A/(B*C). */
11829 if (flag_reciprocal_math
11830 && TREE_CODE (arg0) == RDIV_EXPR)
11831 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11832 fold_build2_loc (loc, MULT_EXPR, type,
11833 TREE_OPERAND (arg0, 1), arg1));
11835 /* Convert A/(B/C) to (A/B)*C. */
11836 if (flag_reciprocal_math
11837 && TREE_CODE (arg1) == RDIV_EXPR)
11838 return fold_build2_loc (loc, MULT_EXPR, type,
11839 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11840 TREE_OPERAND (arg1, 0)),
11841 TREE_OPERAND (arg1, 1));
11843 /* Convert C1/(X*C2) into (C1/C2)/X. */
11844 if (flag_reciprocal_math
11845 && TREE_CODE (arg1) == MULT_EXPR
11846 && TREE_CODE (arg0) == REAL_CST
11847 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11849 tree tem = const_binop (RDIV_EXPR, arg0,
11850 TREE_OPERAND (arg1, 1));
11851 if (tem)
11852 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11853 TREE_OPERAND (arg1, 0));
11856 if (flag_unsafe_math_optimizations)
11858 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11859 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11861 /* Optimize sin(x)/cos(x) as tan(x). */
11862 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11863 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11864 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11865 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11866 CALL_EXPR_ARG (arg1, 0), 0))
11868 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11870 if (tanfn != NULL_TREE)
11871 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11874 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11875 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11876 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11877 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11878 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11879 CALL_EXPR_ARG (arg1, 0), 0))
11881 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11883 if (tanfn != NULL_TREE)
11885 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11886 CALL_EXPR_ARG (arg0, 0));
11887 return fold_build2_loc (loc, RDIV_EXPR, type,
11888 build_real (type, dconst1), tmp);
11892 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11893 NaNs or Infinities. */
11894 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11895 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11896 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11898 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11899 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11901 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11902 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11903 && operand_equal_p (arg00, arg01, 0))
11905 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11907 if (cosfn != NULL_TREE)
11908 return build_call_expr_loc (loc, cosfn, 1, arg00);
11912 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11913 NaNs or Infinities. */
11914 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11915 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11916 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11918 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11919 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11921 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11922 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11923 && operand_equal_p (arg00, arg01, 0))
11925 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11927 if (cosfn != NULL_TREE)
11929 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11930 return fold_build2_loc (loc, RDIV_EXPR, type,
11931 build_real (type, dconst1),
11932 tmp);
11937 /* Optimize pow(x,c)/x as pow(x,c-1). */
11938 if (fcode0 == BUILT_IN_POW
11939 || fcode0 == BUILT_IN_POWF
11940 || fcode0 == BUILT_IN_POWL)
11942 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11943 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11944 if (TREE_CODE (arg01) == REAL_CST
11945 && !TREE_OVERFLOW (arg01)
11946 && operand_equal_p (arg1, arg00, 0))
11948 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11949 REAL_VALUE_TYPE c;
11950 tree arg;
11952 c = TREE_REAL_CST (arg01);
11953 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11954 arg = build_real (type, c);
11955 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11959 /* Optimize a/root(b/c) into a*root(c/b). */
11960 if (BUILTIN_ROOT_P (fcode1))
11962 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11964 if (TREE_CODE (rootarg) == RDIV_EXPR)
11966 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11967 tree b = TREE_OPERAND (rootarg, 0);
11968 tree c = TREE_OPERAND (rootarg, 1);
11970 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11972 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11973 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11977 /* Optimize x/expN(y) into x*expN(-y). */
11978 if (BUILTIN_EXPONENT_P (fcode1))
11980 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11981 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11982 arg1 = build_call_expr_loc (loc,
11983 expfn, 1,
11984 fold_convert_loc (loc, type, arg));
11985 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11988 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11989 if (fcode1 == BUILT_IN_POW
11990 || fcode1 == BUILT_IN_POWF
11991 || fcode1 == BUILT_IN_POWL)
11993 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11994 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11995 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11996 tree neg11 = fold_convert_loc (loc, type,
11997 negate_expr (arg11));
11998 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11999 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12002 return NULL_TREE;
12004 case TRUNC_DIV_EXPR:
12005 /* Optimize (X & (-A)) / A where A is a power of 2,
12006 to X >> log2(A). */
12007 if (TREE_CODE (arg0) == BIT_AND_EXPR
12008 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12009 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12011 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12012 arg1, TREE_OPERAND (arg0, 1));
12013 if (sum && integer_zerop (sum)) {
12014 unsigned long pow2;
12016 if (TREE_INT_CST_LOW (arg1))
12017 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12018 else
12019 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12020 + HOST_BITS_PER_WIDE_INT;
12022 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12023 TREE_OPERAND (arg0, 0),
12024 build_int_cst (integer_type_node, pow2));
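/* A standalone sketch, not part of this file, assuming the usual
   arithmetic right shift for signed int (as GCC provides): X & -8 is
   an exact multiple of 8, so dividing it by 8 is a shift by 3.  */
#include <assert.h>

static void
check_masked_div (int x)
{
  assert ((x & -8) / 8 == x >> 3);
}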
12028 /* Fall through */
12030 case FLOOR_DIV_EXPR:
12031 /* Simplify A / (B << N) where A and B are positive and B is
12032 a power of 2, to A >> (N + log2(B)). */
12033 strict_overflow_p = false;
12034 if (TREE_CODE (arg1) == LSHIFT_EXPR
12035 && (TYPE_UNSIGNED (type)
12036 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12038 tree sval = TREE_OPERAND (arg1, 0);
12039 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12041 tree sh_cnt = TREE_OPERAND (arg1, 1);
12042 unsigned long pow2;
12044 if (TREE_INT_CST_LOW (sval))
12045 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12046 else
12047 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12048 + HOST_BITS_PER_WIDE_INT;
12050 if (strict_overflow_p)
12051 fold_overflow_warning (("assuming signed overflow does not "
12052 "occur when simplifying A / (B << N)"),
12053 WARN_STRICT_OVERFLOW_MISC);
12055 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12056 sh_cnt,
12057 build_int_cst (TREE_TYPE (sh_cnt),
12058 pow2));
12059 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12060 fold_convert_loc (loc, type, arg0), sh_cnt);
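/* A standalone sketch, not part of this file, of the fold above for
   unsigned operands with B == 4: dividing by 4 << n is a right shift
   by n + 2.  */
#include <assert.h>

static void
check_div_by_shifted_pow2 (unsigned a, unsigned n)
{
  assert (n < 30);			/* Keep both shifts in range.  */
  assert (a / (4u << n) == a >> (n + 2));
}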
12064 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12065 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12066 if (INTEGRAL_TYPE_P (type)
12067 && TYPE_UNSIGNED (type)
12068 && code == FLOOR_DIV_EXPR)
12069 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12071 /* Fall through */
12073 case ROUND_DIV_EXPR:
12074 case CEIL_DIV_EXPR:
12075 case EXACT_DIV_EXPR:
12076 if (integer_onep (arg1))
12077 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12078 if (integer_zerop (arg1))
12079 return NULL_TREE;
12080 /* X / -1 is -X. */
12081 if (!TYPE_UNSIGNED (type)
12082 && TREE_CODE (arg1) == INTEGER_CST
12083 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12084 && TREE_INT_CST_HIGH (arg1) == -1)
12085 return fold_convert_loc (loc, type, negate_expr (arg0));
12087 /* Convert -A / -B to A / B when the type is signed and overflow is
12088 undefined. */
12089 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12090 && TREE_CODE (arg0) == NEGATE_EXPR
12091 && negate_expr_p (arg1))
12093 if (INTEGRAL_TYPE_P (type))
12094 fold_overflow_warning (("assuming signed overflow does not occur "
12095 "when distributing negation across "
12096 "division"),
12097 WARN_STRICT_OVERFLOW_MISC);
12098 return fold_build2_loc (loc, code, type,
12099 fold_convert_loc (loc, type,
12100 TREE_OPERAND (arg0, 0)),
12101 fold_convert_loc (loc, type,
12102 negate_expr (arg1)));
12104 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12105 && TREE_CODE (arg1) == NEGATE_EXPR
12106 && negate_expr_p (arg0))
12108 if (INTEGRAL_TYPE_P (type))
12109 fold_overflow_warning (("assuming signed overflow does not occur "
12110 "when distributing negation across "
12111 "division"),
12112 WARN_STRICT_OVERFLOW_MISC);
12113 return fold_build2_loc (loc, code, type,
12114 fold_convert_loc (loc, type,
12115 negate_expr (arg0)),
12116 fold_convert_loc (loc, type,
12117 TREE_OPERAND (arg1, 0)));
12120 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12121 operation, EXACT_DIV_EXPR.
12123 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12124 At one time the others generated faster code, but it's not clear if they
12125 still do after the last round of changes to the DIV code in expmed.c. */
12126 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12127 && multiple_of_p (type, arg0, arg1))
12128 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12130 strict_overflow_p = false;
12131 if (TREE_CODE (arg1) == INTEGER_CST
12132 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12133 &strict_overflow_p)))
12135 if (strict_overflow_p)
12136 fold_overflow_warning (("assuming signed overflow does not occur "
12137 "when simplifying division"),
12138 WARN_STRICT_OVERFLOW_MISC);
12139 return fold_convert_loc (loc, type, tem);
12142 return NULL_TREE;
12144 case CEIL_MOD_EXPR:
12145 case FLOOR_MOD_EXPR:
12146 case ROUND_MOD_EXPR:
12147 case TRUNC_MOD_EXPR:
12148 /* X % 1 is always zero, but be sure to preserve any side
12149 effects in X. */
12150 if (integer_onep (arg1))
12151 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12153 /* For X % 0, return X % 0 unchanged so that we get the
12154 proper warnings and errors. */
12155 if (integer_zerop (arg1))
12156 return NULL_TREE;
12158 /* 0 % X is always zero, but be sure to preserve any side
12159 effects in X. Place this after checking for X == 0. */
12160 if (integer_zerop (arg0))
12161 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12163 /* X % -1 is zero. */
12164 if (!TYPE_UNSIGNED (type)
12165 && TREE_CODE (arg1) == INTEGER_CST
12166 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12167 && TREE_INT_CST_HIGH (arg1) == -1)
12168 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12170 /* X % -C is the same as X % C. */
12171 if (code == TRUNC_MOD_EXPR
12172 && !TYPE_UNSIGNED (type)
12173 && TREE_CODE (arg1) == INTEGER_CST
12174 && !TREE_OVERFLOW (arg1)
12175 && TREE_INT_CST_HIGH (arg1) < 0
12176 && !TYPE_OVERFLOW_TRAPS (type)
12177 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12178 && !sign_bit_p (arg1, arg1))
12179 return fold_build2_loc (loc, code, type,
12180 fold_convert_loc (loc, type, arg0),
12181 fold_convert_loc (loc, type,
12182 negate_expr (arg1)));
12184 /* X % -Y is the same as X % Y. */
12185 if (code == TRUNC_MOD_EXPR
12186 && !TYPE_UNSIGNED (type)
12187 && TREE_CODE (arg1) == NEGATE_EXPR
12188 && !TYPE_OVERFLOW_TRAPS (type))
12189 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12190 fold_convert_loc (loc, type,
12191 TREE_OPERAND (arg1, 0)));
12193 strict_overflow_p = false;
12194 if (TREE_CODE (arg1) == INTEGER_CST
12195 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12196 &strict_overflow_p)))
12198 if (strict_overflow_p)
12199 fold_overflow_warning (("assuming signed overflow does not occur "
12200 "when simplifying modulus"),
12201 WARN_STRICT_OVERFLOW_MISC);
12202 return fold_convert_loc (loc, type, tem);
12205 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12206 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12207 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12208 && (TYPE_UNSIGNED (type)
12209 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12211 tree c = arg1;
12212 /* Also optimize A % (C << N) where C is a power of 2,
12213 to A & ((C << N) - 1). */
12214 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12215 c = TREE_OPERAND (arg1, 0);
12217 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12219 tree mask
12220 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12221 build_int_cst (TREE_TYPE (arg1), 1));
12222 if (strict_overflow_p)
12223 fold_overflow_warning (("assuming signed overflow does not "
12224 "occur when simplifying "
12225 "X % (power of two)"),
12226 WARN_STRICT_OVERFLOW_MISC);
12227 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12228 fold_convert_loc (loc, type, arg0),
12229 fold_convert_loc (loc, type, mask));
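/* A standalone sketch, not part of this file, of the fold above: for
   unsigned (or provably nonnegative) X and a power-of-two modulus,
   the remainder is just the low bits.  */
#include <assert.h>

static void
check_mod_pow2 (unsigned x)
{
  assert (x % 8u == (x & 7u));
}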
12233 return NULL_TREE;
12235 case LROTATE_EXPR:
12236 case RROTATE_EXPR:
12237 if (integer_all_onesp (arg0))
12238 return omit_one_operand_loc (loc, type, arg0, arg1);
12239 goto shift;
12241 case RSHIFT_EXPR:
12242 /* Optimize -1 >> x for arithmetic right shifts. */
12243 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12244 && tree_expr_nonnegative_p (arg1))
12245 return omit_one_operand_loc (loc, type, arg0, arg1);
12246 /* ... fall through ... */
12248 case LSHIFT_EXPR:
12249 shift:
12250 if (integer_zerop (arg1))
12251 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12252 if (integer_zerop (arg0))
12253 return omit_one_operand_loc (loc, type, arg0, arg1);
12255 /* Since a negative shift count is not well-defined,
12256 don't try to compute it in the compiler. */
12257 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12258 return NULL_TREE;
12260 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12261 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12262 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12263 && host_integerp (TREE_OPERAND (arg0, 1), false)
12264 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12266 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12267 + TREE_INT_CST_LOW (arg1));
12269 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12270 being well defined. */
12271 if (low >= TYPE_PRECISION (type))
12273 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12274 low = low % TYPE_PRECISION (type);
12275 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12276 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12277 TREE_OPERAND (arg0, 0));
12278 else
12279 low = TYPE_PRECISION (type) - 1;
12282 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12283 build_int_cst (type, low));
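/* Illustrative sketch of the shift-combining rule above (informal,
   assuming a 32-bit int):

       (x << 3) << 5      ->  x << 8
       (x << 20) << 20    ->  0          (unsigned or left shift; x kept)
       (x >> 20) >> 20    ->  x >> 31    (signed right shift, clamped)
       rot (rot (x, 20), 20)  ->  rot (x, 8)   (count taken mod 32)  */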
12286 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12287 into x & ((unsigned)-1 >> c) for unsigned types. */
12288 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12289 || (TYPE_UNSIGNED (type)
12290 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12291 && host_integerp (arg1, false)
12292 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12293 && host_integerp (TREE_OPERAND (arg0, 1), false)
12294 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12296 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12297 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12298 tree lshift;
12299 tree arg00;
12301 if (low0 == low1)
12303 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12305 lshift = build_int_cst (type, -1);
12306 lshift = int_const_binop (code, lshift, arg1);
12308 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
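/* E.g. (informal sketch, 32-bit unsigned int):

       (x >> 4) << 4   ->  x & 0xfffffff0u
       (x << 8) >> 8   ->  x & 0x00ffffffu  */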
12312 /* Rewrite an LROTATE_EXPR by a constant into an
12313 RROTATE_EXPR by a new constant. */
12314 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12316 tree tem = build_int_cst (TREE_TYPE (arg1),
12317 TYPE_PRECISION (type));
12318 tem = const_binop (MINUS_EXPR, tem, arg1);
12319 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12322 /* If we have a rotate of a bit operation with the rotate count and
12323 the second operand of the bit operation both constant,
12324 permute the two operations. */
12325 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12326 && (TREE_CODE (arg0) == BIT_AND_EXPR
12327 || TREE_CODE (arg0) == BIT_IOR_EXPR
12328 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12330 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12331 fold_build2_loc (loc, code, type,
12332 TREE_OPERAND (arg0, 0), arg1),
12333 fold_build2_loc (loc, code, type,
12334 TREE_OPERAND (arg0, 1), arg1));
12336 /* Two consecutive rotates adding up to the precision of the
12337 type can be ignored. */
12338 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12339 && TREE_CODE (arg0) == RROTATE_EXPR
12340 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12341 && TREE_INT_CST_HIGH (arg1) == 0
12342 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12343 && ((TREE_INT_CST_LOW (arg1)
12344 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12345 == (unsigned int) TYPE_PRECISION (type)))
12346 return TREE_OPERAND (arg0, 0);
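/* Illustrative sketch of the rotate folds above (informal, 32-bit
   unsigned int, writing rotl/rotr for the rotate expressions):

       rotl (x, 8)             ->  rotr (x, 24)
       rotr (x & C, 8)         ->  rotr (x, 8) & rotr (C, 8)   (constant C)
       rotr (rotr (x, 8), 24)  ->  x      (counts sum to the precision)
       rotl (~0u, n)           ->  ~0u    (n still evaluated)  */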
12348 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12349 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12350 if the latter can be further optimized. */
12351 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12352 && TREE_CODE (arg0) == BIT_AND_EXPR
12353 && TREE_CODE (arg1) == INTEGER_CST
12354 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12356 tree mask = fold_build2_loc (loc, code, type,
12357 fold_convert_loc (loc, type,
12358 TREE_OPERAND (arg0, 1)),
12359 arg1);
12360 tree shift = fold_build2_loc (loc, code, type,
12361 fold_convert_loc (loc, type,
12362 TREE_OPERAND (arg0, 0)),
12363 arg1);
12364 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12365 if (tem)
12366 return tem;
12369 return NULL_TREE;
12371 case MIN_EXPR:
12372 if (operand_equal_p (arg0, arg1, 0))
12373 return omit_one_operand_loc (loc, type, arg0, arg1);
12374 if (INTEGRAL_TYPE_P (type)
12375 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12376 return omit_one_operand_loc (loc, type, arg1, arg0);
12377 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12378 if (tem)
12379 return tem;
12380 goto associate;
12382 case MAX_EXPR:
12383 if (operand_equal_p (arg0, arg1, 0))
12384 return omit_one_operand_loc (loc, type, arg0, arg1);
12385 if (INTEGRAL_TYPE_P (type)
12386 && TYPE_MAX_VALUE (type)
12387 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12388 return omit_one_operand_loc (loc, type, arg1, arg0);
12389 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12390 if (tem)
12391 return tem;
12392 goto associate;
12394 case TRUTH_ANDIF_EXPR:
12395 /* Note that the operands of this must be ints
12396 and their values must be 0 or 1.
12397 ("true" is a fixed value perhaps depending on the language.) */
12398 /* If first arg is constant zero, return it. */
12399 if (integer_zerop (arg0))
12400 return fold_convert_loc (loc, type, arg0);
12401 case TRUTH_AND_EXPR:
12402 /* If either arg is constant true, drop it. */
12403 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12405 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12406 /* Preserve sequence points. */
12407 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12409 /* If second arg is constant zero, result is zero, but first arg
12410 must be evaluated. */
12411 if (integer_zerop (arg1))
12412 return omit_one_operand_loc (loc, type, arg1, arg0);
12413 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12414 case will be handled here. */
12415 if (integer_zerop (arg0))
12416 return omit_one_operand_loc (loc, type, arg0, arg1);
12418 /* !X && X is always false. */
12419 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12421 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12422 /* X && !X is always false. */
12423 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12425 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
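/* Illustrative truth-value folds from the rules above (informal;
   operands are 0/1 truth values, "(a, v)" means a is evaluated for
   its side effects and v is the result):

       a && 1   ->  a            a && 0   ->  (a, 0)
       1 && a   ->  a            !a && a  ->  0
       a && !a  ->  0

   A constant-zero first operand short-circuits TRUTH_ANDIF_EXPR
   immediately.  */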
12427 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12428 means A >= Y && A != MAX, but in this case we know that
12429 A < X <= MAX. */
12431 if (!TREE_SIDE_EFFECTS (arg0)
12432 && !TREE_SIDE_EFFECTS (arg1))
12434 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12435 if (tem && !operand_equal_p (tem, arg0, 0))
12436 return fold_build2_loc (loc, code, type, tem, arg1);
12438 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12439 if (tem && !operand_equal_p (tem, arg1, 0))
12440 return fold_build2_loc (loc, code, type, arg0, tem);
12443 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12444 != NULL_TREE)
12445 return tem;
12447 return NULL_TREE;
12449 case TRUTH_ORIF_EXPR:
12450 /* Note that the operands of this must be ints
12451 and their values must be 0 or true.
12452 ("true" is a fixed value perhaps depending on the language.) */
12453 /* If first arg is constant true, return it. */
12454 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12455 return fold_convert_loc (loc, type, arg0);
12456 case TRUTH_OR_EXPR:
12457 /* If either arg is constant zero, drop it. */
12458 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12459 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12460 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12461 /* Preserve sequence points. */
12462 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12463 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12464 /* If second arg is constant true, result is true, but we must
12465 evaluate first arg. */
12466 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12467 return omit_one_operand_loc (loc, type, arg1, arg0);
12468 /* Likewise for first arg, but note this only occurs here for
12469 TRUTH_OR_EXPR. */
12470 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12471 return omit_one_operand_loc (loc, type, arg0, arg1);
12473 /* !X || X is always true. */
12474 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12475 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12476 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12477 /* X || !X is always true. */
12478 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12479 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12480 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12482 /* (X && !Y) || (!X && Y) is X ^ Y */
12483 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12484 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12486 tree a0, a1, l0, l1, n0, n1;
12488 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12489 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12491 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12492 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12494 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12495 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12497 if ((operand_equal_p (n0, a0, 0)
12498 && operand_equal_p (n1, a1, 0))
12499 || (operand_equal_p (n0, a1, 0)
12500 && operand_equal_p (n1, a0, 0)))
12501 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
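/* Likewise for the OR folds above (informal sketch):

       a || 0   ->  a            a || 1   ->  (a, 1)
       !a || a  ->  1            a || !a  ->  1
       (a && !b) || (!a && b)   ->  a ^ b   (logical XOR)  */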
12504 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12505 != NULL_TREE)
12506 return tem;
12508 return NULL_TREE;
12510 case TRUTH_XOR_EXPR:
12511 /* If the second arg is constant zero, drop it. */
12512 if (integer_zerop (arg1))
12513 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12514 /* If the second arg is constant true, this is a logical inversion. */
12515 if (integer_onep (arg1))
12517 /* Only call invert_truthvalue if operand is a truth value. */
12518 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12519 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12520 else
12521 tem = invert_truthvalue_loc (loc, arg0);
12522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12524 /* Identical arguments cancel to zero. */
12525 if (operand_equal_p (arg0, arg1, 0))
12526 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12528 /* !X ^ X is always true. */
12529 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12530 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12531 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12533 /* X ^ !X is always true. */
12534 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12535 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12536 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
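/* Summary of the XOR folds above (informal; ^ denotes TRUTH_XOR on
   0/1 truth values):

       a ^ 0  ->  a         a ^ 1   ->  !a
       a ^ a  ->  0         !a ^ a  ->  1  */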
12538 return NULL_TREE;
12540 case EQ_EXPR:
12541 case NE_EXPR:
12542 STRIP_NOPS (arg0);
12543 STRIP_NOPS (arg1);
12545 tem = fold_comparison (loc, code, type, op0, op1);
12546 if (tem != NULL_TREE)
12547 return tem;
12549 /* bool_var != 0 becomes bool_var. */
12550 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12551 && code == NE_EXPR)
12552 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12554 /* bool_var == 1 becomes bool_var. */
12555 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12556 && code == EQ_EXPR)
12557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12559 /* bool_var != 1 becomes !bool_var. */
12560 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12561 && code == NE_EXPR)
12562 return fold_convert_loc (loc, type,
12563 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12564 TREE_TYPE (arg0), arg0));
12566 /* bool_var == 0 becomes !bool_var. */
12567 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12568 && code == EQ_EXPR)
12569 return fold_convert_loc (loc, type,
12570 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12571 TREE_TYPE (arg0), arg0));
12573 /* !exp != 0 becomes !exp */
12574 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12575 && code == NE_EXPR)
12576 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
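/* For a C99 _Bool (or C++ bool) b, the rules above give, informally:

       b != 0  ->  b          b == 1  ->  b
       b != 1  ->  !b         b == 0  ->  !b  */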
12578 /* If this is an equality comparison of the address of two non-weak,
12579 unaliased symbols neither of which are extern (since we do not
12580 have access to attributes for externs), then we know the result. */
12581 if (TREE_CODE (arg0) == ADDR_EXPR
12582 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12583 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12584 && ! lookup_attribute ("alias",
12585 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12586 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12587 && TREE_CODE (arg1) == ADDR_EXPR
12588 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12589 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12590 && ! lookup_attribute ("alias",
12591 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12592 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12594 /* We know that we're looking at the address of two
12595 non-weak, unaliased, static _DECL nodes.
12597 It is both wasteful and incorrect to call operand_equal_p
12598 to compare the two ADDR_EXPR nodes. It is wasteful in that
12599 all we need to do is test pointer equality for the arguments
12600 to the two ADDR_EXPR nodes. It is incorrect to use
12601 operand_equal_p as that function is NOT equivalent to a
12602 C equality test. It can in fact return false for two
12603 objects which would test as equal using the C equality
12604 operator. */
12605 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12606 return constant_boolean_node (equal
12607 ? code == EQ_EXPR : code != EQ_EXPR,
12608 type);
12611 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12612 a MINUS_EXPR of a constant, we can convert it into a comparison with
12613 a revised constant as long as no overflow occurs. */
12614 if (TREE_CODE (arg1) == INTEGER_CST
12615 && (TREE_CODE (arg0) == PLUS_EXPR
12616 || TREE_CODE (arg0) == MINUS_EXPR)
12617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12618 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12619 ? MINUS_EXPR : PLUS_EXPR,
12620 fold_convert_loc (loc, TREE_TYPE (arg0),
12621 arg1),
12622 TREE_OPERAND (arg0, 1)))
12623 && !TREE_OVERFLOW (tem))
12624 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12626 /* Similarly for a NEGATE_EXPR. */
12627 if (TREE_CODE (arg0) == NEGATE_EXPR
12628 && TREE_CODE (arg1) == INTEGER_CST
12629 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12630 arg1)))
12631 && TREE_CODE (tem) == INTEGER_CST
12632 && !TREE_OVERFLOW (tem))
12633 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12635 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12636 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12637 && TREE_CODE (arg1) == INTEGER_CST
12638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12639 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12640 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12641 fold_convert_loc (loc,
12642 TREE_TYPE (arg0),
12643 arg1),
12644 TREE_OPERAND (arg0, 1)));
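/* Illustrative constant re-arrangements from the rules above (informal
   sketch, int x, no overflow in the folded constant):

       x + 5 == 7     ->  x == 2
       x - 3 != 10    ->  x != 13
       -x == 4        ->  x == -4
       (x ^ 3) == 5   ->  x == 6  */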
12646 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12647 if ((TREE_CODE (arg0) == PLUS_EXPR
12648 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12649 || TREE_CODE (arg0) == MINUS_EXPR)
12650 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12651 0)),
12652 arg1, 0)
12653 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12654 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12656 tree val = TREE_OPERAND (arg0, 1);
12657 return omit_two_operands_loc (loc, type,
12658 fold_build2_loc (loc, code, type,
12659 val,
12660 build_int_cst (TREE_TYPE (val),
12661 0)),
12662 TREE_OPERAND (arg0, 0), arg1);
12665 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12666 if (TREE_CODE (arg0) == MINUS_EXPR
12667 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12668 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12669 1)),
12670 arg1, 0)
12671 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12673 return omit_two_operands_loc (loc, type,
12674 code == NE_EXPR
12675 ? boolean_true_node : boolean_false_node,
12676 TREE_OPERAND (arg0, 1), arg1);
12679 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12680 for !=. Don't do this for ordered comparisons due to overflow. */
12681 if (TREE_CODE (arg0) == MINUS_EXPR
12682 && integer_zerop (arg1))
12683 return fold_build2_loc (loc, code, type,
12684 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12686 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12687 if (TREE_CODE (arg0) == ABS_EXPR
12688 && (integer_zerop (arg1) || real_zerop (arg1)))
12689 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12691 /* If this is an EQ or NE comparison with zero and ARG0 is
12692 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12693 two operations, but the latter can be done in one less insn
12694 on machines that have only two-operand insns or on which a
12695 constant cannot be the first operand. */
12696 if (TREE_CODE (arg0) == BIT_AND_EXPR
12697 && integer_zerop (arg1))
12699 tree arg00 = TREE_OPERAND (arg0, 0);
12700 tree arg01 = TREE_OPERAND (arg0, 1);
12701 if (TREE_CODE (arg00) == LSHIFT_EXPR
12702 && integer_onep (TREE_OPERAND (arg00, 0)))
12704 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12705 arg01, TREE_OPERAND (arg00, 1));
12706 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12707 build_int_cst (TREE_TYPE (arg0), 1));
12708 return fold_build2_loc (loc, code, type,
12709 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12710 arg1);
12712 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12713 && integer_onep (TREE_OPERAND (arg01, 0)))
12715 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12716 arg00, TREE_OPERAND (arg01, 1));
12717 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12718 build_int_cst (TREE_TYPE (arg0), 1));
12719 return fold_build2_loc (loc, code, type,
12720 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12721 arg1);
12725 /* If this is an NE or EQ comparison of zero against the result of a
12726 signed MOD operation whose second operand is a power of 2, make
12727 the MOD operation unsigned since it is simpler and equivalent. */
12728 if (integer_zerop (arg1)
12729 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12730 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12731 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12732 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12733 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12734 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12736 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12737 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12738 fold_convert_loc (loc, newtype,
12739 TREE_OPERAND (arg0, 0)),
12740 fold_convert_loc (loc, newtype,
12741 TREE_OPERAND (arg0, 1)));
12743 return fold_build2_loc (loc, code, type, newmod,
12744 fold_convert_loc (loc, newtype, arg1));
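/* E.g. for int x, the rule above turns  x % 4 == 0  into
   (unsigned) x % 4u == 0, which the power-of-two MOD fold then
   reduces to  ((unsigned) x & 3u) == 0  (informal sketch).  */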
12747 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12748 C1 is a valid shift constant, and C2 is a power of two, i.e.
12749 a single bit. */
12750 if (TREE_CODE (arg0) == BIT_AND_EXPR
12751 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12753 == INTEGER_CST
12754 && integer_pow2p (TREE_OPERAND (arg0, 1))
12755 && integer_zerop (arg1))
12757 tree itype = TREE_TYPE (arg0);
12758 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12759 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12761 /* Check for a valid shift count. */
12762 if (TREE_INT_CST_HIGH (arg001) == 0
12763 && TREE_INT_CST_LOW (arg001) < prec)
12765 tree arg01 = TREE_OPERAND (arg0, 1);
12766 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12767 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12768 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12769 can be rewritten as (X & (C2 << C1)) != 0. */
12770 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12772 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12773 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12774 return fold_build2_loc (loc, code, type, tem,
12775 fold_convert_loc (loc, itype, arg1));
12777 /* Otherwise, for signed (arithmetic) shifts,
12778 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12779 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12780 else if (!TYPE_UNSIGNED (itype))
12781 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12782 arg000, build_int_cst (itype, 0));
12783 /* Otherwise, for unsigned (logical) shifts,
12784 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12785 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12786 else
12787 return omit_one_operand_loc (loc, type,
12788 code == EQ_EXPR ? integer_one_node
12789 : integer_zero_node,
12790 arg000);
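/* Illustrative sketch for the shifted-bit tests above (informal,
   32-bit int x):

       ((x >> 4) & 2) != 0    ->  (x & 32) != 0    (2 << 4 still fits)
       ((x >> 29) & 8) != 0   ->  x < 0            (signed; bit 3 + 29 = 31)
       ((x >> 29) & 8) == 0   ->  (x, 1)           (unsigned; always true)  */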
12794 /* If we have (A & C) == C where C is a power of 2, convert this into
12795 (A & C) != 0. Similarly for NE_EXPR. */
12796 if (TREE_CODE (arg0) == BIT_AND_EXPR
12797 && integer_pow2p (TREE_OPERAND (arg0, 1))
12798 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12799 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12800 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12801 integer_zero_node));
12803 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12804 bit, then fold the expression into A < 0 or A >= 0. */
12805 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12806 if (tem)
12807 return tem;
12809 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12810 Similarly for NE_EXPR. */
12811 if (TREE_CODE (arg0) == BIT_AND_EXPR
12812 && TREE_CODE (arg1) == INTEGER_CST
12813 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12815 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12816 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12817 TREE_OPERAND (arg0, 1));
12818 tree dandnotc
12819 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12820 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12821 notc);
12822 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12823 if (integer_nonzerop (dandnotc))
12824 return omit_one_operand_loc (loc, type, rslt, arg0);
12827 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12828 Similarly for NE_EXPR. */
12829 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12830 && TREE_CODE (arg1) == INTEGER_CST
12831 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12833 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12834 tree candnotd
12835 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12836 TREE_OPERAND (arg0, 1),
12837 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12838 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12839 if (integer_nonzerop (candnotd))
12840 return omit_one_operand_loc (loc, type, rslt, arg0);
12843 /* If this is a comparison of a field, we may be able to simplify it. */
12844 if ((TREE_CODE (arg0) == COMPONENT_REF
12845 || TREE_CODE (arg0) == BIT_FIELD_REF)
12846 /* Handle the constant case even without -O
12847 to make sure the warnings are given. */
12848 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12850 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12851 if (t1)
12852 return t1;
12855 /* Optimize comparisons of strlen vs zero to a compare of the
12856 first character of the string vs zero. To wit,
12857 strlen(ptr) == 0 => *ptr == 0
12858 strlen(ptr) != 0 => *ptr != 0
12859 Other cases should reduce to one of these two (or a constant)
12860 due to the return value of strlen being unsigned. */
12861 if (TREE_CODE (arg0) == CALL_EXPR
12862 && integer_zerop (arg1))
12864 tree fndecl = get_callee_fndecl (arg0);
12866 if (fndecl
12867 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12868 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12869 && call_expr_nargs (arg0) == 1
12870 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12872 tree iref = build_fold_indirect_ref_loc (loc,
12873 CALL_EXPR_ARG (arg0, 0));
12874 return fold_build2_loc (loc, code, type, iref,
12875 build_int_cst (TREE_TYPE (iref), 0));
12879 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12880 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12881 if (TREE_CODE (arg0) == RSHIFT_EXPR
12882 && integer_zerop (arg1)
12883 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12885 tree arg00 = TREE_OPERAND (arg0, 0);
12886 tree arg01 = TREE_OPERAND (arg0, 1);
12887 tree itype = TREE_TYPE (arg00);
12888 if (TREE_INT_CST_HIGH (arg01) == 0
12889 && TREE_INT_CST_LOW (arg01)
12890 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12892 if (TYPE_UNSIGNED (itype))
12894 itype = signed_type_for (itype);
12895 arg00 = fold_convert_loc (loc, itype, arg00);
12897 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12898 type, arg00, build_int_cst (itype, 0));
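/* E.g. (32-bit int): the rule above folds  (x >> 31) != 0  into
   x < 0  and  (x >> 31) == 0  into  x >= 0; an unsigned x is first
   converted to the corresponding signed type (informal sketch).  */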
12902 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12903 if (integer_zerop (arg1)
12904 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12905 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12906 TREE_OPERAND (arg0, 1));
12908 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12909 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12910 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12911 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12912 build_zero_cst (TREE_TYPE (arg0)));
12913 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12914 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12915 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12916 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12917 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12918 build_zero_cst (TREE_TYPE (arg0)));
12920 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12921 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12922 && TREE_CODE (arg1) == INTEGER_CST
12923 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12924 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12925 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12926 TREE_OPERAND (arg0, 1), arg1));
12928 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12929 (X & C) == 0 when C is a single bit. */
12930 if (TREE_CODE (arg0) == BIT_AND_EXPR
12931 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12932 && integer_zerop (arg1)
12933 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12935 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12936 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12937 TREE_OPERAND (arg0, 1));
12938 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12939 type, tem,
12940 fold_convert_loc (loc, TREE_TYPE (arg0),
12941 arg1));
12944 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12945 constant C is a power of two, i.e. a single bit. */
12946 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12948 && integer_zerop (arg1)
12949 && integer_pow2p (TREE_OPERAND (arg0, 1))
12950 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12951 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12953 tree arg00 = TREE_OPERAND (arg0, 0);
12954 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12955 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12958 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12959 when C is a power of two, i.e. a single bit. */
12960 if (TREE_CODE (arg0) == BIT_AND_EXPR
12961 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12962 && integer_zerop (arg1)
12963 && integer_pow2p (TREE_OPERAND (arg0, 1))
12964 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12965 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12967 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12968 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12969 arg000, TREE_OPERAND (arg0, 1));
12970 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12971 tem, build_int_cst (TREE_TYPE (tem), 0));
12974 if (integer_zerop (arg1)
12975 && tree_expr_nonzero_p (arg0))
12977 tree res = constant_boolean_node (code == NE_EXPR, type);
12978 return omit_one_operand_loc (loc, type, res, arg0);
12981 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12982 if (TREE_CODE (arg0) == NEGATE_EXPR
12983 && TREE_CODE (arg1) == NEGATE_EXPR)
12984 return fold_build2_loc (loc, code, type,
12985 TREE_OPERAND (arg0, 0),
12986 fold_convert_loc (loc, TREE_TYPE (arg0),
12987 TREE_OPERAND (arg1, 0)));
12989 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12990 if (TREE_CODE (arg0) == BIT_AND_EXPR
12991 && TREE_CODE (arg1) == BIT_AND_EXPR)
12993 tree arg00 = TREE_OPERAND (arg0, 0);
12994 tree arg01 = TREE_OPERAND (arg0, 1);
12995 tree arg10 = TREE_OPERAND (arg1, 0);
12996 tree arg11 = TREE_OPERAND (arg1, 1);
12997 tree itype = TREE_TYPE (arg0);
12999 if (operand_equal_p (arg01, arg11, 0))
13000 return fold_build2_loc (loc, code, type,
13001 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13002 fold_build2_loc (loc,
13003 BIT_XOR_EXPR, itype,
13004 arg00, arg10),
13005 arg01),
13006 build_zero_cst (itype));
13008 if (operand_equal_p (arg01, arg10, 0))
13009 return fold_build2_loc (loc, code, type,
13010 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13011 fold_build2_loc (loc,
13012 BIT_XOR_EXPR, itype,
13013 arg00, arg11),
13014 arg01),
13015 build_zero_cst (itype));
13017 if (operand_equal_p (arg00, arg11, 0))
13018 return fold_build2_loc (loc, code, type,
13019 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13020 fold_build2_loc (loc,
13021 BIT_XOR_EXPR, itype,
13022 arg01, arg10),
13023 arg00),
13024 build_zero_cst (itype));
13026 if (operand_equal_p (arg00, arg10, 0))
13027 return fold_build2_loc (loc, code, type,
13028 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13029 fold_build2_loc (loc,
13030 BIT_XOR_EXPR, itype,
13031 arg01, arg11),
13032 arg00),
13033 build_zero_cst (itype));
13036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13037 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13039 tree arg00 = TREE_OPERAND (arg0, 0);
13040 tree arg01 = TREE_OPERAND (arg0, 1);
13041 tree arg10 = TREE_OPERAND (arg1, 0);
13042 tree arg11 = TREE_OPERAND (arg1, 1);
13043 tree itype = TREE_TYPE (arg0);
13045 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13046 operand_equal_p guarantees no side-effects so we don't need
13047 to use omit_one_operand on Z. */
13048 if (operand_equal_p (arg01, arg11, 0))
13049 return fold_build2_loc (loc, code, type, arg00,
13050 fold_convert_loc (loc, TREE_TYPE (arg00),
13051 arg10));
13052 if (operand_equal_p (arg01, arg10, 0))
13053 return fold_build2_loc (loc, code, type, arg00,
13054 fold_convert_loc (loc, TREE_TYPE (arg00),
13055 arg11));
13056 if (operand_equal_p (arg00, arg11, 0))
13057 return fold_build2_loc (loc, code, type, arg01,
13058 fold_convert_loc (loc, TREE_TYPE (arg01),
13059 arg10));
13060 if (operand_equal_p (arg00, arg10, 0))
13061 return fold_build2_loc (loc, code, type, arg01,
13062 fold_convert_loc (loc, TREE_TYPE (arg01),
13063 arg11));
13065 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13066 if (TREE_CODE (arg01) == INTEGER_CST
13067 && TREE_CODE (arg11) == INTEGER_CST)
13069 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13070 fold_convert_loc (loc, itype, arg11));
13071 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13072 return fold_build2_loc (loc, code, type, tem,
13073 fold_convert_loc (loc, itype, arg10));
13077 /* Attempt to simplify equality/inequality comparisons of complex
13078 values. Only lower the comparison if the result is known or
13079 can be simplified to a single scalar comparison. */
13080 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13081 || TREE_CODE (arg0) == COMPLEX_CST)
13082 && (TREE_CODE (arg1) == COMPLEX_EXPR
13083 || TREE_CODE (arg1) == COMPLEX_CST))
13085 tree real0, imag0, real1, imag1;
13086 tree rcond, icond;
13088 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13090 real0 = TREE_OPERAND (arg0, 0);
13091 imag0 = TREE_OPERAND (arg0, 1);
13093 else
13095 real0 = TREE_REALPART (arg0);
13096 imag0 = TREE_IMAGPART (arg0);
13099 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13101 real1 = TREE_OPERAND (arg1, 0);
13102 imag1 = TREE_OPERAND (arg1, 1);
13104 else
13106 real1 = TREE_REALPART (arg1);
13107 imag1 = TREE_IMAGPART (arg1);
13110 rcond = fold_binary_loc (loc, code, type, real0, real1);
13111 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13113 if (integer_zerop (rcond))
13115 if (code == EQ_EXPR)
13116 return omit_two_operands_loc (loc, type, boolean_false_node,
13117 imag0, imag1);
13118 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13120 else
13122 if (code == NE_EXPR)
13123 return omit_two_operands_loc (loc, type, boolean_true_node,
13124 imag0, imag1);
13125 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13129 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13130 if (icond && TREE_CODE (icond) == INTEGER_CST)
13132 if (integer_zerop (icond))
13134 if (code == EQ_EXPR)
13135 return omit_two_operands_loc (loc, type, boolean_false_node,
13136 real0, real1);
13137 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13139 else
13141 if (code == NE_EXPR)
13142 return omit_two_operands_loc (loc, type, boolean_true_node,
13143 real0, real1);
13144 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13149 return NULL_TREE;
13151 case LT_EXPR:
13152 case GT_EXPR:
13153 case LE_EXPR:
13154 case GE_EXPR:
13155 tem = fold_comparison (loc, code, type, op0, op1);
13156 if (tem != NULL_TREE)
13157 return tem;
13159 /* Transform comparisons of the form X +- C CMP X. */
13160 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13162 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13163 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13164 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13165 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13167 tree arg01 = TREE_OPERAND (arg0, 1);
13168 enum tree_code code0 = TREE_CODE (arg0);
13169 int is_positive;
13171 if (TREE_CODE (arg01) == REAL_CST)
13172 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13173 else
13174 is_positive = tree_int_cst_sgn (arg01);
13176 /* (X - c) > X becomes false. */
13177 if (code == GT_EXPR
13178 && ((code0 == MINUS_EXPR && is_positive >= 0)
13179 || (code0 == PLUS_EXPR && is_positive <= 0)))
13181 if (TREE_CODE (arg01) == INTEGER_CST
13182 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13183 fold_overflow_warning (("assuming signed overflow does not "
13184 "occur when assuming that (X - c) > X "
13185 "is always false"),
13186 WARN_STRICT_OVERFLOW_ALL);
13187 return constant_boolean_node (0, type);
13190 /* Likewise (X + c) < X becomes false. */
13191 if (code == LT_EXPR
13192 && ((code0 == PLUS_EXPR && is_positive >= 0)
13193 || (code0 == MINUS_EXPR && is_positive <= 0)))
13195 if (TREE_CODE (arg01) == INTEGER_CST
13196 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13197 fold_overflow_warning (("assuming signed overflow does not "
13198 "occur when assuming that "
13199 "(X + c) < X is always false"),
13200 WARN_STRICT_OVERFLOW_ALL);
13201 return constant_boolean_node (0, type);
13204 /* Convert (X - c) <= X to true. */
13205 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13206 && code == LE_EXPR
13207 && ((code0 == MINUS_EXPR && is_positive >= 0)
13208 || (code0 == PLUS_EXPR && is_positive <= 0)))
13210 if (TREE_CODE (arg01) == INTEGER_CST
13211 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13212 fold_overflow_warning (("assuming signed overflow does not "
13213 "occur when assuming that "
13214 "(X - c) <= X is always true"),
13215 WARN_STRICT_OVERFLOW_ALL);
13216 return constant_boolean_node (1, type);
13219 /* Convert (X + c) >= X to true. */
13220 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13221 && code == GE_EXPR
13222 && ((code0 == PLUS_EXPR && is_positive >= 0)
13223 || (code0 == MINUS_EXPR && is_positive <= 0)))
13225 if (TREE_CODE (arg01) == INTEGER_CST
13226 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13227 fold_overflow_warning (("assuming signed overflow does not "
13228 "occur when assuming that "
13229 "(X + c) >= X is always true"),
13230 WARN_STRICT_OVERFLOW_ALL);
13231 return constant_boolean_node (1, type);
13234 if (TREE_CODE (arg01) == INTEGER_CST)
13236 /* Convert X + c > X and X - c < X to true for integers. */
13237 if (code == GT_EXPR
13238 && ((code0 == PLUS_EXPR && is_positive > 0)
13239 || (code0 == MINUS_EXPR && is_positive < 0)))
13241 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13242 fold_overflow_warning (("assuming signed overflow does "
13243 "not occur when assuming that "
13244 "(X + c) > X is always true"),
13245 WARN_STRICT_OVERFLOW_ALL);
13246 return constant_boolean_node (1, type);
13249 if (code == LT_EXPR
13250 && ((code0 == MINUS_EXPR && is_positive > 0)
13251 || (code0 == PLUS_EXPR && is_positive < 0)))
13253 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13254 fold_overflow_warning (("assuming signed overflow does "
13255 "not occur when assuming that "
13256 "(X - c) < X is always true"),
13257 WARN_STRICT_OVERFLOW_ALL);
13258 return constant_boolean_node (1, type);
13261 /* Convert X + c <= X and X - c >= X to false for integers. */
13262 if (code == LE_EXPR
13263 && ((code0 == PLUS_EXPR && is_positive > 0)
13264 || (code0 == MINUS_EXPR && is_positive < 0)))
13266 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13267 fold_overflow_warning (("assuming signed overflow does "
13268 "not occur when assuming that "
13269 "(X + c) <= X is always false"),
13270 WARN_STRICT_OVERFLOW_ALL);
13271 return constant_boolean_node (0, type);
13274 if (code == GE_EXPR
13275 && ((code0 == MINUS_EXPR && is_positive > 0)
13276 || (code0 == PLUS_EXPR && is_positive < 0)))
13278 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13279 fold_overflow_warning (("assuming signed overflow does "
13280 "not occur when assuming that "
13281 "(X - c) >= X is always false"),
13282 WARN_STRICT_OVERFLOW_ALL);
13283 return constant_boolean_node (0, type);
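/* Informal summary of the series above, for int x, a positive
   constant c, and signed overflow treated as undefined (a
   strict-overflow warning may be emitted):

       x + c > x   ->  1         x + c <= x  ->  0
       x - c < x   ->  1         x - c >= x  ->  0
       x - c > x   ->  0         x - c <= x  ->  1
       x + c < x   ->  0         x + c >= x  ->  1  */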
13288 /* Comparisons with the highest or lowest possible integer of
13289 the specified precision will have known values. */
13291 tree arg1_type = TREE_TYPE (arg1);
13292 unsigned int width = TYPE_PRECISION (arg1_type);
13294 if (TREE_CODE (arg1) == INTEGER_CST
13295 && width <= HOST_BITS_PER_DOUBLE_INT
13296 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13298 HOST_WIDE_INT signed_max_hi;
13299 unsigned HOST_WIDE_INT signed_max_lo;
13300 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13302 if (width <= HOST_BITS_PER_WIDE_INT)
13304 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13305 - 1;
13306 signed_max_hi = 0;
13307 max_hi = 0;
13309 if (TYPE_UNSIGNED (arg1_type))
13311 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13312 min_lo = 0;
13313 min_hi = 0;
13315 else
13317 max_lo = signed_max_lo;
13318 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13319 min_hi = -1;
13322 else
13324 width -= HOST_BITS_PER_WIDE_INT;
13325 signed_max_lo = -1;
13326 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13327 - 1;
13328 max_lo = -1;
13329 min_lo = 0;
13331 if (TYPE_UNSIGNED (arg1_type))
13333 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13334 min_hi = 0;
13336 else
13338 max_hi = signed_max_hi;
13339 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13343 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13344 && TREE_INT_CST_LOW (arg1) == max_lo)
13345 switch (code)
13347 case GT_EXPR:
13348 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13350 case GE_EXPR:
13351 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13353 case LE_EXPR:
13354 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13356 case LT_EXPR:
13357 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13359 /* The GE_EXPR and LT_EXPR cases above are not normally
13360 reached because of previous transformations. */
13362 default:
13363 break;
13365 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13366 == max_hi
13367 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13368 switch (code)
13370 case GT_EXPR:
13371 arg1 = const_binop (PLUS_EXPR, arg1,
13372 build_int_cst (TREE_TYPE (arg1), 1));
13373 return fold_build2_loc (loc, EQ_EXPR, type,
13374 fold_convert_loc (loc,
13375 TREE_TYPE (arg1), arg0),
13376 arg1);
13377 case LE_EXPR:
13378 arg1 = const_binop (PLUS_EXPR, arg1,
13379 build_int_cst (TREE_TYPE (arg1), 1));
13380 return fold_build2_loc (loc, NE_EXPR, type,
13381 fold_convert_loc (loc, TREE_TYPE (arg1),
13382 arg0),
13383 arg1);
13384 default:
13385 break;
13387 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13388 == min_hi
13389 && TREE_INT_CST_LOW (arg1) == min_lo)
13390 switch (code)
13392 case LT_EXPR:
13393 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13395 case LE_EXPR:
13396 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13398 case GE_EXPR:
13399 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13401 case GT_EXPR:
13402 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13404 default:
13405 break;
13407 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13408 == min_hi
13409 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13410 switch (code)
13412 case GE_EXPR:
13413 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13414 return fold_build2_loc (loc, NE_EXPR, type,
13415 fold_convert_loc (loc,
13416 TREE_TYPE (arg1), arg0),
13417 arg1);
13418 case LT_EXPR:
13419 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13420 return fold_build2_loc (loc, EQ_EXPR, type,
13421 fold_convert_loc (loc, TREE_TYPE (arg1),
13422 arg0),
13423 arg1);
13424 default:
13425 break;
13428 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13429 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13430 && TYPE_UNSIGNED (arg1_type)
13431 /* We will flip the signedness of the comparison operator
13432 associated with the mode of arg1, so the sign bit is
13433 specified by this mode. Check that arg1 is the signed
13434 max associated with this sign bit. */
13435 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13436 /* signed_type does not work on pointer types. */
13437 && INTEGRAL_TYPE_P (arg1_type))
13439 /* The following case also applies to X < signed_max+1
13440 and X >= signed_max+1 because of previous transformations. */
13441 if (code == LE_EXPR || code == GT_EXPR)
13443 tree st;
13444 st = signed_type_for (TREE_TYPE (arg1));
13445 return fold_build2_loc (loc,
13446 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13447 type, fold_convert_loc (loc, st, arg0),
13448 build_int_cst (st, 0));
13454 /* If we are comparing an ABS_EXPR with a constant, we can
13455 convert all the cases into explicit comparisons, but they may
13456 well not be faster than doing the ABS and one comparison.
13457 But ABS (X) <= C is a range comparison, which becomes a subtraction
13458 and a comparison, and is probably faster. */
13459 if (code == LE_EXPR
13460 && TREE_CODE (arg1) == INTEGER_CST
13461 && TREE_CODE (arg0) == ABS_EXPR
13462 && ! TREE_SIDE_EFFECTS (arg0)
13463 && (0 != (tem = negate_expr (arg1)))
13464 && TREE_CODE (tem) == INTEGER_CST
13465 && !TREE_OVERFLOW (tem))
13466 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13467 build2 (GE_EXPR, type,
13468 TREE_OPERAND (arg0, 0), tem),
13469 build2 (LE_EXPR, type,
13470 TREE_OPERAND (arg0, 0), arg1));
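/* E.g.  abs (x) <= 5  folds to  x >= -5 && x <= 5  for an int x
   without side effects (informal; the negated bound must not
   overflow).  */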
13472 /* Convert ABS_EXPR<x> >= 0 to true. */
13473 strict_overflow_p = false;
13474 if (code == GE_EXPR
13475 && (integer_zerop (arg1)
13476 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13477 && real_zerop (arg1)))
13478 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13480 if (strict_overflow_p)
13481 fold_overflow_warning (("assuming signed overflow does not occur "
13482 "when simplifying comparison of "
13483 "absolute value and zero"),
13484 WARN_STRICT_OVERFLOW_CONDITIONAL);
13485 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13488 /* Convert ABS_EXPR<x> < 0 to false. */
13489 strict_overflow_p = false;
13490 if (code == LT_EXPR
13491 && (integer_zerop (arg1) || real_zerop (arg1))
13492 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13494 if (strict_overflow_p)
13495 fold_overflow_warning (("assuming signed overflow does not occur "
13496 "when simplifying comparison of "
13497 "absolute value and zero"),
13498 WARN_STRICT_OVERFLOW_CONDITIONAL);
13499 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13502 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13503 and similarly for >= into !=. */
13504 if ((code == LT_EXPR || code == GE_EXPR)
13505 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13506 && TREE_CODE (arg1) == LSHIFT_EXPR
13507 && integer_onep (TREE_OPERAND (arg1, 0)))
13508 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13509 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13510 TREE_OPERAND (arg1, 1)),
13511 build_zero_cst (TREE_TYPE (arg0)));
13513 if ((code == LT_EXPR || code == GE_EXPR)
13514 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13515 && CONVERT_EXPR_P (arg1)
13516 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13517 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13519 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13520 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13521 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13522 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13523 build_zero_cst (TREE_TYPE (arg0)));
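/* E.g. for unsigned x:  x < (1u << y)  ->  (x >> y) == 0  and
   x >= (1u << y)  ->  (x >> y) != 0  (informal sketch).  */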
13526 return NULL_TREE;
13528 case UNORDERED_EXPR:
13529 case ORDERED_EXPR:
13530 case UNLT_EXPR:
13531 case UNLE_EXPR:
13532 case UNGT_EXPR:
13533 case UNGE_EXPR:
13534 case UNEQ_EXPR:
13535 case LTGT_EXPR:
13536 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13538 t1 = fold_relational_const (code, type, arg0, arg1);
13539 if (t1 != NULL_TREE)
13540 return t1;
13543 /* If the first operand is NaN, the result is constant. */
13544 if (TREE_CODE (arg0) == REAL_CST
13545 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13546 && (code != LTGT_EXPR || ! flag_trapping_math))
13548 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13549 ? integer_zero_node
13550 : integer_one_node;
13551 return omit_one_operand_loc (loc, type, t1, arg1);
13554 /* If the second operand is NaN, the result is constant. */
13555 if (TREE_CODE (arg1) == REAL_CST
13556 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13557 && (code != LTGT_EXPR || ! flag_trapping_math))
13559 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13560 ? integer_zero_node
13561 : integer_one_node;
13562 return omit_one_operand_loc (loc, type, t1, arg0);
13565 /* Simplify unordered comparison of something with itself. */
13566 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13567 && operand_equal_p (arg0, arg1, 0))
13568 return constant_boolean_node (1, type);
13570 if (code == LTGT_EXPR
13571 && !flag_trapping_math
13572 && operand_equal_p (arg0, arg1, 0))
13573 return constant_boolean_node (0, type);
13575 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13577 tree targ0 = strip_float_extensions (arg0);
13578 tree targ1 = strip_float_extensions (arg1);
13579 tree newtype = TREE_TYPE (targ0);
13581 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13582 newtype = TREE_TYPE (targ1);
13584 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13585 return fold_build2_loc (loc, code, type,
13586 fold_convert_loc (loc, newtype, targ0),
13587 fold_convert_loc (loc, newtype, targ1));
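/* E.g. for float f, g:  (double) f < (double) g  folds to  f < g,
   since widening both operands cannot change the result of the
   comparison (informal sketch).  */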
13590 return NULL_TREE;
13592 case COMPOUND_EXPR:
13593 /* When pedantic, a compound expression can be neither an lvalue
13594 nor an integer constant expression. */
13595 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13596 return NULL_TREE;
13597 /* Don't let (0, 0) be a null pointer constant. */
13598 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13599 : fold_convert_loc (loc, type, arg1);
13600 return pedantic_non_lvalue_loc (loc, tem);
13602 case COMPLEX_EXPR:
13603 if ((TREE_CODE (arg0) == REAL_CST
13604 && TREE_CODE (arg1) == REAL_CST)
13605 || (TREE_CODE (arg0) == INTEGER_CST
13606 && TREE_CODE (arg1) == INTEGER_CST))
13607 return build_complex (type, arg0, arg1);
13608 if (TREE_CODE (arg0) == REALPART_EXPR
13609 && TREE_CODE (arg1) == IMAGPART_EXPR
13610 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13611 && operand_equal_p (TREE_OPERAND (arg0, 0),
13612 TREE_OPERAND (arg1, 0), 0))
13613 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13614 TREE_OPERAND (arg1, 0));
13615 return NULL_TREE;
13617 case ASSERT_EXPR:
13618 /* An ASSERT_EXPR should never be passed to fold_binary. */
13619 gcc_unreachable ();
13621 case VEC_PACK_TRUNC_EXPR:
13622 case VEC_PACK_FIX_TRUNC_EXPR:
13624 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13625 tree *elts;
13627 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13628 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13629 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13630 return NULL_TREE;
13632 elts = XALLOCAVEC (tree, nelts);
13633 if (!vec_cst_ctor_to_array (arg0, elts)
13634 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13635 return NULL_TREE;
13637 for (i = 0; i < nelts; i++)
13639 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13640 ? NOP_EXPR : FIX_TRUNC_EXPR,
13641 TREE_TYPE (type), elts[i]);
13642 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13643 return NULL_TREE;
13646 return build_vector (type, elts);
13649 case VEC_WIDEN_MULT_LO_EXPR:
13650 case VEC_WIDEN_MULT_HI_EXPR:
13651 case VEC_WIDEN_MULT_EVEN_EXPR:
13652 case VEC_WIDEN_MULT_ODD_EXPR:
13654 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13655 unsigned int out, ofs, scale;
13656 tree *elts;
13658 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13659 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13660 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13661 return NULL_TREE;
13663 elts = XALLOCAVEC (tree, nelts * 4);
13664 if (!vec_cst_ctor_to_array (arg0, elts)
13665 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13666 return NULL_TREE;
13668 if (code == VEC_WIDEN_MULT_LO_EXPR)
13669 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13670 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13671 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13672 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13673 scale = 1, ofs = 0;
13674 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13675 scale = 1, ofs = 1;
13677 for (out = 0; out < nelts; out++)
13679 unsigned int in1 = (out << scale) + ofs;
13680 unsigned int in2 = in1 + nelts * 2;
13681 tree t1, t2;
13683 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13684 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13686 if (t1 == NULL_TREE || t2 == NULL_TREE)
13687 return NULL_TREE;
13688 elts[out] = const_binop (MULT_EXPR, t1, t2);
13689 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13690 return NULL_TREE;
13693 return build_vector (type, elts);
13696 default:
13697 return NULL_TREE;
13698 } /* switch (code) */
13701 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13702 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13703 of GOTO_EXPR. */
13705 static tree
13706 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13708 switch (TREE_CODE (*tp))
13710 case LABEL_EXPR:
13711 return *tp;
13713 case GOTO_EXPR:
13714 *walk_subtrees = 0;
13716 /* ... fall through ... */
13718 default:
13719 return NULL_TREE;
13723 /* Return whether the sub-tree ST contains a label which is accessible from
13724 outside the sub-tree. */
13726 static bool
13727 contains_label_p (tree st)
13729 return
13730 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13733 /* Fold a ternary expression of code CODE and type TYPE with operands
13734 OP0, OP1, and OP2. Return the folded expression if folding is
13735 successful. Otherwise, return NULL_TREE. */
13737 tree
13738 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13739 tree op0, tree op1, tree op2)
13741 tree tem;
13742 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13743 enum tree_code_class kind = TREE_CODE_CLASS (code);
13745 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13746 && TREE_CODE_LENGTH (code) == 3);
13748 /* Strip any conversions that don't change the mode. This is safe
13749 for every expression, except for a comparison expression because
13750 its signedness is derived from its operands. So, in the latter
13751 case, only strip conversions that don't change the signedness.
13753 Note that this is done as an internal manipulation within the
13754 constant folder, in order to find the simplest representation of
13755 the arguments so that their form can be studied. In any case,
13756 the appropriate type conversions should be put back in the tree
13757 that will get out of the constant folder. */
13758 if (op0)
13760 arg0 = op0;
13761 STRIP_NOPS (arg0);
13764 if (op1)
13766 arg1 = op1;
13767 STRIP_NOPS (arg1);
13770 if (op2)
13772 arg2 = op2;
13773 STRIP_NOPS (arg2);
13776 switch (code)
13778 case COMPONENT_REF:
13779 if (TREE_CODE (arg0) == CONSTRUCTOR
13780 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13782 unsigned HOST_WIDE_INT idx;
13783 tree field, value;
13784 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13785 if (field == arg1)
13786 return value;
13788 return NULL_TREE;
13790 case COND_EXPR:
13791 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13792 so all simple results must be passed through pedantic_non_lvalue. */
13793 if (TREE_CODE (arg0) == INTEGER_CST)
13795 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13796 tem = integer_zerop (arg0) ? op2 : op1;
13797 /* Only optimize constant conditions when the selected branch
13798 has the same type as the COND_EXPR. This avoids optimizing
13799 away "c ? x : throw", where the throw has a void type.
13800 Avoid throwing away the operand that contains a label. */
13801 if ((!TREE_SIDE_EFFECTS (unused_op)
13802 || !contains_label_p (unused_op))
13803 && (! VOID_TYPE_P (TREE_TYPE (tem))
13804 || VOID_TYPE_P (type)))
13805 return pedantic_non_lvalue_loc (loc, tem);
13806 return NULL_TREE;
13808 if (operand_equal_p (arg1, op2, 0))
13809 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13811 /* If we have A op B ? A : C, we may be able to convert this to a
13812 simpler expression, depending on the operation and the values
13813 of B and C. Signed zeros prevent all of these transformations,
13814 for reasons given above each one.
13816 Also try swapping the arguments and inverting the conditional. */
13817 if (COMPARISON_CLASS_P (arg0)
13818 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13819 arg1, TREE_OPERAND (arg0, 1))
13820 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13822 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13823 if (tem)
13824 return tem;
13827 if (COMPARISON_CLASS_P (arg0)
13828 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13829 op2,
13830 TREE_OPERAND (arg0, 1))
13831 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13833 location_t loc0 = expr_location_or (arg0, loc);
13834 tem = fold_truth_not_expr (loc0, arg0);
13835 if (tem && COMPARISON_CLASS_P (tem))
13837 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13838 if (tem)
13839 return tem;
13843 /* If the second operand is simpler than the third, swap them
13844 since that produces better jump optimization results. */
13845 if (truth_value_p (TREE_CODE (arg0))
13846 && tree_swap_operands_p (op1, op2, false))
13848 location_t loc0 = expr_location_or (arg0, loc);
13849 /* See if this can be inverted. If it can't, possibly because
13850 it was a floating-point inequality comparison, don't do
13851 anything. */
13852 tem = fold_truth_not_expr (loc0, arg0);
13853 if (tem)
13854 return fold_build3_loc (loc, code, type, tem, op2, op1);
13857 /* Convert A ? 1 : 0 to simply A. */
13858 if (integer_onep (op1)
13859 && integer_zerop (op2)
13860 /* If we try to convert OP0 to our type, the
13861 call to fold will try to move the conversion inside
13862 a COND, which will recurse. In that case, the COND_EXPR
13863 is probably the best choice, so leave it alone. */
13864 && type == TREE_TYPE (arg0))
13865 return pedantic_non_lvalue_loc (loc, arg0);
13867 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13868 over COND_EXPR in cases such as floating point comparisons. */
13869 if (integer_zerop (op1)
13870 && integer_onep (op2)
13871 && truth_value_p (TREE_CODE (arg0)))
13872 return pedantic_non_lvalue_loc (loc,
13873 fold_convert_loc (loc, type,
13874 invert_truthvalue_loc (loc,
13875 arg0)));
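      /* The two rewrites above as source-level identities (illustrative
	 sketch, not part of this file; T is already a 0/1 truth value).  */
      #include <assert.h>
      static void
      example_cond_bool_identities (int t)
      {
	t = (t != 0);
	assert ((t ? 1 : 0) == t);    /* A ? 1 : 0  ==>  A   */
	assert ((t ? 0 : 1) == !t);   /* A ? 0 : 1  ==>  !A  */
      }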
13877 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13878 if (TREE_CODE (arg0) == LT_EXPR
13879 && integer_zerop (TREE_OPERAND (arg0, 1))
13880 && integer_zerop (op2)
13881 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13883 /* sign_bit_p only checks ARG1 bits within A's precision.
13884 If <sign bit of A> has wider type than A, bits outside
13885 of A's precision in <sign bit of A> need to be checked.
13886 If they are all 0, this optimization needs to be done
13887	     in A's unsigned type; if they are all 1, in A's signed type;
13888	     otherwise this can't be done. */
13889 if (TYPE_PRECISION (TREE_TYPE (tem))
13890 < TYPE_PRECISION (TREE_TYPE (arg1))
13891 && TYPE_PRECISION (TREE_TYPE (tem))
13892 < TYPE_PRECISION (type))
13894 unsigned HOST_WIDE_INT mask_lo;
13895 HOST_WIDE_INT mask_hi;
13896 int inner_width, outer_width;
13897 tree tem_type;
13899 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13900 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13901 if (outer_width > TYPE_PRECISION (type))
13902 outer_width = TYPE_PRECISION (type);
13904 if (outer_width > HOST_BITS_PER_WIDE_INT)
13906 mask_hi = ((unsigned HOST_WIDE_INT) -1
13907 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13908 mask_lo = -1;
13910 else
13912 mask_hi = 0;
13913 mask_lo = ((unsigned HOST_WIDE_INT) -1
13914 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13916 if (inner_width > HOST_BITS_PER_WIDE_INT)
13918 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13919 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13920 mask_lo = 0;
13922 else
13923 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13924 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13926 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13927 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13929 tem_type = signed_type_for (TREE_TYPE (tem));
13930 tem = fold_convert_loc (loc, tem_type, tem);
13932 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13933 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13935 tem_type = unsigned_type_for (TREE_TYPE (tem));
13936 tem = fold_convert_loc (loc, tem_type, tem);
13938 else
13939 tem = NULL;
13942 if (tem)
13943 return
13944 fold_convert_loc (loc, type,
13945 fold_build2_loc (loc, BIT_AND_EXPR,
13946 TREE_TYPE (tem), tem,
13947 fold_convert_loc (loc,
13948 TREE_TYPE (tem),
13949 arg1)));
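      /* Standalone sketch of the sign-bit rewrite above (illustrative
	 only; assumes a 32-bit two's complement int): A < 0 selects
	 exactly the sign bit, so the conditional collapses to a mask.  */
      #include <assert.h>
      static void
      example_sign_bit_fold (int a)
      {
	unsigned int sign = 0x80000000u;
	assert (((a < 0) ? sign : 0u) == ((unsigned int) a & sign));
      }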
13952 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13953 already handled above. */
13954 if (TREE_CODE (arg0) == BIT_AND_EXPR
13955 && integer_onep (TREE_OPERAND (arg0, 1))
13956 && integer_zerop (op2)
13957 && integer_pow2p (arg1))
13959 tree tem = TREE_OPERAND (arg0, 0);
13960 STRIP_NOPS (tem);
13961 if (TREE_CODE (tem) == RSHIFT_EXPR
13962 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13963 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13964 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13965 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13966 TREE_OPERAND (tem, 0), arg1);
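      /* The shift/mask identity above as standalone C (a sketch, assuming
	 0 <= n < 32 for a 32-bit unsigned int): testing bit N and then
	 rebuilding it equals masking bit N directly.  */
      #include <assert.h>
      static void
      example_shift_mask_fold (unsigned int a, unsigned int n)
      {
	assert ((((a >> n) & 1u) ? (1u << n) : 0u) == (a & (1u << n)));
      }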
13969 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13970 is probably obsolete because the first operand should be a
13971 truth value (that's why we have the two cases above), but let's
13972 leave it in until we can confirm this for all front-ends. */
13973 if (integer_zerop (op2)
13974 && TREE_CODE (arg0) == NE_EXPR
13975 && integer_zerop (TREE_OPERAND (arg0, 1))
13976 && integer_pow2p (arg1)
13977 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13978 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13979 arg1, OEP_ONLY_CONST))
13980 return pedantic_non_lvalue_loc (loc,
13981 fold_convert_loc (loc, type,
13982 TREE_OPERAND (arg0, 0)));
13984 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13985 if (integer_zerop (op2)
13986 && truth_value_p (TREE_CODE (arg0))
13987 && truth_value_p (TREE_CODE (arg1)))
13988 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13989 fold_convert_loc (loc, type, arg0),
13990 arg1);
13992 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13993 if (integer_onep (op2)
13994 && truth_value_p (TREE_CODE (arg0))
13995 && truth_value_p (TREE_CODE (arg1)))
13997 location_t loc0 = expr_location_or (arg0, loc);
13998 /* Only perform transformation if ARG0 is easily inverted. */
13999 tem = fold_truth_not_expr (loc0, arg0);
14000 if (tem)
14001 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14002 fold_convert_loc (loc, type, tem),
14003 arg1);
14006 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14007 if (integer_zerop (arg1)
14008 && truth_value_p (TREE_CODE (arg0))
14009 && truth_value_p (TREE_CODE (op2)))
14011 location_t loc0 = expr_location_or (arg0, loc);
14012 /* Only perform transformation if ARG0 is easily inverted. */
14013 tem = fold_truth_not_expr (loc0, arg0);
14014 if (tem)
14015 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14016 fold_convert_loc (loc, type, tem),
14017 op2);
14020 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14021 if (integer_onep (arg1)
14022 && truth_value_p (TREE_CODE (arg0))
14023 && truth_value_p (TREE_CODE (op2)))
14024 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14025 fold_convert_loc (loc, type, arg0),
14026 op2);
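      /* The four truth-value conversions above as source-level identities
	 (illustrative sketch; A and B are already 0/1 truth values).  */
      #include <assert.h>
      static void
      example_cond_truth_identities (int a, int b)
      {
	a = (a != 0);
	b = (b != 0);
	assert ((a ? b : 0) == (a && b));    /* A ? B : 0  ==>  A && B   */
	assert ((a ? b : 1) == (!a || b));   /* A ? B : 1  ==>  !A || B  */
	assert ((a ? 0 : b) == (!a && b));   /* A ? 0 : B  ==>  !A && B  */
	assert ((a ? 1 : b) == (a || b));    /* A ? 1 : B  ==>  A || B   */
      }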
14028 return NULL_TREE;
14030 case CALL_EXPR:
14031 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14032 of fold_ternary on them. */
14033 gcc_unreachable ();
14035 case BIT_FIELD_REF:
14036 if ((TREE_CODE (arg0) == VECTOR_CST
14037 || (TREE_CODE (arg0) == CONSTRUCTOR
14038 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14039 && (type == TREE_TYPE (TREE_TYPE (arg0))
14040 || (TREE_CODE (type) == VECTOR_TYPE
14041 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14043 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14044 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14045 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14046 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14048 if (n != 0
14049 && (idx % width) == 0
14050 && (n % width) == 0
14051 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14053 idx = idx / width;
14054 n = n / width;
14055 if (TREE_CODE (type) == VECTOR_TYPE)
14057 if (TREE_CODE (arg0) == VECTOR_CST)
14059 tree *vals = XALLOCAVEC (tree, n);
14060 unsigned i;
14061 for (i = 0; i < n; ++i)
14062 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14063 return build_vector (type, vals);
14065 else
14067 VEC(constructor_elt, gc) *vals;
14068 unsigned i;
14069 if (CONSTRUCTOR_NELTS (arg0) == 0)
14070 return build_constructor (type, NULL);
14071 vals = VEC_alloc (constructor_elt, gc, n);
14072 for (i = 0; i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14073 ++i)
14074 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14075 CONSTRUCTOR_ELT
14076 (arg0, idx + i)->value);
14077 return build_constructor (type, vals);
14080 else if (n == 1)
14082 if (TREE_CODE (arg0) == VECTOR_CST)
14083 return VECTOR_CST_ELT (arg0, idx);
14084 else if (idx < CONSTRUCTOR_NELTS (arg0))
14085 return CONSTRUCTOR_ELT (arg0, idx)->value;
14086 return build_zero_cst (type);
14091 /* A bit-field-ref that referenced the full argument can be stripped. */
14092 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14093 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14094 && integer_zerop (op2))
14095 return fold_convert_loc (loc, type, arg0);
14097 /* On constants we can use native encode/interpret to constant
14098 fold (nearly) all BIT_FIELD_REFs. */
14099 if (CONSTANT_CLASS_P (arg0)
14100 && can_native_interpret_type_p (type)
14101 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14102 /* This limitation should not be necessary, we just need to
14103 round this up to mode size. */
14104 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14105 /* Need bit-shifting of the buffer to relax the following. */
14106 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14108 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14109 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14110 unsigned HOST_WIDE_INT clen;
14111 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14112 /* ??? We cannot tell native_encode_expr to start at
14113 some random byte only. So limit us to a reasonable amount
14114 of work. */
14115 if (clen <= 4096)
14117 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14118 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14119 if (len > 0
14120 && len * BITS_PER_UNIT >= bitpos + bitsize)
14122 tree v = native_interpret_expr (type,
14123 b + bitpos / BITS_PER_UNIT,
14124 bitsize / BITS_PER_UNIT);
14125 if (v)
14126 return v;
14131 return NULL_TREE;
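      /* Rough standalone analogue of the native encode/interpret path
	 above (a sketch, not this file's API; assumes 8-bit bytes and
	 that the host byte order stands in for the target's): a
	 byte-aligned BIT_FIELD_REF of a constant amounts to copying
	 bytes out of the constant's in-memory image.  */
      #include <stdint.h>
      #include <string.h>
      static uint8_t
      example_bit_field_ref (uint32_t cst, unsigned int bitpos)
      {
	uint8_t buf[4], v;
	memcpy (buf, &cst, 4);               /* "native encode" */
	memcpy (&v, buf + bitpos / 8, 1);    /* "native interpret" */
	return v;
      }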
14133 case FMA_EXPR:
14134 /* For integers we can decompose the FMA if possible. */
14135 if (TREE_CODE (arg0) == INTEGER_CST
14136 && TREE_CODE (arg1) == INTEGER_CST)
14137 return fold_build2_loc (loc, PLUS_EXPR, type,
14138 const_binop (MULT_EXPR, arg0, arg1), arg2);
14139 if (integer_zerop (arg2))
14140 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14142 return fold_fma (loc, type, arg0, arg1, arg2);
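      /* For reference (illustrative sketch): FMA_EXPR <a, b, c> computes
	 a * b + c, so with constant integer multiplicands it decomposes
	 into the plain multiply-add built above, and a zero addend
	 leaves just the multiply.  */
      static long
      example_integer_fma (long a, long b, long c)
      {
	return a * b + c;   /* equals a * b when c == 0 */
      }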
14144 case VEC_PERM_EXPR:
14145 if (TREE_CODE (arg2) == VECTOR_CST)
14147 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14148 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14149 tree t;
14150 bool need_mask_canon = false;
14151 bool all_in_vec0 = true;
14152 bool all_in_vec1 = true;
14153 bool maybe_identity = true;
14154 bool single_arg = (op0 == op1);
14155 bool changed = false;
14157 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14158 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14159 for (i = 0; i < nelts; i++)
14161 tree val = VECTOR_CST_ELT (arg2, i);
14162 if (TREE_CODE (val) != INTEGER_CST)
14163 return NULL_TREE;
14165 sel[i] = TREE_INT_CST_LOW (val) & mask;
14166 if (TREE_INT_CST_HIGH (val)
14167 || ((unsigned HOST_WIDE_INT)
14168 TREE_INT_CST_LOW (val) != sel[i]))
14169 need_mask_canon = true;
14171 if (sel[i] < nelts)
14172 all_in_vec1 = false;
14173 else
14174 all_in_vec0 = false;
14176 if ((sel[i] & (nelts-1)) != i)
14177 maybe_identity = false;
14180 if (maybe_identity)
14182 if (all_in_vec0)
14183 return op0;
14184 if (all_in_vec1)
14185 return op1;
14188 if (all_in_vec0)
14189 op1 = op0;
14190 else if (all_in_vec1)
14192 op0 = op1;
14193 for (i = 0; i < nelts; i++)
14194 sel[i] -= nelts;
14195 need_mask_canon = true;
14198 if ((TREE_CODE (op0) == VECTOR_CST
14199 || TREE_CODE (op0) == CONSTRUCTOR)
14200 && (TREE_CODE (op1) == VECTOR_CST
14201 || TREE_CODE (op1) == CONSTRUCTOR))
14203 t = fold_vec_perm (type, op0, op1, sel);
14204 if (t != NULL_TREE)
14205 return t;
14208 if (op0 == op1 && !single_arg)
14209 changed = true;
14211 if (need_mask_canon && arg2 == op2)
14213 tree *tsel = XALLOCAVEC (tree, nelts);
14214 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14215 for (i = 0; i < nelts; i++)
14216 tsel[i] = build_int_cst (eltype, sel[i]);
14217 op2 = build_vector (TREE_TYPE (arg2), tsel);
14218 changed = true;
14221 if (changed)
14222 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14224 return NULL_TREE;
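      /* Standalone model of the VEC_PERM_EXPR semantics handled above
	 (a sketch; EXAMPLE_NELTS is a hypothetical width): each mask
	 element, reduced modulo 2*nelts, selects from the concatenation
	 of the two input vectors.  */
      #define EXAMPLE_NELTS 4
      static void
      example_vec_perm (const int *v0, const int *v1,
			const unsigned char *sel, int *out)
      {
	unsigned int i;
	for (i = 0; i < EXAMPLE_NELTS; i++)
	  {
	    /* Mask canonicalization, as in the code above.  */
	    unsigned char s = sel[i] & (2 * EXAMPLE_NELTS - 1);
	    out[i] = s < EXAMPLE_NELTS ? v0[s] : v1[s - EXAMPLE_NELTS];
	  }
      }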
14226 default:
14227 return NULL_TREE;
14228 } /* switch (code) */
14231 /* Perform constant folding and related simplification of EXPR.
14232 The related simplifications include x*1 => x, x*0 => 0, etc.,
14233 and application of the associative law.
14234 NOP_EXPR conversions may be removed freely (as long as we
14235 are careful not to change the type of the overall expression).
14236 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14237 but we can constant-fold them if they have constant operands. */
14239 #ifdef ENABLE_FOLD_CHECKING
14240 # define fold(x) fold_1 (x)
14241 static tree fold_1 (tree);
14242 static
14243 #endif
14244 tree
14245 fold (tree expr)
14247 const tree t = expr;
14248 enum tree_code code = TREE_CODE (t);
14249 enum tree_code_class kind = TREE_CODE_CLASS (code);
14250 tree tem;
14251 location_t loc = EXPR_LOCATION (expr);
14253 /* Return right away if a constant. */
14254 if (kind == tcc_constant)
14255 return t;
14257 /* CALL_EXPR-like objects with variable numbers of operands are
14258 treated specially. */
14259 if (kind == tcc_vl_exp)
14261 if (code == CALL_EXPR)
14263 tem = fold_call_expr (loc, expr, false);
14264 return tem ? tem : expr;
14266 return expr;
14269 if (IS_EXPR_CODE_CLASS (kind))
14271 tree type = TREE_TYPE (t);
14272 tree op0, op1, op2;
14274 switch (TREE_CODE_LENGTH (code))
14276 case 1:
14277 op0 = TREE_OPERAND (t, 0);
14278 tem = fold_unary_loc (loc, code, type, op0);
14279 return tem ? tem : expr;
14280 case 2:
14281 op0 = TREE_OPERAND (t, 0);
14282 op1 = TREE_OPERAND (t, 1);
14283 tem = fold_binary_loc (loc, code, type, op0, op1);
14284 return tem ? tem : expr;
14285 case 3:
14286 op0 = TREE_OPERAND (t, 0);
14287 op1 = TREE_OPERAND (t, 1);
14288 op2 = TREE_OPERAND (t, 2);
14289 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14290 return tem ? tem : expr;
14291 default:
14292 break;
14296 switch (code)
14298 case ARRAY_REF:
14300 tree op0 = TREE_OPERAND (t, 0);
14301 tree op1 = TREE_OPERAND (t, 1);
14303 if (TREE_CODE (op1) == INTEGER_CST
14304 && TREE_CODE (op0) == CONSTRUCTOR
14305 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14307 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14308 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14309 unsigned HOST_WIDE_INT begin = 0;
14311 /* Find a matching index by means of a binary search. */
14312 while (begin != end)
14314 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14315 tree index = VEC_index (constructor_elt, elts, middle).index;
14317 if (TREE_CODE (index) == INTEGER_CST
14318 && tree_int_cst_lt (index, op1))
14319 begin = middle + 1;
14320 else if (TREE_CODE (index) == INTEGER_CST
14321 && tree_int_cst_lt (op1, index))
14322 end = middle;
14323 else if (TREE_CODE (index) == RANGE_EXPR
14324 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14325 begin = middle + 1;
14326 else if (TREE_CODE (index) == RANGE_EXPR
14327 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14328 end = middle;
14329 else
14330 return VEC_index (constructor_elt, elts, middle).value;
14334 return t;
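	/* Simplified standalone version of the binary search above
	   (illustrative sketch over a plain sorted array standing in
	   for constructor_elt entries; IDX must be sorted ascending).  */
	static int
	example_find_index (const int *idx, unsigned int n, int key)
	{
	  unsigned int begin = 0, end = n;
	  while (begin != end)
	    {
	      unsigned int middle = (begin + end) / 2;
	      if (idx[middle] < key)
		begin = middle + 1;
	      else if (key < idx[middle])
		end = middle;
	      else
		return (int) middle;
	    }
	  return -1;   /* no matching index */
	}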
14337 case CONST_DECL:
14338 return fold (DECL_INITIAL (t));
14340 default:
14341 return t;
14342 } /* switch (code) */
14345 #ifdef ENABLE_FOLD_CHECKING
14346 #undef fold
14348 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14349 static void fold_check_failed (const_tree, const_tree);
14350 void print_fold_checksum (const_tree);
14352 /* When --enable-checking=fold, compute a digest of expr before
14353 and after actual fold call to see if fold did not accidentally
14354 change original expr. */
14356 tree
14357 fold (tree expr)
14359 tree ret;
14360 struct md5_ctx ctx;
14361 unsigned char checksum_before[16], checksum_after[16];
14362 htab_t ht;
14364 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14365 md5_init_ctx (&ctx);
14366 fold_checksum_tree (expr, &ctx, ht);
14367 md5_finish_ctx (&ctx, checksum_before);
14368 htab_empty (ht);
14370 ret = fold_1 (expr);
14372 md5_init_ctx (&ctx);
14373 fold_checksum_tree (expr, &ctx, ht);
14374 md5_finish_ctx (&ctx, checksum_after);
14375 htab_delete (ht);
14377 if (memcmp (checksum_before, checksum_after, 16))
14378 fold_check_failed (expr, ret);
14380 return ret;
14383 void
14384 print_fold_checksum (const_tree expr)
14386 struct md5_ctx ctx;
14387 unsigned char checksum[16], cnt;
14388 htab_t ht;
14390 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14391 md5_init_ctx (&ctx);
14392 fold_checksum_tree (expr, &ctx, ht);
14393 md5_finish_ctx (&ctx, checksum);
14394 htab_delete (ht);
14395 for (cnt = 0; cnt < 16; ++cnt)
14396 fprintf (stderr, "%02x", checksum[cnt]);
14397 putc ('\n', stderr);
14400 static void
14401 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14403 internal_error ("fold check: original tree changed by fold");
14406 static void
14407 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14409 void **slot;
14410 enum tree_code code;
14411 union tree_node buf;
14412 int i, len;
14414 recursive_label:
14415 if (expr == NULL)
14416 return;
14417 slot = (void **) htab_find_slot (ht, expr, INSERT);
14418 if (*slot != NULL)
14419 return;
14420 *slot = CONST_CAST_TREE (expr);
14421 code = TREE_CODE (expr);
14422 if (TREE_CODE_CLASS (code) == tcc_declaration
14423 && DECL_ASSEMBLER_NAME_SET_P (expr))
14425 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14426 memcpy ((char *) &buf, expr, tree_size (expr));
14427 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14428 expr = (tree) &buf;
14430 else if (TREE_CODE_CLASS (code) == tcc_type
14431 && (TYPE_POINTER_TO (expr)
14432 || TYPE_REFERENCE_TO (expr)
14433 || TYPE_CACHED_VALUES_P (expr)
14434 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14435 || TYPE_NEXT_VARIANT (expr)))
14437 /* Allow these fields to be modified. */
14438 tree tmp;
14439 memcpy ((char *) &buf, expr, tree_size (expr));
14440 expr = tmp = (tree) &buf;
14441 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14442 TYPE_POINTER_TO (tmp) = NULL;
14443 TYPE_REFERENCE_TO (tmp) = NULL;
14444 TYPE_NEXT_VARIANT (tmp) = NULL;
14445 if (TYPE_CACHED_VALUES_P (tmp))
14447 TYPE_CACHED_VALUES_P (tmp) = 0;
14448 TYPE_CACHED_VALUES (tmp) = NULL;
14451 md5_process_bytes (expr, tree_size (expr), ctx);
14452 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14453 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14454 if (TREE_CODE_CLASS (code) != tcc_type
14455 && TREE_CODE_CLASS (code) != tcc_declaration
14456 && code != TREE_LIST
14457 && code != SSA_NAME
14458 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14459 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14460 switch (TREE_CODE_CLASS (code))
14462 case tcc_constant:
14463 switch (code)
14465 case STRING_CST:
14466 md5_process_bytes (TREE_STRING_POINTER (expr),
14467 TREE_STRING_LENGTH (expr), ctx);
14468 break;
14469 case COMPLEX_CST:
14470 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14471 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14472 break;
14473 case VECTOR_CST:
14474 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14475 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14476 break;
14477 default:
14478 break;
14480 break;
14481 case tcc_exceptional:
14482 switch (code)
14484 case TREE_LIST:
14485 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14486 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14487 expr = TREE_CHAIN (expr);
14488 goto recursive_label;
14489 break;
14490 case TREE_VEC:
14491 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14492 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14493 break;
14494 default:
14495 break;
14497 break;
14498 case tcc_expression:
14499 case tcc_reference:
14500 case tcc_comparison:
14501 case tcc_unary:
14502 case tcc_binary:
14503 case tcc_statement:
14504 case tcc_vl_exp:
14505 len = TREE_OPERAND_LENGTH (expr);
14506 for (i = 0; i < len; ++i)
14507 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14508 break;
14509 case tcc_declaration:
14510 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14511 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14512 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14514 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14515 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14516 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14517 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14518 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14520 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14521 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14523 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14525 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14526 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14527 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14529 break;
14530 case tcc_type:
14531 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14532 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14533 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14534 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14535 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14536 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14537 if (INTEGRAL_TYPE_P (expr)
14538 || SCALAR_FLOAT_TYPE_P (expr))
14540 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14541 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14543 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14544 if (TREE_CODE (expr) == RECORD_TYPE
14545 || TREE_CODE (expr) == UNION_TYPE
14546 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14547 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14548 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14549 break;
14550 default:
14551 break;
14555 /* Helper function for outputting the checksum of a tree T. When
14556 debugging with gdb, you can "define mynext" to be "next" followed
14557 by "call debug_fold_checksum (op0)", then just trace down till the
14558 outputs differ. */
14560 DEBUG_FUNCTION void
14561 debug_fold_checksum (const_tree t)
14563 int i;
14564 unsigned char checksum[16];
14565 struct md5_ctx ctx;
14566 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14568 md5_init_ctx (&ctx);
14569 fold_checksum_tree (t, &ctx, ht);
14570 md5_finish_ctx (&ctx, checksum);
14571 htab_empty (ht);
14573 for (i = 0; i < 16; i++)
14574 fprintf (stderr, "%d ", checksum[i]);
14576 fprintf (stderr, "\n");
14579 #endif
14581 /* Fold a unary tree expression with code CODE of type TYPE with an
14582 operand OP0. LOC is the location of the resulting expression.
14583 Return a folded expression if successful. Otherwise, return a tree
14584 expression with code CODE of type TYPE with an operand OP0. */
14586 tree
14587 fold_build1_stat_loc (location_t loc,
14588 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14590 tree tem;
14591 #ifdef ENABLE_FOLD_CHECKING
14592 unsigned char checksum_before[16], checksum_after[16];
14593 struct md5_ctx ctx;
14594 htab_t ht;
14596 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14597 md5_init_ctx (&ctx);
14598 fold_checksum_tree (op0, &ctx, ht);
14599 md5_finish_ctx (&ctx, checksum_before);
14600 htab_empty (ht);
14601 #endif
14603 tem = fold_unary_loc (loc, code, type, op0);
14604 if (!tem)
14605 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14607 #ifdef ENABLE_FOLD_CHECKING
14608 md5_init_ctx (&ctx);
14609 fold_checksum_tree (op0, &ctx, ht);
14610 md5_finish_ctx (&ctx, checksum_after);
14611 htab_delete (ht);
14613 if (memcmp (checksum_before, checksum_after, 16))
14614 fold_check_failed (op0, tem);
14615 #endif
14616 return tem;
14619 /* Fold a binary tree expression with code CODE of type TYPE with
14620 operands OP0 and OP1. LOC is the location of the resulting
14621 expression. Return a folded expression if successful. Otherwise,
14622 return a tree expression with code CODE of type TYPE with operands
14623 OP0 and OP1. */
14625 tree
14626 fold_build2_stat_loc (location_t loc,
14627 enum tree_code code, tree type, tree op0, tree op1
14628 MEM_STAT_DECL)
14630 tree tem;
14631 #ifdef ENABLE_FOLD_CHECKING
14632 unsigned char checksum_before_op0[16],
14633 checksum_before_op1[16],
14634 checksum_after_op0[16],
14635 checksum_after_op1[16];
14636 struct md5_ctx ctx;
14637 htab_t ht;
14639 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14640 md5_init_ctx (&ctx);
14641 fold_checksum_tree (op0, &ctx, ht);
14642 md5_finish_ctx (&ctx, checksum_before_op0);
14643 htab_empty (ht);
14645 md5_init_ctx (&ctx);
14646 fold_checksum_tree (op1, &ctx, ht);
14647 md5_finish_ctx (&ctx, checksum_before_op1);
14648 htab_empty (ht);
14649 #endif
14651 tem = fold_binary_loc (loc, code, type, op0, op1);
14652 if (!tem)
14653 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14655 #ifdef ENABLE_FOLD_CHECKING
14656 md5_init_ctx (&ctx);
14657 fold_checksum_tree (op0, &ctx, ht);
14658 md5_finish_ctx (&ctx, checksum_after_op0);
14659 htab_empty (ht);
14661 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14662 fold_check_failed (op0, tem);
14664 md5_init_ctx (&ctx);
14665 fold_checksum_tree (op1, &ctx, ht);
14666 md5_finish_ctx (&ctx, checksum_after_op1);
14667 htab_delete (ht);
14669 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14670 fold_check_failed (op1, tem);
14671 #endif
14672 return tem;
14675 /* Fold a ternary tree expression with code CODE of type TYPE with
14676 operands OP0, OP1, and OP2. Return a folded expression if
14677 successful. Otherwise, return a tree expression with code CODE of
14678 type TYPE with operands OP0, OP1, and OP2. */
14680 tree
14681 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14682 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14684 tree tem;
14685 #ifdef ENABLE_FOLD_CHECKING
14686 unsigned char checksum_before_op0[16],
14687 checksum_before_op1[16],
14688 checksum_before_op2[16],
14689 checksum_after_op0[16],
14690 checksum_after_op1[16],
14691 checksum_after_op2[16];
14692 struct md5_ctx ctx;
14693 htab_t ht;
14695 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14696 md5_init_ctx (&ctx);
14697 fold_checksum_tree (op0, &ctx, ht);
14698 md5_finish_ctx (&ctx, checksum_before_op0);
14699 htab_empty (ht);
14701 md5_init_ctx (&ctx);
14702 fold_checksum_tree (op1, &ctx, ht);
14703 md5_finish_ctx (&ctx, checksum_before_op1);
14704 htab_empty (ht);
14706 md5_init_ctx (&ctx);
14707 fold_checksum_tree (op2, &ctx, ht);
14708 md5_finish_ctx (&ctx, checksum_before_op2);
14709 htab_empty (ht);
14710 #endif
14712 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14713 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14714 if (!tem)
14715 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14717 #ifdef ENABLE_FOLD_CHECKING
14718 md5_init_ctx (&ctx);
14719 fold_checksum_tree (op0, &ctx, ht);
14720 md5_finish_ctx (&ctx, checksum_after_op0);
14721 htab_empty (ht);
14723 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14724 fold_check_failed (op0, tem);
14726 md5_init_ctx (&ctx);
14727 fold_checksum_tree (op1, &ctx, ht);
14728 md5_finish_ctx (&ctx, checksum_after_op1);
14729 htab_empty (ht);
14731 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14732 fold_check_failed (op1, tem);
14734 md5_init_ctx (&ctx);
14735 fold_checksum_tree (op2, &ctx, ht);
14736 md5_finish_ctx (&ctx, checksum_after_op2);
14737 htab_delete (ht);
14739 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14740 fold_check_failed (op2, tem);
14741 #endif
14742 return tem;
14745 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14746    arguments in ARGARRAY, and a null static chain.
14747 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14748 of type TYPE from the given operands as constructed by build_call_array. */
14750 tree
14751 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14752 int nargs, tree *argarray)
14754 tree tem;
14755 #ifdef ENABLE_FOLD_CHECKING
14756 unsigned char checksum_before_fn[16],
14757 checksum_before_arglist[16],
14758 checksum_after_fn[16],
14759 checksum_after_arglist[16];
14760 struct md5_ctx ctx;
14761 htab_t ht;
14762 int i;
14764 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14765 md5_init_ctx (&ctx);
14766 fold_checksum_tree (fn, &ctx, ht);
14767 md5_finish_ctx (&ctx, checksum_before_fn);
14768 htab_empty (ht);
14770 md5_init_ctx (&ctx);
14771 for (i = 0; i < nargs; i++)
14772 fold_checksum_tree (argarray[i], &ctx, ht);
14773 md5_finish_ctx (&ctx, checksum_before_arglist);
14774 htab_empty (ht);
14775 #endif
14777 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14779 #ifdef ENABLE_FOLD_CHECKING
14780 md5_init_ctx (&ctx);
14781 fold_checksum_tree (fn, &ctx, ht);
14782 md5_finish_ctx (&ctx, checksum_after_fn);
14783 htab_empty (ht);
14785 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14786 fold_check_failed (fn, tem);
14788 md5_init_ctx (&ctx);
14789 for (i = 0; i < nargs; i++)
14790 fold_checksum_tree (argarray[i], &ctx, ht);
14791 md5_finish_ctx (&ctx, checksum_after_arglist);
14792 htab_delete (ht);
14794 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14795 fold_check_failed (NULL_TREE, tem);
14796 #endif
14797 return tem;
14800 /* Perform constant folding and related simplification of initializer
14801 expression EXPR. These behave identically to "fold_buildN" but ignore
14802 potential run-time traps and exceptions that fold must preserve. */
14804 #define START_FOLD_INIT \
14805 int saved_signaling_nans = flag_signaling_nans;\
14806 int saved_trapping_math = flag_trapping_math;\
14807 int saved_rounding_math = flag_rounding_math;\
14808 int saved_trapv = flag_trapv;\
14809 int saved_folding_initializer = folding_initializer;\
14810 flag_signaling_nans = 0;\
14811 flag_trapping_math = 0;\
14812 flag_rounding_math = 0;\
14813 flag_trapv = 0;\
14814 folding_initializer = 1;
14816 #define END_FOLD_INIT \
14817 flag_signaling_nans = saved_signaling_nans;\
14818 flag_trapping_math = saved_trapping_math;\
14819 flag_rounding_math = saved_rounding_math;\
14820 flag_trapv = saved_trapv;\
14821 folding_initializer = saved_folding_initializer;
14823 tree
14824 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14825 tree type, tree op)
14827 tree result;
14828 START_FOLD_INIT;
14830 result = fold_build1_loc (loc, code, type, op);
14832 END_FOLD_INIT;
14833 return result;
14836 tree
14837 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14838 tree type, tree op0, tree op1)
14840 tree result;
14841 START_FOLD_INIT;
14843 result = fold_build2_loc (loc, code, type, op0, op1);
14845 END_FOLD_INIT;
14846 return result;
14849 tree
14850 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14851 tree type, tree op0, tree op1, tree op2)
14853 tree result;
14854 START_FOLD_INIT;
14856 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14858 END_FOLD_INIT;
14859 return result;
14862 tree
14863 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14864 int nargs, tree *argarray)
14866 tree result;
14867 START_FOLD_INIT;
14869 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14871 END_FOLD_INIT;
14872 return result;
14875 #undef START_FOLD_INIT
14876 #undef END_FOLD_INIT
14878 /* Determine if first argument is a multiple of second argument. Return 0 if
14879    it is not, or we cannot easily determine that it is.
14881 An example of the sort of thing we care about (at this point; this routine
14882 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14883 fold cases do now) is discovering that
14885 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14887 is a multiple of
14889 SAVE_EXPR (J * 8)
14891 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14893 This code also handles discovering that
14895 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14897 is a multiple of 8 so we don't have to worry about dealing with a
14898 possible remainder.
14900 Note that we *look* inside a SAVE_EXPR only to determine how it was
14901 calculated; it is not safe for fold to do much of anything else with the
14902 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14903 at run time. For example, the latter example above *cannot* be implemented
14904 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14905 evaluation time of the original SAVE_EXPR is not necessarily the same at
14906 the time the new expression is evaluated. The only optimization of this
14907 sort that would be valid is changing
14909 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14911 divided by 8 to
14913 SAVE_EXPR (I) * SAVE_EXPR (J)
14915 (where the same SAVE_EXPR (J) is used in the original and the
14916 transformed version). */
14918 int
14919 multiple_of_p (tree type, const_tree top, const_tree bottom)
14921 if (operand_equal_p (top, bottom, 0))
14922 return 1;
14924 if (TREE_CODE (type) != INTEGER_TYPE)
14925 return 0;
14927 switch (TREE_CODE (top))
14929 case BIT_AND_EXPR:
14930 /* Bitwise and provides a power of two multiple. If the mask is
14931 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14932 if (!integer_pow2p (bottom))
14933 return 0;
14934 /* FALLTHRU */
14936 case MULT_EXPR:
14937 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14938 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14940 case PLUS_EXPR:
14941 case MINUS_EXPR:
14942 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14943 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14945 case LSHIFT_EXPR:
14946 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14948 tree op1, t1;
14950 op1 = TREE_OPERAND (top, 1);
14951 /* const_binop may not detect overflow correctly,
14952 so check for it explicitly here. */
14953 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14954 > TREE_INT_CST_LOW (op1)
14955 && TREE_INT_CST_HIGH (op1) == 0
14956 && 0 != (t1 = fold_convert (type,
14957 const_binop (LSHIFT_EXPR,
14958 size_one_node,
14959 op1)))
14960 && !TREE_OVERFLOW (t1))
14961 return multiple_of_p (type, t1, bottom);
14963 return 0;
14965 case NOP_EXPR:
14966 /* Can't handle conversions from non-integral or wider integral type. */
14967 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14968 || (TYPE_PRECISION (type)
14969 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14970 return 0;
14972       /* ... fall through ... */
14974 case SAVE_EXPR:
14975 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14977 case COND_EXPR:
14978 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14979 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14981 case INTEGER_CST:
14982 if (TREE_CODE (bottom) != INTEGER_CST
14983 || integer_zerop (bottom)
14984 || (TYPE_UNSIGNED (type)
14985 && (tree_int_cst_sgn (top) < 0
14986 || tree_int_cst_sgn (bottom) < 0)))
14987 return 0;
14988 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14989 top, bottom));
14991 default:
14992 return 0;
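/* Two consequences of the rules above, as standalone checks
   (illustrative sketch; assumes no signed overflow in the arithmetic
   and J >= 0 for the shift): a product is a multiple of BOTTOM
   whenever one factor is, and a left shift by K is a multiple of
   1 << K.  */
#include <assert.h>
static void
example_multiple_of (int i, int j)
{
  assert ((i * (j * 8)) % 8 == 0);   /* MULT_EXPR rule */
  assert ((j << 3) % 8 == 0);        /* LSHIFT_EXPR rule */
}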
14996 /* Return true if CODE or TYPE is known to be non-negative. */
14998 static bool
14999 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15001 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15002 && truth_value_p (code))
15003 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15004      have a signed:1 type (where the values are -1 and 0). */
15005 return true;
15006 return false;
15009 /* Return true if (CODE OP0) is known to be non-negative. If the return
15010 value is based on the assumption that signed overflow is undefined,
15011 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15012 *STRICT_OVERFLOW_P. */
15014 bool
15015 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15016 bool *strict_overflow_p)
15018 if (TYPE_UNSIGNED (type))
15019 return true;
15021 switch (code)
15023 case ABS_EXPR:
15024 /* We can't return 1 if flag_wrapv is set because
15025 ABS_EXPR<INT_MIN> = INT_MIN. */
15026 if (!INTEGRAL_TYPE_P (type))
15027 return true;
15028 if (TYPE_OVERFLOW_UNDEFINED (type))
15030 *strict_overflow_p = true;
15031 return true;
15033 break;
15035 case NON_LVALUE_EXPR:
15036 case FLOAT_EXPR:
15037 case FIX_TRUNC_EXPR:
15038 return tree_expr_nonnegative_warnv_p (op0,
15039 strict_overflow_p);
15041 case NOP_EXPR:
15043 tree inner_type = TREE_TYPE (op0);
15044 tree outer_type = type;
15046 if (TREE_CODE (outer_type) == REAL_TYPE)
15048 if (TREE_CODE (inner_type) == REAL_TYPE)
15049 return tree_expr_nonnegative_warnv_p (op0,
15050 strict_overflow_p);
15051 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15053 if (TYPE_UNSIGNED (inner_type))
15054 return true;
15055 return tree_expr_nonnegative_warnv_p (op0,
15056 strict_overflow_p);
15059 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15061 if (TREE_CODE (inner_type) == REAL_TYPE)
15062 return tree_expr_nonnegative_warnv_p (op0,
15063 strict_overflow_p);
15064 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15065 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15066 && TYPE_UNSIGNED (inner_type);
15069 break;
15071 default:
15072 return tree_simple_nonnegative_warnv_p (code, type);
15075 /* We don't know sign of `t', so be conservative and return false. */
15076 return false;
15079 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15080 value is based on the assumption that signed overflow is undefined,
15081 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15082 *STRICT_OVERFLOW_P. */
15084 bool
15085 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15086 tree op1, bool *strict_overflow_p)
15088 if (TYPE_UNSIGNED (type))
15089 return true;
15091 switch (code)
15093 case POINTER_PLUS_EXPR:
15094 case PLUS_EXPR:
15095 if (FLOAT_TYPE_P (type))
15096 return (tree_expr_nonnegative_warnv_p (op0,
15097 strict_overflow_p)
15098 && tree_expr_nonnegative_warnv_p (op1,
15099 strict_overflow_p));
15101 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15102 both unsigned and at least 2 bits shorter than the result. */
15103 if (TREE_CODE (type) == INTEGER_TYPE
15104 && TREE_CODE (op0) == NOP_EXPR
15105 && TREE_CODE (op1) == NOP_EXPR)
15107 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15108 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15109 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15110 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15112 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15113 TYPE_PRECISION (inner2)) + 1;
15114 return prec < TYPE_PRECISION (type);
15117 break;
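      /* The width argument above, checked standalone (illustrative
	 sketch): two zero-extended 8-bit values are each at least 2
	 bits narrower than a 32-bit int, so their sum (at most 510)
	 never reaches the sign bit.  */
      #include <assert.h>
      static void
      example_zext_plus_nonneg (unsigned char x, unsigned char y)
      {
	int sum = (int) x + (int) y;
	assert (sum >= 0);
      }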
15119 case MULT_EXPR:
15120 if (FLOAT_TYPE_P (type))
15122 /* x * x for floating point x is always non-negative. */
15123 if (operand_equal_p (op0, op1, 0))
15124 return true;
15125 return (tree_expr_nonnegative_warnv_p (op0,
15126 strict_overflow_p)
15127 && tree_expr_nonnegative_warnv_p (op1,
15128 strict_overflow_p));
15131 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15132	 both unsigned and their total width is less than the result's. */
15133 if (TREE_CODE (type) == INTEGER_TYPE
15134 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15135 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15137 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15138 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15139 : TREE_TYPE (op0);
15140 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15141 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15142 : TREE_TYPE (op1);
15144 bool unsigned0 = TYPE_UNSIGNED (inner0);
15145 bool unsigned1 = TYPE_UNSIGNED (inner1);
15147 if (TREE_CODE (op0) == INTEGER_CST)
15148 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15150 if (TREE_CODE (op1) == INTEGER_CST)
15151 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15153 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15154 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15156 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15157 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15158 : TYPE_PRECISION (inner0);
15160 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15161 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15162 : TYPE_PRECISION (inner1);
15164 return precision0 + precision1 < TYPE_PRECISION (type);
15167 return false;
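      /* The corresponding standalone check for multiplication
	 (illustrative sketch): 8 bits times 8 bits needs at most 16
	 bits, well below the sign bit of a 32-bit int.  */
      #include <assert.h>
      static void
      example_zext_mult_nonneg (unsigned char x, unsigned char y)
      {
	int prod = (int) x * (int) y;   /* at most 255 * 255 = 65025 */
	assert (prod >= 0);
      }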
15169 case BIT_AND_EXPR:
15170 case MAX_EXPR:
15171 return (tree_expr_nonnegative_warnv_p (op0,
15172 strict_overflow_p)
15173 || tree_expr_nonnegative_warnv_p (op1,
15174 strict_overflow_p));
15176 case BIT_IOR_EXPR:
15177 case BIT_XOR_EXPR:
15178 case MIN_EXPR:
15179 case RDIV_EXPR:
15180 case TRUNC_DIV_EXPR:
15181 case CEIL_DIV_EXPR:
15182 case FLOOR_DIV_EXPR:
15183 case ROUND_DIV_EXPR:
15184 return (tree_expr_nonnegative_warnv_p (op0,
15185 strict_overflow_p)
15186 && tree_expr_nonnegative_warnv_p (op1,
15187 strict_overflow_p));
15189 case TRUNC_MOD_EXPR:
15190 case CEIL_MOD_EXPR:
15191 case FLOOR_MOD_EXPR:
15192 case ROUND_MOD_EXPR:
15193 return tree_expr_nonnegative_warnv_p (op0,
15194 strict_overflow_p);
15195 default:
15196 return tree_simple_nonnegative_warnv_p (code, type);
15199 /* We don't know sign of `t', so be conservative and return false. */
15200 return false;
15203 /* Return true if T is known to be non-negative. If the return
15204 value is based on the assumption that signed overflow is undefined,
15205 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15206 *STRICT_OVERFLOW_P. */
15208 bool
15209 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15211 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15212 return true;
15214 switch (TREE_CODE (t))
15216 case INTEGER_CST:
15217 return tree_int_cst_sgn (t) >= 0;
15219 case REAL_CST:
15220 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15222 case FIXED_CST:
15223 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15225 case COND_EXPR:
15226 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15227 strict_overflow_p)
15228 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15229 strict_overflow_p));
15230 default:
15231 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15232 TREE_TYPE (t));
15234 /* We don't know sign of `t', so be conservative and return false. */
15235 return false;
15238 /* Return true if T is known to be non-negative. If the return
15239 value is based on the assumption that signed overflow is undefined,
15240 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15241 *STRICT_OVERFLOW_P. */
15243 bool
15244 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15245 tree arg0, tree arg1, bool *strict_overflow_p)
15247 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15248 switch (DECL_FUNCTION_CODE (fndecl))
15250 CASE_FLT_FN (BUILT_IN_ACOS):
15251 CASE_FLT_FN (BUILT_IN_ACOSH):
15252 CASE_FLT_FN (BUILT_IN_CABS):
15253 CASE_FLT_FN (BUILT_IN_COSH):
15254 CASE_FLT_FN (BUILT_IN_ERFC):
15255 CASE_FLT_FN (BUILT_IN_EXP):
15256 CASE_FLT_FN (BUILT_IN_EXP10):
15257 CASE_FLT_FN (BUILT_IN_EXP2):
15258 CASE_FLT_FN (BUILT_IN_FABS):
15259 CASE_FLT_FN (BUILT_IN_FDIM):
15260 CASE_FLT_FN (BUILT_IN_HYPOT):
15261 CASE_FLT_FN (BUILT_IN_POW10):
15262 CASE_INT_FN (BUILT_IN_FFS):
15263 CASE_INT_FN (BUILT_IN_PARITY):
15264 CASE_INT_FN (BUILT_IN_POPCOUNT):
15265 case BUILT_IN_BSWAP32:
15266 case BUILT_IN_BSWAP64:
15267 /* Always true. */
15268 return true;
15270 CASE_FLT_FN (BUILT_IN_SQRT):
15271 /* sqrt(-0.0) is -0.0. */
15272 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15273 return true;
15274 return tree_expr_nonnegative_warnv_p (arg0,
15275 strict_overflow_p);
15277 CASE_FLT_FN (BUILT_IN_ASINH):
15278 CASE_FLT_FN (BUILT_IN_ATAN):
15279 CASE_FLT_FN (BUILT_IN_ATANH):
15280 CASE_FLT_FN (BUILT_IN_CBRT):
15281 CASE_FLT_FN (BUILT_IN_CEIL):
15282 CASE_FLT_FN (BUILT_IN_ERF):
15283 CASE_FLT_FN (BUILT_IN_EXPM1):
15284 CASE_FLT_FN (BUILT_IN_FLOOR):
15285 CASE_FLT_FN (BUILT_IN_FMOD):
15286 CASE_FLT_FN (BUILT_IN_FREXP):
15287 CASE_FLT_FN (BUILT_IN_ICEIL):
15288 CASE_FLT_FN (BUILT_IN_IFLOOR):
15289 CASE_FLT_FN (BUILT_IN_IRINT):
15290 CASE_FLT_FN (BUILT_IN_IROUND):
15291 CASE_FLT_FN (BUILT_IN_LCEIL):
15292 CASE_FLT_FN (BUILT_IN_LDEXP):
15293 CASE_FLT_FN (BUILT_IN_LFLOOR):
15294 CASE_FLT_FN (BUILT_IN_LLCEIL):
15295 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15296 CASE_FLT_FN (BUILT_IN_LLRINT):
15297 CASE_FLT_FN (BUILT_IN_LLROUND):
15298 CASE_FLT_FN (BUILT_IN_LRINT):
15299 CASE_FLT_FN (BUILT_IN_LROUND):
15300 CASE_FLT_FN (BUILT_IN_MODF):
15301 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15302 CASE_FLT_FN (BUILT_IN_RINT):
15303 CASE_FLT_FN (BUILT_IN_ROUND):
15304 CASE_FLT_FN (BUILT_IN_SCALB):
15305 CASE_FLT_FN (BUILT_IN_SCALBLN):
15306 CASE_FLT_FN (BUILT_IN_SCALBN):
15307 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15308 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15309 CASE_FLT_FN (BUILT_IN_SINH):
15310 CASE_FLT_FN (BUILT_IN_TANH):
15311 CASE_FLT_FN (BUILT_IN_TRUNC):
15312 /* True if the 1st argument is nonnegative. */
15313 return tree_expr_nonnegative_warnv_p (arg0,
15314 strict_overflow_p);
15316 CASE_FLT_FN (BUILT_IN_FMAX):
15317       /* True if either the 1st or the 2nd argument is nonnegative. */
15318 return (tree_expr_nonnegative_warnv_p (arg0,
15319 strict_overflow_p)
15320 || (tree_expr_nonnegative_warnv_p (arg1,
15321 strict_overflow_p)));
15323 CASE_FLT_FN (BUILT_IN_FMIN):
15324       /* True if both the 1st and the 2nd arguments are nonnegative. */
15325 return (tree_expr_nonnegative_warnv_p (arg0,
15326 strict_overflow_p)
15327 && (tree_expr_nonnegative_warnv_p (arg1,
15328 strict_overflow_p)));
15330 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15331 /* True if the 2nd argument is nonnegative. */
15332 return tree_expr_nonnegative_warnv_p (arg1,
15333 strict_overflow_p);
15335 CASE_FLT_FN (BUILT_IN_POWI):
15336 /* True if the 1st argument is nonnegative or the second
15337 argument is an even integer. */
15338 if (TREE_CODE (arg1) == INTEGER_CST
15339 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15340 return true;
15341 return tree_expr_nonnegative_warnv_p (arg0,
15342 strict_overflow_p);
15344 CASE_FLT_FN (BUILT_IN_POW):
15345 /* True if the 1st argument is nonnegative or the second
15346	 argument is an even integer-valued real. */
15347 if (TREE_CODE (arg1) == REAL_CST)
15349 REAL_VALUE_TYPE c;
15350 HOST_WIDE_INT n;
15352 c = TREE_REAL_CST (arg1);
15353 n = real_to_integer (&c);
15354 if ((n & 1) == 0)
15356 REAL_VALUE_TYPE cint;
15357 real_from_integer (&cint, VOIDmode, n,
15358 n < 0 ? -1 : 0, 0);
15359 if (real_identical (&c, &cint))
15360 return true;
15363 return tree_expr_nonnegative_warnv_p (arg0,
15364 strict_overflow_p);
15366 default:
15367 break;
15369 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15370 type);
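/* Standalone illustration of the POW rule above (a sketch; link with
   -lm): x raised to an even integer exponent is nonnegative
   regardless of the sign of x.  */
#include <assert.h>
#include <math.h>
static void
example_pow_even_nonneg (void)
{
  assert (pow (-3.0, 4.0) >= 0.0);
  assert (pow (-0.5, 2.0) >= 0.0);
}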
15373 /* Return true if T is known to be non-negative. If the return
15374 value is based on the assumption that signed overflow is undefined,
15375 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15376 *STRICT_OVERFLOW_P. */
15378 bool
15379 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15381 enum tree_code code = TREE_CODE (t);
15382 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15383 return true;
15385 switch (code)
15387 case TARGET_EXPR:
15389 tree temp = TARGET_EXPR_SLOT (t);
15390 t = TARGET_EXPR_INITIAL (t);
15392 /* If the initializer is non-void, then it's a normal expression
15393 that will be assigned to the slot. */
15394 if (!VOID_TYPE_P (t))
15395 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15397 /* Otherwise, the initializer sets the slot in some way. One common
15398 way is an assignment statement at the end of the initializer. */
15399 while (1)
15401 if (TREE_CODE (t) == BIND_EXPR)
15402 t = expr_last (BIND_EXPR_BODY (t));
15403 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15404 || TREE_CODE (t) == TRY_CATCH_EXPR)
15405 t = expr_last (TREE_OPERAND (t, 0));
15406 else if (TREE_CODE (t) == STATEMENT_LIST)
15407 t = expr_last (t);
15408 else
15409 break;
15411 if (TREE_CODE (t) == MODIFY_EXPR
15412 && TREE_OPERAND (t, 0) == temp)
15413 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15414 strict_overflow_p);
15416 return false;
15419 case CALL_EXPR:
15421 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15422 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15424 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15425 get_callee_fndecl (t),
15426 arg0,
15427 arg1,
15428 strict_overflow_p);
15430 case COMPOUND_EXPR:
15431 case MODIFY_EXPR:
15432 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15433 strict_overflow_p);
15434 case BIND_EXPR:
15435 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15436 strict_overflow_p);
15437 case SAVE_EXPR:
15438 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15439 strict_overflow_p);
15441 default:
15442 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15443 TREE_TYPE (t));
15446 /* We don't know sign of `t', so be conservative and return false. */
15447 return false;
15450 /* Return true if T is known to be non-negative. If the return
15451 value is based on the assumption that signed overflow is undefined,
15452 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15453 *STRICT_OVERFLOW_P. */
15455 bool
15456 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15458 enum tree_code code;
15459 if (t == error_mark_node)
15460 return false;
15462 code = TREE_CODE (t);
15463 switch (TREE_CODE_CLASS (code))
15465 case tcc_binary:
15466 case tcc_comparison:
15467 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15468 TREE_TYPE (t),
15469 TREE_OPERAND (t, 0),
15470 TREE_OPERAND (t, 1),
15471 strict_overflow_p);
15473 case tcc_unary:
15474 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15475 TREE_TYPE (t),
15476 TREE_OPERAND (t, 0),
15477 strict_overflow_p);
15479 case tcc_constant:
15480 case tcc_declaration:
15481 case tcc_reference:
15482 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15484 default:
15485 break;
15488 switch (code)
15490 case TRUTH_AND_EXPR:
15491 case TRUTH_OR_EXPR:
15492 case TRUTH_XOR_EXPR:
15493 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15494 TREE_TYPE (t),
15495 TREE_OPERAND (t, 0),
15496 TREE_OPERAND (t, 1),
15497 strict_overflow_p);
15498 case TRUTH_NOT_EXPR:
15499 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15500 TREE_TYPE (t),
15501 TREE_OPERAND (t, 0),
15502 strict_overflow_p);
15504 case COND_EXPR:
15505 case CONSTRUCTOR:
15506 case OBJ_TYPE_REF:
15507 case ASSERT_EXPR:
15508 case ADDR_EXPR:
15509 case WITH_SIZE_EXPR:
15510 case SSA_NAME:
15511 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15513 default:
15514 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15518 /* Return true if `t' is known to be non-negative. Handle warnings
15519 about undefined signed overflow. */
15521 bool
15522 tree_expr_nonnegative_p (tree t)
15524 bool ret, strict_overflow_p;
15526 strict_overflow_p = false;
15527 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15528 if (strict_overflow_p)
15529 fold_overflow_warning (("assuming signed overflow does not occur when "
15530 "determining that expression is always "
15531 "non-negative"),
15532 WARN_STRICT_OVERFLOW_MISC);
15533 return ret;
15537 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15538 For floating point we further ensure that T is not denormal.
15539 Similar logic is present in nonzero_address in rtlanal.h.
15541 If the return value is based on the assumption that signed overflow
15542 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15543 change *STRICT_OVERFLOW_P. */
15545 bool
15546 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15547 bool *strict_overflow_p)
15549 switch (code)
15551 case ABS_EXPR:
15552 return tree_expr_nonzero_warnv_p (op0,
15553 strict_overflow_p);
15555 case NOP_EXPR:
15557 tree inner_type = TREE_TYPE (op0);
15558 tree outer_type = type;
15560 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15561 && tree_expr_nonzero_warnv_p (op0,
15562 strict_overflow_p));
15564 break;
15566 case NON_LVALUE_EXPR:
15567 return tree_expr_nonzero_warnv_p (op0,
15568 strict_overflow_p);
15570 default:
15571 break;
15574 return false;
15577 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15578 For floating point we further ensure that T is not denormal.
15579 Similar logic is present in nonzero_address in rtlanal.h.
15581 If the return value is based on the assumption that signed overflow
15582 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15583 change *STRICT_OVERFLOW_P. */
15585 bool
15586 tree_binary_nonzero_warnv_p (enum tree_code code,
15587 tree type,
15588 tree op0,
15589 tree op1, bool *strict_overflow_p)
15591 bool sub_strict_overflow_p;
15592 switch (code)
15594 case POINTER_PLUS_EXPR:
15595 case PLUS_EXPR:
15596 if (TYPE_OVERFLOW_UNDEFINED (type))
15598	/* In the presence of negative values it is hard
15599	   to say anything definite. */
15600 sub_strict_overflow_p = false;
15601 if (!tree_expr_nonnegative_warnv_p (op0,
15602 &sub_strict_overflow_p)
15603 || !tree_expr_nonnegative_warnv_p (op1,
15604 &sub_strict_overflow_p))
15605 return false;
15606	/* One of the operands must be positive and the other non-negative. */
15607 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15608 overflows, on a twos-complement machine the sum of two
15609 nonnegative numbers can never be zero. */
15610 return (tree_expr_nonzero_warnv_p (op0,
15611 strict_overflow_p)
15612 || tree_expr_nonzero_warnv_p (op1,
15613 strict_overflow_p));
15615 break;
15617 case MULT_EXPR:
15618 if (TYPE_OVERFLOW_UNDEFINED (type))
15620 if (tree_expr_nonzero_warnv_p (op0,
15621 strict_overflow_p)
15622 && tree_expr_nonzero_warnv_p (op1,
15623 strict_overflow_p))
15625 *strict_overflow_p = true;
15626 return true;
15629 break;
15631 case MIN_EXPR:
15632 sub_strict_overflow_p = false;
15633 if (tree_expr_nonzero_warnv_p (op0,
15634 &sub_strict_overflow_p)
15635 && tree_expr_nonzero_warnv_p (op1,
15636 &sub_strict_overflow_p))
15638 if (sub_strict_overflow_p)
15639 *strict_overflow_p = true;
15641 break;
15643 case MAX_EXPR:
15644 sub_strict_overflow_p = false;
15645 if (tree_expr_nonzero_warnv_p (op0,
15646 &sub_strict_overflow_p))
15648 if (sub_strict_overflow_p)
15649 *strict_overflow_p = true;
15651	  /* When both operands are nonzero, MAX must be too. */
15652 if (tree_expr_nonzero_warnv_p (op1,
15653 strict_overflow_p))
15654 return true;
15656 /* MAX where operand 0 is positive is positive. */
15657 return tree_expr_nonnegative_warnv_p (op0,
15658 strict_overflow_p);
15660 /* MAX where operand 1 is positive is positive. */
15661 else if (tree_expr_nonzero_warnv_p (op1,
15662 &sub_strict_overflow_p)
15663 && tree_expr_nonnegative_warnv_p (op1,
15664 &sub_strict_overflow_p))
15666 if (sub_strict_overflow_p)
15667 *strict_overflow_p = true;
15668 return true;
15670 break;
15672 case BIT_IOR_EXPR:
15673 return (tree_expr_nonzero_warnv_p (op1,
15674 strict_overflow_p)
15675 || tree_expr_nonzero_warnv_p (op0,
15676 strict_overflow_p));
15678 default:
15679 break;
15682 return false;
15685 /* Return true when T is an address and is known to be nonzero.
15686 For floating point we further ensure that T is not denormal.
15687 Similar logic is present in nonzero_address in rtlanal.h.
15689 If the return value is based on the assumption that signed overflow
15690 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15691 change *STRICT_OVERFLOW_P. */
15693 bool
15694 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15696 bool sub_strict_overflow_p;
15697 switch (TREE_CODE (t))
15699 case INTEGER_CST:
15700 return !integer_zerop (t);
15702 case ADDR_EXPR:
15704 tree base = TREE_OPERAND (t, 0);
15705 if (!DECL_P (base))
15706 base = get_base_address (base);
15708 if (!base)
15709 return false;
15711 /* Weak declarations may link to NULL. Other things may also be NULL
15712 so protect with -fdelete-null-pointer-checks; but not variables
15713 allocated on the stack. */
15714 if (DECL_P (base)
15715 && (flag_delete_null_pointer_checks
15716 || (DECL_CONTEXT (base)
15717 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15718 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15719 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15721 /* Constants are never weak. */
15722 if (CONSTANT_CLASS_P (base))
15723 return true;
15725 return false;
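	/* Illustrative weak-symbol case for the check above (the symbol
	   name is hypothetical): the address of a weak, possibly
	   undefined symbol may legitimately compare equal to null, so
	   it must not be folded to "nonzero".  */
	extern int example_weak_symbol __attribute__ ((weak));
	static int
	example_have_weak_symbol (void)
	{
	  return &example_weak_symbol != 0;   /* not foldable at compile time */
	}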
15728 case COND_EXPR:
15729 sub_strict_overflow_p = false;
15730 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15731 &sub_strict_overflow_p)
15732 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15733 &sub_strict_overflow_p))
15735 if (sub_strict_overflow_p)
15736 *strict_overflow_p = true;
15737 return true;
15739 break;
15741 default:
15742 break;
15744 return false;
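
/* Illustrative note (added; not in the original source): in the ADDR_EXPR
   case above, the address of a stack variable is always nonzero (it is
   accepted via auto_var_in_fn_p even without -fdelete-null-pointer-checks),
   while the address of a weak symbol, e.g.

     extern int w __attribute__ ((weak));   ... &w ...

   may resolve to NULL and is therefore rejected by the DECL_WEAK test.  */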

/* Return true when T is an expression and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }

  return false;
}

/* Return true when T is an expression known to be nonzero, handling
   warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
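
/* Usage sketch (added; hypothetical caller, not in the original source):

     if (tree_expr_nonzero_p (divisor))
       ... fold away the divide-by-zero check ...

   Any -Wstrict-overflow diagnostic the answer depends on has already been
   emitted through fold_overflow_warning by the time this returns.  */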

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
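
/* Illustrative example (added; not in the original source): folding the
   constant sum 1 + 2:

     tree t = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				       build_int_cst (integer_type_node, 1),
				       build_int_cst (integer_type_node, 2));

   yields the INTEGER_CST 3, whereas operands that do not reduce to a
   constant (a VAR_DECL, say) make it return NULL_TREE.  */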

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
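
/* Illustrative example (added; not in the original source): for the C
   expression "abc"[1], EXP is an ARRAY_REF of the STRING_CST "abc" with
   index 1.  All the checks above pass: char has a one-byte MODE_INT mode
   and 1 < 4, the string length including the terminating NUL, so the
   result is the character constant 'b'.  */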

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	int overflow = neg_double (val.low, val.high, &val.low, &val.high);

	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
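
/* Illustrative note (added; not in the original source): in the
   INTEGER_CST case above the negation itself can overflow.  For a 32-bit
   signed type, negating INT_MIN (-2147483648) wraps back to INT_MIN in
   two's complement, and for signed types that overflow is propagated
   into the result via force_fit_type_double.  */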

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    int overflow;

	    overflow = neg_double (val.low, val.high, &val.low, &val.high);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
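
/* Illustrative note (added; not in the original source): like negation,
   abs of an INTEGER_CST can overflow: |INT_MIN| is not representable in
   a 32-bit signed type, so the overflow bit from neg_double is folded
   into the result's TREE_OVERFLOW flag.  */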

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
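
/* Illustrative example (added; not in the original source): for a signed
   type, fold_not_const turns the constant 5 into -6, since in two's
   complement ~x == -x - 1.  */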

/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile-time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
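
/* Illustrative example (added; not in the original source): folding
   2 >= 3.  GE is not handled directly, so the code above rewrites it as
   LT with INVERT set: INT_CST_LT (2, 3) gives 1, inverting gives 0, and
   constant_boolean_node wraps that 0 in TYPE.  */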

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR it contains,
     is free of side effects.  If either is, we don't need to wrap the
     expression in a cleanup point expression.  Note that we don't check
     the left-hand side of the MODIFY_EXPR because it should always be
     the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
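
/* Illustrative note (added; not in the original source): the operand of
   a RETURN_EXPR, when present, is normally a MODIFY_EXPR of the form
   <retval> = <value>, which is why TREE_OPERAND (op, 1) above picks out
   the returned value whose side effects are inspected.  */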

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
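
/* Illustrative example (added; not in the original source): for
   int a[4], the expression *(int *)&a matches the
   "*(foo *)&fooarray => fooarray[0]" rule above and simplifies to the
   ARRAY_REF a[0]; similarly, for _Complex double c, *(double *)&c
   becomes __real__ c through the COMPLEX_TYPE rule.  */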

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant the check is more expensive than simply
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
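
/* Illustrative example (added; not in the original source): with
   VALUE == 21 and DIVISOR == 8 (a power of two), the bit-manipulation
   path computes (21 + 7) & ~7 == 28 & ~7 == 24, the next multiple of 8;
   the generic path would likewise compute ceil (21 / 8) * 8 == 24.  */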

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant the check is more expensive than simply
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
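
/* Illustrative example (added; not in the original source): with
   VALUE == 21 and DIVISOR == 8, rounding down is simply 21 & ~7 == 16;
   the generic path computes floor (21 / 8) * 8 == 16.  */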

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
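
/* Illustrative example (added; not in the original source): for
   int a[8], E1 == &a[3] and E2 == &a[1] share the core address &a and
   differ only in constant bit positions, so with 4-byte ints *DIFF
   becomes (96 - 32) / BITS_PER_UNIT == 8.  */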

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
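
/* Illustrative example (added; not in the original source): when only the
   magnitude matters, as in fabs (-x * y), fold_strip_sign_ops applied to
   -x * y returns x * y: the NEGATE_EXPR is stripped from the first
   operand and the MULT_EXPR is rebuilt, which is safe because the
   enclosing fabs discards the sign anyway (assuming sign-dependent
   rounding is not honored for the mode).  */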