/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

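/* For example, since bit 0 encodes LT, bit 1 encodes EQ, bit 2 encodes
   GT and bit 3 encodes UNORD in the values above, bitwise OR combines
   predicates directly:

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LT | COMPCODE_GT == 1 | 4 == 5 == COMPCODE_LTGT
     COMPCODE_UNORD | COMPCODE_EQ == 8 | 2 == 10 == COMPCODE_UNEQ

   and bitwise AND intersects them:

     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ  */
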
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

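/* For example, with ARG1 == 12 and ARG2 == 4,
   div_if_zero_remainder (TRUNC_DIV_EXPR, arg1, arg2) computes
   rem == 0 and returns the constant 3, whereas ARG1 == 13 leaves
   rem == 1 and the function returns NULL_TREE.  */
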
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

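/* A hypothetical caller of the deferral machinery above (the names
   EXPR, STMT and RESULT_IS_USED are placeholders, not part of this
   file) would bracket its folding like so:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   so that the warning about undefined signed overflow is emitted only
   if the caller actually keeps the folded result.  */
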
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

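/* For example, sin is odd (-sin(x) == sin(-x)), so BUILT_IN_SIN is
   listed above and -sin(x) can be folded to sin(-x); cos is even
   rather than odd, so BUILT_IN_COS is deliberately absent.  */
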
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

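/* For example, in a signed 8-bit type the most negative value is
   -128, whose low bits equal 1 << (prec - 1); the function returns
   false for it, since -(-128) == 128 is not representable, and true
   for every other 8-bit value.  */
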
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

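/* For example, splitting IN == a + 5 with CODE == PLUS_EXPR stores 5
   in *LITP and returns the variable part a, while IN == x - 3 (a
   MINUS_EXPR split under PLUS_EXPR) stores the subtracted literal 3
   in *MINUS_LITP and returns x.  */
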
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

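/* For example, with T1 == x - y (a MINUS_EXPR) and T2 == -a (a
   NEGATE_EXPR), associating under PLUS_EXPR builds (x - y) - a with
   build2_loc rather than calling fold, avoiding the recursion noted
   above; a null T1 or T2 simply yields the other argument.  */
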
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;
    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

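/* For example, int_const_binop (PLUS_EXPR, a, b) on the INTEGER_CSTs
   2 and 3 yields the INTEGER_CST 5.  Adding 1 to the most positive
   value of a signed type wraps in the double_int arithmetic, and
   because the overflowable argument is 1 the resulting constant comes
   back from force_fit_type_double with TREE_OVERFLOW set.  */
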
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

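/* For example, the scalar expansion of complex multiplication above
   folds (1 + 2i) * (3 + 4i) as real = 1*3 - 2*4 = -5 and
   imag = 1*4 + 2*3 = 10, i.e. the COMPLEX_CST -5 + 10i, calling
   const_binop recursively on the scalar parts.  */
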
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

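/* For example, with sizetype arguments ARG0 == 4 and ARG1 == 10, the
   constants are unequal and ARG1 is not less than ARG0, so the
   function computes 10 - 4 == 6 in the unsigned type, converts it to
   ssizetype, and subtracts it from zero, returning the ssizetype
   constant -6 without ever forming an overflowed unsigned difference.  */
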
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

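/* For example, truncating the REAL_CST 3.7 to a 32-bit int yields 3;
   converting a NaN yields 0 with TREE_OVERFLOW set; and converting
   1e30 saturates to the type's maximum value, again with
   TREE_OVERFLOW set, per the Java-style rules described above.  */
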
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp whenever the discarded fractional
     bits are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

1813 /* Convert expression ARG to type TYPE. Used by the middle-end for
1814 simple conversions in preference to calling the front-end's convert. */
1816 tree
1817 fold_convert_loc (location_t loc, tree type, tree arg)
1819 tree orig = TREE_TYPE (arg);
1820 tree tem;
1822 if (type == orig)
1823 return arg;
1825 if (TREE_CODE (arg) == ERROR_MARK
1826 || TREE_CODE (type) == ERROR_MARK
1827 || TREE_CODE (orig) == ERROR_MARK)
1828 return error_mark_node;
1830 switch (TREE_CODE (type))
1832 case POINTER_TYPE:
1833 case REFERENCE_TYPE:
1834 /* Handle conversions between pointers to different address spaces. */
1835 if (POINTER_TYPE_P (orig)
1836 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1837 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1838 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1839 /* fall through */
1841 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1842 case OFFSET_TYPE:
1843 if (TREE_CODE (arg) == INTEGER_CST)
1845 tem = fold_convert_const (NOP_EXPR, type, arg);
1846 if (tem != NULL_TREE)
1847 return tem;
1849 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1850 || TREE_CODE (orig) == OFFSET_TYPE)
1851 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1852 if (TREE_CODE (orig) == COMPLEX_TYPE)
1853 return fold_convert_loc (loc, type,
1854 fold_build1_loc (loc, REALPART_EXPR,
1855 TREE_TYPE (orig), arg));
1856 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1857 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1858 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1860 case REAL_TYPE:
1861 if (TREE_CODE (arg) == INTEGER_CST)
1863 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1864 if (tem != NULL_TREE)
1865 return tem;
1867 else if (TREE_CODE (arg) == REAL_CST)
1869 tem = fold_convert_const (NOP_EXPR, type, arg);
1870 if (tem != NULL_TREE)
1871 return tem;
1873 else if (TREE_CODE (arg) == FIXED_CST)
1875 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1876 if (tem != NULL_TREE)
1877 return tem;
1880 switch (TREE_CODE (orig))
1882 case INTEGER_TYPE:
1883 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1884 case POINTER_TYPE: case REFERENCE_TYPE:
1885 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1887 case REAL_TYPE:
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1890 case FIXED_POINT_TYPE:
1891 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1893 case COMPLEX_TYPE:
1894 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1895 return fold_convert_loc (loc, type, tem);
1897 default:
1898 gcc_unreachable ();
1901 case FIXED_POINT_TYPE:
1902 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1903 || TREE_CODE (arg) == REAL_CST)
1905 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 goto fold_convert_exit;
1910 switch (TREE_CODE (orig))
1912 case FIXED_POINT_TYPE:
1913 case INTEGER_TYPE:
1914 case ENUMERAL_TYPE:
1915 case BOOLEAN_TYPE:
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1919 case COMPLEX_TYPE:
1920 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1921 return fold_convert_loc (loc, type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 case FIXED_POINT_TYPE:
1935 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1936 fold_convert_loc (loc, TREE_TYPE (type), arg),
1937 fold_convert_loc (loc, TREE_TYPE (type),
1938 integer_zero_node));
1939 case COMPLEX_TYPE:
1941 tree rpart, ipart;
1943 if (TREE_CODE (arg) == COMPLEX_EXPR)
1945 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1946 TREE_OPERAND (arg, 0));
1947 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1948 TREE_OPERAND (arg, 1));
1949 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1952 arg = save_expr (arg);
1953 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1954 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1955 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1956 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1957 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1960 default:
1961 gcc_unreachable ();
1964 case VECTOR_TYPE:
1965 if (integer_zerop (arg))
1966 return build_zero_vector (type);
1967 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1968 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1969 || TREE_CODE (orig) == VECTOR_TYPE);
1970 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1972 case VOID_TYPE:
1973 tem = fold_ignored_result (arg);
1974 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1976 default:
1977 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1978 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1979 gcc_unreachable ();
1981 fold_convert_exit:
1982 protected_set_expr_location_unshare (tem, loc);
1983 return tem;
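/* Illustration (not part of GCC; a minimal standalone sketch): the
   COMPLEX_TYPE cases above mirror C99's rule that converting a complex
   value to a scalar type keeps only the real part, which is what the
   REALPART_EXPR folds implement.  */
static double
demo_complex_to_real (double _Complex z)
{
  /* Equivalent to creal (z); the imaginary part is discarded.  */
  return (double) z;
}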
1986 /* Return false if expr can be assumed not to be an lvalue, true
1987 otherwise. */
1989 static bool
1990 maybe_lvalue_p (const_tree x)
1992 /* We only need to wrap lvalue tree codes. */
1993 switch (TREE_CODE (x))
1995 case VAR_DECL:
1996 case PARM_DECL:
1997 case RESULT_DECL:
1998 case LABEL_DECL:
1999 case FUNCTION_DECL:
2000 case SSA_NAME:
2002 case COMPONENT_REF:
2003 case MEM_REF:
2004 case INDIRECT_REF:
2005 case ARRAY_REF:
2006 case ARRAY_RANGE_REF:
2007 case BIT_FIELD_REF:
2008 case OBJ_TYPE_REF:
2010 case REALPART_EXPR:
2011 case IMAGPART_EXPR:
2012 case PREINCREMENT_EXPR:
2013 case PREDECREMENT_EXPR:
2014 case SAVE_EXPR:
2015 case TRY_CATCH_EXPR:
2016 case WITH_CLEANUP_EXPR:
2017 case COMPOUND_EXPR:
2018 case MODIFY_EXPR:
2019 case TARGET_EXPR:
2020 case COND_EXPR:
2021 case BIND_EXPR:
2022 break;
2024 default:
2025 /* Assume the worst for front-end tree codes. */
2026 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2027 break;
2028 return false;
2031 return true;
2034 /* Return an expr equal to X but certainly not valid as an lvalue. */
2036 tree
2037 non_lvalue_loc (location_t loc, tree x)
2039 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2040 us. */
2041 if (in_gimple_form)
2042 return x;
2044 if (! maybe_lvalue_p (x))
2045 return x;
2046 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2049 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2050 Zero means allow extended lvalues. */
2052 int pedantic_lvalues;
2054 /* When pedantic, return an expr equal to X but certainly not valid as a
2055 pedantic lvalue. Otherwise, return X. */
2057 static tree
2058 pedantic_non_lvalue_loc (location_t loc, tree x)
2060 if (pedantic_lvalues)
2061 return non_lvalue_loc (loc, x);
2063 return protected_set_expr_location_unshare (x, loc);
2066 /* Given a tree comparison code, return the code that is the logical inverse.
2067 It is generally not safe to do this for floating-point comparisons, except
2068 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2069 ERROR_MARK in this case. */
2071 enum tree_code
2072 invert_tree_comparison (enum tree_code code, bool honor_nans)
2074 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2075 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2076 return ERROR_MARK;
2078 switch (code)
2080 case EQ_EXPR:
2081 return NE_EXPR;
2082 case NE_EXPR:
2083 return EQ_EXPR;
2084 case GT_EXPR:
2085 return honor_nans ? UNLE_EXPR : LE_EXPR;
2086 case GE_EXPR:
2087 return honor_nans ? UNLT_EXPR : LT_EXPR;
2088 case LT_EXPR:
2089 return honor_nans ? UNGE_EXPR : GE_EXPR;
2090 case LE_EXPR:
2091 return honor_nans ? UNGT_EXPR : GT_EXPR;
2092 case LTGT_EXPR:
2093 return UNEQ_EXPR;
2094 case UNEQ_EXPR:
2095 return LTGT_EXPR;
2096 case UNGT_EXPR:
2097 return LE_EXPR;
2098 case UNGE_EXPR:
2099 return LT_EXPR;
2100 case UNLT_EXPR:
2101 return GE_EXPR;
2102 case UNLE_EXPR:
2103 return GT_EXPR;
2104 case ORDERED_EXPR:
2105 return UNORDERED_EXPR;
2106 case UNORDERED_EXPR:
2107 return ORDERED_EXPR;
2108 default:
2109 gcc_unreachable ();
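/* Illustration (not part of GCC; a standalone sketch): why inverting a
   floating-point "<" must honor NaNs.  With an unordered operand both
   a < b and a >= b are false, so the inverse of LT_EXPR is UNGE_EXPR,
   not GE_EXPR, exactly as the table above encodes.  */
static int
demo_invert_lt (double a, double b)
{
  /* x != x is true exactly for NaN, so the right-hand side spells the
     UNGE test in plain C.  The equality holds for every input.  */
  return !(a < b) == (a >= b || a != a || b != b);
}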
2113 /* Similar, but return the comparison that results if the operands are
2114 swapped. This is safe for floating-point. */
2116 enum tree_code
2117 swap_tree_comparison (enum tree_code code)
2119 switch (code)
2121 case EQ_EXPR:
2122 case NE_EXPR:
2123 case ORDERED_EXPR:
2124 case UNORDERED_EXPR:
2125 case LTGT_EXPR:
2126 case UNEQ_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 case UNGT_EXPR:
2137 return UNLT_EXPR;
2138 case UNGE_EXPR:
2139 return UNLE_EXPR;
2140 case UNLT_EXPR:
2141 return UNGT_EXPR;
2142 case UNLE_EXPR:
2143 return UNGE_EXPR;
2144 default:
2145 gcc_unreachable ();
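/* Illustration (not part of GCC; a standalone sketch): operand swapping
   only mirrors the comparison, so unlike inversion it is NaN-safe; with
   an unordered operand both sides below are false.  */
static int
demo_swap_cmp (double a, double b)
{
  /* Holds for all inputs, including NaNs.  */
  return (a < b) == (b > a);
}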
2150 /* Convert a comparison tree code from an enum tree_code representation
2151 into a compcode bit-based encoding. This function is the inverse of
2152 compcode_to_comparison. */
2154 static enum comparison_code
2155 comparison_to_compcode (enum tree_code code)
2157 switch (code)
2159 case LT_EXPR:
2160 return COMPCODE_LT;
2161 case EQ_EXPR:
2162 return COMPCODE_EQ;
2163 case LE_EXPR:
2164 return COMPCODE_LE;
2165 case GT_EXPR:
2166 return COMPCODE_GT;
2167 case NE_EXPR:
2168 return COMPCODE_NE;
2169 case GE_EXPR:
2170 return COMPCODE_GE;
2171 case ORDERED_EXPR:
2172 return COMPCODE_ORD;
2173 case UNORDERED_EXPR:
2174 return COMPCODE_UNORD;
2175 case UNLT_EXPR:
2176 return COMPCODE_UNLT;
2177 case UNEQ_EXPR:
2178 return COMPCODE_UNEQ;
2179 case UNLE_EXPR:
2180 return COMPCODE_UNLE;
2181 case UNGT_EXPR:
2182 return COMPCODE_UNGT;
2183 case LTGT_EXPR:
2184 return COMPCODE_LTGT;
2185 case UNGE_EXPR:
2186 return COMPCODE_UNGE;
2187 default:
2188 gcc_unreachable ();
2192 /* Convert a compcode bit-based encoding of a comparison operator back
2193 to GCC's enum tree_code representation. This function is the
2194 inverse of comparison_to_compcode. */
2196 static enum tree_code
2197 compcode_to_comparison (enum comparison_code code)
2199 switch (code)
2201 case COMPCODE_LT:
2202 return LT_EXPR;
2203 case COMPCODE_EQ:
2204 return EQ_EXPR;
2205 case COMPCODE_LE:
2206 return LE_EXPR;
2207 case COMPCODE_GT:
2208 return GT_EXPR;
2209 case COMPCODE_NE:
2210 return NE_EXPR;
2211 case COMPCODE_GE:
2212 return GE_EXPR;
2213 case COMPCODE_ORD:
2214 return ORDERED_EXPR;
2215 case COMPCODE_UNORD:
2216 return UNORDERED_EXPR;
2217 case COMPCODE_UNLT:
2218 return UNLT_EXPR;
2219 case COMPCODE_UNEQ:
2220 return UNEQ_EXPR;
2221 case COMPCODE_UNLE:
2222 return UNLE_EXPR;
2223 case COMPCODE_UNGT:
2224 return UNGT_EXPR;
2225 case COMPCODE_LTGT:
2226 return LTGT_EXPR;
2227 case COMPCODE_UNGE:
2228 return UNGE_EXPR;
2229 default:
2230 gcc_unreachable ();
2234 /* Return a tree for the comparison which is the combination of
2235 doing the AND or OR (depending on CODE) of the two operations LCODE
2236 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2237 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2238 if this makes the transformation invalid. */
2240 tree
2241 combine_comparisons (location_t loc,
2242 enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 int compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2316 enum tree_code tcode;
2318 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2319 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
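/* Illustration (not part of GCC; a standalone sketch): the compcode
   encoding (LT = 1, EQ = 2, GT = 4, UNORD = 8) reduces combining two
   comparisons to bit arithmetic: COMPCODE_LT | COMPCODE_EQ is 3, which
   is COMPCODE_LE, so (x < y) || (x == y) folds to x <= y.  */
static int
demo_combine_le (int x, int y)
{
  /* Both forms agree for every pair of integers.  */
  return ((x < y) || (x == y)) == (x <= y);
}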
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2350 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2354 || TREE_TYPE (arg0) == error_mark_node
2355 || TREE_TYPE (arg1) == error_mark_node)
2356 return 0;
2358 /* Similar, if either does not have a type (like a released SSA name),
2359 they aren't equal. */
2360 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2361 return 0;
2363 /* Check equality of integer constants before bailing out due to
2364 precision differences. */
2365 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2366 return tree_int_cst_equal (arg0, arg1);
2368 /* If both types don't have the same signedness, then we can't consider
2369 them equal. We must check this before the STRIP_NOPS calls
2370 because they may change the signedness of the arguments. As pointers
2371 strictly don't have a signedness, require either two pointers or
2372 two non-pointers as well. */
2373 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2374 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2375 return 0;
2377 /* We cannot consider pointers to different address space equal. */
2378 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2379 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2380 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2381 return 0;
2383 /* If both types don't have the same precision, then it is not safe
2384 to strip NOPs. */
2385 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2386 return 0;
2388 STRIP_NOPS (arg0);
2389 STRIP_NOPS (arg1);
2391 /* In case both args are comparisons but with different comparison
2392 code, try to swap the comparison operands of one arg to produce
2393 a match and compare that variant. */
2394 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2395 && COMPARISON_CLASS_P (arg0)
2396 && COMPARISON_CLASS_P (arg1))
2398 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2400 if (TREE_CODE (arg0) == swap_code)
2401 return operand_equal_p (TREE_OPERAND (arg0, 0),
2402 TREE_OPERAND (arg1, 1), flags)
2403 && operand_equal_p (TREE_OPERAND (arg0, 1),
2404 TREE_OPERAND (arg1, 0), flags);
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 /* This is needed for conversions and for COMPONENT_REF.
2409 Might as well play it safe and always test this. */
2410 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2411 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2412 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2413 return 0;
2415 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2416 We don't care about side effects in that case because the SAVE_EXPR
2417 takes care of that for us. In all other cases, two expressions are
2418 equal if they have no side effects. If we have two identical
2419 expressions with side effects that should be treated the same due
2420 to the only side effects being identical SAVE_EXPR's, that will
2421 be detected in the recursive calls below.
2422 If we are taking an invariant address of two identical objects
2423 they are necessarily equal as well. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (flags & OEP_CONSTANT_ADDRESS_OF)
2427 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2428 return 1;
2430 /* Next handle constant cases, those for which we can return 1 even
2431 if ONLY_CONST is set. */
2432 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2433 switch (TREE_CODE (arg0))
2435 case INTEGER_CST:
2436 return tree_int_cst_equal (arg0, arg1);
2438 case FIXED_CST:
2439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2440 TREE_FIXED_CST (arg1));
2442 case REAL_CST:
2443 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2444 TREE_REAL_CST (arg1)))
2445 return 1;
2448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2450 /* If we do not distinguish between signed and unsigned zero,
2451 consider them equal. */
2452 if (real_zerop (arg0) && real_zerop (arg1))
2453 return 1;
2455 return 0;
2457 case VECTOR_CST:
2459 unsigned i;
2461 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2462 return 0;
2464 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2466 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2467 VECTOR_CST_ELT (arg1, i), flags))
2468 return 0;
2470 return 1;
2473 case COMPLEX_CST:
2474 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2475 flags)
2476 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2477 flags));
2479 case STRING_CST:
2480 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2481 && ! memcmp (TREE_STRING_POINTER (arg0),
2482 TREE_STRING_POINTER (arg1),
2483 TREE_STRING_LENGTH (arg0)));
2485 case ADDR_EXPR:
2486 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2487 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2488 ? OEP_CONSTANT_ADDRESS_OF : 0);
2489 default:
2490 break;
2493 if (flags & OEP_ONLY_CONST)
2494 return 0;
2496 /* Define macros to test an operand from arg0 and arg1 for equality and a
2497 variant that allows null and views null as being different from any
2498 non-null value. In the latter case, if either is null, they both
2499 must be; otherwise, do the normal comparison. */
2500 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2501 TREE_OPERAND (arg1, N), flags)
2503 #define OP_SAME_WITH_NULL(N) \
2504 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2505 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2507 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2509 case tcc_unary:
2510 /* Two conversions are equal only if signedness and modes match. */
2511 switch (TREE_CODE (arg0))
2513 CASE_CONVERT:
2514 case FIX_TRUNC_EXPR:
2515 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2516 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2517 return 0;
2518 break;
2519 default:
2520 break;
2523 return OP_SAME (0);
2526 case tcc_comparison:
2527 case tcc_binary:
2528 if (OP_SAME (0) && OP_SAME (1))
2529 return 1;
2531 /* For commutative ops, allow the other order. */
2532 return (commutative_tree_code (TREE_CODE (arg0))
2533 && operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags));
2538 case tcc_reference:
2539 /* If either of the pointer (or reference) expressions we are
2540 dereferencing contain a side effect, these cannot be equal. */
2541 if (TREE_SIDE_EFFECTS (arg0)
2542 || TREE_SIDE_EFFECTS (arg1))
2543 return 0;
2545 switch (TREE_CODE (arg0))
2547 case INDIRECT_REF:
2548 case REALPART_EXPR:
2549 case IMAGPART_EXPR:
2550 return OP_SAME (0);
2552 case TARGET_MEM_REF:
2553 /* Require equal extra operands and then fall through to MEM_REF
2554 handling of the two common operands. */
2555 if (!OP_SAME_WITH_NULL (2)
2556 || !OP_SAME_WITH_NULL (3)
2557 || !OP_SAME_WITH_NULL (4))
2558 return 0;
2559 /* Fallthru. */
2560 case MEM_REF:
2561 /* Require equal access sizes, and similar pointer types.
2562 We can have incomplete types for array references of
2563 variable-sized arrays from the Fortran frontend
2564 though. */
2565 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2566 || (TYPE_SIZE (TREE_TYPE (arg0))
2567 && TYPE_SIZE (TREE_TYPE (arg1))
2568 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2569 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2570 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2571 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2572 && OP_SAME (0) && OP_SAME (1));
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null.
2577 Compare the array index by value first if it is constant, as we
2578 may have different types but the same value here. */
2579 return (OP_SAME (0)
2580 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 1))
2582 || OP_SAME (1))
2583 && OP_SAME_WITH_NULL (2)
2584 && OP_SAME_WITH_NULL (3));
2586 case COMPONENT_REF:
2587 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2588 may be NULL when we're called to compare MEM_EXPRs. */
2589 return OP_SAME_WITH_NULL (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2);
2593 case BIT_FIELD_REF:
2594 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 default:
2597 return 0;
2600 case tcc_expression:
2601 switch (TREE_CODE (arg0))
2603 case ADDR_EXPR:
2604 case TRUTH_NOT_EXPR:
2605 return OP_SAME (0);
2607 case TRUTH_ANDIF_EXPR:
2608 case TRUTH_ORIF_EXPR:
2609 return OP_SAME (0) && OP_SAME (1);
2611 case FMA_EXPR:
2612 case WIDEN_MULT_PLUS_EXPR:
2613 case WIDEN_MULT_MINUS_EXPR:
2614 if (!OP_SAME (2))
2615 return 0;
2616 /* The multiplication operands are commutative. */
2617 /* FALLTHRU */
2619 case TRUTH_AND_EXPR:
2620 case TRUTH_OR_EXPR:
2621 case TRUTH_XOR_EXPR:
2622 if (OP_SAME (0) && OP_SAME (1))
2623 return 1;
2625 /* Otherwise take into account this is a commutative operation. */
2626 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2627 TREE_OPERAND (arg1, 1), flags)
2628 && operand_equal_p (TREE_OPERAND (arg0, 1),
2629 TREE_OPERAND (arg1, 0), flags));
2631 case COND_EXPR:
2632 case VEC_COND_EXPR:
2633 case DOT_PROD_EXPR:
2634 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 default:
2637 return 0;
2640 case tcc_vl_exp:
2641 switch (TREE_CODE (arg0))
2643 case CALL_EXPR:
2644 /* If the CALL_EXPRs call different functions, then they
2645 clearly can not be equal. */
2646 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2647 flags))
2648 return 0;
2651 unsigned int cef = call_expr_flags (arg0);
2652 if (flags & OEP_PURE_SAME)
2653 cef &= ECF_CONST | ECF_PURE;
2654 else
2655 cef &= ECF_CONST;
2656 if (!cef)
2657 return 0;
2660 /* Now see if all the arguments are the same. */
2662 const_call_expr_arg_iterator iter0, iter1;
2663 const_tree a0, a1;
2664 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2665 a1 = first_const_call_expr_arg (arg1, &iter1);
2666 a0 && a1;
2667 a0 = next_const_call_expr_arg (&iter0),
2668 a1 = next_const_call_expr_arg (&iter1))
2669 if (! operand_equal_p (a0, a1, flags))
2670 return 0;
2672 /* If we get here and both argument lists are exhausted
2673 then the CALL_EXPRs are equal. */
2674 return ! (a0 || a1);
2676 default:
2677 return 0;
2680 case tcc_declaration:
2681 /* Consider __builtin_sqrt equal to sqrt. */
2682 return (TREE_CODE (arg0) == FUNCTION_DECL
2683 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2684 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2685 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2687 default:
2688 return 0;
2691 #undef OP_SAME
2692 #undef OP_SAME_WITH_NULL
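/* Illustration (not part of GCC; a sketch assuming IEEE semantics): the
   signed-zero distinction drawn in the comment before operand_equal_p.
   The two constants compare equal with C's ==, yet are distinguishable,
   so they must not be treated as interchangeable operands when signed
   zeros are honored.  */
static int
demo_signed_zero (void)
{
  double pz = 0.0, nz = -0.0;
  /* == says equal, but division exposes the difference: 1/0.0 is +inf
     while 1/-0.0 is -inf.  Returns 1 under IEEE math.  */
  return pz == nz && 1.0 / pz != 1.0 / nz;
}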
2695 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2696 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2698 When in doubt, return 0. */
2700 static int
2701 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2703 int unsignedp1, unsignedpo;
2704 tree primarg0, primarg1, primother;
2705 unsigned int correct_width;
2707 if (operand_equal_p (arg0, arg1, 0))
2708 return 1;
2710 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2711 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2712 return 0;
2714 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2715 and see if the inner values are the same. This removes any
2716 signedness comparison, which doesn't matter here. */
2717 primarg0 = arg0, primarg1 = arg1;
2718 STRIP_NOPS (primarg0);
2719 STRIP_NOPS (primarg1);
2720 if (operand_equal_p (primarg0, primarg1, 0))
2721 return 1;
2723 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2724 actual comparison operand, ARG0.
2726 First throw away any conversions to wider types
2727 already present in the operands. */
2729 primarg1 = get_narrower (arg1, &unsignedp1);
2730 primother = get_narrower (other, &unsignedpo);
2732 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2733 if (unsignedp1 == unsignedpo
2734 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2735 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2737 tree type = TREE_TYPE (arg0);
2739 /* Make sure shorter operand is extended the right way
2740 to match the longer operand. */
2741 primarg1 = fold_convert (signed_or_unsigned_type_for
2742 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2744 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2745 return 1;
2748 return 0;
2751 /* See if ARG is an expression that is either a comparison or is performing
2752 arithmetic on comparisons. The comparisons must only be comparing
2753 two different values, which will be stored in *CVAL1 and *CVAL2; if
2754 they are nonzero it means that some operands have already been found.
2755 No variables may be used anywhere else in the expression except in the
2756 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2757 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2759 If this is true, return 1. Otherwise, return zero. */
2761 static int
2762 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2769 tclass = tcc_unary;
2770 else if (tclass == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2772 || code == COMPOUND_EXPR))
2773 tclass = tcc_binary;
2775 else if (tclass == tcc_expression && code == SAVE_EXPR
2776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2778 /* If we've already found a CVAL1 or CVAL2, this expression is
2779 too complex to handle. */
2780 if (*cval1 || *cval2)
2781 return 0;
2783 tclass = tcc_unary;
2784 *save_p = 1;
2787 switch (tclass)
2789 case tcc_unary:
2790 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2792 case tcc_binary:
2793 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2794 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2795 cval1, cval2, save_p));
2797 case tcc_constant:
2798 return 1;
2800 case tcc_expression:
2801 if (code == COND_EXPR)
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2803 cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p)
2806 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2807 cval1, cval2, save_p));
2808 return 0;
2810 case tcc_comparison:
2811 /* First see if we can handle the first operand, then the second. For
2812 the second operand, we know *CVAL1 can't be zero. It must be that
2813 one side of the comparison is each of the values; test for the
2814 case where this isn't true by failing if the two operands
2815 are the same. */
2817 if (operand_equal_p (TREE_OPERAND (arg, 0),
2818 TREE_OPERAND (arg, 1), 0))
2819 return 0;
2821 if (*cval1 == 0)
2822 *cval1 = TREE_OPERAND (arg, 0);
2823 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2825 else if (*cval2 == 0)
2826 *cval2 = TREE_OPERAND (arg, 0);
2827 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2829 else
2830 return 0;
2832 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 1);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2838 else
2839 return 0;
2841 return 1;
2843 default:
2844 return 0;
2848 /* ARG is a tree that is known to contain just arithmetic operations and
2849 comparisons. Evaluate the operations in the tree substituting NEW0 for
2850 any occurrence of OLD0 as an operand of a comparison and likewise for
2851 NEW1 and OLD1. */
2853 static tree
2854 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2855 tree old1, tree new1)
2857 tree type = TREE_TYPE (arg);
2858 enum tree_code code = TREE_CODE (arg);
2859 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2861 /* We can handle some of the tcc_expression cases here. */
2862 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2863 tclass = tcc_unary;
2864 else if (tclass == tcc_expression
2865 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2866 tclass = tcc_binary;
2868 switch (tclass)
2870 case tcc_unary:
2871 return fold_build1_loc (loc, code, type,
2872 eval_subst (loc, TREE_OPERAND (arg, 0),
2873 old0, new0, old1, new1));
2875 case tcc_binary:
2876 return fold_build2_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1),
2879 eval_subst (loc, TREE_OPERAND (arg, 1),
2880 old0, new0, old1, new1));
2882 case tcc_expression:
2883 switch (code)
2885 case SAVE_EXPR:
2886 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2887 old1, new1);
2889 case COMPOUND_EXPR:
2890 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2891 old1, new1);
2893 case COND_EXPR:
2894 return fold_build3_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1),
2899 eval_subst (loc, TREE_OPERAND (arg, 2),
2900 old0, new0, old1, new1));
2901 default:
2902 break;
2904 /* Fall through - ??? */
2906 case tcc_comparison:
2908 tree arg0 = TREE_OPERAND (arg, 0);
2909 tree arg1 = TREE_OPERAND (arg, 1);
2911 /* We need to check both for exact equality and tree equality. The
2912 former will be true if the operand has a side-effect. In that
2913 case, we know the operand occurred exactly once. */
2915 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2916 arg0 = new0;
2917 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2918 arg0 = new1;
2920 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2921 arg1 = new0;
2922 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2923 arg1 = new1;
2925 return fold_build2_loc (loc, code, type, arg0, arg1);
2928 default:
2929 return arg;
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED was previously an operand of the expression
2935 but is now not needed (e.g., we folded OMITTED * 0).
2937 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2938 the conversion of RESULT to TYPE. */
2940 tree
2941 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2943 tree t = fold_convert_loc (loc, type, result);
2945 /* If the resulting operand is an empty statement, just return the omitted
2946 statement cast to void. */
2947 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2948 return build1_loc (loc, NOP_EXPR, void_type_node,
2949 fold_ignored_result (omitted));
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2_loc (loc, COMPOUND_EXPR, type,
2953 fold_ignored_result (omitted), t);
2955 return non_lvalue_loc (loc, t);
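/* Illustration (not part of GCC; a source-level sketch using a
   hypothetical function demo_f): when OMITTED has side effects it is
   kept in a COMPOUND_EXPR, so folding demo_f () * 0 yields the moral
   equivalent of the comma expression below, not a bare 0.  */
extern int demo_f (void);
static int
demo_omit_one_operand (void)
{
  return (demo_f (), 0);
}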
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2962 tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement cast to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2969 return build1_loc (loc, NOP_EXPR, void_type_node,
2970 fold_ignored_result (omitted));
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2_loc (loc, COMPOUND_EXPR, type,
2974 fold_ignored_result (omitted), t);
2976 return pedantic_non_lvalue_loc (loc, t);
2979 /* Return a tree for the case when the result of an expression is RESULT
2980 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2981 of the expression but are now not needed.
2983 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2984 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2985 evaluated before OMITTED2. Otherwise, if neither has side effects,
2986 just do the conversion of RESULT to TYPE. */
2988 tree
2989 omit_two_operands_loc (location_t loc, tree type, tree result,
2990 tree omitted1, tree omitted2)
2992 tree t = fold_convert_loc (loc, type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 tree
3011 fold_truth_not_expr (location_t loc, tree arg)
3013 tree type = TREE_TYPE (arg);
3014 enum tree_code code = TREE_CODE (arg);
3015 location_t loc1, loc2;
3017 /* If this is a comparison, we can simply invert it, except for
3018 floating-point non-equality comparisons, in which case we just
3019 enclose a TRUTH_NOT_EXPR around what we have. */
3021 if (TREE_CODE_CLASS (code) == tcc_comparison)
3023 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3024 if (FLOAT_TYPE_P (op_type)
3025 && flag_trapping_math
3026 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3027 && code != NE_EXPR && code != EQ_EXPR)
3028 return NULL_TREE;
3030 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3031 if (code == ERROR_MARK)
3032 return NULL_TREE;
3034 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3035 TREE_OPERAND (arg, 1));
3038 switch (code)
3040 case INTEGER_CST:
3041 return constant_boolean_node (integer_zerop (arg), type);
3043 case TRUTH_AND_EXPR:
3044 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3045 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3046 return build2_loc (loc, TRUTH_OR_EXPR, type,
3047 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3048 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3050 case TRUTH_OR_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_AND_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3066 else
3067 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3068 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3073 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3074 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3075 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3076 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 case TRUTH_ORIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 case TRUTH_NOT_EXPR:
3086 return TREE_OPERAND (arg, 0);
3088 case COND_EXPR:
3090 tree arg1 = TREE_OPERAND (arg, 1);
3091 tree arg2 = TREE_OPERAND (arg, 2);
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3096 /* A COND_EXPR may have a throw as one operand, which
3097 then has void type. Just leave void operands
3098 as they are. */
3099 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3100 VOID_TYPE_P (TREE_TYPE (arg1))
3101 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3102 VOID_TYPE_P (TREE_TYPE (arg2))
3103 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3106 case COMPOUND_EXPR:
3107 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 TREE_OPERAND (arg, 0),
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3112 case NON_LVALUE_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3116 CASE_CONVERT:
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3118 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3120 /* ... fall through ... */
3122 case FLOAT_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 return build1_loc (loc, TREE_CODE (arg), type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3127 case BIT_AND_EXPR:
3128 if (!integer_onep (TREE_OPERAND (arg, 1)))
3129 return NULL_TREE;
3130 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3132 case SAVE_EXPR:
3133 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3135 case CLEANUP_POINT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3140 default:
3141 return NULL_TREE;
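/* Illustration (not part of GCC; a standalone sketch): the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws.  */
static int
demo_demorgan (int a, int b)
{
  /* Negating an AND gives the OR of the negations, and vice versa;
     both equalities hold for all inputs.  */
  return (!(a && b) == (!a || !b)) && (!(a || b) == (!a && !b));
}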
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 tree
3153 invert_truthvalue_loc (location_t loc, tree arg)
3155 tree tem;
3157 if (TREE_CODE (arg) == ERROR_MARK)
3158 return arg;
3160 tem = fold_truth_not_expr (loc, arg);
3161 if (!tem)
3162 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3164 return tem;
3167 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3168 operands are another bit-wise operation with a common input. If so,
3169 distribute the bit operations to save an operation and possibly two if
3170 constants are involved. For example, convert
3171 (A | B) & (A | C) into A | (B & C)
3172 Further simplification will occur if B and C are constants.
3174 If this optimization cannot be done, 0 will be returned. */
3176 static tree
3177 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3178 tree arg0, tree arg1)
3180 tree common;
3181 tree left, right;
3183 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3184 || TREE_CODE (arg0) == code
3185 || (TREE_CODE (arg0) != BIT_AND_EXPR
3186 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3187 return 0;
3189 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3191 common = TREE_OPERAND (arg0, 0);
3192 left = TREE_OPERAND (arg0, 1);
3193 right = TREE_OPERAND (arg1, 1);
3195 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3197 common = TREE_OPERAND (arg0, 0);
3198 left = TREE_OPERAND (arg0, 1);
3199 right = TREE_OPERAND (arg1, 0);
3201 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3203 common = TREE_OPERAND (arg0, 1);
3204 left = TREE_OPERAND (arg0, 0);
3205 right = TREE_OPERAND (arg1, 1);
3207 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3209 common = TREE_OPERAND (arg0, 1);
3210 left = TREE_OPERAND (arg0, 0);
3211 right = TREE_OPERAND (arg1, 0);
3213 else
3214 return 0;
3216 common = fold_convert_loc (loc, type, common);
3217 left = fold_convert_loc (loc, type, left);
3218 right = fold_convert_loc (loc, type, right);
3219 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3220 fold_build2_loc (loc, code, type, left, right));
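/* Illustration (not part of GCC; a standalone sketch): the identity
   (A | B) & (A | C) == A | (B & C) that distribute_bit_expr applies,
   together with its dual where | and & trade places.  */
static int
demo_distribute_bits (int a, int b, int c)
{
  /* Both equalities hold bitwise for all inputs.  */
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));
}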
3223 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3224 with code CODE. This optimization is unsafe. */
3225 static tree
3226 distribute_real_division (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3230 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3232 /* (A / C) +- (B / C) -> (A +- B) / C. */
3233 if (mul0 == mul1
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1), 0))
3236 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3237 fold_build2_loc (loc, code, type,
3238 TREE_OPERAND (arg0, 0),
3239 TREE_OPERAND (arg1, 0)),
3240 TREE_OPERAND (arg0, 1));
3242 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3243 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0), 0)
3245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3248 REAL_VALUE_TYPE r0, r1;
3249 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3250 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3251 if (!mul0)
3252 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3253 if (!mul1)
3254 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3255 real_arithmetic (&r0, code, &r0, &r1);
3256 return fold_build2_loc (loc, MULT_EXPR, type,
3257 TREE_OPERAND (arg0, 0),
3258 build_real (type, r0));
3261 return NULL_TREE;
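/* Illustration (not part of GCC; a sketch assuming IEEE double): why
   the transformation above is unsafe.  (A / C) + (B / C) and
   (A + B) / C can disagree when the combined numerator overflows even
   though each quotient is finite.  */
static int
demo_unsafe_rdiv (void)
{
  double a = 1e308, b = 1e308, c = 4.0;
  /* Left side is 5e307; right side is inf / 4 == inf.  Returns 1.  */
  return a / c + b / c != (a + b) / c;
}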
3264 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3265 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3267 static tree
3268 make_bit_field_ref (location_t loc, tree inner, tree type,
3269 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3271 tree result, bftype;
3273 if (bitpos == 0)
3275 tree size = TYPE_SIZE (TREE_TYPE (inner));
3276 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3277 || POINTER_TYPE_P (TREE_TYPE (inner)))
3278 && host_integerp (size, 0)
3279 && tree_low_cst (size, 0) == bitsize)
3280 return fold_convert_loc (loc, type, inner);
3283 bftype = type;
3284 if (TYPE_PRECISION (bftype) != bitsize
3285 || TYPE_UNSIGNED (bftype) == !unsignedp)
3286 bftype = build_nonstandard_integer_type (bitsize, 0);
3288 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 if (bftype != type)
3292 result = fold_convert_loc (loc, type, result);
3294 return result;
3297 /* Optimize a bit-field compare.
3299 There are two cases: First is a compare against a constant and the
3300 second is a comparison of two items where the fields are at the same
3301 bit position relative to the start of a chunk (byte, halfword, word)
3302 large enough to contain it. In these cases we can avoid the shift
3303 implicit in bitfield extractions.
3305 For constants, we emit a compare of the shifted constant with the
3306 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3307 compared. For two fields at the same position, we do the ANDs with the
3308 similar mask and compare the result of the ANDs.
3310 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3311 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3312 are the left and right operands of the comparison, respectively.
3314 If the optimization described above can be done, we return the resulting
3315 tree. Otherwise we return zero. */
3317 static tree
3318 optimize_bit_field_compare (location_t loc, enum tree_code code,
3319 tree compare_type, tree lhs, tree rhs)
3321 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3322 tree type = TREE_TYPE (lhs);
3323 tree signed_type, unsigned_type;
3324 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3325 enum machine_mode lmode, rmode, nmode;
3326 int lunsignedp, runsignedp;
3327 int lvolatilep = 0, rvolatilep = 0;
3328 tree linner, rinner = NULL_TREE;
3329 tree mask;
3330 tree offset;
3332 /* In the strict volatile bitfields case, doing code changes here may prevent
3333 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3334 if (flag_strict_volatile_bitfields > 0)
3335 return 0;
3337 /* Get all the information about the extractions being done. If the bit size
3338 is the same as the size of the underlying object, we aren't doing an
3339 extraction at all and so can do nothing. We also don't want to
3340 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3341 then will no longer be able to replace it. */
3342 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3343 &lunsignedp, &lvolatilep, false);
3344 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3345 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3346 return 0;
3348 if (!const_p)
3350 /* If this is not a constant, we can only do something if bit positions,
3351 sizes, and signedness are the same. */
3352 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3353 &runsignedp, &rvolatilep, false);
3355 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3356 || lunsignedp != runsignedp || offset != 0
3357 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3358 return 0;
3361 /* See if we can find a mode to refer to this field. We should be able to,
3362 but fail if we can't. */
3363 if (lvolatilep
3364 && GET_MODE_BITSIZE (lmode) > 0
3365 && flag_strict_volatile_bitfields > 0)
3366 nmode = lmode;
3367 else
3368 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3369 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3370 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3371 TYPE_ALIGN (TREE_TYPE (rinner))),
3372 word_mode, lvolatilep || rvolatilep);
3373 if (nmode == VOIDmode)
3374 return 0;
3376 /* Set signed and unsigned types of the precision of this mode for the
3377 shifts below. */
3378 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3379 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3381 /* Compute the bit position and size for the new reference and our offset
3382 within it. If the new reference is the same size as the original, we
3383 won't optimize anything, so return zero. */
3384 nbitsize = GET_MODE_BITSIZE (nmode);
3385 nbitpos = lbitpos & ~ (nbitsize - 1);
3386 lbitpos -= nbitpos;
3387 if (nbitsize == lbitsize)
3388 return 0;
3390 if (BYTES_BIG_ENDIAN)
3391 lbitpos = nbitsize - lbitsize - lbitpos;
3393 /* Make the mask to be used against the extracted field. */
3394 mask = build_int_cst_type (unsigned_type, -1);
3395 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3396 mask = const_binop (RSHIFT_EXPR, mask,
3397 size_int (nbitsize - lbitsize - lbitpos));
3399 if (! const_p)
3400 /* If not comparing with constant, just rework the comparison
3401 and return. */
3402 return fold_build2_loc (loc, code, compare_type,
3403 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3404 make_bit_field_ref (loc, linner,
3405 unsigned_type,
3406 nbitsize, nbitpos,
3408 mask),
3409 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3410 make_bit_field_ref (loc, rinner,
3411 unsigned_type,
3412 nbitsize, nbitpos,
3414 mask));
3416 /* Otherwise, we are handling the constant case. See if the constant is too
3417 big for the field. Warn and return a tree for 0 (false) if so. We do
3418 this not only for its own sake, but to avoid having to test for this
3419 error case below. If we didn't, we might generate wrong code.
3421 For unsigned fields, the constant shifted right by the field length should
3422 be all zero. For signed fields, the high-order bits should agree with
3423 the sign bit. */
3425 if (lunsignedp)
3427 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3428 fold_convert_loc (loc,
3429 unsigned_type, rhs),
3430 size_int (lbitsize))))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR,
3440 fold_convert_loc (loc, signed_type, rhs),
3441 size_int (lbitsize - 1));
3442 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3444 warning (0, "comparison is always %d due to width of bit-field",
3445 code == NE_EXPR);
3446 return constant_boolean_node (code == NE_EXPR, compare_type);
3450 /* Single-bit compares should always be against zero. */
3451 if (lbitsize == 1 && ! integer_zerop (rhs))
3453 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3454 rhs = build_int_cst (type, 0);
3457 /* Make a new bitfield reference, shift the constant over the
3458 appropriate number of bits and mask it with the computed mask
3459 (in case this was a signed field). If we changed it, make a new one. */
3460 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3461 if (lvolatilep)
3463 TREE_SIDE_EFFECTS (lhs) = 1;
3464 TREE_THIS_VOLATILE (lhs) = 1;
3467 rhs = const_binop (BIT_AND_EXPR,
3468 const_binop (LSHIFT_EXPR,
3469 fold_convert_loc (loc, unsigned_type, rhs),
3470 size_int (lbitpos)),
3471 mask);
3473 lhs = build2_loc (loc, code, compare_type,
3474 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3475 return lhs;
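/* Illustration (not part of GCC; a standalone sketch): the shape of the
   constant-case rewrite above.  Comparing a 4-bit field at bit 3 of
   WORD against 5 by extract-then-compare equals comparing the masked
   word against the constant shifted into place, saving the shift.  */
static int
demo_bit_field_compare (unsigned word)
{
  unsigned field = (word >> 3) & 0xfu;		/* the implicit extract */
  /* Holds for every WORD.  */
  return (field == 5) == ((word & (0xfu << 3)) == (5u << 3));
}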
3478 /* Subroutine for fold_truth_andor_1: decode a field reference.
3480 If EXP is a comparison reference, we return the innermost reference.
3482 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3483 set to the starting bit number.
3485 If the innermost field can be completely contained in a mode-sized
3486 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3488 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3489 otherwise it is not changed.
3491 *PUNSIGNEDP is set to the signedness of the field.
3493 *PMASK is set to the mask used. This is either contained in a
3494 BIT_AND_EXPR or derived from the width of the field.
3496 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3498 Return 0 if this is not a component reference or is one that we can't
3499 do anything with. */
3501 static tree
3502 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3503 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3504 int *punsignedp, int *pvolatilep,
3505 tree *pmask, tree *pand_mask)
3507 tree outer_type = 0;
3508 tree and_mask = 0;
3509 tree mask, inner, offset;
3510 tree unsigned_type;
3511 unsigned int precision;
3513 /* All the optimizations using this function assume integer fields.
3514 There are problems with FP fields since the type_for_size call
3515 below can fail for, e.g., XFmode. */
3516 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3517 return 0;
3519 /* We are interested in the bare arrangement of bits, so strip everything
3520 that doesn't affect the machine mode. However, record the type of the
3521 outermost expression if it may matter below. */
3522 if (CONVERT_EXPR_P (exp)
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst_type (unsigned_type, -1);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3556 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3558 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3559 if (and_mask != 0)
3560 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3561 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3563 *pmask = mask;
3564 *pand_mask = and_mask;
3565 return inner;
3568 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3569 bit positions. */
3571 static int
3572 all_ones_mask_p (const_tree mask, int size)
3574 tree type = TREE_TYPE (mask);
3575 unsigned int precision = TYPE_PRECISION (type);
3576 tree tmask;
3578 tmask = build_int_cst_type (signed_type_for (type), -1);
3580 return
3581 tree_int_cst_equal (mask,
3582 const_binop (RSHIFT_EXPR,
3583 const_binop (LSHIFT_EXPR, tmask,
3584 size_int (precision - size)),
3585 size_int (precision - size)));
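/* Illustration (not part of GCC; a sketch for a 32-bit unsigned and
   1 <= SIZE <= 32): the shift pair used above to build the reference
   mask of SIZE low-order ones.  */
static int
demo_all_ones_mask (unsigned mask, int size)
{
  unsigned tmask = ~0u;				/* all-ones, i.e. -1 */
  tmask = (tmask << (32 - size)) >> (32 - size);/* keep SIZE low bits */
  return mask == tmask;
}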
3588 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3589 represents the sign bit of EXP's type. If EXP represents a sign
3590 or zero extension, also test VAL against the unextended type.
3591 The return value is the (sub)expression whose sign bit is VAL,
3592 or NULL_TREE otherwise. */
3594 static tree
3595 sign_bit_p (tree exp, const_tree val)
3597 unsigned HOST_WIDE_INT mask_lo, lo;
3598 HOST_WIDE_INT mask_hi, hi;
3599 int width;
3600 tree t;
3602 /* Tree EXP must have an integral type. */
3603 t = TREE_TYPE (exp);
3604 if (! INTEGRAL_TYPE_P (t))
3605 return NULL_TREE;
3607 /* Tree VAL must be an integer constant. */
3608 if (TREE_CODE (val) != INTEGER_CST
3609 || TREE_OVERFLOW (val))
3610 return NULL_TREE;
3612 width = TYPE_PRECISION (t);
3613 if (width > HOST_BITS_PER_WIDE_INT)
3615 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3616 lo = 0;
3618 mask_hi = ((unsigned HOST_WIDE_INT) -1
3619 >> (HOST_BITS_PER_DOUBLE_INT - width));
3620 mask_lo = -1;
3622 else
3624 hi = 0;
3625 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3627 mask_hi = 0;
3628 mask_lo = ((unsigned HOST_WIDE_INT) -1
3629 >> (HOST_BITS_PER_WIDE_INT - width));
3632 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3633 treat VAL as if it were unsigned. */
3634 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3635 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3636 return exp;
3638 /* Handle extension from a narrower type. */
3639 if (TREE_CODE (exp) == NOP_EXPR
3640 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3641 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3643 return NULL_TREE;
3646 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3647 to be evaluated unconditionally. */
3649 static int
3650 simple_operand_p (const_tree exp)
3652 /* Strip any conversions that don't change the machine mode. */
3653 STRIP_NOPS (exp);
3655 return (CONSTANT_CLASS_P (exp)
3656 || TREE_CODE (exp) == SSA_NAME
3657 || (DECL_P (exp)
3658 && ! TREE_ADDRESSABLE (exp)
3659 && ! TREE_THIS_VOLATILE (exp)
3660 && ! DECL_NONLOCAL (exp)
3661 /* Don't regard global variables as simple. They may be
3662 allocated in ways unknown to the compiler (shared memory,
3663 #pragma weak, etc). */
3664 && ! TREE_PUBLIC (exp)
3665 && ! DECL_EXTERNAL (exp)
3666 /* Loading a static variable is unduly expensive, but global
3667 registers aren't expensive. */
3668 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3671 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3672 to be evaluated unconditionally.
3673 In addition to simple_operand_p, we assume that comparisons, conversions,
3674 and logic-not operations are simple, if their operands are simple, too. */
3676 static bool
3677 simple_operand_p_2 (tree exp)
3679 enum tree_code code;
3681 if (TREE_SIDE_EFFECTS (exp)
3682 || tree_could_trap_p (exp))
3683 return false;
3685 while (CONVERT_EXPR_P (exp))
3686 exp = TREE_OPERAND (exp, 0);
3688 code = TREE_CODE (exp);
3690 if (TREE_CODE_CLASS (code) == tcc_comparison)
3691 return (simple_operand_p (TREE_OPERAND (exp, 0))
3692 && simple_operand_p (TREE_OPERAND (exp, 1)));
3694 if (code == TRUTH_NOT_EXPR)
3695 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3697 return simple_operand_p (exp);
3701 /* The following functions are subroutines to fold_range_test and allow it to
3702 try to change a logical combination of comparisons into a range test.
3704 For example, both
3705 X == 2 || X == 3 || X == 4 || X == 5
3706 and
3707 X >= 2 && X <= 5
3708 are converted to
3709 (unsigned) (X - 2) <= 3
3711 We describe each set of comparisons as being either inside or outside
3712 a range, using a variable named like IN_P, and then describe the
3713 range with a lower and upper bound. If one of the bounds is omitted,
3714 it represents either the highest or lowest value of the type.
3716 In the comments below, we represent a range by two numbers in brackets
3717 preceded by a "+" to designate being inside that range, or a "-" to
3718 designate being outside that range, so the condition can be inverted by
3719 flipping the prefix. An omitted bound is represented by a "-". For
3720 example, "- [-, 10]" means being outside the range starting at the lowest
3721 possible value and ending at 10, in other words, being greater than 10.
3722 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 always false.
3725 We set up things so that the missing bounds are handled in a consistent
3726 manner so neither a missing bound nor "true" and "false" need to be
3727 handled using a special case. */
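/* For instance, under this notation X >= 2 && X <= 5 is the range
   "+ [2, 5]", its negation is "- [2, 5]", and X > 10 is "- [-, 10]".
   (Illustrative constants only.)  */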
3729 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3730 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3731 and UPPER1_P are nonzero if the respective argument is an upper bound
3732 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3733 must be specified for a comparison. ARG1 will be converted to ARG0's
3734 type if both are specified. */
3736 static tree
3737 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3738 tree arg1, int upper1_p)
3740 tree tem;
3741 int result;
3742 int sgn0, sgn1;
3744 /* If neither arg represents infinity, do the normal operation.
3745 Else, if not a comparison, return infinity. Else handle the special
3746 comparison rules. Note that most of the cases below won't occur, but
3747 are handled for consistency. */
3749 if (arg0 != 0 && arg1 != 0)
3751 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3752 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 STRIP_NOPS (tem);
3754 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3757 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 return 0;
3760 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3761 for neither.  In real maths, we cannot assume open-ended ranges are
3762 the same.  But this is computer arithmetic, where numbers are finite.
3763 We can therefore stand in for any omitted bound with a value Z whose
3764 magnitude exceeds that of any representable number.  This permits
3765 us to treat unbounded ranges as equal.  */
3766 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3767 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3768 switch (code)
3770 case EQ_EXPR:
3771 result = sgn0 == sgn1;
3772 break;
3773 case NE_EXPR:
3774 result = sgn0 != sgn1;
3775 break;
3776 case LT_EXPR:
3777 result = sgn0 < sgn1;
3778 break;
3779 case LE_EXPR:
3780 result = sgn0 <= sgn1;
3781 break;
3782 case GT_EXPR:
3783 result = sgn0 > sgn1;
3784 break;
3785 case GE_EXPR:
3786 result = sgn0 >= sgn1;
3787 break;
3788 default:
3789 gcc_unreachable ();
3792 return constant_boolean_node (result, type);
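/* As an illustration of the SGN encoding above: with CODE == LT_EXPR
   and both arguments omitted, an omitted lower bound (SGN == -1)
   compares less than an omitted upper bound (SGN == 1), so the
   comparison folds to true.  */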
3795 /* Helper routine for make_range. Perform one step for it, return
3796 new expression if the loop should continue or NULL_TREE if it should
3797 stop. */
3799 tree
3800 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3801 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3802 bool *strict_overflow_p)
3804 tree arg0_type = TREE_TYPE (arg0);
3805 tree n_low, n_high, low = *p_low, high = *p_high;
3806 int in_p = *p_in_p, n_in_p;
3808 switch (code)
3810 case TRUTH_NOT_EXPR:
3811 *p_in_p = ! in_p;
3812 return arg0;
3814 case EQ_EXPR: case NE_EXPR:
3815 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3816 /* We can only do something if the range is testing for zero
3817 and if the second operand is an integer constant. Note that
3818 saying something is "in" the range we make is done by
3819 complementing IN_P, since IN_P is initially set for the case of
3820 being not equal to zero; "out" is leaving it alone.  */
3821 if (low == NULL_TREE || high == NULL_TREE
3822 || ! integer_zerop (low) || ! integer_zerop (high)
3823 || TREE_CODE (arg1) != INTEGER_CST)
3824 return NULL_TREE;
3826 switch (code)
3828 case NE_EXPR: /* - [c, c] */
3829 low = high = arg1;
3830 break;
3831 case EQ_EXPR: /* + [c, c] */
3832 in_p = ! in_p, low = high = arg1;
3833 break;
3834 case GT_EXPR: /* - [-, c] */
3835 low = 0, high = arg1;
3836 break;
3837 case GE_EXPR: /* + [c, -] */
3838 in_p = ! in_p, low = arg1, high = 0;
3839 break;
3840 case LT_EXPR: /* - [c, -] */
3841 low = arg1, high = 0;
3842 break;
3843 case LE_EXPR: /* + [-, c] */
3844 in_p = ! in_p, low = 0, high = arg1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 /* If this is an unsigned comparison, we also know that EXP is
3851 greater than or equal to zero. We base the range tests we make
3852 on that fact, so we record it here so we can parse existing
3853 range tests. We test arg0_type since often the return type
3854 of, e.g. EQ_EXPR, is boolean. */
3855 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3857 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3858 in_p, low, high, 1,
3859 build_int_cst (arg0_type, 0),
3860 NULL_TREE))
3861 return NULL_TREE;
3863 in_p = n_in_p, low = n_low, high = n_high;
3865 /* If the high bound is missing, but we have a nonzero low
3866 bound, reverse the range so it goes from zero to the low bound
3867 minus 1. */
3868 if (high == 0 && low && ! integer_zerop (low))
3870 in_p = ! in_p;
3871 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3872 integer_one_node, 0);
3873 low = build_int_cst (arg0_type, 0);
3877 *p_low = low;
3878 *p_high = high;
3879 *p_in_p = in_p;
3880 return arg0;
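      /* For example (signed X): starting from the initial range
	 "- [0, 0]" (i.e. X != 0), a GE_EXPR against the constant 2
	 complements IN_P and sets the bounds to "+ [2, -]",
	 i.e. X in [2, +infinity].  */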
3882 case NEGATE_EXPR:
3883 /* (-x) IN [a,b] -> x in [-b, -a] */
3884 n_low = range_binop (MINUS_EXPR, exp_type,
3885 build_int_cst (exp_type, 0),
3886 0, high, 1);
3887 n_high = range_binop (MINUS_EXPR, exp_type,
3888 build_int_cst (exp_type, 0),
3889 0, low, 0);
3890 if (n_high != 0 && TREE_OVERFLOW (n_high))
3891 return NULL_TREE;
3892 goto normalize;
3894 case BIT_NOT_EXPR:
3895 /* ~ X -> -X - 1 */
3896 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3897 build_int_cst (exp_type, 1));
3899 case PLUS_EXPR:
3900 case MINUS_EXPR:
3901 if (TREE_CODE (arg1) != INTEGER_CST)
3902 return NULL_TREE;
3904 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3905 move a constant to the other side. */
3906 if (!TYPE_UNSIGNED (arg0_type)
3907 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3908 return NULL_TREE;
3910 /* If EXP is signed, any overflow in the computation is undefined,
3911 so we don't worry about it so long as our computations on
3912 the bounds don't overflow. For unsigned, overflow is defined
3913 and this is exactly the right thing. */
3914 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3915 arg0_type, low, 0, arg1, 0);
3916 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3917 arg0_type, high, 1, arg1, 0);
3918 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3919 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3920 return NULL_TREE;
3922 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3923 *strict_overflow_p = true;
3925 normalize:
3926 /* Check for an unsigned range which has wrapped around the maximum
3927 value thus making n_high < n_low, and normalize it. */
3928 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3930 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3931 integer_one_node, 0);
3932 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3933 integer_one_node, 0);
3935 /* If the range is of the form +/- [ x+1, x ], we won't
3936 be able to normalize it. But then, it represents the
3937 whole range or the empty set, so make it
3938 +/- [ -, - ]. */
3939 if (tree_int_cst_equal (n_low, low)
3940 && tree_int_cst_equal (n_high, high))
3941 low = high = 0;
3942 else
3943 in_p = ! in_p;
3945 else
3946 low = n_low, high = n_high;
3948 *p_low = low;
3949 *p_high = high;
3950 *p_in_p = in_p;
3951 return arg0;
3953 CASE_CONVERT:
3954 case NON_LVALUE_EXPR:
3955 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3956 return NULL_TREE;
3958 if (! INTEGRAL_TYPE_P (arg0_type)
3959 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3960 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3961 return NULL_TREE;
3963 n_low = low, n_high = high;
3965 if (n_low != 0)
3966 n_low = fold_convert_loc (loc, arg0_type, n_low);
3968 if (n_high != 0)
3969 n_high = fold_convert_loc (loc, arg0_type, n_high);
3971 /* If we're converting arg0 from an unsigned type to exp's
3972 signed type, we will be doing the comparison as unsigned.
3973 The tests above have already verified that LOW and HIGH
3974 are both positive.
3976 So we have to ensure that we will handle large unsigned
3977 values the same way that the current signed bounds treat
3978 negative values. */
3980 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3982 tree high_positive;
3983 tree equiv_type;
3984 /* For fixed-point modes, we need to pass the saturating flag
3985 as the 2nd parameter. */
3986 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3987 equiv_type
3988 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3989 TYPE_SATURATING (arg0_type));
3990 else
3991 equiv_type
3992 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
3994 /* A range without an upper bound is, naturally, unbounded.
3995 Since convert would have cropped a very large value, use
3996 the max value for the destination type. */
3997 high_positive
3998 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3999 : TYPE_MAX_VALUE (arg0_type);
4001 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4002 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4003 fold_convert_loc (loc, arg0_type,
4004 high_positive),
4005 build_int_cst (arg0_type, 1));
4007 /* If the low bound is specified, "and" the range with the
4008 range for which the original unsigned value will be
4009 positive. */
4010 if (low != 0)
4012 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4013 1, fold_convert_loc (loc, arg0_type,
4014 integer_zero_node),
4015 high_positive))
4016 return NULL_TREE;
4018 in_p = (n_in_p == in_p);
4020 else
4022 /* Otherwise, "or" the range with the range of the input
4023 that will be interpreted as negative. */
4024 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4025 1, fold_convert_loc (loc, arg0_type,
4026 integer_zero_node),
4027 high_positive))
4028 return NULL_TREE;
4030 in_p = (in_p != n_in_p);
4034 *p_low = n_low;
4035 *p_high = n_high;
4036 *p_in_p = in_p;
4037 return arg0;
4039 default:
4040 return NULL_TREE;
4044 /* Given EXP, a logical expression, set the range it is testing into
4045 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4046 actually being tested. *PLOW and *PHIGH will be made of the same
4047 type as the returned expression. If EXP is not a comparison, we
4048 will most likely not be returning a useful value and range. Set
4049 *STRICT_OVERFLOW_P to true if the return value is only valid
4050 because signed overflow is undefined; otherwise, do not change
4051 *STRICT_OVERFLOW_P. */
4053 tree
4054 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4055 bool *strict_overflow_p)
4057 enum tree_code code;
4058 tree arg0, arg1 = NULL_TREE;
4059 tree exp_type, nexp;
4060 int in_p;
4061 tree low, high;
4062 location_t loc = EXPR_LOCATION (exp);
4064 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4065 and see if we can refine the range. Some of the cases below may not
4066 happen, but it doesn't seem worth worrying about this.  We keep
4067 iterating as long as make_range_step refines EXP; once it returns
4068 NULL_TREE we leave the while loop.  */
4070 in_p = 0;
4071 low = high = build_int_cst (TREE_TYPE (exp), 0);
4073 while (1)
4075 code = TREE_CODE (exp);
4076 exp_type = TREE_TYPE (exp);
4077 arg0 = NULL_TREE;
4079 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4081 if (TREE_OPERAND_LENGTH (exp) > 0)
4082 arg0 = TREE_OPERAND (exp, 0);
4083 if (TREE_CODE_CLASS (code) == tcc_binary
4084 || TREE_CODE_CLASS (code) == tcc_comparison
4085 || (TREE_CODE_CLASS (code) == tcc_expression
4086 && TREE_OPERAND_LENGTH (exp) > 1))
4087 arg1 = TREE_OPERAND (exp, 1);
4089 if (arg0 == NULL_TREE)
4090 break;
4092 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4093 &high, &in_p, strict_overflow_p);
4094 if (nexp == NULL_TREE)
4095 break;
4096 exp = nexp;
4099 /* If EXP is a constant, we can evaluate whether this is true or false. */
4100 if (TREE_CODE (exp) == INTEGER_CST)
4102 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4103 exp, 0, low, 0))
4104 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4105 exp, 1, high, 1)));
4106 low = high = 0;
4107 exp = 0;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4111 return exp;
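/* For example, for EXP = (x > 5) with signed x, this returns x with
   *PIN_P == 0, *PLOW == NULL_TREE and *PHIGH == 5, i.e. the range
   "- [-, 5]": x lies outside the range ending at 5.  */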
4114 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4115 type, TYPE, return an expression to test if EXP is in (or out of, depending
4116 on IN_P) the range. Return 0 if the test couldn't be created. */
4118 tree
4119 build_range_check (location_t loc, tree type, tree exp, int in_p,
4120 tree low, tree high)
4122 tree etype = TREE_TYPE (exp), value;
4124 #ifdef HAVE_canonicalize_funcptr_for_compare
4125 /* Disable this optimization for function pointer expressions
4126 on targets that require function pointer canonicalization. */
4127 if (HAVE_canonicalize_funcptr_for_compare
4128 && TREE_CODE (etype) == POINTER_TYPE
4129 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4130 return NULL_TREE;
4131 #endif
4133 if (! in_p)
4135 value = build_range_check (loc, type, exp, 1, low, high);
4136 if (value != 0)
4137 return invert_truthvalue_loc (loc, value);
4139 return 0;
4142 if (low == 0 && high == 0)
4143 return build_int_cst (type, 1);
4145 if (low == 0)
4146 return fold_build2_loc (loc, LE_EXPR, type, exp,
4147 fold_convert_loc (loc, etype, high));
4149 if (high == 0)
4150 return fold_build2_loc (loc, GE_EXPR, type, exp,
4151 fold_convert_loc (loc, etype, low));
4153 if (operand_equal_p (low, high, 0))
4154 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4155 fold_convert_loc (loc, etype, low));
4157 if (integer_zerop (low))
4159 if (! TYPE_UNSIGNED (etype))
4161 etype = unsigned_type_for (etype);
4162 high = fold_convert_loc (loc, etype, high);
4163 exp = fold_convert_loc (loc, etype, exp);
4165 return build_range_check (loc, type, exp, 1, 0, high);
4168 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4169 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4171 unsigned HOST_WIDE_INT lo;
4172 HOST_WIDE_INT hi;
4173 int prec;
4175 prec = TYPE_PRECISION (etype);
4176 if (prec <= HOST_BITS_PER_WIDE_INT)
4178 hi = 0;
4179 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4181 else
4183 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4184 lo = (unsigned HOST_WIDE_INT) -1;
4187 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4189 if (TYPE_UNSIGNED (etype))
4191 tree signed_etype = signed_type_for (etype);
4192 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4193 etype
4194 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4195 else
4196 etype = signed_etype;
4197 exp = fold_convert_loc (loc, etype, exp);
4199 return fold_build2_loc (loc, GT_EXPR, type, exp,
4200 build_int_cst (etype, 0));
4204 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4205 This requires wrap-around arithmetic for the type of the expression.
4206 First make sure that arithmetic in this type is valid, then make sure
4207 that it wraps around. */
4208 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4209 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4210 TYPE_UNSIGNED (etype));
4212 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4214 tree utype, minv, maxv;
4216 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4217 for the type in question, as we rely on this here. */
4218 utype = unsigned_type_for (etype);
4219 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4220 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4221 integer_one_node, 1);
4222 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4224 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4225 minv, 1, maxv, 1)))
4226 etype = utype;
4227 else
4228 return 0;
4231 high = fold_convert_loc (loc, etype, high);
4232 low = fold_convert_loc (loc, etype, low);
4233 exp = fold_convert_loc (loc, etype, exp);
4235 value = const_binop (MINUS_EXPR, high, low);
4238 if (POINTER_TYPE_P (etype))
4240 if (value != 0 && !TREE_OVERFLOW (value))
4242 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4243 return build_range_check (loc, type,
4244 fold_build_pointer_plus_loc (loc, exp, low),
4245 1, build_int_cst (etype, 0), value);
4247 return 0;
4250 if (value != 0 && !TREE_OVERFLOW (value))
4251 return build_range_check (loc, type,
4252 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4253 1, build_int_cst (etype, 0), value);
4255 return 0;
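/* As a sketch of the effect: a check for "+ [1, 9]" on a signed char
   C is built, roughly, as (unsigned char) (C - 1) <= 8, replacing a
   pair of signed comparisons by one unsigned comparison.  */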
4258 /* Return the predecessor of VAL in its type, handling the infinite case. */
4260 static tree
4261 range_predecessor (tree val)
4263 tree type = TREE_TYPE (val);
4265 if (INTEGRAL_TYPE_P (type)
4266 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4267 return 0;
4268 else
4269 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4272 /* Return the successor of VAL in its type, handling the infinite case. */
4274 static tree
4275 range_successor (tree val)
4277 tree type = TREE_TYPE (val);
4279 if (INTEGRAL_TYPE_P (type)
4280 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4281 return 0;
4282 else
4283 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4286 /* Given two ranges, see if we can merge them into one. Return 1 if we
4287 can, 0 if we can't. Set the output range into the specified parameters. */
4289 bool
4290 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4291 tree high0, int in1_p, tree low1, tree high1)
4293 int no_overlap;
4294 int subset;
4295 int temp;
4296 tree tem;
4297 int in_p;
4298 tree low, high;
4299 int lowequal = ((low0 == 0 && low1 == 0)
4300 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4301 low0, 0, low1, 0)));
4302 int highequal = ((high0 == 0 && high1 == 0)
4303 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4304 high0, 1, high1, 1)));
4306 /* Make range 0 be the range that starts first, or ends last if they
4307 start at the same value.  Swap them if that is not the case. */
4308 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4309 low0, 0, low1, 0))
4310 || (lowequal
4311 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4312 high1, 1, high0, 1))))
4314 temp = in0_p, in0_p = in1_p, in1_p = temp;
4315 tem = low0, low0 = low1, low1 = tem;
4316 tem = high0, high0 = high1, high1 = tem;
4319 /* Now flag two cases, whether the ranges are disjoint or whether the
4320 second range is totally subsumed in the first. Note that the tests
4321 below are simplified by the ones above. */
4322 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4323 high0, 1, low1, 0));
4324 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4325 high1, 1, high0, 1));
4327 /* We now have four cases, depending on whether we are including or
4328 excluding the two ranges. */
4329 if (in0_p && in1_p)
4331 /* If they don't overlap, the result is false. If the second range
4332 is a subset it is the result. Otherwise, the range is from the start
4333 of the second to the end of the first. */
4334 if (no_overlap)
4335 in_p = 0, low = high = 0;
4336 else if (subset)
4337 in_p = 1, low = low1, high = high1;
4338 else
4339 in_p = 1, low = low1, high = high0;
4342 else if (in0_p && ! in1_p)
4344 /* If they don't overlap, the result is the first range. If they are
4345 equal, the result is false. If the second range is a subset of the
4346 first, and the ranges begin at the same place, we go from just after
4347 the end of the second range to the end of the first. If the second
4348 range is not a subset of the first, or if it is a subset and both
4349 ranges end at the same place, the range starts at the start of the
4350 first range and ends just before the second range.
4351 Otherwise, we can't describe this as a single range. */
4352 if (no_overlap)
4353 in_p = 1, low = low0, high = high0;
4354 else if (lowequal && highequal)
4355 in_p = 0, low = high = 0;
4356 else if (subset && lowequal)
4358 low = range_successor (high1);
4359 high = high0;
4360 in_p = 1;
4361 if (low == 0)
4363 /* We are in the weird situation where high0 > high1 but
4364 high1 has no successor. Punt. */
4365 return 0;
4368 else if (! subset || highequal)
4370 low = low0;
4371 high = range_predecessor (low1);
4372 in_p = 1;
4373 if (high == 0)
4375 /* low0 < low1 but low1 has no predecessor. Punt. */
4376 return 0;
4379 else
4380 return 0;
4383 else if (! in0_p && in1_p)
4385 /* If they don't overlap, the result is the second range. If the second
4386 is a subset of the first, the result is false. Otherwise,
4387 the range starts just after the first range and ends at the
4388 end of the second. */
4389 if (no_overlap)
4390 in_p = 1, low = low1, high = high1;
4391 else if (subset || highequal)
4392 in_p = 0, low = high = 0;
4393 else
4395 low = range_successor (high0);
4396 high = high1;
4397 in_p = 1;
4398 if (low == 0)
4400 /* high1 > high0 but high0 has no successor. Punt. */
4401 return 0;
4406 else
4408 /* The case where we are excluding both ranges. Here the complex case
4409 is if they don't overlap. In that case, the only time we have a
4410 range is if they are adjacent. If the second is a subset of the
4411 first, the result is the first. Otherwise, the range to exclude
4412 starts at the beginning of the first range and ends at the end of the
4413 second. */
4414 if (no_overlap)
4416 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4417 range_successor (high0),
4418 1, low1, 0)))
4419 in_p = 0, low = low0, high = high1;
4420 else
4422 /* Canonicalize - [min, x] into - [-, x]. */
4423 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4424 switch (TREE_CODE (TREE_TYPE (low0)))
4426 case ENUMERAL_TYPE:
4427 if (TYPE_PRECISION (TREE_TYPE (low0))
4428 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4429 break;
4430 /* FALLTHROUGH */
4431 case INTEGER_TYPE:
4432 if (tree_int_cst_equal (low0,
4433 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4434 low0 = 0;
4435 break;
4436 case POINTER_TYPE:
4437 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4438 && integer_zerop (low0))
4439 low0 = 0;
4440 break;
4441 default:
4442 break;
4445 /* Canonicalize - [x, max] into - [x, -]. */
4446 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4447 switch (TREE_CODE (TREE_TYPE (high1)))
4449 case ENUMERAL_TYPE:
4450 if (TYPE_PRECISION (TREE_TYPE (high1))
4451 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4452 break;
4453 /* FALLTHROUGH */
4454 case INTEGER_TYPE:
4455 if (tree_int_cst_equal (high1,
4456 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4457 high1 = 0;
4458 break;
4459 case POINTER_TYPE:
4460 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4461 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4462 high1, 1,
4463 integer_one_node, 1)))
4464 high1 = 0;
4465 break;
4466 default:
4467 break;
4470 /* The ranges might be also adjacent between the maximum and
4471 minimum values of the given type. For
4472 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4473 return + [x + 1, y - 1]. */
4474 if (low0 == 0 && high1 == 0)
4476 low = range_successor (high0);
4477 high = range_predecessor (low1);
4478 if (low == 0 || high == 0)
4479 return 0;
4481 in_p = 1;
4483 else
4484 return 0;
4487 else if (subset)
4488 in_p = 0, low = low0, high = high0;
4489 else
4490 in_p = 0, low = low0, high = high1;
4493 *pin_p = in_p, *plow = low, *phigh = high;
4494 return 1;
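/* For example, merging "+ [2, 5]" with "+ [4, 9]" (both included,
   overlapping, neither a subset of the other) yields "+ [4, 5]":
   from the start of the second range to the end of the first.  */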
4498 /* Subroutine of fold, looking inside expressions of the form
4499 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4500 of the COND_EXPR.  This function is also used to optimize
4501 A op B ? C : A, by reversing the comparison first.
4503 Return a folded expression whose code is not a COND_EXPR
4504 anymore, or NULL_TREE if no folding opportunity is found. */
4506 static tree
4507 fold_cond_expr_with_comparison (location_t loc, tree type,
4508 tree arg0, tree arg1, tree arg2)
4510 enum tree_code comp_code = TREE_CODE (arg0);
4511 tree arg00 = TREE_OPERAND (arg0, 0);
4512 tree arg01 = TREE_OPERAND (arg0, 1);
4513 tree arg1_type = TREE_TYPE (arg1);
4514 tree tem;
4516 STRIP_NOPS (arg1);
4517 STRIP_NOPS (arg2);
4519 /* If we have A op 0 ? A : -A, consider applying the following
4520 transformations:
4522 A == 0? A : -A same as -A
4523 A != 0? A : -A same as A
4524 A >= 0? A : -A same as abs (A)
4525 A > 0? A : -A same as abs (A)
4526 A <= 0? A : -A same as -abs (A)
4527 A < 0? A : -A same as -abs (A)
4529 None of these transformations work for modes with signed
4530 zeros. If A is +/-0, the first two transformations will
4531 change the sign of the result (from +0 to -0, or vice
4532 versa). The last four will fix the sign of the result,
4533 even though the original expressions could be positive or
4534 negative, depending on the sign of A.
4536 Note that all these transformations are correct if A is
4537 NaN, since the two alternatives (A and -A) are also NaNs. */
4538 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4539 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4540 ? real_zerop (arg01)
4541 : integer_zerop (arg01))
4542 && ((TREE_CODE (arg2) == NEGATE_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4544 /* In the case that A is of the form X-Y, '-A' (arg2) may
4545 have already been folded to Y-X, check for that. */
4546 || (TREE_CODE (arg1) == MINUS_EXPR
4547 && TREE_CODE (arg2) == MINUS_EXPR
4548 && operand_equal_p (TREE_OPERAND (arg1, 0),
4549 TREE_OPERAND (arg2, 1), 0)
4550 && operand_equal_p (TREE_OPERAND (arg1, 1),
4551 TREE_OPERAND (arg2, 0), 0))))
4552 switch (comp_code)
4554 case EQ_EXPR:
4555 case UNEQ_EXPR:
4556 tem = fold_convert_loc (loc, arg1_type, arg1);
4557 return pedantic_non_lvalue_loc (loc,
4558 fold_convert_loc (loc, type,
4559 negate_expr (tem)));
4560 case NE_EXPR:
4561 case LTGT_EXPR:
4562 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4563 case UNGE_EXPR:
4564 case UNGT_EXPR:
4565 if (flag_trapping_math)
4566 break;
4567 /* Fall through. */
4568 case GE_EXPR:
4569 case GT_EXPR:
4570 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4571 arg1 = fold_convert_loc (loc, signed_type_for
4572 (TREE_TYPE (arg1)), arg1);
4573 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4574 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4575 case UNLE_EXPR:
4576 case UNLT_EXPR:
4577 if (flag_trapping_math)
4578 break;
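	  /* Fall through.  */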
4579 case LE_EXPR:
4580 case LT_EXPR:
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return negate_expr (fold_convert_loc (loc, type, tem));
4586 default:
4587 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4588 break;
4591 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4592 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4593 both transformations are correct when A is NaN: A != 0
4594 is then true, and A == 0 is false. */
4596 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4597 && integer_zerop (arg01) && integer_zerop (arg2))
4599 if (comp_code == NE_EXPR)
4600 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4601 else if (comp_code == EQ_EXPR)
4602 return build_int_cst (type, 0);
4605 /* Try some transformations of A op B ? A : B.
4607 A == B? A : B same as B
4608 A != B? A : B same as A
4609 A >= B? A : B same as max (A, B)
4610 A > B? A : B same as max (B, A)
4611 A <= B? A : B same as min (A, B)
4612 A < B? A : B same as min (B, A)
4614 As above, these transformations don't work in the presence
4615 of signed zeros. For example, if A and B are zeros of
4616 opposite sign, the first two transformations will change
4617 the sign of the result. In the last four, the original
4618 expressions give different results for (A=+0, B=-0) and
4619 (A=-0, B=+0), but the transformed expressions do not.
4621 The first two transformations are correct if either A or B
4622 is a NaN. In the first transformation, the condition will
4623 be false, and B will indeed be chosen. In the case of the
4624 second transformation, the condition A != B will be true,
4625 and A will be chosen.
4627 The conversions to max() and min() are not correct if B is
4628 a number and A is not. The conditions in the original
4629 expressions will be false, so all four give B. The min()
4630 and max() versions would give a NaN instead. */
4631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4633 /* Avoid these transformations if the COND_EXPR may be used
4634 as an lvalue in the C++ front-end. PR c++/19199. */
4635 && (in_gimple_form
4636 || (strcmp (lang_hooks.name, "GNU C++") != 0
4637 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4638 || ! maybe_lvalue_p (arg1)
4639 || ! maybe_lvalue_p (arg2)))
4641 tree comp_op0 = arg00;
4642 tree comp_op1 = arg01;
4643 tree comp_type = TREE_TYPE (comp_op0);
4645 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4646 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4648 comp_type = type;
4649 comp_op0 = arg1;
4650 comp_op1 = arg2;
4653 switch (comp_code)
4655 case EQ_EXPR:
4656 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4657 case NE_EXPR:
4658 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4659 case LE_EXPR:
4660 case LT_EXPR:
4661 case UNLE_EXPR:
4662 case UNLT_EXPR:
4663 /* In C++ a ?: expression can be an lvalue, so put the
4664 operand which will be used if they are equal first
4665 so that we can convert this back to the
4666 corresponding COND_EXPR. */
4667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4669 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4670 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4671 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4672 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4673 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4674 comp_op1, comp_op0);
4675 return pedantic_non_lvalue_loc (loc,
4676 fold_convert_loc (loc, type, tem));
4678 break;
4679 case GE_EXPR:
4680 case GT_EXPR:
4681 case UNGE_EXPR:
4682 case UNGT_EXPR:
4683 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4685 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4686 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4687 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4688 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4689 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4690 comp_op1, comp_op0);
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, tem));
4694 break;
4695 case UNEQ_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4697 return pedantic_non_lvalue_loc (loc,
4698 fold_convert_loc (loc, type, arg2));
4699 break;
4700 case LTGT_EXPR:
4701 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, arg1));
4704 break;
4705 default:
4706 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4707 break;
4711 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4712 we might still be able to simplify this. For example,
4713 if C1 is one less or one more than C2, this might have started
4714 out as a MIN or MAX and been transformed by this function.
4715 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4717 if (INTEGRAL_TYPE_P (type)
4718 && TREE_CODE (arg01) == INTEGER_CST
4719 && TREE_CODE (arg2) == INTEGER_CST)
4720 switch (comp_code)
4722 case EQ_EXPR:
4723 if (TREE_CODE (arg1) == INTEGER_CST)
4724 break;
4725 /* We can replace A with C1 in this case. */
4726 arg1 = fold_convert_loc (loc, type, arg01);
4727 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4729 case LT_EXPR:
4730 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4731 MIN_EXPR, to preserve the signedness of the comparison. */
4732 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4733 OEP_ONLY_CONST)
4734 && operand_equal_p (arg01,
4735 const_binop (PLUS_EXPR, arg2,
4736 build_int_cst (type, 1)),
4737 OEP_ONLY_CONST))
4739 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4740 fold_convert_loc (loc, TREE_TYPE (arg00),
4741 arg2));
4742 return pedantic_non_lvalue_loc (loc,
4743 fold_convert_loc (loc, type, tem));
4745 break;
4747 case LE_EXPR:
4748 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4749 as above. */
4750 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4751 OEP_ONLY_CONST)
4752 && operand_equal_p (arg01,
4753 const_binop (MINUS_EXPR, arg2,
4754 build_int_cst (type, 1)),
4755 OEP_ONLY_CONST))
4757 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4758 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 arg2));
4760 return pedantic_non_lvalue_loc (loc,
4761 fold_convert_loc (loc, type, tem));
4763 break;
4765 case GT_EXPR:
4766 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4767 MAX_EXPR, to preserve the signedness of the comparison. */
4768 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4769 OEP_ONLY_CONST)
4770 && operand_equal_p (arg01,
4771 const_binop (MINUS_EXPR, arg2,
4772 build_int_cst (type, 1)),
4773 OEP_ONLY_CONST))
4775 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4776 fold_convert_loc (loc, TREE_TYPE (arg00),
4777 arg2));
4778 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4780 break;
4782 case GE_EXPR:
4783 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4784 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (PLUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4796 break;
4797 case NE_EXPR:
4798 break;
4799 default:
4800 gcc_unreachable ();
4803 return NULL_TREE;
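/* As an illustration of the constant cases above, with signed x:
   x < 10 ? x : 9 has C1 == C2 + 1, so the LT_EXPR case rebuilds it
   as MIN (x, 9), and x > 9 ? x : 10 likewise becomes MAX (x, 10).  */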
4808 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4809 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4810 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4811 false) >= 2)
4812 #endif
4814 /* EXP is some logical combination of boolean tests. See if we can
4815 merge it into some range test. Return the new tree if so. */
4817 static tree
4818 fold_range_test (location_t loc, enum tree_code code, tree type,
4819 tree op0, tree op1)
4821 int or_op = (code == TRUTH_ORIF_EXPR
4822 || code == TRUTH_OR_EXPR);
4823 int in0_p, in1_p, in_p;
4824 tree low0, low1, low, high0, high1, high;
4825 bool strict_overflow_p = false;
4826 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4827 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4828 tree tem;
4829 const char * const warnmsg = G_("assuming signed overflow does not occur "
4830 "when simplifying range test");
4832 /* If this is an OR operation, invert both sides; we will invert
4833 again at the end. */
4834 if (or_op)
4835 in0_p = ! in0_p, in1_p = ! in1_p;
4837 /* If both expressions are the same, if we can merge the ranges, and we
4838 can build the range test, return it or it inverted. If one of the
4839 ranges is always true or always false, consider it to be the same
4840 expression as the other. */
4841 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4842 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4843 in1_p, low1, high1)
4844 && 0 != (tem = (build_range_check (loc, type,
4845 lhs != 0 ? lhs
4846 : rhs != 0 ? rhs : integer_zero_node,
4847 in_p, low, high))))
4849 if (strict_overflow_p)
4850 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4851 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4854 /* On machines where branches are expensive, if this is a
4855 short-circuited branch and the underlying object on both sides
4856 is the same, make a non-short-circuit operation. */
4857 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4858 && lhs != 0 && rhs != 0
4859 && (code == TRUTH_ANDIF_EXPR
4860 || code == TRUTH_ORIF_EXPR)
4861 && operand_equal_p (lhs, rhs, 0))
4863 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4864 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4865 which cases we can't do this. */
4866 if (simple_operand_p (lhs))
4867 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4868 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4869 type, op0, op1);
4871 else if (!lang_hooks.decls.global_bindings_p ()
4872 && !CONTAINS_PLACEHOLDER_P (lhs))
4874 tree common = save_expr (lhs);
4876 if (0 != (lhs = build_range_check (loc, type, common,
4877 or_op ? ! in0_p : in0_p,
4878 low0, high0))
4879 && (0 != (rhs = build_range_check (loc, type, common,
4880 or_op ? ! in1_p : in1_p,
4881 low1, high1))))
4883 if (strict_overflow_p)
4884 fold_overflow_warning (warnmsg,
4885 WARN_STRICT_OVERFLOW_COMPARISON);
4886 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4887 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4888 type, lhs, rhs);
4893 return 0;
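/* The classic instance of this is a character-class test such as
   ch >= '0' && ch <= '9': both comparisons merge into the single
   range "+ ['0', '9']", for which build_range_check emits something
   like (unsigned char) (ch - '0') <= 9.  */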
4896 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4897 bit value. Arrange things so the extra bits will be set to zero if and
4898 only if C is sign-extended to its full width. If MASK is nonzero,
4899 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4901 static tree
4902 unextend (tree c, int p, int unsignedp, tree mask)
4904 tree type = TREE_TYPE (c);
4905 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4906 tree temp;
4908 if (p == modesize || unsignedp)
4909 return c;
4911 /* We work by getting just the sign bit into the low-order bit, then
4912 into the high-order bit, then sign-extend. We then XOR that value
4913 with C. */
4914 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4915 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4917 /* We must use a signed type in order to get an arithmetic right shift.
4918 However, we must also avoid introducing accidental overflows, so that
4919 a subsequent call to integer_zerop will work. Hence we must
4920 do the type conversion here. At this point, the constant is either
4921 zero or one, and the conversion to a signed type can never overflow.
4922 We could get an overflow if this conversion is done anywhere else. */
4923 if (TYPE_UNSIGNED (type))
4924 temp = fold_convert (signed_type_for (type), temp);
4926 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4927 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4928 if (mask != 0)
4929 temp = const_binop (BIT_AND_EXPR, temp,
4930 fold_convert (TREE_TYPE (c), mask));
4931 /* If necessary, convert the type back to match the type of C. */
4932 if (TYPE_UNSIGNED (type))
4933 temp = fold_convert (type, temp);
4935 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
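/* A worked example, assuming a 16-bit mode and P == 8: C == 0xffff
   (the 8-bit value -1 correctly sign-extended) maps to 0x00ff, whose
   extra bits are zero, while C == 0x00ff (not sign-extended) maps to
   0xffff, whose extra bits are set.  */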
4938 /* For an expression that has the form
4939 (A && B) || ~B
4940 or
4941 (A || B) && ~B,
4942 we can drop one of the inner expressions and simplify to
4943 A || ~B
4944 or
4945 A && ~B
4946 LOC is the location of the resulting expression. OP is the inner
4947 logical operation; the left-hand side in the examples above, while CMPOP
4948 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4949 removing a condition that guards another, as in
4950 (A != NULL && A->...) || A == NULL
4951 which we must not transform. If RHS_ONLY is true, only eliminate the
4952 right-most operand of the inner logical operation. */
4954 static tree
4955 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4956 bool rhs_only)
4958 tree type = TREE_TYPE (cmpop);
4959 enum tree_code code = TREE_CODE (cmpop);
4960 enum tree_code truthop_code = TREE_CODE (op);
4961 tree lhs = TREE_OPERAND (op, 0);
4962 tree rhs = TREE_OPERAND (op, 1);
4963 tree orig_lhs = lhs, orig_rhs = rhs;
4964 enum tree_code rhs_code = TREE_CODE (rhs);
4965 enum tree_code lhs_code = TREE_CODE (lhs);
4966 enum tree_code inv_code;
4968 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4969 return NULL_TREE;
4971 if (TREE_CODE_CLASS (code) != tcc_comparison)
4972 return NULL_TREE;
4974 if (rhs_code == truthop_code)
4976 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4977 if (newrhs != NULL_TREE)
4979 rhs = newrhs;
4980 rhs_code = TREE_CODE (rhs);
4983 if (lhs_code == truthop_code && !rhs_only)
4985 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4986 if (newlhs != NULL_TREE)
4988 lhs = newlhs;
4989 lhs_code = TREE_CODE (lhs);
4993 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4994 if (inv_code == rhs_code
4995 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4996 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4997 return lhs;
4998 if (!rhs_only && inv_code == lhs_code
4999 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5000 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5001 return rhs;
5002 if (rhs != orig_rhs || lhs != orig_lhs)
5003 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5004 lhs, rhs);
5005 return NULL_TREE;
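/* For instance, assuming y is not a float where NaNs would block the
   inversion, (x < 5 && y < 5) || y >= 5 is handled here with CMPOP
   == y >= 5: its inverse y < 5 matches the right-hand operand of the
   inner truth op, so that operand is dropped and x < 5 is returned,
   giving x < 5 || y >= 5 overall.  */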
5008 /* Find ways of folding logical expressions of LHS and RHS:
5009 Try to merge two comparisons to the same innermost item.
5010 Look for range tests like "ch >= '0' && ch <= '9'".
5011 Look for combinations of simple terms on machines with expensive branches
5012 and evaluate the RHS unconditionally.
5014 For example, if we have p->a == 2 && p->b == 4 and we can make an
5015 object large enough to span both A and B, we can do this with a comparison
5016 against the object ANDed with a mask.
5018 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5019 operations to do this with one comparison.
5021 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5022 function and the one above.
5024 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5025 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5027 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5028 two operands.
5030 We return the simplified tree or 0 if no optimization is possible. */
5032 static tree
5033 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5034 tree lhs, tree rhs)
5036 /* If this is the "or" of two comparisons, we can do something if
5037 the comparisons are NE_EXPR. If this is the "and", we can do something
5038 if the comparisons are EQ_EXPR. I.e.,
5039 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5041 WANTED_CODE is this operation code. For single bit fields, we can
5042 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5043 comparison for one-bit fields. */
5045 enum tree_code wanted_code;
5046 enum tree_code lcode, rcode;
5047 tree ll_arg, lr_arg, rl_arg, rr_arg;
5048 tree ll_inner, lr_inner, rl_inner, rr_inner;
5049 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5050 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5051 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5052 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5053 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5054 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5055 enum machine_mode lnmode, rnmode;
5056 tree ll_mask, lr_mask, rl_mask, rr_mask;
5057 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5058 tree l_const, r_const;
5059 tree lntype, rntype, result;
5060 HOST_WIDE_INT first_bit, end_bit;
5061 int volatilep;
5063 /* Start by getting the comparison codes. Fail if anything is volatile.
5064 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5065 it were surrounded with a NE_EXPR. */
5067 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5068 return 0;
5070 lcode = TREE_CODE (lhs);
5071 rcode = TREE_CODE (rhs);
5073 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5075 lhs = build2 (NE_EXPR, truth_type, lhs,
5076 build_int_cst (TREE_TYPE (lhs), 0));
5077 lcode = NE_EXPR;
5080 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5082 rhs = build2 (NE_EXPR, truth_type, rhs,
5083 build_int_cst (TREE_TYPE (rhs), 0));
5084 rcode = NE_EXPR;
5087 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5088 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5089 return 0;
5091 ll_arg = TREE_OPERAND (lhs, 0);
5092 lr_arg = TREE_OPERAND (lhs, 1);
5093 rl_arg = TREE_OPERAND (rhs, 0);
5094 rr_arg = TREE_OPERAND (rhs, 1);
5096 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5097 if (simple_operand_p (ll_arg)
5098 && simple_operand_p (lr_arg))
5100 if (operand_equal_p (ll_arg, rl_arg, 0)
5101 && operand_equal_p (lr_arg, rr_arg, 0))
5103 result = combine_comparisons (loc, code, lcode, rcode,
5104 truth_type, ll_arg, lr_arg);
5105 if (result)
5106 return result;
5108 else if (operand_equal_p (ll_arg, rr_arg, 0)
5109 && operand_equal_p (lr_arg, rl_arg, 0))
5111 result = combine_comparisons (loc, code, lcode,
5112 swap_tree_comparison (rcode),
5113 truth_type, ll_arg, lr_arg);
5114 if (result)
5115 return result;
5119 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5120 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5122 /* If the RHS can be evaluated unconditionally and its operands are
5123 simple, it wins to evaluate the RHS unconditionally on machines
5124 with expensive branches. In this case, this isn't a comparison
5125 that can be merged. */
5127 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5128 false) >= 2
5129 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5130 && simple_operand_p (rl_arg)
5131 && simple_operand_p (rr_arg))
5133 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5134 if (code == TRUTH_OR_EXPR
5135 && lcode == NE_EXPR && integer_zerop (lr_arg)
5136 && rcode == NE_EXPR && integer_zerop (rr_arg)
5137 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5138 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5139 return build2_loc (loc, NE_EXPR, truth_type,
5140 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5141 ll_arg, rl_arg),
5142 build_int_cst (TREE_TYPE (ll_arg), 0));
5144 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5145 if (code == TRUTH_AND_EXPR
5146 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5147 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5148 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5149 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 return build2_loc (loc, EQ_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5156 /* See if the comparisons can be merged. Then get all the parameters for
5157 each side. */
5159 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5160 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5161 return 0;
5163 volatilep = 0;
5164 ll_inner = decode_field_reference (loc, ll_arg,
5165 &ll_bitsize, &ll_bitpos, &ll_mode,
5166 &ll_unsignedp, &volatilep, &ll_mask,
5167 &ll_and_mask);
5168 lr_inner = decode_field_reference (loc, lr_arg,
5169 &lr_bitsize, &lr_bitpos, &lr_mode,
5170 &lr_unsignedp, &volatilep, &lr_mask,
5171 &lr_and_mask);
5172 rl_inner = decode_field_reference (loc, rl_arg,
5173 &rl_bitsize, &rl_bitpos, &rl_mode,
5174 &rl_unsignedp, &volatilep, &rl_mask,
5175 &rl_and_mask);
5176 rr_inner = decode_field_reference (loc, rr_arg,
5177 &rr_bitsize, &rr_bitpos, &rr_mode,
5178 &rr_unsignedp, &volatilep, &rr_mask,
5179 &rr_and_mask);
5181 /* It must be true that the inner operation on the lhs of each
5182 comparison must be the same if we are to be able to do anything.
5183 Then see if we have constants. If not, the same must be true for
5184 the rhs's. */
5185 if (volatilep || ll_inner == 0 || rl_inner == 0
5186 || ! operand_equal_p (ll_inner, rl_inner, 0))
5187 return 0;
5189 if (TREE_CODE (lr_arg) == INTEGER_CST
5190 && TREE_CODE (rr_arg) == INTEGER_CST)
5191 l_const = lr_arg, r_const = rr_arg;
5192 else if (lr_inner == 0 || rr_inner == 0
5193 || ! operand_equal_p (lr_inner, rr_inner, 0))
5194 return 0;
5195 else
5196 l_const = r_const = 0;
5198 /* If either comparison code is not correct for our logical operation,
5199 fail. However, we can convert a one-bit comparison against zero into
5200 the opposite comparison against that bit being set in the field. */
5202 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5203 if (lcode != wanted_code)
5205 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5207 /* Make the left operand unsigned, since we are only interested
5208 in the value of one bit. Otherwise we are doing the wrong
5209 thing below. */
5210 ll_unsignedp = 1;
5211 l_const = ll_mask;
5213 else
5214 return 0;
5217 /* This is analogous to the code for l_const above. */
5218 if (rcode != wanted_code)
5220 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5222 rl_unsignedp = 1;
5223 r_const = rl_mask;
5225 else
5226 return 0;
5229 /* See if we can find a mode that contains both fields being compared on
5230 the left. If we can't, fail. Otherwise, update all constants and masks
5231 to be relative to a field of that size. */
5232 first_bit = MIN (ll_bitpos, rl_bitpos);
5233 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5234 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5235 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5236 volatilep);
5237 if (lnmode == VOIDmode)
5238 return 0;
5240 lnbitsize = GET_MODE_BITSIZE (lnmode);
5241 lnbitpos = first_bit & ~ (lnbitsize - 1);
5242 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5243 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5245 if (BYTES_BIG_ENDIAN)
5247 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5248 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5251 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5252 size_int (xll_bitpos));
5253 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5254 size_int (xrl_bitpos));
5256 if (l_const)
5258 l_const = fold_convert_loc (loc, lntype, l_const);
5259 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5260 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5261 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5262 fold_build1_loc (loc, BIT_NOT_EXPR,
5263 lntype, ll_mask))))
5265 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5267 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5270 if (r_const)
5272 r_const = fold_convert_loc (loc, lntype, r_const);
5273 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5274 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5275 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5276 fold_build1_loc (loc, BIT_NOT_EXPR,
5277 lntype, rl_mask))))
5279 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5281 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5285 /* If the right sides are not constant, do the same for them. Also,
5286 disallow this optimization if a size or signedness mismatch occurs
5287 between the left and right sides. */
5288 if (l_const == 0)
5290 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5291 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5292 /* Make sure the two fields on the right
5293 correspond to the left without being swapped. */
5294 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5295 return 0;
5297 first_bit = MIN (lr_bitpos, rr_bitpos);
5298 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5299 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5300 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5301 volatilep);
5302 if (rnmode == VOIDmode)
5303 return 0;
5305 rnbitsize = GET_MODE_BITSIZE (rnmode);
5306 rnbitpos = first_bit & ~ (rnbitsize - 1);
5307 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5308 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5310 if (BYTES_BIG_ENDIAN)
5312 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5313 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5316 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5317 rntype, lr_mask),
5318 size_int (xlr_bitpos));
5319 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5320 rntype, rr_mask),
5321 size_int (xrr_bitpos));
5323 /* Make a mask that corresponds to both fields being compared.
5324 Do this for both items being compared. If the operands are the
5325 same size and the bits being compared are in the same position
5326 then we can do this by masking both and comparing the masked
5327 results. */
5328 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5329 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5330 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5332 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5333 ll_unsignedp || rl_unsignedp);
5334 if (! all_ones_mask_p (ll_mask, lnbitsize))
5335 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5337 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5338 lr_unsignedp || rr_unsignedp);
5339 if (! all_ones_mask_p (lr_mask, rnbitsize))
5340 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5342 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5345 /* There is still another way we can do something: If both pairs of
5346 fields being compared are adjacent, we may be able to make a wider
5347 field containing them both.
5349 Note that we still must mask the lhs/rhs expressions. Furthermore,
5350 the mask must be shifted to account for the shift done by
5351 make_bit_field_ref. */
5352 if ((ll_bitsize + ll_bitpos == rl_bitpos
5353 && lr_bitsize + lr_bitpos == rr_bitpos)
5354 || (ll_bitpos == rl_bitpos + rl_bitsize
5355 && lr_bitpos == rr_bitpos + rr_bitsize))
5357 tree type;
5359 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5360 ll_bitsize + rl_bitsize,
5361 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5362 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5363 lr_bitsize + rr_bitsize,
5364 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5366 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5367 size_int (MIN (xll_bitpos, xrl_bitpos)));
5368 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5369 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5371 /* Convert to the smaller type before masking out unwanted bits. */
5372 type = lntype;
5373 if (lntype != rntype)
5375 if (lnbitsize > rnbitsize)
5377 lhs = fold_convert_loc (loc, rntype, lhs);
5378 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5379 type = rntype;
5381 else if (lnbitsize < rnbitsize)
5383 rhs = fold_convert_loc (loc, lntype, rhs);
5384 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5385 type = lntype;
5389 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5390 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5392 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5393 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5395 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5398 return 0;
5401 /* Handle the case of comparisons with constants. If there is something in
5402 common between the masks, those bits of the constants must be the same.
5403 If not, the condition is constant: always false for the "and" case and
5404 always true for the "or" case; test for this to avoid generating incorrect code below. */
5405 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5406 if (! integer_zerop (result)
5407 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5408 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5410 if (wanted_code == NE_EXPR)
5412 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5413 return constant_boolean_node (true, truth_type);
5415 else
5417 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5418 return constant_boolean_node (false, truth_type);
5422 /* Construct the expression we will return. First get the component
5423 reference we will make. Unless the mask is all ones for the full width of
5424 that field, perform the mask operation. Then compare with the
5425 merged constant. */
5426 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5427 ll_unsignedp || rl_unsignedp);
5429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5430 if (! all_ones_mask_p (ll_mask, lnbitsize))
5431 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5433 return build2_loc (loc, wanted_code, truth_type, result,
5434 const_binop (BIT_IOR_EXPR, l_const, r_const));
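/* A sketch of the net effect, not the exact trees built: for a
   struct with adjacent char fields a and b, p->a == 2 && p->b == 4
   can become a single 16-bit load of both fields, masked and
   compared against one merged constant (conceptually something like
   (*(short *) &p->a & mask) == 0x0402, with the constant's byte
   order depending on endianness).  */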
5437 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5438 constant. */
5440 static tree
5441 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5442 tree op0, tree op1)
5444 tree arg0 = op0;
5445 enum tree_code op_code;
5446 tree comp_const;
5447 tree minmax_const;
5448 int consts_equal, consts_lt;
5449 tree inner;
5451 STRIP_SIGN_NOPS (arg0);
5453 op_code = TREE_CODE (arg0);
5454 minmax_const = TREE_OPERAND (arg0, 1);
5455 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5456 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5457 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5458 inner = TREE_OPERAND (arg0, 0);
5460 /* If something does not permit us to optimize, return NULL_TREE. */
5461 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5462 || TREE_CODE (comp_const) != INTEGER_CST
5463 || TREE_OVERFLOW (comp_const)
5464 || TREE_CODE (minmax_const) != INTEGER_CST
5465 || TREE_OVERFLOW (minmax_const))
5466 return NULL_TREE;
5468 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5469 and GT_EXPR, doing the rest with recursive calls using logical
5470 simplifications. */
5471 switch (code)
5473 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5475 tree tem
5476 = optimize_minmax_comparison (loc,
5477 invert_tree_comparison (code, false),
5478 type, op0, op1);
5479 if (tem)
5480 return invert_truthvalue_loc (loc, tem);
5481 return NULL_TREE;
5484 case GE_EXPR:
5485 return
5486 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5487 optimize_minmax_comparison
5488 (loc, EQ_EXPR, type, arg0, comp_const),
5489 optimize_minmax_comparison
5490 (loc, GT_EXPR, type, arg0, comp_const));
5492 case EQ_EXPR:
5493 if (op_code == MAX_EXPR && consts_equal)
5494 /* MAX (X, 0) == 0 -> X <= 0 */
5495 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5497 else if (op_code == MAX_EXPR && consts_lt)
5498 /* MAX (X, 0) == 5 -> X == 5 */
5499 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5501 else if (op_code == MAX_EXPR)
5502 /* MAX (X, 0) == -1 -> false */
5503 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5505 else if (consts_equal)
5506 /* MIN (X, 0) == 0 -> X >= 0 */
5507 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5509 else if (consts_lt)
5510 /* MIN (X, 0) == 5 -> false */
5511 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5513 else
5514 /* MIN (X, 0) == -1 -> X == -1 */
5515 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5517 case GT_EXPR:
5518 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5519 /* MAX (X, 0) > 0 -> X > 0
5520 MAX (X, 0) > 5 -> X > 5 */
5521 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5523 else if (op_code == MAX_EXPR)
5524 /* MAX (X, 0) > -1 -> true */
5525 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5527 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5528 /* MIN (X, 0) > 0 -> false
5529 MIN (X, 0) > 5 -> false */
5530 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5532 else
5533 /* MIN (X, 0) > -1 -> X > -1 */
5534 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5536 default:
5537 return NULL_TREE;
5541 /* T is an integer expression that is being multiplied, divided, or taken a
5542 modulus (CODE says which and what kind of divide or modulus) by a
5543 constant C. See if we can eliminate that operation by folding it with
5544 other operations already in T. WIDE_TYPE, if non-null, is a type that
5545 should be used for the computation if wider than our type.
5547 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5548 (X * 2) + (Y * 4). We must, however, be assured that either the original
5549 expression would not overflow or that overflow is undefined for the type
5550 in the language in question.
5552 If we return a non-null expression, it is an equivalent form of the
5553 original computation, but need not be in the original type.
5555 We set *STRICT_OVERFLOW_P to true if the return value depends on
5556 signed overflow being undefined. Otherwise we do not change
5557 *STRICT_OVERFLOW_P. */
5559 static tree
5560 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5561 bool *strict_overflow_p)
5563 /* To avoid exponential search depth, refuse to allow recursion past
5564 three levels. Beyond that (1) it's highly unlikely that we'll find
5565 something interesting and (2) we've probably processed it before
5566 when we built the inner expression. */
5568 static int depth;
5569 tree ret;
5571 if (depth > 3)
5572 return NULL;
5574 depth++;
5575 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5576 depth--;
5578 return ret;
5581 static tree
5582 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5583 bool *strict_overflow_p)
5585 tree type = TREE_TYPE (t);
5586 enum tree_code tcode = TREE_CODE (t);
5587 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5588 > GET_MODE_SIZE (TYPE_MODE (type)))
5589 ? wide_type : type);
5590 tree t1, t2;
5591 int same_p = tcode == code;
5592 tree op0 = NULL_TREE, op1 = NULL_TREE;
5593 bool sub_strict_overflow_p;
5595 /* Don't deal with constants of zero here; they confuse the code below. */
5596 if (integer_zerop (c))
5597 return NULL_TREE;
5599 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5600 op0 = TREE_OPERAND (t, 0);
5602 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5603 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5605 /* Note that we need not handle conditional operations here since fold
5606 already handles those cases. So just do arithmetic here. */
5607 switch (tcode)
5609 case INTEGER_CST:
5610 /* For a constant, we can always simplify if we are a multiply
5611 or (for divide and modulus) if it is a multiple of our constant. */
5612 if (code == MULT_EXPR
5613 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5614 return const_binop (code, fold_convert (ctype, t),
5615 fold_convert (ctype, c));
5616 break;
5618 CASE_CONVERT: case NON_LVALUE_EXPR:
5619 /* If op0 is an expression ... */
5620 if ((COMPARISON_CLASS_P (op0)
5621 || UNARY_CLASS_P (op0)
5622 || BINARY_CLASS_P (op0)
5623 || VL_EXP_CLASS_P (op0)
5624 || EXPRESSION_CLASS_P (op0))
5625 /* ... and has wrapping overflow, and its type is smaller
5626 than ctype, then we cannot pass through as widening. */
5627 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5628 && (TYPE_PRECISION (ctype)
5629 > TYPE_PRECISION (TREE_TYPE (op0))))
5630 /* ... or this is a truncation (t is narrower than op0),
5631 then we cannot pass through this narrowing. */
5632 || (TYPE_PRECISION (type)
5633 < TYPE_PRECISION (TREE_TYPE (op0)))
5634 /* ... or signedness changes for division or modulus,
5635 then we cannot pass through this conversion. */
5636 || (code != MULT_EXPR
5637 && (TYPE_UNSIGNED (ctype)
5638 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5639 /* ... or has undefined overflow while the type converted to
5640 has not, we cannot do the operation in the inner type
5641 as that would introduce undefined overflow. */
5642 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5643 && !TYPE_OVERFLOW_UNDEFINED (type))))
5644 break;
5646 /* Pass the constant down and see if we can make a simplification. If
5647 we can, replace this expression with the inner simplification for
5648 possible later conversion to our or some other type. */
5649 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5650 && TREE_CODE (t2) == INTEGER_CST
5651 && !TREE_OVERFLOW (t2)
5652 && (0 != (t1 = extract_muldiv (op0, t2, code,
5653 code == MULT_EXPR
5654 ? ctype : NULL_TREE,
5655 strict_overflow_p))))
5656 return t1;
5657 break;
5659 case ABS_EXPR:
5660 /* If widening the type changes it from signed to unsigned, then we
5661 must avoid building ABS_EXPR itself as unsigned. */
5662 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5664 tree cstype = (*signed_type_for) (ctype);
5665 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5666 != 0)
5668 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5669 return fold_convert (ctype, t1);
5671 break;
5673 /* If the constant is negative, we cannot simplify this. */
5674 if (tree_int_cst_sgn (c) == -1)
5675 break;
5676 /* FALLTHROUGH */
5677 case NEGATE_EXPR:
5678 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5679 != 0)
5680 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5681 break;
5683 case MIN_EXPR: case MAX_EXPR:
5684 /* If widening the type changes the signedness, then we can't perform
5685 this optimization as that changes the result. */
5686 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5687 break;
5689 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5690 sub_strict_overflow_p = false;
5691 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5692 &sub_strict_overflow_p)) != 0
5693 && (t2 = extract_muldiv (op1, c, code, wide_type,
5694 &sub_strict_overflow_p)) != 0)
5696 if (tree_int_cst_sgn (c) < 0)
5697 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5698 if (sub_strict_overflow_p)
5699 *strict_overflow_p = true;
5700 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5701 fold_convert (ctype, t2));
5703 break;
5705 case LSHIFT_EXPR: case RSHIFT_EXPR:
5706 /* If the second operand is constant, this is a multiplication
5707 or floor division, by a power of two, so we can treat it that
5708 way unless the multiplier or divisor overflows. Signed
5709 left-shift overflow is implementation-defined rather than
5710 undefined in C90, so do not convert signed left shift into
5711 multiplication. */
5712 if (TREE_CODE (op1) == INTEGER_CST
5713 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5714 /* const_binop may not detect overflow correctly,
5715 so check for it explicitly here. */
5716 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5717 && TREE_INT_CST_HIGH (op1) == 0
5718 && 0 != (t1 = fold_convert (ctype,
5719 const_binop (LSHIFT_EXPR,
5720 size_one_node,
5721 op1)))
5722 && !TREE_OVERFLOW (t1))
5723 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5724 ? MULT_EXPR : FLOOR_DIV_EXPR,
5725 ctype,
5726 fold_convert (ctype, op0),
5727 t1),
5728 c, code, wide_type, strict_overflow_p);
5729 break;
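	/* For instance (illustrative only): with unsigned X, "(X << 3) * 4"
	   is rebuilt above as "(X * 8) * 4" and the recursive call combines
	   the constants into "X * 32"; an RSHIFT is treated the same way as
	   a floor division by the power of two.  */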
5731 case PLUS_EXPR: case MINUS_EXPR:
5732 /* See if we can eliminate the operation on both sides. If we can, we
5733 can return a new PLUS or MINUS. If we can't, the only remaining
5734 cases where we can do anything are if the second operand is a
5735 constant. */
5736 sub_strict_overflow_p = false;
5737 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5738 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5739 if (t1 != 0 && t2 != 0
5740 && (code == MULT_EXPR
5741 /* If not multiplication, we can only do this if both operands
5742 are divisible by c. */
5743 || (multiple_of_p (ctype, op0, c)
5744 && multiple_of_p (ctype, op1, c))))
5746 if (sub_strict_overflow_p)
5747 *strict_overflow_p = true;
5748 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5749 fold_convert (ctype, t2));
5752 /* If this was a subtraction, negate OP1 and set it to be an addition.
5753 This simplifies the logic below. */
5754 if (tcode == MINUS_EXPR)
5756 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5757 /* If OP1 was not easily negatable, the constant may be OP0. */
5758 if (TREE_CODE (op0) == INTEGER_CST)
5760 tree tem = op0;
5761 op0 = op1;
5762 op1 = tem;
5763 tem = t1;
5764 t1 = t2;
5765 t2 = tem;
5769 if (TREE_CODE (op1) != INTEGER_CST)
5770 break;
5772 /* If either OP1 or C is negative, this optimization is not safe for
5773 some of the division and remainder types while for others we need
5774 to change the code. */
5775 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5777 if (code == CEIL_DIV_EXPR)
5778 code = FLOOR_DIV_EXPR;
5779 else if (code == FLOOR_DIV_EXPR)
5780 code = CEIL_DIV_EXPR;
5781 else if (code != MULT_EXPR
5782 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5783 break;
5786 /* If it's a multiply or a division/modulus operation of a multiple
5787 of our constant, do the operation and verify it doesn't overflow. */
5788 if (code == MULT_EXPR
5789 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5791 op1 = const_binop (code, fold_convert (ctype, op1),
5792 fold_convert (ctype, c));
5793 /* We allow the constant to overflow with wrapping semantics. */
5794 if (op1 == 0
5795 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5796 break;
5798 else
5799 break;
5801 /* If we have an unsigned type that is not a sizetype, we cannot widen
5802 the operation since it will change the result if the original
5803 computation overflowed. */
5804 if (TYPE_UNSIGNED (ctype)
5805 && ctype != type)
5806 break;
5808 /* If we were able to eliminate our operation from the first side,
5809 apply our operation to the second side and reform the PLUS. */
5810 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5811 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5813 /* The last case is if we are a multiply. In that case, we can
5814 apply the distributive law to commute the multiply and addition
5815 if the multiplication of the constants doesn't overflow. */
5816 if (code == MULT_EXPR)
5817 return fold_build2 (tcode, ctype,
5818 fold_build2 (code, ctype,
5819 fold_convert (ctype, op0),
5820 fold_convert (ctype, c)),
5821 op1);
5823 break;
5825 case MULT_EXPR:
5826 /* We have a special case here if we are doing something like
5827 (C * 8) % 4 since we know that's zero. */
5828 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5829 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5830 /* If the multiplication can overflow we cannot optimize this. */
5831 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5832 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5833 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5835 *strict_overflow_p = true;
5836 return omit_one_operand (type, integer_zero_node, op0);
5839 /* ... fall through ... */
5841 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5842 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5843 /* If we can extract our operation from the LHS, do so and return a
5844 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5845 do something only if the second operand is a constant. */
5846 if (same_p
5847 && (t1 = extract_muldiv (op0, c, code, wide_type,
5848 strict_overflow_p)) != 0)
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, op1));
5851 else if (tcode == MULT_EXPR && code == MULT_EXPR
5852 && (t1 = extract_muldiv (op1, c, code, wide_type,
5853 strict_overflow_p)) != 0)
5854 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5855 fold_convert (ctype, t1));
5856 else if (TREE_CODE (op1) != INTEGER_CST)
5857 return 0;
5859 /* If these are the same operation types, we can associate them
5860 assuming no overflow. */
5861 if (tcode == code)
5863 double_int mul;
5864 bool overflow_p;
5865 unsigned prec = TYPE_PRECISION (ctype);
5866 bool uns = TYPE_UNSIGNED (ctype);
5867 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5868 double_int dic = tree_to_double_int (c).ext (prec, uns);
5869 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5870 overflow_p = ((!uns && overflow_p)
5871 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5872 if (!double_int_fits_to_tree_p (ctype, mul)
5873 && ((uns && tcode != MULT_EXPR) || !uns))
5874 overflow_p = 1;
5875 if (!overflow_p)
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5877 double_int_to_tree (ctype, mul));
5880 /* If these operations "cancel" each other, we have the main
5881 optimizations of this pass, which occur when either constant is a
5882 multiple of the other, in which case we replace this with an
5883 operation of either CODE or TCODE.
5885 If we have an unsigned type, we cannot do this since it will change
5886 the result if the original computation overflowed. */
5887 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5888 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5889 || (tcode == MULT_EXPR
5890 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5891 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5892 && code != MULT_EXPR)))
5894 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5896 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5897 *strict_overflow_p = true;
5898 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5899 fold_convert (ctype,
5900 const_binop (TRUNC_DIV_EXPR,
5901 op1, c)));
5903 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5910 c, op1)));
5913 break;
5915 default:
5916 break;
5919 return 0;
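/* A sketch of how callers consume the result ("t", "c" and the warning
   text are placeholders, not exact caller code):

       bool sov = false;
       tree r = extract_muldiv (t, c, TRUNC_DIV_EXPR, NULL_TREE, &sov);
       if (r && sov)
	 fold_overflow_warning ("...", WARN_STRICT_OVERFLOW_MISC);

   i.e. *STRICT_OVERFLOW_P is only meaningful when a non-NULL tree is
   returned.  */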
5922 /* Return a node which has the indicated constant VALUE (either 0 or
5923 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5924 and is of the indicated TYPE. */
5926 tree
5927 constant_boolean_node (bool value, tree type)
5929 if (type == integer_type_node)
5930 return value ? integer_one_node : integer_zero_node;
5931 else if (type == boolean_type_node)
5932 return value ? boolean_true_node : boolean_false_node;
5933 else if (TREE_CODE (type) == VECTOR_TYPE)
5934 return build_vector_from_val (type,
5935 build_int_cst (TREE_TYPE (type),
5936 value ? -1 : 0));
5937 else
5938 return fold_convert (type, value ? integer_one_node : integer_zero_node);
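/* E.g. (illustrative): constant_boolean_node (true, boolean_type_node)
   yields boolean_true_node, while for a vector type it builds the
   all-ones vector, e.g. { -1, -1, -1, -1 } for four integer elements.  */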
5942 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5943 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5944 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5945 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5946 COND is the first argument to CODE; otherwise (as in the example
5947 given here), it is the second argument. TYPE is the type of the
5948 original expression. Return NULL_TREE if no simplification is
5949 possible. */
5951 static tree
5952 fold_binary_op_with_conditional_arg (location_t loc,
5953 enum tree_code code,
5954 tree type, tree op0, tree op1,
5955 tree cond, tree arg, int cond_first_p)
5957 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5958 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5959 tree test, true_value, false_value;
5960 tree lhs = NULL_TREE;
5961 tree rhs = NULL_TREE;
5963 if (TREE_CODE (cond) == COND_EXPR)
5965 test = TREE_OPERAND (cond, 0);
5966 true_value = TREE_OPERAND (cond, 1);
5967 false_value = TREE_OPERAND (cond, 2);
5968 /* If this operand throws an exception, then it does not make
5969 sense to try to perform a logical or arithmetic operation
5970 involving it. */
5971 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5972 lhs = true_value;
5973 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5974 rhs = false_value;
5976 else
5978 tree testtype = TREE_TYPE (cond);
5979 test = cond;
5980 true_value = constant_boolean_node (true, testtype);
5981 false_value = constant_boolean_node (false, testtype);
5984 /* This transformation is only worthwhile if we don't have to wrap ARG
5985 in a SAVE_EXPR and the operation can be simplified on at least one
5986 of the branches once it's pushed inside the COND_EXPR. */
5987 if (!TREE_CONSTANT (arg)
5988 && (TREE_SIDE_EFFECTS (arg)
5989 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5990 return NULL_TREE;
5992 arg = fold_convert_loc (loc, arg_type, arg);
5993 if (lhs == 0)
5995 true_value = fold_convert_loc (loc, cond_type, true_value);
5996 if (cond_first_p)
5997 lhs = fold_build2_loc (loc, code, type, true_value, arg);
5998 else
5999 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6001 if (rhs == 0)
6003 false_value = fold_convert_loc (loc, cond_type, false_value);
6004 if (cond_first_p)
6005 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6006 else
6007 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6010 /* Check that we have simplified at least one of the branches. */
6011 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6012 return NULL_TREE;
6014 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6018 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6020 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6021 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6022 ADDEND is the same as X.
6024 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6025 and finite. The problematic cases are when X is zero, and its mode
6026 has signed zeros. In the case of rounding towards -infinity,
6027 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6028 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6030 bool
6031 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6033 if (!real_zerop (addend))
6034 return false;
6036 /* Don't allow the fold with -fsignaling-nans. */
6037 if (HONOR_SNANS (TYPE_MODE (type)))
6038 return false;
6040 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6041 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6042 return true;
6044 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6045 if (TREE_CODE (addend) == REAL_CST
6046 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6047 negate = !negate;
6049 /* The mode has signed zeros, and we have to honor their sign.
6050 In this situation, there is only one case we can return true for.
6051 X - 0 is the same as X unless rounding towards -infinity is
6052 supported. */
6053 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
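/* Concrete IEEE cases of the rules above (a sketch): with X == -0.0
   under the default round-to-nearest mode, X + 0.0 is +0.0 rather than
   X, so the addition cannot be dropped, while X - 0.0 is -0.0 and is
   foldable.  Under rounding toward -infinity the subtraction becomes
   the unsafe one: +0.0 - 0.0 is -0.0, so X - 0.0 is no longer X when
   X is +0.0.  */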
6056 /* Subroutine of fold() that checks comparisons of built-in math
6057 functions against real constants.
6059 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6060 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6061 is the type of the result and ARG0 and ARG1 are the operands of the
6062 comparison. ARG1 must be a TREE_REAL_CST.
6064 The function returns the constant folded tree if a simplification
6065 can be made, and NULL_TREE otherwise. */
6067 static tree
6068 fold_mathfn_compare (location_t loc,
6069 enum built_in_function fcode, enum tree_code code,
6070 tree type, tree arg0, tree arg1)
6072 REAL_VALUE_TYPE c;
6074 if (BUILTIN_SQRT_P (fcode))
6076 tree arg = CALL_EXPR_ARG (arg0, 0);
6077 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6079 c = TREE_REAL_CST (arg1);
6080 if (REAL_VALUE_NEGATIVE (c))
6082 /* sqrt(x) < y is always false, if y is negative. */
6083 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6084 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6086 /* sqrt(x) > y is always true, if y is negative and we
6087 don't care about NaNs, i.e. negative values of x. */
6088 if (code == NE_EXPR || !HONOR_NANS (mode))
6089 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6091 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6092 return fold_build2_loc (loc, GE_EXPR, type, arg,
6093 build_real (TREE_TYPE (arg), dconst0));
6095 else if (code == GT_EXPR || code == GE_EXPR)
6097 REAL_VALUE_TYPE c2;
6099 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6100 real_convert (&c2, mode, &c2);
6102 if (REAL_VALUE_ISINF (c2))
6104 /* sqrt(x) > y is x == +Inf, when y is very large. */
6105 if (HONOR_INFINITIES (mode))
6106 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6107 build_real (TREE_TYPE (arg), c2));
6109 /* sqrt(x) > y is always false, when y is very large
6110 and we don't care about infinities. */
6111 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6114 /* sqrt(x) > c is the same as x > c*c. */
6115 return fold_build2_loc (loc, code, type, arg,
6116 build_real (TREE_TYPE (arg), c2));
6118 else if (code == LT_EXPR || code == LE_EXPR)
6120 REAL_VALUE_TYPE c2;
6122 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6123 real_convert (&c2, mode, &c2);
6125 if (REAL_VALUE_ISINF (c2))
6127 /* sqrt(x) < y is always true, when y is a very large
6128 value and we don't care about NaNs or Infinities. */
6129 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6130 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6132 /* sqrt(x) < y is x != +Inf when y is very large and we
6133 don't care about NaNs. */
6134 if (! HONOR_NANS (mode))
6135 return fold_build2_loc (loc, NE_EXPR, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 /* sqrt(x) < y is x >= 0 when y is very large and we
6139 don't care about Infinities. */
6140 if (! HONOR_INFINITIES (mode))
6141 return fold_build2_loc (loc, GE_EXPR, type, arg,
6142 build_real (TREE_TYPE (arg), dconst0));
6144 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6145 arg = save_expr (arg);
6146 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6147 fold_build2_loc (loc, GE_EXPR, type, arg,
6148 build_real (TREE_TYPE (arg),
6149 dconst0)),
6150 fold_build2_loc (loc, NE_EXPR, type, arg,
6151 build_real (TREE_TYPE (arg),
6152 c2)));
6155 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6156 if (! HONOR_NANS (mode))
6157 return fold_build2_loc (loc, code, type, arg,
6158 build_real (TREE_TYPE (arg), c2));
6160 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6161 arg = save_expr (arg);
6162 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6163 fold_build2_loc (loc, GE_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg),
6165 dconst0)),
6166 fold_build2_loc (loc, code, type, arg,
6167 build_real (TREE_TYPE (arg),
6168 c2)));
6172 return NULL_TREE;
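/* Examples of the sqrt folds above (a sketch, assuming double x):

       sqrt (x) > 2.0    becomes   x > 4.0
       sqrt (x) < 2.0    becomes   x >= 0.0 && x < 4.0
                                   (just x < 4.0 if NaNs are ignored)
       sqrt (x) < -1.0   becomes   0 (false)

   with the squared constant computed and range-checked at fold time.  */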
6175 /* Subroutine of fold() that optimizes comparisons against Infinities,
6176 either +Inf or -Inf.
6178 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6179 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6180 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6182 The function returns the constant folded tree if a simplification
6183 can be made, and NULL_TREE otherwise. */
6185 static tree
6186 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6187 tree arg0, tree arg1)
6189 enum machine_mode mode;
6190 REAL_VALUE_TYPE max;
6191 tree temp;
6192 bool neg;
6194 mode = TYPE_MODE (TREE_TYPE (arg0));
6196 /* For negative infinity swap the sense of the comparison. */
6197 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6198 if (neg)
6199 code = swap_tree_comparison (code);
6201 switch (code)
6203 case GT_EXPR:
6204 /* x > +Inf is always false, if we ignore sNaNs. */
6205 if (HONOR_SNANS (mode))
6206 return NULL_TREE;
6207 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6209 case LE_EXPR:
6210 /* x <= +Inf is always true, if we don't care about NaNs. */
6211 if (! HONOR_NANS (mode))
6212 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6214 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6215 arg0 = save_expr (arg0);
6216 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6218 case EQ_EXPR:
6219 case GE_EXPR:
6220 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6221 real_maxval (&max, neg, mode);
6222 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6223 arg0, build_real (TREE_TYPE (arg0), max));
6225 case LT_EXPR:
6226 /* x < +Inf is always equal to x <= DBL_MAX. */
6227 real_maxval (&max, neg, mode);
6228 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6229 arg0, build_real (TREE_TYPE (arg0), max));
6231 case NE_EXPR:
6232 /* x != +Inf is always equal to !(x > DBL_MAX). */
6233 real_maxval (&max, neg, mode);
6234 if (! HONOR_NANS (mode))
6235 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6236 arg0, build_real (TREE_TYPE (arg0), max));
6238 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6239 arg0, build_real (TREE_TYPE (arg0), max));
6240 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6242 default:
6243 break;
6246 return NULL_TREE;
6249 /* Subroutine of fold() that optimizes comparisons of a division by
6250 a nonzero integer constant against an integer constant, i.e.
6251 X/C1 op C2.
6253 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6254 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6255 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6257 The function returns the constant folded tree if a simplification
6258 can be made, and NULL_TREE otherwise. */
6260 static tree
6261 fold_div_compare (location_t loc,
6262 enum tree_code code, tree type, tree arg0, tree arg1)
6264 tree prod, tmp, hi, lo;
6265 tree arg00 = TREE_OPERAND (arg0, 0);
6266 tree arg01 = TREE_OPERAND (arg0, 1);
6267 double_int val;
6268 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6269 bool neg_overflow;
6270 bool overflow;
6272 /* We have to do this the hard way to detect unsigned overflow.
6273 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6274 val = TREE_INT_CST (arg01)
6275 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6276 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6277 neg_overflow = false;
6279 if (unsigned_p)
6281 tmp = int_const_binop (MINUS_EXPR, arg01,
6282 build_int_cst (TREE_TYPE (arg01), 1));
6283 lo = prod;
6285 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6286 val = TREE_INT_CST (prod)
6287 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6288 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6289 -1, overflow | TREE_OVERFLOW (prod));
6291 else if (tree_int_cst_sgn (arg01) >= 0)
6293 tmp = int_const_binop (MINUS_EXPR, arg01,
6294 build_int_cst (TREE_TYPE (arg01), 1));
6295 switch (tree_int_cst_sgn (arg1))
6297 case -1:
6298 neg_overflow = true;
6299 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6300 hi = prod;
6301 break;
6303 case 0:
6304 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6305 hi = tmp;
6306 break;
6308 case 1:
6309 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6310 lo = prod;
6311 break;
6313 default:
6314 gcc_unreachable ();
6317 else
6319 /* A negative divisor reverses the relational operators. */
6320 code = swap_tree_comparison (code);
6322 tmp = int_const_binop (PLUS_EXPR, arg01,
6323 build_int_cst (TREE_TYPE (arg01), 1));
6324 switch (tree_int_cst_sgn (arg1))
6326 case -1:
6327 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6328 lo = prod;
6329 break;
6331 case 0:
6332 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6333 lo = tmp;
6334 break;
6336 case 1:
6337 neg_overflow = true;
6338 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6339 hi = prod;
6340 break;
6342 default:
6343 gcc_unreachable ();
6347 switch (code)
6349 case EQ_EXPR:
6350 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6351 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6352 if (TREE_OVERFLOW (hi))
6353 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6354 if (TREE_OVERFLOW (lo))
6355 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6356 return build_range_check (loc, type, arg00, 1, lo, hi);
6358 case NE_EXPR:
6359 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6360 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6361 if (TREE_OVERFLOW (hi))
6362 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6363 if (TREE_OVERFLOW (lo))
6364 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6365 return build_range_check (loc, type, arg00, 0, lo, hi);
6367 case LT_EXPR:
6368 if (TREE_OVERFLOW (lo))
6370 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6371 return omit_one_operand_loc (loc, type, tmp, arg00);
6373 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6375 case LE_EXPR:
6376 if (TREE_OVERFLOW (hi))
6378 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6379 return omit_one_operand_loc (loc, type, tmp, arg00);
6381 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6383 case GT_EXPR:
6384 if (TREE_OVERFLOW (hi))
6386 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6387 return omit_one_operand_loc (loc, type, tmp, arg00);
6389 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6391 case GE_EXPR:
6392 if (TREE_OVERFLOW (lo))
6394 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6395 return omit_one_operand_loc (loc, type, tmp, arg00);
6397 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6399 default:
6400 break;
6403 return NULL_TREE;
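/* Worked example (sketch): with unsigned int X, "X / 4 == 3" holds
   exactly for X in [12, 15], so it folds into the range check built
   above; "X / 4 > 3" likewise becomes "X > 15".  */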
6407 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6408 equality/inequality test, then return a simplified form of the test
6409 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6410 result type. */
6412 static tree
6413 fold_single_bit_test_into_sign_test (location_t loc,
6414 enum tree_code code, tree arg0, tree arg1,
6415 tree result_type)
6417 /* If this is testing a single bit, we can optimize the test. */
6418 if ((code == NE_EXPR || code == EQ_EXPR)
6419 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6420 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6422 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6423 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6424 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6426 if (arg00 != NULL_TREE
6427 /* This is only a win if casting to a signed type is cheap,
6428 i.e. when arg00's type is not a partial mode. */
6429 && TYPE_PRECISION (TREE_TYPE (arg00))
6430 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6432 tree stype = signed_type_for (TREE_TYPE (arg00));
6433 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6434 result_type,
6435 fold_convert_loc (loc, stype, arg00),
6436 build_int_cst (stype, 0));
6440 return NULL_TREE;
6443 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6444 equality/inequality test, then return a simplified form of
6445 the test using shifts and logical operations. Otherwise return
6446 NULL. RESULT_TYPE is the desired result type. */
6448 tree
6449 fold_single_bit_test (location_t loc, enum tree_code code,
6450 tree arg0, tree arg1, tree result_type)
6452 /* If this is testing a single bit, we can optimize the test. */
6453 if ((code == NE_EXPR || code == EQ_EXPR)
6454 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6455 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6457 tree inner = TREE_OPERAND (arg0, 0);
6458 tree type = TREE_TYPE (arg0);
6459 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6460 enum machine_mode operand_mode = TYPE_MODE (type);
6461 int ops_unsigned;
6462 tree signed_type, unsigned_type, intermediate_type;
6463 tree tem, one;
6465 /* First, see if we can fold the single bit test into a sign-bit
6466 test. */
6467 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6468 result_type);
6469 if (tem)
6470 return tem;
6472 /* Otherwise we have (A & C) != 0 where C is a single bit,
6473 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6474 Similarly for (A & C) == 0. */
6476 /* If INNER is a right shift of a constant and it plus BITNUM does
6477 not overflow, adjust BITNUM and INNER. */
6478 if (TREE_CODE (inner) == RSHIFT_EXPR
6479 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6480 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6481 && bitnum < TYPE_PRECISION (type)
6482 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6483 bitnum - TYPE_PRECISION (type)))
6485 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6486 inner = TREE_OPERAND (inner, 0);
6489 /* If we are going to be able to omit the AND below, we must do our
6490 operations as unsigned. If we must use the AND, we have a choice.
6491 Normally unsigned is faster, but for some machines signed is. */
6492 #ifdef LOAD_EXTEND_OP
6493 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6494 && !flag_syntax_only) ? 0 : 1;
6495 #else
6496 ops_unsigned = 1;
6497 #endif
6499 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6500 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6501 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6502 inner = fold_convert_loc (loc, intermediate_type, inner);
6504 if (bitnum != 0)
6505 inner = build2 (RSHIFT_EXPR, intermediate_type,
6506 inner, size_int (bitnum));
6508 one = build_int_cst (intermediate_type, 1);
6510 if (code == EQ_EXPR)
6511 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6513 /* Put the AND last so it can combine with more things. */
6514 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6516 /* Make sure to return the proper type. */
6517 inner = fold_convert_loc (loc, result_type, inner);
6519 return inner;
6521 return NULL_TREE;
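/* Illustration (sketch): for "(flags & 8) != 0", unless bit 3 happens
   to be the sign bit of the type, the shift form is built in the usual
   unsigned intermediate type:

       ((unsigned_type) flags >> 3) & 1

   and for the "== 0" flavour the shifted value is XORed with 1 before
   the final AND.  */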
6524 /* Check whether we are allowed to reorder operands arg0 and arg1,
6525 such that the evaluation of arg1 occurs before arg0. */
6527 static bool
6528 reorder_operands_p (const_tree arg0, const_tree arg1)
6530 if (! flag_evaluation_order)
6531 return true;
6532 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6533 return true;
6534 return ! TREE_SIDE_EFFECTS (arg0)
6535 && ! TREE_SIDE_EFFECTS (arg1);
6538 /* Test whether it is preferable to swap two operands, ARG0 and
6539 ARG1, for example because ARG0 is an integer constant and ARG1
6540 isn't. If REORDER is true, only recommend swapping if we can
6541 evaluate the operands in reverse order. */
6543 bool
6544 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6546 STRIP_SIGN_NOPS (arg0);
6547 STRIP_SIGN_NOPS (arg1);
6549 if (TREE_CODE (arg1) == INTEGER_CST)
6550 return 0;
6551 if (TREE_CODE (arg0) == INTEGER_CST)
6552 return 1;
6554 if (TREE_CODE (arg1) == REAL_CST)
6555 return 0;
6556 if (TREE_CODE (arg0) == REAL_CST)
6557 return 1;
6559 if (TREE_CODE (arg1) == FIXED_CST)
6560 return 0;
6561 if (TREE_CODE (arg0) == FIXED_CST)
6562 return 1;
6564 if (TREE_CODE (arg1) == COMPLEX_CST)
6565 return 0;
6566 if (TREE_CODE (arg0) == COMPLEX_CST)
6567 return 1;
6569 if (TREE_CONSTANT (arg1))
6570 return 0;
6571 if (TREE_CONSTANT (arg0))
6572 return 1;
6574 if (optimize_function_for_size_p (cfun))
6575 return 0;
6577 if (reorder && flag_evaluation_order
6578 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6579 return 0;
6581 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6582 for commutative and comparison operators. Ensuring a canonical
6583 form allows the optimizers to find additional redundancies without
6584 having to explicitly check for both orderings. */
6585 if (TREE_CODE (arg0) == SSA_NAME
6586 && TREE_CODE (arg1) == SSA_NAME
6587 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6588 return 1;
6590 /* Put SSA_NAMEs last. */
6591 if (TREE_CODE (arg1) == SSA_NAME)
6592 return 0;
6593 if (TREE_CODE (arg0) == SSA_NAME)
6594 return 1;
6596 /* Put variables last. */
6597 if (DECL_P (arg1))
6598 return 0;
6599 if (DECL_P (arg0))
6600 return 1;
6602 return 0;
6605 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6606 ARG0 is extended to a wider type. */
6608 static tree
6609 fold_widened_comparison (location_t loc, enum tree_code code,
6610 tree type, tree arg0, tree arg1)
6612 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6613 tree arg1_unw;
6614 tree shorter_type, outer_type;
6615 tree min, max;
6616 bool above, below;
6618 if (arg0_unw == arg0)
6619 return NULL_TREE;
6620 shorter_type = TREE_TYPE (arg0_unw);
6622 #ifdef HAVE_canonicalize_funcptr_for_compare
6623 /* Disable this optimization if we're casting a function pointer
6624 type on targets that require function pointer canonicalization. */
6625 if (HAVE_canonicalize_funcptr_for_compare
6626 && TREE_CODE (shorter_type) == POINTER_TYPE
6627 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6628 return NULL_TREE;
6629 #endif
6631 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6632 return NULL_TREE;
6634 arg1_unw = get_unwidened (arg1, NULL_TREE);
6636 /* If possible, express the comparison in the shorter mode. */
6637 if ((code == EQ_EXPR || code == NE_EXPR
6638 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6639 && (TREE_TYPE (arg1_unw) == shorter_type
6640 || ((TYPE_PRECISION (shorter_type)
6641 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6642 && (TYPE_UNSIGNED (shorter_type)
6643 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6644 || (TREE_CODE (arg1_unw) == INTEGER_CST
6645 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6646 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6647 && int_fits_type_p (arg1_unw, shorter_type))))
6648 return fold_build2_loc (loc, code, type, arg0_unw,
6649 fold_convert_loc (loc, shorter_type, arg1_unw));
6651 if (TREE_CODE (arg1_unw) != INTEGER_CST
6652 || TREE_CODE (shorter_type) != INTEGER_TYPE
6653 || !int_fits_type_p (arg1_unw, shorter_type))
6654 return NULL_TREE;
6656 /* If we are comparing with an integer that does not fit into the range
6657 of the shorter type, the result is known. */
6658 outer_type = TREE_TYPE (arg1_unw);
6659 min = lower_bound_in_type (outer_type, shorter_type);
6660 max = upper_bound_in_type (outer_type, shorter_type);
6662 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6663 max, arg1_unw));
6664 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6665 arg1_unw, min));
6667 switch (code)
6669 case EQ_EXPR:
6670 if (above || below)
6671 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6672 break;
6674 case NE_EXPR:
6675 if (above || below)
6676 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6677 break;
6679 case LT_EXPR:
6680 case LE_EXPR:
6681 if (above)
6682 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6683 else if (below)
6684 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6686 case GT_EXPR:
6687 case GE_EXPR:
6688 if (above)
6689 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6690 else if (below)
6691 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6693 default:
6694 break;
6697 return NULL_TREE;
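/* Example (sketch): for "signed char c" and "signed char d", the
   widened test "(int) c == 42" is re-expressed in the narrow type as
   "c == 42", and "(int) c < (int) d" becomes "c < d", saving the
   widening conversions.  */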
6700 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6701 ARG0 just the signedness is changed. */
6703 static tree
6704 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6705 tree arg0, tree arg1)
6707 tree arg0_inner;
6708 tree inner_type, outer_type;
6710 if (!CONVERT_EXPR_P (arg0))
6711 return NULL_TREE;
6713 outer_type = TREE_TYPE (arg0);
6714 arg0_inner = TREE_OPERAND (arg0, 0);
6715 inner_type = TREE_TYPE (arg0_inner);
6717 #ifdef HAVE_canonicalize_funcptr_for_compare
6718 /* Disable this optimization if we're casting a function pointer
6719 type on targets that require function pointer canonicalization. */
6720 if (HAVE_canonicalize_funcptr_for_compare
6721 && TREE_CODE (inner_type) == POINTER_TYPE
6722 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6723 return NULL_TREE;
6724 #endif
6726 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6727 return NULL_TREE;
6729 if (TREE_CODE (arg1) != INTEGER_CST
6730 && !(CONVERT_EXPR_P (arg1)
6731 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6732 return NULL_TREE;
6734 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6735 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6736 && code != NE_EXPR
6737 && code != EQ_EXPR)
6738 return NULL_TREE;
6740 if (TREE_CODE (arg1) == INTEGER_CST)
6741 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6742 0, TREE_OVERFLOW (arg1));
6743 else
6744 arg1 = fold_convert_loc (loc, inner_type, arg1);
6746 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6749 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6750 the step of the array. Reconstructs s and delta in the case of s *
6751 delta being an integer constant (and thus already folded). ADDR is
6752 the address. OP1 is the multiplicative expression. If the
6753 function succeeds, the new address expression is returned.
6754 Otherwise NULL_TREE is returned. LOC is the location of the
6755 resulting expression. */
6757 static tree
6758 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6760 tree s, delta, step;
6761 tree ref = TREE_OPERAND (addr, 0), pref;
6762 tree ret, pos;
6763 tree itype;
6764 bool mdim = false;
6766 /* Strip the nops that might be added when converting op1 to sizetype. */
6767 STRIP_NOPS (op1);
6769 /* Canonicalize op1 into a possibly non-constant delta
6770 and an INTEGER_CST s. */
6771 if (TREE_CODE (op1) == MULT_EXPR)
6773 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6775 STRIP_NOPS (arg0);
6776 STRIP_NOPS (arg1);
6778 if (TREE_CODE (arg0) == INTEGER_CST)
6780 s = arg0;
6781 delta = arg1;
6783 else if (TREE_CODE (arg1) == INTEGER_CST)
6785 s = arg1;
6786 delta = arg0;
6788 else
6789 return NULL_TREE;
6791 else if (TREE_CODE (op1) == INTEGER_CST)
6793 delta = op1;
6794 s = NULL_TREE;
6796 else
6798 /* Treat op1 as delta * 1. */
6799 delta = op1;
6800 s = integer_one_node;
6803 /* Handle &x.array the same as we would handle &x.array[0]. */
6804 if (TREE_CODE (ref) == COMPONENT_REF
6805 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6807 tree domain;
6809 /* Remember if this was a multi-dimensional array. */
6810 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6811 mdim = true;
6813 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6814 if (! domain)
6815 goto cont;
6816 itype = TREE_TYPE (domain);
6818 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6819 if (TREE_CODE (step) != INTEGER_CST)
6820 goto cont;
6822 if (s)
6824 if (! tree_int_cst_equal (step, s))
6825 goto cont;
6827 else
6829 /* Check whether delta is a multiple of step. */
6830 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6831 if (! tmp)
6832 goto cont;
6833 delta = tmp;
6836 /* Only fold here if we can verify we do not overflow one
6837 dimension of a multi-dimensional array. */
6838 if (mdim)
6840 tree tmp;
6842 if (!TYPE_MIN_VALUE (domain)
6843 || !TYPE_MAX_VALUE (domain)
6844 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6845 goto cont;
6847 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6848 fold_convert_loc (loc, itype,
6849 TYPE_MIN_VALUE (domain)),
6850 fold_convert_loc (loc, itype, delta));
6851 if (TREE_CODE (tmp) != INTEGER_CST
6852 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6853 goto cont;
6856 /* We found a suitable component reference. */
6858 pref = TREE_OPERAND (addr, 0);
6859 ret = copy_node (pref);
6860 SET_EXPR_LOCATION (ret, loc);
6862 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6863 fold_build2_loc
6864 (loc, PLUS_EXPR, itype,
6865 fold_convert_loc (loc, itype,
6866 TYPE_MIN_VALUE
6867 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6868 fold_convert_loc (loc, itype, delta)),
6869 NULL_TREE, NULL_TREE);
6870 return build_fold_addr_expr_loc (loc, ret);
6873 cont:
6875 for (;; ref = TREE_OPERAND (ref, 0))
6877 if (TREE_CODE (ref) == ARRAY_REF)
6879 tree domain;
6881 /* Remember if this was a multi-dimensional array. */
6882 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6883 mdim = true;
6885 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6886 if (! domain)
6887 continue;
6888 itype = TREE_TYPE (domain);
6890 step = array_ref_element_size (ref);
6891 if (TREE_CODE (step) != INTEGER_CST)
6892 continue;
6894 if (s)
6896 if (! tree_int_cst_equal (step, s))
6897 continue;
6899 else
6901 /* Check whether delta is a multiple of step. */
6902 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6903 if (! tmp)
6904 continue;
6905 delta = tmp;
6908 /* Only fold here if we can verify we do not overflow one
6909 dimension of a multi-dimensional array. */
6910 if (mdim)
6912 tree tmp;
6914 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6915 || !TYPE_MAX_VALUE (domain)
6916 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6917 continue;
6919 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6920 fold_convert_loc (loc, itype,
6921 TREE_OPERAND (ref, 1)),
6922 fold_convert_loc (loc, itype, delta));
6923 if (!tmp
6924 || TREE_CODE (tmp) != INTEGER_CST
6925 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6926 continue;
6929 break;
6931 else
6932 mdim = false;
6934 if (!handled_component_p (ref))
6935 return NULL_TREE;
6938 /* We found a suitable array reference. So copy everything up to it,
6939 and replace the index. */
6941 pref = TREE_OPERAND (addr, 0);
6942 ret = copy_node (pref);
6943 SET_EXPR_LOCATION (ret, loc);
6944 pos = ret;
6946 while (pref != ref)
6948 pref = TREE_OPERAND (pref, 0);
6949 TREE_OPERAND (pos, 0) = copy_node (pref);
6950 pos = TREE_OPERAND (pos, 0);
6953 TREE_OPERAND (pos, 1)
6954 = fold_build2_loc (loc, PLUS_EXPR, itype,
6955 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6956 fold_convert_loc (loc, itype, delta));
6957 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
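/* For instance (sketch, assuming a 4-byte int): with "int a[10]",
   "&a[1] p+ (i * 4)" has s == 4 matching the array step and becomes
   "&a[1 + i]", while the already-folded constant offset "&a[1] p+ 8"
   is recognized as delta == 2 steps, giving "&a[3]".  */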
6961 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6962 means A >= Y && A != MAX, but in this case we know that
6963 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6965 static tree
6966 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6968 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6970 if (TREE_CODE (bound) == LT_EXPR)
6971 a = TREE_OPERAND (bound, 0);
6972 else if (TREE_CODE (bound) == GT_EXPR)
6973 a = TREE_OPERAND (bound, 1);
6974 else
6975 return NULL_TREE;
6977 typea = TREE_TYPE (a);
6978 if (!INTEGRAL_TYPE_P (typea)
6979 && !POINTER_TYPE_P (typea))
6980 return NULL_TREE;
6982 if (TREE_CODE (ineq) == LT_EXPR)
6984 a1 = TREE_OPERAND (ineq, 1);
6985 y = TREE_OPERAND (ineq, 0);
6987 else if (TREE_CODE (ineq) == GT_EXPR)
6989 a1 = TREE_OPERAND (ineq, 0);
6990 y = TREE_OPERAND (ineq, 1);
6992 else
6993 return NULL_TREE;
6995 if (TREE_TYPE (a1) != typea)
6996 return NULL_TREE;
6998 if (POINTER_TYPE_P (typea))
7000 /* Convert the pointer types into integer before taking the difference. */
7001 tree ta = fold_convert_loc (loc, ssizetype, a);
7002 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7003 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7005 else
7006 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7008 if (!diff || !integer_onep (diff))
7009 return NULL_TREE;
7011 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7014 /* Fold a sum or difference in which at least one operand is a multiplication.
7015 Returns the folded tree or NULL if no simplification could be made. */
7017 static tree
7018 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7019 tree arg0, tree arg1)
7021 tree arg00, arg01, arg10, arg11;
7022 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7024 /* (A * C) +- (B * C) -> (A+-B) * C.
7025 (A * C) +- A -> A * (C+-1).
7026 We are most concerned about the case where C is a constant,
7027 but other combinations show up during loop reduction. Since
7028 it is not difficult, try all four possibilities. */
7030 if (TREE_CODE (arg0) == MULT_EXPR)
7032 arg00 = TREE_OPERAND (arg0, 0);
7033 arg01 = TREE_OPERAND (arg0, 1);
7035 else if (TREE_CODE (arg0) == INTEGER_CST)
7037 arg00 = build_one_cst (type);
7038 arg01 = arg0;
7040 else
7042 /* We cannot generate constant 1 for fract. */
7043 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7044 return NULL_TREE;
7045 arg00 = arg0;
7046 arg01 = build_one_cst (type);
7048 if (TREE_CODE (arg1) == MULT_EXPR)
7050 arg10 = TREE_OPERAND (arg1, 0);
7051 arg11 = TREE_OPERAND (arg1, 1);
7053 else if (TREE_CODE (arg1) == INTEGER_CST)
7055 arg10 = build_one_cst (type);
7056 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7057 the purpose of this canonicalization. */
7058 if (TREE_INT_CST_HIGH (arg1) == -1
7059 && negate_expr_p (arg1)
7060 && code == PLUS_EXPR)
7062 arg11 = negate_expr (arg1);
7063 code = MINUS_EXPR;
7065 else
7066 arg11 = arg1;
7068 else
7070 /* We cannot generate constant 1 for fract. */
7071 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7072 return NULL_TREE;
7073 arg10 = arg1;
7074 arg11 = build_one_cst (type);
7076 same = NULL_TREE;
7078 if (operand_equal_p (arg01, arg11, 0))
7079 same = arg01, alt0 = arg00, alt1 = arg10;
7080 else if (operand_equal_p (arg00, arg10, 0))
7081 same = arg00, alt0 = arg01, alt1 = arg11;
7082 else if (operand_equal_p (arg00, arg11, 0))
7083 same = arg00, alt0 = arg01, alt1 = arg10;
7084 else if (operand_equal_p (arg01, arg10, 0))
7085 same = arg01, alt0 = arg00, alt1 = arg11;
7087 /* No identical multiplicands; see if we can find a common
7088 power-of-two factor in non-power-of-two multiplies. This
7089 can help in multi-dimensional array access. */
7090 else if (host_integerp (arg01, 0)
7091 && host_integerp (arg11, 0))
7093 HOST_WIDE_INT int01, int11, tmp;
7094 bool swap = false;
7095 tree maybe_same;
7096 int01 = TREE_INT_CST_LOW (arg01);
7097 int11 = TREE_INT_CST_LOW (arg11);
7099 /* Move min of absolute values to int11. */
7100 if (absu_hwi (int01) < absu_hwi (int11))
7102 tmp = int01, int01 = int11, int11 = tmp;
7103 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7104 maybe_same = arg01;
7105 swap = true;
7107 else
7108 maybe_same = arg11;
7110 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7111 /* The remainder should not be a constant, otherwise we
7112 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7113 increase the number of multiplications necessary. */
7114 && TREE_CODE (arg10) != INTEGER_CST)
7116 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7117 build_int_cst (TREE_TYPE (arg00),
7118 int01 / int11));
7119 alt1 = arg10;
7120 same = maybe_same;
7121 if (swap)
7122 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7126 if (same)
7127 return fold_build2_loc (loc, MULT_EXPR, type,
7128 fold_build2_loc (loc, code, type,
7129 fold_convert_loc (loc, type, alt0),
7130 fold_convert_loc (loc, type, alt1)),
7131 fold_convert_loc (loc, type, same));
7133 return NULL_TREE;
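/* Two shapes this catches (sketch): identical multiplicands,

       x * 3 + x * 5   ->  (3 + 5) * x  ->  x * 8

   and the power-of-two factoring used for array indexing,

       i * 12 + j * 4  ->  (i * 3 + j) * 4

   where 4 divides 12 and j is not itself a constant.  */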
7136 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7137 specified by EXPR into the buffer PTR of length LEN bytes.
7138 Return the number of bytes placed in the buffer, or zero
7139 upon failure. */
7141 static int
7142 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7144 tree type = TREE_TYPE (expr);
7145 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7146 int byte, offset, word, words;
7147 unsigned char value;
7149 if (total_bytes > len)
7150 return 0;
7151 words = total_bytes / UNITS_PER_WORD;
7153 for (byte = 0; byte < total_bytes; byte++)
7155 int bitpos = byte * BITS_PER_UNIT;
7156 if (bitpos < HOST_BITS_PER_WIDE_INT)
7157 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7158 else
7159 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7160 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7162 if (total_bytes > UNITS_PER_WORD)
7164 word = byte / UNITS_PER_WORD;
7165 if (WORDS_BIG_ENDIAN)
7166 word = (words - 1) - word;
7167 offset = word * UNITS_PER_WORD;
7168 if (BYTES_BIG_ENDIAN)
7169 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7170 else
7171 offset += byte % UNITS_PER_WORD;
7173 else
7174 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7175 ptr[offset] = value;
7177 return total_bytes;
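/* Byte-order illustration (sketch): encoding the 32-bit INTEGER_CST
   0x01020304 fills PTR with { 0x04, 0x03, 0x02, 0x01 } when
   BYTES_BIG_ENDIAN is 0 and with { 0x01, 0x02, 0x03, 0x04 } when it
   is 1; the buffer always holds the target's memory image of the
   constant.  */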
7181 /* Subroutine of native_encode_expr. Encode the REAL_CST
7182 specified by EXPR into the buffer PTR of length LEN bytes.
7183 Return the number of bytes placed in the buffer, or zero
7184 upon failure. */
7186 static int
7187 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7189 tree type = TREE_TYPE (expr);
7190 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7191 int byte, offset, word, words, bitpos;
7192 unsigned char value;
7194 /* There are always 32 bits in each long, no matter the size of
7195 the host's long. We handle floating point representations with
7196 up to 192 bits. */
7197 long tmp[6];
7199 if (total_bytes > len)
7200 return 0;
7201 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7203 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7205 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7206 bitpos += BITS_PER_UNIT)
7208 byte = (bitpos / BITS_PER_UNIT) & 3;
7209 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7211 if (UNITS_PER_WORD < 4)
7213 word = byte / UNITS_PER_WORD;
7214 if (WORDS_BIG_ENDIAN)
7215 word = (words - 1) - word;
7216 offset = word * UNITS_PER_WORD;
7217 if (BYTES_BIG_ENDIAN)
7218 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7219 else
7220 offset += byte % UNITS_PER_WORD;
7222 else
7223 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7224 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7226 return total_bytes;
7229 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7230 specified by EXPR into the buffer PTR of length LEN bytes.
7231 Return the number of bytes placed in the buffer, or zero
7232 upon failure. */
7234 static int
7235 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7237 int rsize, isize;
7238 tree part;
7240 part = TREE_REALPART (expr);
7241 rsize = native_encode_expr (part, ptr, len);
7242 if (rsize == 0)
7243 return 0;
7244 part = TREE_IMAGPART (expr);
7245 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7246 if (isize != rsize)
7247 return 0;
7248 return rsize + isize;
7252 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7253 specified by EXPR into the buffer PTR of length LEN bytes.
7254 Return the number of bytes placed in the buffer, or zero
7255 upon failure. */
7257 static int
7258 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7260 unsigned i, count;
7261 int size, offset;
7262 tree itype, elem;
7264 offset = 0;
7265 count = VECTOR_CST_NELTS (expr);
7266 itype = TREE_TYPE (TREE_TYPE (expr));
7267 size = GET_MODE_SIZE (TYPE_MODE (itype));
7268 for (i = 0; i < count; i++)
7270 elem = VECTOR_CST_ELT (expr, i);
7271 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7272 return 0;
7273 offset += size;
7275 return offset;
7279 /* Subroutine of native_encode_expr. Encode the STRING_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
7284 static int
7285 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7287 tree type = TREE_TYPE (expr);
7288 HOST_WIDE_INT total_bytes;
7290 if (TREE_CODE (type) != ARRAY_TYPE
7291 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7292 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7293 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7294 return 0;
7295 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7296 if (total_bytes > len)
7297 return 0;
7298 if (TREE_STRING_LENGTH (expr) < total_bytes)
7300 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7301 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7302 total_bytes - TREE_STRING_LENGTH (expr));
7304 else
7305 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7306 return total_bytes;
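/* For example (illustrative only): encoding a STRING_CST "ab" whose
   type is char[8] copies the string bytes into PTR and zero-fills
   the remaining tail, returning 8; a buffer shorter than 8 bytes
   makes the routine fail and return 0.  */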
7310 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7311 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7312 buffer PTR of length LEN bytes. Return the number of bytes
7313 placed in the buffer, or zero upon failure. */
7315 int
7316 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7318 switch (TREE_CODE (expr))
7320 case INTEGER_CST:
7321 return native_encode_int (expr, ptr, len);
7323 case REAL_CST:
7324 return native_encode_real (expr, ptr, len);
7326 case COMPLEX_CST:
7327 return native_encode_complex (expr, ptr, len);
7329 case VECTOR_CST:
7330 return native_encode_vector (expr, ptr, len);
7332 case STRING_CST:
7333 return native_encode_string (expr, ptr, len);
7335 default:
7336 return 0;
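/* A minimal usage sketch (illustrative only; the helper below is
   hypothetical and not part of this file): round-trip a constant
   through its native byte encoding, the same pairing that
   fold_view_convert_expr performs further down.  */
#if 0
static bool
example_encoding_round_trips_p (tree cst)
{
  unsigned char buf[64];	/* Room for up to 512-bit values.  */
  int len = native_encode_expr (cst, buf, sizeof (buf));

  /* A zero return means the constant could not be encoded.  */
  if (len == 0)
    return false;
  return native_interpret_expr (TREE_TYPE (cst), buf, len) != NULL_TREE;
}
#endif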
7341 /* Subroutine of native_interpret_expr. Interpret the contents of
7342 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7343 If the buffer cannot be interpreted, return NULL_TREE. */
7345 static tree
7346 native_interpret_int (tree type, const unsigned char *ptr, int len)
7348 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7349 int byte, offset, word, words;
7350 unsigned char value;
7351 double_int result;
7353 if (total_bytes > len)
7354 return NULL_TREE;
7355 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7356 return NULL_TREE;
7358 result = double_int_zero;
7359 words = total_bytes / UNITS_PER_WORD;
7361 for (byte = 0; byte < total_bytes; byte++)
7363 int bitpos = byte * BITS_PER_UNIT;
7364 if (total_bytes > UNITS_PER_WORD)
7366 word = byte / UNITS_PER_WORD;
7367 if (WORDS_BIG_ENDIAN)
7368 word = (words - 1) - word;
7369 offset = word * UNITS_PER_WORD;
7370 if (BYTES_BIG_ENDIAN)
7371 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7372 else
7373 offset += byte % UNITS_PER_WORD;
7375 else
7376 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7377 value = ptr[offset];
7379 if (bitpos < HOST_BITS_PER_WIDE_INT)
7380 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7381 else
7382 result.high |= (unsigned HOST_WIDE_INT) value
7383 << (bitpos - HOST_BITS_PER_WIDE_INT);
7386 return double_int_to_tree (type, result);
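/* For example (illustrative only, little-endian target): the bytes
   { 0x02, 0x01 } interpreted as a 2-byte integer type yield the
   INTEGER_CST 0x0102 -- the exact inverse of native_encode_int
   above.  */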
7390 /* Subroutine of native_interpret_expr. Interpret the contents of
7391 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7392 If the buffer cannot be interpreted, return NULL_TREE. */
7394 static tree
7395 native_interpret_real (tree type, const unsigned char *ptr, int len)
7397 enum machine_mode mode = TYPE_MODE (type);
7398 int total_bytes = GET_MODE_SIZE (mode);
7399 int byte, offset, word, words, bitpos;
7400 unsigned char value;
7401 /* There are always 32 bits in each long, no matter the size of
7402 the host's long. We handle floating point representations with
7403 up to 192 bits. */
7404 REAL_VALUE_TYPE r;
7405 long tmp[6];
7408 if (total_bytes > len || total_bytes > 24)
7409 return NULL_TREE;
7410 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7412 memset (tmp, 0, sizeof (tmp));
7413 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7414 bitpos += BITS_PER_UNIT)
7416 byte = (bitpos / BITS_PER_UNIT) & 3;
7417 if (UNITS_PER_WORD < 4)
7419 word = byte / UNITS_PER_WORD;
7420 if (WORDS_BIG_ENDIAN)
7421 word = (words - 1) - word;
7422 offset = word * UNITS_PER_WORD;
7423 if (BYTES_BIG_ENDIAN)
7424 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7425 else
7426 offset += byte % UNITS_PER_WORD;
7428 else
7429 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7430 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7432 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7435 real_from_target (&r, tmp, mode);
7436 return build_real (type, r);
7440 /* Subroutine of native_interpret_expr. Interpret the contents of
7441 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7442 If the buffer cannot be interpreted, return NULL_TREE. */
7444 static tree
7445 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7447 tree etype, rpart, ipart;
7448 int size;
7450 etype = TREE_TYPE (type);
7451 size = GET_MODE_SIZE (TYPE_MODE (etype));
7452 if (size * 2 > len)
7453 return NULL_TREE;
7454 rpart = native_interpret_expr (etype, ptr, size);
7455 if (!rpart)
7456 return NULL_TREE;
7457 ipart = native_interpret_expr (etype, ptr+size, size);
7458 if (!ipart)
7459 return NULL_TREE;
7460 return build_complex (type, rpart, ipart);
7464 /* Subroutine of native_interpret_expr. Interpret the contents of
7465 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7466 If the buffer cannot be interpreted, return NULL_TREE. */
7468 static tree
7469 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7471 tree etype, elem;
7472 int i, size, count;
7473 tree *elements;
7475 etype = TREE_TYPE (type);
7476 size = GET_MODE_SIZE (TYPE_MODE (etype));
7477 count = TYPE_VECTOR_SUBPARTS (type);
7478 if (size * count > len)
7479 return NULL_TREE;
7481 elements = XALLOCAVEC (tree, count);
7482 for (i = count - 1; i >= 0; i--)
7484 elem = native_interpret_expr (etype, ptr+(i*size), size);
7485 if (!elem)
7486 return NULL_TREE;
7487 elements[i] = elem;
7489 return build_vector (type, elements);
7493 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7494 the buffer PTR of length LEN as a constant of type TYPE. For
7495 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7496 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7497 return NULL_TREE. */
7499 tree
7500 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7502 switch (TREE_CODE (type))
7504 case INTEGER_TYPE:
7505 case ENUMERAL_TYPE:
7506 case BOOLEAN_TYPE:
7507 case POINTER_TYPE:
7508 case REFERENCE_TYPE:
7509 return native_interpret_int (type, ptr, len);
7511 case REAL_TYPE:
7512 return native_interpret_real (type, ptr, len);
7514 case COMPLEX_TYPE:
7515 return native_interpret_complex (type, ptr, len);
7517 case VECTOR_TYPE:
7518 return native_interpret_vector (type, ptr, len);
7520 default:
7521 return NULL_TREE;
7525 /* Returns true if we can interpret the contents of a native encoding
7526 as TYPE. */
7528 static bool
7529 can_native_interpret_type_p (tree type)
7531 switch (TREE_CODE (type))
7533 case INTEGER_TYPE:
7534 case ENUMERAL_TYPE:
7535 case BOOLEAN_TYPE:
7536 case POINTER_TYPE:
7537 case REFERENCE_TYPE:
7538 case REAL_TYPE:
7539 case COMPLEX_TYPE:
7540 case VECTOR_TYPE:
7541 return true;
7542 default:
7543 return false;
7547 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7548 TYPE at compile-time. If we're unable to perform the conversion
7549 return NULL_TREE. */
7551 static tree
7552 fold_view_convert_expr (tree type, tree expr)
7554 /* We support up to 512-bit values (for V8DFmode). */
7555 unsigned char buffer[64];
7556 int len;
7558 /* Check that the host and target are sane. */
7559 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7560 return NULL_TREE;
7562 len = native_encode_expr (expr, buffer, sizeof (buffer));
7563 if (len == 0)
7564 return NULL_TREE;
7566 return native_interpret_expr (type, buffer, len);
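/* For example (illustrative only, assuming float has the IEEE single
   format and int is a 4-byte SImode type on the target): calling
   fold_view_convert_expr (float_type_node,
			   build_int_cst (integer_type_node, 0x3f800000))
   encodes the integer's four bytes and reinterprets them as a float,
   yielding the REAL_CST 1.0f.  */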
7569 /* Build an expression for the address of T. Folds away INDIRECT_REF
7570 to avoid confusing the gimplify process. */
7572 tree
7573 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7575 /* The size of the object is not relevant when talking about its address. */
7576 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7577 t = TREE_OPERAND (t, 0);
7579 if (TREE_CODE (t) == INDIRECT_REF)
7581 t = TREE_OPERAND (t, 0);
7583 if (TREE_TYPE (t) != ptrtype)
7584 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7586 else if (TREE_CODE (t) == MEM_REF
7587 && integer_zerop (TREE_OPERAND (t, 1)))
7588 return TREE_OPERAND (t, 0);
7589 else if (TREE_CODE (t) == MEM_REF
7590 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7591 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7592 TREE_OPERAND (t, 0),
7593 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7594 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7596 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7598 if (TREE_TYPE (t) != ptrtype)
7599 t = fold_convert_loc (loc, ptrtype, t);
7601 else
7602 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7604 return t;
7607 /* Build an expression for the address of T. */
7609 tree
7610 build_fold_addr_expr_loc (location_t loc, tree t)
7612 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7614 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
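/* For example (illustrative only): applied to *p this yields p
   itself (with a NOP_EXPR if the pointer types differ), and applied
   to a zero-offset MEM_REF [p] it likewise yields p, instead of
   wrapping the dereference in a fresh ADDR_EXPR.  */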
7617 static bool vec_cst_ctor_to_array (tree, tree *);
7619 /* Fold a unary expression of code CODE and type TYPE with operand
7620 OP0. Return the folded expression if folding is successful.
7621 Otherwise, return NULL_TREE. */
7623 tree
7624 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7626 tree tem;
7627 tree arg0;
7628 enum tree_code_class kind = TREE_CODE_CLASS (code);
7630 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7631 && TREE_CODE_LENGTH (code) == 1);
7633 arg0 = op0;
7634 if (arg0)
7636 if (CONVERT_EXPR_CODE_P (code)
7637 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7639 /* Don't use STRIP_NOPS, because signedness of argument type
7640 matters. */
7641 STRIP_SIGN_NOPS (arg0);
7643 else
7645 /* Strip any conversions that don't change the mode. This
7646 is safe for every expression, except for a comparison
7647 expression because its signedness is derived from its
7648 operands.
7650 Note that this is done as an internal manipulation within
7651 the constant folder, in order to find the simplest
7652 representation of the arguments so that their form can be
7653 studied. In any case, the appropriate type conversions
7654 should be put back in the tree that will get out of the
7655 constant folder. */
7656 STRIP_NOPS (arg0);
7660 if (TREE_CODE_CLASS (code) == tcc_unary)
7662 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7663 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7664 fold_build1_loc (loc, code, type,
7665 fold_convert_loc (loc, TREE_TYPE (op0),
7666 TREE_OPERAND (arg0, 1))));
7667 else if (TREE_CODE (arg0) == COND_EXPR)
7669 tree arg01 = TREE_OPERAND (arg0, 1);
7670 tree arg02 = TREE_OPERAND (arg0, 2);
7671 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7672 arg01 = fold_build1_loc (loc, code, type,
7673 fold_convert_loc (loc,
7674 TREE_TYPE (op0), arg01));
7675 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7676 arg02 = fold_build1_loc (loc, code, type,
7677 fold_convert_loc (loc,
7678 TREE_TYPE (op0), arg02));
7679 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7680 arg01, arg02);
7682 /* If this was a conversion, and all we did was to move it
7683 inside the COND_EXPR, bring it back out. But leave it if
7684 it is a conversion from integer to integer and the
7685 result precision is no wider than a word since such a
7686 conversion is cheap and may be optimized away by combine,
7687 while it couldn't if it were outside the COND_EXPR. Then return
7688 so we don't get into an infinite recursion loop taking the
7689 conversion out and then back in. */
7691 if ((CONVERT_EXPR_CODE_P (code)
7692 || code == NON_LVALUE_EXPR)
7693 && TREE_CODE (tem) == COND_EXPR
7694 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7695 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7696 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7697 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7698 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7699 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7700 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7701 && (INTEGRAL_TYPE_P
7702 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7703 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7704 || flag_syntax_only))
7705 tem = build1_loc (loc, code, type,
7706 build3 (COND_EXPR,
7707 TREE_TYPE (TREE_OPERAND
7708 (TREE_OPERAND (tem, 1), 0)),
7709 TREE_OPERAND (tem, 0),
7710 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7711 TREE_OPERAND (TREE_OPERAND (tem, 2),
7712 0)));
7713 return tem;
7717 switch (code)
7719 case PAREN_EXPR:
7720 /* Re-association barriers around constants and other re-association
7721 barriers can be removed. */
7722 if (CONSTANT_CLASS_P (op0)
7723 || TREE_CODE (op0) == PAREN_EXPR)
7724 return fold_convert_loc (loc, type, op0);
7725 return NULL_TREE;
7727 CASE_CONVERT:
7728 case FLOAT_EXPR:
7729 case FIX_TRUNC_EXPR:
7730 if (TREE_TYPE (op0) == type)
7731 return op0;
7733 if (COMPARISON_CLASS_P (op0))
7735 /* If we have (type) (a CMP b) and type is an integral type, return
7736 new expression involving the new type. Canonicalize
7737 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7738 non-integral type.
7739 Do not fold the result, as that would not simplify further;
7740 folding it again would also result in recursion. */
7741 if (TREE_CODE (type) == BOOLEAN_TYPE)
7742 return build2_loc (loc, TREE_CODE (op0), type,
7743 TREE_OPERAND (op0, 0),
7744 TREE_OPERAND (op0, 1));
7745 else if (!INTEGRAL_TYPE_P (type) && TREE_CODE (type) != VECTOR_TYPE)
7746 return build3_loc (loc, COND_EXPR, type, op0,
7747 constant_boolean_node (true, type),
7748 constant_boolean_node (false, type));
7751 /* Handle cases of two conversions in a row. */
7752 if (CONVERT_EXPR_P (op0))
7754 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7755 tree inter_type = TREE_TYPE (op0);
7756 int inside_int = INTEGRAL_TYPE_P (inside_type);
7757 int inside_ptr = POINTER_TYPE_P (inside_type);
7758 int inside_float = FLOAT_TYPE_P (inside_type);
7759 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7760 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7761 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7762 int inter_int = INTEGRAL_TYPE_P (inter_type);
7763 int inter_ptr = POINTER_TYPE_P (inter_type);
7764 int inter_float = FLOAT_TYPE_P (inter_type);
7765 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7766 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7767 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7768 int final_int = INTEGRAL_TYPE_P (type);
7769 int final_ptr = POINTER_TYPE_P (type);
7770 int final_float = FLOAT_TYPE_P (type);
7771 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7772 unsigned int final_prec = TYPE_PRECISION (type);
7773 int final_unsignedp = TYPE_UNSIGNED (type);
7775 /* In addition to the cases of two conversions in a row
7776 handled below, if we are converting something to its own
7777 type via an object of identical or wider precision, neither
7778 conversion is needed. */
7779 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7780 && (((inter_int || inter_ptr) && final_int)
7781 || (inter_float && final_float))
7782 && inter_prec >= final_prec)
7783 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7785 /* Likewise, if the intermediate and initial types are either both
7786 float or both integer, we don't need the middle conversion if the
7787 former is wider than the latter and doesn't change the signedness
7788 (for integers). Avoid this if the final type is a pointer since
7789 then we sometimes need the middle conversion. Likewise if the
7790 final type has a precision not equal to the size of its mode. */
7791 if (((inter_int && inside_int)
7792 || (inter_float && inside_float)
7793 || (inter_vec && inside_vec))
7794 && inter_prec >= inside_prec
7795 && (inter_float || inter_vec
7796 || inter_unsignedp == inside_unsignedp)
7797 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7798 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7799 && ! final_ptr
7800 && (! final_vec || inter_prec == inside_prec))
7801 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7803 /* If we have a sign-extension of a zero-extended value, we can
7804 replace that by a single zero-extension. Likewise if the
7805 final conversion does not change precision we can drop the
7806 intermediate conversion. */
7807 if (inside_int && inter_int && final_int
7808 && ((inside_prec < inter_prec && inter_prec < final_prec
7809 && inside_unsignedp && !inter_unsignedp)
7810 || final_prec == inter_prec))
7811 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7813 /* Two conversions in a row are not needed unless:
7814 - some conversion is floating-point (overstrict for now), or
7815 - some conversion is a vector (overstrict for now), or
7816 - the intermediate type is narrower than both initial and
7817 final, or
7818 - the intermediate type and innermost type differ in signedness,
7819 and the outermost type is wider than the intermediate, or
7820 - the initial type is a pointer type and the precisions of the
7821 intermediate and final types differ, or
7822 - the final type is a pointer type and the precisions of the
7823 initial and intermediate types differ. */
7824 if (! inside_float && ! inter_float && ! final_float
7825 && ! inside_vec && ! inter_vec && ! final_vec
7826 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7827 && ! (inside_int && inter_int
7828 && inter_unsignedp != inside_unsignedp
7829 && inter_prec < final_prec)
7830 && ((inter_unsignedp && inter_prec > inside_prec)
7831 == (final_unsignedp && final_prec > inter_prec))
7832 && ! (inside_ptr && inter_prec != final_prec)
7833 && ! (final_ptr && inside_prec != inter_prec)
7834 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7835 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7836 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
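/* For example (illustrative only): in (char) (int) c, for C of type
   char, the rules above drop the widening to int entirely; but in
   (int) (float) i, for I of type int, the intermediate float
   conversion must stay, since that step can change the value.  */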
7839 /* Handle (T *)&A.B.C for A being of type T and B and C
7840 living at offset zero. This occurs frequently in
7841 C++ upcasting and then accessing the base. */
7842 if (TREE_CODE (op0) == ADDR_EXPR
7843 && POINTER_TYPE_P (type)
7844 && handled_component_p (TREE_OPERAND (op0, 0)))
7846 HOST_WIDE_INT bitsize, bitpos;
7847 tree offset;
7848 enum machine_mode mode;
7849 int unsignedp, volatilep;
7850 tree base = TREE_OPERAND (op0, 0);
7851 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7852 &mode, &unsignedp, &volatilep, false);
7853 /* If the reference was to a (constant) zero offset, we can use
7854 the address of the base if it has the same base type
7855 as the result type and the pointer type is unqualified. */
7856 if (! offset && bitpos == 0
7857 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7858 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7859 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7860 return fold_convert_loc (loc, type,
7861 build_fold_addr_expr_loc (loc, base));
7864 if (TREE_CODE (op0) == MODIFY_EXPR
7865 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7866 /* Detect assigning a bitfield. */
7867 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7868 && DECL_BIT_FIELD
7869 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7871 /* Don't leave an assignment inside a conversion
7872 unless assigning a bitfield. */
7873 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7874 /* First do the assignment, then return converted constant. */
7875 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7876 TREE_NO_WARNING (tem) = 1;
7877 TREE_USED (tem) = 1;
7878 return tem;
7881 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7882 constant (if x has signed type, the sign bit cannot be set
7883 in c). This folds extension into the BIT_AND_EXPR.
7884 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7885 very likely don't have maximal range for their precision and this
7886 transformation effectively doesn't preserve non-maximal ranges. */
7887 if (TREE_CODE (type) == INTEGER_TYPE
7888 && TREE_CODE (op0) == BIT_AND_EXPR
7889 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7891 tree and_expr = op0;
7892 tree and0 = TREE_OPERAND (and_expr, 0);
7893 tree and1 = TREE_OPERAND (and_expr, 1);
7894 int change = 0;
7896 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7897 || (TYPE_PRECISION (type)
7898 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7899 change = 1;
7900 else if (TYPE_PRECISION (TREE_TYPE (and1))
7901 <= HOST_BITS_PER_WIDE_INT
7902 && host_integerp (and1, 1))
7904 unsigned HOST_WIDE_INT cst;
7906 cst = tree_low_cst (and1, 1);
7907 cst &= (HOST_WIDE_INT) -1
7908 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7909 change = (cst == 0);
7910 #ifdef LOAD_EXTEND_OP
7911 if (change
7912 && !flag_syntax_only
7913 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7914 == ZERO_EXTEND))
7916 tree uns = unsigned_type_for (TREE_TYPE (and0));
7917 and0 = fold_convert_loc (loc, uns, and0);
7918 and1 = fold_convert_loc (loc, uns, and1);
7920 #endif
7922 if (change)
7924 tem = force_fit_type_double (type, tree_to_double_int (and1),
7925 0, TREE_OVERFLOW (and1));
7926 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7927 fold_convert_loc (loc, type, and0), tem);
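/* For example (illustrative only): for US of type unsigned short,
   (unsigned int) (US & 0x7fff) becomes (unsigned int) US & 0x7fff,
   folding the extension into the BIT_AND_EXPR as described above.  */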
7931 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7932 when one of the new casts will fold away. Conservatively we assume
7933 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7934 if (POINTER_TYPE_P (type)
7935 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7936 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7937 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7938 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7939 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7941 tree arg00 = TREE_OPERAND (arg0, 0);
7942 tree arg01 = TREE_OPERAND (arg0, 1);
7944 return fold_build_pointer_plus_loc
7945 (loc, fold_convert_loc (loc, type, arg00), arg01);
7948 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7949 of the same precision, and X is of an integer type not narrower
7950 than T1 or T2, i.e. the cast (T2)X isn't an extension. */
7951 if (INTEGRAL_TYPE_P (type)
7952 && TREE_CODE (op0) == BIT_NOT_EXPR
7953 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7954 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7955 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7957 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7958 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7959 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7960 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7961 fold_convert_loc (loc, type, tem));
7964 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7965 type of X and Y (integer types only). */
7966 if (INTEGRAL_TYPE_P (type)
7967 && TREE_CODE (op0) == MULT_EXPR
7968 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7969 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7971 /* Be careful not to introduce new overflows. */
7972 tree mult_type;
7973 if (TYPE_OVERFLOW_WRAPS (type))
7974 mult_type = type;
7975 else
7976 mult_type = unsigned_type_for (type);
7978 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7980 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7981 fold_convert_loc (loc, mult_type,
7982 TREE_OPERAND (op0, 0)),
7983 fold_convert_loc (loc, mult_type,
7984 TREE_OPERAND (op0, 1)));
7985 return fold_convert_loc (loc, type, tem);
7989 tem = fold_convert_const (code, type, op0);
7990 return tem ? tem : NULL_TREE;
7992 case ADDR_SPACE_CONVERT_EXPR:
7993 if (integer_zerop (arg0))
7994 return fold_convert_const (code, type, arg0);
7995 return NULL_TREE;
7997 case FIXED_CONVERT_EXPR:
7998 tem = fold_convert_const (code, type, arg0);
7999 return tem ? tem : NULL_TREE;
8001 case VIEW_CONVERT_EXPR:
8002 if (TREE_TYPE (op0) == type)
8003 return op0;
8004 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8005 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8006 type, TREE_OPERAND (op0, 0));
8007 if (TREE_CODE (op0) == MEM_REF)
8008 return fold_build2_loc (loc, MEM_REF, type,
8009 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8011 /* For integral conversions with the same precision or pointer
8012 conversions use a NOP_EXPR instead. */
8013 if ((INTEGRAL_TYPE_P (type)
8014 || POINTER_TYPE_P (type))
8015 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8016 || POINTER_TYPE_P (TREE_TYPE (op0)))
8017 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8018 return fold_convert_loc (loc, type, op0);
8020 /* Strip inner integral conversions that do not change the precision. */
8021 if (CONVERT_EXPR_P (op0)
8022 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8023 || POINTER_TYPE_P (TREE_TYPE (op0)))
8024 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8025 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8026 && (TYPE_PRECISION (TREE_TYPE (op0))
8027 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8028 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8029 type, TREE_OPERAND (op0, 0));
8031 return fold_view_convert_expr (type, op0);
8033 case NEGATE_EXPR:
8034 tem = fold_negate_expr (loc, arg0);
8035 if (tem)
8036 return fold_convert_loc (loc, type, tem);
8037 return NULL_TREE;
8039 case ABS_EXPR:
8040 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8041 return fold_abs_const (arg0, type);
8042 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8043 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8044 /* Convert fabs((double)float) into (double)fabsf(float). */
8045 else if (TREE_CODE (arg0) == NOP_EXPR
8046 && TREE_CODE (type) == REAL_TYPE)
8048 tree targ0 = strip_float_extensions (arg0);
8049 if (targ0 != arg0)
8050 return fold_convert_loc (loc, type,
8051 fold_build1_loc (loc, ABS_EXPR,
8052 TREE_TYPE (targ0),
8053 targ0));
8055 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8056 else if (TREE_CODE (arg0) == ABS_EXPR)
8057 return arg0;
8058 else if (tree_expr_nonnegative_p (arg0))
8059 return arg0;
8061 /* Strip sign ops from argument. */
8062 if (TREE_CODE (type) == REAL_TYPE)
8064 tem = fold_strip_sign_ops (arg0);
8065 if (tem)
8066 return fold_build1_loc (loc, ABS_EXPR, type,
8067 fold_convert_loc (loc, type, tem));
8069 return NULL_TREE;
8071 case CONJ_EXPR:
8072 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8073 return fold_convert_loc (loc, type, arg0);
8074 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8076 tree itype = TREE_TYPE (type);
8077 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8078 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8079 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8080 negate_expr (ipart));
8082 if (TREE_CODE (arg0) == COMPLEX_CST)
8084 tree itype = TREE_TYPE (type);
8085 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8086 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8087 return build_complex (type, rpart, negate_expr (ipart));
8089 if (TREE_CODE (arg0) == CONJ_EXPR)
8090 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8091 return NULL_TREE;
8093 case BIT_NOT_EXPR:
8094 if (TREE_CODE (arg0) == INTEGER_CST)
8095 return fold_not_const (arg0, type);
8096 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8097 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8098 /* Convert ~ (-A) to A - 1. */
8099 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8100 return fold_build2_loc (loc, MINUS_EXPR, type,
8101 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8102 build_int_cst (type, 1));
8103 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8104 else if (INTEGRAL_TYPE_P (type)
8105 && ((TREE_CODE (arg0) == MINUS_EXPR
8106 && integer_onep (TREE_OPERAND (arg0, 1)))
8107 || (TREE_CODE (arg0) == PLUS_EXPR
8108 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8109 return fold_build1_loc (loc, NEGATE_EXPR, type,
8110 fold_convert_loc (loc, type,
8111 TREE_OPERAND (arg0, 0)));
8112 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8113 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8114 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8115 fold_convert_loc (loc, type,
8116 TREE_OPERAND (arg0, 0)))))
8117 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8118 fold_convert_loc (loc, type,
8119 TREE_OPERAND (arg0, 1)));
8120 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8121 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8122 fold_convert_loc (loc, type,
8123 TREE_OPERAND (arg0, 1)))))
8124 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8125 fold_convert_loc (loc, type,
8126 TREE_OPERAND (arg0, 0)), tem);
8127 /* Perform BIT_NOT_EXPR on each element individually. */
8128 else if (TREE_CODE (arg0) == VECTOR_CST)
8130 tree *elements;
8131 tree elem;
8132 unsigned count = VECTOR_CST_NELTS (arg0), i;
8134 elements = XALLOCAVEC (tree, count);
8135 for (i = 0; i < count; i++)
8137 elem = VECTOR_CST_ELT (arg0, i);
8138 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8139 if (elem == NULL_TREE)
8140 break;
8141 elements[i] = elem;
8143 if (i == count)
8144 return build_vector (type, elements);
8147 return NULL_TREE;
8149 case TRUTH_NOT_EXPR:
8150 /* The argument to invert_truthvalue must have Boolean type. */
8151 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8152 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8154 /* Note that the operand of this must be an int
8155 and its values must be 0 or 1.
8156 ("true" is a fixed value perhaps depending on the language,
8157 but we don't handle values other than 1 correctly yet.) */
8158 tem = fold_truth_not_expr (loc, arg0);
8159 if (!tem)
8160 return NULL_TREE;
8161 return fold_convert_loc (loc, type, tem);
8163 case REALPART_EXPR:
8164 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8165 return fold_convert_loc (loc, type, arg0);
8166 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8167 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8168 TREE_OPERAND (arg0, 1));
8169 if (TREE_CODE (arg0) == COMPLEX_CST)
8170 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8171 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8173 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8174 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8175 fold_build1_loc (loc, REALPART_EXPR, itype,
8176 TREE_OPERAND (arg0, 0)),
8177 fold_build1_loc (loc, REALPART_EXPR, itype,
8178 TREE_OPERAND (arg0, 1)));
8179 return fold_convert_loc (loc, type, tem);
8181 if (TREE_CODE (arg0) == CONJ_EXPR)
8183 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8184 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8185 TREE_OPERAND (arg0, 0));
8186 return fold_convert_loc (loc, type, tem);
8188 if (TREE_CODE (arg0) == CALL_EXPR)
8190 tree fn = get_callee_fndecl (arg0);
8191 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8192 switch (DECL_FUNCTION_CODE (fn))
8194 CASE_FLT_FN (BUILT_IN_CEXPI):
8195 fn = mathfn_built_in (type, BUILT_IN_COS);
8196 if (fn)
8197 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8198 break;
8200 default:
8201 break;
8204 return NULL_TREE;
8206 case IMAGPART_EXPR:
8207 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8208 return build_zero_cst (type);
8209 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8210 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8211 TREE_OPERAND (arg0, 0));
8212 if (TREE_CODE (arg0) == COMPLEX_CST)
8213 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8214 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8216 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8217 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8218 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8219 TREE_OPERAND (arg0, 0)),
8220 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8221 TREE_OPERAND (arg0, 1)));
8222 return fold_convert_loc (loc, type, tem);
8224 if (TREE_CODE (arg0) == CONJ_EXPR)
8226 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8227 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8228 return fold_convert_loc (loc, type, negate_expr (tem));
8230 if (TREE_CODE (arg0) == CALL_EXPR)
8232 tree fn = get_callee_fndecl (arg0);
8233 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8234 switch (DECL_FUNCTION_CODE (fn))
8236 CASE_FLT_FN (BUILT_IN_CEXPI):
8237 fn = mathfn_built_in (type, BUILT_IN_SIN);
8238 if (fn)
8239 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8240 break;
8242 default:
8243 break;
8246 return NULL_TREE;
8248 case INDIRECT_REF:
8249 /* Fold *&X to X if X is an lvalue. */
8250 if (TREE_CODE (op0) == ADDR_EXPR)
8252 tree op00 = TREE_OPERAND (op0, 0);
8253 if ((TREE_CODE (op00) == VAR_DECL
8254 || TREE_CODE (op00) == PARM_DECL
8255 || TREE_CODE (op00) == RESULT_DECL)
8256 && !TREE_READONLY (op00))
8257 return op00;
8259 return NULL_TREE;
8261 case VEC_UNPACK_LO_EXPR:
8262 case VEC_UNPACK_HI_EXPR:
8263 case VEC_UNPACK_FLOAT_LO_EXPR:
8264 case VEC_UNPACK_FLOAT_HI_EXPR:
8266 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8267 tree *elts;
8268 enum tree_code subcode;
8270 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8271 if (TREE_CODE (arg0) != VECTOR_CST)
8272 return NULL_TREE;
8274 elts = XALLOCAVEC (tree, nelts * 2);
8275 if (!vec_cst_ctor_to_array (arg0, elts))
8276 return NULL_TREE;
8278 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8279 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8280 elts += nelts;
8282 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8283 subcode = NOP_EXPR;
8284 else
8285 subcode = FLOAT_EXPR;
8287 for (i = 0; i < nelts; i++)
8289 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8290 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8291 return NULL_TREE;
8294 return build_vector (type, elts);
8297 default:
8298 return NULL_TREE;
8299 } /* switch (code) */
8303 /* If the operation was a conversion, do _not_ mark a resulting constant
8304 with TREE_OVERFLOW if the original constant was not. These conversions
8305 have implementation-defined behavior and retaining the TREE_OVERFLOW
8306 flag here would confuse later passes such as VRP. */
8307 tree
8308 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8309 tree type, tree op0)
8311 tree res = fold_unary_loc (loc, code, type, op0);
8312 if (res
8313 && TREE_CODE (res) == INTEGER_CST
8314 && TREE_CODE (op0) == INTEGER_CST
8315 && CONVERT_EXPR_CODE_P (code))
8316 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8318 return res;
8321 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8322 operands OP0 and OP1. LOC is the location of the resulting expression.
8323 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8324 Return the folded expression if folding is successful. Otherwise,
8325 return NULL_TREE. */
8326 static tree
8327 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8328 tree arg0, tree arg1, tree op0, tree op1)
8330 tree tem;
8332 /* We only do these simplifications if we are optimizing. */
8333 if (!optimize)
8334 return NULL_TREE;
8336 /* Check for things like (A || B) && (A || C). We can convert this
8337 to A || (B && C). Note that either operator can be any of the four
8338 truth and/or operations and the transformation will still be
8339 valid. Also note that we only care about order for the
8340 ANDIF and ORIF operators. If B contains side effects, this
8341 might change the truth-value of A. */
8342 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8343 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8344 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8345 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8346 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8347 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8349 tree a00 = TREE_OPERAND (arg0, 0);
8350 tree a01 = TREE_OPERAND (arg0, 1);
8351 tree a10 = TREE_OPERAND (arg1, 0);
8352 tree a11 = TREE_OPERAND (arg1, 1);
8353 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8354 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8355 && (code == TRUTH_AND_EXPR
8356 || code == TRUTH_OR_EXPR));
8358 if (operand_equal_p (a00, a10, 0))
8359 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8360 fold_build2_loc (loc, code, type, a01, a11));
8361 else if (commutative && operand_equal_p (a00, a11, 0))
8362 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8363 fold_build2_loc (loc, code, type, a01, a10));
8364 else if (commutative && operand_equal_p (a01, a10, 0))
8365 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8366 fold_build2_loc (loc, code, type, a00, a11));
8368 /* This case is tricky because we must either have commutative
8369 operators or else A10 must not have side-effects. */
8371 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8372 && operand_equal_p (a01, a11, 0))
8373 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8374 fold_build2_loc (loc, code, type, a00, a10),
8375 a01);
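/* For example (illustrative only): (a || b) && (a || c) becomes
   a || (b && c) by the rules above, provided B has no side effects;
   the transformation changes under which conditions B is
   evaluated.  */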
8378 /* See if we can build a range comparison. */
8379 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8380 return tem;
8382 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8383 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8385 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8386 if (tem)
8387 return fold_build2_loc (loc, code, type, tem, arg1);
8390 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8391 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8393 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8394 if (tem)
8395 return fold_build2_loc (loc, code, type, arg0, tem);
8398 /* Check for the possibility of merging component references. If our
8399 lhs is another similar operation, try to merge its rhs with our
8400 rhs. Then try to merge our lhs and rhs. */
8401 if (TREE_CODE (arg0) == code
8402 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8403 TREE_OPERAND (arg0, 1), arg1)))
8404 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8406 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8407 return tem;
8409 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8410 && (code == TRUTH_AND_EXPR
8411 || code == TRUTH_ANDIF_EXPR
8412 || code == TRUTH_OR_EXPR
8413 || code == TRUTH_ORIF_EXPR))
8415 enum tree_code ncode, icode;
8417 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8418 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8419 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8421 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8422 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8423 We don't want to pack more than two leaves into a non-IF AND/OR
8424 expression.
8425 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8426 is not equal to IF-CODE, then we don't want to add the right-hand operand.
8427 If the inner right-hand side of the left-hand operand has
8428 side-effects, or isn't simple, then we can't add to it,
8429 as we might otherwise destroy the if-sequence. */
8430 if (TREE_CODE (arg0) == icode
8431 && simple_operand_p_2 (arg1)
8432 /* Needed for sequence points to handle trapping and
8433 side-effects. */
8434 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8436 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8437 arg1);
8438 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8439 tem);
8441 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8442 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8443 else if (TREE_CODE (arg1) == icode
8444 && simple_operand_p_2 (arg0)
8445 /* Needed for sequence points to handle trapping and
8446 side-effects. */
8447 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8449 tem = fold_build2_loc (loc, ncode, type,
8450 arg0, TREE_OPERAND (arg1, 0));
8451 return fold_build2_loc (loc, icode, type, tem,
8452 TREE_OPERAND (arg1, 1));
8454 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8455 into (A OR B).
8456 For sequence point consistency, we need to check for trapping
8457 and side-effects. */
8458 else if (code == icode && simple_operand_p_2 (arg0)
8459 && simple_operand_p_2 (arg1))
8460 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8463 return NULL_TREE;
8466 /* Fold a binary expression of code CODE and type TYPE with operands
8467 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8468 Return the folded expression if folding is successful. Otherwise,
8469 return NULL_TREE. */
8471 static tree
8472 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8474 enum tree_code compl_code;
8476 if (code == MIN_EXPR)
8477 compl_code = MAX_EXPR;
8478 else if (code == MAX_EXPR)
8479 compl_code = MIN_EXPR;
8480 else
8481 gcc_unreachable ();
8483 /* MIN (MAX (a, b), b) == b. */
8484 if (TREE_CODE (op0) == compl_code
8485 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8486 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8488 /* MIN (MAX (b, a), b) == b. */
8489 if (TREE_CODE (op0) == compl_code
8490 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8491 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8492 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8494 /* MIN (a, MAX (a, b)) == a. */
8495 if (TREE_CODE (op1) == compl_code
8496 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8497 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8498 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8500 /* MIN (a, MAX (b, a)) == a. */
8501 if (TREE_CODE (op1) == compl_code
8502 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8503 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8504 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8506 return NULL_TREE;
8509 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8510 by changing CODE to reduce the magnitude of constants involved in
8511 ARG0 of the comparison.
8512 Returns a canonicalized comparison tree if a simplification was
8513 possible, otherwise returns NULL_TREE.
8514 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8515 valid if signed overflow is undefined. */
8517 static tree
8518 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8519 tree arg0, tree arg1,
8520 bool *strict_overflow_p)
8522 enum tree_code code0 = TREE_CODE (arg0);
8523 tree t, cst0 = NULL_TREE;
8524 int sgn0;
8525 bool swap = false;
8527 /* Match A +- CST code arg1 and CST code arg1. We can change the
8528 first form only if overflow is undefined. */
8529 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8530 /* In principle pointers also have undefined overflow behavior,
8531 but that causes problems elsewhere. */
8532 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8533 && (code0 == MINUS_EXPR
8534 || code0 == PLUS_EXPR)
8535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8536 || code0 == INTEGER_CST))
8537 return NULL_TREE;
8539 /* Identify the constant in arg0 and its sign. */
8540 if (code0 == INTEGER_CST)
8541 cst0 = arg0;
8542 else
8543 cst0 = TREE_OPERAND (arg0, 1);
8544 sgn0 = tree_int_cst_sgn (cst0);
8546 /* Overflowed constants and zero will cause problems. */
8547 if (integer_zerop (cst0)
8548 || TREE_OVERFLOW (cst0))
8549 return NULL_TREE;
8551 /* See if we can reduce the magnitude of the constant in
8552 arg0 by changing the comparison code. */
8553 if (code0 == INTEGER_CST)
8555 /* CST <= arg1 -> CST-1 < arg1. */
8556 if (code == LE_EXPR && sgn0 == 1)
8557 code = LT_EXPR;
8558 /* -CST < arg1 -> -CST-1 <= arg1. */
8559 else if (code == LT_EXPR && sgn0 == -1)
8560 code = LE_EXPR;
8561 /* CST > arg1 -> CST-1 >= arg1. */
8562 else if (code == GT_EXPR && sgn0 == 1)
8563 code = GE_EXPR;
8564 /* -CST >= arg1 -> -CST-1 > arg1. */
8565 else if (code == GE_EXPR && sgn0 == -1)
8566 code = GT_EXPR;
8567 else
8568 return NULL_TREE;
8569 /* arg1 code' CST' might be more canonical. */
8570 swap = true;
8572 else
8574 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8575 if (code == LT_EXPR
8576 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8577 code = LE_EXPR;
8578 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8579 else if (code == GT_EXPR
8580 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8581 code = GE_EXPR;
8582 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8583 else if (code == LE_EXPR
8584 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8585 code = LT_EXPR;
8586 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8587 else if (code == GE_EXPR
8588 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8589 code = GT_EXPR;
8590 else
8591 return NULL_TREE;
8592 *strict_overflow_p = true;
8595 /* Now build the constant reduced in magnitude. But not if that
8596 would produce one outside of its type's range. */
8597 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8598 && ((sgn0 == 1
8599 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8600 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8601 || (sgn0 == -1
8602 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8603 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8604 /* We cannot swap the comparison here as that would cause us to
8605 endlessly recurse. */
8606 return NULL_TREE;
8608 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8609 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8610 if (code0 != INTEGER_CST)
8611 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8612 t = fold_convert (TREE_TYPE (arg1), t);
8614 /* If swapping might yield a more canonical form, do so. */
8615 if (swap)
8616 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8617 else
8618 return fold_build2_loc (loc, code, type, t, arg1);
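/* For example (illustrative only, signed X with undefined overflow):
   X + 4 <= Y becomes X + 3 < Y, and the constant-only form 3 <= Y
   first becomes 2 < Y and is then swapped to the more canonical
   Y > 2.  */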
8621 /* Canonicalize the comparison ARG0 CODE ARG1 of type TYPE (which has
8622 undefined overflow) further. Try to decrease the magnitude of constants involved
8623 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8624 and put sole constants at the second argument position.
8625 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8627 static tree
8628 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8629 tree arg0, tree arg1)
8631 tree t;
8632 bool strict_overflow_p;
8633 const char * const warnmsg = G_("assuming signed overflow does not occur "
8634 "when reducing constant in comparison");
8636 /* Try canonicalization by simplifying arg0. */
8637 strict_overflow_p = false;
8638 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8639 &strict_overflow_p);
8640 if (t)
8642 if (strict_overflow_p)
8643 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8644 return t;
8647 /* Try canonicalization by simplifying arg1 using the swapped
8648 comparison. */
8649 code = swap_tree_comparison (code);
8650 strict_overflow_p = false;
8651 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8652 &strict_overflow_p);
8653 if (t && strict_overflow_p)
8654 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8655 return t;
8658 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8659 space. This is used to avoid issuing overflow warnings for
8660 expressions like &p->x which cannot wrap. */
8662 static bool
8663 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8665 double_int di_offset, total;
8667 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8668 return true;
8670 if (bitpos < 0)
8671 return true;
8673 if (offset == NULL_TREE)
8674 di_offset = double_int_zero;
8675 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8676 return true;
8677 else
8678 di_offset = TREE_INT_CST (offset);
8680 bool overflow;
8681 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8682 total = di_offset.add_with_sign (units, true, &overflow);
8683 if (overflow)
8684 return true;
8686 if (total.high != 0)
8687 return true;
8689 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8690 if (size <= 0)
8691 return true;
8693 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8694 array. */
8695 if (TREE_CODE (base) == ADDR_EXPR)
8697 HOST_WIDE_INT base_size;
8699 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8700 if (base_size > 0 && size < base_size)
8701 size = base_size;
8704 return total.low > (unsigned HOST_WIDE_INT) size;
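/* For example (illustrative only): a total byte offset of 5 into a
   4-byte object is judged as possibly wrapping, while an offset of 3
   is accepted as safe.  */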
8707 /* Subroutine of fold_binary. This routine performs all of the
8708 transformations that are common to the equality/inequality
8709 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8710 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8711 fold_binary itself should use fold_binary instead. Fold a comparison with
8712 tree code CODE and type TYPE with operands OP0 and OP1. Return
8713 the folded comparison or NULL_TREE. */
8715 static tree
8716 fold_comparison (location_t loc, enum tree_code code, tree type,
8717 tree op0, tree op1)
8719 tree arg0, arg1, tem;
8721 arg0 = op0;
8722 arg1 = op1;
8724 STRIP_SIGN_NOPS (arg0);
8725 STRIP_SIGN_NOPS (arg1);
8727 tem = fold_relational_const (code, type, arg0, arg1);
8728 if (tem != NULL_TREE)
8729 return tem;
8731 /* If one arg is a real or integer constant, put it last. */
8732 if (tree_swap_operands_p (arg0, arg1, true))
8733 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8735 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8736 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8737 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8738 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8739 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8740 && (TREE_CODE (arg1) == INTEGER_CST
8741 && !TREE_OVERFLOW (arg1)))
8743 tree const1 = TREE_OPERAND (arg0, 1);
8744 tree const2 = arg1;
8745 tree variable = TREE_OPERAND (arg0, 0);
8746 tree lhs;
8747 int lhs_add;
8748 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8750 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8751 TREE_TYPE (arg1), const2, const1);
8753 /* If the constant operation overflowed, this can be
8754 simplified as a comparison against INT_MAX/INT_MIN. */
8755 if (TREE_CODE (lhs) == INTEGER_CST
8756 && TREE_OVERFLOW (lhs))
8758 int const1_sgn = tree_int_cst_sgn (const1);
8759 enum tree_code code2 = code;
8761 /* Get the sign of the constant on the lhs if the
8762 operation were VARIABLE + CONST1. */
8763 if (TREE_CODE (arg0) == MINUS_EXPR)
8764 const1_sgn = -const1_sgn;
8766 /* The sign of the constant determines if we overflowed
8767 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8768 Canonicalize to the INT_MIN overflow by swapping the comparison
8769 if necessary. */
8770 if (const1_sgn == -1)
8771 code2 = swap_tree_comparison (code);
8773 /* We now can look at the canonicalized case
8774 VARIABLE + 1 CODE2 INT_MIN
8775 and decide on the result. */
8776 if (code2 == LT_EXPR
8777 || code2 == LE_EXPR
8778 || code2 == EQ_EXPR)
8779 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8780 else if (code2 == NE_EXPR
8781 || code2 == GE_EXPR
8782 || code2 == GT_EXPR)
8783 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8786 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8787 && (TREE_CODE (lhs) != INTEGER_CST
8788 || !TREE_OVERFLOW (lhs)))
8790 if (code != EQ_EXPR && code != NE_EXPR)
8791 fold_overflow_warning ("assuming signed overflow does not occur "
8792 "when changing X +- C1 cmp C2 to "
8793 "X cmp C1 +- C2",
8794 WARN_STRICT_OVERFLOW_COMPARISON);
8795 return fold_build2_loc (loc, code, type, variable, lhs);
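/* For example (illustrative only, signed X): X + 2 < 5 becomes
   X < 3 by the transformation above, and when the moved constant
   overflows, as in X - 1 > INT_MAX, the comparison folds to constant
   false.  */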
8799 /* For comparisons of pointers we can decompose them into a compile-time
8800 comparison of the base objects and the offsets into the object.
8801 This requires at least one operand being an ADDR_EXPR or a
8802 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8803 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8804 && (TREE_CODE (arg0) == ADDR_EXPR
8805 || TREE_CODE (arg1) == ADDR_EXPR
8806 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8807 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8809 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8810 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8811 enum machine_mode mode;
8812 int volatilep, unsignedp;
8813 bool indirect_base0 = false, indirect_base1 = false;
8815 /* Get base and offset for the access. Strip ADDR_EXPR for
8816 get_inner_reference, but put it back by stripping INDIRECT_REF
8817 off the base object if possible. indirect_baseN will be true
8818 if baseN is not an address but refers to the object itself. */
8819 base0 = arg0;
8820 if (TREE_CODE (arg0) == ADDR_EXPR)
8822 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8823 &bitsize, &bitpos0, &offset0, &mode,
8824 &unsignedp, &volatilep, false);
8825 if (TREE_CODE (base0) == INDIRECT_REF)
8826 base0 = TREE_OPERAND (base0, 0);
8827 else
8828 indirect_base0 = true;
8830 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8832 base0 = TREE_OPERAND (arg0, 0);
8833 STRIP_SIGN_NOPS (base0);
8834 if (TREE_CODE (base0) == ADDR_EXPR)
8836 base0 = TREE_OPERAND (base0, 0);
8837 indirect_base0 = true;
8839 offset0 = TREE_OPERAND (arg0, 1);
8840 if (host_integerp (offset0, 0))
8842 HOST_WIDE_INT off = size_low_cst (offset0);
8843 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8844 * BITS_PER_UNIT)
8845 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8847 bitpos0 = off * BITS_PER_UNIT;
8848 offset0 = NULL_TREE;
8853 base1 = arg1;
8854 if (TREE_CODE (arg1) == ADDR_EXPR)
8856 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8857 &bitsize, &bitpos1, &offset1, &mode,
8858 &unsignedp, &volatilep, false);
8859 if (TREE_CODE (base1) == INDIRECT_REF)
8860 base1 = TREE_OPERAND (base1, 0);
8861 else
8862 indirect_base1 = true;
8864 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8866 base1 = TREE_OPERAND (arg1, 0);
8867 STRIP_SIGN_NOPS (base1);
8868 if (TREE_CODE (base1) == ADDR_EXPR)
8870 base1 = TREE_OPERAND (base1, 0);
8871 indirect_base1 = true;
8873 offset1 = TREE_OPERAND (arg1, 1);
8874 if (host_integerp (offset1, 0))
8876 HOST_WIDE_INT off = size_low_cst (offset1);
8877 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8878 * BITS_PER_UNIT)
8879 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8881 bitpos1 = off * BITS_PER_UNIT;
8882 offset1 = NULL_TREE;
8887 /* A local variable can never be pointed to by
8888 the default SSA name of an incoming parameter. */
8889 if ((TREE_CODE (arg0) == ADDR_EXPR
8890 && indirect_base0
8891 && TREE_CODE (base0) == VAR_DECL
8892 && auto_var_in_fn_p (base0, current_function_decl)
8893 && !indirect_base1
8894 && TREE_CODE (base1) == SSA_NAME
8895 && SSA_NAME_IS_DEFAULT_DEF (base1)
8896 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8897 || (TREE_CODE (arg1) == ADDR_EXPR
8898 && indirect_base1
8899 && TREE_CODE (base1) == VAR_DECL
8900 && auto_var_in_fn_p (base1, current_function_decl)
8901 && !indirect_base0
8902 && TREE_CODE (base0) == SSA_NAME
8903 && SSA_NAME_IS_DEFAULT_DEF (base0)
8904 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8906 if (code == NE_EXPR)
8907 return constant_boolean_node (1, type);
8908 else if (code == EQ_EXPR)
8909 return constant_boolean_node (0, type);
8911 /* If we have equivalent bases we might be able to simplify. */
8912 else if (indirect_base0 == indirect_base1
8913 && operand_equal_p (base0, base1, 0))
8915 /* We can fold this expression to a constant if the non-constant
8916 offset parts are equal. */
8917 if ((offset0 == offset1
8918 || (offset0 && offset1
8919 && operand_equal_p (offset0, offset1, 0)))
8920 && (code == EQ_EXPR
8921 || code == NE_EXPR
8922 || (indirect_base0 && DECL_P (base0))
8923 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8926 if (code != EQ_EXPR
8927 && code != NE_EXPR
8928 && bitpos0 != bitpos1
8929 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8930 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8931 fold_overflow_warning (("assuming pointer wraparound does not "
8932 "occur when comparing P +- C1 with "
8933 "P +- C2"),
8934 WARN_STRICT_OVERFLOW_CONDITIONAL);
8936 switch (code)
8938 case EQ_EXPR:
8939 return constant_boolean_node (bitpos0 == bitpos1, type);
8940 case NE_EXPR:
8941 return constant_boolean_node (bitpos0 != bitpos1, type);
8942 case LT_EXPR:
8943 return constant_boolean_node (bitpos0 < bitpos1, type);
8944 case LE_EXPR:
8945 return constant_boolean_node (bitpos0 <= bitpos1, type);
8946 case GE_EXPR:
8947 return constant_boolean_node (bitpos0 >= bitpos1, type);
8948 case GT_EXPR:
8949 return constant_boolean_node (bitpos0 > bitpos1, type);
8950 default:;
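      /* Illustrative sketch (assumes 32-bit int): with "int a[8];" the
         comparison "&a[1] < &a[3]" reaches the switch above with a
         common base and bitpos0 = 32, bitpos1 = 96, folding to 1.  */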
8953 /* We can simplify the comparison to a comparison of the variable
8954 offset parts if the constant offset parts are equal.
8955 Be careful to use signed size type here because otherwise we
8956 mess with array offsets in the wrong way. This is possible
8957 because pointer arithmetic is restricted to remain within an
8958 object and overflow on pointer differences is undefined as of
8959 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8960 else if (bitpos0 == bitpos1
8961 && ((code == EQ_EXPR || code == NE_EXPR)
8962 || (indirect_base0 && DECL_P (base0))
8963 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8965 /* By converting to signed size type we cover middle-end pointer
8966 arithmetic which operates on unsigned pointer types of size
8967 type size and ARRAY_REF offsets which are properly sign or
8968 zero extended from their type in case it is narrower than
8969 size type. */
8970 if (offset0 == NULL_TREE)
8971 offset0 = build_int_cst (ssizetype, 0);
8972 else
8973 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8974 if (offset1 == NULL_TREE)
8975 offset1 = build_int_cst (ssizetype, 0);
8976 else
8977 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8979 if (code != EQ_EXPR
8980 && code != NE_EXPR
8981 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8982 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8983 fold_overflow_warning (("assuming pointer wraparound does not "
8984 "occur when comparing P +- C1 with "
8985 "P +- C2"),
8986 WARN_STRICT_OVERFLOW_COMPARISON);
8988 return fold_build2_loc (loc, code, type, offset0, offset1);
8991 /* For non-equal bases we can simplify if they are addresses
8992 of local binding decls or constants. */
8993 else if (indirect_base0 && indirect_base1
8994 /* We know that !operand_equal_p (base0, base1, 0)
8995 because the if condition was false. But make
8996 sure two decls are not the same. */
8997 && base0 != base1
8998 && TREE_CODE (arg0) == ADDR_EXPR
8999 && TREE_CODE (arg1) == ADDR_EXPR
9000 && (((TREE_CODE (base0) == VAR_DECL
9001 || TREE_CODE (base0) == PARM_DECL)
9002 && (targetm.binds_local_p (base0)
9003 || CONSTANT_CLASS_P (base1)))
9004 || CONSTANT_CLASS_P (base0))
9005 && (((TREE_CODE (base1) == VAR_DECL
9006 || TREE_CODE (base1) == PARM_DECL)
9007 && (targetm.binds_local_p (base1)
9008 || CONSTANT_CLASS_P (base0)))
9009 || CONSTANT_CLASS_P (base1)))
9011 if (code == EQ_EXPR)
9012 return omit_two_operands_loc (loc, type, boolean_false_node,
9013 arg0, arg1);
9014 else if (code == NE_EXPR)
9015 return omit_two_operands_loc (loc, type, boolean_true_node,
9016 arg0, arg1);
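      /* Illustrative sketch: for distinct locals "int x, y;" the
         addresses can never be equal, so "&x == &y" folds to false
         and "&x != &y" folds to true.  */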
9018 /* For equal offsets we can simplify to a comparison of the
9019 base addresses. */
9020 else if (bitpos0 == bitpos1
9021 && (indirect_base0
9022 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9023 && (indirect_base1
9024 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9025 && ((offset0 == offset1)
9026 || (offset0 && offset1
9027 && operand_equal_p (offset0, offset1, 0))))
9029 if (indirect_base0)
9030 base0 = build_fold_addr_expr_loc (loc, base0);
9031 if (indirect_base1)
9032 base1 = build_fold_addr_expr_loc (loc, base1);
9033 return fold_build2_loc (loc, code, type, base0, base1);
9037 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9038 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9039 the resulting offset is smaller in absolute value than the
9040 original one. */
9041 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9042 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9043 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9044 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9045 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9046 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9047 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9049 tree const1 = TREE_OPERAND (arg0, 1);
9050 tree const2 = TREE_OPERAND (arg1, 1);
9051 tree variable1 = TREE_OPERAND (arg0, 0);
9052 tree variable2 = TREE_OPERAND (arg1, 0);
9053 tree cst;
9054 const char * const warnmsg = G_("assuming signed overflow does not "
9055 "occur when combining constants around "
9056 "a comparison");
9058 /* Put the constant on the side where it doesn't overflow and is
9059 of lower absolute value than before. */
9060 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9061 ? MINUS_EXPR : PLUS_EXPR,
9062 const2, const1);
9063 if (!TREE_OVERFLOW (cst)
9064 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9066 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9067 return fold_build2_loc (loc, code, type,
9068 variable1,
9069 fold_build2_loc (loc,
9070 TREE_CODE (arg1), TREE_TYPE (arg1),
9071 variable2, cst));
9074 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9075 ? MINUS_EXPR : PLUS_EXPR,
9076 const1, const2);
9077 if (!TREE_OVERFLOW (cst)
9078 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9080 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9081 return fold_build2_loc (loc, code, type,
9082 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9083 variable1, cst),
9084 variable2);
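  /* Illustrative sketch (assumes signed "int x, y" where overflow is
     undefined): "x + 2 < y + 1" can fold here to "x < y - 1", moving
     the combined constant to the side where its absolute value
     shrinks.  */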
9088 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9089 signed arithmetic case. That form is created by the compiler
9090 often enough for folding it to be of value. One example is in
9091 computing loop trip counts after Operator Strength Reduction. */
9092 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9093 && TREE_CODE (arg0) == MULT_EXPR
9094 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9095 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9096 && integer_zerop (arg1))
9098 tree const1 = TREE_OPERAND (arg0, 1);
9099 tree const2 = arg1; /* zero */
9100 tree variable1 = TREE_OPERAND (arg0, 0);
9101 enum tree_code cmp_code = code;
9103 /* Handle unfolded multiplication by zero. */
9104 if (integer_zerop (const1))
9105 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9107 fold_overflow_warning (("assuming signed overflow does not occur when "
9108 "eliminating multiplication in comparison "
9109 "with zero"),
9110 WARN_STRICT_OVERFLOW_COMPARISON);
9112 /* If const1 is negative we swap the sense of the comparison. */
9113 if (tree_int_cst_sgn (const1) < 0)
9114 cmp_code = swap_tree_comparison (cmp_code);
9116 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
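  /* Illustrative sketch (assumes signed "int i"): "i * 4 > 0" folds to
     "i > 0", while "i * -4 > 0" folds to "i < 0" because the negative
     factor swaps the comparison sense.  */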
9119 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9120 if (tem)
9121 return tem;
9123 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9125 tree targ0 = strip_float_extensions (arg0);
9126 tree targ1 = strip_float_extensions (arg1);
9127 tree newtype = TREE_TYPE (targ0);
9129 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9130 newtype = TREE_TYPE (targ1);
9132 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9133 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9134 return fold_build2_loc (loc, code, type,
9135 fold_convert_loc (loc, newtype, targ0),
9136 fold_convert_loc (loc, newtype, targ1));
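      /* Illustrative sketch: for "float f, g;" the widened comparison
         "(double) f < (double) g" folds back to "f < g", since both
         operands are extensions from the same narrower type.  */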
9138 /* (-a) CMP (-b) -> b CMP a */
9139 if (TREE_CODE (arg0) == NEGATE_EXPR
9140 && TREE_CODE (arg1) == NEGATE_EXPR)
9141 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9142 TREE_OPERAND (arg0, 0));
9144 if (TREE_CODE (arg1) == REAL_CST)
9146 REAL_VALUE_TYPE cst;
9147 cst = TREE_REAL_CST (arg1);
9149 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9150 if (TREE_CODE (arg0) == NEGATE_EXPR)
9151 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9152 TREE_OPERAND (arg0, 0),
9153 build_real (TREE_TYPE (arg1),
9154 real_value_negate (&cst)));
9156 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9157 /* a CMP (-0) -> a CMP 0 */
9158 if (REAL_VALUE_MINUS_ZERO (cst))
9159 return fold_build2_loc (loc, code, type, arg0,
9160 build_real (TREE_TYPE (arg1), dconst0));
9162 /* x != NaN is always true, other ops are always false. */
9163 if (REAL_VALUE_ISNAN (cst)
9164 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9166 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9167 return omit_one_operand_loc (loc, type, tem, arg0);
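          /* Illustrative sketch (assumes quiet NaNs):
             "x == __builtin_nan ("")" folds to 0 and
             "x != __builtin_nan ("")" folds to 1, keeping x only for
             its side effects.  */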
9170 /* Fold comparisons against infinity. */
9171 if (REAL_VALUE_ISINF (cst)
9172 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9174 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9175 if (tem != NULL_TREE)
9176 return tem;
9180 /* If this is a comparison of a real constant with a PLUS_EXPR
9181 or a MINUS_EXPR of a real constant, we can convert it into a
9182 comparison with a revised real constant as long as no overflow
9183 occurs when unsafe_math_optimizations are enabled. */
9184 if (flag_unsafe_math_optimizations
9185 && TREE_CODE (arg1) == REAL_CST
9186 && (TREE_CODE (arg0) == PLUS_EXPR
9187 || TREE_CODE (arg0) == MINUS_EXPR)
9188 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9189 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9190 ? MINUS_EXPR : PLUS_EXPR,
9191 arg1, TREE_OPERAND (arg0, 1)))
9192 && !TREE_OVERFLOW (tem))
9193 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
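      /* Illustrative sketch (assumes -funsafe-math-optimizations):
         "x + 1.0 < 3.0" can fold here to "x < 2.0", because
         3.0 - 1.0 is computed exactly with no overflow.  */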
9195 /* Likewise, we can simplify a comparison of a real constant with
9196 a MINUS_EXPR whose first operand is also a real constant, i.e.
9197 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9198 floating-point types only if -fassociative-math is set. */
9199 if (flag_associative_math
9200 && TREE_CODE (arg1) == REAL_CST
9201 && TREE_CODE (arg0) == MINUS_EXPR
9202 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9203 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9204 arg1))
9205 && !TREE_OVERFLOW (tem))
9206 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9207 TREE_OPERAND (arg0, 1), tem);
9209 /* Fold comparisons against built-in math functions. */
9210 if (TREE_CODE (arg1) == REAL_CST
9211 && flag_unsafe_math_optimizations
9212 && ! flag_errno_math)
9214 enum built_in_function fcode = builtin_mathfn_code (arg0);
9216 if (fcode != END_BUILTINS)
9218 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9219 if (tem != NULL_TREE)
9220 return tem;
9225 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9226 && CONVERT_EXPR_P (arg0))
9228 /* If we are widening one operand of an integer comparison,
9229 see if the other operand is similarly being widened. Perhaps we
9230 can do the comparison in the narrower type. */
9231 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9232 if (tem)
9233 return tem;
9235 /* Or if we are changing signedness. */
9236 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9237 if (tem)
9238 return tem;
9241 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9242 constant, we can simplify it. */
9243 if (TREE_CODE (arg1) == INTEGER_CST
9244 && (TREE_CODE (arg0) == MIN_EXPR
9245 || TREE_CODE (arg0) == MAX_EXPR)
9246 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9248 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9249 if (tem)
9250 return tem;
9253 /* Simplify comparison of something with itself. (For IEEE
9254 floating-point, we can only do some of these simplifications.) */
9255 if (operand_equal_p (arg0, arg1, 0))
9257 switch (code)
9259 case EQ_EXPR:
9260 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9261 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9262 return constant_boolean_node (1, type);
9263 break;
9265 case GE_EXPR:
9266 case LE_EXPR:
9267 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9268 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9269 return constant_boolean_node (1, type);
9270 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9272 case NE_EXPR:
9273 /* For NE, we can only do this simplification if the type is
9274 integral or we don't honor IEEE floating-point NaNs. */
9275 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9276 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9277 break;
9278 /* ... fall through ... */
9279 case GT_EXPR:
9280 case LT_EXPR:
9281 return constant_boolean_node (0, type);
9282 default:
9283 gcc_unreachable ();
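      /* Illustrative sketch: for integral "x", "x == x" and "x <= x"
         fold to 1 and "x < x" folds to 0; for "double d" with NaNs
         honored, "d >= d" only folds as far as "d == d".  */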
9287 /* If we are comparing an expression that just has comparisons
9288 of two integer values, arithmetic expressions of those comparisons,
9289 and constants, we can simplify it. There are only three cases
9290 to check: the two values can either be equal, the first can be
9291 greater, or the second can be greater. Fold the expression for
9292 those three values. Since each value must be 0 or 1, we have
9293 eight possibilities, each of which corresponds to the constant 0
9294 or 1 or one of the six possible comparisons.
9296 This handles common cases like (a > b) == 0 but also handles
9297 expressions like ((x > y) - (y > x)) > 0, which supposedly
9298 occur in macroized code. */
9300 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9302 tree cval1 = 0, cval2 = 0;
9303 int save_p = 0;
9305 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9306 /* Don't handle degenerate cases here; they should already
9307 have been handled anyway. */
9308 && cval1 != 0 && cval2 != 0
9309 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9310 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9311 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9312 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9313 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9314 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9315 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9317 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9318 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9320 /* We can't just pass T to eval_subst in case cval1 or cval2
9321 was the same as ARG1. */
9323 tree high_result
9324 = fold_build2_loc (loc, code, type,
9325 eval_subst (loc, arg0, cval1, maxval,
9326 cval2, minval),
9327 arg1);
9328 tree equal_result
9329 = fold_build2_loc (loc, code, type,
9330 eval_subst (loc, arg0, cval1, maxval,
9331 cval2, maxval),
9332 arg1);
9333 tree low_result
9334 = fold_build2_loc (loc, code, type,
9335 eval_subst (loc, arg0, cval1, minval,
9336 cval2, maxval),
9337 arg1);
9339 /* All three of these results should be 0 or 1. Confirm they are.
9340 Then use those values to select the proper code to use. */
9342 if (TREE_CODE (high_result) == INTEGER_CST
9343 && TREE_CODE (equal_result) == INTEGER_CST
9344 && TREE_CODE (low_result) == INTEGER_CST)
9346 /* Make a 3-bit mask with the high-order bit being the
9347 value for `>', the next for `=', and the low for `<'. */
9348 switch ((integer_onep (high_result) * 4)
9349 + (integer_onep (equal_result) * 2)
9350 + integer_onep (low_result))
9352 case 0:
9353 /* Always false. */
9354 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9355 case 1:
9356 code = LT_EXPR;
9357 break;
9358 case 2:
9359 code = EQ_EXPR;
9360 break;
9361 case 3:
9362 code = LE_EXPR;
9363 break;
9364 case 4:
9365 code = GT_EXPR;
9366 break;
9367 case 5:
9368 code = NE_EXPR;
9369 break;
9370 case 6:
9371 code = GE_EXPR;
9372 break;
9373 case 7:
9374 /* Always true. */
9375 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9378 if (save_p)
9380 tem = save_expr (build2 (code, type, cval1, cval2));
9381 SET_EXPR_LOCATION (tem, loc);
9382 return tem;
9384 return fold_build2_loc (loc, code, type, cval1, cval2);
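      /* Illustrative sketch: for "(a > b) == 0" the three trial
         foldings yield 0 (greater), 1 (equal), 1 (less), i.e. mask 3,
         which selects LE_EXPR, so the result is "a <= b".  */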
9389 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9390 into a single range test. */
9391 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9392 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9393 && TREE_CODE (arg1) == INTEGER_CST
9394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9395 && !integer_zerop (TREE_OPERAND (arg0, 1))
9396 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9397 && !TREE_OVERFLOW (arg1))
9399 tem = fold_div_compare (loc, code, type, arg0, arg1);
9400 if (tem != NULL_TREE)
9401 return tem;
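      /* Illustrative sketch (assumes signed "x"): "x / 10 == 2" can
         fold to the range test "x >= 20 && x <= 29", and the NE form
         to its complement.  */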
9404 /* Fold ~X op ~Y as Y op X. */
9405 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9406 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9408 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9409 return fold_build2_loc (loc, code, type,
9410 fold_convert_loc (loc, cmp_type,
9411 TREE_OPERAND (arg1, 0)),
9412 TREE_OPERAND (arg0, 0));
9415 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9416 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9417 && TREE_CODE (arg1) == INTEGER_CST)
9419 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9420 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9421 TREE_OPERAND (arg0, 0),
9422 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9423 fold_convert_loc (loc, cmp_type, arg1)));
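      /* Illustrative sketch: "~x < 5" becomes "x > ~5", i.e.
         "x > -6", using the swapped comparison with the inverted
         constant.  */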
9426 return NULL_TREE;
9430 /* Subroutine of fold_binary. Optimize complex multiplications of the
9431 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9432 argument EXPR represents the expression "z" of type TYPE. */
9434 static tree
9435 fold_mult_zconjz (location_t loc, tree type, tree expr)
9437 tree itype = TREE_TYPE (type);
9438 tree rpart, ipart, tem;
9440 if (TREE_CODE (expr) == COMPLEX_EXPR)
9442 rpart = TREE_OPERAND (expr, 0);
9443 ipart = TREE_OPERAND (expr, 1);
9445 else if (TREE_CODE (expr) == COMPLEX_CST)
9447 rpart = TREE_REALPART (expr);
9448 ipart = TREE_IMAGPART (expr);
9450 else
9452 expr = save_expr (expr);
9453 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9454 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9457 rpart = save_expr (rpart);
9458 ipart = save_expr (ipart);
9459 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9460 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9461 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9462 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9463 build_zero_cst (itype));
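   /* Illustrative sketch: in GNU C, for "_Complex double z" the
      product "z * ~z" (z times its conjugate) folds this way to
      COMPLEX_EXPR <re*re + im*im, 0.0>, avoiding a full complex
      multiplication.  */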
9467 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9468 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9469 guarantees that P and N have the same least significant log2(M) bits.
9470 N is not otherwise constrained. In particular, N is not normalized to
9471 0 <= N < M as is common. In general, the precise value of P is unknown.
9472 M is chosen as large as possible such that constant N can be determined.
9474 Returns M and sets *RESIDUE to N.
9476 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9477 account. This is not always possible due to PR 35705.
9480 static unsigned HOST_WIDE_INT
9481 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9482 bool allow_func_align)
9484 enum tree_code code;
9486 *residue = 0;
9488 code = TREE_CODE (expr);
9489 if (code == ADDR_EXPR)
9491 unsigned int bitalign;
9492 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9493 *residue /= BITS_PER_UNIT;
9494 return bitalign / BITS_PER_UNIT;
9496 else if (code == POINTER_PLUS_EXPR)
9498 tree op0, op1;
9499 unsigned HOST_WIDE_INT modulus;
9500 enum tree_code inner_code;
9502 op0 = TREE_OPERAND (expr, 0);
9503 STRIP_NOPS (op0);
9504 modulus = get_pointer_modulus_and_residue (op0, residue,
9505 allow_func_align);
9507 op1 = TREE_OPERAND (expr, 1);
9508 STRIP_NOPS (op1);
9509 inner_code = TREE_CODE (op1);
9510 if (inner_code == INTEGER_CST)
9512 *residue += TREE_INT_CST_LOW (op1);
9513 return modulus;
9515 else if (inner_code == MULT_EXPR)
9517 op1 = TREE_OPERAND (op1, 1);
9518 if (TREE_CODE (op1) == INTEGER_CST)
9520 unsigned HOST_WIDE_INT align;
9522 /* Compute the greatest power-of-2 divisor of op1. */
9523 align = TREE_INT_CST_LOW (op1);
9524 align &= -align;
9526 /* If align is non-zero and less than *modulus, replace
9527 *modulus with align. If align is 0, then either op1 is 0
9528 or the greatest power-of-2 divisor of op1 doesn't fit in an
9529 unsigned HOST_WIDE_INT. In either case, no additional
9530 constraint is imposed. */
9531 if (align)
9532 modulus = MIN (modulus, align);
9534 return modulus;
9539 /* If we get here, we were unable to determine anything useful about the
9540 expression. */
9541 return 1;
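   /* Illustrative sketch (hypothetical declaration): for
        char buf[16] __attribute__ ((aligned (8)));
      the expression &buf[3] yields modulus 8 with *RESIDUE set to 3,
      i.e. the pointer is known to be 3 more than a multiple of 8.  */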
9544 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9545 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9547 static bool
9548 vec_cst_ctor_to_array (tree arg, tree *elts)
9550 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9552 if (TREE_CODE (arg) == VECTOR_CST)
9554 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9555 elts[i] = VECTOR_CST_ELT (arg, i);
9557 else if (TREE_CODE (arg) == CONSTRUCTOR)
9559 constructor_elt *elt;
9561 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9562 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9563 return false;
9564 else
9565 elts[i] = elt->value;
9567 else
9568 return false;
9569 for (; i < nelts; i++)
9570 elts[i]
9571 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9572 return true;
9575 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9576 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9577 NULL_TREE otherwise. */
9579 static tree
9580 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9582 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9583 tree *elts;
9584 bool need_ctor = false;
9586 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9587 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9588 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9589 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9590 return NULL_TREE;
9592 elts = XALLOCAVEC (tree, nelts * 3);
9593 if (!vec_cst_ctor_to_array (arg0, elts)
9594 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9595 return NULL_TREE;
9597 for (i = 0; i < nelts; i++)
9599 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9600 need_ctor = true;
9601 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9604 if (need_ctor)
9606 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9607 for (i = 0; i < nelts; i++)
9608 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9609 return build_constructor (type, v);
9611 else
9612 return build_vector (type, &elts[2 * nelts]);
9615 /* Try to fold a pointer difference of type TYPE between two address expressions of
9616 array references AREF0 and AREF1 using location LOC. Return a
9617 simplified expression for the difference or NULL_TREE. */
9619 static tree
9620 fold_addr_of_array_ref_difference (location_t loc, tree type,
9621 tree aref0, tree aref1)
9623 tree base0 = TREE_OPERAND (aref0, 0);
9624 tree base1 = TREE_OPERAND (aref1, 0);
9625 tree base_offset = build_int_cst (type, 0);
9627 /* If the bases are array references as well, recurse. If the bases
9628 are pointer indirections, compute the difference of the pointers.
9629 If the bases are equal, we are set. */
9630 if ((TREE_CODE (base0) == ARRAY_REF
9631 && TREE_CODE (base1) == ARRAY_REF
9632 && (base_offset
9633 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9634 || (INDIRECT_REF_P (base0)
9635 && INDIRECT_REF_P (base1)
9636 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9637 TREE_OPERAND (base0, 0),
9638 TREE_OPERAND (base1, 0))))
9639 || operand_equal_p (base0, base1, 0))
9641 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9642 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9643 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9644 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9645 return fold_build2_loc (loc, PLUS_EXPR, type,
9646 base_offset,
9647 fold_build2_loc (loc, MULT_EXPR, type,
9648 diff, esz));
9650 return NULL_TREE;
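   /* Illustrative sketch: for "int a[8];" and the addresses &a[i] and
      &a[j], the bases compare equal, so the byte difference folds to
      0 + (i - j) * sizeof (int).  */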
9653 /* If the real or vector real constant CST of type TYPE has an exact
9654 inverse, return it, else return NULL. */
9656 static tree
9657 exact_inverse (tree type, tree cst)
9659 REAL_VALUE_TYPE r;
9660 tree unit_type, *elts;
9661 enum machine_mode mode;
9662 unsigned vec_nelts, i;
9664 switch (TREE_CODE (cst))
9666 case REAL_CST:
9667 r = TREE_REAL_CST (cst);
9669 if (exact_real_inverse (TYPE_MODE (type), &r))
9670 return build_real (type, r);
9672 return NULL_TREE;
9674 case VECTOR_CST:
9675 vec_nelts = VECTOR_CST_NELTS (cst);
9676 elts = XALLOCAVEC (tree, vec_nelts);
9677 unit_type = TREE_TYPE (type);
9678 mode = TYPE_MODE (unit_type);
9680 for (i = 0; i < vec_nelts; i++)
9682 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9683 if (!exact_real_inverse (mode, &r))
9684 return NULL_TREE;
9685 elts[i] = build_real (unit_type, r);
9688 return build_vector (type, elts);
9690 default:
9691 return NULL_TREE;
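   /* Illustrative sketch: for a REAL_CST of 4.0 the reciprocal 0.25 is
      exactly representable in binary, so it is returned; 3.0 has no
      exact binary reciprocal and yields NULL_TREE instead.  */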
9695 /* Fold a binary expression of code CODE and type TYPE with operands
9696 OP0 and OP1. LOC is the location of the resulting expression.
9697 Return the folded expression if folding is successful. Otherwise,
9698 return NULL_TREE. */
9700 tree
9701 fold_binary_loc (location_t loc,
9702 enum tree_code code, tree type, tree op0, tree op1)
9704 enum tree_code_class kind = TREE_CODE_CLASS (code);
9705 tree arg0, arg1, tem;
9706 tree t1 = NULL_TREE;
9707 bool strict_overflow_p;
9709 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9710 && TREE_CODE_LENGTH (code) == 2
9711 && op0 != NULL_TREE
9712 && op1 != NULL_TREE);
9714 arg0 = op0;
9715 arg1 = op1;
9717 /* Strip any conversions that don't change the mode. This is
9718 safe for every expression, except for a comparison expression
9719 because its signedness is derived from its operands. So, in
9720 the latter case, only strip conversions that don't change the
9721 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9722 preserved.
9724 Note that this is done as an internal manipulation within the
9725 constant folder, in order to find the simplest representation
9726 of the arguments so that their form can be studied. In any
9727 case, the appropriate type conversions should be put back in
9728 the tree that will get out of the constant folder. */
9730 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9732 STRIP_SIGN_NOPS (arg0);
9733 STRIP_SIGN_NOPS (arg1);
9735 else
9737 STRIP_NOPS (arg0);
9738 STRIP_NOPS (arg1);
9741 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9742 constant but we can't do arithmetic on them. */
9743 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9744 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9745 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9746 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9747 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9748 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9750 if (kind == tcc_binary)
9752 /* Make sure type and arg0 have the same saturating flag. */
9753 gcc_assert (TYPE_SATURATING (type)
9754 == TYPE_SATURATING (TREE_TYPE (arg0)));
9755 tem = const_binop (code, arg0, arg1);
9757 else if (kind == tcc_comparison)
9758 tem = fold_relational_const (code, type, arg0, arg1);
9759 else
9760 tem = NULL_TREE;
9762 if (tem != NULL_TREE)
9764 if (TREE_TYPE (tem) != type)
9765 tem = fold_convert_loc (loc, type, tem);
9766 return tem;
9770 /* If this is a commutative operation, and ARG0 is a constant, move it
9771 to ARG1 to reduce the number of tests below. */
9772 if (commutative_tree_code (code)
9773 && tree_swap_operands_p (arg0, arg1, true))
9774 return fold_build2_loc (loc, code, type, op1, op0);
9776 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9778 First check for cases where an arithmetic operation is applied to a
9779 compound, conditional, or comparison operation. Push the arithmetic
9780 operation inside the compound or conditional to see if any folding
9781 can then be done. Convert comparison to conditional for this purpose.
9782 This also optimizes non-constant cases that used to be done in
9783 expand_expr.
9785 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9786 where one operand is a truth value and the other is a truth value
9787 or a BIT_AND_EXPR with the constant 1. In that case, the
9788 code below would make the expression more complex. Change it to a
9789 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9790 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9792 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9793 || code == EQ_EXPR || code == NE_EXPR)
9794 && TREE_CODE (type) != VECTOR_TYPE
9795 && ((truth_value_p (TREE_CODE (arg0))
9796 && (truth_value_p (TREE_CODE (arg1))
9797 || (TREE_CODE (arg1) == BIT_AND_EXPR
9798 && integer_onep (TREE_OPERAND (arg1, 1)))))
9799 || (truth_value_p (TREE_CODE (arg1))
9800 && (truth_value_p (TREE_CODE (arg0))
9801 || (TREE_CODE (arg0) == BIT_AND_EXPR
9802 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9804 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9805 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9806 : TRUTH_XOR_EXPR,
9807 boolean_type_node,
9808 fold_convert_loc (loc, boolean_type_node, arg0),
9809 fold_convert_loc (loc, boolean_type_node, arg1));
9811 if (code == EQ_EXPR)
9812 tem = invert_truthvalue_loc (loc, tem);
9814 return fold_convert_loc (loc, type, tem);
9817 if (TREE_CODE_CLASS (code) == tcc_binary
9818 || TREE_CODE_CLASS (code) == tcc_comparison)
9820 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9822 tem = fold_build2_loc (loc, code, type,
9823 fold_convert_loc (loc, TREE_TYPE (op0),
9824 TREE_OPERAND (arg0, 1)), op1);
9825 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9826 tem);
9828 if (TREE_CODE (arg1) == COMPOUND_EXPR
9829 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9831 tem = fold_build2_loc (loc, code, type, op0,
9832 fold_convert_loc (loc, TREE_TYPE (op1),
9833 TREE_OPERAND (arg1, 1)));
9834 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9835 tem);
9838 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9840 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9841 arg0, arg1,
9842 /*cond_first_p=*/1);
9843 if (tem != NULL_TREE)
9844 return tem;
9847 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9849 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9850 arg1, arg0,
9851 /*cond_first_p=*/0);
9852 if (tem != NULL_TREE)
9853 return tem;
9857 switch (code)
9859 case MEM_REF:
9860 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9861 if (TREE_CODE (arg0) == ADDR_EXPR
9862 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9864 tree iref = TREE_OPERAND (arg0, 0);
9865 return fold_build2 (MEM_REF, type,
9866 TREE_OPERAND (iref, 0),
9867 int_const_binop (PLUS_EXPR, arg1,
9868 TREE_OPERAND (iref, 1)));
9871 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9872 if (TREE_CODE (arg0) == ADDR_EXPR
9873 && handled_component_p (TREE_OPERAND (arg0, 0)))
9875 tree base;
9876 HOST_WIDE_INT coffset;
9877 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9878 &coffset);
9879 if (!base)
9880 return NULL_TREE;
9881 return fold_build2 (MEM_REF, type,
9882 build_fold_addr_expr (base),
9883 int_const_binop (PLUS_EXPR, arg1,
9884 size_int (coffset)));
9887 return NULL_TREE;
9889 case POINTER_PLUS_EXPR:
9890 /* 0 +p index -> (type)index */
9891 if (integer_zerop (arg0))
9892 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9894 /* PTR +p 0 -> PTR */
9895 if (integer_zerop (arg1))
9896 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9898 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9899 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9900 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9901 return fold_convert_loc (loc, type,
9902 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9903 fold_convert_loc (loc, sizetype,
9904 arg1),
9905 fold_convert_loc (loc, sizetype,
9906 arg0)));
9908 /* (PTR +p B) +p A -> PTR +p (B + A) */
9909 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9911 tree inner;
9912 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9913 tree arg00 = TREE_OPERAND (arg0, 0);
9914 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9915 arg01, fold_convert_loc (loc, sizetype, arg1));
9916 return fold_convert_loc (loc, type,
9917 fold_build_pointer_plus_loc (loc,
9918 arg00, inner));
9921 /* PTR_CST +p CST -> a new combined constant. */
9922 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9923 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9924 fold_convert_loc (loc, type, arg1));
9926 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9927 of the array. The loop optimizer sometimes produces this type of
9928 expression. */
9929 if (TREE_CODE (arg0) == ADDR_EXPR)
9931 tem = try_move_mult_to_index (loc, arg0,
9932 fold_convert_loc (loc,
9933 ssizetype, arg1));
9934 if (tem)
9935 return fold_convert_loc (loc, type, tem);
9938 return NULL_TREE;
9940 case PLUS_EXPR:
9941 /* A + (-B) -> A - B */
9942 if (TREE_CODE (arg1) == NEGATE_EXPR)
9943 return fold_build2_loc (loc, MINUS_EXPR, type,
9944 fold_convert_loc (loc, type, arg0),
9945 fold_convert_loc (loc, type,
9946 TREE_OPERAND (arg1, 0)));
9947 /* (-A) + B -> B - A */
9948 if (TREE_CODE (arg0) == NEGATE_EXPR
9949 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9950 return fold_build2_loc (loc, MINUS_EXPR, type,
9951 fold_convert_loc (loc, type, arg1),
9952 fold_convert_loc (loc, type,
9953 TREE_OPERAND (arg0, 0)));
9955 if (INTEGRAL_TYPE_P (type))
9957 /* Convert ~A + 1 to -A. */
9958 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9959 && integer_onep (arg1))
9960 return fold_build1_loc (loc, NEGATE_EXPR, type,
9961 fold_convert_loc (loc, type,
9962 TREE_OPERAND (arg0, 0)));
9964 /* ~X + X is -1. */
9965 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9966 && !TYPE_OVERFLOW_TRAPS (type))
9968 tree tem = TREE_OPERAND (arg0, 0);
9970 STRIP_NOPS (tem);
9971 if (operand_equal_p (tem, arg1, 0))
9973 t1 = build_int_cst_type (type, -1);
9974 return omit_one_operand_loc (loc, type, t1, arg1);
9978 /* X + ~X is -1. */
9979 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9980 && !TYPE_OVERFLOW_TRAPS (type))
9982 tree tem = TREE_OPERAND (arg1, 0);
9984 STRIP_NOPS (tem);
9985 if (operand_equal_p (arg0, tem, 0))
9987 t1 = build_int_cst_type (type, -1);
9988 return omit_one_operand_loc (loc, type, t1, arg0);
9992 /* X + (X / CST) * -CST is X % CST. */
9993 if (TREE_CODE (arg1) == MULT_EXPR
9994 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9995 && operand_equal_p (arg0,
9996 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9998 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9999 tree cst1 = TREE_OPERAND (arg1, 1);
10000 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10001 cst1, cst0);
10002 if (sum && integer_zerop (sum))
10003 return fold_convert_loc (loc, type,
10004 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10005 TREE_TYPE (arg0), arg0,
10006 cst0));
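          /* Illustrative sketch (assumes integral "x"): the pattern
             above rewrites "x + (x / 8) * -8" as "x % 8", the additive
             form of the remainder identity.  */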
10010 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10011 one. Make sure the type is not saturating and has the signedness of
10012 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10013 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10014 if ((TREE_CODE (arg0) == MULT_EXPR
10015 || TREE_CODE (arg1) == MULT_EXPR)
10016 && !TYPE_SATURATING (type)
10017 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10018 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10019 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10021 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10022 if (tem)
10023 return tem;
10026 if (! FLOAT_TYPE_P (type))
10028 if (integer_zerop (arg1))
10029 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10031 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10032 with a constant, and the two constants have no bits in common,
10033 we should treat this as a BIT_IOR_EXPR since this may produce more
10034 simplifications. */
10035 if (TREE_CODE (arg0) == BIT_AND_EXPR
10036 && TREE_CODE (arg1) == BIT_AND_EXPR
10037 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10038 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10039 && integer_zerop (const_binop (BIT_AND_EXPR,
10040 TREE_OPERAND (arg0, 1),
10041 TREE_OPERAND (arg1, 1))))
10043 code = BIT_IOR_EXPR;
10044 goto bit_ior;
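          /* Illustrative sketch: "(x & 0xF0) + (y & 0x0F)" can produce
             no carries between the disjoint masks, so it is treated as
             "(x & 0xF0) | (y & 0x0F)" by the BIT_IOR_EXPR code.  */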
10047 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10048 (plus (plus (mult) (mult)) (foo)) so that we can
10049 take advantage of the factoring cases below. */
10050 if (TYPE_OVERFLOW_WRAPS (type)
10051 && (((TREE_CODE (arg0) == PLUS_EXPR
10052 || TREE_CODE (arg0) == MINUS_EXPR)
10053 && TREE_CODE (arg1) == MULT_EXPR)
10054 || ((TREE_CODE (arg1) == PLUS_EXPR
10055 || TREE_CODE (arg1) == MINUS_EXPR)
10056 && TREE_CODE (arg0) == MULT_EXPR)))
10058 tree parg0, parg1, parg, marg;
10059 enum tree_code pcode;
10061 if (TREE_CODE (arg1) == MULT_EXPR)
10062 parg = arg0, marg = arg1;
10063 else
10064 parg = arg1, marg = arg0;
10065 pcode = TREE_CODE (parg);
10066 parg0 = TREE_OPERAND (parg, 0);
10067 parg1 = TREE_OPERAND (parg, 1);
10068 STRIP_NOPS (parg0);
10069 STRIP_NOPS (parg1);
10071 if (TREE_CODE (parg0) == MULT_EXPR
10072 && TREE_CODE (parg1) != MULT_EXPR)
10073 return fold_build2_loc (loc, pcode, type,
10074 fold_build2_loc (loc, PLUS_EXPR, type,
10075 fold_convert_loc (loc, type,
10076 parg0),
10077 fold_convert_loc (loc, type,
10078 marg)),
10079 fold_convert_loc (loc, type, parg1));
10080 if (TREE_CODE (parg0) != MULT_EXPR
10081 && TREE_CODE (parg1) == MULT_EXPR)
10082 return
10083 fold_build2_loc (loc, PLUS_EXPR, type,
10084 fold_convert_loc (loc, type, parg0),
10085 fold_build2_loc (loc, pcode, type,
10086 fold_convert_loc (loc, type, marg),
10087 fold_convert_loc (loc, type,
10088 parg1)));
10091 else
10093 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10094 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10097 /* Likewise if the operands are reversed. */
10098 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10099 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10101 /* Convert X + -C into X - C. */
10102 if (TREE_CODE (arg1) == REAL_CST
10103 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10105 tem = fold_negate_const (arg1, type);
10106 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10107 return fold_build2_loc (loc, MINUS_EXPR, type,
10108 fold_convert_loc (loc, type, arg0),
10109 fold_convert_loc (loc, type, tem));
10112 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10113 to __complex__ ( x, y ). This is not the same for SNaNs or
10114 if signed zeros are involved. */
10115 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10116 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10117 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10119 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10120 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10121 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10122 bool arg0rz = false, arg0iz = false;
10123 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10124 || (arg0i && (arg0iz = real_zerop (arg0i))))
10126 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10127 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10128 if (arg0rz && arg1i && real_zerop (arg1i))
10130 tree rp = arg1r ? arg1r
10131 : build1 (REALPART_EXPR, rtype, arg1);
10132 tree ip = arg0i ? arg0i
10133 : build1 (IMAGPART_EXPR, rtype, arg0);
10134 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10136 else if (arg0iz && arg1r && real_zerop (arg1r))
10138 tree rp = arg0r ? arg0r
10139 : build1 (REALPART_EXPR, rtype, arg0);
10140 tree ip = arg1i ? arg1i
10141 : build1 (IMAGPART_EXPR, rtype, arg1);
10142 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10147 if (flag_unsafe_math_optimizations
10148 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10149 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10150 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10151 return tem;
10153 /* Convert x+x into x*2.0. */
10154 if (operand_equal_p (arg0, arg1, 0)
10155 && SCALAR_FLOAT_TYPE_P (type))
10156 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10157 build_real (type, dconst2));
10159 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10160 We associate floats only if the user has specified
10161 -fassociative-math. */
10162 if (flag_associative_math
10163 && TREE_CODE (arg1) == PLUS_EXPR
10164 && TREE_CODE (arg0) != MULT_EXPR)
10166 tree tree10 = TREE_OPERAND (arg1, 0);
10167 tree tree11 = TREE_OPERAND (arg1, 1);
10168 if (TREE_CODE (tree11) == MULT_EXPR
10169 && TREE_CODE (tree10) == MULT_EXPR)
10171 tree tree0;
10172 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10173 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10176 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10177 We associate floats only if the user has specified
10178 -fassociative-math. */
10179 if (flag_associative_math
10180 && TREE_CODE (arg0) == PLUS_EXPR
10181 && TREE_CODE (arg1) != MULT_EXPR)
10183 tree tree00 = TREE_OPERAND (arg0, 0);
10184 tree tree01 = TREE_OPERAND (arg0, 1);
10185 if (TREE_CODE (tree01) == MULT_EXPR
10186 && TREE_CODE (tree00) == MULT_EXPR)
10188 tree tree0;
10189 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10190 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10195 bit_rotate:
10196 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10197 is a rotate of A by C1 bits. */
10198 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10199 is a rotate of A by B bits. */
10201 enum tree_code code0, code1;
10202 tree rtype;
10203 code0 = TREE_CODE (arg0);
10204 code1 = TREE_CODE (arg1);
10205 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10206 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10207 && operand_equal_p (TREE_OPERAND (arg0, 0),
10208 TREE_OPERAND (arg1, 0), 0)
10209 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10210 TYPE_UNSIGNED (rtype))
10211 /* Only create rotates in complete modes. Other cases are not
10212 expanded properly. */
10213 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10215 tree tree01, tree11;
10216 enum tree_code code01, code11;
10218 tree01 = TREE_OPERAND (arg0, 1);
10219 tree11 = TREE_OPERAND (arg1, 1);
10220 STRIP_NOPS (tree01);
10221 STRIP_NOPS (tree11);
10222 code01 = TREE_CODE (tree01);
10223 code11 = TREE_CODE (tree11);
10224 if (code01 == INTEGER_CST
10225 && code11 == INTEGER_CST
10226 && TREE_INT_CST_HIGH (tree01) == 0
10227 && TREE_INT_CST_HIGH (tree11) == 0
10228 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10229 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10231 tem = build2_loc (loc, LROTATE_EXPR,
10232 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10233 TREE_OPERAND (arg0, 0),
10234 code0 == LSHIFT_EXPR ? tree01 : tree11);
10235 return fold_convert_loc (loc, type, tem);
10237 else if (code11 == MINUS_EXPR)
10239 tree tree110, tree111;
10240 tree110 = TREE_OPERAND (tree11, 0);
10241 tree111 = TREE_OPERAND (tree11, 1);
10242 STRIP_NOPS (tree110);
10243 STRIP_NOPS (tree111);
10244 if (TREE_CODE (tree110) == INTEGER_CST
10245 && 0 == compare_tree_int (tree110,
10246 TYPE_PRECISION
10247 (TREE_TYPE (TREE_OPERAND
10248 (arg0, 0))))
10249 && operand_equal_p (tree01, tree111, 0))
10250 return
10251 fold_convert_loc (loc, type,
10252 build2 ((code0 == LSHIFT_EXPR
10253 ? LROTATE_EXPR
10254 : RROTATE_EXPR),
10255 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10256 TREE_OPERAND (arg0, 0), tree01));
10258 else if (code01 == MINUS_EXPR)
10260 tree tree010, tree011;
10261 tree010 = TREE_OPERAND (tree01, 0);
10262 tree011 = TREE_OPERAND (tree01, 1);
10263 STRIP_NOPS (tree010);
10264 STRIP_NOPS (tree011);
10265 if (TREE_CODE (tree010) == INTEGER_CST
10266 && 0 == compare_tree_int (tree010,
10267 TYPE_PRECISION
10268 (TREE_TYPE (TREE_OPERAND
10269 (arg0, 0))))
10270 && operand_equal_p (tree11, tree011, 0))
10271 return fold_convert_loc
10272 (loc, type,
10273 build2 ((code0 != LSHIFT_EXPR
10274 ? LROTATE_EXPR
10275 : RROTATE_EXPR),
10276 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10277 TREE_OPERAND (arg0, 0), tree11));
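      /* Illustrative sketch (assumes 32-bit "unsigned int x"): both
         "(x << 3) + (x >> 29)" and "(x << n) + (x >> (32 - n))" are
         recognized here as a left rotate of x.  */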
10282 associate:
10283 /* In most languages, we can't associate operations on floats through
10284 parentheses. Rather than remember where the parentheses were, we
10285 don't associate floats at all, unless the user has specified
10286 -fassociative-math.
10287 And, we need to make sure type is not saturating. */
10289 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10290 && !TYPE_SATURATING (type))
10292 tree var0, con0, lit0, minus_lit0;
10293 tree var1, con1, lit1, minus_lit1;
10294 bool ok = true;
10296 /* Split both trees into variables, constants, and literals. Then
10297 associate each group together, the constants with literals,
10298 then the result with variables. This increases the chances of
10299 literals being recombined later and of generating relocatable
10300 expressions for the sum of a constant and literal. */
10301 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10302 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10303 code == MINUS_EXPR);
10305 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10306 if (code == MINUS_EXPR)
10307 code = PLUS_EXPR;
10309 /* With undefined overflow we can only associate constants with one
10310 variable, and constants whose association doesn't overflow. */
10311 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10312 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10314 if (var0 && var1)
10316 tree tmp0 = var0;
10317 tree tmp1 = var1;
10319 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10320 tmp0 = TREE_OPERAND (tmp0, 0);
10321 if (CONVERT_EXPR_P (tmp0)
10322 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10323 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10324 <= TYPE_PRECISION (type)))
10325 tmp0 = TREE_OPERAND (tmp0, 0);
10326 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10327 tmp1 = TREE_OPERAND (tmp1, 0);
10328 if (CONVERT_EXPR_P (tmp1)
10329 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10330 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10331 <= TYPE_PRECISION (type)))
10332 tmp1 = TREE_OPERAND (tmp1, 0);
10333 /* The only case we can still associate with two variables
10334 is if they are the same, modulo negation and bit-pattern
10335 preserving conversions. */
10336 if (!operand_equal_p (tmp0, tmp1, 0))
10337 ok = false;
10340 if (ok && lit0 && lit1)
10342 tree tmp0 = fold_convert (type, lit0);
10343 tree tmp1 = fold_convert (type, lit1);
10345 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10346 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10347 ok = false;
10351 /* Only do something if we found more than two objects. Otherwise,
10352 nothing has changed and we risk infinite recursion. */
10353 if (ok
10354 && (2 < ((var0 != 0) + (var1 != 0)
10355 + (con0 != 0) + (con1 != 0)
10356 + (lit0 != 0) + (lit1 != 0)
10357 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10359 var0 = associate_trees (loc, var0, var1, code, type);
10360 con0 = associate_trees (loc, con0, con1, code, type);
10361 lit0 = associate_trees (loc, lit0, lit1, code, type);
10362 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10364 /* Preserve the MINUS_EXPR if the negative part of the literal is
10365 greater than the positive part. Otherwise, the multiplicative
10366 folding code (i.e. extract_muldiv) may be fooled in case
10367 unsigned constants are subtracted, like in the following
10368 example: ((X*2 + 4) - 8U)/2. */
10369 if (minus_lit0 && lit0)
10371 if (TREE_CODE (lit0) == INTEGER_CST
10372 && TREE_CODE (minus_lit0) == INTEGER_CST
10373 && tree_int_cst_lt (lit0, minus_lit0))
10375 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10376 MINUS_EXPR, type);
10377 lit0 = 0;
10379 else
10381 lit0 = associate_trees (loc, lit0, minus_lit0,
10382 MINUS_EXPR, type);
10383 minus_lit0 = 0;
10386 if (minus_lit0)
10388 if (con0 == 0)
10389 return
10390 fold_convert_loc (loc, type,
10391 associate_trees (loc, var0, minus_lit0,
10392 MINUS_EXPR, type));
10393 else
10395 con0 = associate_trees (loc, con0, minus_lit0,
10396 MINUS_EXPR, type);
10397 return
10398 fold_convert_loc (loc, type,
10399 associate_trees (loc, var0, con0,
10400 PLUS_EXPR, type));
10404 con0 = associate_trees (loc, con0, lit0, code, type);
10405 return
10406 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10407 code, type));
10411 return NULL_TREE;
10413 case MINUS_EXPR:
10414 /* Pointer simplifications for subtraction, simple reassociations. */
10415 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10417 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10418 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10419 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10421 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10422 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10423 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10424 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10425 return fold_build2_loc (loc, PLUS_EXPR, type,
10426 fold_build2_loc (loc, MINUS_EXPR, type,
10427 arg00, arg10),
10428 fold_build2_loc (loc, MINUS_EXPR, type,
10429 arg01, arg11));
10431 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10432 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10434 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10435 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10436 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10437 fold_convert_loc (loc, type, arg1));
10438 if (tmp)
10439 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10442 /* A - (-B) -> A + B */
10443 if (TREE_CODE (arg1) == NEGATE_EXPR)
10444 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10445 fold_convert_loc (loc, type,
10446 TREE_OPERAND (arg1, 0)));
10447 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10448 if (TREE_CODE (arg0) == NEGATE_EXPR
10449 && (FLOAT_TYPE_P (type)
10450 || INTEGRAL_TYPE_P (type))
10451 && negate_expr_p (arg1)
10452 && reorder_operands_p (arg0, arg1))
10453 return fold_build2_loc (loc, MINUS_EXPR, type,
10454 fold_convert_loc (loc, type,
10455 negate_expr (arg1)),
10456 fold_convert_loc (loc, type,
10457 TREE_OPERAND (arg0, 0)));
10458 /* Convert -A - 1 to ~A. */
10459 if (INTEGRAL_TYPE_P (type)
10460 && TREE_CODE (arg0) == NEGATE_EXPR
10461 && integer_onep (arg1)
10462 && !TYPE_OVERFLOW_TRAPS (type))
10463 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10464 fold_convert_loc (loc, type,
10465 TREE_OPERAND (arg0, 0)));
10467 /* Convert -1 - A to ~A. */
10468 if (INTEGRAL_TYPE_P (type)
10469 && integer_all_onesp (arg0))
10470 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
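      /* Illustrative sketch (assumes integral "x"): both "-x - 1" and
         "-1 - x" fold to "~x" via the two transforms above.  */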
10473 /* X - (X / CST) * CST is X % CST. */
10474 if (INTEGRAL_TYPE_P (type)
10475 && TREE_CODE (arg1) == MULT_EXPR
10476 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10477 && operand_equal_p (arg0,
10478 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10479 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10480 TREE_OPERAND (arg1, 1), 0))
10481 return
10482 fold_convert_loc (loc, type,
10483 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10484 arg0, TREE_OPERAND (arg1, 1)));
10486 if (! FLOAT_TYPE_P (type))
10488 if (integer_zerop (arg0))
10489 return negate_expr (fold_convert_loc (loc, type, arg1));
10490 if (integer_zerop (arg1))
10491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10493 /* Fold A - (A & B) into ~B & A. */
10494 if (!TREE_SIDE_EFFECTS (arg0)
10495 && TREE_CODE (arg1) == BIT_AND_EXPR)
10497 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10499 tree arg10 = fold_convert_loc (loc, type,
10500 TREE_OPERAND (arg1, 0));
10501 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10502 fold_build1_loc (loc, BIT_NOT_EXPR,
10503 type, arg10),
10504 fold_convert_loc (loc, type, arg0));
10506 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10508 tree arg11 = fold_convert_loc (loc,
10509 type, TREE_OPERAND (arg1, 1));
10510 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10511 fold_build1_loc (loc, BIT_NOT_EXPR,
10512 type, arg11),
10513 fold_convert_loc (loc, type, arg0));
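          /* Illustrative sketch: "x - (x & m)" clears exactly the bits
             kept by the mask, so it folds to "~m & x" here.  */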
10517 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10518 any power of 2 minus 1. */
10519 if (TREE_CODE (arg0) == BIT_AND_EXPR
10520 && TREE_CODE (arg1) == BIT_AND_EXPR
10521 && operand_equal_p (TREE_OPERAND (arg0, 0),
10522 TREE_OPERAND (arg1, 0), 0))
10524 tree mask0 = TREE_OPERAND (arg0, 1);
10525 tree mask1 = TREE_OPERAND (arg1, 1);
10526 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10528 if (operand_equal_p (tem, mask1, 0))
10530 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10531 TREE_OPERAND (arg0, 0), mask1);
10532 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10537 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10538 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10539 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10541 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10542 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10543 (-ARG1 + ARG0) reduces to -ARG1. */
10544 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10545 return negate_expr (fold_convert_loc (loc, type, arg1));
10547 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10548 __complex__ ( x, -y ). This is not the same for SNaNs or if
10549 signed zeros are involved. */
10550 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10551 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10552 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10554 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10555 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10556 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10557 bool arg0rz = false, arg0iz = false;
10558 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10559 || (arg0i && (arg0iz = real_zerop (arg0i))))
10561 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10562 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10563 if (arg0rz && arg1i && real_zerop (arg1i))
10565 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10566 arg1r ? arg1r
10567 : build1 (REALPART_EXPR, rtype, arg1));
10568 tree ip = arg0i ? arg0i
10569 : build1 (IMAGPART_EXPR, rtype, arg0);
10570 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10572 else if (arg0iz && arg1r && real_zerop (arg1r))
10574 tree rp = arg0r ? arg0r
10575 : build1 (REALPART_EXPR, rtype, arg0);
10576 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10577 arg1i ? arg1i
10578 : build1 (IMAGPART_EXPR, rtype, arg1));
10579 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10584 /* Fold &x - &x. This can happen from &x.foo - &x.
10585 This is unsafe for certain floats even in non-IEEE formats.
10586 In IEEE, it is unsafe because it does wrong for NaNs.
10587 Also note that operand_equal_p is always false if an operand
10588 is volatile. */
10590 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10591 && operand_equal_p (arg0, arg1, 0))
10592 return build_zero_cst (type);
10594 /* A - B -> A + (-B) if B is easily negatable. */
10595 if (negate_expr_p (arg1)
10596 && ((FLOAT_TYPE_P (type)
10597 /* Avoid this transformation if B is a positive REAL_CST. */
10598 && (TREE_CODE (arg1) != REAL_CST
10599 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10600 || INTEGRAL_TYPE_P (type)))
10601 return fold_build2_loc (loc, PLUS_EXPR, type,
10602 fold_convert_loc (loc, type, arg0),
10603 fold_convert_loc (loc, type,
10604 negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) when A1 == A2 or C1 == C2, or when
         one of the factors is 1.  Make sure the type is not saturating
         and has the signedness of the stripped operands, as
         fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));
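
          /* For example, n * (1 << 3) folds to n << 3 by the rules
             above, which is equivalent to n * 8.  */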
          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));
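
          /* For example, (a + a) * 3 becomes a * (2 * 3), i.e. a * 6,
             with omit_one_operand preserving any side effects of the
             duplicated operand.  */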
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
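
          /* For example, with z = 3 + 4i, z * conj(z) folds to
             __complex__ (3*3 + 4*4, 0), i.e. 25 + 0i;
             fold_mult_zconjz builds the r*r + i*i form directly.  */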
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may
             change the result for floating-point types due to rounding,
             so it is applied only if -fassociative-math is specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
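
              /* For example, (a + b*I) * I folds to __complex__ (-b, a)
                 and (a + b*I) * -I folds to __complex__ (b, -a), the
                 usual 90-degree rotations in the complex plane.  */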
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
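
                      /* For example, pow (x, 2.0) * pow (x, 3.0) folds
                         to pow (x, 5.0) here, and pow (x, y) * pow (z, y)
                         to pow (x * z, y) above; both depend on
                         -funsafe-math-optimizations, since they can
                         change rounding and special-value behaviour.  */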
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int c1, c2, c3, msk;
          int width = TYPE_PRECISION (type), w;
          c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
          c2 = tree_to_double_int (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          msk = double_int::mask (width);

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2).is_zero ())
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             integer mode, which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((c1.low | c2.low) & mask) == mask
                  && (c1.low & ~mask) == 0 && c1.high == 0)
                {
                  c3 = double_int::from_uhwi (mask);
                  break;
                }
            }
          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     double_int_to_tree (type,
                                                                         c3)),
                                    arg1);
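
          /* Worked example, assuming a 32-bit int: for (X & 0x73) | 0x0f,
             C1 & ~C2 == 0x70 and no byte- or halfword-sized mask covers
             C1 | C2, so the result is (X & 0x70) | 0x0f.  For
             (X & 0xf3) | 0x0f, the 8-bit mask 0xff does cover C1 | C2,
             so C1 widens to 0xff and the result is (X & 0xff) | 0x0f,
             a whole-byte mask that may enable further optimization.  */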
        }

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form,
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful, continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
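
          /* For example, (x & 4) ^ (x & 3) has disjoint constants, so
             it is treated as (x & 4) | (x & 3), which
             distribute_bit_expr can then fold to x & 7.  */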
        }

      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful, continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
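
          /* Each of the three folds above turns a low-bit test into an
             equality test; e.g. with x == 5, (x ^ 1) & 1 is 0 and the
             folded form (x & 1) == 0 is likewise 0.  */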
        }

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          double_int cst1 = tree_to_double_int (arg1);
          double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
                                          TYPE_UNSIGNED (TREE_TYPE (arg1)));
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                double_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int arg1tz
            = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
          if (arg1tz > 0)
            {
              double_int arg1mask, masked;
              arg1mask = ~double_int::mask (arg1tz);
              arg1mask = arg1mask.ext (TYPE_PRECISION (type),
                                       TYPE_UNSIGNED (type));
              masked = arg1mask & tree_to_double_int (arg1);
              if (masked.is_zero ())
                return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                              arg0, arg1);
              else if (masked != tree_to_double_int (arg1))
                return fold_build2_loc (loc, code, type, op0,
                                        double_int_to_tree (type, masked));
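
              /* For example, (x * 4) & 3 folds to 0 above, since the
                 product has at least two trailing zero bits, and
                 (x * 4) & 7 drops the known zero bits to become
                 (x * 4) & 4.  */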
            }
        }

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
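
                  /* Worked example with M == 0xf (so M + 1 is a power
                     of two): ((a & 0xff) + b) & 0xf becomes
                     (a + b) & 0xf because (0xff & 0xf) == 0xf, and
                     ((a | 0x30) + b) & 0xf becomes (a + b) & 0xf
                     because (0x30 & 0xf) == 0.  */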
                }
            }
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form,
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
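
          /* For example, if arg0 is the address of a variable known to
             be 8-byte aligned, the modulus is 8 and the residue 0, so
             masking that address with 7 folds to the constant 0.  */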
        }

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
         (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For an arithmetic shift, if the sign bit could be set,
                 zerobits can actually contain sign bits, so no
                 transformation is possible unless MASK masks them all
                 away.  In that case the shift needs to be converted
                 into a logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
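
                  /* For example, assuming a 32-bit unsigned int,
                     (x << 8) & 0xffffff00 has zerobits == 0xff, so
                     newmask becomes all-ones and the resulting
                     (x << 8) & 0xffffffff immediately folds to just
                     x << 8.  */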
                }
            }
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (optimize
          && (TREE_CODE (arg1) == REAL_CST
              || (TREE_CODE (arg1) == COMPLEX_CST
                  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
              || (TREE_CODE (arg1) == VECTOR_CST
                  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.
             TODO: Complex reciprocal not implemented.  */
          if (TREE_CODE (arg1) != COMPLEX_CST)
            {
              tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

              if (inverse)
                return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum))
            {
              unsigned long pow2;

              if (TREE_INT_CST_LOW (arg1))
                pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
                       + HOST_BITS_PER_WIDE_INT;

              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      build_int_cst (integer_type_node, pow2));
            }
        }

      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (TREE_TYPE (sh_cnt),
                                                       pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), sh_cnt);
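
              /* For example, with unsigned x, x / (4 << n) folds to
                 x >> (n + 2) here, since log2(4) == 2.  */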
            }
        }

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code; it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* For X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
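
              /* For example, unsigned x % 8 folds to x & 7.  The
                 nonnegativity check matters for signed types: with
                 x == -1, x % 8 is -1 under truncated division, but
                 x & 7 is 7.  */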
12195 return NULL_TREE;
12197 case LROTATE_EXPR:
12198 case RROTATE_EXPR:
12199 if (integer_all_onesp (arg0))
12200 return omit_one_operand_loc (loc, type, arg0, arg1);
12201 goto shift;
12203 case RSHIFT_EXPR:
12204 /* Optimize -1 >> x for arithmetic right shifts. */
12205 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12206 && tree_expr_nonnegative_p (arg1))
12207 return omit_one_operand_loc (loc, type, arg0, arg1);
12208 /* ... fall through ... */
12210 case LSHIFT_EXPR:
12211 shift:
12212 if (integer_zerop (arg1))
12213 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12214 if (integer_zerop (arg0))
12215 return omit_one_operand_loc (loc, type, arg0, arg1);
12217 /* Since negative shift count is not well-defined,
12218 don't try to compute it in the compiler. */
12219 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12220 return NULL_TREE;
12222 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12223 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12224 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12225 && host_integerp (TREE_OPERAND (arg0, 1), false)
12226 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12228 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12229 + TREE_INT_CST_LOW (arg1));
12231 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12232 being well defined. */
12233 if (low >= TYPE_PRECISION (type))
12235 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12236 low = low % TYPE_PRECISION (type);
12237 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12238 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12239 TREE_OPERAND (arg0, 0));
12240 else
12241 low = TYPE_PRECISION (type) - 1;
12244 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12245 build_int_cst (type, low));
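	  /* Illustrative examples: "(x << 3) << 5" folds to "x << 8".
	     When the combined count reaches the precision, e.g.
	     "(x << 20) << 20" on a 32-bit unsigned type, the result
	     folds to 0, while rotate counts simply wrap modulo the
	     precision.  */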
12248 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12249 into x & ((unsigned)-1 >> c) for unsigned types. */
12250 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12251 || (TYPE_UNSIGNED (type)
12252 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12253 && host_integerp (arg1, false)
12254 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12255 && host_integerp (TREE_OPERAND (arg0, 1), false)
12256 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12258 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12259 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12260 tree lshift;
12261 tree arg00;
12263 if (low0 == low1)
12265 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12267 lshift = build_int_cst (type, -1);
12268 lshift = int_const_binop (code, lshift, arg1);
12270 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
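	  /* Illustrative examples on a 32-bit unsigned type:
	     "(x >> 4) << 4" folds to "x & 0xfffffff0", and
	     "(x << 4) >> 4" folds to "x & 0x0fffffff".  */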
12274 /* Rewrite an LROTATE_EXPR by a constant into an
12275 RROTATE_EXPR by a new constant. */
12276 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12278 tree tem = build_int_cst (TREE_TYPE (arg1),
12279 TYPE_PRECISION (type));
12280 tem = const_binop (MINUS_EXPR, tem, arg1);
12281 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
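	  /* For example, on a 32-bit type a rotate left by 8 becomes a
	     rotate right by 24.  */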
12284 /* If we have a rotate of a bit operation with the rotate count and
12285 the second operand of the bit operation both constant,
12286 permute the two operations. */
12287 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12288 && (TREE_CODE (arg0) == BIT_AND_EXPR
12289 || TREE_CODE (arg0) == BIT_IOR_EXPR
12290 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12291 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12292 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12293 fold_build2_loc (loc, code, type,
12294 TREE_OPERAND (arg0, 0), arg1),
12295 fold_build2_loc (loc, code, type,
12296 TREE_OPERAND (arg0, 1), arg1));
12298 /* Two consecutive rotates adding up to the precision of the
12299 type can be ignored. */
12300 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12301 && TREE_CODE (arg0) == RROTATE_EXPR
12302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12303 && TREE_INT_CST_HIGH (arg1) == 0
12304 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12305 && ((TREE_INT_CST_LOW (arg1)
12306 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12307 == (unsigned int) TYPE_PRECISION (type)))
12308 return TREE_OPERAND (arg0, 0);
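      /* For example, on a 32-bit type, rotating x right by 10 and then
	 by 22 covers the full 32 bits and folds back to plain "x".  */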
12310 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12311 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12312 if the latter can be further optimized. */
12313 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12314 && TREE_CODE (arg0) == BIT_AND_EXPR
12315 && TREE_CODE (arg1) == INTEGER_CST
12316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12318 tree mask = fold_build2_loc (loc, code, type,
12319 fold_convert_loc (loc, type,
12320 TREE_OPERAND (arg0, 1)),
12321 arg1);
12322 tree shift = fold_build2_loc (loc, code, type,
12323 fold_convert_loc (loc, type,
12324 TREE_OPERAND (arg0, 0)),
12325 arg1);
12326 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12327 if (tem)
12328 return tem;
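	  /* Illustrative example: "(x & 0xf0) >> 8" is rewritten to
	     "(x >> 8) & (0xf0 >> 8)", whose mask folds to 0, so the
	     whole expression folds to 0.  If the rewritten AND does
	     not simplify, the original form is kept.  */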
12331 return NULL_TREE;
12333 case MIN_EXPR:
12334 if (operand_equal_p (arg0, arg1, 0))
12335 return omit_one_operand_loc (loc, type, arg0, arg1);
12336 if (INTEGRAL_TYPE_P (type)
12337 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12338 return omit_one_operand_loc (loc, type, arg1, arg0);
12339 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12340 if (tem)
12341 return tem;
12342 goto associate;
12344 case MAX_EXPR:
12345 if (operand_equal_p (arg0, arg1, 0))
12346 return omit_one_operand_loc (loc, type, arg0, arg1);
12347 if (INTEGRAL_TYPE_P (type)
12348 && TYPE_MAX_VALUE (type)
12349 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12350 return omit_one_operand_loc (loc, type, arg1, arg0);
12351 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12352 if (tem)
12353 return tem;
12354 goto associate;
12356 case TRUTH_ANDIF_EXPR:
12357 /* Note that the operands of this must be ints
12358 and their values must be 0 or 1.
12359 ("true" is a fixed value perhaps depending on the language.) */
12360 /* If first arg is constant zero, return it. */
12361 if (integer_zerop (arg0))
12362 return fold_convert_loc (loc, type, arg0);
12363 case TRUTH_AND_EXPR:
12364 /* If either arg is constant true, drop it. */
12365 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12366 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12367 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12368 /* Preserve sequence points. */
12369 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12370 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12371 /* If second arg is constant zero, result is zero, but first arg
12372 must be evaluated. */
12373 if (integer_zerop (arg1))
12374 return omit_one_operand_loc (loc, type, arg1, arg0);
12375 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12376 case will be handled here. */
12377 if (integer_zerop (arg0))
12378 return omit_one_operand_loc (loc, type, arg0, arg1);
12380 /* !X && X is always false. */
12381 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12382 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12383 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12384 /* X && !X is always false. */
12385 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12386 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12387 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12389 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12390 means A >= Y && A != MAX, but in this case we know that
12391 A < X <= MAX. */
12393 if (!TREE_SIDE_EFFECTS (arg0)
12394 && !TREE_SIDE_EFFECTS (arg1))
12396 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12397 if (tem && !operand_equal_p (tem, arg0, 0))
12398 return fold_build2_loc (loc, code, type, tem, arg1);
12400 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12401 if (tem && !operand_equal_p (tem, arg1, 0))
12402 return fold_build2_loc (loc, code, type, arg0, tem);
12405 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12406 != NULL_TREE)
12407 return tem;
12409 return NULL_TREE;
12411 case TRUTH_ORIF_EXPR:
12412 /* Note that the operands of this must be ints
12413 and their values must be 0 or true.
12414 ("true" is a fixed value perhaps depending on the language.) */
12415 /* If first arg is constant true, return it. */
12416 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12417 return fold_convert_loc (loc, type, arg0);
12418 case TRUTH_OR_EXPR:
12419 /* If either arg is constant zero, drop it. */
12420 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12421 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12422 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12423 /* Preserve sequence points. */
12424 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12426 /* If second arg is constant true, result is true, but we must
12427 evaluate first arg. */
12428 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12429 return omit_one_operand_loc (loc, type, arg1, arg0);
12430 /* Likewise for first arg, but note this only occurs here for
12431 TRUTH_OR_EXPR. */
12432 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12433 return omit_one_operand_loc (loc, type, arg0, arg1);
12435 /* !X || X is always true. */
12436 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12437 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12438 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12439 /* X || !X is always true. */
12440 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12441 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12442 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12444 /* (X && !Y) || (!X && Y) is X ^ Y */
12445 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12446 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12448 tree a0, a1, l0, l1, n0, n1;
12450 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12451 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12453 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12454 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12456 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12457 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12459 if ((operand_equal_p (n0, a0, 0)
12460 && operand_equal_p (n1, a1, 0))
12461 || (operand_equal_p (n0, a1, 0)
12462 && operand_equal_p (n1, a0, 0)))
12463 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
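	  /* For example, "(a && !b) || (!a && b)" folds to the single
	     test "a ^ b".  */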
12466 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12467 != NULL_TREE)
12468 return tem;
12470 return NULL_TREE;
12472 case TRUTH_XOR_EXPR:
12473 /* If the second arg is constant zero, drop it. */
12474 if (integer_zerop (arg1))
12475 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12476 /* If the second arg is constant true, this is a logical inversion. */
12477 if (integer_onep (arg1))
12479 /* Only call invert_truthvalue if operand is a truth value. */
12480 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12481 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12482 else
12483 tem = invert_truthvalue_loc (loc, arg0);
12484 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12486 /* Identical arguments cancel to zero. */
12487 if (operand_equal_p (arg0, arg1, 0))
12488 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12490 /* !X ^ X is always true. */
12491 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12492 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12493 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12495 /* X ^ !X is always true. */
12496 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12497 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12498 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12500 return NULL_TREE;
12502 case EQ_EXPR:
12503 case NE_EXPR:
12504 STRIP_NOPS (arg0);
12505 STRIP_NOPS (arg1);
12507 tem = fold_comparison (loc, code, type, op0, op1);
12508 if (tem != NULL_TREE)
12509 return tem;
12511 /* bool_var != 0 becomes bool_var. */
12512 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12513 && code == NE_EXPR)
12514 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12516 /* bool_var == 1 becomes bool_var. */
12517 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12518 && code == EQ_EXPR)
12519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12521 /* bool_var != 1 becomes !bool_var. */
12522 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12523 && code == NE_EXPR)
12524 return fold_convert_loc (loc, type,
12525 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12526 TREE_TYPE (arg0), arg0));
12528 /* bool_var == 0 becomes !bool_var. */
12529 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12530 && code == EQ_EXPR)
12531 return fold_convert_loc (loc, type,
12532 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12533 TREE_TYPE (arg0), arg0));
12535 /* !exp != 0 becomes !exp */
12536 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12537 && code == NE_EXPR)
12538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12540 /* If this is an equality comparison of the address of two non-weak,
12541 unaliased symbols neither of which are extern (since we do not
12542 have access to attributes for externs), then we know the result. */
12543 if (TREE_CODE (arg0) == ADDR_EXPR
12544 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12545 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12546 && ! lookup_attribute ("alias",
12547 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12548 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12549 && TREE_CODE (arg1) == ADDR_EXPR
12550 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12551 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12552 && ! lookup_attribute ("alias",
12553 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12554 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12556 /* We know that we're looking at the address of two
12557 non-weak, unaliased, static _DECL nodes.
12559 It is both wasteful and incorrect to call operand_equal_p
12560 to compare the two ADDR_EXPR nodes. It is wasteful in that
12561 all we need to do is test pointer equality for the arguments
12562 to the two ADDR_EXPR nodes. It is incorrect to use
12563 operand_equal_p as that function is NOT equivalent to a
12564 C equality test. It can in fact return false for two
12565 objects which would test as equal using the C equality
12566 operator. */
12567 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12568 return constant_boolean_node (equal
12569 ? code == EQ_EXPR : code != EQ_EXPR,
12570 type);
12573 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12574 a MINUS_EXPR of a constant, we can convert it into a comparison with
12575 a revised constant as long as no overflow occurs. */
12576 if (TREE_CODE (arg1) == INTEGER_CST
12577 && (TREE_CODE (arg0) == PLUS_EXPR
12578 || TREE_CODE (arg0) == MINUS_EXPR)
12579 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12580 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12581 ? MINUS_EXPR : PLUS_EXPR,
12582 fold_convert_loc (loc, TREE_TYPE (arg0),
12583 arg1),
12584 TREE_OPERAND (arg0, 1)))
12585 && !TREE_OVERFLOW (tem))
12586 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12588 /* Similarly for a NEGATE_EXPR. */
12589 if (TREE_CODE (arg0) == NEGATE_EXPR
12590 && TREE_CODE (arg1) == INTEGER_CST
12591 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12592 arg1)))
12593 && TREE_CODE (tem) == INTEGER_CST
12594 && !TREE_OVERFLOW (tem))
12595 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12597 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12598 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12599 && TREE_CODE (arg1) == INTEGER_CST
12600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12601 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12602 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12603 fold_convert_loc (loc,
12604 TREE_TYPE (arg0),
12605 arg1),
12606 TREE_OPERAND (arg0, 1)));
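      /* For example, "(x ^ 5) == 3" folds to "x == 6".  */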
12608 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12609 if ((TREE_CODE (arg0) == PLUS_EXPR
12610 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12611 || TREE_CODE (arg0) == MINUS_EXPR)
12612 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12613 0)),
12614 arg1, 0)
12615 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12616 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12618 tree val = TREE_OPERAND (arg0, 1);
12619 return omit_two_operands_loc (loc, type,
12620 fold_build2_loc (loc, code, type,
12621 val,
12622 build_int_cst (TREE_TYPE (val),
12623 0)),
12624 TREE_OPERAND (arg0, 0), arg1);
12627 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
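      /* Rationale: for odd C, "C - x == x" would require 2*x == C,
	 which has no solution even modulo 2^precision, so "==" folds
	 to false and "!=" to true.  */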
12628 if (TREE_CODE (arg0) == MINUS_EXPR
12629 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12630 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12631 1)),
12632 arg1, 0)
12633 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12635 return omit_two_operands_loc (loc, type,
12636 code == NE_EXPR
12637 ? boolean_true_node : boolean_false_node,
12638 TREE_OPERAND (arg0, 1), arg1);
12641 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12642 for !=. Don't do this for ordered comparisons due to overflow. */
12643 if (TREE_CODE (arg0) == MINUS_EXPR
12644 && integer_zerop (arg1))
12645 return fold_build2_loc (loc, code, type,
12646 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12648 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12649 if (TREE_CODE (arg0) == ABS_EXPR
12650 && (integer_zerop (arg1) || real_zerop (arg1)))
12651 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12653 /* If this is an EQ or NE comparison with zero and ARG0 is
12654 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12655 two operations, but the latter can be done in one less insn
12656 on machines that have only two-operand insns or on which a
12657 constant cannot be the first operand. */
12658 if (TREE_CODE (arg0) == BIT_AND_EXPR
12659 && integer_zerop (arg1))
12661 tree arg00 = TREE_OPERAND (arg0, 0);
12662 tree arg01 = TREE_OPERAND (arg0, 1);
12663 if (TREE_CODE (arg00) == LSHIFT_EXPR
12664 && integer_onep (TREE_OPERAND (arg00, 0)))
12666 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12667 arg01, TREE_OPERAND (arg00, 1));
12668 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12669 build_int_cst (TREE_TYPE (arg0), 1));
12670 return fold_build2_loc (loc, code, type,
12671 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12672 arg1);
12674 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12675 && integer_onep (TREE_OPERAND (arg01, 0)))
12677 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12678 arg00, TREE_OPERAND (arg01, 1));
12679 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12680 build_int_cst (TREE_TYPE (arg0), 1));
12681 return fold_build2_loc (loc, code, type,
12682 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12683 arg1);
12687 /* If this is an NE or EQ comparison of zero against the result of a
12688 signed MOD operation whose second operand is a power of 2, make
12689 the MOD operation unsigned since it is simpler and equivalent. */
12690 if (integer_zerop (arg1)
12691 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12692 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12693 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12694 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12695 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12696 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12698 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12699 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12700 fold_convert_loc (loc, newtype,
12701 TREE_OPERAND (arg0, 0)),
12702 fold_convert_loc (loc, newtype,
12703 TREE_OPERAND (arg0, 1)));
12705 return fold_build2_loc (loc, code, type, newmod,
12706 fold_convert_loc (loc, newtype, arg1));
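	  /* For example, "(int) x % 4 == 0" becomes
	     "(unsigned) x % 4 == 0": a power-of-two remainder is zero
	     exactly when the low-order bits are zero, regardless of
	     sign.  */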
12709 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12710 C1 is a valid shift constant, and C2 is a power of two, i.e.
12711 a single bit. */
12712 if (TREE_CODE (arg0) == BIT_AND_EXPR
12713 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12714 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12715 == INTEGER_CST
12716 && integer_pow2p (TREE_OPERAND (arg0, 1))
12717 && integer_zerop (arg1))
12719 tree itype = TREE_TYPE (arg0);
12720 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12721 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12723 /* Check for a valid shift count. */
12724 if (TREE_INT_CST_HIGH (arg001) == 0
12725 && TREE_INT_CST_LOW (arg001) < prec)
12727 tree arg01 = TREE_OPERAND (arg0, 1);
12728 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12729 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12730 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12731 can be rewritten as (X & (C2 << C1)) != 0. */
12732 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12734 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12735 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12736 return fold_build2_loc (loc, code, type, tem,
12737 fold_convert_loc (loc, itype, arg1));
12739 /* Otherwise, for signed (arithmetic) shifts,
12740 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12741 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12742 else if (!TYPE_UNSIGNED (itype))
12743 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12744 arg000, build_int_cst (itype, 0));
 12745 /* Otherwise, for unsigned (logical) shifts,
12746 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12747 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12748 else
12749 return omit_one_operand_loc (loc, type,
12750 code == EQ_EXPR ? integer_one_node
12751 : integer_zero_node,
12752 arg000);
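	  /* Illustrative examples on a 32-bit int: "((x >> 3) & 4) != 0"
	     becomes "(x & 32) != 0", while "((x >> 31) & 2) != 0",
	     where the shifted mask would overflow, becomes "x < 0".  */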
12756 /* If we have (A & C) == C where C is a power of 2, convert this into
12757 (A & C) != 0. Similarly for NE_EXPR. */
12758 if (TREE_CODE (arg0) == BIT_AND_EXPR
12759 && integer_pow2p (TREE_OPERAND (arg0, 1))
12760 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12761 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12762 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12763 integer_zero_node));
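      /* For example, "(a & 8) == 8" folds to "(a & 8) != 0".  */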
12765 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12766 bit, then fold the expression into A < 0 or A >= 0. */
12767 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12768 if (tem)
12769 return tem;
12771 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12772 Similarly for NE_EXPR. */
12773 if (TREE_CODE (arg0) == BIT_AND_EXPR
12774 && TREE_CODE (arg1) == INTEGER_CST
12775 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12777 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12778 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12779 TREE_OPERAND (arg0, 1));
12780 tree dandnotc
12781 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12782 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12783 notc);
12784 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12785 if (integer_nonzerop (dandnotc))
12786 return omit_one_operand_loc (loc, type, rslt, arg0);
12789 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12790 Similarly for NE_EXPR. */
12791 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12792 && TREE_CODE (arg1) == INTEGER_CST
12793 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12795 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12796 tree candnotd
12797 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12798 TREE_OPERAND (arg0, 1),
12799 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12800 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12801 if (integer_nonzerop (candnotd))
12802 return omit_one_operand_loc (loc, type, rslt, arg0);
12805 /* If this is a comparison of a field, we may be able to simplify it. */
12806 if ((TREE_CODE (arg0) == COMPONENT_REF
12807 || TREE_CODE (arg0) == BIT_FIELD_REF)
12808 /* Handle the constant case even without -O
12809 to make sure the warnings are given. */
12810 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12812 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12813 if (t1)
12814 return t1;
12817 /* Optimize comparisons of strlen vs zero to a compare of the
12818 first character of the string vs zero. To wit,
12819 strlen(ptr) == 0 => *ptr == 0
12820 strlen(ptr) != 0 => *ptr != 0
12821 Other cases should reduce to one of these two (or a constant)
12822 due to the return value of strlen being unsigned. */
12823 if (TREE_CODE (arg0) == CALL_EXPR
12824 && integer_zerop (arg1))
12826 tree fndecl = get_callee_fndecl (arg0);
12828 if (fndecl
12829 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12830 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12831 && call_expr_nargs (arg0) == 1
12832 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12834 tree iref = build_fold_indirect_ref_loc (loc,
12835 CALL_EXPR_ARG (arg0, 0));
12836 return fold_build2_loc (loc, code, type, iref,
12837 build_int_cst (TREE_TYPE (iref), 0));
12841 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12842 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12843 if (TREE_CODE (arg0) == RSHIFT_EXPR
12844 && integer_zerop (arg1)
12845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12847 tree arg00 = TREE_OPERAND (arg0, 0);
12848 tree arg01 = TREE_OPERAND (arg0, 1);
12849 tree itype = TREE_TYPE (arg00);
12850 if (TREE_INT_CST_HIGH (arg01) == 0
12851 && TREE_INT_CST_LOW (arg01)
12852 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12854 if (TYPE_UNSIGNED (itype))
12856 itype = signed_type_for (itype);
12857 arg00 = fold_convert_loc (loc, itype, arg00);
12859 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12860 type, arg00, build_int_cst (itype, 0));
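	  /* For example, with a 32-bit int x, "(x >> 31) != 0" folds to
	     "x < 0"; an unsigned x is first converted to the
	     corresponding signed type.  */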
12864 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12865 if (integer_zerop (arg1)
12866 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12867 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12868 TREE_OPERAND (arg0, 1));
12870 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12871 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12872 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12873 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12874 build_zero_cst (TREE_TYPE (arg0)));
12875 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12876 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12877 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12878 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12879 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12880 build_zero_cst (TREE_TYPE (arg0)));
12882 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12883 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12884 && TREE_CODE (arg1) == INTEGER_CST
12885 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12886 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12887 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12888 TREE_OPERAND (arg0, 1), arg1));
12890 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12891 (X & C) == 0 when C is a single bit. */
12892 if (TREE_CODE (arg0) == BIT_AND_EXPR
12893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12894 && integer_zerop (arg1)
12895 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12897 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12898 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12899 TREE_OPERAND (arg0, 1));
12900 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12901 type, tem,
12902 fold_convert_loc (loc, TREE_TYPE (arg0),
12903 arg1));
12906 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12907 constant C is a power of two, i.e. a single bit. */
12908 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12910 && integer_zerop (arg1)
12911 && integer_pow2p (TREE_OPERAND (arg0, 1))
12912 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12913 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12915 tree arg00 = TREE_OPERAND (arg0, 0);
12916 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12917 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12920 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 12921 when C is a power of two, i.e. a single bit. */
12922 if (TREE_CODE (arg0) == BIT_AND_EXPR
12923 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12924 && integer_zerop (arg1)
12925 && integer_pow2p (TREE_OPERAND (arg0, 1))
12926 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12927 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12929 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12930 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12931 arg000, TREE_OPERAND (arg0, 1));
12932 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12933 tem, build_int_cst (TREE_TYPE (tem), 0));
12936 if (integer_zerop (arg1)
12937 && tree_expr_nonzero_p (arg0))
12939 tree res = constant_boolean_node (code==NE_EXPR, type);
12940 return omit_one_operand_loc (loc, type, res, arg0);
12943 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12944 if (TREE_CODE (arg0) == NEGATE_EXPR
12945 && TREE_CODE (arg1) == NEGATE_EXPR)
12946 return fold_build2_loc (loc, code, type,
12947 TREE_OPERAND (arg0, 0),
12948 fold_convert_loc (loc, TREE_TYPE (arg0),
12949 TREE_OPERAND (arg1, 0)));
 12951 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12952 if (TREE_CODE (arg0) == BIT_AND_EXPR
12953 && TREE_CODE (arg1) == BIT_AND_EXPR)
12955 tree arg00 = TREE_OPERAND (arg0, 0);
12956 tree arg01 = TREE_OPERAND (arg0, 1);
12957 tree arg10 = TREE_OPERAND (arg1, 0);
12958 tree arg11 = TREE_OPERAND (arg1, 1);
12959 tree itype = TREE_TYPE (arg0);
12961 if (operand_equal_p (arg01, arg11, 0))
12962 return fold_build2_loc (loc, code, type,
12963 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12964 fold_build2_loc (loc,
12965 BIT_XOR_EXPR, itype,
12966 arg00, arg10),
12967 arg01),
12968 build_zero_cst (itype));
12970 if (operand_equal_p (arg01, arg10, 0))
12971 return fold_build2_loc (loc, code, type,
12972 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12973 fold_build2_loc (loc,
12974 BIT_XOR_EXPR, itype,
12975 arg00, arg11),
12976 arg01),
12977 build_zero_cst (itype));
12979 if (operand_equal_p (arg00, arg11, 0))
12980 return fold_build2_loc (loc, code, type,
12981 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12982 fold_build2_loc (loc,
12983 BIT_XOR_EXPR, itype,
12984 arg01, arg10),
12985 arg00),
12986 build_zero_cst (itype));
12988 if (operand_equal_p (arg00, arg10, 0))
12989 return fold_build2_loc (loc, code, type,
12990 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12991 fold_build2_loc (loc,
12992 BIT_XOR_EXPR, itype,
12993 arg01, arg11),
12994 arg00),
12995 build_zero_cst (itype));
12998 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12999 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13001 tree arg00 = TREE_OPERAND (arg0, 0);
13002 tree arg01 = TREE_OPERAND (arg0, 1);
13003 tree arg10 = TREE_OPERAND (arg1, 0);
13004 tree arg11 = TREE_OPERAND (arg1, 1);
13005 tree itype = TREE_TYPE (arg0);
13007 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13008 operand_equal_p guarantees no side-effects so we don't need
13009 to use omit_one_operand on Z. */
13010 if (operand_equal_p (arg01, arg11, 0))
13011 return fold_build2_loc (loc, code, type, arg00,
13012 fold_convert_loc (loc, TREE_TYPE (arg00),
13013 arg10));
13014 if (operand_equal_p (arg01, arg10, 0))
13015 return fold_build2_loc (loc, code, type, arg00,
13016 fold_convert_loc (loc, TREE_TYPE (arg00),
13017 arg11));
13018 if (operand_equal_p (arg00, arg11, 0))
13019 return fold_build2_loc (loc, code, type, arg01,
13020 fold_convert_loc (loc, TREE_TYPE (arg01),
13021 arg10));
13022 if (operand_equal_p (arg00, arg10, 0))
13023 return fold_build2_loc (loc, code, type, arg01,
13024 fold_convert_loc (loc, TREE_TYPE (arg01),
13025 arg11));
13027 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13028 if (TREE_CODE (arg01) == INTEGER_CST
13029 && TREE_CODE (arg11) == INTEGER_CST)
13031 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13032 fold_convert_loc (loc, itype, arg11));
13033 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13034 return fold_build2_loc (loc, code, type, tem,
13035 fold_convert_loc (loc, itype, arg10));
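	  /* For example, "(x ^ 3) == (y ^ 5)" folds to "(x ^ 6) == y".  */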
13039 /* Attempt to simplify equality/inequality comparisons of complex
13040 values. Only lower the comparison if the result is known or
13041 can be simplified to a single scalar comparison. */
13042 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13043 || TREE_CODE (arg0) == COMPLEX_CST)
13044 && (TREE_CODE (arg1) == COMPLEX_EXPR
13045 || TREE_CODE (arg1) == COMPLEX_CST))
13047 tree real0, imag0, real1, imag1;
13048 tree rcond, icond;
13050 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13052 real0 = TREE_OPERAND (arg0, 0);
13053 imag0 = TREE_OPERAND (arg0, 1);
13055 else
13057 real0 = TREE_REALPART (arg0);
13058 imag0 = TREE_IMAGPART (arg0);
13061 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13063 real1 = TREE_OPERAND (arg1, 0);
13064 imag1 = TREE_OPERAND (arg1, 1);
13066 else
13068 real1 = TREE_REALPART (arg1);
13069 imag1 = TREE_IMAGPART (arg1);
13072 rcond = fold_binary_loc (loc, code, type, real0, real1);
13073 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13075 if (integer_zerop (rcond))
13077 if (code == EQ_EXPR)
13078 return omit_two_operands_loc (loc, type, boolean_false_node,
13079 imag0, imag1);
13080 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13082 else
13084 if (code == NE_EXPR)
13085 return omit_two_operands_loc (loc, type, boolean_true_node,
13086 imag0, imag1);
13087 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13091 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13092 if (icond && TREE_CODE (icond) == INTEGER_CST)
13094 if (integer_zerop (icond))
13096 if (code == EQ_EXPR)
13097 return omit_two_operands_loc (loc, type, boolean_false_node,
13098 real0, real1);
13099 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13101 else
13103 if (code == NE_EXPR)
13104 return omit_two_operands_loc (loc, type, boolean_true_node,
13105 real0, real1);
13106 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13111 return NULL_TREE;
13113 case LT_EXPR:
13114 case GT_EXPR:
13115 case LE_EXPR:
13116 case GE_EXPR:
13117 tem = fold_comparison (loc, code, type, op0, op1);
13118 if (tem != NULL_TREE)
13119 return tem;
13121 /* Transform comparisons of the form X +- C CMP X. */
13122 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13123 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13124 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13125 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13126 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13127 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13129 tree arg01 = TREE_OPERAND (arg0, 1);
13130 enum tree_code code0 = TREE_CODE (arg0);
13131 int is_positive;
13133 if (TREE_CODE (arg01) == REAL_CST)
13134 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13135 else
13136 is_positive = tree_int_cst_sgn (arg01);
13138 /* (X - c) > X becomes false. */
13139 if (code == GT_EXPR
13140 && ((code0 == MINUS_EXPR && is_positive >= 0)
13141 || (code0 == PLUS_EXPR && is_positive <= 0)))
13143 if (TREE_CODE (arg01) == INTEGER_CST
13144 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13145 fold_overflow_warning (("assuming signed overflow does not "
13146 "occur when assuming that (X - c) > X "
13147 "is always false"),
13148 WARN_STRICT_OVERFLOW_ALL);
13149 return constant_boolean_node (0, type);
13152 /* Likewise (X + c) < X becomes false. */
13153 if (code == LT_EXPR
13154 && ((code0 == PLUS_EXPR && is_positive >= 0)
13155 || (code0 == MINUS_EXPR && is_positive <= 0)))
13157 if (TREE_CODE (arg01) == INTEGER_CST
13158 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13159 fold_overflow_warning (("assuming signed overflow does not "
13160 "occur when assuming that "
13161 "(X + c) < X is always false"),
13162 WARN_STRICT_OVERFLOW_ALL);
13163 return constant_boolean_node (0, type);
13166 /* Convert (X - c) <= X to true. */
13167 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13168 && code == LE_EXPR
13169 && ((code0 == MINUS_EXPR && is_positive >= 0)
13170 || (code0 == PLUS_EXPR && is_positive <= 0)))
13172 if (TREE_CODE (arg01) == INTEGER_CST
13173 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13174 fold_overflow_warning (("assuming signed overflow does not "
13175 "occur when assuming that "
13176 "(X - c) <= X is always true"),
13177 WARN_STRICT_OVERFLOW_ALL);
13178 return constant_boolean_node (1, type);
13181 /* Convert (X + c) >= X to true. */
13182 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13183 && code == GE_EXPR
13184 && ((code0 == PLUS_EXPR && is_positive >= 0)
13185 || (code0 == MINUS_EXPR && is_positive <= 0)))
13187 if (TREE_CODE (arg01) == INTEGER_CST
13188 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13189 fold_overflow_warning (("assuming signed overflow does not "
13190 "occur when assuming that "
13191 "(X + c) >= X is always true"),
13192 WARN_STRICT_OVERFLOW_ALL);
13193 return constant_boolean_node (1, type);
13196 if (TREE_CODE (arg01) == INTEGER_CST)
13198 /* Convert X + c > X and X - c < X to true for integers. */
13199 if (code == GT_EXPR
13200 && ((code0 == PLUS_EXPR && is_positive > 0)
13201 || (code0 == MINUS_EXPR && is_positive < 0)))
13203 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13204 fold_overflow_warning (("assuming signed overflow does "
13205 "not occur when assuming that "
13206 "(X + c) > X is always true"),
13207 WARN_STRICT_OVERFLOW_ALL);
13208 return constant_boolean_node (1, type);
13211 if (code == LT_EXPR
13212 && ((code0 == MINUS_EXPR && is_positive > 0)
13213 || (code0 == PLUS_EXPR && is_positive < 0)))
13215 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13216 fold_overflow_warning (("assuming signed overflow does "
13217 "not occur when assuming that "
13218 "(X - c) < X is always true"),
13219 WARN_STRICT_OVERFLOW_ALL);
13220 return constant_boolean_node (1, type);
13223 /* Convert X + c <= X and X - c >= X to false for integers. */
13224 if (code == LE_EXPR
13225 && ((code0 == PLUS_EXPR && is_positive > 0)
13226 || (code0 == MINUS_EXPR && is_positive < 0)))
13228 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13229 fold_overflow_warning (("assuming signed overflow does "
13230 "not occur when assuming that "
13231 "(X + c) <= X is always false"),
13232 WARN_STRICT_OVERFLOW_ALL);
13233 return constant_boolean_node (0, type);
13236 if (code == GE_EXPR
13237 && ((code0 == MINUS_EXPR && is_positive > 0)
13238 || (code0 == PLUS_EXPR && is_positive < 0)))
13240 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13241 fold_overflow_warning (("assuming signed overflow does "
13242 "not occur when assuming that "
13243 "(X - c) >= X is always false"),
13244 WARN_STRICT_OVERFLOW_ALL);
13245 return constant_boolean_node (0, type);
13250 /* Comparisons with the highest or lowest possible integer of
13251 the specified precision will have known values. */
13253 tree arg1_type = TREE_TYPE (arg1);
13254 unsigned int width = TYPE_PRECISION (arg1_type);
13256 if (TREE_CODE (arg1) == INTEGER_CST
13257 && width <= HOST_BITS_PER_DOUBLE_INT
13258 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13260 HOST_WIDE_INT signed_max_hi;
13261 unsigned HOST_WIDE_INT signed_max_lo;
13262 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13264 if (width <= HOST_BITS_PER_WIDE_INT)
13266 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13267 - 1;
13268 signed_max_hi = 0;
13269 max_hi = 0;
13271 if (TYPE_UNSIGNED (arg1_type))
13273 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13274 min_lo = 0;
13275 min_hi = 0;
13277 else
13279 max_lo = signed_max_lo;
13280 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13281 min_hi = -1;
13284 else
13286 width -= HOST_BITS_PER_WIDE_INT;
13287 signed_max_lo = -1;
13288 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13289 - 1;
13290 max_lo = -1;
13291 min_lo = 0;
13293 if (TYPE_UNSIGNED (arg1_type))
13295 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13296 min_hi = 0;
13298 else
13300 max_hi = signed_max_hi;
13301 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13305 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13306 && TREE_INT_CST_LOW (arg1) == max_lo)
13307 switch (code)
13309 case GT_EXPR:
13310 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13312 case GE_EXPR:
13313 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13315 case LE_EXPR:
13316 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13318 case LT_EXPR:
13319 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13321 /* The GE_EXPR and LT_EXPR cases above are not normally
13322 reached because of previous transformations. */
13324 default:
13325 break;
13327 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13328 == max_hi
13329 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13330 switch (code)
13332 case GT_EXPR:
13333 arg1 = const_binop (PLUS_EXPR, arg1,
13334 build_int_cst (TREE_TYPE (arg1), 1));
13335 return fold_build2_loc (loc, EQ_EXPR, type,
13336 fold_convert_loc (loc,
13337 TREE_TYPE (arg1), arg0),
13338 arg1);
13339 case LE_EXPR:
13340 arg1 = const_binop (PLUS_EXPR, arg1,
13341 build_int_cst (TREE_TYPE (arg1), 1));
13342 return fold_build2_loc (loc, NE_EXPR, type,
13343 fold_convert_loc (loc, TREE_TYPE (arg1),
13344 arg0),
13345 arg1);
13346 default:
13347 break;
13349 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13350 == min_hi
13351 && TREE_INT_CST_LOW (arg1) == min_lo)
13352 switch (code)
13354 case LT_EXPR:
13355 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13357 case LE_EXPR:
13358 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13360 case GE_EXPR:
13361 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13363 case GT_EXPR:
13364 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13366 default:
13367 break;
13369 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13370 == min_hi
13371 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13372 switch (code)
13374 case GE_EXPR:
13375 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13376 return fold_build2_loc (loc, NE_EXPR, type,
13377 fold_convert_loc (loc,
13378 TREE_TYPE (arg1), arg0),
13379 arg1);
13380 case LT_EXPR:
13381 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13382 return fold_build2_loc (loc, EQ_EXPR, type,
13383 fold_convert_loc (loc, TREE_TYPE (arg1),
13384 arg0),
13385 arg1);
13386 default:
13387 break;
13390 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13391 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13392 && TYPE_UNSIGNED (arg1_type)
13393 /* We will flip the signedness of the comparison operator
13394 associated with the mode of arg1, so the sign bit is
13395 specified by this mode. Check that arg1 is the signed
13396 max associated with this sign bit. */
13397 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13398 /* signed_type does not work on pointer types. */
13399 && INTEGRAL_TYPE_P (arg1_type))
13401 /* The following case also applies to X < signed_max+1
 13402 and X >= signed_max+1 because of previous transformations. */
13403 if (code == LE_EXPR || code == GT_EXPR)
13405 tree st;
13406 st = signed_type_for (TREE_TYPE (arg1));
13407 return fold_build2_loc (loc,
13408 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13409 type, fold_convert_loc (loc, st, arg0),
13410 build_int_cst (st, 0));
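	      /* For example, with a 32-bit unsigned x, "x > 0x7fffffff"
		 folds to "(int) x < 0" and "x <= 0x7fffffff" folds to
		 "(int) x >= 0".  */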
13416 /* If we are comparing an ABS_EXPR with a constant, we can
13417 convert all the cases into explicit comparisons, but they may
13418 well not be faster than doing the ABS and one comparison.
13419 But ABS (X) <= C is a range comparison, which becomes a subtraction
13420 and a comparison, and is probably faster. */
13421 if (code == LE_EXPR
13422 && TREE_CODE (arg1) == INTEGER_CST
13423 && TREE_CODE (arg0) == ABS_EXPR
13424 && ! TREE_SIDE_EFFECTS (arg0)
13425 && (0 != (tem = negate_expr (arg1)))
13426 && TREE_CODE (tem) == INTEGER_CST
13427 && !TREE_OVERFLOW (tem))
13428 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13429 build2 (GE_EXPR, type,
13430 TREE_OPERAND (arg0, 0), tem),
13431 build2 (LE_EXPR, type,
13432 TREE_OPERAND (arg0, 0), arg1));
13434 /* Convert ABS_EXPR<x> >= 0 to true. */
13435 strict_overflow_p = false;
13436 if (code == GE_EXPR
13437 && (integer_zerop (arg1)
13438 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13439 && real_zerop (arg1)))
13440 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13442 if (strict_overflow_p)
13443 fold_overflow_warning (("assuming signed overflow does not occur "
13444 "when simplifying comparison of "
13445 "absolute value and zero"),
13446 WARN_STRICT_OVERFLOW_CONDITIONAL);
13447 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13450 /* Convert ABS_EXPR<x> < 0 to false. */
13451 strict_overflow_p = false;
13452 if (code == LT_EXPR
13453 && (integer_zerop (arg1) || real_zerop (arg1))
13454 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13456 if (strict_overflow_p)
13457 fold_overflow_warning (("assuming signed overflow does not occur "
13458 "when simplifying comparison of "
13459 "absolute value and zero"),
13460 WARN_STRICT_OVERFLOW_CONDITIONAL);
13461 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13464 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13465 and similarly for >= into !=. */
13466 if ((code == LT_EXPR || code == GE_EXPR)
13467 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13468 && TREE_CODE (arg1) == LSHIFT_EXPR
13469 && integer_onep (TREE_OPERAND (arg1, 0)))
13470 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13471 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13472 TREE_OPERAND (arg1, 1)),
13473 build_zero_cst (TREE_TYPE (arg0)));
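      /* For example, with unsigned x, "x < (1 << y)" folds to
	 "(x >> y) == 0" and "x >= (1 << y)" folds to "(x >> y) != 0".  */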
13475 if ((code == LT_EXPR || code == GE_EXPR)
13476 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13477 && CONVERT_EXPR_P (arg1)
13478 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13479 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13481 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13482 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13483 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13484 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13485 build_zero_cst (TREE_TYPE (arg0)));
13488 return NULL_TREE;
13490 case UNORDERED_EXPR:
13491 case ORDERED_EXPR:
13492 case UNLT_EXPR:
13493 case UNLE_EXPR:
13494 case UNGT_EXPR:
13495 case UNGE_EXPR:
13496 case UNEQ_EXPR:
13497 case LTGT_EXPR:
13498 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13500 t1 = fold_relational_const (code, type, arg0, arg1);
13501 if (t1 != NULL_TREE)
13502 return t1;
13505 /* If the first operand is NaN, the result is constant. */
13506 if (TREE_CODE (arg0) == REAL_CST
13507 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13508 && (code != LTGT_EXPR || ! flag_trapping_math))
13510 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13511 ? integer_zero_node
13512 : integer_one_node;
13513 return omit_one_operand_loc (loc, type, t1, arg1);
13516 /* If the second operand is NaN, the result is constant. */
13517 if (TREE_CODE (arg1) == REAL_CST
13518 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13519 && (code != LTGT_EXPR || ! flag_trapping_math))
13521 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13522 ? integer_zero_node
13523 : integer_one_node;
13524 return omit_one_operand_loc (loc, type, t1, arg0);
13527 /* Simplify unordered comparison of something with itself. */
13528 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13529 && operand_equal_p (arg0, arg1, 0))
13530 return constant_boolean_node (1, type);
13532 if (code == LTGT_EXPR
13533 && !flag_trapping_math
13534 && operand_equal_p (arg0, arg1, 0))
13535 return constant_boolean_node (0, type);
13537 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13539 tree targ0 = strip_float_extensions (arg0);
13540 tree targ1 = strip_float_extensions (arg1);
13541 tree newtype = TREE_TYPE (targ0);
13543 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13544 newtype = TREE_TYPE (targ1);
13546 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13547 return fold_build2_loc (loc, code, type,
13548 fold_convert_loc (loc, newtype, targ0),
13549 fold_convert_loc (loc, newtype, targ1));
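	/* For example, "(double) f1 < (double) f2" with float operands
	   folds to "f1 < f2"; the widening conversions are exact and so
	   cannot change the result of the comparison.  */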
13552 return NULL_TREE;
13554 case COMPOUND_EXPR:
13555 /* When pedantic, a compound expression can be neither an lvalue
13556 nor an integer constant expression. */
13557 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13558 return NULL_TREE;
 13559 /* Don't let (0, 0) be a null pointer constant. */
13560 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13561 : fold_convert_loc (loc, type, arg1);
13562 return pedantic_non_lvalue_loc (loc, tem);
13564 case COMPLEX_EXPR:
13565 if ((TREE_CODE (arg0) == REAL_CST
13566 && TREE_CODE (arg1) == REAL_CST)
13567 || (TREE_CODE (arg0) == INTEGER_CST
13568 && TREE_CODE (arg1) == INTEGER_CST))
13569 return build_complex (type, arg0, arg1);
13570 if (TREE_CODE (arg0) == REALPART_EXPR
13571 && TREE_CODE (arg1) == IMAGPART_EXPR
13572 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13573 && operand_equal_p (TREE_OPERAND (arg0, 0),
13574 TREE_OPERAND (arg1, 0), 0))
13575 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13576 TREE_OPERAND (arg1, 0));
13577 return NULL_TREE;
13579 case ASSERT_EXPR:
13580 /* An ASSERT_EXPR should never be passed to fold_binary. */
13581 gcc_unreachable ();
13583 case VEC_PACK_TRUNC_EXPR:
13584 case VEC_PACK_FIX_TRUNC_EXPR:
13586 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13587 tree *elts;
13589 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13590 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13591 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13592 return NULL_TREE;
13594 elts = XALLOCAVEC (tree, nelts);
13595 if (!vec_cst_ctor_to_array (arg0, elts)
13596 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13597 return NULL_TREE;
13599 for (i = 0; i < nelts; i++)
13601 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13602 ? NOP_EXPR : FIX_TRUNC_EXPR,
13603 TREE_TYPE (type), elts[i]);
13604 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13605 return NULL_TREE;
13608 return build_vector (type, elts);
13611 case VEC_WIDEN_MULT_LO_EXPR:
13612 case VEC_WIDEN_MULT_HI_EXPR:
13613 case VEC_WIDEN_MULT_EVEN_EXPR:
13614 case VEC_WIDEN_MULT_ODD_EXPR:
13616 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13617 unsigned int out, ofs, scale;
13618 tree *elts;
13620 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13621 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13622 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13623 return NULL_TREE;
13625 elts = XALLOCAVEC (tree, nelts * 4);
13626 if (!vec_cst_ctor_to_array (arg0, elts)
13627 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13628 return NULL_TREE;
13630 if (code == VEC_WIDEN_MULT_LO_EXPR)
13631 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13632 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13633 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13634 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13635 scale = 1, ofs = 0;
13636 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13637 scale = 1, ofs = 1;
13639 for (out = 0; out < nelts; out++)
13641 unsigned int in1 = (out << scale) + ofs;
13642 unsigned int in2 = in1 + nelts * 2;
13643 tree t1, t2;
13645 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13646 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13648 if (t1 == NULL_TREE || t2 == NULL_TREE)
13649 return NULL_TREE;
13650 elts[out] = const_binop (MULT_EXPR, t1, t2);
13651 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13652 return NULL_TREE;
13655 return build_vector (type, elts);
13658 default:
13659 return NULL_TREE;
13660 } /* switch (code) */
13663 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13664 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13665 of GOTO_EXPR. */
13667 static tree
13668 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13670 switch (TREE_CODE (*tp))
13672 case LABEL_EXPR:
13673 return *tp;
13675 case GOTO_EXPR:
13676 *walk_subtrees = 0;
13678 /* ... fall through ... */
13680 default:
13681 return NULL_TREE;
13685 /* Return whether the sub-tree ST contains a label which is accessible from
13686 outside the sub-tree. */
13688 static bool
13689 contains_label_p (tree st)
13691 return
 13692 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13695 /* Fold a ternary expression of code CODE and type TYPE with operands
13696 OP0, OP1, and OP2. Return the folded expression if folding is
13697 successful. Otherwise, return NULL_TREE. */
13699 tree
13700 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13701 tree op0, tree op1, tree op2)
13703 tree tem;
13704 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13705 enum tree_code_class kind = TREE_CODE_CLASS (code);
13707 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13708 && TREE_CODE_LENGTH (code) == 3);
13710 /* Strip any conversions that don't change the mode. This is safe
13711 for every expression, except for a comparison expression because
13712 its signedness is derived from its operands. So, in the latter
13713 case, only strip conversions that don't change the signedness.
13715 Note that this is done as an internal manipulation within the
13716 constant folder, in order to find the simplest representation of
 13717 the arguments so that their form can be studied. In any case,
13718 the appropriate type conversions should be put back in the tree
13719 that will get out of the constant folder. */
13720 if (op0)
13722 arg0 = op0;
13723 STRIP_NOPS (arg0);
13726 if (op1)
13728 arg1 = op1;
13729 STRIP_NOPS (arg1);
13732 if (op2)
13734 arg2 = op2;
13735 STRIP_NOPS (arg2);
13738 switch (code)
13740 case COMPONENT_REF:
13741 if (TREE_CODE (arg0) == CONSTRUCTOR
13742 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13744 unsigned HOST_WIDE_INT idx;
13745 tree field, value;
13746 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13747 if (field == arg1)
13748 return value;
13750 return NULL_TREE;
13752 case COND_EXPR:
13753 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13754 so all simple results must be passed through pedantic_non_lvalue. */
13755 if (TREE_CODE (arg0) == INTEGER_CST)
13757 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13758 tem = integer_zerop (arg0) ? op2 : op1;
13759 /* Only optimize constant conditions when the selected branch
13760 has the same type as the COND_EXPR. This avoids optimizing
13761 away "c ? x : throw", where the throw has a void type.
 13762 Avoid throwing away an operand that contains a label. */
13763 if ((!TREE_SIDE_EFFECTS (unused_op)
13764 || !contains_label_p (unused_op))
13765 && (! VOID_TYPE_P (TREE_TYPE (tem))
13766 || VOID_TYPE_P (type)))
13767 return pedantic_non_lvalue_loc (loc, tem);
13768 return NULL_TREE;
13770 if (operand_equal_p (arg1, op2, 0))
13771 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13773 /* If we have A op B ? A : C, we may be able to convert this to a
13774 simpler expression, depending on the operation and the values
13775 of B and C. Signed zeros prevent all of these transformations,
13776 for reasons given above each one.
13778 Also try swapping the arguments and inverting the conditional. */
13779 if (COMPARISON_CLASS_P (arg0)
13780 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13781 arg1, TREE_OPERAND (arg0, 1))
13782 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13784 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13785 if (tem)
13786 return tem;
13789 if (COMPARISON_CLASS_P (arg0)
13790 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13791 op2,
13792 TREE_OPERAND (arg0, 1))
13793 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13795 location_t loc0 = expr_location_or (arg0, loc);
13796 tem = fold_truth_not_expr (loc0, arg0);
13797 if (tem && COMPARISON_CLASS_P (tem))
13799 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13800 if (tem)
13801 return tem;
13805 /* If the second operand is simpler than the third, swap them
13806 since that produces better jump optimization results. */
13807 if (truth_value_p (TREE_CODE (arg0))
13808 && tree_swap_operands_p (op1, op2, false))
13810 location_t loc0 = expr_location_or (arg0, loc);
13811 /* See if this can be inverted. If it can't, possibly because
13812 it was a floating-point inequality comparison, don't do
13813 anything. */
13814 tem = fold_truth_not_expr (loc0, arg0);
13815 if (tem)
13816 return fold_build3_loc (loc, code, type, tem, op2, op1);
13819 /* Convert A ? 1 : 0 to simply A. */
13820 if (integer_onep (op1)
13821 && integer_zerop (op2)
13822 /* If we try to convert OP0 to our type, the
13823 call to fold will try to move the conversion inside
13824 a COND, which will recurse. In that case, the COND_EXPR
13825 is probably the best choice, so leave it alone. */
13826 && type == TREE_TYPE (arg0))
13827 return pedantic_non_lvalue_loc (loc, arg0);
13829 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13830 over COND_EXPR in cases such as floating point comparisons. */
13831 if (integer_zerop (op1)
13832 && integer_onep (op2)
13833 && truth_value_p (TREE_CODE (arg0)))
13834 return pedantic_non_lvalue_loc (loc,
13835 fold_convert_loc (loc, type,
13836 invert_truthvalue_loc (loc,
13837 arg0)));
13839 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13840 if (TREE_CODE (arg0) == LT_EXPR
13841 && integer_zerop (TREE_OPERAND (arg0, 1))
13842 && integer_zerop (op2)
13843 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13845 /* sign_bit_p only checks ARG1 bits within A's precision.
13846 If <sign bit of A> has a wider type than A, bits outside
13847 of A's precision in <sign bit of A> need to be checked.
13848 If they are all 0, this optimization needs to be done
13849 in unsigned A's type; if they are all 1, in signed A's type;
13850 otherwise this can't be done. */
13851 if (TYPE_PRECISION (TREE_TYPE (tem))
13852 < TYPE_PRECISION (TREE_TYPE (arg1))
13853 && TYPE_PRECISION (TREE_TYPE (tem))
13854 < TYPE_PRECISION (type))
13856 unsigned HOST_WIDE_INT mask_lo;
13857 HOST_WIDE_INT mask_hi;
13858 int inner_width, outer_width;
13859 tree tem_type;
13861 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13862 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13863 if (outer_width > TYPE_PRECISION (type))
13864 outer_width = TYPE_PRECISION (type);
13866 if (outer_width > HOST_BITS_PER_WIDE_INT)
13868 mask_hi = ((unsigned HOST_WIDE_INT) -1
13869 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13870 mask_lo = -1;
13872 else
13874 mask_hi = 0;
13875 mask_lo = ((unsigned HOST_WIDE_INT) -1
13876 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13878 if (inner_width > HOST_BITS_PER_WIDE_INT)
13880 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13881 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13882 mask_lo = 0;
13884 else
13885 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13886 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13888 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13889 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13891 tem_type = signed_type_for (TREE_TYPE (tem));
13892 tem = fold_convert_loc (loc, tem_type, tem);
13894 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13895 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13897 tem_type = unsigned_type_for (TREE_TYPE (tem));
13898 tem = fold_convert_loc (loc, tem_type, tem);
13900 else
13901 tem = NULL;
13904 if (tem)
13905 return
13906 fold_convert_loc (loc, type,
13907 fold_build2_loc (loc, BIT_AND_EXPR,
13908 TREE_TYPE (tem), tem,
13909 fold_convert_loc (loc,
13910 TREE_TYPE (tem),
13911 arg1)));
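	 /* Worked example (editorial): for a 32-bit signed A,

		A < 0 ? 0x80000000 : 0   folds to   A & 0x80000000

	    since testing the sign and selecting the sign-bit constant is
	    the same as masking the sign bit directly.  */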
13914 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13915 already handled above. */
13916 if (TREE_CODE (arg0) == BIT_AND_EXPR
13917 && integer_onep (TREE_OPERAND (arg0, 1))
13918 && integer_zerop (op2)
13919 && integer_pow2p (arg1))
13921 tree tem = TREE_OPERAND (arg0, 0);
13922 STRIP_NOPS (tem);
13923 if (TREE_CODE (tem) == RSHIFT_EXPR
13924 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13925 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13926 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13927 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13928 TREE_OPERAND (tem, 0), arg1);
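	 /* Worked example (editorial): with N == 3 the fold above gives

		(A >> 3) & 1 ? 8 : 0   =>   A & 8

	    because the selected constant 8 == 1 << 3 names exactly the
	    tested bit.  */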
13931 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13932 is probably obsolete because the first operand should be a
13933 truth value (that's why we have the two cases above), but let's
13934 leave it in until we can confirm this for all front-ends. */
13935 if (integer_zerop (op2)
13936 && TREE_CODE (arg0) == NE_EXPR
13937 && integer_zerop (TREE_OPERAND (arg0, 1))
13938 && integer_pow2p (arg1)
13939 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13940 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13941 arg1, OEP_ONLY_CONST))
13942 return pedantic_non_lvalue_loc (loc,
13943 fold_convert_loc (loc, type,
13944 TREE_OPERAND (arg0, 0)));
13946 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13947 if (integer_zerop (op2)
13948 && truth_value_p (TREE_CODE (arg0))
13949 && truth_value_p (TREE_CODE (arg1)))
13950 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13951 fold_convert_loc (loc, type, arg0),
13952 arg1);
13954 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13955 if (integer_onep (op2)
13956 && truth_value_p (TREE_CODE (arg0))
13957 && truth_value_p (TREE_CODE (arg1)))
13959 location_t loc0 = expr_location_or (arg0, loc);
13960 /* Only perform transformation if ARG0 is easily inverted. */
13961 tem = fold_truth_not_expr (loc0, arg0);
13962 if (tem)
13963 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13964 fold_convert_loc (loc, type, tem),
13965 arg1);
13968 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13969 if (integer_zerop (arg1)
13970 && truth_value_p (TREE_CODE (arg0))
13971 && truth_value_p (TREE_CODE (op2)))
13973 location_t loc0 = expr_location_or (arg0, loc);
13974 /* Only perform transformation if ARG0 is easily inverted. */
13975 tem = fold_truth_not_expr (loc0, arg0);
13976 if (tem)
13977 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13978 fold_convert_loc (loc, type, tem),
13979 op2);
13982 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13983 if (integer_onep (arg1)
13984 && truth_value_p (TREE_CODE (arg0))
13985 && truth_value_p (TREE_CODE (op2)))
13986 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13987 fold_convert_loc (loc, type, arg0),
13988 op2);
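	 /* Summary of the four truth-value folds above (editorial), with
	    A and B already known to be truth values:

		A ? B : 0   =>    A && B
		A ? B : 1   =>   !A || B
		A ? 0 : B   =>   !A && B
		A ? 1 : B   =>    A || B

	    The middle two fire only when A is easily inverted, e.g. not a
	    floating-point inequality whose inversion is unsafe.  */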
13990 return NULL_TREE;
13992 case CALL_EXPR:
13993 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13994 of fold_ternary on them. */
13995 gcc_unreachable ();
13997 case BIT_FIELD_REF:
13998 if ((TREE_CODE (arg0) == VECTOR_CST
13999 || (TREE_CODE (arg0) == CONSTRUCTOR
14000 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14001 && (type == TREE_TYPE (TREE_TYPE (arg0))
14002 || (TREE_CODE (type) == VECTOR_TYPE
14003 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14005 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14006 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14007 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14008 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14010 if (n != 0
14011 && (idx % width) == 0
14012 && (n % width) == 0
14013 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14015 idx = idx / width;
14016 n = n / width;
14017 if (TREE_CODE (type) == VECTOR_TYPE)
14019 if (TREE_CODE (arg0) == VECTOR_CST)
14021 tree *vals = XALLOCAVEC (tree, n);
14022 unsigned i;
14023 for (i = 0; i < n; ++i)
14024 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14025 return build_vector (type, vals);
14027 else
14029 VEC(constructor_elt, gc) *vals;
14030 unsigned i;
14031 if (CONSTRUCTOR_NELTS (arg0) == 0)
14032 return build_constructor (type, NULL);
14033 if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14034 0)->value))
14035 != VECTOR_TYPE)
14037 vals = VEC_alloc (constructor_elt, gc, n);
14038 for (i = 0;
14039 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14040 ++i)
14041 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14042 CONSTRUCTOR_ELT
14043 (arg0, idx + i)->value);
14044 return build_constructor (type, vals);
14048 else if (n == 1)
14050 if (TREE_CODE (arg0) == VECTOR_CST)
14051 return VECTOR_CST_ELT (arg0, idx);
14052 else if (CONSTRUCTOR_NELTS (arg0) == 0)
14053 return build_zero_cst (type);
14054 else if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14055 0)->value))
14056 != VECTOR_TYPE)
14058 if (idx < CONSTRUCTOR_NELTS (arg0))
14059 return CONSTRUCTOR_ELT (arg0, idx)->value;
14060 return build_zero_cst (type);
14066 /* A bit-field-ref that referenced the full argument can be stripped. */
14067 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14068 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14069 && integer_zerop (op2))
14070 return fold_convert_loc (loc, type, arg0);
14072 /* On constants we can use native encode/interpret to constant
14073 fold (nearly) all BIT_FIELD_REFs. */
14074 if (CONSTANT_CLASS_P (arg0)
14075 && can_native_interpret_type_p (type)
14076 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14077 /* This limitation should not be necessary, we just need to
14078 round this up to mode size. */
14079 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14080 /* Need bit-shifting of the buffer to relax the following. */
14081 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14083 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14084 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14085 unsigned HOST_WIDE_INT clen;
14086 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14087 /* ??? We cannot tell native_encode_expr to start at
14088 some random byte only. So limit ourselves to a reasonable
14089 amount of work. */
14090 if (clen <= 4096)
14092 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14093 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14094 if (len > 0
14095 && len * BITS_PER_UNIT >= bitpos + bitsize)
14097 tree v = native_interpret_expr (type,
14098 b + bitpos / BITS_PER_UNIT,
14099 bitsize / BITS_PER_UNIT);
14100 if (v)
14101 return v;
14106 return NULL_TREE;
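	 /* Sketch of the mechanism above (editorial): the constant is
	    serialized into a byte buffer and the addressed slice is read
	    back, so on a little-endian target something like

		BIT_FIELD_REF <0x11223344, 8, 8>   folds to   0x33

	    given the byte-aligned size and position the guards enforce.  */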
14108 case FMA_EXPR:
14109 /* For integers we can decompose the FMA if possible. */
14110 if (TREE_CODE (arg0) == INTEGER_CST
14111 && TREE_CODE (arg1) == INTEGER_CST)
14112 return fold_build2_loc (loc, PLUS_EXPR, type,
14113 const_binop (MULT_EXPR, arg0, arg1), arg2);
14114 if (integer_zerop (arg2))
14115 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14117 return fold_fma (loc, type, arg0, arg1, arg2);
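      /* Worked example (editorial): FMA (2, 3, x) with integer constants
	 folds to x + 6 via the decomposition above, and a zero addend
	 reduces FMA (a, b, 0) to a * b.  */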
14119 case VEC_PERM_EXPR:
14120 if (TREE_CODE (arg2) == VECTOR_CST)
14122 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14123 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14124 tree t;
14125 bool need_mask_canon = false;
14126 bool all_in_vec0 = true;
14127 bool all_in_vec1 = true;
14128 bool maybe_identity = true;
14129 bool single_arg = (op0 == op1);
14130 bool changed = false;
14132 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14133 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14134 for (i = 0; i < nelts; i++)
14136 tree val = VECTOR_CST_ELT (arg2, i);
14137 if (TREE_CODE (val) != INTEGER_CST)
14138 return NULL_TREE;
14140 sel[i] = TREE_INT_CST_LOW (val) & mask;
14141 if (TREE_INT_CST_HIGH (val)
14142 || ((unsigned HOST_WIDE_INT)
14143 TREE_INT_CST_LOW (val) != sel[i]))
14144 need_mask_canon = true;
14146 if (sel[i] < nelts)
14147 all_in_vec1 = false;
14148 else
14149 all_in_vec0 = false;
14151 if ((sel[i] & (nelts-1)) != i)
14152 maybe_identity = false;
14155 if (maybe_identity)
14157 if (all_in_vec0)
14158 return op0;
14159 if (all_in_vec1)
14160 return op1;
14163 if (all_in_vec0)
14164 op1 = op0;
14165 else if (all_in_vec1)
14167 op0 = op1;
14168 for (i = 0; i < nelts; i++)
14169 sel[i] -= nelts;
14170 need_mask_canon = true;
14173 if ((TREE_CODE (op0) == VECTOR_CST
14174 || TREE_CODE (op0) == CONSTRUCTOR)
14175 && (TREE_CODE (op1) == VECTOR_CST
14176 || TREE_CODE (op1) == CONSTRUCTOR))
14178 t = fold_vec_perm (type, op0, op1, sel);
14179 if (t != NULL_TREE)
14180 return t;
14183 if (op0 == op1 && !single_arg)
14184 changed = true;
14186 if (need_mask_canon && arg2 == op2)
14188 tree *tsel = XALLOCAVEC (tree, nelts);
14189 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14190 for (i = 0; i < nelts; i++)
14191 tsel[i] = build_int_cst (eltype, sel[i]);
14192 op2 = build_vector (TREE_TYPE (arg2), tsel);
14193 changed = true;
14196 if (changed)
14197 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14199 return NULL_TREE;
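	 /* Worked example (editorial): for 4-element vectors with
	    op0 == op1, a selector of { 4, 5, 6, 7 } is reduced modulo the
	    element count to the identity selection { 0, 1, 2, 3 }, so the
	    whole VEC_PERM_EXPR folds to op0.  */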
14201 default:
14202 return NULL_TREE;
14203 } /* switch (code) */
14206 /* Perform constant folding and related simplification of EXPR.
14207 The related simplifications include x*1 => x, x*0 => 0, etc.,
14208 and application of the associative law.
14209 NOP_EXPR conversions may be removed freely (as long as we
14210 are careful not to change the type of the overall expression).
14211 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14212 but we can constant-fold them if they have constant operands. */
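/* Usage sketch (editorial): a caller typically builds a tree and folds
   it in place, e.g. for an integral TYPE

	tree sum = fold (build2 (PLUS_EXPR, type, x, integer_zero_node));

   hands back X itself rather than a PLUS_EXPR node.  */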
14214 #ifdef ENABLE_FOLD_CHECKING
14215 # define fold(x) fold_1 (x)
14216 static tree fold_1 (tree);
14217 static
14218 #endif
14219 tree
14220 fold (tree expr)
14222 const tree t = expr;
14223 enum tree_code code = TREE_CODE (t);
14224 enum tree_code_class kind = TREE_CODE_CLASS (code);
14225 tree tem;
14226 location_t loc = EXPR_LOCATION (expr);
14228 /* Return right away if a constant. */
14229 if (kind == tcc_constant)
14230 return t;
14232 /* CALL_EXPR-like objects with variable numbers of operands are
14233 treated specially. */
14234 if (kind == tcc_vl_exp)
14236 if (code == CALL_EXPR)
14238 tem = fold_call_expr (loc, expr, false);
14239 return tem ? tem : expr;
14241 return expr;
14244 if (IS_EXPR_CODE_CLASS (kind))
14246 tree type = TREE_TYPE (t);
14247 tree op0, op1, op2;
14249 switch (TREE_CODE_LENGTH (code))
14251 case 1:
14252 op0 = TREE_OPERAND (t, 0);
14253 tem = fold_unary_loc (loc, code, type, op0);
14254 return tem ? tem : expr;
14255 case 2:
14256 op0 = TREE_OPERAND (t, 0);
14257 op1 = TREE_OPERAND (t, 1);
14258 tem = fold_binary_loc (loc, code, type, op0, op1);
14259 return tem ? tem : expr;
14260 case 3:
14261 op0 = TREE_OPERAND (t, 0);
14262 op1 = TREE_OPERAND (t, 1);
14263 op2 = TREE_OPERAND (t, 2);
14264 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14265 return tem ? tem : expr;
14266 default:
14267 break;
14271 switch (code)
14273 case ARRAY_REF:
14275 tree op0 = TREE_OPERAND (t, 0);
14276 tree op1 = TREE_OPERAND (t, 1);
14278 if (TREE_CODE (op1) == INTEGER_CST
14279 && TREE_CODE (op0) == CONSTRUCTOR
14280 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14282 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14283 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14284 unsigned HOST_WIDE_INT begin = 0;
14286 /* Find a matching index by means of a binary search. */
14287 while (begin != end)
14289 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14290 tree index = VEC_index (constructor_elt, elts, middle).index;
14292 if (TREE_CODE (index) == INTEGER_CST
14293 && tree_int_cst_lt (index, op1))
14294 begin = middle + 1;
14295 else if (TREE_CODE (index) == INTEGER_CST
14296 && tree_int_cst_lt (op1, index))
14297 end = middle;
14298 else if (TREE_CODE (index) == RANGE_EXPR
14299 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14300 begin = middle + 1;
14301 else if (TREE_CODE (index) == RANGE_EXPR
14302 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14303 end = middle;
14304 else
14305 return VEC_index (constructor_elt, elts, middle).value;
14309 return t;
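	 /* Editorial example: the binary search above relies on the
	    CONSTRUCTOR indices being sorted, so folding a[2] for

		static const int a[4] = { 0, 10, 20, 30 };

	    locates the element in O(log n) and yields 20.  */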
14312 case CONST_DECL:
14313 return fold (DECL_INITIAL (t));
14315 default:
14316 return t;
14317 } /* switch (code) */
14320 #ifdef ENABLE_FOLD_CHECKING
14321 #undef fold
14323 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14324 static void fold_check_failed (const_tree, const_tree);
14325 void print_fold_checksum (const_tree);
14327 /* When configured with --enable-checking=fold, compute a digest of
14328 EXPR before and after the actual fold call, to verify that fold
14329 did not accidentally change the original expr. */
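/* (Editorial sketch: the digest below is an MD5 over the serialized
   tree nodes; fields that fold may legitimately mutate, such as cached
   type values and DECL_ASSEMBLER_NAME, are masked out by
   fold_checksum_tree before hashing.)  */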
14331 tree
14332 fold (tree expr)
14334 tree ret;
14335 struct md5_ctx ctx;
14336 unsigned char checksum_before[16], checksum_after[16];
14337 htab_t ht;
14339 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14340 md5_init_ctx (&ctx);
14341 fold_checksum_tree (expr, &ctx, ht);
14342 md5_finish_ctx (&ctx, checksum_before);
14343 htab_empty (ht);
14345 ret = fold_1 (expr);
14347 md5_init_ctx (&ctx);
14348 fold_checksum_tree (expr, &ctx, ht);
14349 md5_finish_ctx (&ctx, checksum_after);
14350 htab_delete (ht);
14352 if (memcmp (checksum_before, checksum_after, 16))
14353 fold_check_failed (expr, ret);
14355 return ret;
14358 void
14359 print_fold_checksum (const_tree expr)
14361 struct md5_ctx ctx;
14362 unsigned char checksum[16], cnt;
14363 htab_t ht;
14365 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14366 md5_init_ctx (&ctx);
14367 fold_checksum_tree (expr, &ctx, ht);
14368 md5_finish_ctx (&ctx, checksum);
14369 htab_delete (ht);
14370 for (cnt = 0; cnt < 16; ++cnt)
14371 fprintf (stderr, "%02x", checksum[cnt]);
14372 putc ('\n', stderr);
14375 static void
14376 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14378 internal_error ("fold check: original tree changed by fold");
14381 static void
14382 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14384 void **slot;
14385 enum tree_code code;
14386 union tree_node buf;
14387 int i, len;
14389 recursive_label:
14390 if (expr == NULL)
14391 return;
14392 slot = (void **) htab_find_slot (ht, expr, INSERT);
14393 if (*slot != NULL)
14394 return;
14395 *slot = CONST_CAST_TREE (expr);
14396 code = TREE_CODE (expr);
14397 if (TREE_CODE_CLASS (code) == tcc_declaration
14398 && DECL_ASSEMBLER_NAME_SET_P (expr))
14400 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14401 memcpy ((char *) &buf, expr, tree_size (expr));
14402 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14403 expr = (tree) &buf;
14405 else if (TREE_CODE_CLASS (code) == tcc_type
14406 && (TYPE_POINTER_TO (expr)
14407 || TYPE_REFERENCE_TO (expr)
14408 || TYPE_CACHED_VALUES_P (expr)
14409 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14410 || TYPE_NEXT_VARIANT (expr)))
14412 /* Allow these fields to be modified. */
14413 tree tmp;
14414 memcpy ((char *) &buf, expr, tree_size (expr));
14415 expr = tmp = (tree) &buf;
14416 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14417 TYPE_POINTER_TO (tmp) = NULL;
14418 TYPE_REFERENCE_TO (tmp) = NULL;
14419 TYPE_NEXT_VARIANT (tmp) = NULL;
14420 if (TYPE_CACHED_VALUES_P (tmp))
14422 TYPE_CACHED_VALUES_P (tmp) = 0;
14423 TYPE_CACHED_VALUES (tmp) = NULL;
14426 md5_process_bytes (expr, tree_size (expr), ctx);
14427 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14428 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14429 if (TREE_CODE_CLASS (code) != tcc_type
14430 && TREE_CODE_CLASS (code) != tcc_declaration
14431 && code != TREE_LIST
14432 && code != SSA_NAME
14433 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14434 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14435 switch (TREE_CODE_CLASS (code))
14437 case tcc_constant:
14438 switch (code)
14440 case STRING_CST:
14441 md5_process_bytes (TREE_STRING_POINTER (expr),
14442 TREE_STRING_LENGTH (expr), ctx);
14443 break;
14444 case COMPLEX_CST:
14445 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14446 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14447 break;
14448 case VECTOR_CST:
14449 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14450 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14451 break;
14452 default:
14453 break;
14455 break;
14456 case tcc_exceptional:
14457 switch (code)
14459 case TREE_LIST:
14460 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14461 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14462 expr = TREE_CHAIN (expr);
14463 goto recursive_label;
14464 break;
14465 case TREE_VEC:
14466 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14467 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14468 break;
14469 default:
14470 break;
14472 break;
14473 case tcc_expression:
14474 case tcc_reference:
14475 case tcc_comparison:
14476 case tcc_unary:
14477 case tcc_binary:
14478 case tcc_statement:
14479 case tcc_vl_exp:
14480 len = TREE_OPERAND_LENGTH (expr);
14481 for (i = 0; i < len; ++i)
14482 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14483 break;
14484 case tcc_declaration:
14485 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14486 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14487 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14489 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14490 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14491 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14492 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14493 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14495 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14496 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14498 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14500 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14501 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14502 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14504 break;
14505 case tcc_type:
14506 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14507 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14508 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14509 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14510 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14511 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14512 if (INTEGRAL_TYPE_P (expr)
14513 || SCALAR_FLOAT_TYPE_P (expr))
14515 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14516 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14518 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14519 if (TREE_CODE (expr) == RECORD_TYPE
14520 || TREE_CODE (expr) == UNION_TYPE
14521 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14522 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14523 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14524 break;
14525 default:
14526 break;
14530 /* Helper function for outputting the checksum of a tree T. When
14531 debugging with gdb, you can "define mynext" to be "next" followed
14532 by "call debug_fold_checksum (op0)", then just trace down till the
14533 outputs differ. */
14535 DEBUG_FUNCTION void
14536 debug_fold_checksum (const_tree t)
14538 int i;
14539 unsigned char checksum[16];
14540 struct md5_ctx ctx;
14541 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14543 md5_init_ctx (&ctx);
14544 fold_checksum_tree (t, &ctx, ht);
14545 md5_finish_ctx (&ctx, checksum);
14546 htab_empty (ht);
14548 for (i = 0; i < 16; i++)
14549 fprintf (stderr, "%d ", checksum[i]);
14551 fprintf (stderr, "\n");
14554 #endif
14556 /* Fold a unary tree expression with code CODE of type TYPE with an
14557 operand OP0. LOC is the location of the resulting expression.
14558 Return a folded expression if successful. Otherwise, return a tree
14559 expression with code CODE of type TYPE with an operand OP0. */
14561 tree
14562 fold_build1_stat_loc (location_t loc,
14563 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14565 tree tem;
14566 #ifdef ENABLE_FOLD_CHECKING
14567 unsigned char checksum_before[16], checksum_after[16];
14568 struct md5_ctx ctx;
14569 htab_t ht;
14571 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14572 md5_init_ctx (&ctx);
14573 fold_checksum_tree (op0, &ctx, ht);
14574 md5_finish_ctx (&ctx, checksum_before);
14575 htab_empty (ht);
14576 #endif
14578 tem = fold_unary_loc (loc, code, type, op0);
14579 if (!tem)
14580 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14582 #ifdef ENABLE_FOLD_CHECKING
14583 md5_init_ctx (&ctx);
14584 fold_checksum_tree (op0, &ctx, ht);
14585 md5_finish_ctx (&ctx, checksum_after);
14586 htab_delete (ht);
14588 if (memcmp (checksum_before, checksum_after, 16))
14589 fold_check_failed (op0, tem);
14590 #endif
14591 return tem;
14594 /* Fold a binary tree expression with code CODE of type TYPE with
14595 operands OP0 and OP1. LOC is the location of the resulting
14596 expression. Return a folded expression if successful. Otherwise,
14597 return a tree expression with code CODE of type TYPE with operands
14598 OP0 and OP1. */
14600 tree
14601 fold_build2_stat_loc (location_t loc,
14602 enum tree_code code, tree type, tree op0, tree op1
14603 MEM_STAT_DECL)
14605 tree tem;
14606 #ifdef ENABLE_FOLD_CHECKING
14607 unsigned char checksum_before_op0[16],
14608 checksum_before_op1[16],
14609 checksum_after_op0[16],
14610 checksum_after_op1[16];
14611 struct md5_ctx ctx;
14612 htab_t ht;
14614 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14615 md5_init_ctx (&ctx);
14616 fold_checksum_tree (op0, &ctx, ht);
14617 md5_finish_ctx (&ctx, checksum_before_op0);
14618 htab_empty (ht);
14620 md5_init_ctx (&ctx);
14621 fold_checksum_tree (op1, &ctx, ht);
14622 md5_finish_ctx (&ctx, checksum_before_op1);
14623 htab_empty (ht);
14624 #endif
14626 tem = fold_binary_loc (loc, code, type, op0, op1);
14627 if (!tem)
14628 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14630 #ifdef ENABLE_FOLD_CHECKING
14631 md5_init_ctx (&ctx);
14632 fold_checksum_tree (op0, &ctx, ht);
14633 md5_finish_ctx (&ctx, checksum_after_op0);
14634 htab_empty (ht);
14636 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14637 fold_check_failed (op0, tem);
14639 md5_init_ctx (&ctx);
14640 fold_checksum_tree (op1, &ctx, ht);
14641 md5_finish_ctx (&ctx, checksum_after_op1);
14642 htab_delete (ht);
14644 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14645 fold_check_failed (op1, tem);
14646 #endif
14647 return tem;
14650 /* Fold a ternary tree expression with code CODE of type TYPE with
14651 operands OP0, OP1, and OP2. Return a folded expression if
14652 successful. Otherwise, return a tree expression with code CODE of
14653 type TYPE with operands OP0, OP1, and OP2. */
14655 tree
14656 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14657 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14659 tree tem;
14660 #ifdef ENABLE_FOLD_CHECKING
14661 unsigned char checksum_before_op0[16],
14662 checksum_before_op1[16],
14663 checksum_before_op2[16],
14664 checksum_after_op0[16],
14665 checksum_after_op1[16],
14666 checksum_after_op2[16];
14667 struct md5_ctx ctx;
14668 htab_t ht;
14670 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14671 md5_init_ctx (&ctx);
14672 fold_checksum_tree (op0, &ctx, ht);
14673 md5_finish_ctx (&ctx, checksum_before_op0);
14674 htab_empty (ht);
14676 md5_init_ctx (&ctx);
14677 fold_checksum_tree (op1, &ctx, ht);
14678 md5_finish_ctx (&ctx, checksum_before_op1);
14679 htab_empty (ht);
14681 md5_init_ctx (&ctx);
14682 fold_checksum_tree (op2, &ctx, ht);
14683 md5_finish_ctx (&ctx, checksum_before_op2);
14684 htab_empty (ht);
14685 #endif
14687 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14688 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14689 if (!tem)
14690 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14692 #ifdef ENABLE_FOLD_CHECKING
14693 md5_init_ctx (&ctx);
14694 fold_checksum_tree (op0, &ctx, ht);
14695 md5_finish_ctx (&ctx, checksum_after_op0);
14696 htab_empty (ht);
14698 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14699 fold_check_failed (op0, tem);
14701 md5_init_ctx (&ctx);
14702 fold_checksum_tree (op1, &ctx, ht);
14703 md5_finish_ctx (&ctx, checksum_after_op1);
14704 htab_empty (ht);
14706 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14707 fold_check_failed (op1, tem);
14709 md5_init_ctx (&ctx);
14710 fold_checksum_tree (op2, &ctx, ht);
14711 md5_finish_ctx (&ctx, checksum_after_op2);
14712 htab_delete (ht);
14714 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14715 fold_check_failed (op2, tem);
14716 #endif
14717 return tem;
14720 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14721 arguments in ARGARRAY, and a null static chain.
14722 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14723 of type TYPE from the given operands as constructed by build_call_array. */
14725 tree
14726 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14727 int nargs, tree *argarray)
14729 tree tem;
14730 #ifdef ENABLE_FOLD_CHECKING
14731 unsigned char checksum_before_fn[16],
14732 checksum_before_arglist[16],
14733 checksum_after_fn[16],
14734 checksum_after_arglist[16];
14735 struct md5_ctx ctx;
14736 htab_t ht;
14737 int i;
14739 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14740 md5_init_ctx (&ctx);
14741 fold_checksum_tree (fn, &ctx, ht);
14742 md5_finish_ctx (&ctx, checksum_before_fn);
14743 htab_empty (ht);
14745 md5_init_ctx (&ctx);
14746 for (i = 0; i < nargs; i++)
14747 fold_checksum_tree (argarray[i], &ctx, ht);
14748 md5_finish_ctx (&ctx, checksum_before_arglist);
14749 htab_empty (ht);
14750 #endif
14752 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14754 #ifdef ENABLE_FOLD_CHECKING
14755 md5_init_ctx (&ctx);
14756 fold_checksum_tree (fn, &ctx, ht);
14757 md5_finish_ctx (&ctx, checksum_after_fn);
14758 htab_empty (ht);
14760 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14761 fold_check_failed (fn, tem);
14763 md5_init_ctx (&ctx);
14764 for (i = 0; i < nargs; i++)
14765 fold_checksum_tree (argarray[i], &ctx, ht);
14766 md5_finish_ctx (&ctx, checksum_after_arglist);
14767 htab_delete (ht);
14769 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14770 fold_check_failed (NULL_TREE, tem);
14771 #endif
14772 return tem;
14775 /* Perform constant folding and related simplification of initializer
14776 expression EXPR. These behave identically to "fold_buildN" but ignore
14777 potential run-time traps and exceptions that fold must preserve. */
14779 #define START_FOLD_INIT \
14780 int saved_signaling_nans = flag_signaling_nans;\
14781 int saved_trapping_math = flag_trapping_math;\
14782 int saved_rounding_math = flag_rounding_math;\
14783 int saved_trapv = flag_trapv;\
14784 int saved_folding_initializer = folding_initializer;\
14785 flag_signaling_nans = 0;\
14786 flag_trapping_math = 0;\
14787 flag_rounding_math = 0;\
14788 flag_trapv = 0;\
14789 folding_initializer = 1;
14791 #define END_FOLD_INIT \
14792 flag_signaling_nans = saved_signaling_nans;\
14793 flag_trapping_math = saved_trapping_math;\
14794 flag_rounding_math = saved_rounding_math;\
14795 flag_trapv = saved_trapv;\
14796 folding_initializer = saved_folding_initializer;
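/* Usage sketch (editorial): the initializer variants below simply
   bracket an ordinary fold_buildN call with these macros, so

	tree t = fold_build2_initializer_loc (loc, PLUS_EXPR, type, a, b);

   behaves like fold_build2_loc but with flag_trapping_math and
   friends temporarily cleared.  */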
14798 tree
14799 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14800 tree type, tree op)
14802 tree result;
14803 START_FOLD_INIT;
14805 result = fold_build1_loc (loc, code, type, op);
14807 END_FOLD_INIT;
14808 return result;
14811 tree
14812 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14813 tree type, tree op0, tree op1)
14815 tree result;
14816 START_FOLD_INIT;
14818 result = fold_build2_loc (loc, code, type, op0, op1);
14820 END_FOLD_INIT;
14821 return result;
14824 tree
14825 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14826 tree type, tree op0, tree op1, tree op2)
14828 tree result;
14829 START_FOLD_INIT;
14831 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14833 END_FOLD_INIT;
14834 return result;
14837 tree
14838 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14839 int nargs, tree *argarray)
14841 tree result;
14842 START_FOLD_INIT;
14844 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14846 END_FOLD_INIT;
14847 return result;
14850 #undef START_FOLD_INIT
14851 #undef END_FOLD_INIT
14853 /* Determine whether the first argument is a multiple of the second.
14854 Return 0 if it is not, or if we cannot easily determine it to be.
14856 An example of the sort of thing we care about (at this point; this routine
14857 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14858 fold cases do now) is discovering that
14860 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14862 is a multiple of
14864 SAVE_EXPR (J * 8)
14866 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14868 This code also handles discovering that
14870 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14872 is a multiple of 8, so we don't have to worry about a
14873 possible remainder.
14875 Note that we *look* inside a SAVE_EXPR only to determine how it was
14876 calculated; it is not safe for fold to do much of anything else with the
14877 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14878 at run time. For example, the latter example above *cannot* be implemented
14879 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14880 evaluation time of the original SAVE_EXPR is not necessarily the same at
14881 the time the new expression is evaluated. The only optimization of this
14882 sort that would be valid is changing
14884 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14886 divided by 8 to
14888 SAVE_EXPR (I) * SAVE_EXPR (J)
14890 (where the same SAVE_EXPR (J) is used in the original and the
14891 transformed version). */
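/* Worked examples (editorial): multiple_of_p (type, J * 8, 8) returns 1
   through the MULT_EXPR case since one factor suffices, while a
   PLUS_EXPR requires both operands to be multiples, so I + 8 is not
   known to be a multiple of 8.  */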
14893 int
14894 multiple_of_p (tree type, const_tree top, const_tree bottom)
14896 if (operand_equal_p (top, bottom, 0))
14897 return 1;
14899 if (TREE_CODE (type) != INTEGER_TYPE)
14900 return 0;
14902 switch (TREE_CODE (top))
14904 case BIT_AND_EXPR:
14905 /* Bitwise and provides a power of two multiple. If the mask is
14906 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14907 if (!integer_pow2p (bottom))
14908 return 0;
14909 /* FALLTHRU */
14911 case MULT_EXPR:
14912 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14913 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14915 case PLUS_EXPR:
14916 case MINUS_EXPR:
14917 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14918 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14920 case LSHIFT_EXPR:
14921 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14923 tree op1, t1;
14925 op1 = TREE_OPERAND (top, 1);
14926 /* const_binop may not detect overflow correctly,
14927 so check for it explicitly here. */
14928 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14929 > TREE_INT_CST_LOW (op1)
14930 && TREE_INT_CST_HIGH (op1) == 0
14931 && 0 != (t1 = fold_convert (type,
14932 const_binop (LSHIFT_EXPR,
14933 size_one_node,
14934 op1)))
14935 && !TREE_OVERFLOW (t1))
14936 return multiple_of_p (type, t1, bottom);
14938 return 0;
14940 case NOP_EXPR:
14941 /* Can't handle conversions from non-integral or wider integral type. */
14942 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14943 || (TYPE_PRECISION (type)
14944 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14945 return 0;
14947 /* ... fall through ... */
14949 case SAVE_EXPR:
14950 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14952 case COND_EXPR:
14953 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14954 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14956 case INTEGER_CST:
14957 if (TREE_CODE (bottom) != INTEGER_CST
14958 || integer_zerop (bottom)
14959 || (TYPE_UNSIGNED (type)
14960 && (tree_int_cst_sgn (top) < 0
14961 || tree_int_cst_sgn (bottom) < 0)))
14962 return 0;
14963 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14964 top, bottom));
14966 default:
14967 return 0;
14971 /* Return true if CODE or TYPE is known to be non-negative. */
14973 static bool
14974 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14976 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14977 && truth_value_p (code))
14978 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14979 have a signed:1 type (where the values are -1 and 0). */
14980 return true;
14981 return false;
14984 /* Return true if (CODE OP0) is known to be non-negative. If the return
14985 value is based on the assumption that signed overflow is undefined,
14986 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14987 *STRICT_OVERFLOW_P. */
14989 bool
14990 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14991 bool *strict_overflow_p)
14993 if (TYPE_UNSIGNED (type))
14994 return true;
14996 switch (code)
14998 case ABS_EXPR:
14999 /* We can't return 1 if flag_wrapv is set because
15000 ABS_EXPR<INT_MIN> = INT_MIN. */
15001 if (!INTEGRAL_TYPE_P (type))
15002 return true;
15003 if (TYPE_OVERFLOW_UNDEFINED (type))
15005 *strict_overflow_p = true;
15006 return true;
15008 break;
15010 case NON_LVALUE_EXPR:
15011 case FLOAT_EXPR:
15012 case FIX_TRUNC_EXPR:
15013 return tree_expr_nonnegative_warnv_p (op0,
15014 strict_overflow_p);
15016 case NOP_EXPR:
15018 tree inner_type = TREE_TYPE (op0);
15019 tree outer_type = type;
15021 if (TREE_CODE (outer_type) == REAL_TYPE)
15023 if (TREE_CODE (inner_type) == REAL_TYPE)
15024 return tree_expr_nonnegative_warnv_p (op0,
15025 strict_overflow_p);
15026 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15028 if (TYPE_UNSIGNED (inner_type))
15029 return true;
15030 return tree_expr_nonnegative_warnv_p (op0,
15031 strict_overflow_p);
15034 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15036 if (TREE_CODE (inner_type) == REAL_TYPE)
15037 return tree_expr_nonnegative_warnv_p (op0,
15038 strict_overflow_p);
15039 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15040 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15041 && TYPE_UNSIGNED (inner_type);
15044 break;
15046 default:
15047 return tree_simple_nonnegative_warnv_p (code, type);
15050 /* We don't know sign of `t', so be conservative and return false. */
15051 return false;
15054 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15055 value is based on the assumption that signed overflow is undefined,
15056 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15057 *STRICT_OVERFLOW_P. */
15059 bool
15060 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15061 tree op1, bool *strict_overflow_p)
15063 if (TYPE_UNSIGNED (type))
15064 return true;
15066 switch (code)
15068 case POINTER_PLUS_EXPR:
15069 case PLUS_EXPR:
15070 if (FLOAT_TYPE_P (type))
15071 return (tree_expr_nonnegative_warnv_p (op0,
15072 strict_overflow_p)
15073 && tree_expr_nonnegative_warnv_p (op1,
15074 strict_overflow_p));
15076 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15077 both unsigned and at least 2 bits shorter than the result. */
15078 if (TREE_CODE (type) == INTEGER_TYPE
15079 && TREE_CODE (op0) == NOP_EXPR
15080 && TREE_CODE (op1) == NOP_EXPR)
15082 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15083 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15084 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15085 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15087 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15088 TYPE_PRECISION (inner2)) + 1;
15089 return prec < TYPE_PRECISION (type);
15092 break;
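      /* Worked example (editorial): for 8-bit unsigned x and y widened
	 to 32-bit int, prec == 8 + 1 == 9 < 32, so (int) x + (int) y is
	 at most 255 + 255 == 510 and provably non-negative.  */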
15094 case MULT_EXPR:
15095 if (FLOAT_TYPE_P (type))
15097 /* x * x for floating point x is always non-negative. */
15098 if (operand_equal_p (op0, op1, 0))
15099 return true;
15100 return (tree_expr_nonnegative_warnv_p (op0,
15101 strict_overflow_p)
15102 && tree_expr_nonnegative_warnv_p (op1,
15103 strict_overflow_p));
15106 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15107 both unsigned and their combined precision is less than that of the result. */
15108 if (TREE_CODE (type) == INTEGER_TYPE
15109 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15110 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15112 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15113 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15114 : TREE_TYPE (op0);
15115 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15116 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15117 : TREE_TYPE (op1);
15119 bool unsigned0 = TYPE_UNSIGNED (inner0);
15120 bool unsigned1 = TYPE_UNSIGNED (inner1);
15122 if (TREE_CODE (op0) == INTEGER_CST)
15123 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15125 if (TREE_CODE (op1) == INTEGER_CST)
15126 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15128 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15129 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15131 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15132 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15133 : TYPE_PRECISION (inner0);
15135 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15136 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15137 : TYPE_PRECISION (inner1);
15139 return precision0 + precision1 < TYPE_PRECISION (type);
15142 return false;
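      /* Worked example (editorial): for 8-bit unsigned x and y widened
	 to 32-bit int, precision0 + precision1 == 16 < 32, so
	 (int) x * (int) y is at most 255 * 255 == 65025 and cannot reach
	 the sign bit.  */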
15144 case BIT_AND_EXPR:
15145 case MAX_EXPR:
15146 return (tree_expr_nonnegative_warnv_p (op0,
15147 strict_overflow_p)
15148 || tree_expr_nonnegative_warnv_p (op1,
15149 strict_overflow_p));
15151 case BIT_IOR_EXPR:
15152 case BIT_XOR_EXPR:
15153 case MIN_EXPR:
15154 case RDIV_EXPR:
15155 case TRUNC_DIV_EXPR:
15156 case CEIL_DIV_EXPR:
15157 case FLOOR_DIV_EXPR:
15158 case ROUND_DIV_EXPR:
15159 return (tree_expr_nonnegative_warnv_p (op0,
15160 strict_overflow_p)
15161 && tree_expr_nonnegative_warnv_p (op1,
15162 strict_overflow_p));
15164 case TRUNC_MOD_EXPR:
15165 case CEIL_MOD_EXPR:
15166 case FLOOR_MOD_EXPR:
15167 case ROUND_MOD_EXPR:
15168 return tree_expr_nonnegative_warnv_p (op0,
15169 strict_overflow_p);
15170 default:
15171 return tree_simple_nonnegative_warnv_p (code, type);
15174 /* We don't know sign of `t', so be conservative and return false. */
15175 return false;
15178 /* Return true if T is known to be non-negative. If the return
15179 value is based on the assumption that signed overflow is undefined,
15180 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15181 *STRICT_OVERFLOW_P. */
15183 bool
15184 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15186 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15187 return true;
15189 switch (TREE_CODE (t))
15191 case INTEGER_CST:
15192 return tree_int_cst_sgn (t) >= 0;
15194 case REAL_CST:
15195 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15197 case FIXED_CST:
15198 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15200 case COND_EXPR:
15201 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15202 strict_overflow_p)
15203 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15204 strict_overflow_p));
15205 default:
15206 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15207 TREE_TYPE (t));
15209 /* We don't know sign of `t', so be conservative and return false. */
15210 return false;
15213 /* Return true if T is known to be non-negative. If the return
15214 value is based on the assumption that signed overflow is undefined,
15215 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15216 *STRICT_OVERFLOW_P. */
15218 bool
15219 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15220 tree arg0, tree arg1, bool *strict_overflow_p)
15222 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15223 switch (DECL_FUNCTION_CODE (fndecl))
15225 CASE_FLT_FN (BUILT_IN_ACOS):
15226 CASE_FLT_FN (BUILT_IN_ACOSH):
15227 CASE_FLT_FN (BUILT_IN_CABS):
15228 CASE_FLT_FN (BUILT_IN_COSH):
15229 CASE_FLT_FN (BUILT_IN_ERFC):
15230 CASE_FLT_FN (BUILT_IN_EXP):
15231 CASE_FLT_FN (BUILT_IN_EXP10):
15232 CASE_FLT_FN (BUILT_IN_EXP2):
15233 CASE_FLT_FN (BUILT_IN_FABS):
15234 CASE_FLT_FN (BUILT_IN_FDIM):
15235 CASE_FLT_FN (BUILT_IN_HYPOT):
15236 CASE_FLT_FN (BUILT_IN_POW10):
15237 CASE_INT_FN (BUILT_IN_FFS):
15238 CASE_INT_FN (BUILT_IN_PARITY):
15239 CASE_INT_FN (BUILT_IN_POPCOUNT):
15240 case BUILT_IN_BSWAP32:
15241 case BUILT_IN_BSWAP64:
15242 /* Always true. */
15243 return true;
15245 CASE_FLT_FN (BUILT_IN_SQRT):
15246 /* sqrt(-0.0) is -0.0. */
15247 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15248 return true;
15249 return tree_expr_nonnegative_warnv_p (arg0,
15250 strict_overflow_p);
15252 CASE_FLT_FN (BUILT_IN_ASINH):
15253 CASE_FLT_FN (BUILT_IN_ATAN):
15254 CASE_FLT_FN (BUILT_IN_ATANH):
15255 CASE_FLT_FN (BUILT_IN_CBRT):
15256 CASE_FLT_FN (BUILT_IN_CEIL):
15257 CASE_FLT_FN (BUILT_IN_ERF):
15258 CASE_FLT_FN (BUILT_IN_EXPM1):
15259 CASE_FLT_FN (BUILT_IN_FLOOR):
15260 CASE_FLT_FN (BUILT_IN_FMOD):
15261 CASE_FLT_FN (BUILT_IN_FREXP):
15262 CASE_FLT_FN (BUILT_IN_ICEIL):
15263 CASE_FLT_FN (BUILT_IN_IFLOOR):
15264 CASE_FLT_FN (BUILT_IN_IRINT):
15265 CASE_FLT_FN (BUILT_IN_IROUND):
15266 CASE_FLT_FN (BUILT_IN_LCEIL):
15267 CASE_FLT_FN (BUILT_IN_LDEXP):
15268 CASE_FLT_FN (BUILT_IN_LFLOOR):
15269 CASE_FLT_FN (BUILT_IN_LLCEIL):
15270 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15271 CASE_FLT_FN (BUILT_IN_LLRINT):
15272 CASE_FLT_FN (BUILT_IN_LLROUND):
15273 CASE_FLT_FN (BUILT_IN_LRINT):
15274 CASE_FLT_FN (BUILT_IN_LROUND):
15275 CASE_FLT_FN (BUILT_IN_MODF):
15276 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15277 CASE_FLT_FN (BUILT_IN_RINT):
15278 CASE_FLT_FN (BUILT_IN_ROUND):
15279 CASE_FLT_FN (BUILT_IN_SCALB):
15280 CASE_FLT_FN (BUILT_IN_SCALBLN):
15281 CASE_FLT_FN (BUILT_IN_SCALBN):
15282 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15283 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15284 CASE_FLT_FN (BUILT_IN_SINH):
15285 CASE_FLT_FN (BUILT_IN_TANH):
15286 CASE_FLT_FN (BUILT_IN_TRUNC):
15287 /* True if the 1st argument is nonnegative. */
15288 return tree_expr_nonnegative_warnv_p (arg0,
15289 strict_overflow_p);
15291 CASE_FLT_FN (BUILT_IN_FMAX):
15292 /* True if the 1st OR 2nd arguments are nonnegative. */
15293 return (tree_expr_nonnegative_warnv_p (arg0,
15294 strict_overflow_p)
15295 || (tree_expr_nonnegative_warnv_p (arg1,
15296 strict_overflow_p)));
15298 CASE_FLT_FN (BUILT_IN_FMIN):
15299 /* True if the 1st AND 2nd arguments are nonnegative. */
15300 return (tree_expr_nonnegative_warnv_p (arg0,
15301 strict_overflow_p)
15302 && (tree_expr_nonnegative_warnv_p (arg1,
15303 strict_overflow_p)));
15305 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15306 /* True if the 2nd argument is nonnegative. */
15307 return tree_expr_nonnegative_warnv_p (arg1,
15308 strict_overflow_p);
15310 CASE_FLT_FN (BUILT_IN_POWI):
15311 /* True if the 1st argument is nonnegative or the second
15312 argument is an even integer. */
15313 if (TREE_CODE (arg1) == INTEGER_CST
15314 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15315 return true;
15316 return tree_expr_nonnegative_warnv_p (arg0,
15317 strict_overflow_p);
15319 CASE_FLT_FN (BUILT_IN_POW):
15320 /* True if the 1st argument is nonnegative or the second
15321 argument is an even integer valued real. */
15322 if (TREE_CODE (arg1) == REAL_CST)
15324 REAL_VALUE_TYPE c;
15325 HOST_WIDE_INT n;
15327 c = TREE_REAL_CST (arg1);
15328 n = real_to_integer (&c);
15329 if ((n & 1) == 0)
15331 REAL_VALUE_TYPE cint;
15332 real_from_integer (&cint, VOIDmode, n,
15333 n < 0 ? -1 : 0, 0);
15334 if (real_identical (&c, &cint))
15335 return true;
15338 return tree_expr_nonnegative_warnv_p (arg0,
15339 strict_overflow_p);
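      /* Worked example (editorial): pow (x, 2.0) is treated as
	 non-negative for any x because the exponent is an even
	 integer-valued real, whereas pow (x, 3.0) is non-negative only
	 when x itself is.  */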
15341 default:
15342 break;
15344 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15345 type);
15348 /* Return true if T is known to be non-negative. If the return
15349 value is based on the assumption that signed overflow is undefined,
15350 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15351 *STRICT_OVERFLOW_P. */
15353 bool
15354 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15356 enum tree_code code = TREE_CODE (t);
15357 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15358 return true;
15360 switch (code)
15362 case TARGET_EXPR:
15364 tree temp = TARGET_EXPR_SLOT (t);
15365 t = TARGET_EXPR_INITIAL (t);
15367 /* If the initializer is non-void, then it's a normal expression
15368 that will be assigned to the slot. */
15369 if (!VOID_TYPE_P (t))
15370 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15372 /* Otherwise, the initializer sets the slot in some way. One common
15373 way is an assignment statement at the end of the initializer. */
15374 while (1)
15376 if (TREE_CODE (t) == BIND_EXPR)
15377 t = expr_last (BIND_EXPR_BODY (t));
15378 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15379 || TREE_CODE (t) == TRY_CATCH_EXPR)
15380 t = expr_last (TREE_OPERAND (t, 0));
15381 else if (TREE_CODE (t) == STATEMENT_LIST)
15382 t = expr_last (t);
15383 else
15384 break;
15386 if (TREE_CODE (t) == MODIFY_EXPR
15387 && TREE_OPERAND (t, 0) == temp)
15388 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15389 strict_overflow_p);
15391 return false;
15394 case CALL_EXPR:
15396 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15397 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15399 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15400 get_callee_fndecl (t),
15401 arg0,
15402 arg1,
15403 strict_overflow_p);
15405 case COMPOUND_EXPR:
15406 case MODIFY_EXPR:
15407 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15408 strict_overflow_p);
15409 case BIND_EXPR:
15410 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15411 strict_overflow_p);
15412 case SAVE_EXPR:
15413 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15414 strict_overflow_p);
15416 default:
15417 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15418 TREE_TYPE (t));
15421 /* We don't know sign of `t', so be conservative and return false. */
15422 return false;
15425 /* Return true if T is known to be non-negative. If the return
15426 value is based on the assumption that signed overflow is undefined,
15427 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15428 *STRICT_OVERFLOW_P. */
15430 bool
15431 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15433 enum tree_code code;
15434 if (t == error_mark_node)
15435 return false;
15437 code = TREE_CODE (t);
15438 switch (TREE_CODE_CLASS (code))
15440 case tcc_binary:
15441 case tcc_comparison:
15442 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15443 TREE_TYPE (t),
15444 TREE_OPERAND (t, 0),
15445 TREE_OPERAND (t, 1),
15446 strict_overflow_p);
15448 case tcc_unary:
15449 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15450 TREE_TYPE (t),
15451 TREE_OPERAND (t, 0),
15452 strict_overflow_p);
15454 case tcc_constant:
15455 case tcc_declaration:
15456 case tcc_reference:
15457 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15459 default:
15460 break;
15463 switch (code)
15465 case TRUTH_AND_EXPR:
15466 case TRUTH_OR_EXPR:
15467 case TRUTH_XOR_EXPR:
15468 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15469 TREE_TYPE (t),
15470 TREE_OPERAND (t, 0),
15471 TREE_OPERAND (t, 1),
15472 strict_overflow_p);
15473 case TRUTH_NOT_EXPR:
15474 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15475 TREE_TYPE (t),
15476 TREE_OPERAND (t, 0),
15477 strict_overflow_p);
15479 case COND_EXPR:
15480 case CONSTRUCTOR:
15481 case OBJ_TYPE_REF:
15482 case ASSERT_EXPR:
15483 case ADDR_EXPR:
15484 case WITH_SIZE_EXPR:
15485 case SSA_NAME:
15486 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15488 default:
15489 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15493 /* Return true if `t' is known to be non-negative. Handle warnings
15494 about undefined signed overflow. */
15496 bool
15497 tree_expr_nonnegative_p (tree t)
15499 bool ret, strict_overflow_p;
15501 strict_overflow_p = false;
15502 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15503 if (strict_overflow_p)
15504 fold_overflow_warning (("assuming signed overflow does not occur when "
15505 "determining that expression is always "
15506 "non-negative"),
15507 WARN_STRICT_OVERFLOW_MISC);
15508 return ret;
15512 /* Return true when (CODE OP0) is known to be nonzero.
15513 For floating point we further ensure that it is not denormal.
15514 Similar logic is present in nonzero_address_p in rtlanal.c.
15516 If the return value is based on the assumption that signed overflow
15517 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15518 change *STRICT_OVERFLOW_P. */
15520 bool
15521 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15522 bool *strict_overflow_p)
15524 switch (code)
15526 case ABS_EXPR:
15527 return tree_expr_nonzero_warnv_p (op0,
15528 strict_overflow_p);
15530 case NOP_EXPR:
15532 tree inner_type = TREE_TYPE (op0);
15533 tree outer_type = type;
15535 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15536 && tree_expr_nonzero_warnv_p (op0,
15537 strict_overflow_p));
15539 break;
15541 case NON_LVALUE_EXPR:
15542 return tree_expr_nonzero_warnv_p (op0,
15543 strict_overflow_p);
15545 default:
15546 break;
15549 return false;
15552 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15553 For floating point we further ensure that the result is not denormal.
15554 Similar logic is present in nonzero_address_p in rtlanal.c.
15556 If the return value is based on the assumption that signed overflow
15557 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15558 change *STRICT_OVERFLOW_P. */
15560 bool
15561 tree_binary_nonzero_warnv_p (enum tree_code code,
15562 tree type,
15563 tree op0,
15564 tree op1, bool *strict_overflow_p)
15566 bool sub_strict_overflow_p;
15567 switch (code)
15569 case POINTER_PLUS_EXPR:
15570 case PLUS_EXPR:
15571 if (TYPE_OVERFLOW_UNDEFINED (type))
15573 /* When negative values may be present it is hard
15574 to say anything. */
15575 sub_strict_overflow_p = false;
15576 if (!tree_expr_nonnegative_warnv_p (op0,
15577 &sub_strict_overflow_p)
15578 || !tree_expr_nonnegative_warnv_p (op1,
15579 &sub_strict_overflow_p))
15580 return false;
15581 /* One of the operands must be positive and the other non-negative. */
15582 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15583 overflows, on a twos-complement machine the sum of two
15584 nonnegative numbers can never be zero. */
15585 return (tree_expr_nonzero_warnv_p (op0,
15586 strict_overflow_p)
15587 || tree_expr_nonzero_warnv_p (op1,
15588 strict_overflow_p));
15590 break;
15592 case MULT_EXPR:
15593 if (TYPE_OVERFLOW_UNDEFINED (type))
15595 if (tree_expr_nonzero_warnv_p (op0,
15596 strict_overflow_p)
15597 && tree_expr_nonzero_warnv_p (op1,
15598 strict_overflow_p))
15600 *strict_overflow_p = true;
15601 return true;
15604 break;
15606 case MIN_EXPR:
15607 sub_strict_overflow_p = false;
15608 if (tree_expr_nonzero_warnv_p (op0,
15609 &sub_strict_overflow_p)
15610 && tree_expr_nonzero_warnv_p (op1,
15611 &sub_strict_overflow_p))
15613 if (sub_strict_overflow_p)
15614 *strict_overflow_p = true;
15616 break;
15618 case MAX_EXPR:
15619 sub_strict_overflow_p = false;
15620 if (tree_expr_nonzero_warnv_p (op0,
15621 &sub_strict_overflow_p))
15623 if (sub_strict_overflow_p)
15624 *strict_overflow_p = true;
15626 /* When both operands are nonzero, then MAX must be too. */
15627 if (tree_expr_nonzero_warnv_p (op1,
15628 strict_overflow_p))
15629 return true;
15631 /* MAX where operand 0 is positive is positive. */
15632 return tree_expr_nonnegative_warnv_p (op0,
15633 strict_overflow_p);
15635 /* MAX where operand 1 is positive is positive. */
15636 else if (tree_expr_nonzero_warnv_p (op1,
15637 &sub_strict_overflow_p)
15638 && tree_expr_nonnegative_warnv_p (op1,
15639 &sub_strict_overflow_p))
15641 if (sub_strict_overflow_p)
15642 *strict_overflow_p = true;
15643 return true;
15645 break;
15647 case BIT_IOR_EXPR:
15648 return (tree_expr_nonzero_warnv_p (op1,
15649 strict_overflow_p)
15650 || tree_expr_nonzero_warnv_p (op0,
15651 strict_overflow_p));
15653 default:
15654 break;
15657 return false;
15660 /* Return true when T is an address and is known to be nonzero.
15661 For floating point we further ensure that T is not denormal.
15662 Similar logic is present in nonzero_address_p in rtlanal.c.
15664 If the return value is based on the assumption that signed overflow
15665 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15666 change *STRICT_OVERFLOW_P. */
15668 bool
15669 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15671 bool sub_strict_overflow_p;
15672 switch (TREE_CODE (t))
15674 case INTEGER_CST:
15675 return !integer_zerop (t);
15677 case ADDR_EXPR:
15679 tree base = TREE_OPERAND (t, 0);
15680 if (!DECL_P (base))
15681 base = get_base_address (base);
15683 if (!base)
15684 return false;
15686 /* Weak declarations may link to NULL. Other things may also be NULL
15687 so protect with -fdelete-null-pointer-checks; but not variables
15688 allocated on the stack. */
15689 if (DECL_P (base)
15690 && (flag_delete_null_pointer_checks
15691 || (DECL_CONTEXT (base)
15692 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15693 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15694 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15696 /* Constants are never weak. */
15697 if (CONSTANT_CLASS_P (base))
15698 return true;
15700 return false;
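	 /* Worked example (editorial): &local_var is always nonzero since
	    stack objects cannot sit at address zero; &global_var is
	    nonzero only under -fdelete-null-pointer-checks and when the
	    declaration is not weak; a string literal's address is nonzero
	    because constants are never weak.  */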
15703 case COND_EXPR:
15704 sub_strict_overflow_p = false;
15705 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15706 &sub_strict_overflow_p)
15707 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15708 &sub_strict_overflow_p))
15710 if (sub_strict_overflow_p)
15711 *strict_overflow_p = true;
15712 return true;
15714 break;
15716 default:
15717 break;
15719 return false;
15722 /* Return true when T is an address and is known to be nonzero.
15723 For floating point we further ensure that T is not denormal.
15724 Similar logic is present in nonzero_address_p in rtlanal.c.
15726 If the return value is based on the assumption that signed overflow
15727 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15728 change *STRICT_OVERFLOW_P. */
15730 bool
15731 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15733 tree type = TREE_TYPE (t);
15734 enum tree_code code;
15736 /* Doing something useful for floating point would need more work. */
15737 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15738 return false;
15740 code = TREE_CODE (t);
15741 switch (TREE_CODE_CLASS (code))
15743 case tcc_unary:
15744 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15745 strict_overflow_p);
15746 case tcc_binary:
15747 case tcc_comparison:
15748 return tree_binary_nonzero_warnv_p (code, type,
15749 TREE_OPERAND (t, 0),
15750 TREE_OPERAND (t, 1),
15751 strict_overflow_p);
15752 case tcc_constant:
15753 case tcc_declaration:
15754 case tcc_reference:
15755 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15757 default:
15758 break;
15761 switch (code)
15763 case TRUTH_NOT_EXPR:
15764 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15765 strict_overflow_p);
15767 case TRUTH_AND_EXPR:
15768 case TRUTH_OR_EXPR:
15769 case TRUTH_XOR_EXPR:
15770 return tree_binary_nonzero_warnv_p (code, type,
15771 TREE_OPERAND (t, 0),
15772 TREE_OPERAND (t, 1),
15773 strict_overflow_p);
15775 case COND_EXPR:
15776 case CONSTRUCTOR:
15777 case OBJ_TYPE_REF:
15778 case ASSERT_EXPR:
15779 case ADDR_EXPR:
15780 case WITH_SIZE_EXPR:
15781 case SSA_NAME:
15782 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15784 case COMPOUND_EXPR:
15785 case MODIFY_EXPR:
15786 case BIND_EXPR:
15787 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15788 strict_overflow_p);
15790 case SAVE_EXPR:
15791 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15792 strict_overflow_p);
15794 case CALL_EXPR:
15795 return alloca_call_p (t);
15797 default:
15798 break;
15800 return false;
15803 /* Return true when T is an address and is known to be nonzero.
15804 Handle warnings about undefined signed overflow. */
15806 bool
15807 tree_expr_nonzero_p (tree t)
15809 bool ret, strict_overflow_p;
15811 strict_overflow_p = false;
15812 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15813 if (strict_overflow_p)
15814 fold_overflow_warning (("assuming signed overflow does not occur when "
15815 "determining that expression is always "
15816 "non-zero"),
15817 WARN_STRICT_OVERFLOW_MISC);
15818 return ret;
15821 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15822 attempt to fold the expression to a constant without modifying TYPE,
15823 OP0 or OP1.
15825 If the expression could be simplified to a constant, then return
15826 the constant. If the expression would not be simplified to a
15827 constant, then return NULL_TREE. */
15829 tree
15830 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15832 tree tem = fold_binary (code, type, op0, op1);
15833 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15836 /* Given the components of a unary expression CODE, TYPE and OP0,
15837 attempt to fold the expression to a constant without modifying
15838 TYPE or OP0.
15840 If the expression could be simplified to a constant, then return
15841 the constant. If the expression would not be simplified to a
15842 constant, then return NULL_TREE. */
15844 tree
15845 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15847 tree tem = fold_unary (code, type, op0);
15848 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15851 /* If EXP represents referencing an element in a constant string
15852 (either via pointer arithmetic or array indexing), return the
15853 tree representing the value accessed, otherwise return NULL. */
15855 tree
15856 fold_read_from_constant_string (tree exp)
15858 if ((TREE_CODE (exp) == INDIRECT_REF
15859 || TREE_CODE (exp) == ARRAY_REF)
15860 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15862 tree exp1 = TREE_OPERAND (exp, 0);
15863 tree index;
15864 tree string;
15865 location_t loc = EXPR_LOCATION (exp);
15867 if (TREE_CODE (exp) == INDIRECT_REF)
15868 string = string_constant (exp1, &index);
15869 else
15871 tree low_bound = array_ref_low_bound (exp);
15872 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15874 /* Optimize the special-case of a zero lower bound.
15876 We convert the low_bound to sizetype to avoid some problems
15877 with constant folding. (E.g. suppose the lower bound is 1,
15878 and its mode is QI. Without the conversion,l (ARRAY
15879 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15880 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15881 if (! integer_zerop (low_bound))
15882 index = size_diffop_loc (loc, index,
15883 fold_convert_loc (loc, sizetype, low_bound));
15885 string = exp1;
15888 if (string
15889 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15890 && TREE_CODE (string) == STRING_CST
15891 && TREE_CODE (index) == INTEGER_CST
15892 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15893 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15894 == MODE_INT)
15895 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15896 return build_int_cst_type (TREE_TYPE (exp),
15897 (TREE_STRING_POINTER (string)
15898 [TREE_INT_CST_LOW (index)]));
15900 return NULL;
15903 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15904 an integer constant, real, or fixed-point constant.
15906 TYPE is the type of the result. */
15908 static tree
15909 fold_negate_const (tree arg0, tree type)
15911 tree t = NULL_TREE;
15913 switch (TREE_CODE (arg0))
15915 case INTEGER_CST:
15917 double_int val = tree_to_double_int (arg0);
15918 bool overflow;
15919 val = val.neg_with_overflow (&overflow);
15920 t = force_fit_type_double (type, val, 1,
15921 (overflow | TREE_OVERFLOW (arg0))
15922 && !TYPE_UNSIGNED (type));
15923 break;
15926 case REAL_CST:
15927 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15928 break;
15930 case FIXED_CST:
15932 FIXED_VALUE_TYPE f;
15933 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15934 &(TREE_FIXED_CST (arg0)), NULL,
15935 TYPE_SATURATING (type));
15936 t = build_fixed (type, f);
15937 /* Propagate overflow flags. */
15938 if (overflow_p | TREE_OVERFLOW (arg0))
15939 TREE_OVERFLOW (t) = 1;
15940 break;
15943 default:
15944 gcc_unreachable ();
15947 return t;
15950 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15951 an integer constant or real constant.
15953 TYPE is the type of the result. */
15955 tree
15956 fold_abs_const (tree arg0, tree type)
15958 tree t = NULL_TREE;
15960 switch (TREE_CODE (arg0))
15962 case INTEGER_CST:
15964 double_int val = tree_to_double_int (arg0);
15966 /* If the value is unsigned or non-negative, then the absolute value
15967 is the same as the ordinary value. */
15968 if (TYPE_UNSIGNED (type)
15969 || !val.is_negative ())
15970 t = arg0;
15972 /* If the value is negative, then the absolute value is
15973 its negation. */
15974 else
15976 bool overflow;
15977 val = val.neg_with_overflow (&overflow);
15978 t = force_fit_type_double (type, val, -1,
15979 overflow | TREE_OVERFLOW (arg0));
15982 break;
15984 case REAL_CST:
15985 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15986 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15987 else
15988 t = arg0;
15989 break;
15991 default:
15992 gcc_unreachable ();
15995 return t;
15998 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15999 constant. TYPE is the type of the result. */
16001 static tree
16002 fold_not_const (const_tree arg0, tree type)
16004 double_int val;
16006 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16008 val = ~tree_to_double_int (arg0);
16009 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16012 /* Given CODE, a relational operator, the target type, TYPE and two
16013 constant operands OP0 and OP1, return the result of the
16014 relational operation. If the result is not a compile time
16015 constant, then return NULL_TREE. */
16017 static tree
16018 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16020 int result, invert;
16022 /* From here on, the only cases we handle are when the result is
16023 known to be a constant. */
16025 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16027 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16028 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16030 /* Handle the cases where either operand is a NaN. */
16031 if (real_isnan (c0) || real_isnan (c1))
16033 switch (code)
16035 case EQ_EXPR:
16036 case ORDERED_EXPR:
16037 result = 0;
16038 break;
16040 case NE_EXPR:
16041 case UNORDERED_EXPR:
16042 case UNLT_EXPR:
16043 case UNLE_EXPR:
16044 case UNGT_EXPR:
16045 case UNGE_EXPR:
16046 case UNEQ_EXPR:
16047 result = 1;
16048 break;
16050 case LT_EXPR:
16051 case LE_EXPR:
16052 case GT_EXPR:
16053 case GE_EXPR:
16054 case LTGT_EXPR:
16055 if (flag_trapping_math)
16056 return NULL_TREE;
16057 result = 0;
16058 break;
16060 default:
16061 gcc_unreachable ();
16064 return constant_boolean_node (result, type);
16067 return constant_boolean_node (real_compare (code, c0, c1), type);
16070 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16072 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16073 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16074 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16077 /* Handle equality/inequality of complex constants. */
16078 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16080 tree rcond = fold_relational_const (code, type,
16081 TREE_REALPART (op0),
16082 TREE_REALPART (op1));
16083 tree icond = fold_relational_const (code, type,
16084 TREE_IMAGPART (op0),
16085 TREE_IMAGPART (op1));
16086 if (code == EQ_EXPR)
16087 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16088 else if (code == NE_EXPR)
16089 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16090 else
16091 return NULL_TREE;
16094 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16096 To compute GT, swap the arguments and do LT.
16097 To compute GE, do LT and invert the result.
16098 To compute LE, swap the arguments, do LT and invert the result.
16099 To compute NE, do EQ and invert the result.
16101 Therefore, the code below must handle only EQ and LT. */
16103 if (code == LE_EXPR || code == GT_EXPR)
16105 tree tem = op0;
16106 op0 = op1;
16107 op1 = tem;
16108 code = swap_tree_comparison (code);
16111 /* Note that it is safe to invert for real values here because we
16112 have already handled the one case that it matters. */
16114 invert = 0;
16115 if (code == NE_EXPR || code == GE_EXPR)
16117 invert = 1;
16118 code = invert_tree_comparison (code, false);
16121 /* Compute a result for LT or EQ if args permit;
16122 Otherwise return T. */
16123 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16125 if (code == EQ_EXPR)
16126 result = tree_int_cst_equal (op0, op1);
16127 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16128 result = INT_CST_LT_UNSIGNED (op0, op1);
16129 else
16130 result = INT_CST_LT (op0, op1);
16132 else
16133 return NULL_TREE;
16135 if (invert)
16136 result ^= 1;
16137 return constant_boolean_node (result, type);
16140 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16141 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16142 itself. */
16144 tree
16145 fold_build_cleanup_point_expr (tree type, tree expr)
16147 /* If the expression does not have side effects then we don't have to wrap
16148 it with a cleanup point expression. */
16149 if (!TREE_SIDE_EFFECTS (expr))
16150 return expr;
16152 /* If the expression is a return, check to see if the expression inside the
16153 return has no side effects or the right hand side of the modify expression
16154 inside the return. If either don't have side effects set we don't need to
16155 wrap the expression in a cleanup point expression. Note we don't check the
16156 left hand side of the modify because it should always be a return decl. */
16157 if (TREE_CODE (expr) == RETURN_EXPR)
16159 tree op = TREE_OPERAND (expr, 0);
16160 if (!op || !TREE_SIDE_EFFECTS (op))
16161 return expr;
16162 op = TREE_OPERAND (op, 1);
16163 if (!TREE_SIDE_EFFECTS (op))
16164 return expr;
16167 return build1 (CLEANUP_POINT_EXPR, type, expr);
16170 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16171 of an indirection through OP0, or NULL_TREE if no simplification is
16172 possible. */
16174 tree
16175 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16177 tree sub = op0;
16178 tree subtype;
16180 STRIP_NOPS (sub);
16181 subtype = TREE_TYPE (sub);
16182 if (!POINTER_TYPE_P (subtype))
16183 return NULL_TREE;
16185 if (TREE_CODE (sub) == ADDR_EXPR)
16187 tree op = TREE_OPERAND (sub, 0);
16188 tree optype = TREE_TYPE (op);
16189 /* *&CONST_DECL -> to the value of the const decl. */
16190 if (TREE_CODE (op) == CONST_DECL)
16191 return DECL_INITIAL (op);
16192 /* *&p => p; make sure to handle *&"str"[cst] here. */
16193 if (type == optype)
16195 tree fop = fold_read_from_constant_string (op);
16196 if (fop)
16197 return fop;
16198 else
16199 return op;
16201 /* *(foo *)&fooarray => fooarray[0] */
16202 else if (TREE_CODE (optype) == ARRAY_TYPE
16203 && type == TREE_TYPE (optype)
16204 && (!in_gimple_form
16205 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16207 tree type_domain = TYPE_DOMAIN (optype);
16208 tree min_val = size_zero_node;
16209 if (type_domain && TYPE_MIN_VALUE (type_domain))
16210 min_val = TYPE_MIN_VALUE (type_domain);
16211 if (in_gimple_form
16212 && TREE_CODE (min_val) != INTEGER_CST)
16213 return NULL_TREE;
16214 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16215 NULL_TREE, NULL_TREE);
16217 /* *(foo *)&complexfoo => __real__ complexfoo */
16218 else if (TREE_CODE (optype) == COMPLEX_TYPE
16219 && type == TREE_TYPE (optype))
16220 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16221 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16222 else if (TREE_CODE (optype) == VECTOR_TYPE
16223 && type == TREE_TYPE (optype))
16225 tree part_width = TYPE_SIZE (type);
16226 tree index = bitsize_int (0);
16227 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16231 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16232 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16234 tree op00 = TREE_OPERAND (sub, 0);
16235 tree op01 = TREE_OPERAND (sub, 1);
16237 STRIP_NOPS (op00);
16238 if (TREE_CODE (op00) == ADDR_EXPR)
16240 tree op00type;
16241 op00 = TREE_OPERAND (op00, 0);
16242 op00type = TREE_TYPE (op00);
16244 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16245 if (TREE_CODE (op00type) == VECTOR_TYPE
16246 && type == TREE_TYPE (op00type))
16248 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16249 tree part_width = TYPE_SIZE (type);
16250 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16251 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16252 tree index = bitsize_int (indexi);
16254 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16255 return fold_build3_loc (loc,
16256 BIT_FIELD_REF, type, op00,
16257 part_width, index);
16260 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16261 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16262 && type == TREE_TYPE (op00type))
16264 tree size = TYPE_SIZE_UNIT (type);
16265 if (tree_int_cst_equal (size, op01))
16266 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16268 /* ((foo *)&fooarray)[1] => fooarray[1] */
16269 else if (TREE_CODE (op00type) == ARRAY_TYPE
16270 && type == TREE_TYPE (op00type))
16272 tree type_domain = TYPE_DOMAIN (op00type);
16273 tree min_val = size_zero_node;
16274 if (type_domain && TYPE_MIN_VALUE (type_domain))
16275 min_val = TYPE_MIN_VALUE (type_domain);
16276 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16277 TYPE_SIZE_UNIT (type));
16278 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16279 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16280 NULL_TREE, NULL_TREE);
16285 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16286 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16287 && type == TREE_TYPE (TREE_TYPE (subtype))
16288 && (!in_gimple_form
16289 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16291 tree type_domain;
16292 tree min_val = size_zero_node;
16293 sub = build_fold_indirect_ref_loc (loc, sub);
16294 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16295 if (type_domain && TYPE_MIN_VALUE (type_domain))
16296 min_val = TYPE_MIN_VALUE (type_domain);
16297 if (in_gimple_form
16298 && TREE_CODE (min_val) != INTEGER_CST)
16299 return NULL_TREE;
16300 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16301 NULL_TREE);
16304 return NULL_TREE;
16307 /* Builds an expression for an indirection through T, simplifying some
16308 cases. */
16310 tree
16311 build_fold_indirect_ref_loc (location_t loc, tree t)
16313 tree type = TREE_TYPE (TREE_TYPE (t));
16314 tree sub = fold_indirect_ref_1 (loc, type, t);
16316 if (sub)
16317 return sub;
16319 return build1_loc (loc, INDIRECT_REF, type, t);
16322 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16324 tree
16325 fold_indirect_ref_loc (location_t loc, tree t)
16327 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16329 if (sub)
16330 return sub;
16331 else
16332 return t;
16335 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16336 whose result is ignored. The type of the returned tree need not be
16337 the same as the original expression. */
16339 tree
16340 fold_ignored_result (tree t)
16342 if (!TREE_SIDE_EFFECTS (t))
16343 return integer_zero_node;
16345 for (;;)
16346 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16348 case tcc_unary:
16349 t = TREE_OPERAND (t, 0);
16350 break;
16352 case tcc_binary:
16353 case tcc_comparison:
16354 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16355 t = TREE_OPERAND (t, 0);
16356 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16357 t = TREE_OPERAND (t, 1);
16358 else
16359 return t;
16360 break;
16362 case tcc_expression:
16363 switch (TREE_CODE (t))
16365 case COMPOUND_EXPR:
16366 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16367 return t;
16368 t = TREE_OPERAND (t, 0);
16369 break;
16371 case COND_EXPR:
16372 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16373 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16374 return t;
16375 t = TREE_OPERAND (t, 0);
16376 break;
16378 default:
16379 return t;
16381 break;
16383 default:
16384 return t;
16388 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16389 This can only be applied to objects of a sizetype. */
16391 tree
16392 round_up_loc (location_t loc, tree value, int divisor)
16394 tree div = NULL_TREE;
16396 gcc_assert (divisor > 0);
16397 if (divisor == 1)
16398 return value;
16400 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16401 have to do anything. Only do this when we are not given a const,
16402 because in that case, this check is more expensive than just
16403 doing it. */
16404 if (TREE_CODE (value) != INTEGER_CST)
16406 div = build_int_cst (TREE_TYPE (value), divisor);
16408 if (multiple_of_p (TREE_TYPE (value), value, div))
16409 return value;
16412 /* If divisor is a power of two, simplify this to bit manipulation. */
16413 if (divisor == (divisor & -divisor))
16415 if (TREE_CODE (value) == INTEGER_CST)
16417 double_int val = tree_to_double_int (value);
16418 bool overflow_p;
16420 if ((val.low & (divisor - 1)) == 0)
16421 return value;
16423 overflow_p = TREE_OVERFLOW (value);
16424 val.low &= ~(divisor - 1);
16425 val.low += divisor;
16426 if (val.low == 0)
16428 val.high++;
16429 if (val.high == 0)
16430 overflow_p = true;
16433 return force_fit_type_double (TREE_TYPE (value), val,
16434 -1, overflow_p);
16436 else
16438 tree t;
16440 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16441 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16442 t = build_int_cst (TREE_TYPE (value), -divisor);
16443 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16446 else
16448 if (!div)
16449 div = build_int_cst (TREE_TYPE (value), divisor);
16450 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16451 value = size_binop_loc (loc, MULT_EXPR, value, div);
16454 return value;
16457 /* Likewise, but round down. */
16459 tree
16460 round_down_loc (location_t loc, tree value, int divisor)
16462 tree div = NULL_TREE;
16464 gcc_assert (divisor > 0);
16465 if (divisor == 1)
16466 return value;
16468 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16469 have to do anything. Only do this when we are not given a const,
16470 because in that case, this check is more expensive than just
16471 doing it. */
16472 if (TREE_CODE (value) != INTEGER_CST)
16474 div = build_int_cst (TREE_TYPE (value), divisor);
16476 if (multiple_of_p (TREE_TYPE (value), value, div))
16477 return value;
16480 /* If divisor is a power of two, simplify this to bit manipulation. */
16481 if (divisor == (divisor & -divisor))
16483 tree t;
16485 t = build_int_cst (TREE_TYPE (value), -divisor);
16486 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16488 else
16490 if (!div)
16491 div = build_int_cst (TREE_TYPE (value), divisor);
16492 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16493 value = size_binop_loc (loc, MULT_EXPR, value, div);
16496 return value;
16499 /* Returns the pointer to the base of the object addressed by EXP and
16500 extracts the information about the offset of the access, storing it
16501 to PBITPOS and POFFSET. */
16503 static tree
16504 split_address_to_core_and_offset (tree exp,
16505 HOST_WIDE_INT *pbitpos, tree *poffset)
16507 tree core;
16508 enum machine_mode mode;
16509 int unsignedp, volatilep;
16510 HOST_WIDE_INT bitsize;
16511 location_t loc = EXPR_LOCATION (exp);
16513 if (TREE_CODE (exp) == ADDR_EXPR)
16515 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16516 poffset, &mode, &unsignedp, &volatilep,
16517 false);
16518 core = build_fold_addr_expr_loc (loc, core);
16520 else
16522 core = exp;
16523 *pbitpos = 0;
16524 *poffset = NULL_TREE;
16527 return core;
16530 /* Returns true if addresses of E1 and E2 differ by a constant, false
16531 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16533 bool
16534 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16536 tree core1, core2;
16537 HOST_WIDE_INT bitpos1, bitpos2;
16538 tree toffset1, toffset2, tdiff, type;
16540 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16541 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16543 if (bitpos1 % BITS_PER_UNIT != 0
16544 || bitpos2 % BITS_PER_UNIT != 0
16545 || !operand_equal_p (core1, core2, 0))
16546 return false;
16548 if (toffset1 && toffset2)
16550 type = TREE_TYPE (toffset1);
16551 if (type != TREE_TYPE (toffset2))
16552 toffset2 = fold_convert (type, toffset2);
16554 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16555 if (!cst_and_fits_in_hwi (tdiff))
16556 return false;
16558 *diff = int_cst_value (tdiff);
16560 else if (toffset1 || toffset2)
16562 /* If only one of the offsets is non-constant, the difference cannot
16563 be a constant. */
16564 return false;
16566 else
16567 *diff = 0;
16569 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16570 return true;
16573 /* Simplify the floating point expression EXP when the sign of the
16574 result is not significant. Return NULL_TREE if no simplification
16575 is possible. */
16577 tree
16578 fold_strip_sign_ops (tree exp)
16580 tree arg0, arg1;
16581 location_t loc = EXPR_LOCATION (exp);
16583 switch (TREE_CODE (exp))
16585 case ABS_EXPR:
16586 case NEGATE_EXPR:
16587 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16588 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16590 case MULT_EXPR:
16591 case RDIV_EXPR:
16592 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16593 return NULL_TREE;
16594 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16595 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16596 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16597 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16598 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16599 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16600 break;
16602 case COMPOUND_EXPR:
16603 arg0 = TREE_OPERAND (exp, 0);
16604 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16605 if (arg1)
16606 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16607 break;
16609 case COND_EXPR:
16610 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16611 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16612 if (arg0 || arg1)
16613 return fold_build3_loc (loc,
16614 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16615 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16616 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16617 break;
16619 case CALL_EXPR:
16621 const enum built_in_function fcode = builtin_mathfn_code (exp);
16622 switch (fcode)
16624 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16625 /* Strip copysign function call, return the 1st argument. */
16626 arg0 = CALL_EXPR_ARG (exp, 0);
16627 arg1 = CALL_EXPR_ARG (exp, 1);
16628 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16630 default:
16631 /* Strip sign ops from the argument of "odd" math functions. */
16632 if (negate_mathfn_p (fcode))
16634 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16635 if (arg0)
16636 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16638 break;
16641 break;
16643 default:
16644 break;
16646 return NULL_TREE;