gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
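
/* Illustrative note (added commentary, not part of the upstream source):
   the four bits encode <unordered, greater, equal, less>, so compound
   comparisons compose bitwise.  For example,

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)        (3 == 1 | 2)
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LTGT)   (13 == 8 | 5)
     COMPCODE_TRUE ^ code == logical inverse of code   (LT -> UNGE)

   so ANDing or ORing two comparisons of the same operands reduces to a
   bitwise AND/OR of their comparison codes.  */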
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
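
/* Example (illustrative, not part of the upstream source), with the
   INTEGER_CST arguments written as plain numbers:

     div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4)  ->  3
     div_if_zero_remainder (EXACT_DIV_EXPR, 13, 4)  ->  NULL_TREE  */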
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
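
/* For instance, sin is odd (-sin(x) == sin(-x)), so a call to sin
   reports true here and -sin(y) may be folded to sin(-y).  The
   lrint/rint family is odd only when the rounding mode is known:
   under -frounding-math, rounding toward +inf gives
   lrint(-0.5) == 0 but -lrint(0.5) == -1, hence the
   !flag_rounding_math test above.  (Illustrative commentary, not
   part of the upstream source.)  */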
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
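
/* E.g. for a 32-bit signed type the only value that cannot be negated
   is INT_MIN (-2147483648): its negation 2147483648 does not fit, so
   the function returns false for it and true for every other value.
   (Illustrative commentary, not part of the upstream source.)  */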
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
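
/* A few of the foldings performed above, illustrated for a plain
   signed int with no signed zeros or sign-dependent rounding to honor
   (added commentary, not part of the upstream source):

     - (~a)       ->  a + 1
     - (a - b)    ->  b - a
     - (a >> 31)  ->  (unsigned) a >> 31      (32-bit int)

   When no case matches, NULL_TREE is returned and negate_expr below
   falls back to building an explicit NEGATE_EXPR.  */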
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
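
/* Worked example (illustrative, not part of the upstream source):
   splitting IN = x - 5 with CODE == PLUS_EXPR yields

     return value (variable part)  = x
     *CONP = NULL,  *LITP = NULL,  *MINUS_LITP = 5

   because the literal 5 was subtracted.  With NEGATE_P true, the 5
   would land in *LITP instead and the variable part would be -x.  */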
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
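
/* E.g. with CODE == PLUS_EXPR, T1 = a - b and T2 = -c, the result is
   built as (a - b) - c rather than (a - b) + (-c); if either operand
   is null the other is returned unchanged.  (Illustrative commentary,
   not part of the upstream source.)  */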
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
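
/* Example (illustrative, not part of the upstream source): for a
   32-bit signed type, int_const_binop (PLUS_EXPR, INT_MAX, 1) wraps
   to INT_MIN and marks the result with TREE_OVERFLOW; the same
   addition on an unsigned type wraps to 0 with no overflow flag.
   Codes the switch above does not handle yield NULL_TREE.  */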
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);
  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
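
/* E.g. size_diffop_loc (loc, size_int (4), size_int (12)) returns the
   ssizetype constant -8, computed as -(12 - 4) so that neither
   subtraction wraps in the unsigned sizetype.  (Illustrative
   commentary, not part of the upstream source.)  */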
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
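
/* The saturating semantics described above, illustrated for a 32-bit
   signed target type (added commentary, not part of the upstream
   source):

     (int) 3.9     ->  3          (truncation toward zero)
     (int) 1e30    ->  INT_MAX    (overflow, TREE_OVERFLOW set)
     (int) -1e30   ->  INT_MIN    (overflow, TREE_OVERFLOW set)
     (int) NaN     ->  0          (overflow, TREE_OVERFLOW set)  */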
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp whenever the fractional bits are
     not all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);
    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }
    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }
    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }
1964 case VECTOR_TYPE:
1965 if (integer_zerop (arg))
1966 return build_zero_vector (type);
1967 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1968 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1969 || TREE_CODE (orig) == VECTOR_TYPE);
1970 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1972 case VOID_TYPE:
1973 tem = fold_ignored_result (arg);
1974 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1976 default:
1977 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1978 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1979 gcc_unreachable ();
1981 fold_convert_exit:
1982 protected_set_expr_location_unshare (tem, loc);
1983 return tem;
1986 /* Return false if expr can be assumed not to be an lvalue, true
1987 otherwise. */
1989 static bool
1990 maybe_lvalue_p (const_tree x)
1992 /* We only need to wrap lvalue tree codes. */
1993 switch (TREE_CODE (x))
1995 case VAR_DECL:
1996 case PARM_DECL:
1997 case RESULT_DECL:
1998 case LABEL_DECL:
1999 case FUNCTION_DECL:
2000 case SSA_NAME:
2002 case COMPONENT_REF:
2003 case MEM_REF:
2004 case INDIRECT_REF:
2005 case ARRAY_REF:
2006 case ARRAY_RANGE_REF:
2007 case BIT_FIELD_REF:
2008 case OBJ_TYPE_REF:
2010 case REALPART_EXPR:
2011 case IMAGPART_EXPR:
2012 case PREINCREMENT_EXPR:
2013 case PREDECREMENT_EXPR:
2014 case SAVE_EXPR:
2015 case TRY_CATCH_EXPR:
2016 case WITH_CLEANUP_EXPR:
2017 case COMPOUND_EXPR:
2018 case MODIFY_EXPR:
2019 case TARGET_EXPR:
2020 case COND_EXPR:
2021 case BIND_EXPR:
2022 break;
2024 default:
2025 /* Assume the worst for front-end tree codes. */
2026 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2027 break;
2028 return false;
2031 return true;
2034 /* Return an expr equal to X but certainly not valid as an lvalue. */
2036 tree
2037 non_lvalue_loc (location_t loc, tree x)
2039 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2040 us. */
2041 if (in_gimple_form)
2042 return x;
2044 if (! maybe_lvalue_p (x))
2045 return x;
2046 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2049 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2050 Zero means allow extended lvalues. */
2052 int pedantic_lvalues;
2054 /* When pedantic, return an expr equal to X but certainly not valid as a
2055 pedantic lvalue. Otherwise, return X. */
2057 static tree
2058 pedantic_non_lvalue_loc (location_t loc, tree x)
2060 if (pedantic_lvalues)
2061 return non_lvalue_loc (loc, x);
2063 return protected_set_expr_location_unshare (x, loc);
2066 /* Given a tree comparison code, return the code that is the logical inverse.
2067 It is generally not safe to do this for floating-point comparisons, except
2068 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2069 ERROR_MARK in this case. */
2071 enum tree_code
2072 invert_tree_comparison (enum tree_code code, bool honor_nans)
2074 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2075 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2076 return ERROR_MARK;
2078 switch (code)
2080 case EQ_EXPR:
2081 return NE_EXPR;
2082 case NE_EXPR:
2083 return EQ_EXPR;
2084 case GT_EXPR:
2085 return honor_nans ? UNLE_EXPR : LE_EXPR;
2086 case GE_EXPR:
2087 return honor_nans ? UNLT_EXPR : LT_EXPR;
2088 case LT_EXPR:
2089 return honor_nans ? UNGE_EXPR : GE_EXPR;
2090 case LE_EXPR:
2091 return honor_nans ? UNGT_EXPR : GT_EXPR;
2092 case LTGT_EXPR:
2093 return UNEQ_EXPR;
2094 case UNEQ_EXPR:
2095 return LTGT_EXPR;
2096 case UNGT_EXPR:
2097 return LE_EXPR;
2098 case UNGE_EXPR:
2099 return LT_EXPR;
2100 case UNLT_EXPR:
2101 return GE_EXPR;
2102 case UNLE_EXPR:
2103 return GT_EXPR;
2104 case ORDERED_EXPR:
2105 return UNORDERED_EXPR;
2106 case UNORDERED_EXPR:
2107 return ORDERED_EXPR;
2108 default:
2109 gcc_unreachable ();
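/* Illustrative sketch, not part of fold-const.c: why honoring NaNs turns
   the inverse of LT_EXPR into UNGE_EXPR rather than GE_EXPR.  When x is
   NaN, !(x < y) is true but x >= y is false, so the ordered inverse is
   wrong exactly on unordered operands.  */
#include <math.h>
#include <assert.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  assert (!(x < y));    /* UNGE: true for unordered operands */
  assert (!(x >= y));   /* GE: false, hence not the inverse  */
  return 0;
}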
2113 /* Similar, but return the comparison that results if the operands are
2114 swapped. This is safe for floating-point. */
2116 enum tree_code
2117 swap_tree_comparison (enum tree_code code)
2119 switch (code)
2121 case EQ_EXPR:
2122 case NE_EXPR:
2123 case ORDERED_EXPR:
2124 case UNORDERED_EXPR:
2125 case LTGT_EXPR:
2126 case UNEQ_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 case UNGT_EXPR:
2137 return UNLT_EXPR;
2138 case UNGE_EXPR:
2139 return UNLE_EXPR;
2140 case UNLT_EXPR:
2141 return UNGT_EXPR;
2142 case UNLE_EXPR:
2143 return UNGE_EXPR;
2144 default:
2145 gcc_unreachable ();
2150 /* Convert a comparison tree code from an enum tree_code representation
2151 into a compcode bit-based encoding. This function is the inverse of
2152 compcode_to_comparison. */
2154 static enum comparison_code
2155 comparison_to_compcode (enum tree_code code)
2157 switch (code)
2159 case LT_EXPR:
2160 return COMPCODE_LT;
2161 case EQ_EXPR:
2162 return COMPCODE_EQ;
2163 case LE_EXPR:
2164 return COMPCODE_LE;
2165 case GT_EXPR:
2166 return COMPCODE_GT;
2167 case NE_EXPR:
2168 return COMPCODE_NE;
2169 case GE_EXPR:
2170 return COMPCODE_GE;
2171 case ORDERED_EXPR:
2172 return COMPCODE_ORD;
2173 case UNORDERED_EXPR:
2174 return COMPCODE_UNORD;
2175 case UNLT_EXPR:
2176 return COMPCODE_UNLT;
2177 case UNEQ_EXPR:
2178 return COMPCODE_UNEQ;
2179 case UNLE_EXPR:
2180 return COMPCODE_UNLE;
2181 case UNGT_EXPR:
2182 return COMPCODE_UNGT;
2183 case LTGT_EXPR:
2184 return COMPCODE_LTGT;
2185 case UNGE_EXPR:
2186 return COMPCODE_UNGE;
2187 default:
2188 gcc_unreachable ();
2192 /* Convert a compcode bit-based encoding of a comparison operator back
2193 to GCC's enum tree_code representation. This function is the
2194 inverse of comparison_to_compcode. */
2196 static enum tree_code
2197 compcode_to_comparison (enum comparison_code code)
2199 switch (code)
2201 case COMPCODE_LT:
2202 return LT_EXPR;
2203 case COMPCODE_EQ:
2204 return EQ_EXPR;
2205 case COMPCODE_LE:
2206 return LE_EXPR;
2207 case COMPCODE_GT:
2208 return GT_EXPR;
2209 case COMPCODE_NE:
2210 return NE_EXPR;
2211 case COMPCODE_GE:
2212 return GE_EXPR;
2213 case COMPCODE_ORD:
2214 return ORDERED_EXPR;
2215 case COMPCODE_UNORD:
2216 return UNORDERED_EXPR;
2217 case COMPCODE_UNLT:
2218 return UNLT_EXPR;
2219 case COMPCODE_UNEQ:
2220 return UNEQ_EXPR;
2221 case COMPCODE_UNLE:
2222 return UNLE_EXPR;
2223 case COMPCODE_UNGT:
2224 return UNGT_EXPR;
2225 case COMPCODE_LTGT:
2226 return LTGT_EXPR;
2227 case COMPCODE_UNGE:
2228 return UNGE_EXPR;
2229 default:
2230 gcc_unreachable ();
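/* Illustrative sketch, not part of fold-const.c: the compcode values are
   bitmasks over the four possible outcomes {LT=1, EQ=2, GT=4, UNORD=8},
   so ANDing/ORing two codes intersects/unions the outcomes they accept.
   Note that COMPCODE_NE is LT|GT|UNORD because NaN != NaN is true, while
   LT|GT alone is the ordered LTGT.  */
#include <assert.h>

int
main (void)
{
  enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };
  assert (((LT | EQ) & (GT | EQ)) == EQ);   /* <= && >=  ->  ==        */
  assert ((LT | GT) == 5);                  /* ordered <>: COMPCODE_LTGT */
  assert ((LT | GT | UNORD) == 13);         /* != : COMPCODE_NE          */
  return 0;
}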
2234 /* Return a tree for the comparison which is the combination of
2235 doing the AND or OR (depending on CODE) of the two operations LCODE
2236 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2237 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2238 if this makes the transformation invalid. */
2240 tree
2241 combine_comparisons (location_t loc,
2242 enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 int compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2316 enum tree_code tcode;
2318 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2319 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
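/* Illustrative sketch, not part of fold-const.c: ORing the compcodes of
   "<" and "==" yields the compcode of "<=", and the rewrite is exact
   even for NaN inputs (all three tests are then false).  Under
   -ftrapping-math the function additionally has to verify that the
   combined test traps under the same conditions as the originals.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double v[] = { -1.0, 0.0, 1.0, NAN };
  for (int i = 0; i < 4; i++)
    for (int j = 0; j < 4; j++)
      assert ((v[i] < v[j] || v[i] == v[j]) == (v[i] <= v[j]));
  return 0;
}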
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2349 int
2350 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2354 || TREE_TYPE (arg0) == error_mark_node
2355 || TREE_TYPE (arg1) == error_mark_node)
2356 return 0;
2358 /* Similar, if either does not have a type (like a released SSA name),
2359 they aren't equal. */
2360 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2361 return 0;
2363 /* Check equality of integer constants before bailing out due to
2364 precision differences. */
2365 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2366 return tree_int_cst_equal (arg0, arg1);
2368 /* If both types don't have the same signedness, then we can't consider
2369 them equal. We must check this before the STRIP_NOPS calls
2370 because they may change the signedness of the arguments. As pointers
2371 strictly don't have a signedness, require either two pointers or
2372 two non-pointers as well. */
2373 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2374 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2375 return 0;
2377 /* We cannot consider pointers to different address spaces equal. */
2378 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2379 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2380 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2381 return 0;
2383 /* If both types don't have the same precision, then it is not safe
2384 to strip NOPs. */
2385 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2386 return 0;
2388 STRIP_NOPS (arg0);
2389 STRIP_NOPS (arg1);
2391 /* In case both args are comparisons but with different comparison
2392 code, try to swap the comparison operands of one arg to produce
2393 a match and compare that variant. */
2394 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2395 && COMPARISON_CLASS_P (arg0)
2396 && COMPARISON_CLASS_P (arg1))
2398 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2400 if (TREE_CODE (arg0) == swap_code)
2401 return operand_equal_p (TREE_OPERAND (arg0, 0),
2402 TREE_OPERAND (arg1, 1), flags)
2403 && operand_equal_p (TREE_OPERAND (arg0, 1),
2404 TREE_OPERAND (arg1, 0), flags);
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 /* This is needed for conversions and for COMPONENT_REF.
2409 Might as well play it safe and always test this. */
2410 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2411 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2412 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2413 return 0;
2415 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2416 We don't care about side effects in that case because the SAVE_EXPR
2417 takes care of that for us. In all other cases, two expressions are
2418 equal if they have no side effects. If we have two identical
2419 expressions with side effects that should be treated the same due
2420 to the only side effects being identical SAVE_EXPR's, that will
2421 be detected in the recursive calls below.
2422 If we are taking an invariant address of two identical objects
2423 they are necessarily equal as well. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (flags & OEP_CONSTANT_ADDRESS_OF)
2427 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2428 return 1;
2430 /* Next handle constant cases, those for which we can return 1 even
2431 if ONLY_CONST is set. */
2432 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2433 switch (TREE_CODE (arg0))
2435 case INTEGER_CST:
2436 return tree_int_cst_equal (arg0, arg1);
2438 case FIXED_CST:
2439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2440 TREE_FIXED_CST (arg1));
2442 case REAL_CST:
2443 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2444 TREE_REAL_CST (arg1)))
2445 return 1;
2448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2450 /* If we do not distinguish between signed and unsigned zero,
2451 consider them equal. */
2452 if (real_zerop (arg0) && real_zerop (arg1))
2453 return 1;
2455 return 0;
2457 case VECTOR_CST:
2459 unsigned i;
2461 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2462 return 0;
2464 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2466 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2467 VECTOR_CST_ELT (arg1, i), flags))
2468 return 0;
2470 return 1;
2473 case COMPLEX_CST:
2474 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2475 flags)
2476 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2477 flags));
2479 case STRING_CST:
2480 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2481 && ! memcmp (TREE_STRING_POINTER (arg0),
2482 TREE_STRING_POINTER (arg1),
2483 TREE_STRING_LENGTH (arg0)));
2485 case ADDR_EXPR:
2486 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2487 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2488 ? OEP_CONSTANT_ADDRESS_OF : 0);
2489 default:
2490 break;
2493 if (flags & OEP_ONLY_CONST)
2494 return 0;
2496 /* Define macros to test an operand from arg0 and arg1 for equality and a
2497 variant that allows null and views null as being different from any
2498 non-null value. In the latter case, if either is null, they both
2499 must be; otherwise, do the normal comparison. */
2500 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2501 TREE_OPERAND (arg1, N), flags)
2503 #define OP_SAME_WITH_NULL(N) \
2504 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2505 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2507 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2509 case tcc_unary:
2510 /* Two conversions are equal only if signedness and modes match. */
2511 switch (TREE_CODE (arg0))
2513 CASE_CONVERT:
2514 case FIX_TRUNC_EXPR:
2515 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2516 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2517 return 0;
2518 break;
2519 default:
2520 break;
2523 return OP_SAME (0);
2526 case tcc_comparison:
2527 case tcc_binary:
2528 if (OP_SAME (0) && OP_SAME (1))
2529 return 1;
2531 /* For commutative ops, allow the other order. */
2532 return (commutative_tree_code (TREE_CODE (arg0))
2533 && operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags));
2538 case tcc_reference:
2539 /* If either of the pointer (or reference) expressions we are
2540 dereferencing contain a side effect, these cannot be equal. */
2541 if (TREE_SIDE_EFFECTS (arg0)
2542 || TREE_SIDE_EFFECTS (arg1))
2543 return 0;
2545 switch (TREE_CODE (arg0))
2547 case INDIRECT_REF:
2548 case REALPART_EXPR:
2549 case IMAGPART_EXPR:
2550 return OP_SAME (0);
2552 case TARGET_MEM_REF:
2553 /* Require equal extra operands and then fall through to MEM_REF
2554 handling of the two common operands. */
2555 if (!OP_SAME_WITH_NULL (2)
2556 || !OP_SAME_WITH_NULL (3)
2557 || !OP_SAME_WITH_NULL (4))
2558 return 0;
2559 /* Fallthru. */
2560 case MEM_REF:
2561 /* Require equal access sizes, and similar pointer types.
2562 We can have incomplete types for array references of
2563 variable-sized arrays from the Fortran frontend
2564 though. */
2565 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2566 || (TYPE_SIZE (TREE_TYPE (arg0))
2567 && TYPE_SIZE (TREE_TYPE (arg1))
2568 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2569 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2570 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2571 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2572 && OP_SAME (0) && OP_SAME (1));
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null.
2577 Compare the array index by value if it is constant first as we
2578 may have different types but same value here. */
2579 return (OP_SAME (0)
2580 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 1))
2582 || OP_SAME (1))
2583 && OP_SAME_WITH_NULL (2)
2584 && OP_SAME_WITH_NULL (3));
2586 case COMPONENT_REF:
2587 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2588 may be NULL when we're called to compare MEM_EXPRs. */
2589 return OP_SAME_WITH_NULL (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2);
2593 case BIT_FIELD_REF:
2594 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 default:
2597 return 0;
2600 case tcc_expression:
2601 switch (TREE_CODE (arg0))
2603 case ADDR_EXPR:
2604 case TRUTH_NOT_EXPR:
2605 return OP_SAME (0);
2607 case TRUTH_ANDIF_EXPR:
2608 case TRUTH_ORIF_EXPR:
2609 return OP_SAME (0) && OP_SAME (1);
2611 case FMA_EXPR:
2612 case WIDEN_MULT_PLUS_EXPR:
2613 case WIDEN_MULT_MINUS_EXPR:
2614 if (!OP_SAME (2))
2615 return 0;
2616 /* The multiplication operands are commutative. */
2617 /* FALLTHRU */
2619 case TRUTH_AND_EXPR:
2620 case TRUTH_OR_EXPR:
2621 case TRUTH_XOR_EXPR:
2622 if (OP_SAME (0) && OP_SAME (1))
2623 return 1;
2625 /* Otherwise take into account this is a commutative operation. */
2626 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2627 TREE_OPERAND (arg1, 1), flags)
2628 && operand_equal_p (TREE_OPERAND (arg0, 1),
2629 TREE_OPERAND (arg1, 0), flags));
2631 case COND_EXPR:
2632 case VEC_COND_EXPR:
2633 case DOT_PROD_EXPR:
2634 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 default:
2637 return 0;
2640 case tcc_vl_exp:
2641 switch (TREE_CODE (arg0))
2643 case CALL_EXPR:
2644 /* If the CALL_EXPRs call different functions, then they
2645 clearly cannot be equal. */
2646 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2647 flags))
2648 return 0;
2651 unsigned int cef = call_expr_flags (arg0);
2652 if (flags & OEP_PURE_SAME)
2653 cef &= ECF_CONST | ECF_PURE;
2654 else
2655 cef &= ECF_CONST;
2656 if (!cef)
2657 return 0;
2660 /* Now see if all the arguments are the same. */
2662 const_call_expr_arg_iterator iter0, iter1;
2663 const_tree a0, a1;
2664 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2665 a1 = first_const_call_expr_arg (arg1, &iter1);
2666 a0 && a1;
2667 a0 = next_const_call_expr_arg (&iter0),
2668 a1 = next_const_call_expr_arg (&iter1))
2669 if (! operand_equal_p (a0, a1, flags))
2670 return 0;
2672 /* If we get here and both argument lists are exhausted
2673 then the CALL_EXPRs are equal. */
2674 return ! (a0 || a1);
2676 default:
2677 return 0;
2680 case tcc_declaration:
2681 /* Consider __builtin_sqrt equal to sqrt. */
2682 return (TREE_CODE (arg0) == FUNCTION_DECL
2683 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2684 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2685 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2687 default:
2688 return 0;
2691 #undef OP_SAME
2692 #undef OP_SAME_WITH_NULL
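/* Illustrative sketch, not part of fold-const.c: "indistinguishable" is
   deliberately not C's ==.  -0.0 and 0.0 compare equal yet differ in
   the sign bit, and a NaN compares unequal to itself; hence the
   REAL_VALUES_IDENTICAL test above, with signed zeros only admitted as
   equal when !HONOR_SIGNED_ZEROS.  */
#include <math.h>
#include <assert.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  assert (pz == nz && signbit (pz) != signbit (nz));
  assert (n != n);   /* one well-defined value, yet != under IEEE rules */
  return 0;
}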
2695 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2696 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2698 When in doubt, return 0. */
2700 static int
2701 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2703 int unsignedp1, unsignedpo;
2704 tree primarg0, primarg1, primother;
2705 unsigned int correct_width;
2707 if (operand_equal_p (arg0, arg1, 0))
2708 return 1;
2710 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2711 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2712 return 0;
2714 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2715 and see if the inner values are the same. This removes any
2716 signedness comparison, which doesn't matter here. */
2717 primarg0 = arg0, primarg1 = arg1;
2718 STRIP_NOPS (primarg0);
2719 STRIP_NOPS (primarg1);
2720 if (operand_equal_p (primarg0, primarg1, 0))
2721 return 1;
2723 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2724 actual comparison operand, ARG0.
2726 First throw away any conversions to wider types
2727 already present in the operands. */
2729 primarg1 = get_narrower (arg1, &unsignedp1);
2730 primother = get_narrower (other, &unsignedpo);
2732 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2733 if (unsignedp1 == unsignedpo
2734 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2735 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2737 tree type = TREE_TYPE (arg0);
2739 /* Make sure shorter operand is extended the right way
2740 to match the longer operand. */
2741 primarg1 = fold_convert (signed_or_unsigned_type_for
2742 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2744 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2745 return 1;
2748 return 0;
2751 /* See if ARG is an expression that is either a comparison or is performing
2752 arithmetic on comparisons. The comparisons must only be comparing
2753 two different values, which will be stored in *CVAL1 and *CVAL2; if
2754 they are nonzero it means that some operands have already been found.
2755 No variables may be used anywhere else in the expression except in the
2756 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2757 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2759 If this is true, return 1. Otherwise, return zero. */
2761 static int
2762 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2769 tclass = tcc_unary;
2770 else if (tclass == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2772 || code == COMPOUND_EXPR))
2773 tclass = tcc_binary;
2775 else if (tclass == tcc_expression && code == SAVE_EXPR
2776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2778 /* If we've already found a CVAL1 or CVAL2, this expression is
2779 too complex to handle. */
2780 if (*cval1 || *cval2)
2781 return 0;
2783 tclass = tcc_unary;
2784 *save_p = 1;
2787 switch (tclass)
2789 case tcc_unary:
2790 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2792 case tcc_binary:
2793 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2794 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2795 cval1, cval2, save_p));
2797 case tcc_constant:
2798 return 1;
2800 case tcc_expression:
2801 if (code == COND_EXPR)
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2803 cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p)
2806 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2807 cval1, cval2, save_p));
2808 return 0;
2810 case tcc_comparison:
2811 /* First see if we can handle the first operand, then the second. For
2812 the second operand, we know *CVAL1 can't be zero. It must be that
2813 one side of the comparison is each of the values; test for the
2814 case where this isn't true by failing if the two operands
2815 are the same. */
2817 if (operand_equal_p (TREE_OPERAND (arg, 0),
2818 TREE_OPERAND (arg, 1), 0))
2819 return 0;
2821 if (*cval1 == 0)
2822 *cval1 = TREE_OPERAND (arg, 0);
2823 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2825 else if (*cval2 == 0)
2826 *cval2 = TREE_OPERAND (arg, 0);
2827 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2829 else
2830 return 0;
2832 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 1);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2838 else
2839 return 0;
2841 return 1;
2843 default:
2844 return 0;
2848 /* ARG is a tree that is known to contain just arithmetic operations and
2849 comparisons. Evaluate the operations in the tree substituting NEW0 for
2850 any occurrence of OLD0 as an operand of a comparison and likewise for
2851 NEW1 and OLD1. */
2853 static tree
2854 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2855 tree old1, tree new1)
2857 tree type = TREE_TYPE (arg);
2858 enum tree_code code = TREE_CODE (arg);
2859 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2861 /* We can handle some of the tcc_expression cases here. */
2862 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2863 tclass = tcc_unary;
2864 else if (tclass == tcc_expression
2865 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2866 tclass = tcc_binary;
2868 switch (tclass)
2870 case tcc_unary:
2871 return fold_build1_loc (loc, code, type,
2872 eval_subst (loc, TREE_OPERAND (arg, 0),
2873 old0, new0, old1, new1));
2875 case tcc_binary:
2876 return fold_build2_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1),
2879 eval_subst (loc, TREE_OPERAND (arg, 1),
2880 old0, new0, old1, new1));
2882 case tcc_expression:
2883 switch (code)
2885 case SAVE_EXPR:
2886 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2887 old1, new1);
2889 case COMPOUND_EXPR:
2890 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2891 old1, new1);
2893 case COND_EXPR:
2894 return fold_build3_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1),
2899 eval_subst (loc, TREE_OPERAND (arg, 2),
2900 old0, new0, old1, new1));
2901 default:
2902 break;
2904 /* Fall through - ??? */
2906 case tcc_comparison:
2908 tree arg0 = TREE_OPERAND (arg, 0);
2909 tree arg1 = TREE_OPERAND (arg, 1);
2911 /* We need to check both for exact equality and tree equality. The
2912 former will be true if the operand has a side-effect. In that
2913 case, we know the operand occurred exactly once. */
2915 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2916 arg0 = new0;
2917 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2918 arg0 = new1;
2920 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2921 arg1 = new0;
2922 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2923 arg1 = new1;
2925 return fold_build2_loc (loc, code, type, arg0, arg1);
2928 default:
2929 return arg;
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED was previously an operand of the expression
2935 but is now not needed (e.g., we folded OMITTED * 0).
2937 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2938 the conversion of RESULT to TYPE. */
2940 tree
2941 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2943 tree t = fold_convert_loc (loc, type, result);
2945 /* If the resulting operand is an empty statement, just return the omitted
2946 statement cast to void. */
2947 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2948 return build1_loc (loc, NOP_EXPR, void_type_node,
2949 fold_ignored_result (omitted));
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2_loc (loc, COMPOUND_EXPR, type,
2953 fold_ignored_result (omitted), t);
2955 return non_lvalue_loc (loc, t);
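/* Illustrative sketch, not part of fold-const.c: when fold drops an
   operand, as in "f () * 0", a side-effecting OMITTED must still be
   evaluated, so the result is the COMPOUND_EXPR (f (), 0) rather than a
   bare 0.  Hypothetical standalone demo.  */
#include <assert.h>

static int calls;
static int f (void) { return ++calls; }

int
main (void)
{
  int x = (f (), 0);   /* shape of the tree omit_one_operand_loc builds */
  assert (x == 0 && calls == 1);
  return 0;
}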
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2962 tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement cast to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2969 return build1_loc (loc, NOP_EXPR, void_type_node,
2970 fold_ignored_result (omitted));
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2_loc (loc, COMPOUND_EXPR, type,
2974 fold_ignored_result (omitted), t);
2976 return pedantic_non_lvalue_loc (loc, t);
2979 /* Return a tree for the case when the result of an expression is RESULT
2980 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2981 of the expression but are now not needed.
2983 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2984 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2985 evaluated before OMITTED2. Otherwise, if neither has side effects,
2986 just do the conversion of RESULT to TYPE. */
2988 tree
2989 omit_two_operands_loc (location_t loc, tree type, tree result,
2990 tree omitted1, tree omitted2)
2992 tree t = fold_convert_loc (loc, type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 tree
3011 fold_truth_not_expr (location_t loc, tree arg)
3013 tree type = TREE_TYPE (arg);
3014 enum tree_code code = TREE_CODE (arg);
3015 location_t loc1, loc2;
3017 /* If this is a comparison, we can simply invert it, except for
3018 floating-point non-equality comparisons, in which case we just
3019 enclose a TRUTH_NOT_EXPR around what we have. */
3021 if (TREE_CODE_CLASS (code) == tcc_comparison)
3023 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3024 if (FLOAT_TYPE_P (op_type)
3025 && flag_trapping_math
3026 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3027 && code != NE_EXPR && code != EQ_EXPR)
3028 return NULL_TREE;
3030 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3031 if (code == ERROR_MARK)
3032 return NULL_TREE;
3034 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3035 TREE_OPERAND (arg, 1));
3038 switch (code)
3040 case INTEGER_CST:
3041 return constant_boolean_node (integer_zerop (arg), type);
3043 case TRUTH_AND_EXPR:
3044 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3045 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3046 return build2_loc (loc, TRUTH_OR_EXPR, type,
3047 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3048 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3050 case TRUTH_OR_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_AND_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3066 else
3067 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3068 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3073 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3074 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3075 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3076 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 case TRUTH_ORIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 case TRUTH_NOT_EXPR:
3086 return TREE_OPERAND (arg, 0);
3088 case COND_EXPR:
3090 tree arg1 = TREE_OPERAND (arg, 1);
3091 tree arg2 = TREE_OPERAND (arg, 2);
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3096 /* A COND_EXPR may have a throw as one operand, which
3097 then has void type. Just leave void operands
3098 as they are. */
3099 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3100 VOID_TYPE_P (TREE_TYPE (arg1))
3101 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3102 VOID_TYPE_P (TREE_TYPE (arg2))
3103 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3106 case COMPOUND_EXPR:
3107 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 TREE_OPERAND (arg, 0),
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3112 case NON_LVALUE_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3116 CASE_CONVERT:
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3118 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3120 /* ... fall through ... */
3122 case FLOAT_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 return build1_loc (loc, TREE_CODE (arg), type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3127 case BIT_AND_EXPR:
3128 if (!integer_onep (TREE_OPERAND (arg, 1)))
3129 return NULL_TREE;
3130 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3132 case SAVE_EXPR:
3133 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3135 case CLEANUP_POINT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3140 default:
3141 return NULL_TREE;
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 tree
3153 invert_truthvalue_loc (location_t loc, tree arg)
3155 tree tem;
3157 if (TREE_CODE (arg) == ERROR_MARK)
3158 return arg;
3160 tem = fold_truth_not_expr (loc, arg);
3161 if (!tem)
3162 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3164 return tem;
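/* Illustrative sketch, not part of fold-const.c: the TRUTH_AND/OR and
   ANDIF/ORIF cases above are De Morgan's laws applied recursively, e.g.
   !(a && b) becomes !a || !b.  */
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b));
        assert (!(a || b) == (!a && !b));
      }
  return 0;
}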
3167 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3168 operands are another bit-wise operation with a common input. If so,
3169 distribute the bit operations to save an operation and possibly two if
3170 constants are involved. For example, convert
3171 (A | B) & (A | C) into A | (B & C)
3172 Further simplification will occur if B and C are constants.
3174 If this optimization cannot be done, 0 will be returned. */
3176 static tree
3177 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3178 tree arg0, tree arg1)
3180 tree common;
3181 tree left, right;
3183 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3184 || TREE_CODE (arg0) == code
3185 || (TREE_CODE (arg0) != BIT_AND_EXPR
3186 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3187 return 0;
3189 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3191 common = TREE_OPERAND (arg0, 0);
3192 left = TREE_OPERAND (arg0, 1);
3193 right = TREE_OPERAND (arg1, 1);
3195 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3197 common = TREE_OPERAND (arg0, 0);
3198 left = TREE_OPERAND (arg0, 1);
3199 right = TREE_OPERAND (arg1, 0);
3201 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3203 common = TREE_OPERAND (arg0, 1);
3204 left = TREE_OPERAND (arg0, 0);
3205 right = TREE_OPERAND (arg1, 1);
3207 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3209 common = TREE_OPERAND (arg0, 1);
3210 left = TREE_OPERAND (arg0, 0);
3211 right = TREE_OPERAND (arg1, 0);
3213 else
3214 return 0;
3216 common = fold_convert_loc (loc, type, common);
3217 left = fold_convert_loc (loc, type, left);
3218 right = fold_convert_loc (loc, type, right);
3219 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3220 fold_build2_loc (loc, code, type, left, right));
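/* Illustrative sketch, not part of fold-const.c: the distributive law
   the function applies, (A | B) & (A | C) == A | (B & C), verified
   exhaustively over small operands; the dual with & and | swapped holds
   as well.  */
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 8; a++)
    for (unsigned b = 0; b < 8; b++)
      for (unsigned c = 0; c < 8; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}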
3223 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3224 with code CODE. This optimization is unsafe. */
3225 static tree
3226 distribute_real_division (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3230 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3232 /* (A / C) +- (B / C) -> (A +- B) / C. */
3233 if (mul0 == mul1
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1), 0))
3236 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3237 fold_build2_loc (loc, code, type,
3238 TREE_OPERAND (arg0, 0),
3239 TREE_OPERAND (arg1, 0)),
3240 TREE_OPERAND (arg0, 1));
3242 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3243 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0), 0)
3245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3248 REAL_VALUE_TYPE r0, r1;
3249 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3250 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3251 if (!mul0)
3252 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3253 if (!mul1)
3254 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3255 real_arithmetic (&r0, code, &r0, &r1);
3256 return fold_build2_loc (loc, MULT_EXPR, type,
3257 TREE_OPERAND (arg0, 0),
3258 build_real (type, r0));
3261 return NULL_TREE;
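/* Illustrative sketch, not part of fold-const.c: why the transform is
   flagged unsafe.  Rewriting A/C + B/C as (A + B)/C introduces an
   intermediate sum that can overflow even though both quotients are
   exactly representable (default round-to-nearest assumed).  */
#include <float.h>
#include <math.h>
#include <assert.h>

int
main (void)
{
  volatile double a = DBL_MAX, b = DBL_MAX, c = 4.0;
  assert (isfinite (a / c + b / c));   /* exact: DBL_MAX / 2       */
  assert (isinf ((a + b) / c));        /* a + b overflows to +inf  */
  return 0;
}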
3264 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3265 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3267 static tree
3268 make_bit_field_ref (location_t loc, tree inner, tree type,
3269 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3271 tree result, bftype;
3273 if (bitpos == 0)
3275 tree size = TYPE_SIZE (TREE_TYPE (inner));
3276 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3277 || POINTER_TYPE_P (TREE_TYPE (inner)))
3278 && host_integerp (size, 0)
3279 && tree_low_cst (size, 0) == bitsize)
3280 return fold_convert_loc (loc, type, inner);
3283 bftype = type;
3284 if (TYPE_PRECISION (bftype) != bitsize
3285 || TYPE_UNSIGNED (bftype) == !unsignedp)
3286 bftype = build_nonstandard_integer_type (bitsize, 0);
3288 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 if (bftype != type)
3292 result = fold_convert_loc (loc, type, result);
3294 return result;
3297 /* Optimize a bit-field compare.
3299 There are two cases: First is a compare against a constant and the
3300 second is a comparison of two items where the fields are at the same
3301 bit position relative to the start of a chunk (byte, halfword, word)
3302 large enough to contain it. In these cases we can avoid the shift
3303 implicit in bitfield extractions.
3305 For constants, we emit a compare of the shifted constant with the
3306 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3307 compared. For two fields at the same position, we do the ANDs with the
3308 similar mask and compare the result of the ANDs.
3310 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3311 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3312 are the left and right operands of the comparison, respectively.
3314 If the optimization described above can be done, we return the resulting
3315 tree. Otherwise we return zero. */
3317 static tree
3318 optimize_bit_field_compare (location_t loc, enum tree_code code,
3319 tree compare_type, tree lhs, tree rhs)
3321 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3322 tree type = TREE_TYPE (lhs);
3323 tree signed_type, unsigned_type;
3324 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3325 enum machine_mode lmode, rmode, nmode;
3326 int lunsignedp, runsignedp;
3327 int lvolatilep = 0, rvolatilep = 0;
3328 tree linner, rinner = NULL_TREE;
3329 tree mask;
3330 tree offset;
3332 /* In the strict volatile bitfields case, doing code changes here may prevent
3333 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3334 if (flag_strict_volatile_bitfields > 0)
3335 return 0;
3337 /* Get all the information about the extractions being done. If the bit size
3338 is the same as the size of the underlying object, we aren't doing an
3339 extraction at all and so can do nothing. We also don't want to
3340 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3341 then will no longer be able to replace it. */
3342 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3343 &lunsignedp, &lvolatilep, false);
3344 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3345 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3346 return 0;
3348 if (!const_p)
3350 /* If this is not a constant, we can only do something if bit positions,
3351 sizes, and signedness are the same. */
3352 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3353 &runsignedp, &rvolatilep, false);
3355 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3356 || lunsignedp != runsignedp || offset != 0
3357 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3358 return 0;
3361 /* See if we can find a mode to refer to this field. We should be able to,
3362 but fail if we can't. */
3363 if (lvolatilep
3364 && GET_MODE_BITSIZE (lmode) > 0
3365 && flag_strict_volatile_bitfields > 0)
3366 nmode = lmode;
3367 else
3368 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3369 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3370 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3371 TYPE_ALIGN (TREE_TYPE (rinner))),
3372 word_mode, lvolatilep || rvolatilep);
3373 if (nmode == VOIDmode)
3374 return 0;
3376 /* Set signed and unsigned types of the precision of this mode for the
3377 shifts below. */
3378 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3379 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3381 /* Compute the bit position and size for the new reference and our offset
3382 within it. If the new reference is the same size as the original, we
3383 won't optimize anything, so return zero. */
3384 nbitsize = GET_MODE_BITSIZE (nmode);
3385 nbitpos = lbitpos & ~ (nbitsize - 1);
3386 lbitpos -= nbitpos;
3387 if (nbitsize == lbitsize)
3388 return 0;
3390 if (BYTES_BIG_ENDIAN)
3391 lbitpos = nbitsize - lbitsize - lbitpos;
3393 /* Make the mask to be used against the extracted field. */
3394 mask = build_int_cst_type (unsigned_type, -1);
3395 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3396 mask = const_binop (RSHIFT_EXPR, mask,
3397 size_int (nbitsize - lbitsize - lbitpos));
3399 if (! const_p)
3400 /* If not comparing with constant, just rework the comparison
3401 and return. */
3402 return fold_build2_loc (loc, code, compare_type,
3403 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3404 make_bit_field_ref (loc, linner,
3405 unsigned_type,
3406 nbitsize, nbitpos,
3407 1),
3408 mask),
3409 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3410 make_bit_field_ref (loc, rinner,
3411 unsigned_type,
3412 nbitsize, nbitpos,
3413 1),
3414 mask));
3416 /* Otherwise, we are handling the constant case. See if the constant is too
3417 big for the field. Warn and return a tree for 0 (false) if so. We do
3418 this not only for its own sake, but to avoid having to test for this
3419 error case below. If we didn't, we might generate wrong code.
3421 For unsigned fields, the constant shifted right by the field length should
3422 be all zero. For signed fields, the high-order bits should agree with
3423 the sign bit. */
3425 if (lunsignedp)
3427 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3428 fold_convert_loc (loc,
3429 unsigned_type, rhs),
3430 size_int (lbitsize))))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR,
3440 fold_convert_loc (loc, signed_type, rhs),
3441 size_int (lbitsize - 1));
3442 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3444 warning (0, "comparison is always %d due to width of bit-field",
3445 code == NE_EXPR);
3446 return constant_boolean_node (code == NE_EXPR, compare_type);
3450 /* Single-bit compares should always be against zero. */
3451 if (lbitsize == 1 && ! integer_zerop (rhs))
3453 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3454 rhs = build_int_cst (type, 0);
3457 /* Make a new bitfield reference, shift the constant over the
3458 appropriate number of bits and mask it with the computed mask
3459 (in case this was a signed field). If we changed it, make a new one. */
3460 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3461 if (lvolatilep)
3463 TREE_SIDE_EFFECTS (lhs) = 1;
3464 TREE_THIS_VOLATILE (lhs) = 1;
3467 rhs = const_binop (BIT_AND_EXPR,
3468 const_binop (LSHIFT_EXPR,
3469 fold_convert_loc (loc, unsigned_type, rhs),
3470 size_int (lbitpos)),
3471 mask);
3473 lhs = build2_loc (loc, code, compare_type,
3474 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3475 return lhs;
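/* Illustrative sketch, not part of fold-const.c: the shift-free form the
   optimization produces.  A bit-field compared against a constant
   becomes a mask-and-compare on the containing word, with the constant
   shifted into place.  The layout (field f at bit 3 of a 32-bit
   little-endian unit) is a hypothetical assumption.  */
#include <assert.h>
#include <string.h>
#include <stdint.h>

struct s { unsigned pad : 3, f : 5, rest : 24; };

int
main (void)
{
  struct s v = { 1, 17, 0 };
  uint32_t word;
  memcpy (&word, &v, sizeof word);
  /* v.f == 17  <==>  (word & (0x1f << 3)) == (17 << 3)  */
  assert ((v.f == 17) == ((word & (0x1fu << 3)) == (17u << 3)));
  return 0;
}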
3478 /* Subroutine for fold_truth_andor_1: decode a field reference.
3480 If EXP is a comparison reference, we return the innermost reference.
3482 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3483 set to the starting bit number.
3485 If the innermost field can be completely contained in a mode-sized
3486 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3488 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3489 otherwise it is not changed.
3491 *PUNSIGNEDP is set to the signedness of the field.
3493 *PMASK is set to the mask used. This is either contained in a
3494 BIT_AND_EXPR or derived from the width of the field.
3496 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3498 Return 0 if this is not a component reference or is one that we can't
3499 do anything with. */
3501 static tree
3502 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3503 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3504 int *punsignedp, int *pvolatilep,
3505 tree *pmask, tree *pand_mask)
3507 tree outer_type = 0;
3508 tree and_mask = 0;
3509 tree mask, inner, offset;
3510 tree unsigned_type;
3511 unsigned int precision;
3513 /* All the optimizations using this function assume integer fields.
3514 There are problems with FP fields since the type_for_size call
3515 below can fail for, e.g., XFmode. */
3516 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3517 return 0;
3519 /* We are interested in the bare arrangement of bits, so strip everything
3520 that doesn't affect the machine mode. However, record the type of the
3521 outermost expression if it may matter below. */
3522 if (CONVERT_EXPR_P (exp)
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst_type (unsigned_type, -1);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3556 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3558 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3559 if (and_mask != 0)
3560 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3561 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3563 *pmask = mask;
3564 *pand_mask = and_mask;
3565 return inner;
3568 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3569 bit positions. */
3571 static int
3572 all_ones_mask_p (const_tree mask, int size)
3574 tree type = TREE_TYPE (mask);
3575 unsigned int precision = TYPE_PRECISION (type);
3576 tree tmask;
3578 tmask = build_int_cst_type (signed_type_for (type), -1);
3580 return
3581 tree_int_cst_equal (mask,
3582 const_binop (RSHIFT_EXPR,
3583 const_binop (LSHIFT_EXPR, tmask,
3584 size_int (precision - size)),
3585 size_int (precision - size)));
3588 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3589 represents the sign bit of EXP's type. If EXP represents a sign
3590 or zero extension, also test VAL against the unextended type.
3591 The return value is the (sub)expression whose sign bit is VAL,
3592 or NULL_TREE otherwise. */
3594 static tree
3595 sign_bit_p (tree exp, const_tree val)
3597 unsigned HOST_WIDE_INT mask_lo, lo;
3598 HOST_WIDE_INT mask_hi, hi;
3599 int width;
3600 tree t;
3602 /* Tree EXP must have an integral type. */
3603 t = TREE_TYPE (exp);
3604 if (! INTEGRAL_TYPE_P (t))
3605 return NULL_TREE;
3607 /* Tree VAL must be an integer constant. */
3608 if (TREE_CODE (val) != INTEGER_CST
3609 || TREE_OVERFLOW (val))
3610 return NULL_TREE;
3612 width = TYPE_PRECISION (t);
3613 if (width > HOST_BITS_PER_WIDE_INT)
3615 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3616 lo = 0;
3618 mask_hi = ((unsigned HOST_WIDE_INT) -1
3619 >> (HOST_BITS_PER_DOUBLE_INT - width));
3620 mask_lo = -1;
3622 else
3624 hi = 0;
3625 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3627 mask_hi = 0;
3628 mask_lo = ((unsigned HOST_WIDE_INT) -1
3629 >> (HOST_BITS_PER_WIDE_INT - width));
3632 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3633 treat VAL as if it were unsigned. */
3634 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3635 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3636 return exp;
3638 /* Handle extension from a narrower type. */
3639 if (TREE_CODE (exp) == NOP_EXPR
3640 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3641 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3643 return NULL_TREE;
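/* Illustrative sketch, not part of fold-const.c: the rewrite sign_bit_p
   enables for fold's callers.  Masking with the sign bit is equivalent
   to a signed comparison against zero, so "x & 0x80000000" tests become
   "x < 0".  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t v[] = { INT32_MIN, -1, 0, 1, INT32_MAX };
  for (int i = 0; i < 5; i++)
    assert ((((uint32_t) v[i] & 0x80000000u) != 0) == (v[i] < 0));
  return 0;
}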
3646 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3647 to be evaluated unconditionally. */
3649 static int
3650 simple_operand_p (const_tree exp)
3652 /* Strip any conversions that don't change the machine mode. */
3653 STRIP_NOPS (exp);
3655 return (CONSTANT_CLASS_P (exp)
3656 || TREE_CODE (exp) == SSA_NAME
3657 || (DECL_P (exp)
3658 && ! TREE_ADDRESSABLE (exp)
3659 && ! TREE_THIS_VOLATILE (exp)
3660 && ! DECL_NONLOCAL (exp)
3661 /* Don't regard global variables as simple. They may be
3662 allocated in ways unknown to the compiler (shared memory,
3663 #pragma weak, etc). */
3664 && ! TREE_PUBLIC (exp)
3665 && ! DECL_EXTERNAL (exp)
3666 /* Loading a static variable is unduly expensive, but global
3667 registers aren't expensive. */
3668 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3671 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3672 to be evaluated unconditionally.
3673 In addition to simple_operand_p, we assume that comparisons, conversions,
3674 and logic-not operations are simple, if their operands are simple, too. */
3676 static bool
3677 simple_operand_p_2 (tree exp)
3679 enum tree_code code;
3681 if (TREE_SIDE_EFFECTS (exp)
3682 || tree_could_trap_p (exp))
3683 return false;
3685 while (CONVERT_EXPR_P (exp))
3686 exp = TREE_OPERAND (exp, 0);
3688 code = TREE_CODE (exp);
3690 if (TREE_CODE_CLASS (code) == tcc_comparison)
3691 return (simple_operand_p (TREE_OPERAND (exp, 0))
3692 && simple_operand_p (TREE_OPERAND (exp, 1)));
3694 if (code == TRUTH_NOT_EXPR)
3695 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3697 return simple_operand_p (exp);
3701 /* The following functions are subroutines to fold_range_test and allow it to
3702 try to change a logical combination of comparisons into a range test.
3704 For example, both
3705 X == 2 || X == 3 || X == 4 || X == 5
3706 and
3707 X >= 2 && X <= 5
3708 are converted to
3709 (unsigned) (X - 2) <= 3
3711 We describe each set of comparisons as being either inside or outside
3712 a range, using a variable named like IN_P, and then describe the
3713 range with a lower and upper bound. If one of the bounds is omitted,
3714 it represents either the highest or lowest value of the type.
3716 In the comments below, we represent a range by two numbers in brackets
3717 preceded by a "+" to designate being inside that range, or a "-" to
3718 designate being outside that range, so the condition can be inverted by
3719 flipping the prefix. An omitted bound is represented by a "-". For
3720 example, "- [-, 10]" means being outside the range starting at the lowest
3721 possible value and ending at 10, in other words, being greater than 10.
3722 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 always false.
3725 We set up things so that the missing bounds are handled in a consistent
3726 manner so neither a missing bound nor "true" and "false" need to be
3727 handled using a special case. */
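/* Illustration (not part of the folder itself): a minimal standalone check
   of the sample identity in the comment above.  The rewritten form relies
   only on unsigned wraparound, so it can be verified directly in C; the
   window keeps the subtraction away from signed overflow.  */
#include <assert.h>

static void
check_range_test_identity (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    assert ((x == 2 || x == 3 || x == 4 || x == 5)
            == ((unsigned) (x - 2) <= 3u));
}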
3729 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3730 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3731 and UPPER1_P are nonzero if the respective argument is an upper bound
3732 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3733 must be specified for a comparison. ARG1 will be converted to ARG0's
3734 type if both are specified. */
3736 static tree
3737 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3738 tree arg1, int upper1_p)
3740 tree tem;
3741 int result;
3742 int sgn0, sgn1;
3744 /* If neither arg represents infinity, do the normal operation.
3745 Else, if not a comparison, return infinity. Else handle the special
3746 comparison rules. Note that most of the cases below won't occur, but
3747 are handled for consistency. */
3749 if (arg0 != 0 && arg1 != 0)
3751 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3752 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 STRIP_NOPS (tem);
3754 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3757 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 return 0;
3760 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3761 for neither. In real maths, we cannot assume open ended ranges are
3762 the same. But, this is computer arithmetic, where numbers are finite.
3763 We can therefore make the transformation of any unbounded range with
3764 the value Z, Z being greater than any representable number. This permits
3765 us to treat unbounded ranges as equal. */
3766 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3767 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3768 switch (code)
3770 case EQ_EXPR:
3771 result = sgn0 == sgn1;
3772 break;
3773 case NE_EXPR:
3774 result = sgn0 != sgn1;
3775 break;
3776 case LT_EXPR:
3777 result = sgn0 < sgn1;
3778 break;
3779 case LE_EXPR:
3780 result = sgn0 <= sgn1;
3781 break;
3782 case GT_EXPR:
3783 result = sgn0 > sgn1;
3784 break;
3785 case GE_EXPR:
3786 result = sgn0 >= sgn1;
3787 break;
3788 default:
3789 gcc_unreachable ();
3792 return constant_boolean_node (result, type);
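/* Illustration (not part of the folder itself): the SGN encoding used by
   range_binop above, restated over plain ints.  A hypothetical struct
   models "value or infinity": sgn is -1 for an omitted lower bound, +1
   for an omitted upper bound, and 0 for a finite value.  Infinities of
   the same sign compare equal, exactly as the function treats them.  */
#include <assert.h>

struct bound { int sgn; int value; };

static int
bound_le (struct bound a, struct bound b)
{
  if (a.sgn != 0 || b.sgn != 0)
    return a.sgn <= b.sgn;      /* at least one infinity: sign decides */
  return a.value <= b.value;    /* both finite: ordinary comparison */
}

static void
check_bounds (void)
{
  struct bound minus_inf = { -1, 0 }, ten = { 0, 10 }, plus_inf = { 1, 0 };
  assert (bound_le (minus_inf, ten));
  assert (bound_le (ten, plus_inf));
  assert (bound_le (plus_inf, plus_inf));   /* equal infinities */
  assert (!bound_le (plus_inf, ten));
}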
3795 /* Helper routine for make_range. Perform one step for it, return
3796 new expression if the loop should continue or NULL_TREE if it should
3797 stop. */
3799 tree
3800 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3801 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3802 bool *strict_overflow_p)
3804 tree arg0_type = TREE_TYPE (arg0);
3805 tree n_low, n_high, low = *p_low, high = *p_high;
3806 int in_p = *p_in_p, n_in_p;
3808 switch (code)
3810 case TRUTH_NOT_EXPR:
3811 *p_in_p = ! in_p;
3812 return arg0;
3814 case EQ_EXPR: case NE_EXPR:
3815 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3816 /* We can only do something if the range is testing for zero
3817 and if the second operand is an integer constant. Note that
3818 saying something is "in" the range we make is done by
3819 complementing IN_P, since IN_P is set in the initial case of
3820 being not equal to zero; "out" is leaving it alone. */
3821 if (low == NULL_TREE || high == NULL_TREE
3822 || ! integer_zerop (low) || ! integer_zerop (high)
3823 || TREE_CODE (arg1) != INTEGER_CST)
3824 return NULL_TREE;
3826 switch (code)
3828 case NE_EXPR: /* - [c, c] */
3829 low = high = arg1;
3830 break;
3831 case EQ_EXPR: /* + [c, c] */
3832 in_p = ! in_p, low = high = arg1;
3833 break;
3834 case GT_EXPR: /* - [-, c] */
3835 low = 0, high = arg1;
3836 break;
3837 case GE_EXPR: /* + [c, -] */
3838 in_p = ! in_p, low = arg1, high = 0;
3839 break;
3840 case LT_EXPR: /* - [c, -] */
3841 low = arg1, high = 0;
3842 break;
3843 case LE_EXPR: /* + [-, c] */
3844 in_p = ! in_p, low = 0, high = arg1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 /* If this is an unsigned comparison, we also know that EXP is
3851 greater than or equal to zero. We base the range tests we make
3852 on that fact, so we record it here so we can parse existing
3853 range tests. We test arg0_type since often the return type
3854 of, e.g. EQ_EXPR, is boolean. */
3855 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3857 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3858 in_p, low, high, 1,
3859 build_int_cst (arg0_type, 0),
3860 NULL_TREE))
3861 return NULL_TREE;
3863 in_p = n_in_p, low = n_low, high = n_high;
3865 /* If the high bound is missing, but we have a nonzero low
3866 bound, reverse the range so it goes from zero to the low bound
3867 minus 1. */
3868 if (high == 0 && low && ! integer_zerop (low))
3870 in_p = ! in_p;
3871 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3872 integer_one_node, 0);
3873 low = build_int_cst (arg0_type, 0);
3877 *p_low = low;
3878 *p_high = high;
3879 *p_in_p = in_p;
3880 return arg0;
3882 case NEGATE_EXPR:
3883 /* (-x) IN [a,b] -> x in [-b, -a] */
3884 n_low = range_binop (MINUS_EXPR, exp_type,
3885 build_int_cst (exp_type, 0),
3886 0, high, 1);
3887 n_high = range_binop (MINUS_EXPR, exp_type,
3888 build_int_cst (exp_type, 0),
3889 0, low, 0);
3890 if (n_high != 0 && TREE_OVERFLOW (n_high))
3891 return NULL_TREE;
3892 goto normalize;
3894 case BIT_NOT_EXPR:
3895 /* ~ X -> -X - 1 */
3896 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3897 build_int_cst (exp_type, 1));
3899 case PLUS_EXPR:
3900 case MINUS_EXPR:
3901 if (TREE_CODE (arg1) != INTEGER_CST)
3902 return NULL_TREE;
3904 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3905 move a constant to the other side. */
3906 if (!TYPE_UNSIGNED (arg0_type)
3907 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3908 return NULL_TREE;
3910 /* If EXP is signed, any overflow in the computation is undefined,
3911 so we don't worry about it so long as our computations on
3912 the bounds don't overflow. For unsigned, overflow is defined
3913 and this is exactly the right thing. */
3914 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3915 arg0_type, low, 0, arg1, 0);
3916 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3917 arg0_type, high, 1, arg1, 0);
3918 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3919 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3920 return NULL_TREE;
3922 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3923 *strict_overflow_p = true;
3925 normalize:
3926 /* Check for an unsigned range which has wrapped around the maximum
3927 value thus making n_high < n_low, and normalize it. */
3928 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3930 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3931 integer_one_node, 0);
3932 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3933 integer_one_node, 0);
3935 /* If the range is of the form +/- [ x+1, x ], we won't
3936 be able to normalize it. But then, it represents the
3937 whole range or the empty set, so make it
3938 +/- [ -, - ]. */
3939 if (tree_int_cst_equal (n_low, low)
3940 && tree_int_cst_equal (n_high, high))
3941 low = high = 0;
3942 else
3943 in_p = ! in_p;
3945 else
3946 low = n_low, high = n_high;
3948 *p_low = low;
3949 *p_high = high;
3950 *p_in_p = in_p;
3951 return arg0;
3953 CASE_CONVERT:
3954 case NON_LVALUE_EXPR:
3955 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3956 return NULL_TREE;
3958 if (! INTEGRAL_TYPE_P (arg0_type)
3959 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3960 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3961 return NULL_TREE;
3963 n_low = low, n_high = high;
3965 if (n_low != 0)
3966 n_low = fold_convert_loc (loc, arg0_type, n_low);
3968 if (n_high != 0)
3969 n_high = fold_convert_loc (loc, arg0_type, n_high);
3971 /* If we're converting arg0 from an unsigned type to exp,
3972 a signed type, we will be doing the comparison as unsigned.
3973 The tests above have already verified that LOW and HIGH
3974 are both positive.
3976 So we have to ensure that we will handle large unsigned
3977 values the same way that the current signed bounds treat
3978 negative values. */
3980 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3982 tree high_positive;
3983 tree equiv_type;
3984 /* For fixed-point modes, we need to pass the saturating flag
3985 as the 2nd parameter. */
3986 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3987 equiv_type
3988 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3989 TYPE_SATURATING (arg0_type));
3990 else
3991 equiv_type
3992 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
3994 /* A range without an upper bound is, naturally, unbounded.
3995 Since convert would have cropped a very large value, use
3996 the max value for the destination type. */
3997 high_positive
3998 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3999 : TYPE_MAX_VALUE (arg0_type);
4001 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4002 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4003 fold_convert_loc (loc, arg0_type,
4004 high_positive),
4005 build_int_cst (arg0_type, 1));
4007 /* If the low bound is specified, "and" the range with the
4008 range for which the original unsigned value will be
4009 positive. */
4010 if (low != 0)
4012 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4013 1, fold_convert_loc (loc, arg0_type,
4014 integer_zero_node),
4015 high_positive))
4016 return NULL_TREE;
4018 in_p = (n_in_p == in_p);
4020 else
4022 /* Otherwise, "or" the range with the range of the input
4023 that will be interpreted as negative. */
4024 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4025 1, fold_convert_loc (loc, arg0_type,
4026 integer_zero_node),
4027 high_positive))
4028 return NULL_TREE;
4030 in_p = (in_p != n_in_p);
4034 *p_low = n_low;
4035 *p_high = n_high;
4036 *p_in_p = in_p;
4037 return arg0;
4039 default:
4040 return NULL_TREE;
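/* Illustration (not part of the folder itself): the PLUS_EXPR step with
   the "normalize" wraparound fixup, modelled on 8-bit unsigned
   arithmetic.  Subtracting the constant from both bounds of
   "(x + 10) in [0, 5]" wraps the low bound past the high one, and the
   normalized result is "x in [246, 251]".  */
#include <assert.h>

static void
check_plus_step (void)
{
  unsigned x;
  for (x = 0; x < 256; x++)
    {
      int orig = ((x + 10) & 0xff) <= 5;    /* (x + 10) in [0, 5] */
      int stepped = (x >= 246 && x <= 251); /* bounds shifted mod 256 */
      assert (orig == stepped);
    }
}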
4044 /* Given EXP, a logical expression, set the range it is testing into
4045 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4046 actually being tested. *PLOW and *PHIGH will be made of the same
4047 type as the returned expression. If EXP is not a comparison, we
4048 will most likely not be returning a useful value and range. Set
4049 *STRICT_OVERFLOW_P to true if the return value is only valid
4050 because signed overflow is undefined; otherwise, do not change
4051 *STRICT_OVERFLOW_P. */
4053 tree
4054 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4055 bool *strict_overflow_p)
4057 enum tree_code code;
4058 tree arg0, arg1 = NULL_TREE;
4059 tree exp_type, nexp;
4060 int in_p;
4061 tree low, high;
4062 location_t loc = EXPR_LOCATION (exp);
4064 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4065 and see if we can refine the range. Some of the cases below may not
4066 happen, but it doesn't seem worth worrying about this. We "continue"
4067 the outer loop when we've changed something; otherwise we "break"
4068 the switch, which will "break" the while. */
4070 in_p = 0;
4071 low = high = build_int_cst (TREE_TYPE (exp), 0);
4073 while (1)
4075 code = TREE_CODE (exp);
4076 exp_type = TREE_TYPE (exp);
4077 arg0 = NULL_TREE;
4079 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4081 if (TREE_OPERAND_LENGTH (exp) > 0)
4082 arg0 = TREE_OPERAND (exp, 0);
4083 if (TREE_CODE_CLASS (code) == tcc_binary
4084 || TREE_CODE_CLASS (code) == tcc_comparison
4085 || (TREE_CODE_CLASS (code) == tcc_expression
4086 && TREE_OPERAND_LENGTH (exp) > 1))
4087 arg1 = TREE_OPERAND (exp, 1);
4089 if (arg0 == NULL_TREE)
4090 break;
4092 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4093 &high, &in_p, strict_overflow_p);
4094 if (nexp == NULL_TREE)
4095 break;
4096 exp = nexp;
4099 /* If EXP is a constant, we can evaluate whether this is true or false. */
4100 if (TREE_CODE (exp) == INTEGER_CST)
4102 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4103 exp, 0, low, 0))
4104 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4105 exp, 1, high, 1)));
4106 low = high = 0;
4107 exp = 0;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4111 return exp;
4114 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4115 type, TYPE, return an expression to test if EXP is in (or out of, depending
4116 on IN_P) the range. Return 0 if the test couldn't be created. */
4118 tree
4119 build_range_check (location_t loc, tree type, tree exp, int in_p,
4120 tree low, tree high)
4122 tree etype = TREE_TYPE (exp), value;
4124 #ifdef HAVE_canonicalize_funcptr_for_compare
4125 /* Disable this optimization for function pointer expressions
4126 on targets that require function pointer canonicalization. */
4127 if (HAVE_canonicalize_funcptr_for_compare
4128 && TREE_CODE (etype) == POINTER_TYPE
4129 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4130 return NULL_TREE;
4131 #endif
4133 if (! in_p)
4135 value = build_range_check (loc, type, exp, 1, low, high);
4136 if (value != 0)
4137 return invert_truthvalue_loc (loc, value);
4139 return 0;
4142 if (low == 0 && high == 0)
4143 return build_int_cst (type, 1);
4145 if (low == 0)
4146 return fold_build2_loc (loc, LE_EXPR, type, exp,
4147 fold_convert_loc (loc, etype, high));
4149 if (high == 0)
4150 return fold_build2_loc (loc, GE_EXPR, type, exp,
4151 fold_convert_loc (loc, etype, low));
4153 if (operand_equal_p (low, high, 0))
4154 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4155 fold_convert_loc (loc, etype, low));
4157 if (integer_zerop (low))
4159 if (! TYPE_UNSIGNED (etype))
4161 etype = unsigned_type_for (etype);
4162 high = fold_convert_loc (loc, etype, high);
4163 exp = fold_convert_loc (loc, etype, exp);
4165 return build_range_check (loc, type, exp, 1, 0, high);
4168 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4169 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4171 unsigned HOST_WIDE_INT lo;
4172 HOST_WIDE_INT hi;
4173 int prec;
4175 prec = TYPE_PRECISION (etype);
4176 if (prec <= HOST_BITS_PER_WIDE_INT)
4178 hi = 0;
4179 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4181 else
4183 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4184 lo = (unsigned HOST_WIDE_INT) -1;
4187 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4189 if (TYPE_UNSIGNED (etype))
4191 tree signed_etype = signed_type_for (etype);
4192 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4193 etype
4194 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4195 else
4196 etype = signed_etype;
4197 exp = fold_convert_loc (loc, etype, exp);
4199 return fold_build2_loc (loc, GT_EXPR, type, exp,
4200 build_int_cst (etype, 0));
4204 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4205 This requires wrap-around arithmetic for the type of the expression.
4206 First make sure that arithmetic in this type is valid, then make sure
4207 that it wraps around. */
4208 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4209 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4210 TYPE_UNSIGNED (etype));
4212 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4214 tree utype, minv, maxv;
4216 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4217 for the type in question, as we rely on this here. */
4218 utype = unsigned_type_for (etype);
4219 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4220 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4221 integer_one_node, 1);
4222 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4224 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4225 minv, 1, maxv, 1)))
4226 etype = utype;
4227 else
4228 return 0;
4231 high = fold_convert_loc (loc, etype, high);
4232 low = fold_convert_loc (loc, etype, low);
4233 exp = fold_convert_loc (loc, etype, exp);
4235 value = const_binop (MINUS_EXPR, high, low);
4238 if (POINTER_TYPE_P (etype))
4240 if (value != 0 && !TREE_OVERFLOW (value))
4242 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4243 return build_range_check (loc, type,
4244 fold_build_pointer_plus_loc (loc, exp, low),
4245 1, build_int_cst (etype, 0), value);
4247 return 0;
4250 if (value != 0 && !TREE_OVERFLOW (value))
4251 return build_range_check (loc, type,
4252 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4253 1, build_int_cst (etype, 0), value);
4255 return 0;
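/* Illustration (not part of the folder itself): the two rewrites above,
   checked exhaustively for 8-bit values.  First the generic form, which
   subtracts the low bound and compares unsigned; then the special case
   [1, 127] -> "(signed char) c > 0", assuming the usual two's
   complement representation for the narrowing conversion.  */
#include <assert.h>

static void
check_build_range_check (void)
{
  const unsigned char low = 32, high = 126;
  unsigned v;
  for (v = 0; v < 256; v++)
    {
      unsigned char c = (unsigned char) v;
      assert ((c >= low && c <= high)
              == ((unsigned char) (c - low) <= (unsigned char) (high - low)));
      assert ((c >= 1 && c <= 127) == ((signed char) c > 0));
    }
}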
4258 /* Return the predecessor of VAL in its type, handling the infinite case. */
4260 static tree
4261 range_predecessor (tree val)
4263 tree type = TREE_TYPE (val);
4265 if (INTEGRAL_TYPE_P (type)
4266 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4267 return 0;
4268 else
4269 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4272 /* Return the successor of VAL in its type, handling the infinite case. */
4274 static tree
4275 range_successor (tree val)
4277 tree type = TREE_TYPE (val);
4279 if (INTEGRAL_TYPE_P (type)
4280 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4281 return 0;
4282 else
4283 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4286 /* Given two ranges, see if we can merge them into one. Return 1 if we
4287 can, 0 if we can't. Set the output range into the specified parameters. */
4289 bool
4290 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4291 tree high0, int in1_p, tree low1, tree high1)
4293 int no_overlap;
4294 int subset;
4295 int temp;
4296 tree tem;
4297 int in_p;
4298 tree low, high;
4299 int lowequal = ((low0 == 0 && low1 == 0)
4300 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4301 low0, 0, low1, 0)));
4302 int highequal = ((high0 == 0 && high1 == 0)
4303 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4304 high0, 1, high1, 1)));
4306 /* Make range 0 be the range that starts first, or ends last if they
4307 start at the same value. Swap them if it isn't. */
4308 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4309 low0, 0, low1, 0))
4310 || (lowequal
4311 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4312 high1, 1, high0, 1))))
4314 temp = in0_p, in0_p = in1_p, in1_p = temp;
4315 tem = low0, low0 = low1, low1 = tem;
4316 tem = high0, high0 = high1, high1 = tem;
4319 /* Now flag two cases, whether the ranges are disjoint or whether the
4320 second range is totally subsumed in the first. Note that the tests
4321 below are simplified by the ones above. */
4322 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4323 high0, 1, low1, 0));
4324 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4325 high1, 1, high0, 1));
4327 /* We now have four cases, depending on whether we are including or
4328 excluding the two ranges. */
4329 if (in0_p && in1_p)
4331 /* If they don't overlap, the result is false. If the second range
4332 is a subset it is the result. Otherwise, the range is from the start
4333 of the second to the end of the first. */
4334 if (no_overlap)
4335 in_p = 0, low = high = 0;
4336 else if (subset)
4337 in_p = 1, low = low1, high = high1;
4338 else
4339 in_p = 1, low = low1, high = high0;
4342 else if (in0_p && ! in1_p)
4344 /* If they don't overlap, the result is the first range. If they are
4345 equal, the result is false. If the second range is a subset of the
4346 first, and the ranges begin at the same place, we go from just after
4347 the end of the second range to the end of the first. If the second
4348 range is not a subset of the first, or if it is a subset and both
4349 ranges end at the same place, the range starts at the start of the
4350 first range and ends just before the second range.
4351 Otherwise, we can't describe this as a single range. */
4352 if (no_overlap)
4353 in_p = 1, low = low0, high = high0;
4354 else if (lowequal && highequal)
4355 in_p = 0, low = high = 0;
4356 else if (subset && lowequal)
4358 low = range_successor (high1);
4359 high = high0;
4360 in_p = 1;
4361 if (low == 0)
4363 /* We are in the weird situation where high0 > high1 but
4364 high1 has no successor. Punt. */
4365 return 0;
4368 else if (! subset || highequal)
4370 low = low0;
4371 high = range_predecessor (low1);
4372 in_p = 1;
4373 if (high == 0)
4375 /* low0 < low1 but low1 has no predecessor. Punt. */
4376 return 0;
4379 else
4380 return 0;
4383 else if (! in0_p && in1_p)
4385 /* If they don't overlap, the result is the second range. If the second
4386 is a subset of the first, the result is false. Otherwise,
4387 the range starts just after the first range and ends at the
4388 end of the second. */
4389 if (no_overlap)
4390 in_p = 1, low = low1, high = high1;
4391 else if (subset || highequal)
4392 in_p = 0, low = high = 0;
4393 else
4395 low = range_successor (high0);
4396 high = high1;
4397 in_p = 1;
4398 if (low == 0)
4400 /* high1 > high0 but high0 has no successor. Punt. */
4401 return 0;
4406 else
4408 /* The case where we are excluding both ranges. Here the complex case
4409 is if they don't overlap. In that case, the only time we have a
4410 range is if they are adjacent. If the second is a subset of the
4411 first, the result is the first. Otherwise, the range to exclude
4412 starts at the beginning of the first range and ends at the end of the
4413 second. */
4414 if (no_overlap)
4416 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4417 range_successor (high0),
4418 1, low1, 0)))
4419 in_p = 0, low = low0, high = high1;
4420 else
4422 /* Canonicalize - [min, x] into - [-, x]. */
4423 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4424 switch (TREE_CODE (TREE_TYPE (low0)))
4426 case ENUMERAL_TYPE:
4427 if (TYPE_PRECISION (TREE_TYPE (low0))
4428 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4429 break;
4430 /* FALLTHROUGH */
4431 case INTEGER_TYPE:
4432 if (tree_int_cst_equal (low0,
4433 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4434 low0 = 0;
4435 break;
4436 case POINTER_TYPE:
4437 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4438 && integer_zerop (low0))
4439 low0 = 0;
4440 break;
4441 default:
4442 break;
4445 /* Canonicalize - [x, max] into - [x, -]. */
4446 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4447 switch (TREE_CODE (TREE_TYPE (high1)))
4449 case ENUMERAL_TYPE:
4450 if (TYPE_PRECISION (TREE_TYPE (high1))
4451 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4452 break;
4453 /* FALLTHROUGH */
4454 case INTEGER_TYPE:
4455 if (tree_int_cst_equal (high1,
4456 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4457 high1 = 0;
4458 break;
4459 case POINTER_TYPE:
4460 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4461 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4462 high1, 1,
4463 integer_one_node, 1)))
4464 high1 = 0;
4465 break;
4466 default:
4467 break;
4470 /* The ranges might also be adjacent between the maximum and
4471 minimum values of the given type. For
4472 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4473 return + [x + 1, y - 1]. */
4474 if (low0 == 0 && high1 == 0)
4476 low = range_successor (high0);
4477 high = range_predecessor (low1);
4478 if (low == 0 || high == 0)
4479 return 0;
4481 in_p = 1;
4483 else
4484 return 0;
4487 else if (subset)
4488 in_p = 0, low = low0, high = high0;
4489 else
4490 in_p = 0, low = low0, high = high1;
4493 *pin_p = in_p, *plow = low, *phigh = high;
4494 return 1;
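/* Illustration (not part of the folder itself): two merges performed by
   the logic above, checked over a window.  "+[2,5] and +[4,9]" overlap
   without subsumption, giving "+[4,5]"; excluding the adjacent ranges
   "-[-,4]" and "-[5,-]" leaves "-[-,-]", the always-false test.  */
#include <assert.h>

static void
check_merge_ranges (void)
{
  int x;
  for (x = -50; x <= 50; x++)
    {
      assert (((x >= 2 && x <= 5) && (x >= 4 && x <= 9))
              == (x >= 4 && x <= 5));
      assert ((!(x <= 4) && !(x >= 5)) == 0);   /* adjacent exclusions */
    }
}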
4498 /* Subroutine of fold, looking inside expressions of the form
4499 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4500 of the COND_EXPR. This function is being used also to optimize
4501 A op B ? C : A, by reversing the comparison first.
4503 Return a folded expression whose code is not a COND_EXPR
4504 anymore, or NULL_TREE if no folding opportunity is found. */
4506 static tree
4507 fold_cond_expr_with_comparison (location_t loc, tree type,
4508 tree arg0, tree arg1, tree arg2)
4510 enum tree_code comp_code = TREE_CODE (arg0);
4511 tree arg00 = TREE_OPERAND (arg0, 0);
4512 tree arg01 = TREE_OPERAND (arg0, 1);
4513 tree arg1_type = TREE_TYPE (arg1);
4514 tree tem;
4516 STRIP_NOPS (arg1);
4517 STRIP_NOPS (arg2);
4519 /* If we have A op 0 ? A : -A, consider applying the following
4520 transformations:
4522 A == 0? A : -A same as -A
4523 A != 0? A : -A same as A
4524 A >= 0? A : -A same as abs (A)
4525 A > 0? A : -A same as abs (A)
4526 A <= 0? A : -A same as -abs (A)
4527 A < 0? A : -A same as -abs (A)
4529 None of these transformations work for modes with signed
4530 zeros. If A is +/-0, the first two transformations will
4531 change the sign of the result (from +0 to -0, or vice
4532 versa). The last four will fix the sign of the result,
4533 even though the original expressions could be positive or
4534 negative, depending on the sign of A.
4536 Note that all these transformations are correct if A is
4537 NaN, since the two alternatives (A and -A) are also NaNs. */
4538 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4539 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4540 ? real_zerop (arg01)
4541 : integer_zerop (arg01))
4542 && ((TREE_CODE (arg2) == NEGATE_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4544 /* In the case that A is of the form X-Y, '-A' (arg2) may
4545 have already been folded to Y-X, check for that. */
4546 || (TREE_CODE (arg1) == MINUS_EXPR
4547 && TREE_CODE (arg2) == MINUS_EXPR
4548 && operand_equal_p (TREE_OPERAND (arg1, 0),
4549 TREE_OPERAND (arg2, 1), 0)
4550 && operand_equal_p (TREE_OPERAND (arg1, 1),
4551 TREE_OPERAND (arg2, 0), 0))))
4552 switch (comp_code)
4554 case EQ_EXPR:
4555 case UNEQ_EXPR:
4556 tem = fold_convert_loc (loc, arg1_type, arg1);
4557 return pedantic_non_lvalue_loc (loc,
4558 fold_convert_loc (loc, type,
4559 negate_expr (tem)));
4560 case NE_EXPR:
4561 case LTGT_EXPR:
4562 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4563 case UNGE_EXPR:
4564 case UNGT_EXPR:
4565 if (flag_trapping_math)
4566 break;
4567 /* Fall through. */
4568 case GE_EXPR:
4569 case GT_EXPR:
4570 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4571 arg1 = fold_convert_loc (loc, signed_type_for
4572 (TREE_TYPE (arg1)), arg1);
4573 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4574 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4575 case UNLE_EXPR:
4576 case UNLT_EXPR:
4577 if (flag_trapping_math)
4578 break;
4579 case LE_EXPR:
4580 case LT_EXPR:
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return negate_expr (fold_convert_loc (loc, type, tem));
4586 default:
4587 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4588 break;
4591 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4592 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4593 both transformations are correct when A is NaN: A != 0
4594 is then true, and A == 0 is false. */
4596 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4597 && integer_zerop (arg01) && integer_zerop (arg2))
4599 if (comp_code == NE_EXPR)
4600 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4601 else if (comp_code == EQ_EXPR)
4602 return build_int_cst (type, 0);
4605 /* Try some transformations of A op B ? A : B.
4607 A == B? A : B same as B
4608 A != B? A : B same as A
4609 A >= B? A : B same as max (A, B)
4610 A > B? A : B same as max (B, A)
4611 A <= B? A : B same as min (A, B)
4612 A < B? A : B same as min (B, A)
4614 As above, these transformations don't work in the presence
4615 of signed zeros. For example, if A and B are zeros of
4616 opposite sign, the first two transformations will change
4617 the sign of the result. In the last four, the original
4618 expressions give different results for (A=+0, B=-0) and
4619 (A=-0, B=+0), but the transformed expressions do not.
4621 The first two transformations are correct if either A or B
4622 is a NaN. In the first transformation, the condition will
4623 be false, and B will indeed be chosen. In the case of the
4624 second transformation, the condition A != B will be true,
4625 and A will be chosen.
4627 The conversions to max() and min() are not correct if B is
4628 a number and A is not. The conditions in the original
4629 expressions will be false, so all four give B. The min()
4630 and max() versions would give a NaN instead. */
4631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4633 /* Avoid these transformations if the COND_EXPR may be used
4634 as an lvalue in the C++ front-end. PR c++/19199. */
4635 && (in_gimple_form
4636 || (strcmp (lang_hooks.name, "GNU C++") != 0
4637 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4638 || ! maybe_lvalue_p (arg1)
4639 || ! maybe_lvalue_p (arg2)))
4641 tree comp_op0 = arg00;
4642 tree comp_op1 = arg01;
4643 tree comp_type = TREE_TYPE (comp_op0);
4645 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4646 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4648 comp_type = type;
4649 comp_op0 = arg1;
4650 comp_op1 = arg2;
4653 switch (comp_code)
4655 case EQ_EXPR:
4656 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4657 case NE_EXPR:
4658 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4659 case LE_EXPR:
4660 case LT_EXPR:
4661 case UNLE_EXPR:
4662 case UNLT_EXPR:
4663 /* In C++ a ?: expression can be an lvalue, so put the
4664 operand which will be used if they are equal first
4665 so that we can convert this back to the
4666 corresponding COND_EXPR. */
4667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4669 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4670 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4671 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4672 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4673 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4674 comp_op1, comp_op0);
4675 return pedantic_non_lvalue_loc (loc,
4676 fold_convert_loc (loc, type, tem));
4678 break;
4679 case GE_EXPR:
4680 case GT_EXPR:
4681 case UNGE_EXPR:
4682 case UNGT_EXPR:
4683 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4685 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4686 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4687 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4688 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4689 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4690 comp_op1, comp_op0);
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, tem));
4694 break;
4695 case UNEQ_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4697 return pedantic_non_lvalue_loc (loc,
4698 fold_convert_loc (loc, type, arg2));
4699 break;
4700 case LTGT_EXPR:
4701 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, arg1));
4704 break;
4705 default:
4706 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4707 break;
4711 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4712 we might still be able to simplify this. For example,
4713 if C1 is one less or one more than C2, this might have started
4714 out as a MIN or MAX and been transformed by this function.
4715 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4717 if (INTEGRAL_TYPE_P (type)
4718 && TREE_CODE (arg01) == INTEGER_CST
4719 && TREE_CODE (arg2) == INTEGER_CST)
4720 switch (comp_code)
4722 case EQ_EXPR:
4723 if (TREE_CODE (arg1) == INTEGER_CST)
4724 break;
4725 /* We can replace A with C1 in this case. */
4726 arg1 = fold_convert_loc (loc, type, arg01);
4727 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4729 case LT_EXPR:
4730 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4731 MIN_EXPR, to preserve the signedness of the comparison. */
4732 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4733 OEP_ONLY_CONST)
4734 && operand_equal_p (arg01,
4735 const_binop (PLUS_EXPR, arg2,
4736 build_int_cst (type, 1)),
4737 OEP_ONLY_CONST))
4739 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4740 fold_convert_loc (loc, TREE_TYPE (arg00),
4741 arg2));
4742 return pedantic_non_lvalue_loc (loc,
4743 fold_convert_loc (loc, type, tem));
4745 break;
4747 case LE_EXPR:
4748 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4749 as above. */
4750 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4751 OEP_ONLY_CONST)
4752 && operand_equal_p (arg01,
4753 const_binop (MINUS_EXPR, arg2,
4754 build_int_cst (type, 1)),
4755 OEP_ONLY_CONST))
4757 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4758 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 arg2));
4760 return pedantic_non_lvalue_loc (loc,
4761 fold_convert_loc (loc, type, tem));
4763 break;
4765 case GT_EXPR:
4766 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4767 MAX_EXPR, to preserve the signedness of the comparison. */
4768 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4769 OEP_ONLY_CONST)
4770 && operand_equal_p (arg01,
4771 const_binop (MINUS_EXPR, arg2,
4772 build_int_cst (type, 1)),
4773 OEP_ONLY_CONST))
4775 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4776 fold_convert_loc (loc, TREE_TYPE (arg00),
4777 arg2));
4778 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4780 break;
4782 case GE_EXPR:
4783 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4784 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (PLUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4796 break;
4797 case NE_EXPR:
4798 break;
4799 default:
4800 gcc_unreachable ();
4803 return NULL_TREE;
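/* Illustration (not part of the folder itself): the "A op 0 ? A : -A"
   table from the comment above, checked for ints.  Integers have no
   signed zeros, so every row holds exactly; abs from <stdlib.h> stands
   in for the ABS_EXPR the folder builds.  */
#include <assert.h>
#include <stdlib.h>

static void
check_cond_abs (void)
{
  int a;
  for (a = -5; a <= 5; a++)
    {
      assert ((a == 0 ? a : -a) == -a);
      assert ((a != 0 ? a : -a) == a);
      assert ((a >= 0 ? a : -a) == abs (a));
      assert ((a > 0 ? a : -a) == abs (a));
      assert ((a <= 0 ? a : -a) == -abs (a));
      assert ((a < 0 ? a : -a) == -abs (a));
    }
}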
4808 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4809 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4810 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4811 false) >= 2)
4812 #endif
4814 /* EXP is some logical combination of boolean tests. See if we can
4815 merge it into some range test. Return the new tree if so. */
4817 static tree
4818 fold_range_test (location_t loc, enum tree_code code, tree type,
4819 tree op0, tree op1)
4821 int or_op = (code == TRUTH_ORIF_EXPR
4822 || code == TRUTH_OR_EXPR);
4823 int in0_p, in1_p, in_p;
4824 tree low0, low1, low, high0, high1, high;
4825 bool strict_overflow_p = false;
4826 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4827 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4828 tree tem;
4829 const char * const warnmsg = G_("assuming signed overflow does not occur "
4830 "when simplifying range test");
4832 /* If this is an OR operation, invert both sides; we will invert
4833 again at the end. */
4834 if (or_op)
4835 in0_p = ! in0_p, in1_p = ! in1_p;
4837 /* If both expressions are the same, if we can merge the ranges, and we
4838 can build the range test, return it or it inverted. If one of the
4839 ranges is always true or always false, consider it to be the same
4840 expression as the other. */
4841 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4842 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4843 in1_p, low1, high1)
4844 && 0 != (tem = (build_range_check (loc, type,
4845 lhs != 0 ? lhs
4846 : rhs != 0 ? rhs : integer_zero_node,
4847 in_p, low, high))))
4849 if (strict_overflow_p)
4850 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4851 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4854 /* On machines where the branch cost is expensive, if this is a
4855 short-circuited branch and the underlying object on both sides
4856 is the same, make a non-short-circuit operation. */
4857 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4858 && lhs != 0 && rhs != 0
4859 && (code == TRUTH_ANDIF_EXPR
4860 || code == TRUTH_ORIF_EXPR)
4861 && operand_equal_p (lhs, rhs, 0))
4863 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4864 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4865 which cases we can't do this. */
4866 if (simple_operand_p (lhs))
4867 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4868 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4869 type, op0, op1);
4871 else if (!lang_hooks.decls.global_bindings_p ()
4872 && !CONTAINS_PLACEHOLDER_P (lhs))
4874 tree common = save_expr (lhs);
4876 if (0 != (lhs = build_range_check (loc, type, common,
4877 or_op ? ! in0_p : in0_p,
4878 low0, high0))
4879 && (0 != (rhs = build_range_check (loc, type, common,
4880 or_op ? ! in1_p : in1_p,
4881 low1, high1))))
4883 if (strict_overflow_p)
4884 fold_overflow_warning (warnmsg,
4885 WARN_STRICT_OVERFLOW_COMPARISON);
4886 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4887 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4888 type, lhs, rhs);
4893 return 0;
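/* Illustration (not part of the folder itself): the OR path above
   inverts both ranges, merges them, and inverts the final test back.
   For example "x < 2 || x > 5" passes through the merged range +[2,5]
   and comes out as the inverted range check.  */
#include <assert.h>

static void
check_or_range (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    assert ((x < 2 || x > 5) == !((unsigned) (x - 2) <= 3u));
}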
4896 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4897 bit value. Arrange things so the extra bits will be set to zero if and
4898 only if C is sign-extended to its full width. If MASK is nonzero,
4899 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4901 static tree
4902 unextend (tree c, int p, int unsignedp, tree mask)
4904 tree type = TREE_TYPE (c);
4905 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4906 tree temp;
4908 if (p == modesize || unsignedp)
4909 return c;
4911 /* We work by getting just the sign bit into the low-order bit, then
4912 into the high-order bit, then sign-extend. We then XOR that value
4913 with C. */
4914 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4915 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4917 /* We must use a signed type in order to get an arithmetic right shift.
4918 However, we must also avoid introducing accidental overflows, so that
4919 a subsequent call to integer_zerop will work. Hence we must
4920 do the type conversion here. At this point, the constant is either
4921 zero or one, and the conversion to a signed type can never overflow.
4922 We could get an overflow if this conversion is done anywhere else. */
4923 if (TYPE_UNSIGNED (type))
4924 temp = fold_convert (signed_type_for (type), temp);
4926 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4927 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4928 if (mask != 0)
4929 temp = const_binop (BIT_AND_EXPR, temp,
4930 fold_convert (TREE_TYPE (c), mask));
4931 /* If necessary, convert the type back to match the type of C. */
4932 if (TYPE_UNSIGNED (type))
4933 temp = fold_convert (type, temp);
4935 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
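/* Illustration (not part of the folder itself): unextend's shift-and-XOR
   trick, modelled for a P-bit field inside a 32-bit word.  This sketch
   assumes two's complement and an arithmetic right shift of signed
   values, the same properties the function obtains by converting to a
   signed type.  The low P bits come through unchanged, and the bits
   above them end up zero exactly when C was sign-extended.  */
#include <assert.h>
#include <stdint.h>

static int32_t
unextend_model (int32_t c, int p)
{
  uint32_t sign = ((uint32_t) c >> (p - 1)) & 1;  /* sign bit of the field */
  int32_t temp = (int32_t) (sign << 31);          /* move it to the top bit */
  temp >>= 32 - p - 1;                            /* smear it over bits >= P */
  return c ^ temp;                                /* flip the extension bits */
}

static void
check_unextend (void)
{
  assert (unextend_model ((int32_t) 0xffffffff, 8) == 0xff); /* sign-extended -1 */
  assert (unextend_model (0x000000ff, 8) != 0xff);           /* not sign-extended */
}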
4938 /* For an expression that has the form
4939 (A && B) || ~B
4940 or
4941 (A || B) && ~B,
4942 we can drop one of the inner expressions and simplify to
4943 A || ~B
4944 or
4945 A && ~B
4946 LOC is the location of the resulting expression. OP is the inner
4947 logical operation; the left-hand side in the examples above, while CMPOP
4948 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4949 removing a condition that guards another, as in
4950 (A != NULL && A->...) || A == NULL
4951 which we must not transform. If RHS_ONLY is true, only eliminate the
4952 right-most operand of the inner logical operation. */
4954 static tree
4955 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4956 bool rhs_only)
4958 tree type = TREE_TYPE (cmpop);
4959 enum tree_code code = TREE_CODE (cmpop);
4960 enum tree_code truthop_code = TREE_CODE (op);
4961 tree lhs = TREE_OPERAND (op, 0);
4962 tree rhs = TREE_OPERAND (op, 1);
4963 tree orig_lhs = lhs, orig_rhs = rhs;
4964 enum tree_code rhs_code = TREE_CODE (rhs);
4965 enum tree_code lhs_code = TREE_CODE (lhs);
4966 enum tree_code inv_code;
4968 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4969 return NULL_TREE;
4971 if (TREE_CODE_CLASS (code) != tcc_comparison)
4972 return NULL_TREE;
4974 if (rhs_code == truthop_code)
4976 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4977 if (newrhs != NULL_TREE)
4979 rhs = newrhs;
4980 rhs_code = TREE_CODE (rhs);
4983 if (lhs_code == truthop_code && !rhs_only)
4985 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4986 if (newlhs != NULL_TREE)
4988 lhs = newlhs;
4989 lhs_code = TREE_CODE (lhs);
4993 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4994 if (inv_code == rhs_code
4995 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4996 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4997 return lhs;
4998 if (!rhs_only && inv_code == lhs_code
4999 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5000 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5001 return rhs;
5002 if (rhs != orig_rhs || lhs != orig_lhs)
5003 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5004 lhs, rhs);
5005 return NULL_TREE;
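/* Illustration (not part of the folder itself): truth-table check of the
   rewrite described above, with the comparison "x == 1" standing in for
   B and its inverse "x != 1" for ~B.  */
#include <assert.h>

static void
check_opposite_arm (void)
{
  int a, x;
  for (a = 0; a <= 1; a++)
    for (x = 0; x <= 1; x++)
      {
        assert (((a && x == 1) || x != 1) == (a || x != 1));
        assert (((a || x == 1) && x != 1) == (a && x != 1));
      }
}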
5008 /* Find ways of folding logical expressions of LHS and RHS:
5009 Try to merge two comparisons to the same innermost item.
5010 Look for range tests like "ch >= '0' && ch <= '9'".
5011 Look for combinations of simple terms on machines with expensive branches
5012 and evaluate the RHS unconditionally.
5014 For example, if we have p->a == 2 && p->b == 4 and we can make an
5015 object large enough to span both A and B, we can do this with a comparison
5016 against the object ANDed with the a mask.
5018 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5019 operations to do this with one comparison.
5021 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5022 function and the one above.
5024 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5025 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5027 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5028 two operands.
5030 We return the simplified tree or 0 if no optimization is possible. */
5032 static tree
5033 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5034 tree lhs, tree rhs)
5036 /* If this is the "or" of two comparisons, we can do something if
5037 the comparisons are NE_EXPR. If this is the "and", we can do something
5038 if the comparisons are EQ_EXPR. I.e.,
5039 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5041 WANTED_CODE is this operation code. For single bit fields, we can
5042 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5043 comparison for one-bit fields. */
5045 enum tree_code wanted_code;
5046 enum tree_code lcode, rcode;
5047 tree ll_arg, lr_arg, rl_arg, rr_arg;
5048 tree ll_inner, lr_inner, rl_inner, rr_inner;
5049 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5050 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5051 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5052 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5053 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5054 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5055 enum machine_mode lnmode, rnmode;
5056 tree ll_mask, lr_mask, rl_mask, rr_mask;
5057 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5058 tree l_const, r_const;
5059 tree lntype, rntype, result;
5060 HOST_WIDE_INT first_bit, end_bit;
5061 int volatilep;
5063 /* Start by getting the comparison codes. Fail if anything is volatile.
5064 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5065 it were surrounded with a NE_EXPR. */
5067 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5068 return 0;
5070 lcode = TREE_CODE (lhs);
5071 rcode = TREE_CODE (rhs);
5073 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5075 lhs = build2 (NE_EXPR, truth_type, lhs,
5076 build_int_cst (TREE_TYPE (lhs), 0));
5077 lcode = NE_EXPR;
5080 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5082 rhs = build2 (NE_EXPR, truth_type, rhs,
5083 build_int_cst (TREE_TYPE (rhs), 0));
5084 rcode = NE_EXPR;
5087 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5088 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5089 return 0;
5091 ll_arg = TREE_OPERAND (lhs, 0);
5092 lr_arg = TREE_OPERAND (lhs, 1);
5093 rl_arg = TREE_OPERAND (rhs, 0);
5094 rr_arg = TREE_OPERAND (rhs, 1);
5096 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5097 if (simple_operand_p (ll_arg)
5098 && simple_operand_p (lr_arg))
5100 if (operand_equal_p (ll_arg, rl_arg, 0)
5101 && operand_equal_p (lr_arg, rr_arg, 0))
5103 result = combine_comparisons (loc, code, lcode, rcode,
5104 truth_type, ll_arg, lr_arg);
5105 if (result)
5106 return result;
5108 else if (operand_equal_p (ll_arg, rr_arg, 0)
5109 && operand_equal_p (lr_arg, rl_arg, 0))
5111 result = combine_comparisons (loc, code, lcode,
5112 swap_tree_comparison (rcode),
5113 truth_type, ll_arg, lr_arg);
5114 if (result)
5115 return result;
5119 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5120 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5122 /* If the RHS can be evaluated unconditionally and its operands are
5123 simple, it wins to evaluate the RHS unconditionally on machines
5124 with expensive branches. In this case, this isn't a comparison
5125 that can be merged. */
5127 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5128 false) >= 2
5129 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5130 && simple_operand_p (rl_arg)
5131 && simple_operand_p (rr_arg))
5133 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5134 if (code == TRUTH_OR_EXPR
5135 && lcode == NE_EXPR && integer_zerop (lr_arg)
5136 && rcode == NE_EXPR && integer_zerop (rr_arg)
5137 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5138 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5139 return build2_loc (loc, NE_EXPR, truth_type,
5140 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5141 ll_arg, rl_arg),
5142 build_int_cst (TREE_TYPE (ll_arg), 0));
5144 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5145 if (code == TRUTH_AND_EXPR
5146 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5147 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5148 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5149 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 return build2_loc (loc, EQ_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5156 /* See if the comparisons can be merged. Then get all the parameters for
5157 each side. */
5159 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5160 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5161 return 0;
5163 volatilep = 0;
5164 ll_inner = decode_field_reference (loc, ll_arg,
5165 &ll_bitsize, &ll_bitpos, &ll_mode,
5166 &ll_unsignedp, &volatilep, &ll_mask,
5167 &ll_and_mask);
5168 lr_inner = decode_field_reference (loc, lr_arg,
5169 &lr_bitsize, &lr_bitpos, &lr_mode,
5170 &lr_unsignedp, &volatilep, &lr_mask,
5171 &lr_and_mask);
5172 rl_inner = decode_field_reference (loc, rl_arg,
5173 &rl_bitsize, &rl_bitpos, &rl_mode,
5174 &rl_unsignedp, &volatilep, &rl_mask,
5175 &rl_and_mask);
5176 rr_inner = decode_field_reference (loc, rr_arg,
5177 &rr_bitsize, &rr_bitpos, &rr_mode,
5178 &rr_unsignedp, &volatilep, &rr_mask,
5179 &rr_and_mask);
5181 /* The inner operation on the lhs of each comparison must be the
5182 same if we are to be able to do anything.
5183 Then see if we have constants. If not, the same must be true for
5184 the rhs's. */
5185 if (volatilep || ll_inner == 0 || rl_inner == 0
5186 || ! operand_equal_p (ll_inner, rl_inner, 0))
5187 return 0;
5189 if (TREE_CODE (lr_arg) == INTEGER_CST
5190 && TREE_CODE (rr_arg) == INTEGER_CST)
5191 l_const = lr_arg, r_const = rr_arg;
5192 else if (lr_inner == 0 || rr_inner == 0
5193 || ! operand_equal_p (lr_inner, rr_inner, 0))
5194 return 0;
5195 else
5196 l_const = r_const = 0;
5198 /* If either comparison code is not correct for our logical operation,
5199 fail. However, we can convert a one-bit comparison against zero into
5200 the opposite comparison against that bit being set in the field. */
5202 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5203 if (lcode != wanted_code)
5205 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5207 /* Make the left operand unsigned, since we are only interested
5208 in the value of one bit. Otherwise we are doing the wrong
5209 thing below. */
5210 ll_unsignedp = 1;
5211 l_const = ll_mask;
5213 else
5214 return 0;
5217 /* This is analogous to the code for l_const above. */
5218 if (rcode != wanted_code)
5220 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5222 rl_unsignedp = 1;
5223 r_const = rl_mask;
5225 else
5226 return 0;
5229 /* See if we can find a mode that contains both fields being compared on
5230 the left. If we can't, fail. Otherwise, update all constants and masks
5231 to be relative to a field of that size. */
5232 first_bit = MIN (ll_bitpos, rl_bitpos);
5233 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5234 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5235 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5236 volatilep);
5237 if (lnmode == VOIDmode)
5238 return 0;
5240 lnbitsize = GET_MODE_BITSIZE (lnmode);
5241 lnbitpos = first_bit & ~ (lnbitsize - 1);
5242 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5243 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5245 if (BYTES_BIG_ENDIAN)
5247 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5248 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5251 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5252 size_int (xll_bitpos));
5253 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5254 size_int (xrl_bitpos));
5256 if (l_const)
5258 l_const = fold_convert_loc (loc, lntype, l_const);
5259 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5260 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5261 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5262 fold_build1_loc (loc, BIT_NOT_EXPR,
5263 lntype, ll_mask))))
5265 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5267 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5270 if (r_const)
5272 r_const = fold_convert_loc (loc, lntype, r_const);
5273 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5274 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5275 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5276 fold_build1_loc (loc, BIT_NOT_EXPR,
5277 lntype, rl_mask))))
5279 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5281 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5285 /* If the right sides are not constant, do the same for it. Also,
5286 disallow this optimization if a size or signedness mismatch occurs
5287 between the left and right sides. */
5288 if (l_const == 0)
5290 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5291 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5292 /* Make sure the two fields on the right
5293 correspond to the left without being swapped. */
5294 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5295 return 0;
5297 first_bit = MIN (lr_bitpos, rr_bitpos);
5298 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5299 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5300 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5301 volatilep);
5302 if (rnmode == VOIDmode)
5303 return 0;
5305 rnbitsize = GET_MODE_BITSIZE (rnmode);
5306 rnbitpos = first_bit & ~ (rnbitsize - 1);
5307 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5308 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5310 if (BYTES_BIG_ENDIAN)
5312 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5313 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5316 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5317 rntype, lr_mask),
5318 size_int (xlr_bitpos));
5319 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5320 rntype, rr_mask),
5321 size_int (xrr_bitpos));
5323 /* Make a mask that corresponds to both fields being compared.
5324 Do this for both items being compared. If the operands are the
5325 same size and the bits being compared are in the same position
5326 then we can do this by masking both and comparing the masked
5327 results. */
5328 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5329 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5330 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5332 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5333 ll_unsignedp || rl_unsignedp);
5334 if (! all_ones_mask_p (ll_mask, lnbitsize))
5335 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5337 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5338 lr_unsignedp || rr_unsignedp);
5339 if (! all_ones_mask_p (lr_mask, rnbitsize))
5340 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5342 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5345 /* There is still another way we can do something: If both pairs of
5346 fields being compared are adjacent, we may be able to make a wider
5347 field containing them both.
5349 Note that we still must mask the lhs/rhs expressions. Furthermore,
5350 the mask must be shifted to account for the shift done by
5351 make_bit_field_ref. */
5352 if ((ll_bitsize + ll_bitpos == rl_bitpos
5353 && lr_bitsize + lr_bitpos == rr_bitpos)
5354 || (ll_bitpos == rl_bitpos + rl_bitsize
5355 && lr_bitpos == rr_bitpos + rr_bitsize))
5357 tree type;
5359 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5360 ll_bitsize + rl_bitsize,
5361 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5362 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5363 lr_bitsize + rr_bitsize,
5364 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5366 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5367 size_int (MIN (xll_bitpos, xrl_bitpos)));
5368 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5369 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5371 /* Convert to the smaller type before masking out unwanted bits. */
5372 type = lntype;
5373 if (lntype != rntype)
5375 if (lnbitsize > rnbitsize)
5377 lhs = fold_convert_loc (loc, rntype, lhs);
5378 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5379 type = rntype;
5381 else if (lnbitsize < rnbitsize)
5383 rhs = fold_convert_loc (loc, lntype, rhs);
5384 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5385 type = lntype;
5389 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5390 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5392 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5393 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5395 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5398 return 0;
5401 /* Handle the case of comparisons with constants. If there is something in
5402 common between the masks, those bits of the constants must be the same.
5403 If not, the condition is always false. Test for this to avoid generating
5404 incorrect code below. */
5405 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5406 if (! integer_zerop (result)
5407 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5408 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5410 if (wanted_code == NE_EXPR)
5412 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5413 return constant_boolean_node (true, truth_type);
5415 else
5417 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5418 return constant_boolean_node (false, truth_type);
5422 /* Construct the expression we will return. First get the component
5423 reference we will make. Unless the mask is all ones for the width of
5424 that field, perform the mask operation. Then compare with the
5425 merged constant. */
5426 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5427 ll_unsignedp || rl_unsignedp);
5429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5430 if (! all_ones_mask_p (ll_mask, lnbitsize))
5431 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5433 return build2_loc (loc, wanted_code, truth_type, result,
5434 const_binop (BIT_IOR_EXPR, l_const, r_const));
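/* Illustration (not part of the folder itself): the unconditional-
   evaluation rewrites above for cheap operands, checked for ints.  Both
   sides of the new expression are always evaluated, which is only valid
   because the operands are simple and cannot trap.  */
#include <assert.h>

static void
check_andor_fold (void)
{
  int a, b;
  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
        assert (((a != 0) || (b != 0)) == ((a | b) != 0));
        assert (((a == 0) && (b == 0)) == ((a | b) == 0));
      }
}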
5437 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5438 constant. */
5440 static tree
5441 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5442 tree op0, tree op1)
5444 tree arg0 = op0;
5445 enum tree_code op_code;
5446 tree comp_const;
5447 tree minmax_const;
5448 int consts_equal, consts_lt;
5449 tree inner;
5451 STRIP_SIGN_NOPS (arg0);
5453 op_code = TREE_CODE (arg0);
5454 minmax_const = TREE_OPERAND (arg0, 1);
5455 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5456 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5457 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5458 inner = TREE_OPERAND (arg0, 0);
5460 /* If something does not permit us to optimize, return NULL_TREE. */
5461 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5462 || TREE_CODE (comp_const) != INTEGER_CST
5463 || TREE_OVERFLOW (comp_const)
5464 || TREE_CODE (minmax_const) != INTEGER_CST
5465 || TREE_OVERFLOW (minmax_const))
5466 return NULL_TREE;
5468 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5469 and GT_EXPR, doing the rest with recursive calls using logical
5470 simplifications. */
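/* E.g. MAX (X, 0) >= 5 is first split into MAX (X, 0) == 5
   || MAX (X, 0) > 5, and each half is then folded by the EQ_EXPR
   and GT_EXPR cases below.  */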
5471 switch (code)
5473 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5475 tree tem
5476 = optimize_minmax_comparison (loc,
5477 invert_tree_comparison (code, false),
5478 type, op0, op1);
5479 if (tem)
5480 return invert_truthvalue_loc (loc, tem);
5481 return NULL_TREE;
5484 case GE_EXPR:
5485 return
5486 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5487 optimize_minmax_comparison
5488 (loc, EQ_EXPR, type, arg0, comp_const),
5489 optimize_minmax_comparison
5490 (loc, GT_EXPR, type, arg0, comp_const));
5492 case EQ_EXPR:
5493 if (op_code == MAX_EXPR && consts_equal)
5494 /* MAX (X, 0) == 0 -> X <= 0 */
5495 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5497 else if (op_code == MAX_EXPR && consts_lt)
5498 /* MAX (X, 0) == 5 -> X == 5 */
5499 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5501 else if (op_code == MAX_EXPR)
5502 /* MAX (X, 0) == -1 -> false */
5503 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5505 else if (consts_equal)
5506 /* MIN (X, 0) == 0 -> X >= 0 */
5507 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5509 else if (consts_lt)
5510 /* MIN (X, 0) == 5 -> false */
5511 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5513 else
5514 /* MIN (X, 0) == -1 -> X == -1 */
5515 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5517 case GT_EXPR:
5518 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5519 /* MAX (X, 0) > 0 -> X > 0
5520 MAX (X, 0) > 5 -> X > 5 */
5521 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5523 else if (op_code == MAX_EXPR)
5524 /* MAX (X, 0) > -1 -> true */
5525 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5527 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5528 /* MIN (X, 0) > 0 -> false
5529 MIN (X, 0) > 5 -> false */
5530 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5532 else
5533 /* MIN (X, 0) > -1 -> X > -1 */
5534 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5536 default:
5537 return NULL_TREE;
5541 /* T is an integer expression that is being multiplied, divided, or taken a
5542 modulus (CODE says which and what kind of divide or modulus) by a
5543 constant C. See if we can eliminate that operation by folding it with
5544 other operations already in T. WIDE_TYPE, if non-null, is a type that
5545 should be used for the computation if wider than our type.
5547 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5548 (X * 2) + (Y * 4). We must, however, be assured that either the original
5549 expression would not overflow or that overflow is undefined for the type
5550 in the language in question.
5552 If we return a non-null expression, it is an equivalent form of the
5553 original computation, but need not be in the original type.
5555 We set *STRICT_OVERFLOW_P to true if the return value depends on
5556 signed overflow being undefined. Otherwise we do not change
5557 *STRICT_OVERFLOW_P. */
5559 static tree
5560 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5561 bool *strict_overflow_p)
5563 /* To avoid exponential search depth, refuse to allow recursion past
5564 three levels. Beyond that (1) it's highly unlikely that we'll find
5565 something interesting and (2) we've probably processed it before
5566 when we built the inner expression. */
5568 static int depth;
5569 tree ret;
5571 if (depth > 3)
5572 return NULL;
5574 depth++;
5575 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5576 depth--;
5578 return ret;
5581 static tree
5582 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5583 bool *strict_overflow_p)
5585 tree type = TREE_TYPE (t);
5586 enum tree_code tcode = TREE_CODE (t);
5587 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5588 > GET_MODE_SIZE (TYPE_MODE (type)))
5589 ? wide_type : type);
5590 tree t1, t2;
5591 int same_p = tcode == code;
5592 tree op0 = NULL_TREE, op1 = NULL_TREE;
5593 bool sub_strict_overflow_p;
5595 /* Don't deal with constants of zero here; they confuse the code below. */
5596 if (integer_zerop (c))
5597 return NULL_TREE;
5599 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5600 op0 = TREE_OPERAND (t, 0);
5602 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5603 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5605 /* Note that we need not handle conditional operations here since fold
5606 already handles those cases. So just do arithmetic here. */
5607 switch (tcode)
5609 case INTEGER_CST:
5610 /* For a constant, we can always simplify if we are a multiply
5611 or (for divide and modulus) if it is a multiple of our constant. */
5612 if (code == MULT_EXPR
5613 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5614 return const_binop (code, fold_convert (ctype, t),
5615 fold_convert (ctype, c));
5616 break;
5618 CASE_CONVERT: case NON_LVALUE_EXPR:
5619 /* If op0 is an expression ... */
5620 if ((COMPARISON_CLASS_P (op0)
5621 || UNARY_CLASS_P (op0)
5622 || BINARY_CLASS_P (op0)
5623 || VL_EXP_CLASS_P (op0)
5624 || EXPRESSION_CLASS_P (op0))
5625 /* ... and has wrapping overflow, and its type is smaller
5626 than ctype, then we cannot pass through this widening. */
5627 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5628 && (TYPE_PRECISION (ctype)
5629 > TYPE_PRECISION (TREE_TYPE (op0))))
5630 /* ... or this is a truncation (t is narrower than op0),
5631 then we cannot pass through this narrowing. */
5632 || (TYPE_PRECISION (type)
5633 < TYPE_PRECISION (TREE_TYPE (op0)))
5634 /* ... or signedness changes for division or modulus,
5635 then we cannot pass through this conversion. */
5636 || (code != MULT_EXPR
5637 && (TYPE_UNSIGNED (ctype)
5638 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5639 /* ... or has undefined overflow while the converted-to
5640 type has not, then we cannot do the operation in the inner type
5641 as that would introduce undefined overflow. */
5642 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5643 && !TYPE_OVERFLOW_UNDEFINED (type))))
5644 break;
5646 /* Pass the constant down and see if we can make a simplification. If
5647 we can, replace this expression with the inner simplification for
5648 possible later conversion to our or some other type. */
5649 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5650 && TREE_CODE (t2) == INTEGER_CST
5651 && !TREE_OVERFLOW (t2)
5652 && (0 != (t1 = extract_muldiv (op0, t2, code,
5653 code == MULT_EXPR
5654 ? ctype : NULL_TREE,
5655 strict_overflow_p))))
5656 return t1;
5657 break;
5659 case ABS_EXPR:
5660 /* If widening the type changes it from signed to unsigned, then we
5661 must avoid building ABS_EXPR itself as unsigned. */
5662 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5664 tree cstype = (*signed_type_for) (ctype);
5665 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5666 != 0)
5668 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5669 return fold_convert (ctype, t1);
5671 break;
5673 /* If the constant is negative, we cannot simplify this. */
5674 if (tree_int_cst_sgn (c) == -1)
5675 break;
5676 /* FALLTHROUGH */
5677 case NEGATE_EXPR:
5678 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5679 != 0)
5680 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5681 break;
5683 case MIN_EXPR: case MAX_EXPR:
5684 /* If widening the type changes the signedness, then we can't perform
5685 this optimization as that changes the result. */
5686 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5687 break;
5689 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
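/* When C is negative the sense flips, e.g. MIN (a, b) * -2
   -> MAX (a * -2, b * -2); hence the TCODE swap below.  */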
5690 sub_strict_overflow_p = false;
5691 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5692 &sub_strict_overflow_p)) != 0
5693 && (t2 = extract_muldiv (op1, c, code, wide_type,
5694 &sub_strict_overflow_p)) != 0)
5696 if (tree_int_cst_sgn (c) < 0)
5697 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5698 if (sub_strict_overflow_p)
5699 *strict_overflow_p = true;
5700 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5701 fold_convert (ctype, t2));
5703 break;
5705 case LSHIFT_EXPR: case RSHIFT_EXPR:
5706 /* If the second operand is constant, this is a multiplication
5707 or floor division by a power of two, so we can treat it that
5708 way unless the multiplier or divisor overflows. Signed
5709 left-shift overflow is implementation-defined rather than
5710 undefined in C90, so do not convert signed left shift into
5711 multiplication. */
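/* E.g. X >> 3 is treated as X floor-divided by 8, and an unsigned
   X << 2 as X * 4, before recursing on the rewritten expression.  */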
5712 if (TREE_CODE (op1) == INTEGER_CST
5713 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5714 /* const_binop may not detect overflow correctly,
5715 so check for it explicitly here. */
5716 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5717 && TREE_INT_CST_HIGH (op1) == 0
5718 && 0 != (t1 = fold_convert (ctype,
5719 const_binop (LSHIFT_EXPR,
5720 size_one_node,
5721 op1)))
5722 && !TREE_OVERFLOW (t1))
5723 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5724 ? MULT_EXPR : FLOOR_DIV_EXPR,
5725 ctype,
5726 fold_convert (ctype, op0),
5727 t1),
5728 c, code, wide_type, strict_overflow_p);
5729 break;
5731 case PLUS_EXPR: case MINUS_EXPR:
5732 /* See if we can eliminate the operation on both sides. If we can, we
5733 can return a new PLUS or MINUS. If we can't, the only remaining
5734 cases where we can do anything are if the second operand is a
5735 constant. */
5736 sub_strict_overflow_p = false;
5737 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5738 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5739 if (t1 != 0 && t2 != 0
5740 && (code == MULT_EXPR
5741 /* If not multiplication, we can only do this if both operands
5742 are divisible by c. */
5743 || (multiple_of_p (ctype, op0, c)
5744 && multiple_of_p (ctype, op1, c))))
5746 if (sub_strict_overflow_p)
5747 *strict_overflow_p = true;
5748 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5749 fold_convert (ctype, t2));
5752 /* If this was a subtraction, negate OP1 and set it to be an addition.
5753 This simplifies the logic below. */
5754 if (tcode == MINUS_EXPR)
5756 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5757 /* If OP1 was not easily negatable, the constant may be OP0. */
5758 if (TREE_CODE (op0) == INTEGER_CST)
5760 tree tem = op0;
5761 op0 = op1;
5762 op1 = tem;
5763 tem = t1;
5764 t1 = t2;
5765 t2 = tem;
5769 if (TREE_CODE (op1) != INTEGER_CST)
5770 break;
5772 /* If either OP1 or C is negative, this optimization is not safe for
5773 some of the division and remainder variants, while for others we need
5774 to change the code. */
5775 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5777 if (code == CEIL_DIV_EXPR)
5778 code = FLOOR_DIV_EXPR;
5779 else if (code == FLOOR_DIV_EXPR)
5780 code = CEIL_DIV_EXPR;
5781 else if (code != MULT_EXPR
5782 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5783 break;
5786 /* If it's a multiply or a division/modulus operation of a multiple
5787 of our constant, do the operation and verify it doesn't overflow. */
5788 if (code == MULT_EXPR
5789 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5791 op1 = const_binop (code, fold_convert (ctype, op1),
5792 fold_convert (ctype, c));
5793 /* We allow the constant to overflow with wrapping semantics. */
5794 if (op1 == 0
5795 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5796 break;
5798 else
5799 break;
5801 /* If we have an unsigned type that is not a sizetype, we cannot widen
5802 the operation since it will change the result if the original
5803 computation overflowed. */
5804 if (TYPE_UNSIGNED (ctype)
5805 && ctype != type)
5806 break;
5808 /* If we were able to eliminate our operation from the first side,
5809 apply our operation to the second side and reform the PLUS. */
5810 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5811 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5813 /* The last case is if this is a multiply. In that case, we can
5814 apply the distributive law to commute the multiply and addition
5815 if the multiplication of the constants doesn't overflow. */
5816 if (code == MULT_EXPR)
5817 return fold_build2 (tcode, ctype,
5818 fold_build2 (code, ctype,
5819 fold_convert (ctype, op0),
5820 fold_convert (ctype, c)),
5821 op1);
5823 break;
5825 case MULT_EXPR:
5826 /* We have a special case here if we are doing something like
5827 (C * 8) % 4 since we know that's zero. */
5828 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5829 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5830 /* If the multiplication can overflow we cannot optimize this. */
5831 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5832 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5833 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5835 *strict_overflow_p = true;
5836 return omit_one_operand (type, integer_zero_node, op0);
5839 /* ... fall through ... */
5841 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5842 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5843 /* If we can extract our operation from the LHS, do so and return a
5844 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5845 do something only if the second operand is a constant. */
5846 if (same_p
5847 && (t1 = extract_muldiv (op0, c, code, wide_type,
5848 strict_overflow_p)) != 0)
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, op1));
5851 else if (tcode == MULT_EXPR && code == MULT_EXPR
5852 && (t1 = extract_muldiv (op1, c, code, wide_type,
5853 strict_overflow_p)) != 0)
5854 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5855 fold_convert (ctype, t1));
5856 else if (TREE_CODE (op1) != INTEGER_CST)
5857 return 0;
5859 /* If these are the same operation types, we can associate them
5860 assuming no overflow. */
5861 if (tcode == code)
5863 double_int mul;
5864 bool overflow_p;
5865 unsigned prec = TYPE_PRECISION (ctype);
5866 bool uns = TYPE_UNSIGNED (ctype);
5867 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5868 double_int dic = tree_to_double_int (c).ext (prec, uns);
5869 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5870 overflow_p = ((!uns && overflow_p)
5871 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5872 if (!double_int_fits_to_tree_p (ctype, mul)
5873 && ((uns && tcode != MULT_EXPR) || !uns))
5874 overflow_p = 1;
5875 if (!overflow_p)
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5877 double_int_to_tree (ctype, mul));
5880 /* If these operations "cancel" each other, we have the main
5881 optimizations of this pass, which occur when either constant is a
5882 multiple of the other, in which case we replace this with either an
5883 operation of CODE or TCODE.
5885 If we have an unsigned type, we cannot do this since it will change
5886 the result if the original computation overflowed. */
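/* E.g. with undefined signed overflow, (X * 8) / 4 folds to X * 2
   via the first branch below, and (X * 4) / 8 to X / 2 via the
   second.  */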
5887 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5888 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5889 || (tcode == MULT_EXPR
5890 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5891 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5892 && code != MULT_EXPR)))
5894 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5896 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5897 *strict_overflow_p = true;
5898 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5899 fold_convert (ctype,
5900 const_binop (TRUNC_DIV_EXPR,
5901 op1, c)));
5903 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5910 c, op1)));
5913 break;
5915 default:
5916 break;
5919 return 0;
5922 /* Return a node which has the indicated constant VALUE (either 0 or
5923 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5924 and is of the indicated TYPE. */
5926 tree
5927 constant_boolean_node (bool value, tree type)
5929 if (type == integer_type_node)
5930 return value ? integer_one_node : integer_zero_node;
5931 else if (type == boolean_type_node)
5932 return value ? boolean_true_node : boolean_false_node;
5933 else if (TREE_CODE (type) == VECTOR_TYPE)
5934 return build_vector_from_val (type,
5935 build_int_cst (TREE_TYPE (type),
5936 value ? -1 : 0));
5937 else
5938 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5942 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5943 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5944 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5945 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5946 COND is the first argument to CODE; otherwise (as in the example
5947 given here), it is the second argument. TYPE is the type of the
5948 original expression. Return NULL_TREE if no simplification is
5949 possible. */
5951 static tree
5952 fold_binary_op_with_conditional_arg (location_t loc,
5953 enum tree_code code,
5954 tree type, tree op0, tree op1,
5955 tree cond, tree arg, int cond_first_p)
5957 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5958 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5959 tree test, true_value, false_value;
5960 tree lhs = NULL_TREE;
5961 tree rhs = NULL_TREE;
5962 enum tree_code cond_code = COND_EXPR;
5964 if (TREE_CODE (cond) == COND_EXPR
5965 || TREE_CODE (cond) == VEC_COND_EXPR)
5967 test = TREE_OPERAND (cond, 0);
5968 true_value = TREE_OPERAND (cond, 1);
5969 false_value = TREE_OPERAND (cond, 2);
5970 /* If this operand throws an exception, then it does not make
5971 sense to try to perform a logical or arithmetic operation
5972 involving it. */
5973 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5974 lhs = true_value;
5975 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5976 rhs = false_value;
5978 else
5980 tree testtype = TREE_TYPE (cond);
5981 test = cond;
5982 true_value = constant_boolean_node (true, testtype);
5983 false_value = constant_boolean_node (false, testtype);
5986 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
5987 cond_code = VEC_COND_EXPR;
5989 /* This transformation is only worthwhile if we don't have to wrap ARG
5990 in a SAVE_EXPR and the operation can be simplified on at least one
5991 of the branches once it is pushed inside the COND_EXPR. */
5992 if (!TREE_CONSTANT (arg)
5993 && (TREE_SIDE_EFFECTS (arg)
5994 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5995 return NULL_TREE;
5997 arg = fold_convert_loc (loc, arg_type, arg);
5998 if (lhs == 0)
6000 true_value = fold_convert_loc (loc, cond_type, true_value);
6001 if (cond_first_p)
6002 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6003 else
6004 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6006 if (rhs == 0)
6008 false_value = fold_convert_loc (loc, cond_type, false_value);
6009 if (cond_first_p)
6010 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6011 else
6012 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6015 /* Check that we have simplified at least one of the branches. */
6016 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6017 return NULL_TREE;
6019 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6023 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6025 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6026 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6027 ADDEND is the same as X.
6029 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6030 and finite. The problematic cases are when X is zero, and its mode
6031 has signed zeros. In the case of rounding towards -infinity,
6032 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6033 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6035 bool
6036 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6038 if (!real_zerop (addend))
6039 return false;
6041 /* Don't allow the fold with -fsignaling-nans. */
6042 if (HONOR_SNANS (TYPE_MODE (type)))
6043 return false;
6045 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6046 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6047 return true;
6049 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6050 if (TREE_CODE (addend) == REAL_CST
6051 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6052 negate = !negate;
6054 /* The mode has signed zeros, and we have to honor their sign.
6055 In this situation, there is only one case we can return true for.
6056 X - 0 is the same as X unless rounding towards -infinity is
6057 supported. */
6058 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6061 /* Subroutine of fold() that checks comparisons of built-in math
6062 functions against real constants.
6064 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6065 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6066 is the type of the result and ARG0 and ARG1 are the operands of the
6067 comparison. ARG1 must be a TREE_REAL_CST.
6069 The function returns the constant folded tree if a simplification
6070 can be made, and NULL_TREE otherwise. */
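/* E.g. sqrt(x) > 2.0 is rewritten below as x > 4.0; the LT_EXPR and
   LE_EXPR cases additionally guard with x >= 0 when NaNs must be
   honored.  */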
6072 static tree
6073 fold_mathfn_compare (location_t loc,
6074 enum built_in_function fcode, enum tree_code code,
6075 tree type, tree arg0, tree arg1)
6077 REAL_VALUE_TYPE c;
6079 if (BUILTIN_SQRT_P (fcode))
6081 tree arg = CALL_EXPR_ARG (arg0, 0);
6082 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6084 c = TREE_REAL_CST (arg1);
6085 if (REAL_VALUE_NEGATIVE (c))
6087 /* sqrt(x) < y is always false, if y is negative. */
6088 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6089 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6091 /* sqrt(x) > y is always true, if y is negative and we
6092 don't care about NaNs, i.e. negative values of x. */
6093 if (code == NE_EXPR || !HONOR_NANS (mode))
6094 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6096 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6097 return fold_build2_loc (loc, GE_EXPR, type, arg,
6098 build_real (TREE_TYPE (arg), dconst0));
6100 else if (code == GT_EXPR || code == GE_EXPR)
6102 REAL_VALUE_TYPE c2;
6104 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6105 real_convert (&c2, mode, &c2);
6107 if (REAL_VALUE_ISINF (c2))
6109 /* sqrt(x) > y is x == +Inf, when y is very large. */
6110 if (HONOR_INFINITIES (mode))
6111 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6112 build_real (TREE_TYPE (arg), c2));
6114 /* sqrt(x) > y is always false, when y is very large
6115 and we don't care about infinities. */
6116 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6119 /* sqrt(x) > c is the same as x > c*c. */
6120 return fold_build2_loc (loc, code, type, arg,
6121 build_real (TREE_TYPE (arg), c2));
6123 else if (code == LT_EXPR || code == LE_EXPR)
6125 REAL_VALUE_TYPE c2;
6127 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6128 real_convert (&c2, mode, &c2);
6130 if (REAL_VALUE_ISINF (c2))
6132 /* sqrt(x) < y is always true, when y is a very large
6133 value and we don't care about NaNs or Infinities. */
6134 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6135 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6137 /* sqrt(x) < y is x != +Inf when y is very large and we
6138 don't care about NaNs. */
6139 if (! HONOR_NANS (mode))
6140 return fold_build2_loc (loc, NE_EXPR, type, arg,
6141 build_real (TREE_TYPE (arg), c2));
6143 /* sqrt(x) < y is x >= 0 when y is very large and we
6144 don't care about Infinities. */
6145 if (! HONOR_INFINITIES (mode))
6146 return fold_build2_loc (loc, GE_EXPR, type, arg,
6147 build_real (TREE_TYPE (arg), dconst0));
6149 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6150 arg = save_expr (arg);
6151 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6152 fold_build2_loc (loc, GE_EXPR, type, arg,
6153 build_real (TREE_TYPE (arg),
6154 dconst0)),
6155 fold_build2_loc (loc, NE_EXPR, type, arg,
6156 build_real (TREE_TYPE (arg),
6157 c2)));
6160 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6161 if (! HONOR_NANS (mode))
6162 return fold_build2_loc (loc, code, type, arg,
6163 build_real (TREE_TYPE (arg), c2));
6165 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6166 arg = save_expr (arg);
6167 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6168 fold_build2_loc (loc, GE_EXPR, type, arg,
6169 build_real (TREE_TYPE (arg),
6170 dconst0)),
6171 fold_build2_loc (loc, code, type, arg,
6172 build_real (TREE_TYPE (arg),
6173 c2)));
6177 return NULL_TREE;
6180 /* Subroutine of fold() that optimizes comparisons against Infinities,
6181 either +Inf or -Inf.
6183 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6184 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6185 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6187 The function returns the constant folded tree if a simplification
6188 can be made, and NULL_TREE otherwise. */
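/* E.g. x < +Inf becomes x <= DBL_MAX below, and x > +Inf becomes
   constant false once sNaNs need not be honored.  */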
6190 static tree
6191 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6192 tree arg0, tree arg1)
6194 enum machine_mode mode;
6195 REAL_VALUE_TYPE max;
6196 tree temp;
6197 bool neg;
6199 mode = TYPE_MODE (TREE_TYPE (arg0));
6201 /* For negative infinity swap the sense of the comparison. */
6202 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6203 if (neg)
6204 code = swap_tree_comparison (code);
6206 switch (code)
6208 case GT_EXPR:
6209 /* x > +Inf is always false, if we ignore sNaNs. */
6210 if (HONOR_SNANS (mode))
6211 return NULL_TREE;
6212 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6214 case LE_EXPR:
6215 /* x <= +Inf is always true, if we don't care about NaNs. */
6216 if (! HONOR_NANS (mode))
6217 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6219 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6220 arg0 = save_expr (arg0);
6221 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6223 case EQ_EXPR:
6224 case GE_EXPR:
6225 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6226 real_maxval (&max, neg, mode);
6227 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6228 arg0, build_real (TREE_TYPE (arg0), max));
6230 case LT_EXPR:
6231 /* x < +Inf is always equal to x <= DBL_MAX. */
6232 real_maxval (&max, neg, mode);
6233 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6234 arg0, build_real (TREE_TYPE (arg0), max));
6236 case NE_EXPR:
6237 /* x != +Inf is always equal to !(x > DBL_MAX). */
6238 real_maxval (&max, neg, mode);
6239 if (! HONOR_NANS (mode))
6240 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6241 arg0, build_real (TREE_TYPE (arg0), max));
6243 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6244 arg0, build_real (TREE_TYPE (arg0), max));
6245 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6247 default:
6248 break;
6251 return NULL_TREE;
6254 /* Subroutine of fold() that optimizes comparisons of a division by
6255 a nonzero integer constant against an integer constant, i.e.
6256 X/C1 op C2.
6258 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6259 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6260 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6262 The function returns the constant folded tree if a simplification
6263 can be made, and NULL_TREE otherwise. */
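/* For instance, with signed truncating division, X/4 == 2 becomes
   the range check 8 <= X && X <= 11: below, prod = 8, tmp = 3,
   lo = 8 and hi = 11.  */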
6265 static tree
6266 fold_div_compare (location_t loc,
6267 enum tree_code code, tree type, tree arg0, tree arg1)
6269 tree prod, tmp, hi, lo;
6270 tree arg00 = TREE_OPERAND (arg0, 0);
6271 tree arg01 = TREE_OPERAND (arg0, 1);
6272 double_int val;
6273 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6274 bool neg_overflow;
6275 bool overflow;
6277 /* We have to do this the hard way to detect unsigned overflow.
6278 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6279 val = TREE_INT_CST (arg01)
6280 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6281 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6282 neg_overflow = false;
6284 if (unsigned_p)
6286 tmp = int_const_binop (MINUS_EXPR, arg01,
6287 build_int_cst (TREE_TYPE (arg01), 1));
6288 lo = prod;
6290 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6291 val = TREE_INT_CST (prod)
6292 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6293 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6294 -1, overflow | TREE_OVERFLOW (prod));
6296 else if (tree_int_cst_sgn (arg01) >= 0)
6298 tmp = int_const_binop (MINUS_EXPR, arg01,
6299 build_int_cst (TREE_TYPE (arg01), 1));
6300 switch (tree_int_cst_sgn (arg1))
6302 case -1:
6303 neg_overflow = true;
6304 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6305 hi = prod;
6306 break;
6308 case 0:
6309 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6310 hi = tmp;
6311 break;
6313 case 1:
6314 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6315 lo = prod;
6316 break;
6318 default:
6319 gcc_unreachable ();
6322 else
6324 /* A negative divisor reverses the relational operators. */
6325 code = swap_tree_comparison (code);
6327 tmp = int_const_binop (PLUS_EXPR, arg01,
6328 build_int_cst (TREE_TYPE (arg01), 1));
6329 switch (tree_int_cst_sgn (arg1))
6331 case -1:
6332 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6333 lo = prod;
6334 break;
6336 case 0:
6337 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6338 lo = tmp;
6339 break;
6341 case 1:
6342 neg_overflow = true;
6343 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6344 hi = prod;
6345 break;
6347 default:
6348 gcc_unreachable ();
6352 switch (code)
6354 case EQ_EXPR:
6355 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6356 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6357 if (TREE_OVERFLOW (hi))
6358 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6359 if (TREE_OVERFLOW (lo))
6360 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6361 return build_range_check (loc, type, arg00, 1, lo, hi);
6363 case NE_EXPR:
6364 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6365 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6366 if (TREE_OVERFLOW (hi))
6367 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6368 if (TREE_OVERFLOW (lo))
6369 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6370 return build_range_check (loc, type, arg00, 0, lo, hi);
6372 case LT_EXPR:
6373 if (TREE_OVERFLOW (lo))
6375 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6376 return omit_one_operand_loc (loc, type, tmp, arg00);
6378 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6380 case LE_EXPR:
6381 if (TREE_OVERFLOW (hi))
6383 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6384 return omit_one_operand_loc (loc, type, tmp, arg00);
6386 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6388 case GT_EXPR:
6389 if (TREE_OVERFLOW (hi))
6391 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6392 return omit_one_operand_loc (loc, type, tmp, arg00);
6394 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6396 case GE_EXPR:
6397 if (TREE_OVERFLOW (lo))
6399 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6400 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6404 default:
6405 break;
6408 return NULL_TREE;
6412 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6413 equality/inequality test, then return a simplified form of the test
6414 using a sign test. Otherwise return NULL. TYPE is the desired
6415 result type. */
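/* E.g. for a 32-bit int X, (X & 0x80000000) != 0 tests the sign bit
   and is folded to X < 0; (X & 0x80000000) == 0 becomes X >= 0.  */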
6417 static tree
6418 fold_single_bit_test_into_sign_test (location_t loc,
6419 enum tree_code code, tree arg0, tree arg1,
6420 tree result_type)
6422 /* If this is testing a single bit, we can optimize the test. */
6423 if ((code == NE_EXPR || code == EQ_EXPR)
6424 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6425 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6427 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6428 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6429 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6431 if (arg00 != NULL_TREE
6432 /* This is only a win if casting to a signed type is cheap,
6433 i.e. when arg00's type is not a partial mode. */
6434 && TYPE_PRECISION (TREE_TYPE (arg00))
6435 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6437 tree stype = signed_type_for (TREE_TYPE (arg00));
6438 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6439 result_type,
6440 fold_convert_loc (loc, stype, arg00),
6441 build_int_cst (stype, 0));
6445 return NULL_TREE;
6448 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6449 equality/inequality test, then return a simplified form of
6450 the test using shifts and logical operations. Otherwise return
6451 NULL. TYPE is the desired result type. */
6453 tree
6454 fold_single_bit_test (location_t loc, enum tree_code code,
6455 tree arg0, tree arg1, tree result_type)
6457 /* If this is testing a single bit, we can optimize the test. */
6458 if ((code == NE_EXPR || code == EQ_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6462 tree inner = TREE_OPERAND (arg0, 0);
6463 tree type = TREE_TYPE (arg0);
6464 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6465 enum machine_mode operand_mode = TYPE_MODE (type);
6466 int ops_unsigned;
6467 tree signed_type, unsigned_type, intermediate_type;
6468 tree tem, one;
6470 /* First, see if we can fold the single bit test into a sign-bit
6471 test. */
6472 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6473 result_type);
6474 if (tem)
6475 return tem;
6477 /* Otherwise we have (A & C) != 0 where C is a single bit,
6478 convert that into ((A >> C2) & 1), where C2 = log2(C).
6479 Similarly for (A & C) == 0. */
6481 /* If INNER is a right shift of a constant and it plus BITNUM does
6482 not overflow, adjust BITNUM and INNER. */
6483 if (TREE_CODE (inner) == RSHIFT_EXPR
6484 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6485 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6486 && bitnum < TYPE_PRECISION (type)
6487 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6488 bitnum - TYPE_PRECISION (type)))
6490 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6491 inner = TREE_OPERAND (inner, 0);
6494 /* If we are going to be able to omit the AND below, we must do our
6495 operations as unsigned. If we must use the AND, we have a choice.
6496 Normally unsigned is faster, but for some machines signed is. */
6497 #ifdef LOAD_EXTEND_OP
6498 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6499 && !flag_syntax_only) ? 0 : 1;
6500 #else
6501 ops_unsigned = 1;
6502 #endif
6504 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6505 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6506 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6507 inner = fold_convert_loc (loc, intermediate_type, inner);
6509 if (bitnum != 0)
6510 inner = build2 (RSHIFT_EXPR, intermediate_type,
6511 inner, size_int (bitnum));
6513 one = build_int_cst (intermediate_type, 1);
6515 if (code == EQ_EXPR)
6516 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6518 /* Put the AND last so it can combine with more things. */
6519 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6521 /* Make sure to return the proper type. */
6522 inner = fold_convert_loc (loc, result_type, inner);
6524 return inner;
6526 return NULL_TREE;
6529 /* Check whether we are allowed to reorder operands arg0 and arg1,
6530 such that the evaluation of arg1 occurs before arg0. */
6532 static bool
6533 reorder_operands_p (const_tree arg0, const_tree arg1)
6535 if (! flag_evaluation_order)
6536 return true;
6537 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6538 return true;
6539 return ! TREE_SIDE_EFFECTS (arg0)
6540 && ! TREE_SIDE_EFFECTS (arg1);
6543 /* Test whether it is preferable to swap two operands, ARG0 and
6544 ARG1, for example because ARG0 is an integer constant and ARG1
6545 isn't. If REORDER is true, only recommend swapping if we can
6546 evaluate the operands in reverse order. */
6548 bool
6549 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6551 STRIP_SIGN_NOPS (arg0);
6552 STRIP_SIGN_NOPS (arg1);
6554 if (TREE_CODE (arg1) == INTEGER_CST)
6555 return 0;
6556 if (TREE_CODE (arg0) == INTEGER_CST)
6557 return 1;
6559 if (TREE_CODE (arg1) == REAL_CST)
6560 return 0;
6561 if (TREE_CODE (arg0) == REAL_CST)
6562 return 1;
6564 if (TREE_CODE (arg1) == FIXED_CST)
6565 return 0;
6566 if (TREE_CODE (arg0) == FIXED_CST)
6567 return 1;
6569 if (TREE_CODE (arg1) == COMPLEX_CST)
6570 return 0;
6571 if (TREE_CODE (arg0) == COMPLEX_CST)
6572 return 1;
6574 if (TREE_CONSTANT (arg1))
6575 return 0;
6576 if (TREE_CONSTANT (arg0))
6577 return 1;
6579 if (optimize_function_for_size_p (cfun))
6580 return 0;
6582 if (reorder && flag_evaluation_order
6583 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6584 return 0;
6586 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6587 for commutative and comparison operators. Ensuring a canonical
6588 form allows the optimizers to find additional redundancies without
6589 having to explicitly check for both orderings. */
6590 if (TREE_CODE (arg0) == SSA_NAME
6591 && TREE_CODE (arg1) == SSA_NAME
6592 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6593 return 1;
6595 /* Put SSA_NAMEs last. */
6596 if (TREE_CODE (arg1) == SSA_NAME)
6597 return 0;
6598 if (TREE_CODE (arg0) == SSA_NAME)
6599 return 1;
6601 /* Put variables last. */
6602 if (DECL_P (arg1))
6603 return 0;
6604 if (DECL_P (arg0))
6605 return 1;
6607 return 0;
6610 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6611 ARG0 is extended to a wider type. */
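/* E.g. if ARG0 is (int) S for a signed short S and ARG1 is 100000,
   the constant cannot fit in short, so S == 100000 folds to constant
   false via the bounds check below.  */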
6613 static tree
6614 fold_widened_comparison (location_t loc, enum tree_code code,
6615 tree type, tree arg0, tree arg1)
6617 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6618 tree arg1_unw;
6619 tree shorter_type, outer_type;
6620 tree min, max;
6621 bool above, below;
6623 if (arg0_unw == arg0)
6624 return NULL_TREE;
6625 shorter_type = TREE_TYPE (arg0_unw);
6627 #ifdef HAVE_canonicalize_funcptr_for_compare
6628 /* Disable this optimization if we're casting a function pointer
6629 type on targets that require function pointer canonicalization. */
6630 if (HAVE_canonicalize_funcptr_for_compare
6631 && TREE_CODE (shorter_type) == POINTER_TYPE
6632 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6633 return NULL_TREE;
6634 #endif
6636 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6637 return NULL_TREE;
6639 arg1_unw = get_unwidened (arg1, NULL_TREE);
6641 /* If possible, express the comparison in the shorter mode. */
6642 if ((code == EQ_EXPR || code == NE_EXPR
6643 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6644 && (TREE_TYPE (arg1_unw) == shorter_type
6645 || ((TYPE_PRECISION (shorter_type)
6646 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6647 && (TYPE_UNSIGNED (shorter_type)
6648 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6649 || (TREE_CODE (arg1_unw) == INTEGER_CST
6650 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6651 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6652 && int_fits_type_p (arg1_unw, shorter_type))))
6653 return fold_build2_loc (loc, code, type, arg0_unw,
6654 fold_convert_loc (loc, shorter_type, arg1_unw));
6656 if (TREE_CODE (arg1_unw) != INTEGER_CST
6657 || TREE_CODE (shorter_type) != INTEGER_TYPE
6658 || !int_fits_type_p (arg1_unw, shorter_type))
6659 return NULL_TREE;
6661 /* If we are comparing with an integer that does not fit into the range
6662 of the shorter type, the result is known. */
6663 outer_type = TREE_TYPE (arg1_unw);
6664 min = lower_bound_in_type (outer_type, shorter_type);
6665 max = upper_bound_in_type (outer_type, shorter_type);
6667 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6668 max, arg1_unw));
6669 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6670 arg1_unw, min));
6672 switch (code)
6674 case EQ_EXPR:
6675 if (above || below)
6676 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6677 break;
6679 case NE_EXPR:
6680 if (above || below)
6681 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6682 break;
6684 case LT_EXPR:
6685 case LE_EXPR:
6686 if (above)
6687 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6688 else if (below)
6689 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6691 case GT_EXPR:
6692 case GE_EXPR:
6693 if (above)
6694 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6695 else if (below)
6696 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6698 default:
6699 break;
6702 return NULL_TREE;
6705 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6706 conversion on ARG0 changes only the signedness. */
6708 static tree
6709 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6710 tree arg0, tree arg1)
6712 tree arg0_inner;
6713 tree inner_type, outer_type;
6715 if (!CONVERT_EXPR_P (arg0))
6716 return NULL_TREE;
6718 outer_type = TREE_TYPE (arg0);
6719 arg0_inner = TREE_OPERAND (arg0, 0);
6720 inner_type = TREE_TYPE (arg0_inner);
6722 #ifdef HAVE_canonicalize_funcptr_for_compare
6723 /* Disable this optimization if we're casting a function pointer
6724 type on targets that require function pointer canonicalization. */
6725 if (HAVE_canonicalize_funcptr_for_compare
6726 && TREE_CODE (inner_type) == POINTER_TYPE
6727 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6728 return NULL_TREE;
6729 #endif
6731 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6732 return NULL_TREE;
6734 if (TREE_CODE (arg1) != INTEGER_CST
6735 && !(CONVERT_EXPR_P (arg1)
6736 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6737 return NULL_TREE;
6739 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6740 && code != NE_EXPR
6741 && code != EQ_EXPR)
6742 return NULL_TREE;
6744 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6745 return NULL_TREE;
6747 if (TREE_CODE (arg1) == INTEGER_CST)
6748 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6749 0, TREE_OVERFLOW (arg1));
6750 else
6751 arg1 = fold_convert_loc (loc, inner_type, arg1);
6753 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6756 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6757 the step of the array. Reconstructs s and delta in the case of s *
6758 delta being an integer constant (and thus already folded). ADDR is
6759 the address. OP1 is the multiplicative expression. If the
6760 function succeeds, the new address expression is returned.
6761 Otherwise NULL_TREE is returned. LOC is the location of the
6762 resulting expression. */
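/* A sketch of the intent, assuming a 4-byte int element type:
   &a[i] p+ 4 * j is rewritten as &a[i + j], since 4 matches the
   array step.  */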
6764 static tree
6765 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6767 tree s, delta, step;
6768 tree ref = TREE_OPERAND (addr, 0), pref;
6769 tree ret, pos;
6770 tree itype;
6771 bool mdim = false;
6773 /* Strip the nops that might be added when converting op1 to sizetype. */
6774 STRIP_NOPS (op1);
6776 /* Canonicalize op1 into a possibly non-constant delta
6777 and an INTEGER_CST s. */
6778 if (TREE_CODE (op1) == MULT_EXPR)
6780 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6782 STRIP_NOPS (arg0);
6783 STRIP_NOPS (arg1);
6785 if (TREE_CODE (arg0) == INTEGER_CST)
6787 s = arg0;
6788 delta = arg1;
6790 else if (TREE_CODE (arg1) == INTEGER_CST)
6792 s = arg1;
6793 delta = arg0;
6795 else
6796 return NULL_TREE;
6798 else if (TREE_CODE (op1) == INTEGER_CST)
6800 delta = op1;
6801 s = NULL_TREE;
6803 else
6805 /* Treat OP1 as delta * 1. */
6806 delta = op1;
6807 s = integer_one_node;
6810 /* Handle &x.array the same as we would handle &x.array[0]. */
6811 if (TREE_CODE (ref) == COMPONENT_REF
6812 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6814 tree domain;
6816 /* Remember if this was a multi-dimensional array. */
6817 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6818 mdim = true;
6820 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6821 if (! domain)
6822 goto cont;
6823 itype = TREE_TYPE (domain);
6825 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6826 if (TREE_CODE (step) != INTEGER_CST)
6827 goto cont;
6829 if (s)
6831 if (! tree_int_cst_equal (step, s))
6832 goto cont;
6834 else
6836 /* Check whether delta is a multiple of step. */
6837 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6838 if (! tmp)
6839 goto cont;
6840 delta = tmp;
6843 /* Only fold here if we can verify we do not overflow one
6844 dimension of a multi-dimensional array. */
6845 if (mdim)
6847 tree tmp;
6849 if (!TYPE_MIN_VALUE (domain)
6850 || !TYPE_MAX_VALUE (domain)
6851 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6852 goto cont;
6854 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6855 fold_convert_loc (loc, itype,
6856 TYPE_MIN_VALUE (domain)),
6857 fold_convert_loc (loc, itype, delta));
6858 if (TREE_CODE (tmp) != INTEGER_CST
6859 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6860 goto cont;
6863 /* We found a suitable component reference. */
6865 pref = TREE_OPERAND (addr, 0);
6866 ret = copy_node (pref);
6867 SET_EXPR_LOCATION (ret, loc);
6869 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6870 fold_build2_loc
6871 (loc, PLUS_EXPR, itype,
6872 fold_convert_loc (loc, itype,
6873 TYPE_MIN_VALUE
6874 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6875 fold_convert_loc (loc, itype, delta)),
6876 NULL_TREE, NULL_TREE);
6877 return build_fold_addr_expr_loc (loc, ret);
6880 cont:
6882 for (;; ref = TREE_OPERAND (ref, 0))
6884 if (TREE_CODE (ref) == ARRAY_REF)
6886 tree domain;
6888 /* Remember if this was a multi-dimensional array. */
6889 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6890 mdim = true;
6892 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6893 if (! domain)
6894 continue;
6895 itype = TREE_TYPE (domain);
6897 step = array_ref_element_size (ref);
6898 if (TREE_CODE (step) != INTEGER_CST)
6899 continue;
6901 if (s)
6903 if (! tree_int_cst_equal (step, s))
6904 continue;
6906 else
6908 /* Check whether delta is a multiple of step. */
6909 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6910 if (! tmp)
6911 continue;
6912 delta = tmp;
6915 /* Only fold here if we can verify we do not overflow one
6916 dimension of a multi-dimensional array. */
6917 if (mdim)
6919 tree tmp;
6921 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6922 || !TYPE_MAX_VALUE (domain)
6923 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6924 continue;
6926 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6927 fold_convert_loc (loc, itype,
6928 TREE_OPERAND (ref, 1)),
6929 fold_convert_loc (loc, itype, delta));
6930 if (!tmp
6931 || TREE_CODE (tmp) != INTEGER_CST
6932 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6933 continue;
6936 break;
6938 else
6939 mdim = false;
6941 if (!handled_component_p (ref))
6942 return NULL_TREE;
6945 /* We found the suitable array reference. So copy everything up to it,
6946 and replace the index. */
6948 pref = TREE_OPERAND (addr, 0);
6949 ret = copy_node (pref);
6950 SET_EXPR_LOCATION (ret, loc);
6951 pos = ret;
6953 while (pref != ref)
6955 pref = TREE_OPERAND (pref, 0);
6956 TREE_OPERAND (pos, 0) = copy_node (pref);
6957 pos = TREE_OPERAND (pos, 0);
6960 TREE_OPERAND (pos, 1)
6961 = fold_build2_loc (loc, PLUS_EXPR, itype,
6962 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6963 fold_convert_loc (loc, itype, delta));
6964 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6968 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6969 means A >= Y && A != MAX, but in this case we know that
6970 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6972 static tree
6973 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6975 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6977 if (TREE_CODE (bound) == LT_EXPR)
6978 a = TREE_OPERAND (bound, 0);
6979 else if (TREE_CODE (bound) == GT_EXPR)
6980 a = TREE_OPERAND (bound, 1);
6981 else
6982 return NULL_TREE;
6984 typea = TREE_TYPE (a);
6985 if (!INTEGRAL_TYPE_P (typea)
6986 && !POINTER_TYPE_P (typea))
6987 return NULL_TREE;
6989 if (TREE_CODE (ineq) == LT_EXPR)
6991 a1 = TREE_OPERAND (ineq, 1);
6992 y = TREE_OPERAND (ineq, 0);
6994 else if (TREE_CODE (ineq) == GT_EXPR)
6996 a1 = TREE_OPERAND (ineq, 0);
6997 y = TREE_OPERAND (ineq, 1);
6999 else
7000 return NULL_TREE;
7002 if (TREE_TYPE (a1) != typea)
7003 return NULL_TREE;
7005 if (POINTER_TYPE_P (typea))
7007 /* Convert the pointer types into integer before taking the difference. */
7008 tree ta = fold_convert_loc (loc, ssizetype, a);
7009 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7010 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7012 else
7013 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7015 if (!diff || !integer_onep (diff))
7016 return NULL_TREE;
7018 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7021 /* Fold a sum or difference of at least one multiplication.
7022 Returns the folded tree or NULL if no simplification could be made. */
7024 static tree
7025 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7026 tree arg0, tree arg1)
7028 tree arg00, arg01, arg10, arg11;
7029 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7031 /* (A * C) +- (B * C) -> (A+-B) * C.
7032 (A * C) +- A -> A * (C+-1).
7033 We are most concerned about the case where C is a constant,
7034 but other combinations show up during loop reduction. Since
7035 it is not difficult, try all four possibilities. */
7037 if (TREE_CODE (arg0) == MULT_EXPR)
7039 arg00 = TREE_OPERAND (arg0, 0);
7040 arg01 = TREE_OPERAND (arg0, 1);
7042 else if (TREE_CODE (arg0) == INTEGER_CST)
7044 arg00 = build_one_cst (type);
7045 arg01 = arg0;
7047 else
7049 /* We cannot generate constant 1 for fract. */
7050 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7051 return NULL_TREE;
7052 arg00 = arg0;
7053 arg01 = build_one_cst (type);
7055 if (TREE_CODE (arg1) == MULT_EXPR)
7057 arg10 = TREE_OPERAND (arg1, 0);
7058 arg11 = TREE_OPERAND (arg1, 1);
7060 else if (TREE_CODE (arg1) == INTEGER_CST)
7062 arg10 = build_one_cst (type);
7063 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7064 the purposes of this transformation. */
7065 if (TREE_INT_CST_HIGH (arg1) == -1
7066 && negate_expr_p (arg1)
7067 && code == PLUS_EXPR)
7069 arg11 = negate_expr (arg1);
7070 code = MINUS_EXPR;
7072 else
7073 arg11 = arg1;
7075 else
7077 /* We cannot generate constant 1 for fract. */
7078 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7079 return NULL_TREE;
7080 arg10 = arg1;
7081 arg11 = build_one_cst (type);
7083 same = NULL_TREE;
7085 if (operand_equal_p (arg01, arg11, 0))
7086 same = arg01, alt0 = arg00, alt1 = arg10;
7087 else if (operand_equal_p (arg00, arg10, 0))
7088 same = arg00, alt0 = arg01, alt1 = arg11;
7089 else if (operand_equal_p (arg00, arg11, 0))
7090 same = arg00, alt0 = arg01, alt1 = arg10;
7091 else if (operand_equal_p (arg01, arg10, 0))
7092 same = arg01, alt0 = arg00, alt1 = arg11;
7094 /* No identical multiplicands; see if we can find a common
7095 power-of-two factor in non-power-of-two multiplies. This
7096 can help in multi-dimensional array access. */
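/* E.g. i*12 + j*4 is folded below to (i*3 + j) * 4, factoring out
   the common power-of-two multiplier 4.  */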
7097 else if (host_integerp (arg01, 0)
7098 && host_integerp (arg11, 0))
7100 HOST_WIDE_INT int01, int11, tmp;
7101 bool swap = false;
7102 tree maybe_same;
7103 int01 = TREE_INT_CST_LOW (arg01);
7104 int11 = TREE_INT_CST_LOW (arg11);
7106 /* Move min of absolute values to int11. */
7107 if (absu_hwi (int01) < absu_hwi (int11))
7109 tmp = int01, int01 = int11, int11 = tmp;
7110 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7111 maybe_same = arg01;
7112 swap = true;
7114 else
7115 maybe_same = arg11;
7117 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7118 /* The remainder should not be a constant, otherwise we
7119 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7120 increase the number of multiplications needed. */
7121 && TREE_CODE (arg10) != INTEGER_CST)
7123 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7124 build_int_cst (TREE_TYPE (arg00),
7125 int01 / int11));
7126 alt1 = arg10;
7127 same = maybe_same;
7128 if (swap)
7129 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7133 if (same)
7134 return fold_build2_loc (loc, MULT_EXPR, type,
7135 fold_build2_loc (loc, code, type,
7136 fold_convert_loc (loc, type, alt0),
7137 fold_convert_loc (loc, type, alt1)),
7138 fold_convert_loc (loc, type, same));
7140 return NULL_TREE;
7143 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7144 specified by EXPR into the buffer PTR of length LEN bytes.
7145 Return the number of bytes placed in the buffer, or zero
7146 upon failure. */
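/* E.g. the 32-bit constant 0x11223344 is emitted as the bytes
   44 33 22 11 on a little-endian target and 11 22 33 44 on a
   big-endian one.  */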
7148 static int
7149 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7151 tree type = TREE_TYPE (expr);
7152 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7153 int byte, offset, word, words;
7154 unsigned char value;
7156 if (total_bytes > len)
7157 return 0;
7158 words = total_bytes / UNITS_PER_WORD;
7160 for (byte = 0; byte < total_bytes; byte++)
7162 int bitpos = byte * BITS_PER_UNIT;
7163 if (bitpos < HOST_BITS_PER_WIDE_INT)
7164 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7165 else
7166 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7167 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7169 if (total_bytes > UNITS_PER_WORD)
7171 word = byte / UNITS_PER_WORD;
7172 if (WORDS_BIG_ENDIAN)
7173 word = (words - 1) - word;
7174 offset = word * UNITS_PER_WORD;
7175 if (BYTES_BIG_ENDIAN)
7176 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7177 else
7178 offset += byte % UNITS_PER_WORD;
7180 else
7181 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7182 ptr[offset] = value;
7184 return total_bytes;
7188 /* Subroutine of native_encode_expr. Encode the REAL_CST
7189 specified by EXPR into the buffer PTR of length LEN bytes.
7190 Return the number of bytes placed in the buffer, or zero
7191 upon failure. */
7193 static int
7194 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7196 tree type = TREE_TYPE (expr);
7197 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7198 int byte, offset, word, words, bitpos;
7199 unsigned char value;
7201 /* There are always 32 bits in each long, no matter the size of
7202 the host's long. We handle floating point representations with
7203 up to 192 bits. */
7204 long tmp[6];
7206 if (total_bytes > len)
7207 return 0;
7208 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7210 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7212 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7213 bitpos += BITS_PER_UNIT)
7215 byte = (bitpos / BITS_PER_UNIT) & 3;
7216 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7218 if (UNITS_PER_WORD < 4)
7220 word = byte / UNITS_PER_WORD;
7221 if (WORDS_BIG_ENDIAN)
7222 word = (words - 1) - word;
7223 offset = word * UNITS_PER_WORD;
7224 if (BYTES_BIG_ENDIAN)
7225 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7226 else
7227 offset += byte % UNITS_PER_WORD;
7229 else
7230 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7231 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7233 return total_bytes;
7236 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7237 specified by EXPR into the buffer PTR of length LEN bytes.
7238 Return the number of bytes placed in the buffer, or zero
7239 upon failure. */
7241 static int
7242 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7244 int rsize, isize;
7245 tree part;
7247 part = TREE_REALPART (expr);
7248 rsize = native_encode_expr (part, ptr, len);
7249 if (rsize == 0)
7250 return 0;
7251 part = TREE_IMAGPART (expr);
7252 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7253 if (isize != rsize)
7254 return 0;
7255 return rsize + isize;
7259 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7260 specified by EXPR into the buffer PTR of length LEN bytes.
7261 Return the number of bytes placed in the buffer, or zero
7262 upon failure. */
7264 static int
7265 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7267 unsigned i, count;
7268 int size, offset;
7269 tree itype, elem;
7271 offset = 0;
7272 count = VECTOR_CST_NELTS (expr);
7273 itype = TREE_TYPE (TREE_TYPE (expr));
7274 size = GET_MODE_SIZE (TYPE_MODE (itype));
7275 for (i = 0; i < count; i++)
7277 elem = VECTOR_CST_ELT (expr, i);
7278 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7279 return 0;
7280 offset += size;
7282 return offset;
7286 /* Subroutine of native_encode_expr. Encode the STRING_CST
7287 specified by EXPR into the buffer PTR of length LEN bytes.
7288 Return the number of bytes placed in the buffer, or zero
7289 upon failure. */
7291 static int
7292 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7294 tree type = TREE_TYPE (expr);
7295 HOST_WIDE_INT total_bytes;
7297 if (TREE_CODE (type) != ARRAY_TYPE
7298 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7299 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7300 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7301 return 0;
7302 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7303 if (total_bytes > len)
7304 return 0;
7305 if (TREE_STRING_LENGTH (expr) < total_bytes)
7307 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7308 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7309 total_bytes - TREE_STRING_LENGTH (expr));
7311 else
7312 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7313 return total_bytes;
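/* Illustrative sketch, not part of the original file: the padding
   behaviour above in plain C.  A STRING_CST shorter than its array
   type is zero-filled up to the array size.  */
#if 0
static void
example_pad_string (unsigned char ptr[4])
{
  static const char str[] = "hi";   /* three bytes: 'h', 'i', '\0' */
  memcpy (ptr, str, sizeof (str));
  memset (ptr + sizeof (str), 0, 4 - sizeof (str));  /* zero the rest */
}
#endif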
7317 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7318 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7319 buffer PTR of length LEN bytes. Return the number of bytes
7320 placed in the buffer, or zero upon failure. */
7322 int
7323 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7325 switch (TREE_CODE (expr))
7327 case INTEGER_CST:
7328 return native_encode_int (expr, ptr, len);
7330 case REAL_CST:
7331 return native_encode_real (expr, ptr, len);
7333 case COMPLEX_CST:
7334 return native_encode_complex (expr, ptr, len);
7336 case VECTOR_CST:
7337 return native_encode_vector (expr, ptr, len);
7339 case STRING_CST:
7340 return native_encode_string (expr, ptr, len);
7342 default:
7343 return 0;
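/* A hedged usage sketch (not in the original source): the usual round
   trip pairs native_encode_expr with native_interpret_expr below.
   build_int_cst and integer_type_node are standard GCC internals; the
   example function itself is hypothetical.  */
#if 0
static void
example_round_trip (void)
{
  unsigned char buf[64];  /* matches fold_view_convert_expr's buffer */
  tree cst = build_int_cst (integer_type_node, 42);
  int len = native_encode_expr (cst, buf, sizeof (buf));
  if (len > 0)
    {
      /* BACK is again an INTEGER_CST with value 42.  */
      tree back = native_interpret_expr (integer_type_node, buf, len);
      (void) back;
    }
}
#endif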
7348 /* Subroutine of native_interpret_expr. Interpret the contents of
7349 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7350 If the buffer cannot be interpreted, return NULL_TREE. */
7352 static tree
7353 native_interpret_int (tree type, const unsigned char *ptr, int len)
7355 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7356 int byte, offset, word, words;
7357 unsigned char value;
7358 double_int result;
7360 if (total_bytes > len)
7361 return NULL_TREE;
7362 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7363 return NULL_TREE;
7365 result = double_int_zero;
7366 words = total_bytes / UNITS_PER_WORD;
7368 for (byte = 0; byte < total_bytes; byte++)
7370 int bitpos = byte * BITS_PER_UNIT;
7371 if (total_bytes > UNITS_PER_WORD)
7373 word = byte / UNITS_PER_WORD;
7374 if (WORDS_BIG_ENDIAN)
7375 word = (words - 1) - word;
7376 offset = word * UNITS_PER_WORD;
7377 if (BYTES_BIG_ENDIAN)
7378 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7379 else
7380 offset += byte % UNITS_PER_WORD;
7382 else
7383 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7384 value = ptr[offset];
7386 if (bitpos < HOST_BITS_PER_WIDE_INT)
7387 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7388 else
7389 result.high |= (unsigned HOST_WIDE_INT) value
7390 << (bitpos - HOST_BITS_PER_WIDE_INT);
7393 return double_int_to_tree (type, result);
7397 /* Subroutine of native_interpret_expr. Interpret the contents of
7398 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7399 If the buffer cannot be interpreted, return NULL_TREE. */
7401 static tree
7402 native_interpret_real (tree type, const unsigned char *ptr, int len)
7404 enum machine_mode mode = TYPE_MODE (type);
7405 int total_bytes = GET_MODE_SIZE (mode);
7406 int byte, offset, word, words, bitpos;
7407 unsigned char value;
7408 /* There are always 32 bits in each long, no matter the size of
7409 the host's long. We handle floating point representations with
7410 up to 192 bits. */
7411 REAL_VALUE_TYPE r;
7412 long tmp[6];
7414 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7415 if (total_bytes > len || total_bytes > 24)
7416 return NULL_TREE;
7417 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7419 memset (tmp, 0, sizeof (tmp));
7420 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7421 bitpos += BITS_PER_UNIT)
7423 byte = (bitpos / BITS_PER_UNIT) & 3;
7424 if (UNITS_PER_WORD < 4)
7426 word = byte / UNITS_PER_WORD;
7427 if (WORDS_BIG_ENDIAN)
7428 word = (words - 1) - word;
7429 offset = word * UNITS_PER_WORD;
7430 if (BYTES_BIG_ENDIAN)
7431 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7432 else
7433 offset += byte % UNITS_PER_WORD;
7435 else
7436 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7437 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7439 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7442 real_from_target (&r, tmp, mode);
7443 return build_real (type, r);
7447 /* Subroutine of native_interpret_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7449 If the buffer cannot be interpreted, return NULL_TREE. */
7451 static tree
7452 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7454 tree etype, rpart, ipart;
7455 int size;
7457 etype = TREE_TYPE (type);
7458 size = GET_MODE_SIZE (TYPE_MODE (etype));
7459 if (size * 2 > len)
7460 return NULL_TREE;
7461 rpart = native_interpret_expr (etype, ptr, size);
7462 if (!rpart)
7463 return NULL_TREE;
7464 ipart = native_interpret_expr (etype, ptr+size, size);
7465 if (!ipart)
7466 return NULL_TREE;
7467 return build_complex (type, rpart, ipart);
7471 /* Subroutine of native_interpret_expr. Interpret the contents of
7472 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7473 If the buffer cannot be interpreted, return NULL_TREE. */
7475 static tree
7476 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7478 tree etype, elem;
7479 int i, size, count;
7480 tree *elements;
7482 etype = TREE_TYPE (type);
7483 size = GET_MODE_SIZE (TYPE_MODE (etype));
7484 count = TYPE_VECTOR_SUBPARTS (type);
7485 if (size * count > len)
7486 return NULL_TREE;
7488 elements = XALLOCAVEC (tree, count);
7489 for (i = count - 1; i >= 0; i--)
7491 elem = native_interpret_expr (etype, ptr+(i*size), size);
7492 if (!elem)
7493 return NULL_TREE;
7494 elements[i] = elem;
7496 return build_vector (type, elements);
7500 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7501 the buffer PTR of length LEN as a constant of type TYPE. For
7502 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7503 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7504 return NULL_TREE. */
7506 tree
7507 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7509 switch (TREE_CODE (type))
7511 case INTEGER_TYPE:
7512 case ENUMERAL_TYPE:
7513 case BOOLEAN_TYPE:
7514 case POINTER_TYPE:
7515 case REFERENCE_TYPE:
7516 return native_interpret_int (type, ptr, len);
7518 case REAL_TYPE:
7519 return native_interpret_real (type, ptr, len);
7521 case COMPLEX_TYPE:
7522 return native_interpret_complex (type, ptr, len);
7524 case VECTOR_TYPE:
7525 return native_interpret_vector (type, ptr, len);
7527 default:
7528 return NULL_TREE;
7532 /* Returns true if we can interpret the contents of a native encoding
7533 as TYPE. */
7535 static bool
7536 can_native_interpret_type_p (tree type)
7538 switch (TREE_CODE (type))
7540 case INTEGER_TYPE:
7541 case ENUMERAL_TYPE:
7542 case BOOLEAN_TYPE:
7543 case POINTER_TYPE:
7544 case REFERENCE_TYPE:
7545 case REAL_TYPE:
7546 case COMPLEX_TYPE:
7547 case VECTOR_TYPE:
7548 return true;
7549 default:
7550 return false;
7554 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7555 TYPE at compile-time. If we're unable to perform the conversion
7556 return NULL_TREE. */
7558 static tree
7559 fold_view_convert_expr (tree type, tree expr)
7561 /* We support up to 512-bit values (for V8DFmode). */
7562 unsigned char buffer[64];
7563 int len;
7565 /* Check that the host and target are sane. */
7566 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7567 return NULL_TREE;
7569 len = native_encode_expr (expr, buffer, sizeof (buffer));
7570 if (len == 0)
7571 return NULL_TREE;
7573 return native_interpret_expr (type, buffer, len);
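/* At the source level the fold above is the compile-time analogue of
   memcpy-based type punning.  An illustrative host-side sketch (not
   part of the original file), assuming a 32-bit float:  */
#if 0
#include <stdint.h>
#include <string.h>

static uint32_t
example_view_convert (float f)
{
  uint32_t u;
  /* Reinterpret the bytes, as VIEW_CONVERT_EXPR does for trees.  */
  memcpy (&u, &f, sizeof (u));
  return u;
}
#endif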
7576 /* Build an expression for the address of T. Folds away INDIRECT_REF
7577 to avoid confusing the gimplify process. */
7579 tree
7580 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7582 /* The size of the object is not relevant when talking about its address. */
7583 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7584 t = TREE_OPERAND (t, 0);
7586 if (TREE_CODE (t) == INDIRECT_REF)
7588 t = TREE_OPERAND (t, 0);
7590 if (TREE_TYPE (t) != ptrtype)
7591 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7593 else if (TREE_CODE (t) == MEM_REF
7594 && integer_zerop (TREE_OPERAND (t, 1)))
7595 return TREE_OPERAND (t, 0);
7596 else if (TREE_CODE (t) == MEM_REF
7597 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7598 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7599 TREE_OPERAND (t, 0),
7600 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7601 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7603 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7605 if (TREE_TYPE (t) != ptrtype)
7606 t = fold_convert_loc (loc, ptrtype, t);
7608 else
7609 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7611 return t;
7614 /* Build an expression for the address of T. */
7616 tree
7617 build_fold_addr_expr_loc (location_t loc, tree t)
7619 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7621 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7624 static bool vec_cst_ctor_to_array (tree, tree *);
7626 /* Fold a unary expression of code CODE and type TYPE with operand
7627 OP0. Return the folded expression if folding is successful.
7628 Otherwise, return NULL_TREE. */
7630 tree
7631 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7633 tree tem;
7634 tree arg0;
7635 enum tree_code_class kind = TREE_CODE_CLASS (code);
7637 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7638 && TREE_CODE_LENGTH (code) == 1);
7640 arg0 = op0;
7641 if (arg0)
7643 if (CONVERT_EXPR_CODE_P (code)
7644 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7646 /* Don't use STRIP_NOPS, because signedness of argument type
7647 matters. */
7648 STRIP_SIGN_NOPS (arg0);
7650 else
7652 /* Strip any conversions that don't change the mode. This
7653 is safe for every expression, except for a comparison
7654 expression because its signedness is derived from its
7655 operands.
7657 Note that this is done as an internal manipulation within
7658 the constant folder, in order to find the simplest
7659 representation of the arguments so that their form can be
7660 studied. In any case, the appropriate type conversions
7661 should be put back in the tree that will get out of the
7662 constant folder. */
7663 STRIP_NOPS (arg0);
7667 if (TREE_CODE_CLASS (code) == tcc_unary)
7669 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7670 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7671 fold_build1_loc (loc, code, type,
7672 fold_convert_loc (loc, TREE_TYPE (op0),
7673 TREE_OPERAND (arg0, 1))));
7674 else if (TREE_CODE (arg0) == COND_EXPR)
7676 tree arg01 = TREE_OPERAND (arg0, 1);
7677 tree arg02 = TREE_OPERAND (arg0, 2);
7678 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7679 arg01 = fold_build1_loc (loc, code, type,
7680 fold_convert_loc (loc,
7681 TREE_TYPE (op0), arg01));
7682 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7683 arg02 = fold_build1_loc (loc, code, type,
7684 fold_convert_loc (loc,
7685 TREE_TYPE (op0), arg02));
7686 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7687 arg01, arg02);
7689 /* If this was a conversion, and all we did was to move it
7690 inside the COND_EXPR, bring it back out. But leave it if
7691 it is a conversion from integer to integer and the
7692 result precision is no wider than a word since such a
7693 conversion is cheap and may be optimized away by combine,
7694 while it couldn't if it were outside the COND_EXPR. Then return
7695 so we don't get into an infinite recursion loop taking the
7696 conversion out and then back in. */
7698 if ((CONVERT_EXPR_CODE_P (code)
7699 || code == NON_LVALUE_EXPR)
7700 && TREE_CODE (tem) == COND_EXPR
7701 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7702 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7703 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7704 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7705 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7706 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7707 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7708 && (INTEGRAL_TYPE_P
7709 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7710 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7711 || flag_syntax_only))
7712 tem = build1_loc (loc, code, type,
7713 build3 (COND_EXPR,
7714 TREE_TYPE (TREE_OPERAND
7715 (TREE_OPERAND (tem, 1), 0)),
7716 TREE_OPERAND (tem, 0),
7717 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7718 TREE_OPERAND (TREE_OPERAND (tem, 2),
7719 0)));
7720 return tem;
7724 switch (code)
7726 case PAREN_EXPR:
7727 /* Re-association barriers around constants and other re-association
7728 barriers can be removed. */
7729 if (CONSTANT_CLASS_P (op0)
7730 || TREE_CODE (op0) == PAREN_EXPR)
7731 return fold_convert_loc (loc, type, op0);
7732 return NULL_TREE;
7734 CASE_CONVERT:
7735 case FLOAT_EXPR:
7736 case FIX_TRUNC_EXPR:
7737 if (TREE_TYPE (op0) == type)
7738 return op0;
7740 if (COMPARISON_CLASS_P (op0))
7742 /* If we have (type) (a CMP b) and type is an integral type, return
7743 new expression involving the new type. Canonicalize
7744 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7745 non-integral type.
7746 Do not fold the result as that would not simplify further; also,
7747 folding again would just recurse. */
7748 if (TREE_CODE (type) == BOOLEAN_TYPE)
7749 return build2_loc (loc, TREE_CODE (op0), type,
7750 TREE_OPERAND (op0, 0),
7751 TREE_OPERAND (op0, 1));
7752 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7753 && TREE_CODE (type) != VECTOR_TYPE)
7754 return build3_loc (loc, COND_EXPR, type, op0,
7755 constant_boolean_node (true, type),
7756 constant_boolean_node (false, type));
7759 /* Handle cases of two conversions in a row. */
7760 if (CONVERT_EXPR_P (op0))
7762 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7763 tree inter_type = TREE_TYPE (op0);
7764 int inside_int = INTEGRAL_TYPE_P (inside_type);
7765 int inside_ptr = POINTER_TYPE_P (inside_type);
7766 int inside_float = FLOAT_TYPE_P (inside_type);
7767 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7768 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7769 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7770 int inter_int = INTEGRAL_TYPE_P (inter_type);
7771 int inter_ptr = POINTER_TYPE_P (inter_type);
7772 int inter_float = FLOAT_TYPE_P (inter_type);
7773 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7774 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7775 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7776 int final_int = INTEGRAL_TYPE_P (type);
7777 int final_ptr = POINTER_TYPE_P (type);
7778 int final_float = FLOAT_TYPE_P (type);
7779 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7780 unsigned int final_prec = TYPE_PRECISION (type);
7781 int final_unsignedp = TYPE_UNSIGNED (type);
7783 /* In addition to the cases of two conversions in a row
7784 handled below, if we are converting something to its own
7785 type via an object of identical or wider precision, neither
7786 conversion is needed. */
7787 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7788 && (((inter_int || inter_ptr) && final_int)
7789 || (inter_float && final_float))
7790 && inter_prec >= final_prec)
7791 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7793 /* Likewise, if the intermediate and initial types are either both
7794 float or both integer, we don't need the middle conversion if the
7795 former is wider than the latter and doesn't change the signedness
7796 (for integers). Avoid this if the final type is a pointer since
7797 then we sometimes need the middle conversion. Likewise if the
7798 final type has a precision not equal to the size of its mode. */
7799 if (((inter_int && inside_int)
7800 || (inter_float && inside_float)
7801 || (inter_vec && inside_vec))
7802 && inter_prec >= inside_prec
7803 && (inter_float || inter_vec
7804 || inter_unsignedp == inside_unsignedp)
7805 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7806 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7807 && ! final_ptr
7808 && (! final_vec || inter_prec == inside_prec))
7809 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7811 /* If we have a sign-extension of a zero-extended value, we can
7812 replace that by a single zero-extension. Likewise if the
7813 final conversion does not change precision we can drop the
7814 intermediate conversion. */
7815 if (inside_int && inter_int && final_int
7816 && ((inside_prec < inter_prec && inter_prec < final_prec
7817 && inside_unsignedp && !inter_unsignedp)
7818 || final_prec == inter_prec))
7819 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7821 /* Two conversions in a row are not needed unless:
7822 - some conversion is floating-point (overstrict for now), or
7823 - some conversion is a vector (overstrict for now), or
7824 - the intermediate type is narrower than both initial and
7825 final, or
7826 - the intermediate type and innermost type differ in signedness,
7827 and the outermost type is wider than the intermediate, or
7828 - the initial type is a pointer type and the precisions of the
7829 intermediate and final types differ, or
7830 - the final type is a pointer type and the precisions of the
7831 initial and intermediate types differ. */
7832 if (! inside_float && ! inter_float && ! final_float
7833 && ! inside_vec && ! inter_vec && ! final_vec
7834 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7835 && ! (inside_int && inter_int
7836 && inter_unsignedp != inside_unsignedp
7837 && inter_prec < final_prec)
7838 && ((inter_unsignedp && inter_prec > inside_prec)
7839 == (final_unsignedp && final_prec > inter_prec))
7840 && ! (inside_ptr && inter_prec != final_prec)
7841 && ! (final_ptr && inside_prec != inter_prec)
7842 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7843 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7844 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
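/* A source-level example of the sign/zero-extension rule above
   (editorial sketch, assuming 16-bit short, 32-bit int and 64-bit
   long long):  */
#if 0
static long long
example_double_widen (unsigned short x)
{
  /* (long long) (int) x zero-extends X to 32 bits and then
     sign-extends to 64; the intermediate sign bit is always clear,
     so this equals (long long) x and the middle cast is dropped.  */
  return (long long) (int) x;
}
#endif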
7847 /* Handle (T *)&A.B.C for A being of type T and B and C
7848 living at offset zero. This occurs frequently in
7849 C++ upcasting and then accessing the base. */
7850 if (TREE_CODE (op0) == ADDR_EXPR
7851 && POINTER_TYPE_P (type)
7852 && handled_component_p (TREE_OPERAND (op0, 0)))
7854 HOST_WIDE_INT bitsize, bitpos;
7855 tree offset;
7856 enum machine_mode mode;
7857 int unsignedp, volatilep;
7858 tree base = TREE_OPERAND (op0, 0);
7859 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7860 &mode, &unsignedp, &volatilep, false);
7861 /* If the reference was to a (constant) zero offset, we can use
7862 the address of the base if it has the same base type
7863 as the result type and the pointer type is unqualified. */
7864 if (! offset && bitpos == 0
7865 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7866 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7867 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7868 return fold_convert_loc (loc, type,
7869 build_fold_addr_expr_loc (loc, base));
7872 if (TREE_CODE (op0) == MODIFY_EXPR
7873 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7874 /* Detect assigning a bitfield. */
7875 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7876 && DECL_BIT_FIELD
7877 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7879 /* Don't leave an assignment inside a conversion
7880 unless assigning a bitfield. */
7881 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7882 /* First do the assignment, then return converted constant. */
7883 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7884 TREE_NO_WARNING (tem) = 1;
7885 TREE_USED (tem) = 1;
7886 return tem;
7889 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7890 constants (if x has signed type, the sign bit cannot be set
7891 in c). This folds extension into the BIT_AND_EXPR.
7892 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7893 very likely don't have maximal range for their precision and this
7894 transformation effectively doesn't preserve non-maximal ranges. */
7895 if (TREE_CODE (type) == INTEGER_TYPE
7896 && TREE_CODE (op0) == BIT_AND_EXPR
7897 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7899 tree and_expr = op0;
7900 tree and0 = TREE_OPERAND (and_expr, 0);
7901 tree and1 = TREE_OPERAND (and_expr, 1);
7902 int change = 0;
7904 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7905 || (TYPE_PRECISION (type)
7906 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7907 change = 1;
7908 else if (TYPE_PRECISION (TREE_TYPE (and1))
7909 <= HOST_BITS_PER_WIDE_INT
7910 && host_integerp (and1, 1))
7912 unsigned HOST_WIDE_INT cst;
7914 cst = tree_low_cst (and1, 1);
7915 cst &= (HOST_WIDE_INT) -1
7916 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7917 change = (cst == 0);
7918 #ifdef LOAD_EXTEND_OP
7919 if (change
7920 && !flag_syntax_only
7921 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7922 == ZERO_EXTEND))
7924 tree uns = unsigned_type_for (TREE_TYPE (and0));
7925 and0 = fold_convert_loc (loc, uns, and0);
7926 and1 = fold_convert_loc (loc, uns, and1);
7928 #endif
7930 if (change)
7932 tem = force_fit_type_double (type, tree_to_double_int (and1),
7933 0, TREE_OVERFLOW (and1));
7934 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7935 fold_convert_loc (loc, type, and0), tem);
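/* A source-level example of the fold above (editorial sketch):
   truncation commutes with the AND, and the mask is truncated with
   it, so (unsigned char) (x & 0x10f) becomes
   (unsigned char) x & 0x0f.  */
#if 0
static unsigned char
example_narrow_and (int x)
{
  return (unsigned char) (x & 0x10f);
}
#endif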
7939 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7940 when one of the new casts will fold away. Conservatively we assume
7941 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7942 if (POINTER_TYPE_P (type)
7943 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7944 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7945 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7946 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7947 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7949 tree arg00 = TREE_OPERAND (arg0, 0);
7950 tree arg01 = TREE_OPERAND (arg0, 1);
7952 return fold_build_pointer_plus_loc
7953 (loc, fold_convert_loc (loc, type, arg00), arg01);
7956 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7957 of the same precision, and X is an integer type not narrower than
7958 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7959 if (INTEGRAL_TYPE_P (type)
7960 && TREE_CODE (op0) == BIT_NOT_EXPR
7961 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7962 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7963 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7965 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7966 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7967 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7968 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7969 fold_convert_loc (loc, type, tem));
7972 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7973 type of X and Y (integer types only). */
7974 if (INTEGRAL_TYPE_P (type)
7975 && TREE_CODE (op0) == MULT_EXPR
7976 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7977 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7979 /* Be careful not to introduce new overflows. */
7980 tree mult_type;
7981 if (TYPE_OVERFLOW_WRAPS (type))
7982 mult_type = type;
7983 else
7984 mult_type = unsigned_type_for (type);
7986 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7988 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7989 fold_convert_loc (loc, mult_type,
7990 TREE_OPERAND (op0, 0)),
7991 fold_convert_loc (loc, mult_type,
7992 TREE_OPERAND (op0, 1)));
7993 return fold_convert_loc (loc, type, tem);
7997 tem = fold_convert_const (code, type, op0);
7998 return tem ? tem : NULL_TREE;
8000 case ADDR_SPACE_CONVERT_EXPR:
8001 if (integer_zerop (arg0))
8002 return fold_convert_const (code, type, arg0);
8003 return NULL_TREE;
8005 case FIXED_CONVERT_EXPR:
8006 tem = fold_convert_const (code, type, arg0);
8007 return tem ? tem : NULL_TREE;
8009 case VIEW_CONVERT_EXPR:
8010 if (TREE_TYPE (op0) == type)
8011 return op0;
8012 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8013 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8014 type, TREE_OPERAND (op0, 0));
8015 if (TREE_CODE (op0) == MEM_REF)
8016 return fold_build2_loc (loc, MEM_REF, type,
8017 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8019 /* For integral conversions with the same precision or pointer
8020 conversions use a NOP_EXPR instead. */
8021 if ((INTEGRAL_TYPE_P (type)
8022 || POINTER_TYPE_P (type))
8023 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8024 || POINTER_TYPE_P (TREE_TYPE (op0)))
8025 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8026 return fold_convert_loc (loc, type, op0);
8028 /* Strip inner integral conversions that do not change the precision. */
8029 if (CONVERT_EXPR_P (op0)
8030 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8031 || POINTER_TYPE_P (TREE_TYPE (op0)))
8032 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8033 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8034 && (TYPE_PRECISION (TREE_TYPE (op0))
8035 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8036 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8037 type, TREE_OPERAND (op0, 0));
8039 return fold_view_convert_expr (type, op0);
8041 case NEGATE_EXPR:
8042 tem = fold_negate_expr (loc, arg0);
8043 if (tem)
8044 return fold_convert_loc (loc, type, tem);
8045 return NULL_TREE;
8047 case ABS_EXPR:
8048 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8049 return fold_abs_const (arg0, type);
8050 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8051 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8052 /* Convert fabs((double)float) into (double)fabsf(float). */
8053 else if (TREE_CODE (arg0) == NOP_EXPR
8054 && TREE_CODE (type) == REAL_TYPE)
8056 tree targ0 = strip_float_extensions (arg0);
8057 if (targ0 != arg0)
8058 return fold_convert_loc (loc, type,
8059 fold_build1_loc (loc, ABS_EXPR,
8060 TREE_TYPE (targ0),
8061 targ0));
8063 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8064 else if (TREE_CODE (arg0) == ABS_EXPR)
8065 return arg0;
8066 else if (tree_expr_nonnegative_p (arg0))
8067 return arg0;
8069 /* Strip sign ops from argument. */
8070 if (TREE_CODE (type) == REAL_TYPE)
8072 tem = fold_strip_sign_ops (arg0);
8073 if (tem)
8074 return fold_build1_loc (loc, ABS_EXPR, type,
8075 fold_convert_loc (loc, type, tem));
8077 return NULL_TREE;
8079 case CONJ_EXPR:
8080 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8081 return fold_convert_loc (loc, type, arg0);
8082 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8084 tree itype = TREE_TYPE (type);
8085 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8086 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8087 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8088 negate_expr (ipart));
8090 if (TREE_CODE (arg0) == COMPLEX_CST)
8092 tree itype = TREE_TYPE (type);
8093 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8094 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8095 return build_complex (type, rpart, negate_expr (ipart));
8097 if (TREE_CODE (arg0) == CONJ_EXPR)
8098 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8099 return NULL_TREE;
8101 case BIT_NOT_EXPR:
8102 if (TREE_CODE (arg0) == INTEGER_CST)
8103 return fold_not_const (arg0, type);
8104 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8105 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8106 /* Convert ~ (-A) to A - 1. */
8107 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8108 return fold_build2_loc (loc, MINUS_EXPR, type,
8109 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8110 build_int_cst (type, 1));
8111 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8112 else if (INTEGRAL_TYPE_P (type)
8113 && ((TREE_CODE (arg0) == MINUS_EXPR
8114 && integer_onep (TREE_OPERAND (arg0, 1)))
8115 || (TREE_CODE (arg0) == PLUS_EXPR
8116 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8117 return fold_build1_loc (loc, NEGATE_EXPR, type,
8118 fold_convert_loc (loc, type,
8119 TREE_OPERAND (arg0, 0)));
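/* Both folds above follow from the two's complement identity
   ~X == -X - 1 (editorially added): substituting X = -A gives
   ~(-A) == A - 1, and substituting X = A - 1 gives ~(A - 1) == -A.
   For A == 5: ~(-5) == ~0xfffffffb == 4 == 5 - 1.  */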
8120 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8121 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8122 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8123 fold_convert_loc (loc, type,
8124 TREE_OPERAND (arg0, 0)))))
8125 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8126 fold_convert_loc (loc, type,
8127 TREE_OPERAND (arg0, 1)));
8128 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8129 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8130 fold_convert_loc (loc, type,
8131 TREE_OPERAND (arg0, 1)))))
8132 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8133 fold_convert_loc (loc, type,
8134 TREE_OPERAND (arg0, 0)), tem);
8135 /* Perform BIT_NOT_EXPR on each element individually. */
8136 else if (TREE_CODE (arg0) == VECTOR_CST)
8138 tree *elements;
8139 tree elem;
8140 unsigned count = VECTOR_CST_NELTS (arg0), i;
8142 elements = XALLOCAVEC (tree, count);
8143 for (i = 0; i < count; i++)
8145 elem = VECTOR_CST_ELT (arg0, i);
8146 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8147 if (elem == NULL_TREE)
8148 break;
8149 elements[i] = elem;
8151 if (i == count)
8152 return build_vector (type, elements);
8155 return NULL_TREE;
8157 case TRUTH_NOT_EXPR:
8158 /* The argument to invert_truthvalue must have Boolean type. */
8159 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8160 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8162 /* Note that the operand of this must be an int
8163 and its values must be 0 or 1.
8164 ("true" is a fixed value perhaps depending on the language,
8165 but we don't handle values other than 1 correctly yet.) */
8166 tem = fold_truth_not_expr (loc, arg0);
8167 if (!tem)
8168 return NULL_TREE;
8169 return fold_convert_loc (loc, type, tem);
8171 case REALPART_EXPR:
8172 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8173 return fold_convert_loc (loc, type, arg0);
8174 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8175 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8176 TREE_OPERAND (arg0, 1));
8177 if (TREE_CODE (arg0) == COMPLEX_CST)
8178 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8179 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8181 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8182 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8183 fold_build1_loc (loc, REALPART_EXPR, itype,
8184 TREE_OPERAND (arg0, 0)),
8185 fold_build1_loc (loc, REALPART_EXPR, itype,
8186 TREE_OPERAND (arg0, 1)));
8187 return fold_convert_loc (loc, type, tem);
8189 if (TREE_CODE (arg0) == CONJ_EXPR)
8191 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8192 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8193 TREE_OPERAND (arg0, 0));
8194 return fold_convert_loc (loc, type, tem);
8196 if (TREE_CODE (arg0) == CALL_EXPR)
8198 tree fn = get_callee_fndecl (arg0);
8199 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8200 switch (DECL_FUNCTION_CODE (fn))
8202 CASE_FLT_FN (BUILT_IN_CEXPI):
8203 fn = mathfn_built_in (type, BUILT_IN_COS);
8204 if (fn)
8205 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8206 break;
8208 default:
8209 break;
8212 return NULL_TREE;
8214 case IMAGPART_EXPR:
8215 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8216 return build_zero_cst (type);
8217 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8218 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8219 TREE_OPERAND (arg0, 0));
8220 if (TREE_CODE (arg0) == COMPLEX_CST)
8221 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8222 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8224 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8225 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8226 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8227 TREE_OPERAND (arg0, 0)),
8228 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8229 TREE_OPERAND (arg0, 1)));
8230 return fold_convert_loc (loc, type, tem);
8232 if (TREE_CODE (arg0) == CONJ_EXPR)
8234 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8235 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8236 return fold_convert_loc (loc, type, negate_expr (tem));
8238 if (TREE_CODE (arg0) == CALL_EXPR)
8240 tree fn = get_callee_fndecl (arg0);
8241 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8242 switch (DECL_FUNCTION_CODE (fn))
8244 CASE_FLT_FN (BUILT_IN_CEXPI):
8245 fn = mathfn_built_in (type, BUILT_IN_SIN);
8246 if (fn)
8247 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8248 break;
8250 default:
8251 break;
8254 return NULL_TREE;
8256 case INDIRECT_REF:
8257 /* Fold *&X to X if X is an lvalue. */
8258 if (TREE_CODE (op0) == ADDR_EXPR)
8260 tree op00 = TREE_OPERAND (op0, 0);
8261 if ((TREE_CODE (op00) == VAR_DECL
8262 || TREE_CODE (op00) == PARM_DECL
8263 || TREE_CODE (op00) == RESULT_DECL)
8264 && !TREE_READONLY (op00))
8265 return op00;
8267 return NULL_TREE;
8269 case VEC_UNPACK_LO_EXPR:
8270 case VEC_UNPACK_HI_EXPR:
8271 case VEC_UNPACK_FLOAT_LO_EXPR:
8272 case VEC_UNPACK_FLOAT_HI_EXPR:
8274 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8275 tree *elts;
8276 enum tree_code subcode;
8278 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8279 if (TREE_CODE (arg0) != VECTOR_CST)
8280 return NULL_TREE;
8282 elts = XALLOCAVEC (tree, nelts * 2);
8283 if (!vec_cst_ctor_to_array (arg0, elts))
8284 return NULL_TREE;
8286 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8287 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8288 elts += nelts;
8290 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8291 subcode = NOP_EXPR;
8292 else
8293 subcode = FLOAT_EXPR;
8295 for (i = 0; i < nelts; i++)
8297 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8298 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8299 return NULL_TREE;
8302 return build_vector (type, elts);
8305 case REDUC_MIN_EXPR:
8306 case REDUC_MAX_EXPR:
8307 case REDUC_PLUS_EXPR:
8309 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8310 tree *elts;
8311 enum tree_code subcode;
8313 if (TREE_CODE (op0) != VECTOR_CST)
8314 return NULL_TREE;
8316 elts = XALLOCAVEC (tree, nelts);
8317 if (!vec_cst_ctor_to_array (op0, elts))
8318 return NULL_TREE;
8320 switch (code)
8322 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8323 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8324 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8325 default: gcc_unreachable ();
8328 for (i = 1; i < nelts; i++)
8330 elts[0] = const_binop (subcode, elts[0], elts[i]);
8331 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8332 return NULL_TREE;
8333 elts[i] = build_zero_cst (TREE_TYPE (type));
8336 return build_vector (type, elts);
8339 default:
8340 return NULL_TREE;
8341 } /* switch (code) */
8345 /* If the operation was a conversion do _not_ mark a resulting constant
8346 with TREE_OVERFLOW if the original constant was not. These conversions
8347 have implementation defined behavior and retaining the TREE_OVERFLOW
8348 flag here would confuse later passes such as VRP. */
8349 tree
8350 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8351 tree type, tree op0)
8353 tree res = fold_unary_loc (loc, code, type, op0);
8354 if (res
8355 && TREE_CODE (res) == INTEGER_CST
8356 && TREE_CODE (op0) == INTEGER_CST
8357 && CONVERT_EXPR_CODE_P (code))
8358 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8360 return res;
8363 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8364 operands OP0 and OP1. LOC is the location of the resulting expression.
8365 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8366 Return the folded expression if folding is successful. Otherwise,
8367 return NULL_TREE. */
8368 static tree
8369 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8370 tree arg0, tree arg1, tree op0, tree op1)
8372 tree tem;
8374 /* We only do these simplifications if we are optimizing. */
8375 if (!optimize)
8376 return NULL_TREE;
8378 /* Check for things like (A || B) && (A || C). We can convert this
8379 to A || (B && C). Note that either operator can be any of the four
8380 truth and/or operations and the transformation will still be
8381 valid. Also note that we only care about order for the
8382 ANDIF and ORIF operators. If B contains side effects, this
8383 might change the truth-value of A. */
8384 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8385 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8386 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8387 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8388 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8389 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8391 tree a00 = TREE_OPERAND (arg0, 0);
8392 tree a01 = TREE_OPERAND (arg0, 1);
8393 tree a10 = TREE_OPERAND (arg1, 0);
8394 tree a11 = TREE_OPERAND (arg1, 1);
8395 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8396 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8397 && (code == TRUTH_AND_EXPR
8398 || code == TRUTH_OR_EXPR));
8400 if (operand_equal_p (a00, a10, 0))
8401 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8402 fold_build2_loc (loc, code, type, a01, a11));
8403 else if (commutative && operand_equal_p (a00, a11, 0))
8404 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8405 fold_build2_loc (loc, code, type, a01, a10));
8406 else if (commutative && operand_equal_p (a01, a10, 0))
8407 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8408 fold_build2_loc (loc, code, type, a00, a11));
8410 /* This case is tricky because we must either have commutative
8411 operators or else A10 must not have side-effects. */
8413 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8414 && operand_equal_p (a01, a11, 0))
8415 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8416 fold_build2_loc (loc, code, type, a00, a10),
8417 a01);
8420 /* See if we can build a range comparison. */
8421 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8422 return tem;
8424 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8425 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8427 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8428 if (tem)
8429 return fold_build2_loc (loc, code, type, tem, arg1);
8432 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8433 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8435 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8436 if (tem)
8437 return fold_build2_loc (loc, code, type, arg0, tem);
8440 /* Check for the possibility of merging component references. If our
8441 lhs is another similar operation, try to merge its rhs with our
8442 rhs. Then try to merge our lhs and rhs. */
8443 if (TREE_CODE (arg0) == code
8444 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8445 TREE_OPERAND (arg0, 1), arg1)))
8446 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8448 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8449 return tem;
8451 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8452 && (code == TRUTH_AND_EXPR
8453 || code == TRUTH_ANDIF_EXPR
8454 || code == TRUTH_OR_EXPR
8455 || code == TRUTH_ORIF_EXPR))
8457 enum tree_code ncode, icode;
8459 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8460 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8461 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8463 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8464 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8465 We don't want to pack more than two leaves into a non-IF AND/OR
8466 expression.
8467 If the tree code of the left-hand operand isn't an AND/OR-IF code
8468 and not equal to IF-CODE, then we don't want to add the right-hand
8469 operand. If the inner right-hand side of the left-hand operand
8470 has side effects, or isn't simple, then we can't add to it, as
8471 otherwise we might destroy the if-sequence. */
8472 if (TREE_CODE (arg0) == icode
8473 && simple_operand_p_2 (arg1)
8474 /* Needed for sequence points to handle traps
8475 and side effects. */
8476 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8478 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8479 arg1);
8480 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8481 tem);
8483 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8484 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8485 else if (TREE_CODE (arg1) == icode
8486 && simple_operand_p_2 (arg0)
8487 /* Needed for sequence points to handle traps
8488 and side effects. */
8489 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8491 tem = fold_build2_loc (loc, ncode, type,
8492 arg0, TREE_OPERAND (arg1, 0));
8493 return fold_build2_loc (loc, icode, type, tem,
8494 TREE_OPERAND (arg1, 1));
8496 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8497 into (A OR B).
8498 For sequence point consistency, we need to check for traps
8499 and side effects. */
8500 else if (code == icode && simple_operand_p_2 (arg0)
8501 && simple_operand_p_2 (arg1))
8502 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8505 return NULL_TREE;
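/* A source-level example of the first transformation above
   (editorial sketch with side-effect-free operands):  */
#if 0
static int
example_distribute (int a, int b, int c)
{
  /* (a || b) && (a || c) folds to a || (b && c); B and C are still
     evaluated only when A is false, so short-circuiting is kept.  */
  return (a || b) && (a || c);
}
#endif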
8508 /* Fold a binary expression of code CODE and type TYPE with operands
8509 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8510 Return the folded expression if folding is successful. Otherwise,
8511 return NULL_TREE. */
8513 static tree
8514 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8516 enum tree_code compl_code;
8518 if (code == MIN_EXPR)
8519 compl_code = MAX_EXPR;
8520 else if (code == MAX_EXPR)
8521 compl_code = MIN_EXPR;
8522 else
8523 gcc_unreachable ();
8525 /* MIN (MAX (a, b), b) == b. */
8526 if (TREE_CODE (op0) == compl_code
8527 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8528 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8530 /* MIN (MAX (b, a), b) == b. */
8531 if (TREE_CODE (op0) == compl_code
8532 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8533 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8534 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8536 /* MIN (a, MAX (a, b)) == a. */
8537 if (TREE_CODE (op1) == compl_code
8538 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8539 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8540 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8542 /* MIN (a, MAX (b, a)) == a. */
8543 if (TREE_CODE (op1) == compl_code
8544 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8545 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8546 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8548 return NULL_TREE;
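/* A numeric check of the first identity above (editorially added):
   MIN (MAX (a, b), b) == b.  With a == 7, b == 3: MAX (7, 3) == 7 and
   MIN (7, 3) == 3 == b.  With a == 1, b == 3: MAX (1, 3) == 3 and
   MIN (3, 3) == 3 == b.  */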
8551 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8552 by changing CODE to reduce the magnitude of constants involved in
8553 ARG0 of the comparison.
8554 Returns a canonicalized comparison tree if a simplification was
8555 possible, otherwise returns NULL_TREE.
8556 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8557 valid if signed overflow is undefined. */
8559 static tree
8560 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8561 tree arg0, tree arg1,
8562 bool *strict_overflow_p)
8564 enum tree_code code0 = TREE_CODE (arg0);
8565 tree t, cst0 = NULL_TREE;
8566 int sgn0;
8567 bool swap = false;
8569 /* Match A +- CST code arg1 and CST code arg1. We can change the
8570 first form only if overflow is undefined. */
8571 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8572 /* In principle pointers also have undefined overflow behavior,
8573 but that causes problems elsewhere. */
8574 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8575 && (code0 == MINUS_EXPR
8576 || code0 == PLUS_EXPR)
8577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8578 || code0 == INTEGER_CST))
8579 return NULL_TREE;
8581 /* Identify the constant in arg0 and its sign. */
8582 if (code0 == INTEGER_CST)
8583 cst0 = arg0;
8584 else
8585 cst0 = TREE_OPERAND (arg0, 1);
8586 sgn0 = tree_int_cst_sgn (cst0);
8588 /* Overflowed constants and zero will cause problems. */
8589 if (integer_zerop (cst0)
8590 || TREE_OVERFLOW (cst0))
8591 return NULL_TREE;
8593 /* See if we can reduce the magnitude of the constant in
8594 arg0 by changing the comparison code. */
8595 if (code0 == INTEGER_CST)
8597 /* CST <= arg1 -> CST-1 < arg1. */
8598 if (code == LE_EXPR && sgn0 == 1)
8599 code = LT_EXPR;
8600 /* -CST < arg1 -> -CST-1 <= arg1. */
8601 else if (code == LT_EXPR && sgn0 == -1)
8602 code = LE_EXPR;
8603 /* CST > arg1 -> CST-1 >= arg1. */
8604 else if (code == GT_EXPR && sgn0 == 1)
8605 code = GE_EXPR;
8606 /* -CST >= arg1 -> -CST-1 > arg1. */
8607 else if (code == GE_EXPR && sgn0 == -1)
8608 code = GT_EXPR;
8609 else
8610 return NULL_TREE;
8611 /* arg1 code' CST' might be more canonical. */
8612 swap = true;
8614 else
8616 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8617 if (code == LT_EXPR
8618 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8619 code = LE_EXPR;
8620 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8621 else if (code == GT_EXPR
8622 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8623 code = GE_EXPR;
8624 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8625 else if (code == LE_EXPR
8626 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8627 code = LT_EXPR;
8628 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8629 else if (code == GE_EXPR
8630 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8631 code = GT_EXPR;
8632 else
8633 return NULL_TREE;
8634 *strict_overflow_p = true;
8637 /* Now build the constant reduced in magnitude. But not if that
8638 would produce one outside of its type's range. */
8639 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8640 && ((sgn0 == 1
8641 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8642 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8643 || (sgn0 == -1
8644 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8645 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8646 /* We cannot swap the comparison here as that would cause us to
8647 endlessly recurse. */
8648 return NULL_TREE;
8650 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8651 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8652 if (code0 != INTEGER_CST)
8653 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8654 t = fold_convert (TREE_TYPE (arg1), t);
8656 /* If swapping might yield a more canonical form, do so. */
8657 if (swap)
8658 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8659 else
8660 return fold_build2_loc (loc, code, type, t, arg1);
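/* Worked examples of the canonicalization above (editorially added):
   the constant case turns 3 <= arg1 into 2 < arg1, emitted swapped as
   arg1 > 2; the A +- CST case turns A - 5 < arg1 into A - 4 <= arg1,
   which is valid only when signed overflow is undefined, hence
   *STRICT_OVERFLOW_P is set.  */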
8663 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8664 overflow further. Try to decrease the magnitude of constants involved
8665 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8666 and put sole constants at the second argument position.
8667 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8669 static tree
8670 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8671 tree arg0, tree arg1)
8673 tree t;
8674 bool strict_overflow_p;
8675 const char * const warnmsg = G_("assuming signed overflow does not occur "
8676 "when reducing constant in comparison");
8678 /* Try canonicalization by simplifying arg0. */
8679 strict_overflow_p = false;
8680 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8681 &strict_overflow_p);
8682 if (t)
8684 if (strict_overflow_p)
8685 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8686 return t;
8689 /* Try canonicalization by simplifying arg1 using the swapped
8690 comparison. */
8691 code = swap_tree_comparison (code);
8692 strict_overflow_p = false;
8693 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8694 &strict_overflow_p);
8695 if (t && strict_overflow_p)
8696 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8697 return t;
8700 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8701 space. This is used to avoid issuing overflow warnings for
8702 expressions like &p->x which cannot wrap. */
8704 static bool
8705 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8707 double_int di_offset, total;
8709 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8710 return true;
8712 if (bitpos < 0)
8713 return true;
8715 if (offset == NULL_TREE)
8716 di_offset = double_int_zero;
8717 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8718 return true;
8719 else
8720 di_offset = TREE_INT_CST (offset);
8722 bool overflow;
8723 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8724 total = di_offset.add_with_sign (units, true, &overflow);
8725 if (overflow)
8726 return true;
8728 if (total.high != 0)
8729 return true;
8731 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8732 if (size <= 0)
8733 return true;
8735 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8736 array. */
8737 if (TREE_CODE (base) == ADDR_EXPR)
8739 HOST_WIDE_INT base_size;
8741 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8742 if (base_size > 0 && size < base_size)
8743 size = base_size;
8746 return total.low > (unsigned HOST_WIDE_INT) size;
8749 /* Subroutine of fold_binary. This routine performs all of the
8750 transformations that are common to the equality/inequality
8751 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8752 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8753 fold_binary should call fold_binary. Fold a comparison with
8754 tree code CODE and type TYPE with operands OP0 and OP1. Return
8755 the folded comparison or NULL_TREE. */
8757 static tree
8758 fold_comparison (location_t loc, enum tree_code code, tree type,
8759 tree op0, tree op1)
8761 tree arg0, arg1, tem;
8763 arg0 = op0;
8764 arg1 = op1;
8766 STRIP_SIGN_NOPS (arg0);
8767 STRIP_SIGN_NOPS (arg1);
8769 tem = fold_relational_const (code, type, arg0, arg1);
8770 if (tem != NULL_TREE)
8771 return tem;
8773 /* If one arg is a real or integer constant, put it last. */
8774 if (tree_swap_operands_p (arg0, arg1, true))
8775 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8777 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8778 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8779 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8780 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8781 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8782 && (TREE_CODE (arg1) == INTEGER_CST
8783 && !TREE_OVERFLOW (arg1)))
8785 tree const1 = TREE_OPERAND (arg0, 1);
8786 tree const2 = arg1;
8787 tree variable = TREE_OPERAND (arg0, 0);
8788 tree lhs;
8789 int lhs_add;
8790 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8792 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8793 TREE_TYPE (arg1), const2, const1);
8795 /* If the constant operation overflowed, this can be
8796 simplified as a comparison against INT_MAX/INT_MIN. */
8797 if (TREE_CODE (lhs) == INTEGER_CST
8798 && TREE_OVERFLOW (lhs))
8800 int const1_sgn = tree_int_cst_sgn (const1);
8801 enum tree_code code2 = code;
8803 /* Get the sign of the constant on the lhs if the
8804 operation were VARIABLE + CONST1. */
8805 if (TREE_CODE (arg0) == MINUS_EXPR)
8806 const1_sgn = -const1_sgn;
8808 /* The sign of the constant determines if we overflowed
8809 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8810 Canonicalize to the INT_MIN overflow by swapping the comparison
8811 if necessary. */
8812 if (const1_sgn == -1)
8813 code2 = swap_tree_comparison (code);
8815 /* We now can look at the canonicalized case
8816 VARIABLE + 1 CODE2 INT_MIN
8817 and decide on the result. */
8818 if (code2 == LT_EXPR
8819 || code2 == LE_EXPR
8820 || code2 == EQ_EXPR)
8821 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8822 else if (code2 == NE_EXPR
8823 || code2 == GE_EXPR
8824 || code2 == GT_EXPR)
8825 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8828 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8829 && (TREE_CODE (lhs) != INTEGER_CST
8830 || !TREE_OVERFLOW (lhs)))
8832 if (code != EQ_EXPR && code != NE_EXPR)
8833 fold_overflow_warning ("assuming signed overflow does not occur "
8834 "when changing X +- C1 cmp C2 to "
8835 "X cmp C1 +- C2",
8836 WARN_STRICT_OVERFLOW_COMPARISON);
8837 return fold_build2_loc (loc, code, type, variable, lhs);
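/* A source-level example of the transformation above (editorial
   sketch; assumes signed overflow is undefined):  */
#if 0
static int
example_move_constant (int x)
{
  /* x + 10 < 20 folds to x < 10, i.e. X cmp C2 - C1.  Had C2 - C1
     overflowed, the result would be decided against INT_MIN/INT_MAX
     as in the code above.  */
  return x + 10 < 20;
}
#endif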
8841 /* For comparisons of pointers we can decompose it to a compile time
8842 comparison of the base objects and the offsets into the object.
8843 This requires at least one operand being an ADDR_EXPR or a
8844 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8845 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8846 && (TREE_CODE (arg0) == ADDR_EXPR
8847 || TREE_CODE (arg1) == ADDR_EXPR
8848 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8849 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8851 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8852 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8853 enum machine_mode mode;
8854 int volatilep, unsignedp;
8855 bool indirect_base0 = false, indirect_base1 = false;
8857 /* Get base and offset for the access. Strip ADDR_EXPR for
8858 get_inner_reference, but put it back by stripping INDIRECT_REF
8859 off the base object if possible. indirect_baseN will be true
8860 if baseN is not an address but refers to the object itself. */
8861 base0 = arg0;
8862 if (TREE_CODE (arg0) == ADDR_EXPR)
8864 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8865 &bitsize, &bitpos0, &offset0, &mode,
8866 &unsignedp, &volatilep, false);
8867 if (TREE_CODE (base0) == INDIRECT_REF)
8868 base0 = TREE_OPERAND (base0, 0);
8869 else
8870 indirect_base0 = true;
8872 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8874 base0 = TREE_OPERAND (arg0, 0);
8875 STRIP_SIGN_NOPS (base0);
8876 if (TREE_CODE (base0) == ADDR_EXPR)
8878 base0 = TREE_OPERAND (base0, 0);
8879 indirect_base0 = true;
8881 offset0 = TREE_OPERAND (arg0, 1);
8882 if (host_integerp (offset0, 0))
8884 HOST_WIDE_INT off = size_low_cst (offset0);
8885 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8886 * BITS_PER_UNIT)
8887 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8889 bitpos0 = off * BITS_PER_UNIT;
8890 offset0 = NULL_TREE;
8895 base1 = arg1;
8896 if (TREE_CODE (arg1) == ADDR_EXPR)
8898 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8899 &bitsize, &bitpos1, &offset1, &mode,
8900 &unsignedp, &volatilep, false);
8901 if (TREE_CODE (base1) == INDIRECT_REF)
8902 base1 = TREE_OPERAND (base1, 0);
8903 else
8904 indirect_base1 = true;
8906 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8908 base1 = TREE_OPERAND (arg1, 0);
8909 STRIP_SIGN_NOPS (base1);
8910 if (TREE_CODE (base1) == ADDR_EXPR)
8912 base1 = TREE_OPERAND (base1, 0);
8913 indirect_base1 = true;
8915 offset1 = TREE_OPERAND (arg1, 1);
8916 if (host_integerp (offset1, 0))
8918 HOST_WIDE_INT off = size_low_cst (offset1);
8919 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8920 * BITS_PER_UNIT)
8921 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8923 bitpos1 = off * BITS_PER_UNIT;
8924 offset1 = NULL_TREE;
8929 /* A local variable can never be pointed to by
8930 the default SSA name of an incoming parameter. */
8931 if ((TREE_CODE (arg0) == ADDR_EXPR
8932 && indirect_base0
8933 && TREE_CODE (base0) == VAR_DECL
8934 && auto_var_in_fn_p (base0, current_function_decl)
8935 && !indirect_base1
8936 && TREE_CODE (base1) == SSA_NAME
8937 && SSA_NAME_IS_DEFAULT_DEF (base1)
8938 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8939 || (TREE_CODE (arg1) == ADDR_EXPR
8940 && indirect_base1
8941 && TREE_CODE (base1) == VAR_DECL
8942 && auto_var_in_fn_p (base1, current_function_decl)
8943 && !indirect_base0
8944 && TREE_CODE (base0) == SSA_NAME
8945 && SSA_NAME_IS_DEFAULT_DEF (base0)
8946 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8948 if (code == NE_EXPR)
8949 return constant_boolean_node (1, type);
8950 else if (code == EQ_EXPR)
8951 return constant_boolean_node (0, type);
8953 /* If we have equivalent bases we might be able to simplify. */
8954 else if (indirect_base0 == indirect_base1
8955 && operand_equal_p (base0, base1, 0))
8957 /* We can fold this expression to a constant if the non-constant
8958 offset parts are equal. */
8959 if ((offset0 == offset1
8960 || (offset0 && offset1
8961 && operand_equal_p (offset0, offset1, 0)))
8962 && (code == EQ_EXPR
8963 || code == NE_EXPR
8964 || (indirect_base0 && DECL_P (base0))
8965 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8968 if (code != EQ_EXPR
8969 && code != NE_EXPR
8970 && bitpos0 != bitpos1
8971 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8972 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8973 fold_overflow_warning (("assuming pointer wraparound does not "
8974 "occur when comparing P +- C1 with "
8975 "P +- C2"),
8976 WARN_STRICT_OVERFLOW_CONDITIONAL);
8978 switch (code)
8980 case EQ_EXPR:
8981 return constant_boolean_node (bitpos0 == bitpos1, type);
8982 case NE_EXPR:
8983 return constant_boolean_node (bitpos0 != bitpos1, type);
8984 case LT_EXPR:
8985 return constant_boolean_node (bitpos0 < bitpos1, type);
8986 case LE_EXPR:
8987 return constant_boolean_node (bitpos0 <= bitpos1, type);
8988 case GE_EXPR:
8989 return constant_boolean_node (bitpos0 >= bitpos1, type);
8990 case GT_EXPR:
8991 return constant_boolean_node (bitpos0 > bitpos1, type);
8992 default:;
8995 /* We can simplify the comparison to a comparison of the variable
8996 offset parts if the constant offset parts are equal.
8997 Be careful to use signed size type here because otherwise we
8998 mess with array offsets in the wrong way. This is possible
8999 because pointer arithmetic is restricted to remain within an
9000 object and overflow on pointer differences is undefined as of
9001 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9002 else if (bitpos0 == bitpos1
9003 && ((code == EQ_EXPR || code == NE_EXPR)
9004 || (indirect_base0 && DECL_P (base0))
9005 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9007 /* By converting to signed size type we cover middle-end pointer
9008 arithmetic which operates on unsigned pointer types of size
9009 type size and ARRAY_REF offsets which are properly sign or
9010 zero extended from their type in case it is narrower than
9011 size type. */
9012 if (offset0 == NULL_TREE)
9013 offset0 = build_int_cst (ssizetype, 0);
9014 else
9015 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9016 if (offset1 == NULL_TREE)
9017 offset1 = build_int_cst (ssizetype, 0);
9018 else
9019 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9021 if (code != EQ_EXPR
9022 && code != NE_EXPR
9023 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9024 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9025 fold_overflow_warning (("assuming pointer wraparound does not "
9026 "occur when comparing P +- C1 with "
9027 "P +- C2"),
9028 WARN_STRICT_OVERFLOW_COMPARISON);
9030 return fold_build2_loc (loc, code, type, offset0, offset1);
9033 /* For non-equal bases we can simplify if they are addresses
9034 of local binding decls or constants. */
9035 else if (indirect_base0 && indirect_base1
9036 /* We know that !operand_equal_p (base0, base1, 0)
9037 because the if condition was false. But make
9038 sure the two decls are not the same. */
9039 && base0 != base1
9040 && TREE_CODE (arg0) == ADDR_EXPR
9041 && TREE_CODE (arg1) == ADDR_EXPR
9042 && (((TREE_CODE (base0) == VAR_DECL
9043 || TREE_CODE (base0) == PARM_DECL)
9044 && (targetm.binds_local_p (base0)
9045 || CONSTANT_CLASS_P (base1)))
9046 || CONSTANT_CLASS_P (base0))
9047 && (((TREE_CODE (base1) == VAR_DECL
9048 || TREE_CODE (base1) == PARM_DECL)
9049 && (targetm.binds_local_p (base1)
9050 || CONSTANT_CLASS_P (base0)))
9051 || CONSTANT_CLASS_P (base1)))
9053 if (code == EQ_EXPR)
9054 return omit_two_operands_loc (loc, type, boolean_false_node,
9055 arg0, arg1);
9056 else if (code == NE_EXPR)
9057 return omit_two_operands_loc (loc, type, boolean_true_node,
9058 arg0, arg1);
9060 /* For equal offsets we can simplify to a comparison of the
9061 base addresses. */
9062 else if (bitpos0 == bitpos1
9063 && (indirect_base0
9064 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9065 && (indirect_base1
9066 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9067 && ((offset0 == offset1)
9068 || (offset0 && offset1
9069 && operand_equal_p (offset0, offset1, 0))))
9071 if (indirect_base0)
9072 base0 = build_fold_addr_expr_loc (loc, base0);
9073 if (indirect_base1)
9074 base1 = build_fold_addr_expr_loc (loc, base1);
9075 return fold_build2_loc (loc, code, type, base0, base1);
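
/* Two illustrative sketches (not GCC source) of what the decomposition
   above achieves.  First, equal base objects reduce the comparison to
   the constant member bit positions; second, an ADDR_EXPR of a local
   can never equal the default-def SSA name of an incoming parameter,
   so the test folds outright.  */

struct two { int first; int second; };

int member_order (void)
{
  static struct two s;
  /* Same base object, bitpos0 == 0 < bitpos1 == 8 * sizeof (int):
     folds to constant 1.  */
  return &s.first < &s.second;
}

int points_at_local (int *p)
{
  int local;
  /* EQ_EXPR folds to 0: P's incoming value cannot point to LOCAL.  */
  return p == &local;
}
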
9079 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9080 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9081 the resulting offset is smaller in absolute value than the
9082 original one. */
9083 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9084 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9085 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9086 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9087 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9088 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9089 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9091 tree const1 = TREE_OPERAND (arg0, 1);
9092 tree const2 = TREE_OPERAND (arg1, 1);
9093 tree variable1 = TREE_OPERAND (arg0, 0);
9094 tree variable2 = TREE_OPERAND (arg1, 0);
9095 tree cst;
9096 const char * const warnmsg = G_("assuming signed overflow does not "
9097 "occur when combining constants around "
9098 "a comparison");
9100 /* Put the constant on the side where it doesn't overflow and is
9101 of lower absolute value than before. */
9102 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9103 ? MINUS_EXPR : PLUS_EXPR,
9104 const2, const1);
9105 if (!TREE_OVERFLOW (cst)
9106 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9108 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9109 return fold_build2_loc (loc, code, type,
9110 variable1,
9111 fold_build2_loc (loc,
9112 TREE_CODE (arg1), TREE_TYPE (arg1),
9113 variable2, cst));
9116 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9117 ? MINUS_EXPR : PLUS_EXPR,
9118 const1, const2);
9119 if (!TREE_OVERFLOW (cst)
9120 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9122 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9123 return fold_build2_loc (loc, code, type,
9124 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9125 variable1, cst),
9126 variable2);
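
/* A sketch (not GCC source) of the combine-constants fold above; it
   relies on signed overflow being undefined, and the constant lands on
   the side where its absolute value shrinks.  */

int combined_cmp (int x, int y)
{
  /* X + C1 cmp Y + C2 -> X cmp Y + (C2 - C1):
     x + 100 < y + 103 may fold to x < y + 3.  */
  return x + 100 < y + 103;
}
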
9130 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9131 signed arithmetic case. That form is created by the compiler
9132 often enough for folding it to be of value. One example is in
9133 computing loop trip counts after Operator Strength Reduction. */
9134 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9135 && TREE_CODE (arg0) == MULT_EXPR
9136 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9137 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9138 && integer_zerop (arg1))
9140 tree const1 = TREE_OPERAND (arg0, 1);
9141 tree const2 = arg1; /* zero */
9142 tree variable1 = TREE_OPERAND (arg0, 0);
9143 enum tree_code cmp_code = code;
9145 /* Handle unfolded multiplication by zero. */
9146 if (integer_zerop (const1))
9147 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9149 fold_overflow_warning (("assuming signed overflow does not occur when "
9150 "eliminating multiplication in comparison "
9151 "with zero"),
9152 WARN_STRICT_OVERFLOW_COMPARISON);
9154 /* If const1 is negative we swap the sense of the comparison. */
9155 if (tree_int_cst_sgn (const1) < 0)
9156 cmp_code = swap_tree_comparison (cmp_code);
9158 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
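
/* Source-level effect of the multiplication elimination above
   (illustrative; again assumes undefined signed overflow).  A negative
   multiplier swaps the comparison sense.  */

int mult_cmp_zero (int n)
{
  int pos = n * 4 > 0;    /* folds to n > 0  */
  int neg = n * -4 > 0;   /* folds to n < 0  */
  return pos + neg;
}
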
9161 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9162 if (tem)
9163 return tem;
9165 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9167 tree targ0 = strip_float_extensions (arg0);
9168 tree targ1 = strip_float_extensions (arg1);
9169 tree newtype = TREE_TYPE (targ0);
9171 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9172 newtype = TREE_TYPE (targ1);
9174 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9175 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9176 return fold_build2_loc (loc, code, type,
9177 fold_convert_loc (loc, newtype, targ0),
9178 fold_convert_loc (loc, newtype, targ1));
9180 /* (-a) CMP (-b) -> b CMP a */
9181 if (TREE_CODE (arg0) == NEGATE_EXPR
9182 && TREE_CODE (arg1) == NEGATE_EXPR)
9183 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9184 TREE_OPERAND (arg0, 0));
9186 if (TREE_CODE (arg1) == REAL_CST)
9188 REAL_VALUE_TYPE cst;
9189 cst = TREE_REAL_CST (arg1);
9191 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9192 if (TREE_CODE (arg0) == NEGATE_EXPR)
9193 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9194 TREE_OPERAND (arg0, 0),
9195 build_real (TREE_TYPE (arg1),
9196 real_value_negate (&cst)));
9198 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9199 /* a CMP (-0) -> a CMP 0 */
9200 if (REAL_VALUE_MINUS_ZERO (cst))
9201 return fold_build2_loc (loc, code, type, arg0,
9202 build_real (TREE_TYPE (arg1), dconst0));
9204 /* x != NaN is always true, other ops are always false. */
9205 if (REAL_VALUE_ISNAN (cst)
9206 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9208 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9209 return omit_one_operand_loc (loc, type, tem, arg0);
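
/* A standalone check (illustrative, assuming a target with IEEE quiet
   NaNs) of the rule this fold hard-codes: != against NaN is always
   true, every other comparison is always false.  */

#include <assert.h>
#include <math.h>

int main (void)
{
  double x = 1.0;
  assert (x != NAN);      /* folded to true   */
  assert (!(x < NAN));    /* folded to false  */
  assert (!(x >= NAN));   /* folded to false  */
  return 0;
}
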
9212 /* Fold comparisons against infinity. */
9213 if (REAL_VALUE_ISINF (cst)
9214 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9216 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9217 if (tem != NULL_TREE)
9218 return tem;
9222 /* If this is a comparison of a real constant with a PLUS_EXPR
9223 or a MINUS_EXPR of a real constant, we can convert it into a
9224 comparison with a revised real constant as long as no overflow
9225 occurs when unsafe_math_optimizations are enabled. */
9226 if (flag_unsafe_math_optimizations
9227 && TREE_CODE (arg1) == REAL_CST
9228 && (TREE_CODE (arg0) == PLUS_EXPR
9229 || TREE_CODE (arg0) == MINUS_EXPR)
9230 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9231 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9232 ? MINUS_EXPR : PLUS_EXPR,
9233 arg1, TREE_OPERAND (arg0, 1)))
9234 && !TREE_OVERFLOW (tem))
9235 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9237 /* Likewise, we can simplify a comparison of a real constant with
9238 a MINUS_EXPR whose first operand is also a real constant, i.e.
9239 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9240 floating-point types only if -fassociative-math is set. */
9241 if (flag_associative_math
9242 && TREE_CODE (arg1) == REAL_CST
9243 && TREE_CODE (arg0) == MINUS_EXPR
9244 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9245 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9246 arg1))
9247 && !TREE_OVERFLOW (tem))
9248 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9249 TREE_OPERAND (arg0, 1), tem);
9251 /* Fold comparisons against built-in math functions. */
9252 if (TREE_CODE (arg1) == REAL_CST
9253 && flag_unsafe_math_optimizations
9254 && ! flag_errno_math)
9256 enum built_in_function fcode = builtin_mathfn_code (arg0);
9258 if (fcode != END_BUILTINS)
9260 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9261 if (tem != NULL_TREE)
9262 return tem;
9267 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9268 && CONVERT_EXPR_P (arg0))
9270 /* If we are widening one operand of an integer comparison,
9271 see if the other operand is similarly being widened. Perhaps we
9272 can do the comparison in the narrower type. */
9273 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9274 if (tem)
9275 return tem;
9277 /* Or if we are changing signedness. */
9278 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9279 if (tem)
9280 return tem;
9283 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9284 constant, we can simplify it. */
9285 if (TREE_CODE (arg1) == INTEGER_CST
9286 && (TREE_CODE (arg0) == MIN_EXPR
9287 || TREE_CODE (arg0) == MAX_EXPR)
9288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9290 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9291 if (tem)
9292 return tem;
9295 /* Simplify comparison of something with itself. (For IEEE
9296 floating-point, we can only do some of these simplifications.) */
9297 if (operand_equal_p (arg0, arg1, 0))
9299 switch (code)
9301 case EQ_EXPR:
9302 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9303 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9304 return constant_boolean_node (1, type);
9305 break;
9307 case GE_EXPR:
9308 case LE_EXPR:
9309 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9310 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9311 return constant_boolean_node (1, type);
9312 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9314 case NE_EXPR:
9315 /* For NE, we can only do this simplification if integer
9316 or we don't honor IEEE floating point NaNs. */
9317 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9318 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9319 break;
9320 /* ... fall through ... */
9321 case GT_EXPR:
9322 case LT_EXPR:
9323 return constant_boolean_node (0, type);
9324 default:
9325 gcc_unreachable ();
9329 /* If we are comparing an expression that just has comparisons
9330 of two integer values, arithmetic expressions of those comparisons,
9331 and constants, we can simplify it. There are only three cases
9332 to check: the two values can either be equal, the first can be
9333 greater, or the second can be greater. Fold the expression for
9334 those three values. Since each value must be 0 or 1, we have
9335 eight possibilities, each of which corresponds to the constant 0
9336 or 1 or one of the six possible comparisons.
9338 This handles common cases like (a > b) == 0 but also handles
9339 expressions like ((x > y) - (y > x)) > 0, which supposedly
9340 occur in macroized code. */
9342 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9344 tree cval1 = 0, cval2 = 0;
9345 int save_p = 0;
9347 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9348 /* Don't handle degenerate cases here; they should already
9349 have been handled anyway. */
9350 && cval1 != 0 && cval2 != 0
9351 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9352 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9353 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9354 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9355 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9356 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9357 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9359 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9360 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9362 /* We can't just pass T to eval_subst in case cval1 or cval2
9363 was the same as ARG1. */
9365 tree high_result
9366 = fold_build2_loc (loc, code, type,
9367 eval_subst (loc, arg0, cval1, maxval,
9368 cval2, minval),
9369 arg1);
9370 tree equal_result
9371 = fold_build2_loc (loc, code, type,
9372 eval_subst (loc, arg0, cval1, maxval,
9373 cval2, maxval),
9374 arg1);
9375 tree low_result
9376 = fold_build2_loc (loc, code, type,
9377 eval_subst (loc, arg0, cval1, minval,
9378 cval2, maxval),
9379 arg1);
9381 /* All three of these results should be 0 or 1. Confirm they are.
9382 Then use those values to select the proper code to use. */
9384 if (TREE_CODE (high_result) == INTEGER_CST
9385 && TREE_CODE (equal_result) == INTEGER_CST
9386 && TREE_CODE (low_result) == INTEGER_CST)
9388 /* Make a 3-bit mask with the high-order bit being the
9389 value for `>', the next for `=', and the low for `<'. */
9390 switch ((integer_onep (high_result) * 4)
9391 + (integer_onep (equal_result) * 2)
9392 + integer_onep (low_result))
9394 case 0:
9395 /* Always false. */
9396 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9397 case 1:
9398 code = LT_EXPR;
9399 break;
9400 case 2:
9401 code = EQ_EXPR;
9402 break;
9403 case 3:
9404 code = LE_EXPR;
9405 break;
9406 case 4:
9407 code = GT_EXPR;
9408 break;
9409 case 5:
9410 code = NE_EXPR;
9411 break;
9412 case 6:
9413 code = GE_EXPR;
9414 break;
9415 case 7:
9416 /* Always true. */
9417 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9420 if (save_p)
9422 tem = save_expr (build2 (code, type, cval1, cval2));
9423 SET_EXPR_LOCATION (tem, loc);
9424 return tem;
9426 return fold_build2_loc (loc, code, type, cval1, cval2);
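
/* A worked instance (not GCC source) of the three-way probing above.
   For ((x > y) - (y > x)) > 0 the probes yield high_result = 1,
   equal_result = 0 and low_result = 0, i.e. mask 4, which selects
   GT_EXPR.  */

int sign_of_cmp (int x, int y)
{
  return ((x > y) - (y > x)) > 0;   /* folds to x > y  */
}
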
9431 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9432 into a single range test. */
9433 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9434 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9435 && TREE_CODE (arg1) == INTEGER_CST
9436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9437 && !integer_zerop (TREE_OPERAND (arg0, 1))
9438 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9439 && !TREE_OVERFLOW (arg1))
9441 tem = fold_div_compare (loc, code, type, arg0, arg1);
9442 if (tem != NULL_TREE)
9443 return tem;
9446 /* Fold ~X op ~Y as Y op X. */
9447 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9448 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9450 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9451 return fold_build2_loc (loc, code, type,
9452 fold_convert_loc (loc, cmp_type,
9453 TREE_OPERAND (arg1, 0)),
9454 TREE_OPERAND (arg0, 0));
9457 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9458 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9459 && TREE_CODE (arg1) == INTEGER_CST)
9461 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9462 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9463 TREE_OPERAND (arg0, 0),
9464 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9465 fold_convert_loc (loc, cmp_type, arg1)));
9468 return NULL_TREE;
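
/* The two BIT_NOT folds above written out in C (a sketch; uses the
   two's complement identity ~x == -x - 1).  */

int bit_not_cmps (int x, int y)
{
  int a = ~x < ~y;   /* ~X op ~Y -> y < x           */
  int b = ~x < 5;    /* ~X op C  -> x > ~5, i.e. -6 */
  return a + b;
}
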
9472 /* Subroutine of fold_binary. Optimize complex multiplications of the
9473 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9474 argument EXPR represents the expression "z" of type TYPE. */
9476 static tree
9477 fold_mult_zconjz (location_t loc, tree type, tree expr)
9479 tree itype = TREE_TYPE (type);
9480 tree rpart, ipart, tem;
9482 if (TREE_CODE (expr) == COMPLEX_EXPR)
9484 rpart = TREE_OPERAND (expr, 0);
9485 ipart = TREE_OPERAND (expr, 1);
9487 else if (TREE_CODE (expr) == COMPLEX_CST)
9489 rpart = TREE_REALPART (expr);
9490 ipart = TREE_IMAGPART (expr);
9492 else
9494 expr = save_expr (expr);
9495 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9496 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9499 rpart = save_expr (rpart);
9500 ipart = save_expr (ipart);
9501 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9502 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9503 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9504 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9505 build_zero_cst (itype));
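
/* Standalone check (illustrative) of the identity behind
   fold_mult_zconjz: for z = a + bi, z * conj(z) equals a*a + b*b with
   zero imaginary part; the values below are small integers, so the
   equalities are exact.  */

#include <assert.h>
#include <complex.h>

int main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex p = z * conj (z);
  assert (creal (p) == 25.0);   /* 3*3 + 4*4  */
  assert (cimag (p) == 0.0);
  return 0;
}
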
9509 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9510 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9511 guarantees that P and N have the same least significant log2(M) bits.
9512 N is not otherwise constrained. In particular, N is not normalized to
9513 0 <= N < M as is common. In general, the precise value of P is unknown.
9514 M is chosen as large as possible such that constant N can be determined.
9516 Returns M and sets *RESIDUE to N.
9518 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9519 account. This is not always possible due to PR 35705.
9522 static unsigned HOST_WIDE_INT
9523 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9524 bool allow_func_align)
9526 enum tree_code code;
9528 *residue = 0;
9530 code = TREE_CODE (expr);
9531 if (code == ADDR_EXPR)
9533 unsigned int bitalign;
9534 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9535 *residue /= BITS_PER_UNIT;
9536 return bitalign / BITS_PER_UNIT;
9538 else if (code == POINTER_PLUS_EXPR)
9540 tree op0, op1;
9541 unsigned HOST_WIDE_INT modulus;
9542 enum tree_code inner_code;
9544 op0 = TREE_OPERAND (expr, 0);
9545 STRIP_NOPS (op0);
9546 modulus = get_pointer_modulus_and_residue (op0, residue,
9547 allow_func_align);
9549 op1 = TREE_OPERAND (expr, 1);
9550 STRIP_NOPS (op1);
9551 inner_code = TREE_CODE (op1);
9552 if (inner_code == INTEGER_CST)
9554 *residue += TREE_INT_CST_LOW (op1);
9555 return modulus;
9557 else if (inner_code == MULT_EXPR)
9559 op1 = TREE_OPERAND (op1, 1);
9560 if (TREE_CODE (op1) == INTEGER_CST)
9562 unsigned HOST_WIDE_INT align;
9564 /* Compute the greatest power-of-2 divisor of op1. */
9565 align = TREE_INT_CST_LOW (op1);
9566 align &= -align;
9568 /* If align is non-zero and less than *modulus, replace
9569 *modulus with align. If align is 0, then either op1 is 0
9570 or the greatest power-of-2 divisor of op1 doesn't fit in an
9571 unsigned HOST_WIDE_INT. In either case, no additional
9572 constraint is imposed. */
9573 if (align)
9574 modulus = MIN (modulus, align);
9576 return modulus;
9581 /* If we get here, we were unable to determine anything useful about the
9582 expression. */
9583 return 1;
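
/* A worked instance of the M/N contract above (illustrative values):
   if BUF is 16-byte aligned, then for p = buf + 4*i + 2 the function
   can report modulus M = 4 with residue N = 2, so the two low address
   bits of P are known.  */

#include <stdint.h>

static char buf[64] __attribute__ ((aligned (16)));

uintptr_t known_low_bits (unsigned int i)
{
  char *p = buf + 4 * i + 2;    /* in-range I assumed  */
  return (uintptr_t) p % 4;     /* always 2            */
}
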
9586 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9587 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9589 static bool
9590 vec_cst_ctor_to_array (tree arg, tree *elts)
9592 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9594 if (TREE_CODE (arg) == VECTOR_CST)
9596 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9597 elts[i] = VECTOR_CST_ELT (arg, i);
9599 else if (TREE_CODE (arg) == CONSTRUCTOR)
9601 constructor_elt *elt;
9603 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9604 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9605 return false;
9606 else
9607 elts[i] = elt->value;
9609 else
9610 return false;
9611 for (; i < nelts; i++)
9612 elts[i]
9613 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9614 return true;
9617 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9618 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9619 NULL_TREE otherwise. */
9621 static tree
9622 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9624 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9625 tree *elts;
9626 bool need_ctor = false;
9628 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9629 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9630 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9631 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9632 return NULL_TREE;
9634 elts = XALLOCAVEC (tree, nelts * 3);
9635 if (!vec_cst_ctor_to_array (arg0, elts)
9636 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9637 return NULL_TREE;
9639 for (i = 0; i < nelts; i++)
9641 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9642 need_ctor = true;
9643 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9646 if (need_ctor)
9648 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9649 for (i = 0; i < nelts; i++)
9650 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9651 return build_constructor (type, v);
9653 else
9654 return build_vector (type, &elts[2 * nelts]);
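
/* Sketch of a permutation fold_vec_perm can fold away entirely:
   constant inputs and a constant selector (indices address the
   concatenation of ARG0 and ARG1) produce a constant vector.  Uses
   GCC's generic vector extension.  */

typedef int v4si __attribute__ ((vector_size (16)));

v4si interleave_lo (void)
{
  v4si a = { 0, 1, 2, 3 };
  v4si b = { 4, 5, 6, 7 };
  v4si sel = { 0, 4, 1, 5 };
  return __builtin_shuffle (a, b, sel);   /* folds to { 0, 4, 1, 5 }  */
}
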
9657 /* Try to fold a pointer difference of type TYPE between two address
9658 expressions of array references AREF0 and AREF1 using location LOC. Return a
9659 simplified expression for the difference or NULL_TREE. */
9661 static tree
9662 fold_addr_of_array_ref_difference (location_t loc, tree type,
9663 tree aref0, tree aref1)
9665 tree base0 = TREE_OPERAND (aref0, 0);
9666 tree base1 = TREE_OPERAND (aref1, 0);
9667 tree base_offset = build_int_cst (type, 0);
9669 /* If the bases are array references as well, recurse. If the bases
9670 are pointer indirections compute the difference of the pointers.
9671 If the bases are equal, we are set. */
9672 if ((TREE_CODE (base0) == ARRAY_REF
9673 && TREE_CODE (base1) == ARRAY_REF
9674 && (base_offset
9675 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9676 || (INDIRECT_REF_P (base0)
9677 && INDIRECT_REF_P (base1)
9678 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9679 TREE_OPERAND (base0, 0),
9680 TREE_OPERAND (base1, 0))))
9681 || operand_equal_p (base0, base1, 0))
9683 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9684 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9685 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9686 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9687 return fold_build2_loc (loc, PLUS_EXPR, type,
9688 base_offset,
9689 fold_build2_loc (loc, MULT_EXPR, type,
9690 diff, esz));
9692 return NULL_TREE;
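
/* Source-level shape the function above handles (illustrative sketch):
   a difference of two addresses into the same array reduces to the
   index difference times the element size.  */

#include <stddef.h>

ptrdiff_t index_delta (int i, int j)
{
  static int a[100];
  /* Valid while both indices are inside the array; may fold to
     (ptrdiff_t) (i - j).  */
  return &a[i] - &a[j];
}
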
9695 /* If the real or vector real constant CST of type TYPE has an exact
9696 inverse, return it, else return NULL. */
9698 static tree
9699 exact_inverse (tree type, tree cst)
9701 REAL_VALUE_TYPE r;
9702 tree unit_type, *elts;
9703 enum machine_mode mode;
9704 unsigned vec_nelts, i;
9706 switch (TREE_CODE (cst))
9708 case REAL_CST:
9709 r = TREE_REAL_CST (cst);
9711 if (exact_real_inverse (TYPE_MODE (type), &r))
9712 return build_real (type, r);
9714 return NULL_TREE;
9716 case VECTOR_CST:
9717 vec_nelts = VECTOR_CST_NELTS (cst);
9718 elts = XALLOCAVEC (tree, vec_nelts);
9719 unit_type = TREE_TYPE (type);
9720 mode = TYPE_MODE (unit_type);
9722 for (i = 0; i < vec_nelts; i++)
9724 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9725 if (!exact_real_inverse (mode, &r))
9726 return NULL_TREE;
9727 elts[i] = build_real (unit_type, r);
9730 return build_vector (type, elts);
9732 default:
9733 return NULL_TREE;
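
/* Where exact_inverse pays off (illustrative): 0.25 is exactly
   representable in binary, so when optimizing x / 4.0 can be rewritten
   as x * 0.25 without changing any result; 3.0 has no exact
   reciprocal, so that division stays.  */

double quarter (double x) { return x / 4.0; }   /* -> x * 0.25       */
double third (double x)   { return x / 3.0; }   /* stays a division  */
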
9737 /* Fold a binary expression of code CODE and type TYPE with operands
9738 OP0 and OP1. LOC is the location of the resulting expression.
9739 Return the folded expression if folding is successful. Otherwise,
9740 return NULL_TREE. */
9742 tree
9743 fold_binary_loc (location_t loc,
9744 enum tree_code code, tree type, tree op0, tree op1)
9746 enum tree_code_class kind = TREE_CODE_CLASS (code);
9747 tree arg0, arg1, tem;
9748 tree t1 = NULL_TREE;
9749 bool strict_overflow_p;
9751 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9752 && TREE_CODE_LENGTH (code) == 2
9753 && op0 != NULL_TREE
9754 && op1 != NULL_TREE);
9756 arg0 = op0;
9757 arg1 = op1;
9759 /* Strip any conversions that don't change the mode. This is
9760 safe for every expression, except for a comparison expression
9761 because its signedness is derived from its operands. So, in
9762 the latter case, only strip conversions that don't change the
9763 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9764 preserved.
9766 Note that this is done as an internal manipulation within the
9767 constant folder, in order to find the simplest representation
9768 of the arguments so that their form can be studied. In any
9769 case, the appropriate type conversions should be put back in
9770 the tree that will get out of the constant folder. */
9772 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9774 STRIP_SIGN_NOPS (arg0);
9775 STRIP_SIGN_NOPS (arg1);
9777 else
9779 STRIP_NOPS (arg0);
9780 STRIP_NOPS (arg1);
9783 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9784 constant but we can't do arithmetic on them. */
9785 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9786 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9787 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9788 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9789 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9790 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9792 if (kind == tcc_binary)
9794 /* Make sure type and arg0 have the same saturating flag. */
9795 gcc_assert (TYPE_SATURATING (type)
9796 == TYPE_SATURATING (TREE_TYPE (arg0)));
9797 tem = const_binop (code, arg0, arg1);
9799 else if (kind == tcc_comparison)
9800 tem = fold_relational_const (code, type, arg0, arg1);
9801 else
9802 tem = NULL_TREE;
9804 if (tem != NULL_TREE)
9806 if (TREE_TYPE (tem) != type)
9807 tem = fold_convert_loc (loc, type, tem);
9808 return tem;
9812 /* If this is a commutative operation, and ARG0 is a constant, move it
9813 to ARG1 to reduce the number of tests below. */
9814 if (commutative_tree_code (code)
9815 && tree_swap_operands_p (arg0, arg1, true))
9816 return fold_build2_loc (loc, code, type, op1, op0);
9818 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9820 First check for cases where an arithmetic operation is applied to a
9821 compound, conditional, or comparison operation. Push the arithmetic
9822 operation inside the compound or conditional to see if any folding
9823 can then be done. Convert comparison to conditional for this purpose.
9824 This also optimizes non-constant cases that used to be done in
9825 expand_expr.
9827 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9828 one of the operands is a comparison and the other is a comparison, a
9829 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9830 code below would make the expression more complex. Change it to a
9831 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9832 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9834 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9835 || code == EQ_EXPR || code == NE_EXPR)
9836 && TREE_CODE (type) != VECTOR_TYPE
9837 && ((truth_value_p (TREE_CODE (arg0))
9838 && (truth_value_p (TREE_CODE (arg1))
9839 || (TREE_CODE (arg1) == BIT_AND_EXPR
9840 && integer_onep (TREE_OPERAND (arg1, 1)))))
9841 || (truth_value_p (TREE_CODE (arg1))
9842 && (truth_value_p (TREE_CODE (arg0))
9843 || (TREE_CODE (arg0) == BIT_AND_EXPR
9844 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9846 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9847 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9848 : TRUTH_XOR_EXPR,
9849 boolean_type_node,
9850 fold_convert_loc (loc, boolean_type_node, arg0),
9851 fold_convert_loc (loc, boolean_type_node, arg1));
9853 if (code == EQ_EXPR)
9854 tem = invert_truthvalue_loc (loc, tem);
9856 return fold_convert_loc (loc, type, tem);
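
/* What the conversion above enables (a sketch): a bitwise operation on
   comparison results is really a logical operation on truth values, so
   it is rewritten to a TRUTH_{AND,OR,XOR}_EXPR where the logical folds
   apply.  */

int both_less (int a, int b, int c, int d)
{
  return (a < b) & (c < d);   /* becomes TRUTH_AND_EXPR  */
}
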
9859 if (TREE_CODE_CLASS (code) == tcc_binary
9860 || TREE_CODE_CLASS (code) == tcc_comparison)
9862 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9864 tem = fold_build2_loc (loc, code, type,
9865 fold_convert_loc (loc, TREE_TYPE (op0),
9866 TREE_OPERAND (arg0, 1)), op1);
9867 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9868 tem);
9870 if (TREE_CODE (arg1) == COMPOUND_EXPR
9871 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9873 tem = fold_build2_loc (loc, code, type, op0,
9874 fold_convert_loc (loc, TREE_TYPE (op1),
9875 TREE_OPERAND (arg1, 1)));
9876 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9877 tem);
9880 if (TREE_CODE (arg0) == COND_EXPR
9881 || TREE_CODE (arg0) == VEC_COND_EXPR
9882 || COMPARISON_CLASS_P (arg0))
9884 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9885 arg0, arg1,
9886 /*cond_first_p=*/1);
9887 if (tem != NULL_TREE)
9888 return tem;
9891 if (TREE_CODE (arg1) == COND_EXPR
9892 || TREE_CODE (arg1) == VEC_COND_EXPR
9893 || COMPARISON_CLASS_P (arg1))
9895 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9896 arg1, arg0,
9897 /*cond_first_p=*/0);
9898 if (tem != NULL_TREE)
9899 return tem;
9903 switch (code)
9905 case MEM_REF:
9906 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9907 if (TREE_CODE (arg0) == ADDR_EXPR
9908 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9910 tree iref = TREE_OPERAND (arg0, 0);
9911 return fold_build2 (MEM_REF, type,
9912 TREE_OPERAND (iref, 0),
9913 int_const_binop (PLUS_EXPR, arg1,
9914 TREE_OPERAND (iref, 1)));
9917 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9918 if (TREE_CODE (arg0) == ADDR_EXPR
9919 && handled_component_p (TREE_OPERAND (arg0, 0)))
9921 tree base;
9922 HOST_WIDE_INT coffset;
9923 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9924 &coffset);
9925 if (!base)
9926 return NULL_TREE;
9927 return fold_build2 (MEM_REF, type,
9928 build_fold_addr_expr (base),
9929 int_const_binop (PLUS_EXPR, arg1,
9930 size_int (coffset)));
9933 return NULL_TREE;
9935 case POINTER_PLUS_EXPR:
9936 /* 0 +p index -> (type)index */
9937 if (integer_zerop (arg0))
9938 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9940 /* PTR +p 0 -> PTR */
9941 if (integer_zerop (arg1))
9942 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9944 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9945 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9946 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9947 return fold_convert_loc (loc, type,
9948 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9949 fold_convert_loc (loc, sizetype,
9950 arg1),
9951 fold_convert_loc (loc, sizetype,
9952 arg0)));
9954 /* (PTR +p B) +p A -> PTR +p (B + A) */
9955 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9957 tree inner;
9958 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9959 tree arg00 = TREE_OPERAND (arg0, 0);
9960 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9961 arg01, fold_convert_loc (loc, sizetype, arg1));
9962 return fold_convert_loc (loc, type,
9963 fold_build_pointer_plus_loc (loc,
9964 arg00, inner));
9967 /* PTR_CST +p CST -> CST1 */
9968 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9969 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9970 fold_convert_loc (loc, type, arg1));
9972 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9973 of the array. The loop optimizer sometimes produces this type of
9974 expression. */
9975 if (TREE_CODE (arg0) == ADDR_EXPR)
9977 tem = try_move_mult_to_index (loc, arg0,
9978 fold_convert_loc (loc,
9979 ssizetype, arg1));
9980 if (tem)
9981 return fold_convert_loc (loc, type, tem);
9984 return NULL_TREE;
9986 case PLUS_EXPR:
9987 /* A + (-B) -> A - B */
9988 if (TREE_CODE (arg1) == NEGATE_EXPR)
9989 return fold_build2_loc (loc, MINUS_EXPR, type,
9990 fold_convert_loc (loc, type, arg0),
9991 fold_convert_loc (loc, type,
9992 TREE_OPERAND (arg1, 0)));
9993 /* (-A) + B -> B - A */
9994 if (TREE_CODE (arg0) == NEGATE_EXPR
9995 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9996 return fold_build2_loc (loc, MINUS_EXPR, type,
9997 fold_convert_loc (loc, type, arg1),
9998 fold_convert_loc (loc, type,
9999 TREE_OPERAND (arg0, 0)));
10001 if (INTEGRAL_TYPE_P (type))
10003 /* Convert ~A + 1 to -A. */
10004 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10005 && integer_onep (arg1))
10006 return fold_build1_loc (loc, NEGATE_EXPR, type,
10007 fold_convert_loc (loc, type,
10008 TREE_OPERAND (arg0, 0)));
10010 /* ~X + X is -1. */
10011 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10012 && !TYPE_OVERFLOW_TRAPS (type))
10014 tree tem = TREE_OPERAND (arg0, 0);
10016 STRIP_NOPS (tem);
10017 if (operand_equal_p (tem, arg1, 0))
10019 t1 = build_int_cst_type (type, -1);
10020 return omit_one_operand_loc (loc, type, t1, arg1);
10024 /* X + ~X is -1. */
10025 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10026 && !TYPE_OVERFLOW_TRAPS (type))
10028 tree tem = TREE_OPERAND (arg1, 0);
10030 STRIP_NOPS (tem);
10031 if (operand_equal_p (arg0, tem, 0))
10033 t1 = build_int_cst_type (type, -1);
10034 return omit_one_operand_loc (loc, type, t1, arg0);
10038 /* X + (X / CST) * -CST is X % CST. */
10039 if (TREE_CODE (arg1) == MULT_EXPR
10040 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10041 && operand_equal_p (arg0,
10042 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10044 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10045 tree cst1 = TREE_OPERAND (arg1, 1);
10046 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10047 cst1, cst0);
10048 if (sum && integer_zerop (sum))
10049 return fold_convert_loc (loc, type,
10050 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10051 TREE_TYPE (arg0), arg0,
10052 cst0));
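
/* Quick checks (illustrative) of the integral PLUS identities above;
   the ~x lines assume two's complement arithmetic and x != INT_MIN.  */

#include <assert.h>

int main (void)
{
  int x = 1234;
  assert (~x + 1 == -x);                /* ~A + 1 -> -A                */
  assert (~x + x == -1);                /* ~X + X -> -1                */
  assert (x + (x / 7) * -7 == x % 7);   /* X + (X/CST)*-CST -> X%CST   */
  return 0;
}
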
10056 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10057 one. Make sure the type is not saturating and has the signedness of
10058 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10059 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10060 if ((TREE_CODE (arg0) == MULT_EXPR
10061 || TREE_CODE (arg1) == MULT_EXPR)
10062 && !TYPE_SATURATING (type)
10063 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10064 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10065 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10067 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10068 if (tem)
10069 return tem;
10072 if (! FLOAT_TYPE_P (type))
10074 if (integer_zerop (arg1))
10075 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10077 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10078 with a constant, and the two constants have no bits in common,
10079 we should treat this as a BIT_IOR_EXPR since this may produce more
10080 simplifications. */
10081 if (TREE_CODE (arg0) == BIT_AND_EXPR
10082 && TREE_CODE (arg1) == BIT_AND_EXPR
10083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10084 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10085 && integer_zerop (const_binop (BIT_AND_EXPR,
10086 TREE_OPERAND (arg0, 1),
10087 TREE_OPERAND (arg1, 1))))
10089 code = BIT_IOR_EXPR;
10090 goto bit_ior;
10093 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10094 (plus (plus (mult) (mult)) (foo)) so that we can
10095 take advantage of the factoring cases below. */
10096 if (TYPE_OVERFLOW_WRAPS (type)
10097 && (((TREE_CODE (arg0) == PLUS_EXPR
10098 || TREE_CODE (arg0) == MINUS_EXPR)
10099 && TREE_CODE (arg1) == MULT_EXPR)
10100 || ((TREE_CODE (arg1) == PLUS_EXPR
10101 || TREE_CODE (arg1) == MINUS_EXPR)
10102 && TREE_CODE (arg0) == MULT_EXPR)))
10104 tree parg0, parg1, parg, marg;
10105 enum tree_code pcode;
10107 if (TREE_CODE (arg1) == MULT_EXPR)
10108 parg = arg0, marg = arg1;
10109 else
10110 parg = arg1, marg = arg0;
10111 pcode = TREE_CODE (parg);
10112 parg0 = TREE_OPERAND (parg, 0);
10113 parg1 = TREE_OPERAND (parg, 1);
10114 STRIP_NOPS (parg0);
10115 STRIP_NOPS (parg1);
10117 if (TREE_CODE (parg0) == MULT_EXPR
10118 && TREE_CODE (parg1) != MULT_EXPR)
10119 return fold_build2_loc (loc, pcode, type,
10120 fold_build2_loc (loc, PLUS_EXPR, type,
10121 fold_convert_loc (loc, type,
10122 parg0),
10123 fold_convert_loc (loc, type,
10124 marg)),
10125 fold_convert_loc (loc, type, parg1));
10126 if (TREE_CODE (parg0) != MULT_EXPR
10127 && TREE_CODE (parg1) == MULT_EXPR)
10128 return
10129 fold_build2_loc (loc, PLUS_EXPR, type,
10130 fold_convert_loc (loc, type, parg0),
10131 fold_build2_loc (loc, pcode, type,
10132 fold_convert_loc (loc, type, marg),
10133 fold_convert_loc (loc, type,
10134 parg1)));
10137 else
10139 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10140 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10141 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10143 /* Likewise if the operands are reversed. */
10144 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10147 /* Convert X + -C into X - C. */
10148 if (TREE_CODE (arg1) == REAL_CST
10149 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10151 tem = fold_negate_const (arg1, type);
10152 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10153 return fold_build2_loc (loc, MINUS_EXPR, type,
10154 fold_convert_loc (loc, type, arg0),
10155 fold_convert_loc (loc, type, tem));
10158 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10159 to __complex__ ( x, y ). This is not the same for SNaNs or
10160 if signed zeros are involved. */
10161 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10163 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10165 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10166 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10167 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10168 bool arg0rz = false, arg0iz = false;
10169 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10170 || (arg0i && (arg0iz = real_zerop (arg0i))))
10172 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10173 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10174 if (arg0rz && arg1i && real_zerop (arg1i))
10176 tree rp = arg1r ? arg1r
10177 : build1 (REALPART_EXPR, rtype, arg1);
10178 tree ip = arg0i ? arg0i
10179 : build1 (IMAGPART_EXPR, rtype, arg0);
10180 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10182 else if (arg0iz && arg1r && real_zerop (arg1r))
10184 tree rp = arg0r ? arg0r
10185 : build1 (REALPART_EXPR, rtype, arg0);
10186 tree ip = arg1i ? arg1i
10187 : build1 (IMAGPART_EXPR, rtype, arg1);
10188 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10193 if (flag_unsafe_math_optimizations
10194 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10195 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10196 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10197 return tem;
10199 /* Convert x+x into x*2.0. */
10200 if (operand_equal_p (arg0, arg1, 0)
10201 && SCALAR_FLOAT_TYPE_P (type))
10202 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10203 build_real (type, dconst2));
10205 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10206 We associate floats only if the user has specified
10207 -fassociative-math. */
10208 if (flag_associative_math
10209 && TREE_CODE (arg1) == PLUS_EXPR
10210 && TREE_CODE (arg0) != MULT_EXPR)
10212 tree tree10 = TREE_OPERAND (arg1, 0);
10213 tree tree11 = TREE_OPERAND (arg1, 1);
10214 if (TREE_CODE (tree11) == MULT_EXPR
10215 && TREE_CODE (tree10) == MULT_EXPR)
10217 tree tree0;
10218 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10219 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10222 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10223 We associate floats only if the user has specified
10224 -fassociative-math. */
10225 if (flag_associative_math
10226 && TREE_CODE (arg0) == PLUS_EXPR
10227 && TREE_CODE (arg1) != MULT_EXPR)
10229 tree tree00 = TREE_OPERAND (arg0, 0);
10230 tree tree01 = TREE_OPERAND (arg0, 1);
10231 if (TREE_CODE (tree01) == MULT_EXPR
10232 && TREE_CODE (tree00) == MULT_EXPR)
10234 tree tree0;
10235 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10236 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10241 bit_rotate:
10242 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10243 is a rotate of A by C1 bits. */
10244 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10245 is a rotate of A by B bits. */
10247 enum tree_code code0, code1;
10248 tree rtype;
10249 code0 = TREE_CODE (arg0);
10250 code1 = TREE_CODE (arg1);
10251 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10252 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10253 && operand_equal_p (TREE_OPERAND (arg0, 0),
10254 TREE_OPERAND (arg1, 0), 0)
10255 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10256 TYPE_UNSIGNED (rtype))
10257 /* Only create rotates in complete modes. Other cases are not
10258 expanded properly. */
10259 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10261 tree tree01, tree11;
10262 enum tree_code code01, code11;
10264 tree01 = TREE_OPERAND (arg0, 1);
10265 tree11 = TREE_OPERAND (arg1, 1);
10266 STRIP_NOPS (tree01);
10267 STRIP_NOPS (tree11);
10268 code01 = TREE_CODE (tree01);
10269 code11 = TREE_CODE (tree11);
10270 if (code01 == INTEGER_CST
10271 && code11 == INTEGER_CST
10272 && TREE_INT_CST_HIGH (tree01) == 0
10273 && TREE_INT_CST_HIGH (tree11) == 0
10274 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10275 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10277 tem = build2_loc (loc, LROTATE_EXPR,
10278 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10279 TREE_OPERAND (arg0, 0),
10280 code0 == LSHIFT_EXPR ? tree01 : tree11);
10281 return fold_convert_loc (loc, type, tem);
10283 else if (code11 == MINUS_EXPR)
10285 tree tree110, tree111;
10286 tree110 = TREE_OPERAND (tree11, 0);
10287 tree111 = TREE_OPERAND (tree11, 1);
10288 STRIP_NOPS (tree110);
10289 STRIP_NOPS (tree111);
10290 if (TREE_CODE (tree110) == INTEGER_CST
10291 && 0 == compare_tree_int (tree110,
10292 TYPE_PRECISION
10293 (TREE_TYPE (TREE_OPERAND
10294 (arg0, 0))))
10295 && operand_equal_p (tree01, tree111, 0))
10296 return
10297 fold_convert_loc (loc, type,
10298 build2 ((code0 == LSHIFT_EXPR
10299 ? LROTATE_EXPR
10300 : RROTATE_EXPR),
10301 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10302 TREE_OPERAND (arg0, 0), tree01));
10304 else if (code01 == MINUS_EXPR)
10306 tree tree010, tree011;
10307 tree010 = TREE_OPERAND (tree01, 0);
10308 tree011 = TREE_OPERAND (tree01, 1);
10309 STRIP_NOPS (tree010);
10310 STRIP_NOPS (tree011);
10311 if (TREE_CODE (tree010) == INTEGER_CST
10312 && 0 == compare_tree_int (tree010,
10313 TYPE_PRECISION
10314 (TREE_TYPE (TREE_OPERAND
10315 (arg0, 0))))
10316 && operand_equal_p (tree11, tree011, 0))
10317 return fold_convert_loc
10318 (loc, type,
10319 build2 ((code0 != LSHIFT_EXPR
10320 ? LROTATE_EXPR
10321 : RROTATE_EXPR),
10322 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10323 TREE_OPERAND (arg0, 0), tree11));
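
/* The classic rotate idiom matched above (a sketch; assumes a 32-bit
   unsigned int and 1 <= b <= 31 so both shifts are well defined):  */

unsigned int rotl32 (unsigned int a, unsigned int b)
{
  /* (A << B) + (A >> (32 - B)) on an unsigned value in a complete
     mode becomes a single LROTATE_EXPR.  */
  return (a << b) + (a >> (32 - b));
}
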
10328 associate:
10329 /* In most languages, we can't associate operations on floats through
10330 parentheses. Rather than remember where the parentheses were, we
10331 don't associate floats at all, unless the user has specified
10332 -fassociative-math.
10333 And, we need to make sure the type is not saturating. */
10335 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10336 && !TYPE_SATURATING (type))
10338 tree var0, con0, lit0, minus_lit0;
10339 tree var1, con1, lit1, minus_lit1;
10340 bool ok = true;
10342 /* Split both trees into variables, constants, and literals. Then
10343 associate each group together, the constants with literals,
10344 then the result with variables. This increases the chances of
10345 literals being recombined later and of generating relocatable
10346 expressions for the sum of a constant and literal. */
10347 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10348 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10349 code == MINUS_EXPR);
10351 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10352 if (code == MINUS_EXPR)
10353 code = PLUS_EXPR;
10355 /* With undefined overflow we can only associate constants with one
10356 variable, and constants whose association doesn't overflow. */
10357 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10358 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10360 if (var0 && var1)
10362 tree tmp0 = var0;
10363 tree tmp1 = var1;
10365 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10366 tmp0 = TREE_OPERAND (tmp0, 0);
10367 if (CONVERT_EXPR_P (tmp0)
10368 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10370 <= TYPE_PRECISION (type)))
10371 tmp0 = TREE_OPERAND (tmp0, 0);
10372 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10373 tmp1 = TREE_OPERAND (tmp1, 0);
10374 if (CONVERT_EXPR_P (tmp1)
10375 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10376 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10377 <= TYPE_PRECISION (type)))
10378 tmp1 = TREE_OPERAND (tmp1, 0);
10379 /* The only case we can still associate with two variables
10380 is if they are the same, modulo negation and bit-pattern
10381 preserving conversions. */
10382 if (!operand_equal_p (tmp0, tmp1, 0))
10383 ok = false;
10386 if (ok && lit0 && lit1)
10388 tree tmp0 = fold_convert (type, lit0);
10389 tree tmp1 = fold_convert (type, lit1);
10391 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10392 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10393 ok = false;
10397 /* Only do something if we found more than two objects. Otherwise,
10398 nothing has changed and we risk infinite recursion. */
10399 if (ok
10400 && (2 < ((var0 != 0) + (var1 != 0)
10401 + (con0 != 0) + (con1 != 0)
10402 + (lit0 != 0) + (lit1 != 0)
10403 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10405 var0 = associate_trees (loc, var0, var1, code, type);
10406 con0 = associate_trees (loc, con0, con1, code, type);
10407 lit0 = associate_trees (loc, lit0, lit1, code, type);
10408 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10410 /* Preserve the MINUS_EXPR if the negative part of the literal is
10411 greater than the positive part. Otherwise, the multiplicative
10412 folding code (i.e. extract_muldiv) may be fooled in case
10413 unsigned constants are subtracted, like in the following
10414 example: ((X*2 + 4) - 8U)/2. */
10415 if (minus_lit0 && lit0)
10417 if (TREE_CODE (lit0) == INTEGER_CST
10418 && TREE_CODE (minus_lit0) == INTEGER_CST
10419 && tree_int_cst_lt (lit0, minus_lit0))
10421 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10422 MINUS_EXPR, type);
10423 lit0 = 0;
10425 else
10427 lit0 = associate_trees (loc, lit0, minus_lit0,
10428 MINUS_EXPR, type);
10429 minus_lit0 = 0;
10432 if (minus_lit0)
10434 if (con0 == 0)
10435 return
10436 fold_convert_loc (loc, type,
10437 associate_trees (loc, var0, minus_lit0,
10438 MINUS_EXPR, type));
10439 else
10441 con0 = associate_trees (loc, con0, minus_lit0,
10442 MINUS_EXPR, type);
10443 return
10444 fold_convert_loc (loc, type,
10445 associate_trees (loc, var0, con0,
10446 PLUS_EXPR, type));
10450 con0 = associate_trees (loc, con0, lit0, code, type);
10451 return
10452 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10453 code, type));
10457 return NULL_TREE;
10459 case MINUS_EXPR:
10460 /* Pointer simplifications for subtraction, simple reassociations. */
10461 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10463 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10464 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10465 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10467 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10468 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10469 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10470 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10471 return fold_build2_loc (loc, PLUS_EXPR, type,
10472 fold_build2_loc (loc, MINUS_EXPR, type,
10473 arg00, arg10),
10474 fold_build2_loc (loc, MINUS_EXPR, type,
10475 arg01, arg11));
10477 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10478 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10480 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10481 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10482 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10483 fold_convert_loc (loc, type, arg1));
10484 if (tmp)
10485 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10488 /* A - (-B) -> A + B */
10489 if (TREE_CODE (arg1) == NEGATE_EXPR)
10490 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10491 fold_convert_loc (loc, type,
10492 TREE_OPERAND (arg1, 0)));
10493 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10494 if (TREE_CODE (arg0) == NEGATE_EXPR
10495 && (FLOAT_TYPE_P (type)
10496 || INTEGRAL_TYPE_P (type))
10497 && negate_expr_p (arg1)
10498 && reorder_operands_p (arg0, arg1))
10499 return fold_build2_loc (loc, MINUS_EXPR, type,
10500 fold_convert_loc (loc, type,
10501 negate_expr (arg1)),
10502 fold_convert_loc (loc, type,
10503 TREE_OPERAND (arg0, 0)));
10504 /* Convert -A - 1 to ~A. */
10505 if (INTEGRAL_TYPE_P (type)
10506 && TREE_CODE (arg0) == NEGATE_EXPR
10507 && integer_onep (arg1)
10508 && !TYPE_OVERFLOW_TRAPS (type))
10509 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10510 fold_convert_loc (loc, type,
10511 TREE_OPERAND (arg0, 0)));
10513 /* Convert -1 - A to ~A. */
10514 if (INTEGRAL_TYPE_P (type)
10515 && integer_all_onesp (arg0))
10516 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10519 /* X - (X / CST) * CST is X % CST. */
10520 if (INTEGRAL_TYPE_P (type)
10521 && TREE_CODE (arg1) == MULT_EXPR
10522 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10523 && operand_equal_p (arg0,
10524 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10525 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10526 TREE_OPERAND (arg1, 1), 0))
10527 return
10528 fold_convert_loc (loc, type,
10529 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10530 arg0, TREE_OPERAND (arg1, 1)));
10532 if (! FLOAT_TYPE_P (type))
10534 if (integer_zerop (arg0))
10535 return negate_expr (fold_convert_loc (loc, type, arg1));
10536 if (integer_zerop (arg1))
10537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10539 /* Fold A - (A & B) into ~B & A. */
10540 if (!TREE_SIDE_EFFECTS (arg0)
10541 && TREE_CODE (arg1) == BIT_AND_EXPR)
10543 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10545 tree arg10 = fold_convert_loc (loc, type,
10546 TREE_OPERAND (arg1, 0));
10547 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10548 fold_build1_loc (loc, BIT_NOT_EXPR,
10549 type, arg10),
10550 fold_convert_loc (loc, type, arg0));
10552 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10554 tree arg11 = fold_convert_loc (loc,
10555 type, TREE_OPERAND (arg1, 1));
10556 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10557 fold_build1_loc (loc, BIT_NOT_EXPR,
10558 type, arg11),
10559 fold_convert_loc (loc, type, arg0));
10563 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10564 any power of 2 minus 1. */
10565 if (TREE_CODE (arg0) == BIT_AND_EXPR
10566 && TREE_CODE (arg1) == BIT_AND_EXPR
10567 && operand_equal_p (TREE_OPERAND (arg0, 0),
10568 TREE_OPERAND (arg1, 0), 0))
10570 tree mask0 = TREE_OPERAND (arg0, 1);
10571 tree mask1 = TREE_OPERAND (arg1, 1);
10572 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10574 if (operand_equal_p (tem, mask1, 0))
10576 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10577 TREE_OPERAND (arg0, 0), mask1);
10578 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10583 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10584 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10587 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10588 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10589 (-ARG1 + ARG0) reduces to -ARG1. */
10590 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10591 return negate_expr (fold_convert_loc (loc, type, arg1));
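
/* Checks (illustrative) for the integral MINUS identities in the block
   above; two's complement is assumed, and M is a power of 2 minus 1 as
   the last identity requires.  */

#include <assert.h>

int main (void)
{
  int a = 42;
  unsigned int u = 0xdeadbeefu, m = 0x0fu;
  assert (-a - 1 == ~a);                         /* -A - 1 -> ~A            */
  assert (-1 - a == ~a);                         /* -1 - A -> ~A            */
  assert (u - (u & m) == (~m & u));              /* A - (A&B) -> ~B & A     */
  assert ((u & ~m) - (u & m) == ((u ^ m) - m));  /* (A&~B)-(A&B)->(A^B)-B   */
  return 0;
}
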
10593 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10594 __complex__ ( x, -y ). This is not the same for SNaNs or if
10595 signed zeros are involved. */
10596 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10597 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10598 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10600 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10601 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10602 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10603 bool arg0rz = false, arg0iz = false;
10604 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10605 || (arg0i && (arg0iz = real_zerop (arg0i))))
10607 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10608 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10609 if (arg0rz && arg1i && real_zerop (arg1i))
10611 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10612 arg1r ? arg1r
10613 : build1 (REALPART_EXPR, rtype, arg1));
10614 tree ip = arg0i ? arg0i
10615 : build1 (IMAGPART_EXPR, rtype, arg0);
10616 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10618 else if (arg0iz && arg1r && real_zerop (arg1r))
10620 tree rp = arg0r ? arg0r
10621 : build1 (REALPART_EXPR, rtype, arg0);
10622 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10623 arg1i ? arg1i
10624 : build1 (IMAGPART_EXPR, rtype, arg1));
10625 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10630 /* Fold &x - &x. This can happen from &x.foo - &x.
10631 This is unsafe for certain floats even in non-IEEE formats.
10632 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10633 Also note that operand_equal_p is always false if an operand
10634 is volatile. */
10636 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10637 && operand_equal_p (arg0, arg1, 0))
10638 return build_zero_cst (type);
10640 /* A - B -> A + (-B) if B is easily negatable. */
10641 if (negate_expr_p (arg1)
10642 && ((FLOAT_TYPE_P (type)
10643 /* Avoid this transformation if B is a positive REAL_CST. */
10644 && (TREE_CODE (arg1) != REAL_CST
10645 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10646 || INTEGRAL_TYPE_P (type)))
10647 return fold_build2_loc (loc, PLUS_EXPR, type,
10648 fold_convert_loc (loc, type, arg0),
10649 fold_convert_loc (loc, type,
10650 negate_expr (arg1)));
10652 /* Try folding difference of addresses. */
10654 HOST_WIDE_INT diff;
10656 if ((TREE_CODE (arg0) == ADDR_EXPR
10657 || TREE_CODE (arg1) == ADDR_EXPR)
10658 && ptr_difference_const (arg0, arg1, &diff))
10659 return build_int_cst_type (type, diff);
10662 /* Fold &a[i] - &a[j] to i-j. */
10663 if (TREE_CODE (arg0) == ADDR_EXPR
10664 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10665 && TREE_CODE (arg1) == ADDR_EXPR
10666 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10668 tree tem = fold_addr_of_array_ref_difference (loc, type,
10669 TREE_OPERAND (arg0, 0),
10670 TREE_OPERAND (arg1, 0));
10671 if (tem)
10672 return tem;
10675 if (FLOAT_TYPE_P (type)
10676 && flag_unsafe_math_optimizations
10677 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10678 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10679 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10680 return tem;
10682 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10683 one. Make sure the type is not saturating and has the signedness of
10684 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10685 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10686 if ((TREE_CODE (arg0) == MULT_EXPR
10687 || TREE_CODE (arg1) == MULT_EXPR)
10688 && !TYPE_SATURATING (type)
10689 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10690 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10691 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10693 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10694 if (tem)
10695 return tem;
10698 goto associate;
10700 case MULT_EXPR:
10701 /* (-A) * (-B) -> A * B */
10702 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10703 return fold_build2_loc (loc, MULT_EXPR, type,
10704 fold_convert_loc (loc, type,
10705 TREE_OPERAND (arg0, 0)),
10706 fold_convert_loc (loc, type,
10707 negate_expr (arg1)));
10708 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10709 return fold_build2_loc (loc, MULT_EXPR, type,
10710 fold_convert_loc (loc, type,
10711 negate_expr (arg0)),
10712 fold_convert_loc (loc, type,
10713 TREE_OPERAND (arg1, 0)));
10715 if (! FLOAT_TYPE_P (type))
10717 if (integer_zerop (arg1))
10718 return omit_one_operand_loc (loc, type, arg1, arg0);
10719 if (integer_onep (arg1))
10720 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10721 /* Transform x * -1 into -x. Make sure to do the negation
10722 on the original operand with conversions not stripped
10723 because we can only strip non-sign-changing conversions. */
10724 if (integer_all_onesp (arg1))
10725 return fold_convert_loc (loc, type, negate_expr (op0));
10726 /* Transform x * -C into -x * C if x is easily negatable. */
10727 if (TREE_CODE (arg1) == INTEGER_CST
10728 && tree_int_cst_sgn (arg1) == -1
10729 && negate_expr_p (arg0)
10730 && (tem = negate_expr (arg1)) != arg1
10731 && !TREE_OVERFLOW (tem))
10732 return fold_build2_loc (loc, MULT_EXPR, type,
10733 fold_convert_loc (loc, type,
10734 negate_expr (arg0)),
10735 tem);
10737 /* (a * (1 << b)) is (a << b) */
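   /* E.g. a * (1 << 3) is a * 8, which is exactly a << 3.  */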
10738 if (TREE_CODE (arg1) == LSHIFT_EXPR
10739 && integer_onep (TREE_OPERAND (arg1, 0)))
10740 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10741 TREE_OPERAND (arg1, 1));
10742 if (TREE_CODE (arg0) == LSHIFT_EXPR
10743 && integer_onep (TREE_OPERAND (arg0, 0)))
10744 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10745 TREE_OPERAND (arg0, 1));
10747 /* (A + A) * C -> A * 2 * C */
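   /* E.g. (x + x) * 5 becomes x * (2 * 5), which folds to x * 10.  */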
10748 if (TREE_CODE (arg0) == PLUS_EXPR
10749 && TREE_CODE (arg1) == INTEGER_CST
10750 && operand_equal_p (TREE_OPERAND (arg0, 0),
10751 TREE_OPERAND (arg0, 1), 0))
10752 return fold_build2_loc (loc, MULT_EXPR, type,
10753 omit_one_operand_loc (loc, type,
10754 TREE_OPERAND (arg0, 0),
10755 TREE_OPERAND (arg0, 1)),
10756 fold_build2_loc (loc, MULT_EXPR, type,
10757 build_int_cst (type, 2) , arg1));
10759 strict_overflow_p = false;
10760 if (TREE_CODE (arg1) == INTEGER_CST
10761 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10762 &strict_overflow_p)))
10764 if (strict_overflow_p)
10765 fold_overflow_warning (("assuming signed overflow does not "
10766 "occur when simplifying "
10767 "multiplication"),
10768 WARN_STRICT_OVERFLOW_MISC);
10769 return fold_convert_loc (loc, type, tem);
10772 /* Optimize z * conj(z) for integer complex numbers. */
10773 if (TREE_CODE (arg0) == CONJ_EXPR
10774 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10775 return fold_mult_zconjz (loc, type, arg1);
10776 if (TREE_CODE (arg1) == CONJ_EXPR
10777 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10778 return fold_mult_zconjz (loc, type, arg0);
10780 else
10782 /* Maybe fold x * 0 to 0. The expressions aren't the same
10783 when x is NaN, since x * 0 is also NaN. Nor are they the
10784 same in modes with signed zeros, since multiplying a
10785 negative value by 0 gives -0, not +0. */
10786 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10787 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10788 && real_zerop (arg1))
10789 return omit_one_operand_loc (loc, type, arg1, arg0);
10790 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10791 Likewise for complex arithmetic with signed zeros. */
10792 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10793 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10794 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10795 && real_onep (arg1))
10796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10798 /* Transform x * -1.0 into -x. */
10799 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10800 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10801 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10802 && real_minus_onep (arg1))
10803 return fold_convert_loc (loc, type, negate_expr (arg0));
10805 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10806 the result for floating point types due to rounding, so it is applied
10807 only if -fassociative-math was specified. */
10808 if (flag_associative_math
10809 && TREE_CODE (arg0) == RDIV_EXPR
10810 && TREE_CODE (arg1) == REAL_CST
10811 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10813 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10814 arg1);
10815 if (tem)
10816 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10817 TREE_OPERAND (arg0, 1));
10820 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10821 if (operand_equal_p (arg0, arg1, 0))
10823 tree tem = fold_strip_sign_ops (arg0);
10824 if (tem != NULL_TREE)
10826 tem = fold_convert_loc (loc, type, tem);
10827 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10831 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10832 This is not the same for NaNs or if signed zeros are
10833 involved. */
10834 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10835 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10836 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10837 && TREE_CODE (arg1) == COMPLEX_CST
10838 && real_zerop (TREE_REALPART (arg1)))
10840 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10841 if (real_onep (TREE_IMAGPART (arg1)))
10842 return
10843 fold_build2_loc (loc, COMPLEX_EXPR, type,
10844 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10845 rtype, arg0)),
10846 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10847 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10848 return
10849 fold_build2_loc (loc, COMPLEX_EXPR, type,
10850 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10851 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10852 rtype, arg0)));
10855 /* Optimize z * conj(z) for floating point complex numbers.
10856 Guarded by flag_unsafe_math_optimizations as non-finite
10857 imaginary components don't produce scalar results. */
10858 if (flag_unsafe_math_optimizations
10859 && TREE_CODE (arg0) == CONJ_EXPR
10860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10861 return fold_mult_zconjz (loc, type, arg1);
10862 if (flag_unsafe_math_optimizations
10863 && TREE_CODE (arg1) == CONJ_EXPR
10864 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10865 return fold_mult_zconjz (loc, type, arg0);
10867 if (flag_unsafe_math_optimizations)
10869 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10870 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10872 /* Optimizations of root(...)*root(...). */
10873 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10875 tree rootfn, arg;
10876 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10877 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10879 /* Optimize sqrt(x)*sqrt(x) as x. */
10880 if (BUILTIN_SQRT_P (fcode0)
10881 && operand_equal_p (arg00, arg10, 0)
10882 && ! HONOR_SNANS (TYPE_MODE (type)))
10883 return arg00;
10885 /* Optimize root(x)*root(y) as root(x*y). */
10886 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10887 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10888 return build_call_expr_loc (loc, rootfn, 1, arg);
10891 /* Optimize expN(x)*expN(y) as expN(x+y). */
10892 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10894 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10895 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10896 CALL_EXPR_ARG (arg0, 0),
10897 CALL_EXPR_ARG (arg1, 0));
10898 return build_call_expr_loc (loc, expfn, 1, arg);
10901 /* Optimizations of pow(...)*pow(...). */
10902 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10903 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10904 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10906 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10907 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10908 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10909 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10911 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10912 if (operand_equal_p (arg01, arg11, 0))
10914 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10915 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10916 arg00, arg10);
10917 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10920 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
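   /* E.g. pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  This is
      only an identity up to rounding and special values, hence the
      flag_unsafe_math_optimizations guard above.  */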
10921 if (operand_equal_p (arg00, arg10, 0))
10923 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10924 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10925 arg01, arg11);
10926 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10930 /* Optimize tan(x)*cos(x) as sin(x). */
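   /* tan(x) == sin(x)/cos(x), so the product is sin(x) wherever
      cos(x) != 0; at cos(x) == 0 the product is undefined, which is
      why this too sits under flag_unsafe_math_optimizations.  */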
10931 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10932 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10933 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10934 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10935 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10936 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10937 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10938 CALL_EXPR_ARG (arg1, 0), 0))
10940 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10942 if (sinfn != NULL_TREE)
10943 return build_call_expr_loc (loc, sinfn, 1,
10944 CALL_EXPR_ARG (arg0, 0));
10947 /* Optimize x*pow(x,c) as pow(x,c+1). */
10948 if (fcode1 == BUILT_IN_POW
10949 || fcode1 == BUILT_IN_POWF
10950 || fcode1 == BUILT_IN_POWL)
10952 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10953 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10954 if (TREE_CODE (arg11) == REAL_CST
10955 && !TREE_OVERFLOW (arg11)
10956 && operand_equal_p (arg0, arg10, 0))
10958 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10959 REAL_VALUE_TYPE c;
10960 tree arg;
10962 c = TREE_REAL_CST (arg11);
10963 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10964 arg = build_real (type, c);
10965 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10969 /* Optimize pow(x,c)*x as pow(x,c+1). */
10970 if (fcode0 == BUILT_IN_POW
10971 || fcode0 == BUILT_IN_POWF
10972 || fcode0 == BUILT_IN_POWL)
10974 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10975 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10976 if (TREE_CODE (arg01) == REAL_CST
10977 && !TREE_OVERFLOW (arg01)
10978 && operand_equal_p (arg1, arg00, 0))
10980 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10981 REAL_VALUE_TYPE c;
10982 tree arg;
10984 c = TREE_REAL_CST (arg01);
10985 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10986 arg = build_real (type, c);
10987 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10991 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10992 if (!in_gimple_form
10993 && optimize
10994 && operand_equal_p (arg0, arg1, 0))
10996 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10998 if (powfn)
11000 tree arg = build_real (type, dconst2);
11001 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11006 goto associate;
11008 case BIT_IOR_EXPR:
11009 bit_ior:
11010 if (integer_all_onesp (arg1))
11011 return omit_one_operand_loc (loc, type, arg1, arg0);
11012 if (integer_zerop (arg1))
11013 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11014 if (operand_equal_p (arg0, arg1, 0))
11015 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11017 /* ~X | X is -1. */
11018 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11021 t1 = build_zero_cst (type);
11022 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11023 return omit_one_operand_loc (loc, type, t1, arg1);
11026 /* X | ~X is -1. */
11027 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11028 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11030 t1 = build_zero_cst (type);
11031 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11032 return omit_one_operand_loc (loc, type, t1, arg0);
11035 /* Canonicalize (X & C1) | C2. */
11036 if (TREE_CODE (arg0) == BIT_AND_EXPR
11037 && TREE_CODE (arg1) == INTEGER_CST
11038 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11040 double_int c1, c2, c3, msk;
11041 int width = TYPE_PRECISION (type), w;
11042 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11043 c2 = tree_to_double_int (arg1);
11045 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11046 if ((c1 & c2) == c1)
11047 return omit_one_operand_loc (loc, type, arg1,
11048 TREE_OPERAND (arg0, 0));
11050 msk = double_int::mask (width);
11052 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11053 if (msk.and_not (c1 | c2).is_zero ())
11054 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11055 TREE_OPERAND (arg0, 0), arg1);
11057 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11058 unless, for some C3, (C1 & ~C2) | (C2 & C3) is a mask of some
11059 integer mode, which allows further optimizations. */
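   /* E.g. for (X & 0x0F) | 0x0C: C1 & C2 = 0x0C != C1 and
      C1 | C2 = 0x0F != ~0, so minimize C1: C1 & ~C2 = 0x03, and the
      expression becomes the equivalent (X & 0x03) | 0x0C.  */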
11060 c1 &= msk;
11061 c2 &= msk;
11062 c3 = c1.and_not (c2);
11063 for (w = BITS_PER_UNIT;
11064 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11065 w <<= 1)
11067 unsigned HOST_WIDE_INT mask
11068 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11069 if (((c1.low | c2.low) & mask) == mask
11070 && (c1.low & ~mask) == 0 && c1.high == 0)
11072 c3 = double_int::from_uhwi (mask);
11073 break;
11076 if (c3 != c1)
11077 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11078 fold_build2_loc (loc, BIT_AND_EXPR, type,
11079 TREE_OPERAND (arg0, 0),
11080 double_int_to_tree (type,
11081 c3)),
11082 arg1);
11085 /* (X & Y) | Y is (X, Y). */
11086 if (TREE_CODE (arg0) == BIT_AND_EXPR
11087 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11088 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11089 /* (X & Y) | X is (Y, X). */
11090 if (TREE_CODE (arg0) == BIT_AND_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11092 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11093 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11094 /* X | (X & Y) is (Y, X). */
11095 if (TREE_CODE (arg1) == BIT_AND_EXPR
11096 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11097 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11098 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11099 /* X | (Y & X) is (Y, X). */
11100 if (TREE_CODE (arg1) == BIT_AND_EXPR
11101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11102 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11103 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11105 /* (X & ~Y) | (~X & Y) is X ^ Y */
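   /* Bitwise: each result bit is set iff exactly one of the
      corresponding bits of X and Y is set, which is XOR.  */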
11106 if (TREE_CODE (arg0) == BIT_AND_EXPR
11107 && TREE_CODE (arg1) == BIT_AND_EXPR)
11109 tree a0, a1, l0, l1, n0, n1;
11111 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11112 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11114 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11115 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11117 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11118 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11120 if ((operand_equal_p (n0, a0, 0)
11121 && operand_equal_p (n1, a1, 0))
11122 || (operand_equal_p (n0, a1, 0)
11123 && operand_equal_p (n1, a0, 0)))
11124 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11127 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11128 if (t1 != NULL_TREE)
11129 return t1;
11131 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11133 This results in more efficient code for machines without a NAND
11134 instruction. Combine will canonicalize to the first form,
11135 which will allow the use of NAND instructions provided by the
11136 backend if they exist. */
11137 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11138 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11140 return
11141 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11142 build2 (BIT_AND_EXPR, type,
11143 fold_convert_loc (loc, type,
11144 TREE_OPERAND (arg0, 0)),
11145 fold_convert_loc (loc, type,
11146 TREE_OPERAND (arg1, 0))));
11149 /* See if this can be simplified into a rotate first. If that
11150 is unsuccessful, continue in the association code. */
11151 goto bit_rotate;
11153 case BIT_XOR_EXPR:
11154 if (integer_zerop (arg1))
11155 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11156 if (integer_all_onesp (arg1))
11157 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11158 if (operand_equal_p (arg0, arg1, 0))
11159 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11161 /* ~X ^ X is -1. */
11162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11165 t1 = build_zero_cst (type);
11166 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11167 return omit_one_operand_loc (loc, type, t1, arg1);
11170 /* X ^ ~X is -1. */
11171 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11174 t1 = build_zero_cst (type);
11175 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11176 return omit_one_operand_loc (loc, type, t1, arg0);
11179 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11180 with a constant, and the two constants have no bits in common,
11181 we should treat this as a BIT_IOR_EXPR since this may produce more
11182 simplifications. */
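   /* E.g. (X & 0xF0) ^ (Y & 0x0F): no bit position can be set in both
      operands, so the XOR behaves exactly like an IOR here.  */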
11183 if (TREE_CODE (arg0) == BIT_AND_EXPR
11184 && TREE_CODE (arg1) == BIT_AND_EXPR
11185 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11186 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11187 && integer_zerop (const_binop (BIT_AND_EXPR,
11188 TREE_OPERAND (arg0, 1),
11189 TREE_OPERAND (arg1, 1))))
11191 code = BIT_IOR_EXPR;
11192 goto bit_ior;
11195 /* (X | Y) ^ X -> Y & ~X. */
11196 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11199 tree t2 = TREE_OPERAND (arg0, 1);
11200 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11201 arg1);
11202 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11203 fold_convert_loc (loc, type, t2),
11204 fold_convert_loc (loc, type, t1));
11205 return t1;
11208 /* (Y | X) ^ X -> Y & ~X. */
11209 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11210 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11212 tree t2 = TREE_OPERAND (arg0, 0);
11213 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11214 arg1);
11215 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11216 fold_convert_loc (loc, type, t2),
11217 fold_convert_loc (loc, type, t1));
11218 return t1;
11221 /* X ^ (X | Y) -> Y & ~X. */
11222 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11223 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11225 tree t2 = TREE_OPERAND (arg1, 1);
11226 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11227 arg0);
11228 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11229 fold_convert_loc (loc, type, t2),
11230 fold_convert_loc (loc, type, t1));
11231 return t1;
11234 /* X ^ (Y | X) -> Y & ~X. */
11235 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11236 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11238 tree t2 = TREE_OPERAND (arg1, 0);
11239 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11240 arg0);
11241 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11242 fold_convert_loc (loc, type, t2),
11243 fold_convert_loc (loc, type, t1));
11244 return t1;
11247 /* Convert ~X ^ ~Y to X ^ Y. */
11248 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11249 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11250 return fold_build2_loc (loc, code, type,
11251 fold_convert_loc (loc, type,
11252 TREE_OPERAND (arg0, 0)),
11253 fold_convert_loc (loc, type,
11254 TREE_OPERAND (arg1, 0)));
11256 /* Convert ~X ^ C to X ^ ~C. */
11257 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11258 && TREE_CODE (arg1) == INTEGER_CST)
11259 return fold_build2_loc (loc, code, type,
11260 fold_convert_loc (loc, type,
11261 TREE_OPERAND (arg0, 0)),
11262 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11264 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11265 if (TREE_CODE (arg0) == BIT_AND_EXPR
11266 && integer_onep (TREE_OPERAND (arg0, 1))
11267 && integer_onep (arg1))
11268 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11269 build_zero_cst (TREE_TYPE (arg0)));
11271 /* Fold (X & Y) ^ Y as ~X & Y. */
11272 if (TREE_CODE (arg0) == BIT_AND_EXPR
11273 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11275 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11276 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11278 fold_convert_loc (loc, type, arg1));
11280 /* Fold (X & Y) ^ X as ~Y & X. */
11281 if (TREE_CODE (arg0) == BIT_AND_EXPR
11282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11283 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11285 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11286 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11287 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11288 fold_convert_loc (loc, type, arg1));
11290 /* Fold X ^ (X & Y) as X & ~Y. */
11291 if (TREE_CODE (arg1) == BIT_AND_EXPR
11292 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11294 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11295 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11296 fold_convert_loc (loc, type, arg0),
11297 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11299 /* Fold X ^ (Y & X) as ~Y & X. */
11300 if (TREE_CODE (arg1) == BIT_AND_EXPR
11301 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11302 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11304 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11305 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11307 fold_convert_loc (loc, type, arg0));
11310 /* See if this can be simplified into a rotate first. If that
11311 is unsuccessful, continue in the association code. */
11312 goto bit_rotate;
11314 case BIT_AND_EXPR:
11315 if (integer_all_onesp (arg1))
11316 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11317 if (integer_zerop (arg1))
11318 return omit_one_operand_loc (loc, type, arg1, arg0);
11319 if (operand_equal_p (arg0, arg1, 0))
11320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11322 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11323 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11324 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11325 || (TREE_CODE (arg0) == EQ_EXPR
11326 && integer_zerop (TREE_OPERAND (arg0, 1))))
11327 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11328 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11330 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11331 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11332 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11333 || (TREE_CODE (arg1) == EQ_EXPR
11334 && integer_zerop (TREE_OPERAND (arg1, 1))))
11335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11336 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11338 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
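   /* AND distributes over IOR, e.g. (X | 0x0F) & 0xFC
      == (X & 0xFC) | (0x0F & 0xFC) == (X & 0xFC) | 0x0C.  */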
11339 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11340 && TREE_CODE (arg1) == INTEGER_CST
11341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11343 tree tmp1 = fold_convert_loc (loc, type, arg1);
11344 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11345 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11346 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11347 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11348 return
11349 fold_convert_loc (loc, type,
11350 fold_build2_loc (loc, BIT_IOR_EXPR,
11351 type, tmp2, tmp3));
11354 /* (X | Y) & Y is (X, Y). */
11355 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11356 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11357 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11358 /* (X | Y) & X is (Y, X). */
11359 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11360 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11361 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11362 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11363 /* X & (X | Y) is (Y, X). */
11364 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11365 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11366 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11367 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11368 /* X & (Y | X) is (Y, X). */
11369 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11370 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11371 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11372 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11374 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11375 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11376 && integer_onep (TREE_OPERAND (arg0, 1))
11377 && integer_onep (arg1))
11379 tree tem2;
11380 tem = TREE_OPERAND (arg0, 0);
11381 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11382 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11383 tem, tem2);
11384 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11385 build_zero_cst (TREE_TYPE (tem)));
11387 /* Fold ~X & 1 as (X & 1) == 0. */
11388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11389 && integer_onep (arg1))
11391 tree tem2;
11392 tem = TREE_OPERAND (arg0, 0);
11393 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11394 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11395 tem, tem2);
11396 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11397 build_zero_cst (TREE_TYPE (tem)));
11399 /* Fold !X & 1 as X == 0. */
11400 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11401 && integer_onep (arg1))
11403 tem = TREE_OPERAND (arg0, 0);
11404 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11405 build_zero_cst (TREE_TYPE (tem)));
11408 /* Fold (X ^ Y) & Y as ~X & Y. */
11409 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11410 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11412 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11413 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11415 fold_convert_loc (loc, type, arg1));
11417 /* Fold (X ^ Y) & X as ~Y & X. */
11418 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11420 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11422 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11423 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11424 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11425 fold_convert_loc (loc, type, arg1));
11427 /* Fold X & (X ^ Y) as X & ~Y. */
11428 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11431 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11432 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11433 fold_convert_loc (loc, type, arg0),
11434 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11436 /* Fold X & (Y ^ X) as ~Y & X. */
11437 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11439 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11441 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11442 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11443 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11444 fold_convert_loc (loc, type, arg0));
11447 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11448 multiple of 1 << CST. */
11449 if (TREE_CODE (arg1) == INTEGER_CST)
11451 double_int cst1 = tree_to_double_int (arg1);
11452 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11453 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11454 if ((cst1 & ncst1) == ncst1
11455 && multiple_of_p (type, arg0,
11456 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11457 return fold_convert_loc (loc, type, arg0);
11460 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11461 bits from CST2. */
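   /* E.g. (X * 4) & 3 is always 0 because X * 4 has two trailing zero
      bits, and (X * 4) & 7 can drop those known-zero bits: (X * 4) & 4.  */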
11462 if (TREE_CODE (arg1) == INTEGER_CST
11463 && TREE_CODE (arg0) == MULT_EXPR
11464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11466 int arg1tz
11467 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11468 if (arg1tz > 0)
11470 double_int arg1mask, masked;
11471 arg1mask = ~double_int::mask (arg1tz);
11472 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11473 TYPE_UNSIGNED (type));
11474 masked = arg1mask & tree_to_double_int (arg1);
11475 if (masked.is_zero ())
11476 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11477 arg0, arg1);
11478 else if (masked != tree_to_double_int (arg1))
11479 return fold_build2_loc (loc, code, type, op0,
11480 double_int_to_tree (type, masked));
11484 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11485 ((A & N) + B) & M -> (A + B) & M
11486 Similarly if (N & M) == 0,
11487 ((A | N) + B) & M -> (A + B) & M
11488 and for - instead of + (or unary - instead of +)
11489 and/or ^ instead of |.
11490 If B is constant and (B & M) == 0, fold into A & M. */
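   /* E.g. with M = 0xFF and N = 0x1FF (so (N & M) == M),
      ((A & 0x1FF) + B) & 0xFF -> (A + B) & 0xFF: the bits of A that
      the inner mask removes cannot affect the low 8 bits of the sum.  */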
11491 if (host_integerp (arg1, 1))
11493 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11494 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11495 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11496 && (TREE_CODE (arg0) == PLUS_EXPR
11497 || TREE_CODE (arg0) == MINUS_EXPR
11498 || TREE_CODE (arg0) == NEGATE_EXPR)
11499 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11500 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11502 tree pmop[2];
11503 int which = 0;
11504 unsigned HOST_WIDE_INT cst0;
11506 /* Now we know that arg0 is (C + D) or (C - D) or
11507 -C, and arg1 (M) equals (1LL << cst) - 1.
11508 Store C into PMOP[0] and D into PMOP[1]. */
11509 pmop[0] = TREE_OPERAND (arg0, 0);
11510 pmop[1] = NULL;
11511 if (TREE_CODE (arg0) != NEGATE_EXPR)
11513 pmop[1] = TREE_OPERAND (arg0, 1);
11514 which = 1;
11517 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11518 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11519 & cst1) != cst1)
11520 which = -1;
11522 for (; which >= 0; which--)
11523 switch (TREE_CODE (pmop[which]))
11525 case BIT_AND_EXPR:
11526 case BIT_IOR_EXPR:
11527 case BIT_XOR_EXPR:
11528 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11529 != INTEGER_CST)
11530 break;
11531 /* tree_low_cst not used, because we don't care about
11532 the upper bits. */
11533 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11534 cst0 &= cst1;
11535 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11537 if (cst0 != cst1)
11538 break;
11540 else if (cst0 != 0)
11541 break;
11542 /* If C or D is of the form (A & N) where
11543 (N & M) == M, or of the form (A | N) or
11544 (A ^ N) where (N & M) == 0, replace it with A. */
11545 pmop[which] = TREE_OPERAND (pmop[which], 0);
11546 break;
11547 case INTEGER_CST:
11548 /* If C or D is an N where (N & M) == 0, it can be
11549 omitted (assumed 0). */
11550 if ((TREE_CODE (arg0) == PLUS_EXPR
11551 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11552 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11553 pmop[which] = NULL;
11554 break;
11555 default:
11556 break;
11559 /* Only build anything new if we optimized one or both arguments
11560 above. */
11561 if (pmop[0] != TREE_OPERAND (arg0, 0)
11562 || (TREE_CODE (arg0) != NEGATE_EXPR
11563 && pmop[1] != TREE_OPERAND (arg0, 1)))
11565 tree utype = TREE_TYPE (arg0);
11566 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11568 /* Perform the operations in a type that has defined
11569 overflow behavior. */
11570 utype = unsigned_type_for (TREE_TYPE (arg0));
11571 if (pmop[0] != NULL)
11572 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11573 if (pmop[1] != NULL)
11574 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11577 if (TREE_CODE (arg0) == NEGATE_EXPR)
11578 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11579 else if (TREE_CODE (arg0) == PLUS_EXPR)
11581 if (pmop[0] != NULL && pmop[1] != NULL)
11582 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11583 pmop[0], pmop[1]);
11584 else if (pmop[0] != NULL)
11585 tem = pmop[0];
11586 else if (pmop[1] != NULL)
11587 tem = pmop[1];
11588 else
11589 return build_int_cst (type, 0);
11591 else if (pmop[0] == NULL)
11592 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11593 else
11594 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11595 pmop[0], pmop[1]);
11596 /* TEM is now the new binary +, - or unary - replacement. */
11597 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11598 fold_convert_loc (loc, utype, arg1));
11599 return fold_convert_loc (loc, type, tem);
11604 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11605 if (t1 != NULL_TREE)
11606 return t1;
11607 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11608 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11609 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11611 unsigned int prec
11612 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11614 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11615 && (~TREE_INT_CST_LOW (arg1)
11616 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11617 return
11618 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11621 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11623 This results in more efficient code for machines without a NOR
11624 instruction. Combine will canonicalize to the first form,
11625 which will allow the use of NOR instructions provided by the
11626 backend if they exist. */
11627 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11628 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11630 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11631 build2 (BIT_IOR_EXPR, type,
11632 fold_convert_loc (loc, type,
11633 TREE_OPERAND (arg0, 0)),
11634 fold_convert_loc (loc, type,
11635 TREE_OPERAND (arg1, 0))));
11638 /* If arg0 is derived from the address of an object or function, we may
11639 be able to fold this expression using the object or function's
11640 alignment. */
11641 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11643 unsigned HOST_WIDE_INT modulus, residue;
11644 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11646 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11647 integer_onep (arg1));
11649 /* This works because modulus is a power of 2. If this weren't the
11650 case, we'd have to replace it by its greatest power-of-2
11651 divisor: modulus & -modulus. */
11652 if (low < modulus)
11653 return build_int_cst (type, residue & low);
11656 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11657 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11658 if the new mask might be further optimized. */
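   /* E.g. for a 32-bit (X << 8) & 0xFFFFFF00: the low 8 bits of
      X << 8 are already zero, so the mask widens to 0xFFFFFFFF and
      the AND folds away entirely.  */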
11659 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11660 || TREE_CODE (arg0) == RSHIFT_EXPR)
11661 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11662 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11663 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11664 < TYPE_PRECISION (TREE_TYPE (arg0))
11665 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11666 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11668 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11669 unsigned HOST_WIDE_INT mask
11670 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11671 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11672 tree shift_type = TREE_TYPE (arg0);
11674 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11675 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11676 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11677 && TYPE_PRECISION (TREE_TYPE (arg0))
11678 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11680 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11681 tree arg00 = TREE_OPERAND (arg0, 0);
11682 /* See if more bits can be proven as zero because of
11683 zero extension. */
11684 if (TREE_CODE (arg00) == NOP_EXPR
11685 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11687 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11688 if (TYPE_PRECISION (inner_type)
11689 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11690 && TYPE_PRECISION (inner_type) < prec)
11692 prec = TYPE_PRECISION (inner_type);
11693 /* See if we can shorten the right shift. */
11694 if (shiftc < prec)
11695 shift_type = inner_type;
11698 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11699 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11700 zerobits <<= prec - shiftc;
11701 /* For an arithmetic shift, if the sign bit could be set, zerobits
11702 can actually contain sign bits, so no transformation is
11703 possible, unless MASK masks them all away. In that
11704 case the shift needs to be converted into logical shift. */
11705 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11706 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11708 if ((mask & zerobits) == 0)
11709 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11710 else
11711 zerobits = 0;
11715 /* ((X << 16) & 0xff00) is (X, 0). */
11716 if ((mask & zerobits) == mask)
11717 return omit_one_operand_loc (loc, type,
11718 build_int_cst (type, 0), arg0);
11720 newmask = mask | zerobits;
11721 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11723 unsigned int prec;
11725 /* Only do the transformation if NEWMASK is some integer
11726 mode's mask. */
11727 for (prec = BITS_PER_UNIT;
11728 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11729 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11730 break;
11731 if (prec < HOST_BITS_PER_WIDE_INT
11732 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11734 tree newmaskt;
11736 if (shift_type != TREE_TYPE (arg0))
11738 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11739 fold_convert_loc (loc, shift_type,
11740 TREE_OPERAND (arg0, 0)),
11741 TREE_OPERAND (arg0, 1));
11742 tem = fold_convert_loc (loc, type, tem);
11744 else
11745 tem = op0;
11746 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11747 if (!tree_int_cst_equal (newmaskt, arg1))
11748 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11753 goto associate;
11755 case RDIV_EXPR:
11756 /* Don't touch a floating-point divide by zero unless the mode
11757 of the constant can represent infinity. */
11758 if (TREE_CODE (arg1) == REAL_CST
11759 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11760 && real_zerop (arg1))
11761 return NULL_TREE;
11763 /* Optimize A / A to 1.0 if we don't care about
11764 NaNs or Infinities. Skip the transformation
11765 for non-real operands. */
11766 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11767 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11768 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11769 && operand_equal_p (arg0, arg1, 0))
11771 tree r = build_real (TREE_TYPE (arg0), dconst1);
11773 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11776 /* The complex version of the above A / A optimization. */
11777 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11778 && operand_equal_p (arg0, arg1, 0))
11780 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11781 if (! HONOR_NANS (TYPE_MODE (elem_type))
11782 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11784 tree r = build_real (elem_type, dconst1);
11785 /* omit_two_operands will call fold_convert for us. */
11786 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11790 /* (-A) / (-B) -> A / B */
11791 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11792 return fold_build2_loc (loc, RDIV_EXPR, type,
11793 TREE_OPERAND (arg0, 0),
11794 negate_expr (arg1));
11795 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11796 return fold_build2_loc (loc, RDIV_EXPR, type,
11797 negate_expr (arg0),
11798 TREE_OPERAND (arg1, 0));
11800 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11801 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11802 && real_onep (arg1))
11803 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11805 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11806 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11807 && real_minus_onep (arg1))
11808 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11809 negate_expr (arg0)));
11811 /* If ARG1 is a constant, we can convert this to a multiply by the
11812 reciprocal. This does not have the same rounding properties,
11813 so only do this if -freciprocal-math. We can actually
11814 always safely do it if ARG1 is a power of two, but it's hard to
11815 tell if it is or not in a portable manner. */
11816 if (optimize
11817 && (TREE_CODE (arg1) == REAL_CST
11818 || (TREE_CODE (arg1) == COMPLEX_CST
11819 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11820 || (TREE_CODE (arg1) == VECTOR_CST
11821 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11823 if (flag_reciprocal_math
11824 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11825 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11826 /* Find the reciprocal if optimizing and the result is exact.
11827 TODO: Complex reciprocal not implemented. */
11828 if (TREE_CODE (arg1) != COMPLEX_CST)
11830 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11832 if (inverse)
11833 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11836 /* Convert A/B/C to A/(B*C). */
11837 if (flag_reciprocal_math
11838 && TREE_CODE (arg0) == RDIV_EXPR)
11839 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11840 fold_build2_loc (loc, MULT_EXPR, type,
11841 TREE_OPERAND (arg0, 1), arg1));
11843 /* Convert A/(B/C) to (A/B)*C. */
11844 if (flag_reciprocal_math
11845 && TREE_CODE (arg1) == RDIV_EXPR)
11846 return fold_build2_loc (loc, MULT_EXPR, type,
11847 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11848 TREE_OPERAND (arg1, 0)),
11849 TREE_OPERAND (arg1, 1));
11851 /* Convert C1/(X*C2) into (C1/C2)/X. */
11852 if (flag_reciprocal_math
11853 && TREE_CODE (arg1) == MULT_EXPR
11854 && TREE_CODE (arg0) == REAL_CST
11855 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11857 tree tem = const_binop (RDIV_EXPR, arg0,
11858 TREE_OPERAND (arg1, 1));
11859 if (tem)
11860 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11861 TREE_OPERAND (arg1, 0));
11864 if (flag_unsafe_math_optimizations)
11866 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11867 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11869 /* Optimize sin(x)/cos(x) as tan(x). */
11870 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11871 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11872 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11873 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11874 CALL_EXPR_ARG (arg1, 0), 0))
11876 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11878 if (tanfn != NULL_TREE)
11879 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11882 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11883 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11884 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11885 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11886 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11887 CALL_EXPR_ARG (arg1, 0), 0))
11889 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11891 if (tanfn != NULL_TREE)
11893 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11894 CALL_EXPR_ARG (arg0, 0));
11895 return fold_build2_loc (loc, RDIV_EXPR, type,
11896 build_real (type, dconst1), tmp);
11900 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11901 NaNs or Infinities. */
11902 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11903 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11904 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11906 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11907 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11909 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11910 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11911 && operand_equal_p (arg00, arg01, 0))
11913 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11915 if (cosfn != NULL_TREE)
11916 return build_call_expr_loc (loc, cosfn, 1, arg00);
11920 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11921 NaNs or Infinities. */
11922 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11923 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11924 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11926 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11927 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11929 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11930 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11931 && operand_equal_p (arg00, arg01, 0))
11933 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11935 if (cosfn != NULL_TREE)
11937 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11938 return fold_build2_loc (loc, RDIV_EXPR, type,
11939 build_real (type, dconst1),
11940 tmp);
11945 /* Optimize pow(x,c)/x as pow(x,c-1). */
11946 if (fcode0 == BUILT_IN_POW
11947 || fcode0 == BUILT_IN_POWF
11948 || fcode0 == BUILT_IN_POWL)
11950 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11951 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11952 if (TREE_CODE (arg01) == REAL_CST
11953 && !TREE_OVERFLOW (arg01)
11954 && operand_equal_p (arg1, arg00, 0))
11956 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11957 REAL_VALUE_TYPE c;
11958 tree arg;
11960 c = TREE_REAL_CST (arg01);
11961 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11962 arg = build_real (type, c);
11963 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11967 /* Optimize a/root(b/c) into a*root(c/b). */
11968 if (BUILTIN_ROOT_P (fcode1))
11970 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11972 if (TREE_CODE (rootarg) == RDIV_EXPR)
11974 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11975 tree b = TREE_OPERAND (rootarg, 0);
11976 tree c = TREE_OPERAND (rootarg, 1);
11978 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11980 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11981 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11985 /* Optimize x/expN(y) into x*expN(-y). */
11986 if (BUILTIN_EXPONENT_P (fcode1))
11988 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11989 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11990 arg1 = build_call_expr_loc (loc,
11991 expfn, 1,
11992 fold_convert_loc (loc, type, arg));
11993 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11996 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11997 if (fcode1 == BUILT_IN_POW
11998 || fcode1 == BUILT_IN_POWF
11999 || fcode1 == BUILT_IN_POWL)
12001 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12002 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12003 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12004 tree neg11 = fold_convert_loc (loc, type,
12005 negate_expr (arg11));
12006 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12007 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12010 return NULL_TREE;
12012 case TRUNC_DIV_EXPR:
12013 /* Optimize (X & (-A)) / A where A is a power of 2,
12014 to X >> log2(A) */
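   /* E.g. (X & -8) / 8 -> X >> 3: the AND only clears the three bits
      the shift discards, and for exact multiples of 8 the truncating
      division equals the arithmetic right shift.  */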
12015 if (TREE_CODE (arg0) == BIT_AND_EXPR
12016 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12017 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12019 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12020 arg1, TREE_OPERAND (arg0, 1));
12021 if (sum && integer_zerop (sum)) {
12022 unsigned long pow2;
12024 if (TREE_INT_CST_LOW (arg1))
12025 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12026 else
12027 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12028 + HOST_BITS_PER_WIDE_INT;
12030 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12031 TREE_OPERAND (arg0, 0),
12032 build_int_cst (integer_type_node, pow2));
12036 /* Fall through */
12038 case FLOOR_DIV_EXPR:
12039 /* Simplify A / (B << N) where A and B are positive and B is
12040 a power of 2, to A >> (N + log2(B)). */
12041 strict_overflow_p = false;
12042 if (TREE_CODE (arg1) == LSHIFT_EXPR
12043 && (TYPE_UNSIGNED (type)
12044 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12046 tree sval = TREE_OPERAND (arg1, 0);
12047 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12049 tree sh_cnt = TREE_OPERAND (arg1, 1);
12050 unsigned long pow2;
12052 if (TREE_INT_CST_LOW (sval))
12053 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12054 else
12055 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12056 + HOST_BITS_PER_WIDE_INT;
12058 if (strict_overflow_p)
12059 fold_overflow_warning (("assuming signed overflow does not "
12060 "occur when simplifying A / (B << N)"),
12061 WARN_STRICT_OVERFLOW_MISC);
12063 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12064 sh_cnt,
12065 build_int_cst (TREE_TYPE (sh_cnt),
12066 pow2));
12067 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12068 fold_convert_loc (loc, type, arg0), sh_cnt);
12072 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12073 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12074 if (INTEGRAL_TYPE_P (type)
12075 && TYPE_UNSIGNED (type)
12076 && code == FLOOR_DIV_EXPR)
12077 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12079 /* Fall through */
12081 case ROUND_DIV_EXPR:
12082 case CEIL_DIV_EXPR:
12083 case EXACT_DIV_EXPR:
12084 if (integer_onep (arg1))
12085 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12086 if (integer_zerop (arg1))
12087 return NULL_TREE;
12088 /* X / -1 is -X. */
12089 if (!TYPE_UNSIGNED (type)
12090 && TREE_CODE (arg1) == INTEGER_CST
12091 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12092 && TREE_INT_CST_HIGH (arg1) == -1)
12093 return fold_convert_loc (loc, type, negate_expr (arg0));
12095 /* Convert -A / -B to A / B when the type is signed and overflow is
12096 undefined. */
12097 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12098 && TREE_CODE (arg0) == NEGATE_EXPR
12099 && negate_expr_p (arg1))
12101 if (INTEGRAL_TYPE_P (type))
12102 fold_overflow_warning (("assuming signed overflow does not occur "
12103 "when distributing negation across "
12104 "division"),
12105 WARN_STRICT_OVERFLOW_MISC);
12106 return fold_build2_loc (loc, code, type,
12107 fold_convert_loc (loc, type,
12108 TREE_OPERAND (arg0, 0)),
12109 fold_convert_loc (loc, type,
12110 negate_expr (arg1)));
12112 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12113 && TREE_CODE (arg1) == NEGATE_EXPR
12114 && negate_expr_p (arg0))
12116 if (INTEGRAL_TYPE_P (type))
12117 fold_overflow_warning (("assuming signed overflow does not occur "
12118 "when distributing negation across "
12119 "division"),
12120 WARN_STRICT_OVERFLOW_MISC);
12121 return fold_build2_loc (loc, code, type,
12122 fold_convert_loc (loc, type,
12123 negate_expr (arg0)),
12124 fold_convert_loc (loc, type,
12125 TREE_OPERAND (arg1, 0)));
12128 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12129 operation, EXACT_DIV_EXPR.
12131 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12132 At one time others generated faster code, but it's not clear if they
12133 do after the last round of changes to the DIV code in expmed.c. */
12134 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12135 && multiple_of_p (type, arg0, arg1))
12136 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12138 strict_overflow_p = false;
12139 if (TREE_CODE (arg1) == INTEGER_CST
12140 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12141 &strict_overflow_p)))
12143 if (strict_overflow_p)
12144 fold_overflow_warning (("assuming signed overflow does not occur "
12145 "when simplifying division"),
12146 WARN_STRICT_OVERFLOW_MISC);
12147 return fold_convert_loc (loc, type, tem);
12150 return NULL_TREE;
12152 case CEIL_MOD_EXPR:
12153 case FLOOR_MOD_EXPR:
12154 case ROUND_MOD_EXPR:
12155 case TRUNC_MOD_EXPR:
12156 /* X % 1 is always zero, but be sure to preserve any side
12157 effects in X. */
12158 if (integer_onep (arg1))
12159 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12161 /* For X % 0, return X % 0 unchanged so that we can get the
12162 proper warnings and errors. */
12163 if (integer_zerop (arg1))
12164 return NULL_TREE;
12166 /* 0 % X is always zero, but be sure to preserve any side
12167 effects in X. Place this after checking for X == 0. */
12168 if (integer_zerop (arg0))
12169 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12171 /* X % -1 is zero. */
12172 if (!TYPE_UNSIGNED (type)
12173 && TREE_CODE (arg1) == INTEGER_CST
12174 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12175 && TREE_INT_CST_HIGH (arg1) == -1)
12176 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12178 /* X % -C is the same as X % C. */
12179 if (code == TRUNC_MOD_EXPR
12180 && !TYPE_UNSIGNED (type)
12181 && TREE_CODE (arg1) == INTEGER_CST
12182 && !TREE_OVERFLOW (arg1)
12183 && TREE_INT_CST_HIGH (arg1) < 0
12184 && !TYPE_OVERFLOW_TRAPS (type)
12185 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12186 && !sign_bit_p (arg1, arg1))
12187 return fold_build2_loc (loc, code, type,
12188 fold_convert_loc (loc, type, arg0),
12189 fold_convert_loc (loc, type,
12190 negate_expr (arg1)));
12192 /* X % -Y is the same as X % Y. */
12193 if (code == TRUNC_MOD_EXPR
12194 && !TYPE_UNSIGNED (type)
12195 && TREE_CODE (arg1) == NEGATE_EXPR
12196 && !TYPE_OVERFLOW_TRAPS (type))
12197 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12198 fold_convert_loc (loc, type,
12199 TREE_OPERAND (arg1, 0)));
12201 strict_overflow_p = false;
12202 if (TREE_CODE (arg1) == INTEGER_CST
12203 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12204 &strict_overflow_p)))
12206 if (strict_overflow_p)
12207 fold_overflow_warning (("assuming signed overflow does not occur "
12208 "when simplifying modulus"),
12209 WARN_STRICT_OVERFLOW_MISC);
12210 return fold_convert_loc (loc, type, tem);
12213 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12214 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
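   /* E.g. X % 8 -> X & 7 for unsigned X, since the remainder modulo a
      power of two is just the low-order bits.  */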
12215 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12216 && (TYPE_UNSIGNED (type)
12217 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12219 tree c = arg1;
12220 /* Also optimize A % (C << N) where C is a power of 2,
12221 to A & ((C << N) - 1). */
12222 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12223 c = TREE_OPERAND (arg1, 0);
12225 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12227 tree mask
12228 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12229 build_int_cst (TREE_TYPE (arg1), 1));
12230 if (strict_overflow_p)
12231 fold_overflow_warning (("assuming signed overflow does not "
12232 "occur when simplifying "
12233 "X % (power of two)"),
12234 WARN_STRICT_OVERFLOW_MISC);
12235 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12236 fold_convert_loc (loc, type, arg0),
12237 fold_convert_loc (loc, type, mask));
12241 return NULL_TREE;
12243 case LROTATE_EXPR:
12244 case RROTATE_EXPR:
12245 if (integer_all_onesp (arg0))
12246 return omit_one_operand_loc (loc, type, arg0, arg1);
12247 goto shift;
12249 case RSHIFT_EXPR:
12250 /* Optimize -1 >> x for arithmetic right shifts. */
12251 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12252 && tree_expr_nonnegative_p (arg1))
12253 return omit_one_operand_loc (loc, type, arg0, arg1);
12254 /* ... fall through ... */
12256 case LSHIFT_EXPR:
12257 shift:
12258 if (integer_zerop (arg1))
12259 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12260 if (integer_zerop (arg0))
12261 return omit_one_operand_loc (loc, type, arg0, arg1);
12263 /* Since negative shift count is not well-defined,
12264 don't try to compute it in the compiler. */
12265 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12266 return NULL_TREE;
12268 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12269 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12270 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12271 && host_integerp (TREE_OPERAND (arg0, 1), false)
12272 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12274 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12275 + TREE_INT_CST_LOW (arg1));
12277 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12278 being well defined. */
12279 if (low >= TYPE_PRECISION (type))
12281 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12282 low = low % TYPE_PRECISION (type);
12283 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12284 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12285 TREE_OPERAND (arg0, 0));
12286 else
12287 low = TYPE_PRECISION (type) - 1;
12290 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12291 build_int_cst (type, low));
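/* For instance, "(x << 3) << 5" folds to "x << 8" here, while for 32-bit
   unsigned x "(x << 20) << 20" folds to 0 because the combined count
   reaches the width (rotate counts are instead reduced modulo the width).  */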
12294 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12295 into x & ((unsigned)-1 >> c) for unsigned types. */
12296 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12297 || (TYPE_UNSIGNED (type)
12298 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12299 && host_integerp (arg1, false)
12300 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12301 && host_integerp (TREE_OPERAND (arg0, 1), false)
12302 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12304 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12305 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12306 tree lshift;
12307 tree arg00;
12309 if (low0 == low1)
12311 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12313 lshift = build_int_cst (type, -1);
12314 lshift = int_const_binop (code, lshift, arg1);
12316 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12320 /* Rewrite an LROTATE_EXPR by a constant into an
12321 RROTATE_EXPR by a new constant. */
12322 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12324 tree tem = build_int_cst (TREE_TYPE (arg1),
12325 TYPE_PRECISION (type));
12326 tem = const_binop (MINUS_EXPR, tem, arg1);
12327 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
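/* For instance, a 32-bit left rotate by 8 is rewritten as a right
   rotate by 24.  */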
12330 /* If we have a rotate of a bit operation with the rotate count and
12331 the second operand of the bit operation both constant,
12332 permute the two operations. */
12333 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12334 && (TREE_CODE (arg0) == BIT_AND_EXPR
12335 || TREE_CODE (arg0) == BIT_IOR_EXPR
12336 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12338 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12339 fold_build2_loc (loc, code, type,
12340 TREE_OPERAND (arg0, 0), arg1),
12341 fold_build2_loc (loc, code, type,
12342 TREE_OPERAND (arg0, 1), arg1));
12344 /* Two consecutive rotates adding up to the precision of the
12345 type can be ignored. */
12346 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12347 && TREE_CODE (arg0) == RROTATE_EXPR
12348 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12349 && TREE_INT_CST_HIGH (arg1) == 0
12350 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12351 && ((TREE_INT_CST_LOW (arg1)
12352 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12353 == (unsigned int) TYPE_PRECISION (type)))
12354 return TREE_OPERAND (arg0, 0);
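/* For instance, rotating a 32-bit value right by 8 and then by 24
   yields the original value.  */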
12356 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12357 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12358 if the latter can be further optimized. */
12359 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12360 && TREE_CODE (arg0) == BIT_AND_EXPR
12361 && TREE_CODE (arg1) == INTEGER_CST
12362 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12364 tree mask = fold_build2_loc (loc, code, type,
12365 fold_convert_loc (loc, type,
12366 TREE_OPERAND (arg0, 1)),
12367 arg1);
12368 tree shift = fold_build2_loc (loc, code, type,
12369 fold_convert_loc (loc, type,
12370 TREE_OPERAND (arg0, 0)),
12371 arg1);
12372 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12373 if (tem)
12374 return tem;
12377 return NULL_TREE;
12379 case MIN_EXPR:
12380 if (operand_equal_p (arg0, arg1, 0))
12381 return omit_one_operand_loc (loc, type, arg0, arg1);
12382 if (INTEGRAL_TYPE_P (type)
12383 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12384 return omit_one_operand_loc (loc, type, arg1, arg0);
12385 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12386 if (tem)
12387 return tem;
12388 goto associate;
12390 case MAX_EXPR:
12391 if (operand_equal_p (arg0, arg1, 0))
12392 return omit_one_operand_loc (loc, type, arg0, arg1);
12393 if (INTEGRAL_TYPE_P (type)
12394 && TYPE_MAX_VALUE (type)
12395 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12396 return omit_one_operand_loc (loc, type, arg1, arg0);
12397 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12398 if (tem)
12399 return tem;
12400 goto associate;
12402 case TRUTH_ANDIF_EXPR:
12403 /* Note that the operands of this must be ints
12404 and their values must be 0 or 1.
12405 ("true" is a fixed value perhaps depending on the language.) */
12406 /* If first arg is constant zero, return it. */
12407 if (integer_zerop (arg0))
12408 return fold_convert_loc (loc, type, arg0);
12409 case TRUTH_AND_EXPR:
12410 /* If either arg is constant true, drop it. */
12411 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12413 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12414 /* Preserve sequence points. */
12415 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12416 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12417 /* If second arg is constant zero, result is zero, but first arg
12418 must be evaluated. */
12419 if (integer_zerop (arg1))
12420 return omit_one_operand_loc (loc, type, arg1, arg0);
12421 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12422 case will be handled here. */
12423 if (integer_zerop (arg0))
12424 return omit_one_operand_loc (loc, type, arg0, arg1);
12426 /* !X && X is always false. */
12427 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12428 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12429 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12430 /* X && !X is always false. */
12431 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12433 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12435 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12436 means A >= Y && A != MAX, but in this case we know that
12437 A < X <= MAX. */
12439 if (!TREE_SIDE_EFFECTS (arg0)
12440 && !TREE_SIDE_EFFECTS (arg1))
12442 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12443 if (tem && !operand_equal_p (tem, arg0, 0))
12444 return fold_build2_loc (loc, code, type, tem, arg1);
12446 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12447 if (tem && !operand_equal_p (tem, arg1, 0))
12448 return fold_build2_loc (loc, code, type, arg0, tem);
12451 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12452 != NULL_TREE)
12453 return tem;
12455 return NULL_TREE;
12457 case TRUTH_ORIF_EXPR:
12458 /* Note that the operands of this must be ints
12459 and their values must be 0 or true.
12460 ("true" is a fixed value perhaps depending on the language.) */
12461 /* If first arg is constant true, return it. */
12462 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12463 return fold_convert_loc (loc, type, arg0);
12464 case TRUTH_OR_EXPR:
12465 /* If either arg is constant zero, drop it. */
12466 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12467 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12468 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12469 /* Preserve sequence points. */
12470 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12471 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12472 /* If second arg is constant true, result is true, but we must
12473 evaluate first arg. */
12474 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12475 return omit_one_operand_loc (loc, type, arg1, arg0);
12476 /* Likewise for first arg, but note this only occurs here for
12477 TRUTH_OR_EXPR. */
12478 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12479 return omit_one_operand_loc (loc, type, arg0, arg1);
12481 /* !X || X is always true. */
12482 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12483 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12484 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12485 /* X || !X is always true. */
12486 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12487 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12488 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12490 /* (X && !Y) || (!X && Y) is X ^ Y */
12491 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12492 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12494 tree a0, a1, l0, l1, n0, n1;
12496 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12497 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12499 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12500 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12502 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12503 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12505 if ((operand_equal_p (n0, a0, 0)
12506 && operand_equal_p (n1, a1, 0))
12507 || (operand_equal_p (n0, a1, 0)
12508 && operand_equal_p (n1, a0, 0)))
12509 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12512 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12513 != NULL_TREE)
12514 return tem;
12516 return NULL_TREE;
12518 case TRUTH_XOR_EXPR:
12519 /* If the second arg is constant zero, drop it. */
12520 if (integer_zerop (arg1))
12521 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12522 /* If the second arg is constant true, this is a logical inversion. */
12523 if (integer_onep (arg1))
12525 /* Only call invert_truthvalue if operand is a truth value. */
12526 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12527 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12528 else
12529 tem = invert_truthvalue_loc (loc, arg0);
12530 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12532 /* Identical arguments cancel to zero. */
12533 if (operand_equal_p (arg0, arg1, 0))
12534 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12536 /* !X ^ X is always true. */
12537 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12538 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12539 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12541 /* X ^ !X is always true. */
12542 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12543 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12544 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12546 return NULL_TREE;
12548 case EQ_EXPR:
12549 case NE_EXPR:
12550 STRIP_NOPS (arg0);
12551 STRIP_NOPS (arg1);
12553 tem = fold_comparison (loc, code, type, op0, op1);
12554 if (tem != NULL_TREE)
12555 return tem;
12557 /* bool_var != 0 becomes bool_var. */
12558 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12559 && code == NE_EXPR)
12560 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12562 /* bool_var == 1 becomes bool_var. */
12563 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12564 && code == EQ_EXPR)
12565 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12567 /* bool_var != 1 becomes !bool_var. */
12568 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12569 && code == NE_EXPR)
12570 return fold_convert_loc (loc, type,
12571 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12572 TREE_TYPE (arg0), arg0));
12574 /* bool_var == 0 becomes !bool_var. */
12575 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12576 && code == EQ_EXPR)
12577 return fold_convert_loc (loc, type,
12578 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12579 TREE_TYPE (arg0), arg0));
12581 /* !exp != 0 becomes !exp */
12582 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12583 && code == NE_EXPR)
12584 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12586 /* If this is an equality comparison of the address of two non-weak,
12587 unaliased symbols neither of which are extern (since we do not
12588 have access to attributes for externs), then we know the result. */
12589 if (TREE_CODE (arg0) == ADDR_EXPR
12590 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12591 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12592 && ! lookup_attribute ("alias",
12593 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12594 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12595 && TREE_CODE (arg1) == ADDR_EXPR
12596 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12597 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12598 && ! lookup_attribute ("alias",
12599 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12600 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12602 /* We know that we're looking at the address of two
12603 non-weak, unaliased, static _DECL nodes.
12605 It is both wasteful and incorrect to call operand_equal_p
12606 to compare the two ADDR_EXPR nodes. It is wasteful in that
12607 all we need to do is test pointer equality for the arguments
12608 to the two ADDR_EXPR nodes. It is incorrect to use
12609 operand_equal_p as that function is NOT equivalent to a
12610 C equality test. It can in fact return false for two
12611 objects which would test as equal using the C equality
12612 operator. */
12613 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12614 return constant_boolean_node (equal
12615 ? code == EQ_EXPR : code != EQ_EXPR,
12616 type);
12619 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12620 a MINUS_EXPR of a constant, we can convert it into a comparison with
12621 a revised constant as long as no overflow occurs. */
12622 if (TREE_CODE (arg1) == INTEGER_CST
12623 && (TREE_CODE (arg0) == PLUS_EXPR
12624 || TREE_CODE (arg0) == MINUS_EXPR)
12625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12626 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12627 ? MINUS_EXPR : PLUS_EXPR,
12628 fold_convert_loc (loc, TREE_TYPE (arg0),
12629 arg1),
12630 TREE_OPERAND (arg0, 1)))
12631 && !TREE_OVERFLOW (tem))
12632 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12634 /* Similarly for a NEGATE_EXPR. */
12635 if (TREE_CODE (arg0) == NEGATE_EXPR
12636 && TREE_CODE (arg1) == INTEGER_CST
12637 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12638 arg1)))
12639 && TREE_CODE (tem) == INTEGER_CST
12640 && !TREE_OVERFLOW (tem))
12641 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12643 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12644 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12645 && TREE_CODE (arg1) == INTEGER_CST
12646 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12647 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12648 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12649 fold_convert_loc (loc,
12650 TREE_TYPE (arg0),
12651 arg1),
12652 TREE_OPERAND (arg0, 1)));
12654 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12655 if ((TREE_CODE (arg0) == PLUS_EXPR
12656 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12657 || TREE_CODE (arg0) == MINUS_EXPR)
12658 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12659 0)),
12660 arg1, 0)
12661 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12662 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12664 tree val = TREE_OPERAND (arg0, 1);
12665 return omit_two_operands_loc (loc, type,
12666 fold_build2_loc (loc, code, type,
12667 val,
12668 build_int_cst (TREE_TYPE (val),
12669 0)),
12670 TREE_OPERAND (arg0, 0), arg1);
12673 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12674 if (TREE_CODE (arg0) == MINUS_EXPR
12675 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12676 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12677 1)),
12678 arg1, 0)
12679 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12681 return omit_two_operands_loc (loc, type,
12682 code == NE_EXPR
12683 ? boolean_true_node : boolean_false_node,
12684 TREE_OPERAND (arg0, 1), arg1);
12687 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12688 for !=. Don't do this for ordered comparisons due to overflow. */
12689 if (TREE_CODE (arg0) == MINUS_EXPR
12690 && integer_zerop (arg1))
12691 return fold_build2_loc (loc, code, type,
12692 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12694 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12695 if (TREE_CODE (arg0) == ABS_EXPR
12696 && (integer_zerop (arg1) || real_zerop (arg1)))
12697 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12699 /* If this is an EQ or NE comparison with zero and ARG0 is
12700 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12701 two operations, but the latter can be done in one less insn
12702 on machines that have only two-operand insns or on which a
12703 constant cannot be the first operand. */
12704 if (TREE_CODE (arg0) == BIT_AND_EXPR
12705 && integer_zerop (arg1))
12707 tree arg00 = TREE_OPERAND (arg0, 0);
12708 tree arg01 = TREE_OPERAND (arg0, 1);
12709 if (TREE_CODE (arg00) == LSHIFT_EXPR
12710 && integer_onep (TREE_OPERAND (arg00, 0)))
12712 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12713 arg01, TREE_OPERAND (arg00, 1));
12714 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12715 build_int_cst (TREE_TYPE (arg0), 1));
12716 return fold_build2_loc (loc, code, type,
12717 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12718 arg1);
12720 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12721 && integer_onep (TREE_OPERAND (arg01, 0)))
12723 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12724 arg00, TREE_OPERAND (arg01, 1));
12725 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12726 build_int_cst (TREE_TYPE (arg0), 1));
12727 return fold_build2_loc (loc, code, type,
12728 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12729 arg1);
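/* For instance, "((1 << n) & x) == 0" becomes "((x >> n) & 1) == 0"
   under this transform.  */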
12733 /* If this is an NE or EQ comparison of zero against the result of a
12734 signed MOD operation whose second operand is a power of 2, make
12735 the MOD operation unsigned since it is simpler and equivalent. */
12736 if (integer_zerop (arg1)
12737 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12738 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12739 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12740 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12741 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12742 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12744 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12745 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12746 fold_convert_loc (loc, newtype,
12747 TREE_OPERAND (arg0, 0)),
12748 fold_convert_loc (loc, newtype,
12749 TREE_OPERAND (arg0, 1)));
12751 return fold_build2_loc (loc, code, type, newmod,
12752 fold_convert_loc (loc, newtype, arg1));
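/* For instance, "x % 4 == 0" for signed x is computed as
   "(unsigned) x % 4 == 0"; for a comparison against zero the two
   forms agree.  */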
12755 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12756 C1 is a valid shift constant, and C2 is a power of two, i.e.
12757 a single bit. */
12758 if (TREE_CODE (arg0) == BIT_AND_EXPR
12759 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12760 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12761 == INTEGER_CST
12762 && integer_pow2p (TREE_OPERAND (arg0, 1))
12763 && integer_zerop (arg1))
12765 tree itype = TREE_TYPE (arg0);
12766 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12767 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12769 /* Check for a valid shift count. */
12770 if (TREE_INT_CST_HIGH (arg001) == 0
12771 && TREE_INT_CST_LOW (arg001) < prec)
12773 tree arg01 = TREE_OPERAND (arg0, 1);
12774 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12775 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12776 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12777 can be rewritten as (X & (C2 << C1)) != 0. */
12778 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12780 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12781 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12782 return fold_build2_loc (loc, code, type, tem,
12783 fold_convert_loc (loc, itype, arg1));
12785 /* Otherwise, for signed (arithmetic) shifts,
12786 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12787 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12788 else if (!TYPE_UNSIGNED (itype))
12789 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12790 arg000, build_int_cst (itype, 0));
12791 /* Otherwise, for unsigned (logical) shifts,
12792 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12793 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12794 else
12795 return omit_one_operand_loc (loc, type,
12796 code == EQ_EXPR ? integer_one_node
12797 : integer_zero_node,
12798 arg000);
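/* For instance, "((x >> 2) & 4) != 0" becomes "(x & 16) != 0", while for
   32-bit signed x "((x >> 30) & 4) != 0" reduces to "x < 0", since only
   the sign bit can reach the tested position.  */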
12802 /* If we have (A & C) == C where C is a power of 2, convert this into
12803 (A & C) != 0. Similarly for NE_EXPR. */
12804 if (TREE_CODE (arg0) == BIT_AND_EXPR
12805 && integer_pow2p (TREE_OPERAND (arg0, 1))
12806 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12807 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12808 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12809 integer_zero_node));
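/* For instance, "(x & 8) == 8" becomes "(x & 8) != 0".  */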
12811 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12812 bit, then fold the expression into A < 0 or A >= 0. */
12813 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12814 if (tem)
12815 return tem;
12817 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12818 Similarly for NE_EXPR. */
12819 if (TREE_CODE (arg0) == BIT_AND_EXPR
12820 && TREE_CODE (arg1) == INTEGER_CST
12821 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12823 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12824 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12825 TREE_OPERAND (arg0, 1));
12826 tree dandnotc
12827 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12828 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12829 notc);
12830 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12831 if (integer_nonzerop (dandnotc))
12832 return omit_one_operand_loc (loc, type, rslt, arg0);
12835 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12836 Similarly for NE_EXPR. */
12837 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12838 && TREE_CODE (arg1) == INTEGER_CST
12839 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12841 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12842 tree candnotd
12843 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12844 TREE_OPERAND (arg0, 1),
12845 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12846 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12847 if (integer_nonzerop (candnotd))
12848 return omit_one_operand_loc (loc, type, rslt, arg0);
12851 /* If this is a comparison of a field, we may be able to simplify it. */
12852 if ((TREE_CODE (arg0) == COMPONENT_REF
12853 || TREE_CODE (arg0) == BIT_FIELD_REF)
12854 /* Handle the constant case even without -O
12855 to make sure the warnings are given. */
12856 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12858 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12859 if (t1)
12860 return t1;
12863 /* Optimize comparisons of strlen vs zero to a compare of the
12864 first character of the string vs zero. To wit,
12865 strlen(ptr) == 0 => *ptr == 0
12866 strlen(ptr) != 0 => *ptr != 0
12867 Other cases should reduce to one of these two (or a constant)
12868 due to the return value of strlen being unsigned. */
12869 if (TREE_CODE (arg0) == CALL_EXPR
12870 && integer_zerop (arg1))
12872 tree fndecl = get_callee_fndecl (arg0);
12874 if (fndecl
12875 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12876 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12877 && call_expr_nargs (arg0) == 1
12878 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12880 tree iref = build_fold_indirect_ref_loc (loc,
12881 CALL_EXPR_ARG (arg0, 0));
12882 return fold_build2_loc (loc, code, type, iref,
12883 build_int_cst (TREE_TYPE (iref), 0));
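/* For instance, "strlen (s) == 0" folds to "*s == 0".  */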
12887 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12888 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12889 if (TREE_CODE (arg0) == RSHIFT_EXPR
12890 && integer_zerop (arg1)
12891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12893 tree arg00 = TREE_OPERAND (arg0, 0);
12894 tree arg01 = TREE_OPERAND (arg0, 1);
12895 tree itype = TREE_TYPE (arg00);
12896 if (TREE_INT_CST_HIGH (arg01) == 0
12897 && TREE_INT_CST_LOW (arg01)
12898 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12900 if (TYPE_UNSIGNED (itype))
12902 itype = signed_type_for (itype);
12903 arg00 = fold_convert_loc (loc, itype, arg00);
12905 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12906 type, arg00, build_zero_cst (itype));
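/* For instance, "(x >> 31) != 0" for 32-bit signed x folds to "x < 0",
   and "(x >> 31) == 0" to "x >= 0".  */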
12910 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12911 if (integer_zerop (arg1)
12912 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12913 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12914 TREE_OPERAND (arg0, 1));
12916 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12917 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12918 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12919 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12920 build_zero_cst (TREE_TYPE (arg0)));
12921 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12922 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12923 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12924 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12925 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12926 build_zero_cst (TREE_TYPE (arg0)));
12928 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12929 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12930 && TREE_CODE (arg1) == INTEGER_CST
12931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12932 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12933 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12934 TREE_OPERAND (arg0, 1), arg1));
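/* For instance, "(x ^ 5) == 3" folds to "x == 6".  */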
12936 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12937 (X & C) == 0 when C is a single bit. */
12938 if (TREE_CODE (arg0) == BIT_AND_EXPR
12939 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12940 && integer_zerop (arg1)
12941 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12943 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12944 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12945 TREE_OPERAND (arg0, 1));
12946 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12947 type, tem,
12948 fold_convert_loc (loc, TREE_TYPE (arg0),
12949 arg1));
12952 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12953 constant C is a power of two, i.e. a single bit. */
12954 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12955 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12956 && integer_zerop (arg1)
12957 && integer_pow2p (TREE_OPERAND (arg0, 1))
12958 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12959 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12961 tree arg00 = TREE_OPERAND (arg0, 0);
12962 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12963 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12966 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12967 when C is a power of two, i.e. a single bit. */
12968 if (TREE_CODE (arg0) == BIT_AND_EXPR
12969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12970 && integer_zerop (arg1)
12971 && integer_pow2p (TREE_OPERAND (arg0, 1))
12972 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12973 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12975 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12976 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12977 arg000, TREE_OPERAND (arg0, 1));
12978 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12979 tem, build_int_cst (TREE_TYPE (tem), 0));
12982 if (integer_zerop (arg1)
12983 && tree_expr_nonzero_p (arg0))
12985 tree res = constant_boolean_node (code == NE_EXPR, type);
12986 return omit_one_operand_loc (loc, type, res, arg0);
12989 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12990 if (TREE_CODE (arg0) == NEGATE_EXPR
12991 && TREE_CODE (arg1) == NEGATE_EXPR)
12992 return fold_build2_loc (loc, code, type,
12993 TREE_OPERAND (arg0, 0),
12994 fold_convert_loc (loc, TREE_TYPE (arg0),
12995 TREE_OPERAND (arg1, 0)));
12997 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12998 if (TREE_CODE (arg0) == BIT_AND_EXPR
12999 && TREE_CODE (arg1) == BIT_AND_EXPR)
13001 tree arg00 = TREE_OPERAND (arg0, 0);
13002 tree arg01 = TREE_OPERAND (arg0, 1);
13003 tree arg10 = TREE_OPERAND (arg1, 0);
13004 tree arg11 = TREE_OPERAND (arg1, 1);
13005 tree itype = TREE_TYPE (arg0);
13007 if (operand_equal_p (arg01, arg11, 0))
13008 return fold_build2_loc (loc, code, type,
13009 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13010 fold_build2_loc (loc,
13011 BIT_XOR_EXPR, itype,
13012 arg00, arg10),
13013 arg01),
13014 build_zero_cst (itype));
13016 if (operand_equal_p (arg01, arg10, 0))
13017 return fold_build2_loc (loc, code, type,
13018 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13019 fold_build2_loc (loc,
13020 BIT_XOR_EXPR, itype,
13021 arg00, arg11),
13022 arg01),
13023 build_zero_cst (itype));
13025 if (operand_equal_p (arg00, arg11, 0))
13026 return fold_build2_loc (loc, code, type,
13027 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13028 fold_build2_loc (loc,
13029 BIT_XOR_EXPR, itype,
13030 arg01, arg10),
13031 arg00),
13032 build_zero_cst (itype));
13034 if (operand_equal_p (arg00, arg10, 0))
13035 return fold_build2_loc (loc, code, type,
13036 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13037 fold_build2_loc (loc,
13038 BIT_XOR_EXPR, itype,
13039 arg01, arg11),
13040 arg00),
13041 build_zero_cst (itype));
13044 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13045 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13047 tree arg00 = TREE_OPERAND (arg0, 0);
13048 tree arg01 = TREE_OPERAND (arg0, 1);
13049 tree arg10 = TREE_OPERAND (arg1, 0);
13050 tree arg11 = TREE_OPERAND (arg1, 1);
13051 tree itype = TREE_TYPE (arg0);
13053 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13054 operand_equal_p guarantees no side-effects so we don't need
13055 to use omit_one_operand on Z. */
13056 if (operand_equal_p (arg01, arg11, 0))
13057 return fold_build2_loc (loc, code, type, arg00,
13058 fold_convert_loc (loc, TREE_TYPE (arg00),
13059 arg10));
13060 if (operand_equal_p (arg01, arg10, 0))
13061 return fold_build2_loc (loc, code, type, arg00,
13062 fold_convert_loc (loc, TREE_TYPE (arg00),
13063 arg11));
13064 if (operand_equal_p (arg00, arg11, 0))
13065 return fold_build2_loc (loc, code, type, arg01,
13066 fold_convert_loc (loc, TREE_TYPE (arg01),
13067 arg10));
13068 if (operand_equal_p (arg00, arg10, 0))
13069 return fold_build2_loc (loc, code, type, arg01,
13070 fold_convert_loc (loc, TREE_TYPE (arg01),
13071 arg11));
13073 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13074 if (TREE_CODE (arg01) == INTEGER_CST
13075 && TREE_CODE (arg11) == INTEGER_CST)
13077 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13078 fold_convert_loc (loc, itype, arg11));
13079 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13080 return fold_build2_loc (loc, code, type, tem,
13081 fold_convert_loc (loc, itype, arg10));
13085 /* Attempt to simplify equality/inequality comparisons of complex
13086 values. Only lower the comparison if the result is known or
13087 can be simplified to a single scalar comparison. */
13088 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13089 || TREE_CODE (arg0) == COMPLEX_CST)
13090 && (TREE_CODE (arg1) == COMPLEX_EXPR
13091 || TREE_CODE (arg1) == COMPLEX_CST))
13093 tree real0, imag0, real1, imag1;
13094 tree rcond, icond;
13096 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13098 real0 = TREE_OPERAND (arg0, 0);
13099 imag0 = TREE_OPERAND (arg0, 1);
13101 else
13103 real0 = TREE_REALPART (arg0);
13104 imag0 = TREE_IMAGPART (arg0);
13107 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13109 real1 = TREE_OPERAND (arg1, 0);
13110 imag1 = TREE_OPERAND (arg1, 1);
13112 else
13114 real1 = TREE_REALPART (arg1);
13115 imag1 = TREE_IMAGPART (arg1);
13118 rcond = fold_binary_loc (loc, code, type, real0, real1);
13119 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13121 if (integer_zerop (rcond))
13123 if (code == EQ_EXPR)
13124 return omit_two_operands_loc (loc, type, boolean_false_node,
13125 imag0, imag1);
13126 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13128 else
13130 if (code == NE_EXPR)
13131 return omit_two_operands_loc (loc, type, boolean_true_node,
13132 imag0, imag1);
13133 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13137 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13138 if (icond && TREE_CODE (icond) == INTEGER_CST)
13140 if (integer_zerop (icond))
13142 if (code == EQ_EXPR)
13143 return omit_two_operands_loc (loc, type, boolean_false_node,
13144 real0, real1);
13145 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13147 else
13149 if (code == NE_EXPR)
13150 return omit_two_operands_loc (loc, type, boolean_true_node,
13151 real0, real1);
13152 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13157 return NULL_TREE;
13159 case LT_EXPR:
13160 case GT_EXPR:
13161 case LE_EXPR:
13162 case GE_EXPR:
13163 tem = fold_comparison (loc, code, type, op0, op1);
13164 if (tem != NULL_TREE)
13165 return tem;
13167 /* Transform comparisons of the form X +- C CMP X. */
13168 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13170 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13171 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13172 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13173 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13175 tree arg01 = TREE_OPERAND (arg0, 1);
13176 enum tree_code code0 = TREE_CODE (arg0);
13177 int is_positive;
13179 if (TREE_CODE (arg01) == REAL_CST)
13180 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13181 else
13182 is_positive = tree_int_cst_sgn (arg01);
13184 /* (X - c) > X becomes false. */
13185 if (code == GT_EXPR
13186 && ((code0 == MINUS_EXPR && is_positive >= 0)
13187 || (code0 == PLUS_EXPR && is_positive <= 0)))
13189 if (TREE_CODE (arg01) == INTEGER_CST
13190 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13191 fold_overflow_warning (("assuming signed overflow does not "
13192 "occur when assuming that (X - c) > X "
13193 "is always false"),
13194 WARN_STRICT_OVERFLOW_ALL);
13195 return constant_boolean_node (0, type);
13198 /* Likewise (X + c) < X becomes false. */
13199 if (code == LT_EXPR
13200 && ((code0 == PLUS_EXPR && is_positive >= 0)
13201 || (code0 == MINUS_EXPR && is_positive <= 0)))
13203 if (TREE_CODE (arg01) == INTEGER_CST
13204 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13205 fold_overflow_warning (("assuming signed overflow does not "
13206 "occur when assuming that "
13207 "(X + c) < X is always false"),
13208 WARN_STRICT_OVERFLOW_ALL);
13209 return constant_boolean_node (0, type);
13212 /* Convert (X - c) <= X to true. */
13213 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13214 && code == LE_EXPR
13215 && ((code0 == MINUS_EXPR && is_positive >= 0)
13216 || (code0 == PLUS_EXPR && is_positive <= 0)))
13218 if (TREE_CODE (arg01) == INTEGER_CST
13219 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13220 fold_overflow_warning (("assuming signed overflow does not "
13221 "occur when assuming that "
13222 "(X - c) <= X is always true"),
13223 WARN_STRICT_OVERFLOW_ALL);
13224 return constant_boolean_node (1, type);
13227 /* Convert (X + c) >= X to true. */
13228 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13229 && code == GE_EXPR
13230 && ((code0 == PLUS_EXPR && is_positive >= 0)
13231 || (code0 == MINUS_EXPR && is_positive <= 0)))
13233 if (TREE_CODE (arg01) == INTEGER_CST
13234 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13235 fold_overflow_warning (("assuming signed overflow does not "
13236 "occur when assuming that "
13237 "(X + c) >= X is always true"),
13238 WARN_STRICT_OVERFLOW_ALL);
13239 return constant_boolean_node (1, type);
13242 if (TREE_CODE (arg01) == INTEGER_CST)
13244 /* Convert X + c > X and X - c < X to true for integers. */
13245 if (code == GT_EXPR
13246 && ((code0 == PLUS_EXPR && is_positive > 0)
13247 || (code0 == MINUS_EXPR && is_positive < 0)))
13249 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13250 fold_overflow_warning (("assuming signed overflow does "
13251 "not occur when assuming that "
13252 "(X + c) > X is always true"),
13253 WARN_STRICT_OVERFLOW_ALL);
13254 return constant_boolean_node (1, type);
13257 if (code == LT_EXPR
13258 && ((code0 == MINUS_EXPR && is_positive > 0)
13259 || (code0 == PLUS_EXPR && is_positive < 0)))
13261 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13262 fold_overflow_warning (("assuming signed overflow does "
13263 "not occur when assuming that "
13264 "(X - c) < X is always true"),
13265 WARN_STRICT_OVERFLOW_ALL);
13266 return constant_boolean_node (1, type);
13269 /* Convert X + c <= X and X - c >= X to false for integers. */
13270 if (code == LE_EXPR
13271 && ((code0 == PLUS_EXPR && is_positive > 0)
13272 || (code0 == MINUS_EXPR && is_positive < 0)))
13274 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13275 fold_overflow_warning (("assuming signed overflow does "
13276 "not occur when assuming that "
13277 "(X + c) <= X is always false"),
13278 WARN_STRICT_OVERFLOW_ALL);
13279 return constant_boolean_node (0, type);
13282 if (code == GE_EXPR
13283 && ((code0 == MINUS_EXPR && is_positive > 0)
13284 || (code0 == PLUS_EXPR && is_positive < 0)))
13286 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13287 fold_overflow_warning (("assuming signed overflow does "
13288 "not occur when assuming that "
13289 "(X - c) >= X is always false"),
13290 WARN_STRICT_OVERFLOW_ALL);
13291 return constant_boolean_node (0, type);
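/* For instance, when signed overflow is undefined, "x + 1 > x" folds
   to true and "x - 1 >= x" folds to false for integral x.  */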
13296 /* Comparisons with the highest or lowest possible integer of
13297 the specified precision will have known values. */
13299 tree arg1_type = TREE_TYPE (arg1);
13300 unsigned int width = TYPE_PRECISION (arg1_type);
13302 if (TREE_CODE (arg1) == INTEGER_CST
13303 && width <= HOST_BITS_PER_DOUBLE_INT
13304 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13306 HOST_WIDE_INT signed_max_hi;
13307 unsigned HOST_WIDE_INT signed_max_lo;
13308 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13310 if (width <= HOST_BITS_PER_WIDE_INT)
13312 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13313 - 1;
13314 signed_max_hi = 0;
13315 max_hi = 0;
13317 if (TYPE_UNSIGNED (arg1_type))
13319 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13320 min_lo = 0;
13321 min_hi = 0;
13323 else
13325 max_lo = signed_max_lo;
13326 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13327 min_hi = -1;
13330 else
13332 width -= HOST_BITS_PER_WIDE_INT;
13333 signed_max_lo = -1;
13334 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13335 - 1;
13336 max_lo = -1;
13337 min_lo = 0;
13339 if (TYPE_UNSIGNED (arg1_type))
13341 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13342 min_hi = 0;
13344 else
13346 max_hi = signed_max_hi;
13347 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13351 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13352 && TREE_INT_CST_LOW (arg1) == max_lo)
13353 switch (code)
13355 case GT_EXPR:
13356 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13358 case GE_EXPR:
13359 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13361 case LE_EXPR:
13362 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13364 case LT_EXPR:
13365 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13367 /* The GE_EXPR and LT_EXPR cases above are not normally
13368 reached because of previous transformations. */
13370 default:
13371 break;
13373 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13374 == max_hi
13375 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13376 switch (code)
13378 case GT_EXPR:
13379 arg1 = const_binop (PLUS_EXPR, arg1,
13380 build_int_cst (TREE_TYPE (arg1), 1));
13381 return fold_build2_loc (loc, EQ_EXPR, type,
13382 fold_convert_loc (loc,
13383 TREE_TYPE (arg1), arg0),
13384 arg1);
13385 case LE_EXPR:
13386 arg1 = const_binop (PLUS_EXPR, arg1,
13387 build_int_cst (TREE_TYPE (arg1), 1));
13388 return fold_build2_loc (loc, NE_EXPR, type,
13389 fold_convert_loc (loc, TREE_TYPE (arg1),
13390 arg0),
13391 arg1);
13392 default:
13393 break;
13395 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13396 == min_hi
13397 && TREE_INT_CST_LOW (arg1) == min_lo)
13398 switch (code)
13400 case LT_EXPR:
13401 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13403 case LE_EXPR:
13404 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13406 case GE_EXPR:
13407 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13409 case GT_EXPR:
13410 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13412 default:
13413 break;
13415 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13416 == min_hi
13417 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13418 switch (code)
13420 case GE_EXPR:
13421 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13422 return fold_build2_loc (loc, NE_EXPR, type,
13423 fold_convert_loc (loc,
13424 TREE_TYPE (arg1), arg0),
13425 arg1);
13426 case LT_EXPR:
13427 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13428 return fold_build2_loc (loc, EQ_EXPR, type,
13429 fold_convert_loc (loc, TREE_TYPE (arg1),
13430 arg0),
13431 arg1);
13432 default:
13433 break;
13436 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13437 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13438 && TYPE_UNSIGNED (arg1_type)
13439 /* We will flip the signedness of the comparison operator
13440 associated with the mode of arg1, so the sign bit is
13441 specified by this mode. Check that arg1 is the signed
13442 max associated with this sign bit. */
13443 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13444 /* signed_type does not work on pointer types. */
13445 && INTEGRAL_TYPE_P (arg1_type))
13447 /* The following case also applies to X < signed_max+1
13448 and X >= signed_max+1 because of previous transformations. */
13449 if (code == LE_EXPR || code == GT_EXPR)
13451 tree st;
13452 st = signed_type_for (TREE_TYPE (arg1));
13453 return fold_build2_loc (loc,
13454 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13455 type, fold_convert_loc (loc, st, arg0),
13456 build_int_cst (st, 0));
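/* For instance, "x > 0x7fffffff" for 32-bit unsigned x becomes
   "(int) x < 0".  */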
13462 /* If we are comparing an ABS_EXPR with a constant, we can
13463 convert all the cases into explicit comparisons, but they may
13464 well not be faster than doing the ABS and one comparison.
13465 But ABS (X) <= C is a range comparison, which becomes a subtraction
13466 and a comparison, and is probably faster. */
13467 if (code == LE_EXPR
13468 && TREE_CODE (arg1) == INTEGER_CST
13469 && TREE_CODE (arg0) == ABS_EXPR
13470 && ! TREE_SIDE_EFFECTS (arg0)
13471 && (0 != (tem = negate_expr (arg1)))
13472 && TREE_CODE (tem) == INTEGER_CST
13473 && !TREE_OVERFLOW (tem))
13474 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13475 build2 (GE_EXPR, type,
13476 TREE_OPERAND (arg0, 0), tem),
13477 build2 (LE_EXPR, type,
13478 TREE_OPERAND (arg0, 0), arg1));
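/* For instance, "abs (x) <= 7" becomes "x >= -7 && x <= 7".  */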
13480 /* Convert ABS_EXPR<x> >= 0 to true. */
13481 strict_overflow_p = false;
13482 if (code == GE_EXPR
13483 && (integer_zerop (arg1)
13484 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13485 && real_zerop (arg1)))
13486 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13488 if (strict_overflow_p)
13489 fold_overflow_warning (("assuming signed overflow does not occur "
13490 "when simplifying comparison of "
13491 "absolute value and zero"),
13492 WARN_STRICT_OVERFLOW_CONDITIONAL);
13493 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13496 /* Convert ABS_EXPR<x> < 0 to false. */
13497 strict_overflow_p = false;
13498 if (code == LT_EXPR
13499 && (integer_zerop (arg1) || real_zerop (arg1))
13500 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13502 if (strict_overflow_p)
13503 fold_overflow_warning (("assuming signed overflow does not occur "
13504 "when simplifying comparison of "
13505 "absolute value and zero"),
13506 WARN_STRICT_OVERFLOW_CONDITIONAL);
13507 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13510 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13511 and similarly for >= into !=. */
13512 if ((code == LT_EXPR || code == GE_EXPR)
13513 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13514 && TREE_CODE (arg1) == LSHIFT_EXPR
13515 && integer_onep (TREE_OPERAND (arg1, 0)))
13516 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13517 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13518 TREE_OPERAND (arg1, 1)),
13519 build_zero_cst (TREE_TYPE (arg0)));
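/* For instance, "x < (1U << y)" for unsigned x becomes "(x >> y) == 0",
   and "x >= (1U << y)" becomes "(x >> y) != 0".  */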
13521 if ((code == LT_EXPR || code == GE_EXPR)
13522 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13523 && CONVERT_EXPR_P (arg1)
13524 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13525 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13527 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13528 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13529 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13530 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13531 build_zero_cst (TREE_TYPE (arg0)));
13534 return NULL_TREE;
13536 case UNORDERED_EXPR:
13537 case ORDERED_EXPR:
13538 case UNLT_EXPR:
13539 case UNLE_EXPR:
13540 case UNGT_EXPR:
13541 case UNGE_EXPR:
13542 case UNEQ_EXPR:
13543 case LTGT_EXPR:
13544 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13546 t1 = fold_relational_const (code, type, arg0, arg1);
13547 if (t1 != NULL_TREE)
13548 return t1;
13551 /* If the first operand is NaN, the result is constant. */
13552 if (TREE_CODE (arg0) == REAL_CST
13553 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13554 && (code != LTGT_EXPR || ! flag_trapping_math))
13556 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13557 ? integer_zero_node
13558 : integer_one_node;
13559 return omit_one_operand_loc (loc, type, t1, arg1);
13562 /* If the second operand is NaN, the result is constant. */
13563 if (TREE_CODE (arg1) == REAL_CST
13564 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13565 && (code != LTGT_EXPR || ! flag_trapping_math))
13567 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13568 ? integer_zero_node
13569 : integer_one_node;
13570 return omit_one_operand_loc (loc, type, t1, arg0);
13573 /* Simplify unordered comparison of something with itself. */
13574 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13575 && operand_equal_p (arg0, arg1, 0))
13576 return constant_boolean_node (1, type);
13578 if (code == LTGT_EXPR
13579 && !flag_trapping_math
13580 && operand_equal_p (arg0, arg1, 0))
13581 return constant_boolean_node (0, type);
13583 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13585 tree targ0 = strip_float_extensions (arg0);
13586 tree targ1 = strip_float_extensions (arg1);
13587 tree newtype = TREE_TYPE (targ0);
13589 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13590 newtype = TREE_TYPE (targ1);
13592 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13593 return fold_build2_loc (loc, code, type,
13594 fold_convert_loc (loc, newtype, targ0),
13595 fold_convert_loc (loc, newtype, targ1));
13598 return NULL_TREE;
13600 case COMPOUND_EXPR:
13601 /* When pedantic, a compound expression can be neither an lvalue
13602 nor an integer constant expression. */
13603 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13604 return NULL_TREE;
13605 /* Don't let (0, 0) be a null pointer constant. */
13606 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13607 : fold_convert_loc (loc, type, arg1);
13608 return pedantic_non_lvalue_loc (loc, tem);
13610 case COMPLEX_EXPR:
13611 if ((TREE_CODE (arg0) == REAL_CST
13612 && TREE_CODE (arg1) == REAL_CST)
13613 || (TREE_CODE (arg0) == INTEGER_CST
13614 && TREE_CODE (arg1) == INTEGER_CST))
13615 return build_complex (type, arg0, arg1);
13616 if (TREE_CODE (arg0) == REALPART_EXPR
13617 && TREE_CODE (arg1) == IMAGPART_EXPR
13618 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13619 && operand_equal_p (TREE_OPERAND (arg0, 0),
13620 TREE_OPERAND (arg1, 0), 0))
13621 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13622 TREE_OPERAND (arg1, 0));
13623 return NULL_TREE;
13625 case ASSERT_EXPR:
13626 /* An ASSERT_EXPR should never be passed to fold_binary. */
13627 gcc_unreachable ();
13629 case VEC_PACK_TRUNC_EXPR:
13630 case VEC_PACK_FIX_TRUNC_EXPR:
13632 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13633 tree *elts;
13635 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13636 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13637 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13638 return NULL_TREE;
13640 elts = XALLOCAVEC (tree, nelts);
13641 if (!vec_cst_ctor_to_array (arg0, elts)
13642 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13643 return NULL_TREE;
13645 for (i = 0; i < nelts; i++)
13647 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13648 ? NOP_EXPR : FIX_TRUNC_EXPR,
13649 TREE_TYPE (type), elts[i]);
13650 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13651 return NULL_TREE;
13654 return build_vector (type, elts);
13657 case VEC_WIDEN_MULT_LO_EXPR:
13658 case VEC_WIDEN_MULT_HI_EXPR:
13659 case VEC_WIDEN_MULT_EVEN_EXPR:
13660 case VEC_WIDEN_MULT_ODD_EXPR:
13662 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13663 unsigned int out, ofs, scale;
13664 tree *elts;
13666 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13667 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13668 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13669 return NULL_TREE;
13671 elts = XALLOCAVEC (tree, nelts * 4);
13672 if (!vec_cst_ctor_to_array (arg0, elts)
13673 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13674 return NULL_TREE;
13676 if (code == VEC_WIDEN_MULT_LO_EXPR)
13677 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13678 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13679 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13680 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13681 scale = 1, ofs = 0;
13682 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13683 scale = 1, ofs = 1;
13685 for (out = 0; out < nelts; out++)
13687 unsigned int in1 = (out << scale) + ofs;
13688 unsigned int in2 = in1 + nelts * 2;
13689 tree t1, t2;
13691 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13692 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13694 if (t1 == NULL_TREE || t2 == NULL_TREE)
13695 return NULL_TREE;
13696 elts[out] = const_binop (MULT_EXPR, t1, t2);
13697 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13698 return NULL_TREE;
13701 return build_vector (type, elts);
13704 default:
13705 return NULL_TREE;
13706 } /* switch (code) */
13709 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13710 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13711 of GOTO_EXPR. */
13713 static tree
13714 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13716 switch (TREE_CODE (*tp))
13718 case LABEL_EXPR:
13719 return *tp;
13721 case GOTO_EXPR:
13722 *walk_subtrees = 0;
13724 /* ... fall through ... */
13726 default:
13727 return NULL_TREE;
13731 /* Return whether the sub-tree ST contains a label which is accessible from
13732 outside the sub-tree. */
13734 static bool
13735 contains_label_p (tree st)
13737 return
13738 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13741 /* Fold a ternary expression of code CODE and type TYPE with operands
13742 OP0, OP1, and OP2. Return the folded expression if folding is
13743 successful. Otherwise, return NULL_TREE. */
13745 tree
13746 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13747 tree op0, tree op1, tree op2)
13749 tree tem;
13750 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13751 enum tree_code_class kind = TREE_CODE_CLASS (code);
13753 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13754 && TREE_CODE_LENGTH (code) == 3);
13756 /* Strip any conversions that don't change the mode. This is safe
13757 for every expression, except for a comparison expression because
13758 its signedness is derived from its operands. So, in the latter
13759 case, only strip conversions that don't change the signedness.
13761 Note that this is done as an internal manipulation within the
13762 constant folder, in order to find the simplest representation of
13763 the arguments so that their form can be studied. In any case,
13764 the appropriate type conversions should be put back in the tree
13765 that will get out of the constant folder. */
13766 if (op0)
13768 arg0 = op0;
13769 STRIP_NOPS (arg0);
13772 if (op1)
13774 arg1 = op1;
13775 STRIP_NOPS (arg1);
13778 if (op2)
13780 arg2 = op2;
13781 STRIP_NOPS (arg2);
13784 switch (code)
13786 case COMPONENT_REF:
13787 if (TREE_CODE (arg0) == CONSTRUCTOR
13788 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13790 unsigned HOST_WIDE_INT idx;
13791 tree field, value;
13792 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13793 if (field == arg1)
13794 return value;
13796 return NULL_TREE;
13798 case COND_EXPR:
13799 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13800 so all simple results must be passed through pedantic_non_lvalue. */
13801 if (TREE_CODE (arg0) == INTEGER_CST)
13803 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13804 tem = integer_zerop (arg0) ? op2 : op1;
13805 /* Only optimize constant conditions when the selected branch
13806 has the same type as the COND_EXPR. This avoids optimizing
13807 away "c ? x : throw", where the throw has a void type.
13808 Avoid throwing away an operand which contains a label. */
13809 if ((!TREE_SIDE_EFFECTS (unused_op)
13810 || !contains_label_p (unused_op))
13811 && (! VOID_TYPE_P (TREE_TYPE (tem))
13812 || VOID_TYPE_P (type)))
13813 return pedantic_non_lvalue_loc (loc, tem);
13814 return NULL_TREE;
13816 if (operand_equal_p (arg1, op2, 0))
13817 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13819 /* If we have A op B ? A : C, we may be able to convert this to a
13820 simpler expression, depending on the operation and the values
13821 of B and C. Signed zeros prevent all of these transformations,
13822 for reasons given above each one.
13824 Also try swapping the arguments and inverting the conditional. */
13825 if (COMPARISON_CLASS_P (arg0)
13826 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13827 arg1, TREE_OPERAND (arg0, 1))
13828 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13830 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13831 if (tem)
13832 return tem;
13835 if (COMPARISON_CLASS_P (arg0)
13836 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13837 op2,
13838 TREE_OPERAND (arg0, 1))
13839 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13841 location_t loc0 = expr_location_or (arg0, loc);
13842 tem = fold_truth_not_expr (loc0, arg0);
13843 if (tem && COMPARISON_CLASS_P (tem))
13845 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13846 if (tem)
13847 return tem;
13851 /* If the second operand is simpler than the third, swap them
13852 since that produces better jump optimization results. */
13853 if (truth_value_p (TREE_CODE (arg0))
13854 && tree_swap_operands_p (op1, op2, false))
13856 location_t loc0 = expr_location_or (arg0, loc);
13857 /* See if this can be inverted. If it can't, possibly because
13858 it was a floating-point inequality comparison, don't do
13859 anything. */
13860 tem = fold_truth_not_expr (loc0, arg0);
13861 if (tem)
13862 return fold_build3_loc (loc, code, type, tem, op2, op1);
13865 /* Convert A ? 1 : 0 to simply A. */
13866 if (integer_onep (op1)
13867 && integer_zerop (op2)
13868 /* If we try to convert OP0 to our type, the
13869 call to fold will try to move the conversion inside
13870 a COND, which will recurse. In that case, the COND_EXPR
13871 is probably the best choice, so leave it alone. */
13872 && type == TREE_TYPE (arg0))
13873 return pedantic_non_lvalue_loc (loc, arg0);
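/* Schematically, for a truth-valued A whose type already matches
   the COND_EXPR's type:

     A ? 1 : 0  ==>  A

   with no conversion inserted, which is what the type check above
   guarantees.  */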
13875 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13876 over COND_EXPR in cases such as floating point comparisons. */
13877 if (integer_zerop (op1)
13878 && integer_onep (op2)
13879 && truth_value_p (TREE_CODE (arg0)))
13880 return pedantic_non_lvalue_loc (loc,
13881 fold_convert_loc (loc, type,
13882 invert_truthvalue_loc (loc,
13883 arg0)));
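/* E.g., a hypothetical A ? 0 : 1 with truth-valued A becomes
   (type) !A; for floating-point comparisons the NOT_EXPR form is
   generally easier for later passes to optimize than the COND_EXPR
   form.  */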
13885 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13886 if (TREE_CODE (arg0) == LT_EXPR
13887 && integer_zerop (TREE_OPERAND (arg0, 1))
13888 && integer_zerop (op2)
13889 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13891 /* sign_bit_p only checks ARG1 bits within A's precision.
13892 If <sign bit of A> has a wider type than A, bits outside
13893 of A's precision in <sign bit of A> need to be checked.
13894 If they are all 0, this optimization must be done
13895 in A's unsigned type; if they are all 1, in A's signed type;
13896 otherwise it can't be done. */
13897 if (TYPE_PRECISION (TREE_TYPE (tem))
13898 < TYPE_PRECISION (TREE_TYPE (arg1))
13899 && TYPE_PRECISION (TREE_TYPE (tem))
13900 < TYPE_PRECISION (type))
13902 unsigned HOST_WIDE_INT mask_lo;
13903 HOST_WIDE_INT mask_hi;
13904 int inner_width, outer_width;
13905 tree tem_type;
13907 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13908 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13909 if (outer_width > TYPE_PRECISION (type))
13910 outer_width = TYPE_PRECISION (type);
13912 if (outer_width > HOST_BITS_PER_WIDE_INT)
13914 mask_hi = ((unsigned HOST_WIDE_INT) -1
13915 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13916 mask_lo = -1;
13918 else
13920 mask_hi = 0;
13921 mask_lo = ((unsigned HOST_WIDE_INT) -1
13922 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13924 if (inner_width > HOST_BITS_PER_WIDE_INT)
13926 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13927 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13928 mask_lo = 0;
13930 else
13931 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13932 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13934 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13935 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13937 tem_type = signed_type_for (TREE_TYPE (tem));
13938 tem = fold_convert_loc (loc, tem_type, tem);
13940 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13941 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13943 tem_type = unsigned_type_for (TREE_TYPE (tem));
13944 tem = fold_convert_loc (loc, tem_type, tem);
13946 else
13947 tem = NULL;
13950 if (tem)
13951 return
13952 fold_convert_loc (loc, type,
13953 fold_build2_loc (loc, BIT_AND_EXPR,
13954 TREE_TYPE (tem), tem,
13955 fold_convert_loc (loc,
13956 TREE_TYPE (tem),
13957 arg1)));
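/* A worked instance (hypothetical 32-bit int a, written schematically):

     a < 0 ? 0x80000000 : 0  ==>  a & 0x80000000

   since the sign bit of `a' is set exactly when a < 0; the width
   checks above only matter when the sign-bit constant lives in a
   type wider than `a'.  */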
13960 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13961 already handled above. */
13962 if (TREE_CODE (arg0) == BIT_AND_EXPR
13963 && integer_onep (TREE_OPERAND (arg0, 1))
13964 && integer_zerop (op2)
13965 && integer_pow2p (arg1))
13967 tree tem = TREE_OPERAND (arg0, 0);
13968 STRIP_NOPS (tem);
13969 if (TREE_CODE (tem) == RSHIFT_EXPR
13970 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13971 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13972 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13973 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13974 TREE_OPERAND (tem, 0), arg1);
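/* E.g., with a hypothetical N of 3:

     ((a >> 3) & 1) ? 8 : 0  ==>  a & 8

   because bit 3 of `a' alone determines the result.  */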
13977 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13978 is probably obsolete because the first operand should be a
13979 truth value (that's why we have the two cases above), but let's
13980 leave it in until we can confirm this for all front-ends. */
13981 if (integer_zerop (op2)
13982 && TREE_CODE (arg0) == NE_EXPR
13983 && integer_zerop (TREE_OPERAND (arg0, 1))
13984 && integer_pow2p (arg1)
13985 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13986 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13987 arg1, OEP_ONLY_CONST))
13988 return pedantic_non_lvalue_loc (loc,
13989 fold_convert_loc (loc, type,
13990 TREE_OPERAND (arg0, 0)));
13992 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13993 if (integer_zerop (op2)
13994 && truth_value_p (TREE_CODE (arg0))
13995 && truth_value_p (TREE_CODE (arg1)))
13996 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13997 fold_convert_loc (loc, type, arg0),
13998 arg1);
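/* For instance, a hypothetical (x != 0) ? (y != 0) : 0 becomes
   (x != 0) && (y != 0), since both arms are truth values.  */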
14000 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14001 if (integer_onep (op2)
14002 && truth_value_p (TREE_CODE (arg0))
14003 && truth_value_p (TREE_CODE (arg1)))
14005 location_t loc0 = expr_location_or (arg0, loc);
14006 /* Only perform transformation if ARG0 is easily inverted. */
14007 tem = fold_truth_not_expr (loc0, arg0);
14008 if (tem)
14009 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14010 fold_convert_loc (loc, type, tem),
14011 arg1);
14014 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14015 if (integer_zerop (arg1)
14016 && truth_value_p (TREE_CODE (arg0))
14017 && truth_value_p (TREE_CODE (op2)))
14019 location_t loc0 = expr_location_or (arg0, loc);
14020 /* Only perform transformation if ARG0 is easily inverted. */
14021 tem = fold_truth_not_expr (loc0, arg0);
14022 if (tem)
14023 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14024 fold_convert_loc (loc, type, tem),
14025 op2);
14028 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14029 if (integer_onep (arg1)
14030 && truth_value_p (TREE_CODE (arg0))
14031 && truth_value_p (TREE_CODE (op2)))
14032 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14033 fold_convert_loc (loc, type, arg0),
14034 op2);
14036 return NULL_TREE;
14038 case CALL_EXPR:
14039 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14040 of fold_ternary on them. */
14041 gcc_unreachable ();
14043 case BIT_FIELD_REF:
14044 if ((TREE_CODE (arg0) == VECTOR_CST
14045 || (TREE_CODE (arg0) == CONSTRUCTOR
14046 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14047 && (type == TREE_TYPE (TREE_TYPE (arg0))
14048 || (TREE_CODE (type) == VECTOR_TYPE
14049 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14051 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14052 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14053 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14054 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14056 if (n != 0
14057 && (idx % width) == 0
14058 && (n % width) == 0
14059 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14061 idx = idx / width;
14062 n = n / width;
14063 if (TREE_CODE (type) == VECTOR_TYPE)
14065 if (TREE_CODE (arg0) == VECTOR_CST)
14067 tree *vals = XALLOCAVEC (tree, n);
14068 unsigned i;
14069 for (i = 0; i < n; ++i)
14070 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14071 return build_vector (type, vals);
14073 else
14075 VEC(constructor_elt, gc) *vals;
14076 unsigned i;
14077 if (CONSTRUCTOR_NELTS (arg0) == 0)
14078 return build_constructor (type, NULL);
14079 if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14080 0)->value))
14081 != VECTOR_TYPE)
14083 vals = VEC_alloc (constructor_elt, gc, n);
14084 for (i = 0;
14085 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14086 ++i)
14087 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14088 CONSTRUCTOR_ELT
14089 (arg0, idx + i)->value);
14090 return build_constructor (type, vals);
14094 else if (n == 1)
14096 if (TREE_CODE (arg0) == VECTOR_CST)
14097 return VECTOR_CST_ELT (arg0, idx);
14098 else if (CONSTRUCTOR_NELTS (arg0) == 0)
14099 return build_zero_cst (type);
14100 else if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14101 0)->value))
14102 != VECTOR_TYPE)
14104 if (idx < CONSTRUCTOR_NELTS (arg0))
14105 return CONSTRUCTOR_ELT (arg0, idx)->value;
14106 return build_zero_cst (type);
14112 /* A bit-field-ref that references the full argument can be stripped. */
14113 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14114 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14115 && integer_zerop (op2))
14116 return fold_convert_loc (loc, type, arg0);
14118 /* On constants we can use native encode/interpret to constant
14119 fold (nearly) all BIT_FIELD_REFs. */
14120 if (CONSTANT_CLASS_P (arg0)
14121 && can_native_interpret_type_p (type)
14122 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14123 /* This limitation should not be necessary; we just need to
14124 round this up to the mode size. */
14125 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14126 /* Need bit-shifting of the buffer to relax the following. */
14127 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14129 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14130 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14131 unsigned HOST_WIDE_INT clen;
14132 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14133 /* ??? We cannot tell native_encode_expr to start at
14134 an arbitrary byte offset. So limit ourselves to a reasonable
14135 amount of work. */
14136 if (clen <= 4096)
14138 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14139 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14140 if (len > 0
14141 && len * BITS_PER_UNIT >= bitpos + bitsize)
14143 tree v = native_interpret_expr (type,
14144 b + bitpos / BITS_PER_UNIT,
14145 bitsize / BITS_PER_UNIT);
14146 if (v)
14147 return v;
14152 return NULL_TREE;
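/* Sketch of the mechanism above (sizes are illustrative): a
   BIT_FIELD_REF <c, 8, 8> on a 32-bit INTEGER_CST c is folded by
   serializing c into a byte buffer with native_encode_expr and
   re-reading the single byte at offset 1 with
   native_interpret_expr, yielding a new constant.  */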
14154 case FMA_EXPR:
14155 /* For integers we can decompose the FMA if possible. */
14156 if (TREE_CODE (arg0) == INTEGER_CST
14157 && TREE_CODE (arg1) == INTEGER_CST)
14158 return fold_build2_loc (loc, PLUS_EXPR, type,
14159 const_binop (MULT_EXPR, arg0, arg1), arg2);
14160 if (integer_zerop (arg2))
14161 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14163 return fold_fma (loc, type, arg0, arg1, arg2);
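/* E.g., a hypothetical FMA_EXPR <2, 3, z> decomposes to 6 + z, and
   FMA_EXPR <x, y, 0> to x * y; everything else is left to
   fold_fma.  */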
14165 case VEC_PERM_EXPR:
14166 if (TREE_CODE (arg2) == VECTOR_CST)
14168 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14169 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14170 tree t;
14171 bool need_mask_canon = false;
14172 bool all_in_vec0 = true;
14173 bool all_in_vec1 = true;
14174 bool maybe_identity = true;
14175 bool single_arg = (op0 == op1);
14176 bool changed = false;
14178 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14179 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14180 for (i = 0; i < nelts; i++)
14182 tree val = VECTOR_CST_ELT (arg2, i);
14183 if (TREE_CODE (val) != INTEGER_CST)
14184 return NULL_TREE;
14186 sel[i] = TREE_INT_CST_LOW (val) & mask;
14187 if (TREE_INT_CST_HIGH (val)
14188 || ((unsigned HOST_WIDE_INT)
14189 TREE_INT_CST_LOW (val) != sel[i]))
14190 need_mask_canon = true;
14192 if (sel[i] < nelts)
14193 all_in_vec1 = false;
14194 else
14195 all_in_vec0 = false;
14197 if ((sel[i] & (nelts-1)) != i)
14198 maybe_identity = false;
14201 if (maybe_identity)
14203 if (all_in_vec0)
14204 return op0;
14205 if (all_in_vec1)
14206 return op1;
14209 if (all_in_vec0)
14210 op1 = op0;
14211 else if (all_in_vec1)
14213 op0 = op1;
14214 for (i = 0; i < nelts; i++)
14215 sel[i] -= nelts;
14216 need_mask_canon = true;
14219 if ((TREE_CODE (op0) == VECTOR_CST
14220 || TREE_CODE (op0) == CONSTRUCTOR)
14221 && (TREE_CODE (op1) == VECTOR_CST
14222 || TREE_CODE (op1) == CONSTRUCTOR))
14224 t = fold_vec_perm (type, op0, op1, sel);
14225 if (t != NULL_TREE)
14226 return t;
14229 if (op0 == op1 && !single_arg)
14230 changed = true;
14232 if (need_mask_canon && arg2 == op2)
14234 tree *tsel = XALLOCAVEC (tree, nelts);
14235 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14236 for (i = 0; i < nelts; i++)
14237 tsel[i] = build_int_cst (eltype, sel[i]);
14238 op2 = build_vector (TREE_TYPE (arg2), tsel);
14239 changed = true;
14242 if (changed)
14243 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14245 return NULL_TREE;
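/* For example, a hypothetical VEC_PERM_EXPR <v, v, {0, 1, 2, 3}>
   on a four-element vector is detected as the identity and folds
   to v; a mask selecting only from the second operand is first
   canonicalized to index the first.  */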
14247 default:
14248 return NULL_TREE;
14249 } /* switch (code) */
14252 /* Perform constant folding and related simplification of EXPR.
14253 The related simplifications include x*1 => x, x*0 => 0, etc.,
14254 and application of the associative law.
14255 NOP_EXPR conversions may be removed freely (as long as we
14256 are careful not to change the type of the overall expression).
14257 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14258 but we can constant-fold them if they have constant operands. */
14260 #ifdef ENABLE_FOLD_CHECKING
14261 # define fold(x) fold_1 (x)
14262 static tree fold_1 (tree);
14263 static
14264 #endif
14265 tree
14266 fold (tree expr)
14268 const tree t = expr;
14269 enum tree_code code = TREE_CODE (t);
14270 enum tree_code_class kind = TREE_CODE_CLASS (code);
14271 tree tem;
14272 location_t loc = EXPR_LOCATION (expr);
14274 /* Return right away if a constant. */
14275 if (kind == tcc_constant)
14276 return t;
14278 /* CALL_EXPR-like objects with variable numbers of operands are
14279 treated specially. */
14280 if (kind == tcc_vl_exp)
14282 if (code == CALL_EXPR)
14284 tem = fold_call_expr (loc, expr, false);
14285 return tem ? tem : expr;
14287 return expr;
14290 if (IS_EXPR_CODE_CLASS (kind))
14292 tree type = TREE_TYPE (t);
14293 tree op0, op1, op2;
14295 switch (TREE_CODE_LENGTH (code))
14297 case 1:
14298 op0 = TREE_OPERAND (t, 0);
14299 tem = fold_unary_loc (loc, code, type, op0);
14300 return tem ? tem : expr;
14301 case 2:
14302 op0 = TREE_OPERAND (t, 0);
14303 op1 = TREE_OPERAND (t, 1);
14304 tem = fold_binary_loc (loc, code, type, op0, op1);
14305 return tem ? tem : expr;
14306 case 3:
14307 op0 = TREE_OPERAND (t, 0);
14308 op1 = TREE_OPERAND (t, 1);
14309 op2 = TREE_OPERAND (t, 2);
14310 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14311 return tem ? tem : expr;
14312 default:
14313 break;
14317 switch (code)
14319 case ARRAY_REF:
14321 tree op0 = TREE_OPERAND (t, 0);
14322 tree op1 = TREE_OPERAND (t, 1);
14324 if (TREE_CODE (op1) == INTEGER_CST
14325 && TREE_CODE (op0) == CONSTRUCTOR
14326 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14328 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14329 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14330 unsigned HOST_WIDE_INT begin = 0;
14332 /* Find a matching index by means of a binary search. */
14333 while (begin != end)
14335 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14336 tree index = VEC_index (constructor_elt, elts, middle).index;
14338 if (TREE_CODE (index) == INTEGER_CST
14339 && tree_int_cst_lt (index, op1))
14340 begin = middle + 1;
14341 else if (TREE_CODE (index) == INTEGER_CST
14342 && tree_int_cst_lt (op1, index))
14343 end = middle;
14344 else if (TREE_CODE (index) == RANGE_EXPR
14345 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14346 begin = middle + 1;
14347 else if (TREE_CODE (index) == RANGE_EXPR
14348 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14349 end = middle;
14350 else
14351 return VEC_index (constructor_elt, elts, middle).value;
14355 return t;
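/* Illustration (hypothetical constructor): folding a[2] where the
   array operand is the constant constructor {10, 20, 30}
   binary-searches the sorted element indexes and returns the
   INTEGER_CST 30.  */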
14358 case CONST_DECL:
14359 return fold (DECL_INITIAL (t));
14361 default:
14362 return t;
14363 } /* switch (code) */
14366 #ifdef ENABLE_FOLD_CHECKING
14367 #undef fold
14369 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14370 hash_table <pointer_hash <tree_node> >);
14371 static void fold_check_failed (const_tree, const_tree);
14372 void print_fold_checksum (const_tree);
14374 /* When configured with --enable-checking=fold, compute a digest of EXPR
14375 before and after the actual fold call, to verify that fold did not
14376 accidentally change the original expr. */
14378 tree
14379 fold (tree expr)
14381 tree ret;
14382 struct md5_ctx ctx;
14383 unsigned char checksum_before[16], checksum_after[16];
14384 hash_table <pointer_hash <tree_node> > ht;
14386 ht.create (32);
14387 md5_init_ctx (&ctx);
14388 fold_checksum_tree (expr, &ctx, ht);
14389 md5_finish_ctx (&ctx, checksum_before);
14390 ht.empty ();
14392 ret = fold_1 (expr);
14394 md5_init_ctx (&ctx);
14395 fold_checksum_tree (expr, &ctx, ht);
14396 md5_finish_ctx (&ctx, checksum_after);
14397 ht.dispose ();
14399 if (memcmp (checksum_before, checksum_after, 16))
14400 fold_check_failed (expr, ret);
14402 return ret;
14405 void
14406 print_fold_checksum (const_tree expr)
14408 struct md5_ctx ctx;
14409 unsigned char checksum[16], cnt;
14410 hash_table <pointer_hash <tree_node> > ht;
14412 ht.create (32);
14413 md5_init_ctx (&ctx);
14414 fold_checksum_tree (expr, &ctx, ht);
14415 md5_finish_ctx (&ctx, checksum);
14416 ht.dispose ();
14417 for (cnt = 0; cnt < 16; ++cnt)
14418 fprintf (stderr, "%02x", checksum[cnt]);
14419 putc ('\n', stderr);
14422 static void
14423 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14425 internal_error ("fold check: original tree changed by fold");
14428 static void
14429 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14430 hash_table <pointer_hash <tree_node> > ht)
14432 tree_node **slot;
14433 enum tree_code code;
14434 union tree_node buf;
14435 int i, len;
14437 recursive_label:
14438 if (expr == NULL)
14439 return;
14440 slot = ht.find_slot (expr, INSERT);
14441 if (*slot != NULL)
14442 return;
14443 *slot = CONST_CAST_TREE (expr);
14444 code = TREE_CODE (expr);
14445 if (TREE_CODE_CLASS (code) == tcc_declaration
14446 && DECL_ASSEMBLER_NAME_SET_P (expr))
14448 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14449 memcpy ((char *) &buf, expr, tree_size (expr));
14450 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14451 expr = (tree) &buf;
14453 else if (TREE_CODE_CLASS (code) == tcc_type
14454 && (TYPE_POINTER_TO (expr)
14455 || TYPE_REFERENCE_TO (expr)
14456 || TYPE_CACHED_VALUES_P (expr)
14457 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14458 || TYPE_NEXT_VARIANT (expr)))
14460 /* Allow these fields to be modified. */
14461 tree tmp;
14462 memcpy ((char *) &buf, expr, tree_size (expr));
14463 expr = tmp = (tree) &buf;
14464 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14465 TYPE_POINTER_TO (tmp) = NULL;
14466 TYPE_REFERENCE_TO (tmp) = NULL;
14467 TYPE_NEXT_VARIANT (tmp) = NULL;
14468 if (TYPE_CACHED_VALUES_P (tmp))
14470 TYPE_CACHED_VALUES_P (tmp) = 0;
14471 TYPE_CACHED_VALUES (tmp) = NULL;
14474 md5_process_bytes (expr, tree_size (expr), ctx);
14475 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14476 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14477 if (TREE_CODE_CLASS (code) != tcc_type
14478 && TREE_CODE_CLASS (code) != tcc_declaration
14479 && code != TREE_LIST
14480 && code != SSA_NAME
14481 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14482 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14483 switch (TREE_CODE_CLASS (code))
14485 case tcc_constant:
14486 switch (code)
14488 case STRING_CST:
14489 md5_process_bytes (TREE_STRING_POINTER (expr),
14490 TREE_STRING_LENGTH (expr), ctx);
14491 break;
14492 case COMPLEX_CST:
14493 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14494 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14495 break;
14496 case VECTOR_CST:
14497 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14498 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14499 break;
14500 default:
14501 break;
14503 break;
14504 case tcc_exceptional:
14505 switch (code)
14507 case TREE_LIST:
14508 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14509 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14510 expr = TREE_CHAIN (expr);
14511 goto recursive_label;
14512 break;
14513 case TREE_VEC:
14514 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14515 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14516 break;
14517 default:
14518 break;
14520 break;
14521 case tcc_expression:
14522 case tcc_reference:
14523 case tcc_comparison:
14524 case tcc_unary:
14525 case tcc_binary:
14526 case tcc_statement:
14527 case tcc_vl_exp:
14528 len = TREE_OPERAND_LENGTH (expr);
14529 for (i = 0; i < len; ++i)
14530 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14531 break;
14532 case tcc_declaration:
14533 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14534 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14535 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14537 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14538 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14539 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14540 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14541 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14543 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14544 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14546 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14548 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14549 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14550 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14552 break;
14553 case tcc_type:
14554 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14555 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14556 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14557 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14558 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14559 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14560 if (INTEGRAL_TYPE_P (expr)
14561 || SCALAR_FLOAT_TYPE_P (expr))
14563 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14564 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14566 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14567 if (TREE_CODE (expr) == RECORD_TYPE
14568 || TREE_CODE (expr) == UNION_TYPE
14569 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14570 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14571 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14572 break;
14573 default:
14574 break;
14578 /* Helper function for outputting the checksum of a tree T. When
14579 debugging with gdb, you can "define mynext" to be "next" followed
14580 by "call debug_fold_checksum (op0)", then just trace down till the
14581 outputs differ. */
14583 DEBUG_FUNCTION void
14584 debug_fold_checksum (const_tree t)
14586 int i;
14587 unsigned char checksum[16];
14588 struct md5_ctx ctx;
14589 hash_table <pointer_hash <tree_node> > ht;
14590 ht.create (32);
14592 md5_init_ctx (&ctx);
14593 fold_checksum_tree (t, &ctx, ht);
14594 md5_finish_ctx (&ctx, checksum);
14595 ht.empty ();
14597 for (i = 0; i < 16; i++)
14598 fprintf (stderr, "%d ", checksum[i]);
14600 fprintf (stderr, "\n");
14603 #endif
14605 /* Fold a unary tree expression with code CODE of type TYPE with an
14606 operand OP0. LOC is the location of the resulting expression.
14607 Return a folded expression if successful. Otherwise, return a tree
14608 expression with code CODE of type TYPE with an operand OP0. */
14610 tree
14611 fold_build1_stat_loc (location_t loc,
14612 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14614 tree tem;
14615 #ifdef ENABLE_FOLD_CHECKING
14616 unsigned char checksum_before[16], checksum_after[16];
14617 struct md5_ctx ctx;
14618 hash_table <pointer_hash <tree_node> > ht;
14620 ht.create (32);
14621 md5_init_ctx (&ctx);
14622 fold_checksum_tree (op0, &ctx, ht);
14623 md5_finish_ctx (&ctx, checksum_before);
14624 ht.empty ();
14625 #endif
14627 tem = fold_unary_loc (loc, code, type, op0);
14628 if (!tem)
14629 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14631 #ifdef ENABLE_FOLD_CHECKING
14632 md5_init_ctx (&ctx);
14633 fold_checksum_tree (op0, &ctx, ht);
14634 md5_finish_ctx (&ctx, checksum_after);
14635 ht.dispose ();
14637 if (memcmp (checksum_before, checksum_after, 16))
14638 fold_check_failed (op0, tem);
14639 #endif
14640 return tem;
14643 /* Fold a binary tree expression with code CODE of type TYPE with
14644 operands OP0 and OP1. LOC is the location of the resulting
14645 expression. Return a folded expression if successful. Otherwise,
14646 return a tree expression with code CODE of type TYPE with operands
14647 OP0 and OP1. */
14649 tree
14650 fold_build2_stat_loc (location_t loc,
14651 enum tree_code code, tree type, tree op0, tree op1
14652 MEM_STAT_DECL)
14654 tree tem;
14655 #ifdef ENABLE_FOLD_CHECKING
14656 unsigned char checksum_before_op0[16],
14657 checksum_before_op1[16],
14658 checksum_after_op0[16],
14659 checksum_after_op1[16];
14660 struct md5_ctx ctx;
14661 hash_table <pointer_hash <tree_node> > ht;
14663 ht.create (32);
14664 md5_init_ctx (&ctx);
14665 fold_checksum_tree (op0, &ctx, ht);
14666 md5_finish_ctx (&ctx, checksum_before_op0);
14667 ht.empty ();
14669 md5_init_ctx (&ctx);
14670 fold_checksum_tree (op1, &ctx, ht);
14671 md5_finish_ctx (&ctx, checksum_before_op1);
14672 ht.empty ();
14673 #endif
14675 tem = fold_binary_loc (loc, code, type, op0, op1);
14676 if (!tem)
14677 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14679 #ifdef ENABLE_FOLD_CHECKING
14680 md5_init_ctx (&ctx);
14681 fold_checksum_tree (op0, &ctx, ht);
14682 md5_finish_ctx (&ctx, checksum_after_op0);
14683 ht.empty ();
14685 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14686 fold_check_failed (op0, tem);
14688 md5_init_ctx (&ctx);
14689 fold_checksum_tree (op1, &ctx, ht);
14690 md5_finish_ctx (&ctx, checksum_after_op1);
14691 ht.dispose ();
14693 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14694 fold_check_failed (op1, tem);
14695 #endif
14696 return tem;
14699 /* Fold a ternary tree expression with code CODE of type TYPE with
14700 operands OP0, OP1, and OP2. Return a folded expression if
14701 successful. Otherwise, return a tree expression with code CODE of
14702 type TYPE with operands OP0, OP1, and OP2. */
14704 tree
14705 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14706 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14708 tree tem;
14709 #ifdef ENABLE_FOLD_CHECKING
14710 unsigned char checksum_before_op0[16],
14711 checksum_before_op1[16],
14712 checksum_before_op2[16],
14713 checksum_after_op0[16],
14714 checksum_after_op1[16],
14715 checksum_after_op2[16];
14716 struct md5_ctx ctx;
14717 hash_table <pointer_hash <tree_node> > ht;
14719 ht.create (32);
14720 md5_init_ctx (&ctx);
14721 fold_checksum_tree (op0, &ctx, ht);
14722 md5_finish_ctx (&ctx, checksum_before_op0);
14723 ht.empty ();
14725 md5_init_ctx (&ctx);
14726 fold_checksum_tree (op1, &ctx, ht);
14727 md5_finish_ctx (&ctx, checksum_before_op1);
14728 ht.empty ();
14730 md5_init_ctx (&ctx);
14731 fold_checksum_tree (op2, &ctx, ht);
14732 md5_finish_ctx (&ctx, checksum_before_op2);
14733 ht.empty ();
14734 #endif
14736 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14737 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14738 if (!tem)
14739 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14741 #ifdef ENABLE_FOLD_CHECKING
14742 md5_init_ctx (&ctx);
14743 fold_checksum_tree (op0, &ctx, ht);
14744 md5_finish_ctx (&ctx, checksum_after_op0);
14745 ht.empty ();
14747 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14748 fold_check_failed (op0, tem);
14750 md5_init_ctx (&ctx);
14751 fold_checksum_tree (op1, &ctx, ht);
14752 md5_finish_ctx (&ctx, checksum_after_op1);
14753 ht.empty ();
14755 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14756 fold_check_failed (op1, tem);
14758 md5_init_ctx (&ctx);
14759 fold_checksum_tree (op2, &ctx, ht);
14760 md5_finish_ctx (&ctx, checksum_after_op2);
14761 ht.dispose ();
14763 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14764 fold_check_failed (op2, tem);
14765 #endif
14766 return tem;
14769 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14770 arguments in ARGARRAY, and a null static chain.
14771 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14772 of type TYPE from the given operands as constructed by build_call_array. */
14774 tree
14775 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14776 int nargs, tree *argarray)
14778 tree tem;
14779 #ifdef ENABLE_FOLD_CHECKING
14780 unsigned char checksum_before_fn[16],
14781 checksum_before_arglist[16],
14782 checksum_after_fn[16],
14783 checksum_after_arglist[16];
14784 struct md5_ctx ctx;
14785 hash_table <pointer_hash <tree_node> > ht;
14786 int i;
14788 ht.create (32);
14789 md5_init_ctx (&ctx);
14790 fold_checksum_tree (fn, &ctx, ht);
14791 md5_finish_ctx (&ctx, checksum_before_fn);
14792 ht.empty ();
14794 md5_init_ctx (&ctx);
14795 for (i = 0; i < nargs; i++)
14796 fold_checksum_tree (argarray[i], &ctx, ht);
14797 md5_finish_ctx (&ctx, checksum_before_arglist);
14798 ht.empty ();
14799 #endif
14801 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14803 #ifdef ENABLE_FOLD_CHECKING
14804 md5_init_ctx (&ctx);
14805 fold_checksum_tree (fn, &ctx, ht);
14806 md5_finish_ctx (&ctx, checksum_after_fn);
14807 ht.empty ();
14809 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14810 fold_check_failed (fn, tem);
14812 md5_init_ctx (&ctx);
14813 for (i = 0; i < nargs; i++)
14814 fold_checksum_tree (argarray[i], &ctx, ht);
14815 md5_finish_ctx (&ctx, checksum_after_arglist);
14816 ht.dispose ();
14818 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14819 fold_check_failed (NULL_TREE, tem);
14820 #endif
14821 return tem;
14824 /* Perform constant folding and related simplification of initializer
14825 expression EXPR. These behave identically to "fold_buildN" but ignore
14826 potential run-time traps and exceptions that fold must preserve. */
14828 #define START_FOLD_INIT \
14829 int saved_signaling_nans = flag_signaling_nans;\
14830 int saved_trapping_math = flag_trapping_math;\
14831 int saved_rounding_math = flag_rounding_math;\
14832 int saved_trapv = flag_trapv;\
14833 int saved_folding_initializer = folding_initializer;\
14834 flag_signaling_nans = 0;\
14835 flag_trapping_math = 0;\
14836 flag_rounding_math = 0;\
14837 flag_trapv = 0;\
14838 folding_initializer = 1;
14840 #define END_FOLD_INIT \
14841 flag_signaling_nans = saved_signaling_nans;\
14842 flag_trapping_math = saved_trapping_math;\
14843 flag_rounding_math = saved_rounding_math;\
14844 flag_trapv = saved_trapv;\
14845 folding_initializer = saved_folding_initializer;
14847 tree
14848 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14849 tree type, tree op)
14851 tree result;
14852 START_FOLD_INIT;
14854 result = fold_build1_loc (loc, code, type, op);
14856 END_FOLD_INIT;
14857 return result;
14860 tree
14861 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14862 tree type, tree op0, tree op1)
14864 tree result;
14865 START_FOLD_INIT;
14867 result = fold_build2_loc (loc, code, type, op0, op1);
14869 END_FOLD_INIT;
14870 return result;
14873 tree
14874 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14875 tree type, tree op0, tree op1, tree op2)
14877 tree result;
14878 START_FOLD_INIT;
14880 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14882 END_FOLD_INIT;
14883 return result;
14886 tree
14887 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14888 int nargs, tree *argarray)
14890 tree result;
14891 START_FOLD_INIT;
14893 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14895 END_FOLD_INIT;
14896 return result;
14899 #undef START_FOLD_INIT
14900 #undef END_FOLD_INIT
14902 /* Determine if the first argument is a multiple of the second argument.
14903 Return 0 if it is not, or if we cannot easily determine that it is.
14905 An example of the sort of thing we care about (at this point; this routine
14906 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14907 fold cases do now) is discovering that
14909 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14911 is a multiple of
14913 SAVE_EXPR (J * 8)
14915 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14917 This code also handles discovering that
14919 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14921 is a multiple of 8 so we don't have to worry about dealing with a
14922 possible remainder.
14924 Note that we *look* inside a SAVE_EXPR only to determine how it was
14925 calculated; it is not safe for fold to do much of anything else with the
14926 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14927 at run time. For example, the latter example above *cannot* be implemented
14928 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14929 evaluation time of the original SAVE_EXPR is not necessarily the same at
14930 the time the new expression is evaluated. The only optimization of this
14931 sort that would be valid is changing
14933 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14935 divided by 8 to
14937 SAVE_EXPR (I) * SAVE_EXPR (J)
14939 (where the same SAVE_EXPR (J) is used in the original and the
14940 transformed version). */
14942 int
14943 multiple_of_p (tree type, const_tree top, const_tree bottom)
14945 if (operand_equal_p (top, bottom, 0))
14946 return 1;
14948 if (TREE_CODE (type) != INTEGER_TYPE)
14949 return 0;
14951 switch (TREE_CODE (top))
14953 case BIT_AND_EXPR:
14954 /* Bitwise and provides a power of two multiple. If the mask is
14955 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14956 if (!integer_pow2p (bottom))
14957 return 0;
14958 /* FALLTHRU */
14960 case MULT_EXPR:
14961 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14962 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14964 case PLUS_EXPR:
14965 case MINUS_EXPR:
14966 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14967 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14969 case LSHIFT_EXPR:
14970 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14972 tree op1, t1;
14974 op1 = TREE_OPERAND (top, 1);
14975 /* const_binop may not detect overflow correctly,
14976 so check for it explicitly here. */
14977 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14978 > TREE_INT_CST_LOW (op1)
14979 && TREE_INT_CST_HIGH (op1) == 0
14980 && 0 != (t1 = fold_convert (type,
14981 const_binop (LSHIFT_EXPR,
14982 size_one_node,
14983 op1)))
14984 && !TREE_OVERFLOW (t1))
14985 return multiple_of_p (type, t1, bottom);
14987 return 0;
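/* E.g., TOP = x << 3 is recognized as a multiple of BOTTOM = 8:
   1 << 3 folds to 8 without overflow, and 8 is trivially a
   multiple of 8.  */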
14989 case NOP_EXPR:
14990 /* Can't handle conversions from non-integral or wider integral type. */
14991 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14992 || (TYPE_PRECISION (type)
14993 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14994 return 0;
14996 /* .. fall through ... */
14998 case SAVE_EXPR:
14999 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15001 case COND_EXPR:
15002 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15003 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15005 case INTEGER_CST:
15006 if (TREE_CODE (bottom) != INTEGER_CST
15007 || integer_zerop (bottom)
15008 || (TYPE_UNSIGNED (type)
15009 && (tree_int_cst_sgn (top) < 0
15010 || tree_int_cst_sgn (bottom) < 0)))
15011 return 0;
15012 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15013 top, bottom));
15015 default:
15016 return 0;
15020 /* Return true if CODE or TYPE is known to be non-negative. */
15022 static bool
15023 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15025 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15026 && truth_value_p (code))
15027 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15028 have a signed:1 type (where the values are -1 and 0). */
15029 return true;
15030 return false;
15033 /* Return true if (CODE OP0) is known to be non-negative. If the return
15034 value is based on the assumption that signed overflow is undefined,
15035 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15036 *STRICT_OVERFLOW_P. */
15038 bool
15039 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15040 bool *strict_overflow_p)
15042 if (TYPE_UNSIGNED (type))
15043 return true;
15045 switch (code)
15047 case ABS_EXPR:
15048 /* We can't return 1 if flag_wrapv is set because
15049 ABS_EXPR<INT_MIN> = INT_MIN. */
15050 if (!INTEGRAL_TYPE_P (type))
15051 return true;
15052 if (TYPE_OVERFLOW_UNDEFINED (type))
15054 *strict_overflow_p = true;
15055 return true;
15057 break;
15059 case NON_LVALUE_EXPR:
15060 case FLOAT_EXPR:
15061 case FIX_TRUNC_EXPR:
15062 return tree_expr_nonnegative_warnv_p (op0,
15063 strict_overflow_p);
15065 case NOP_EXPR:
15067 tree inner_type = TREE_TYPE (op0);
15068 tree outer_type = type;
15070 if (TREE_CODE (outer_type) == REAL_TYPE)
15072 if (TREE_CODE (inner_type) == REAL_TYPE)
15073 return tree_expr_nonnegative_warnv_p (op0,
15074 strict_overflow_p);
15075 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15077 if (TYPE_UNSIGNED (inner_type))
15078 return true;
15079 return tree_expr_nonnegative_warnv_p (op0,
15080 strict_overflow_p);
15083 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15085 if (TREE_CODE (inner_type) == REAL_TYPE)
15086 return tree_expr_nonnegative_warnv_p (op0,
15087 strict_overflow_p);
15088 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15089 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15090 && TYPE_UNSIGNED (inner_type);
15093 break;
15095 default:
15096 return tree_simple_nonnegative_warnv_p (code, type);
15099 /* We don't know sign of `t', so be conservative and return false. */
15100 return false;
15103 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15104 value is based on the assumption that signed overflow is undefined,
15105 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15106 *STRICT_OVERFLOW_P. */
15108 bool
15109 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15110 tree op1, bool *strict_overflow_p)
15112 if (TYPE_UNSIGNED (type))
15113 return true;
15115 switch (code)
15117 case POINTER_PLUS_EXPR:
15118 case PLUS_EXPR:
15119 if (FLOAT_TYPE_P (type))
15120 return (tree_expr_nonnegative_warnv_p (op0,
15121 strict_overflow_p)
15122 && tree_expr_nonnegative_warnv_p (op1,
15123 strict_overflow_p));
15125 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15126 both unsigned and at least 2 bits shorter than the result. */
15127 if (TREE_CODE (type) == INTEGER_TYPE
15128 && TREE_CODE (op0) == NOP_EXPR
15129 && TREE_CODE (op1) == NOP_EXPR)
15131 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15132 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15133 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15134 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15136 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15137 TYPE_PRECISION (inner2)) + 1;
15138 return prec < TYPE_PRECISION (type);
15141 break;
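/* Concrete instance (hypothetical types): for unsigned char x, y,
   (int) x + (int) y is at most 510, and MAX (8, 8) + 1 == 9 is
   less than 32, so the sum is known non-negative in a 32-bit
   int.  */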
15143 case MULT_EXPR:
15144 if (FLOAT_TYPE_P (type))
15146 /* x * x for floating point x is always non-negative. */
15147 if (operand_equal_p (op0, op1, 0))
15148 return true;
15149 return (tree_expr_nonnegative_warnv_p (op0,
15150 strict_overflow_p)
15151 && tree_expr_nonnegative_warnv_p (op1,
15152 strict_overflow_p));
15155 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15156 both unsigned and the sum of their precisions is less than the result's. */
15157 if (TREE_CODE (type) == INTEGER_TYPE
15158 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15159 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15161 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15162 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15163 : TREE_TYPE (op0);
15164 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15165 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15166 : TREE_TYPE (op1);
15168 bool unsigned0 = TYPE_UNSIGNED (inner0);
15169 bool unsigned1 = TYPE_UNSIGNED (inner1);
15171 if (TREE_CODE (op0) == INTEGER_CST)
15172 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15174 if (TREE_CODE (op1) == INTEGER_CST)
15175 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15177 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15178 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15180 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15181 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15182 : TYPE_PRECISION (inner0);
15184 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15185 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15186 : TYPE_PRECISION (inner1);
15188 return precision0 + precision1 < TYPE_PRECISION (type);
15191 return false;
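/* E.g., for hypothetical unsigned char x, y, the product
   (int) x * (int) y is at most 255 * 255 == 65025, and
   8 + 8 == 16 < 32, so it cannot wrap into the sign bit of a
   32-bit int.  */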
15193 case BIT_AND_EXPR:
15194 case MAX_EXPR:
15195 return (tree_expr_nonnegative_warnv_p (op0,
15196 strict_overflow_p)
15197 || tree_expr_nonnegative_warnv_p (op1,
15198 strict_overflow_p));
15200 case BIT_IOR_EXPR:
15201 case BIT_XOR_EXPR:
15202 case MIN_EXPR:
15203 case RDIV_EXPR:
15204 case TRUNC_DIV_EXPR:
15205 case CEIL_DIV_EXPR:
15206 case FLOOR_DIV_EXPR:
15207 case ROUND_DIV_EXPR:
15208 return (tree_expr_nonnegative_warnv_p (op0,
15209 strict_overflow_p)
15210 && tree_expr_nonnegative_warnv_p (op1,
15211 strict_overflow_p));
15213 case TRUNC_MOD_EXPR:
15214 case CEIL_MOD_EXPR:
15215 case FLOOR_MOD_EXPR:
15216 case ROUND_MOD_EXPR:
15217 return tree_expr_nonnegative_warnv_p (op0,
15218 strict_overflow_p);
15219 default:
15220 return tree_simple_nonnegative_warnv_p (code, type);
15223 /* We don't know sign of `t', so be conservative and return false. */
15224 return false;
15227 /* Return true if T is known to be non-negative. If the return
15228 value is based on the assumption that signed overflow is undefined,
15229 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15230 *STRICT_OVERFLOW_P. */
15232 bool
15233 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15235 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15236 return true;
15238 switch (TREE_CODE (t))
15240 case INTEGER_CST:
15241 return tree_int_cst_sgn (t) >= 0;
15243 case REAL_CST:
15244 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15246 case FIXED_CST:
15247 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15249 case COND_EXPR:
15250 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15251 strict_overflow_p)
15252 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15253 strict_overflow_p));
15254 default:
15255 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15256 TREE_TYPE (t));
15258 /* We don't know sign of `t', so be conservative and return false. */
15259 return false;
15262 /* Return true if T is known to be non-negative. If the return
15263 value is based on the assumption that signed overflow is undefined,
15264 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15265 *STRICT_OVERFLOW_P. */
15267 bool
15268 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15269 tree arg0, tree arg1, bool *strict_overflow_p)
15271 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15272 switch (DECL_FUNCTION_CODE (fndecl))
15274 CASE_FLT_FN (BUILT_IN_ACOS):
15275 CASE_FLT_FN (BUILT_IN_ACOSH):
15276 CASE_FLT_FN (BUILT_IN_CABS):
15277 CASE_FLT_FN (BUILT_IN_COSH):
15278 CASE_FLT_FN (BUILT_IN_ERFC):
15279 CASE_FLT_FN (BUILT_IN_EXP):
15280 CASE_FLT_FN (BUILT_IN_EXP10):
15281 CASE_FLT_FN (BUILT_IN_EXP2):
15282 CASE_FLT_FN (BUILT_IN_FABS):
15283 CASE_FLT_FN (BUILT_IN_FDIM):
15284 CASE_FLT_FN (BUILT_IN_HYPOT):
15285 CASE_FLT_FN (BUILT_IN_POW10):
15286 CASE_INT_FN (BUILT_IN_FFS):
15287 CASE_INT_FN (BUILT_IN_PARITY):
15288 CASE_INT_FN (BUILT_IN_POPCOUNT):
15289 case BUILT_IN_BSWAP32:
15290 case BUILT_IN_BSWAP64:
15291 /* Always true. */
15292 return true;
15294 CASE_FLT_FN (BUILT_IN_SQRT):
15295 /* sqrt(-0.0) is -0.0. */
15296 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15297 return true;
15298 return tree_expr_nonnegative_warnv_p (arg0,
15299 strict_overflow_p);
15301 CASE_FLT_FN (BUILT_IN_ASINH):
15302 CASE_FLT_FN (BUILT_IN_ATAN):
15303 CASE_FLT_FN (BUILT_IN_ATANH):
15304 CASE_FLT_FN (BUILT_IN_CBRT):
15305 CASE_FLT_FN (BUILT_IN_CEIL):
15306 CASE_FLT_FN (BUILT_IN_ERF):
15307 CASE_FLT_FN (BUILT_IN_EXPM1):
15308 CASE_FLT_FN (BUILT_IN_FLOOR):
15309 CASE_FLT_FN (BUILT_IN_FMOD):
15310 CASE_FLT_FN (BUILT_IN_FREXP):
15311 CASE_FLT_FN (BUILT_IN_ICEIL):
15312 CASE_FLT_FN (BUILT_IN_IFLOOR):
15313 CASE_FLT_FN (BUILT_IN_IRINT):
15314 CASE_FLT_FN (BUILT_IN_IROUND):
15315 CASE_FLT_FN (BUILT_IN_LCEIL):
15316 CASE_FLT_FN (BUILT_IN_LDEXP):
15317 CASE_FLT_FN (BUILT_IN_LFLOOR):
15318 CASE_FLT_FN (BUILT_IN_LLCEIL):
15319 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15320 CASE_FLT_FN (BUILT_IN_LLRINT):
15321 CASE_FLT_FN (BUILT_IN_LLROUND):
15322 CASE_FLT_FN (BUILT_IN_LRINT):
15323 CASE_FLT_FN (BUILT_IN_LROUND):
15324 CASE_FLT_FN (BUILT_IN_MODF):
15325 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15326 CASE_FLT_FN (BUILT_IN_RINT):
15327 CASE_FLT_FN (BUILT_IN_ROUND):
15328 CASE_FLT_FN (BUILT_IN_SCALB):
15329 CASE_FLT_FN (BUILT_IN_SCALBLN):
15330 CASE_FLT_FN (BUILT_IN_SCALBN):
15331 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15332 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15333 CASE_FLT_FN (BUILT_IN_SINH):
15334 CASE_FLT_FN (BUILT_IN_TANH):
15335 CASE_FLT_FN (BUILT_IN_TRUNC):
15336 /* True if the 1st argument is nonnegative. */
15337 return tree_expr_nonnegative_warnv_p (arg0,
15338 strict_overflow_p);
15340 CASE_FLT_FN (BUILT_IN_FMAX):
15341 /* True if the 1st OR 2nd arguments are nonnegative. */
15342 return (tree_expr_nonnegative_warnv_p (arg0,
15343 strict_overflow_p)
15344 || (tree_expr_nonnegative_warnv_p (arg1,
15345 strict_overflow_p)));
15347 CASE_FLT_FN (BUILT_IN_FMIN):
15348 /* True if the 1st AND 2nd arguments are nonnegative. */
15349 return (tree_expr_nonnegative_warnv_p (arg0,
15350 strict_overflow_p)
15351 && (tree_expr_nonnegative_warnv_p (arg1,
15352 strict_overflow_p)));
15354 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15355 /* True if the 2nd argument is nonnegative. */
15356 return tree_expr_nonnegative_warnv_p (arg1,
15357 strict_overflow_p);
15359 CASE_FLT_FN (BUILT_IN_POWI):
15360 /* True if the 1st argument is nonnegative or the second
15361 argument is an even integer. */
15362 if (TREE_CODE (arg1) == INTEGER_CST
15363 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15364 return true;
15365 return tree_expr_nonnegative_warnv_p (arg0,
15366 strict_overflow_p);
15368 CASE_FLT_FN (BUILT_IN_POW):
15369 /* True if the 1st argument is nonnegative or the second
15370 argument is an even-integer-valued real. */
15371 if (TREE_CODE (arg1) == REAL_CST)
15373 REAL_VALUE_TYPE c;
15374 HOST_WIDE_INT n;
15376 c = TREE_REAL_CST (arg1);
15377 n = real_to_integer (&c);
15378 if ((n & 1) == 0)
15380 REAL_VALUE_TYPE cint;
15381 real_from_integer (&cint, VOIDmode, n,
15382 n < 0 ? -1 : 0, 0);
15383 if (real_identical (&c, &cint))
15384 return true;
15387 return tree_expr_nonnegative_warnv_p (arg0,
15388 strict_overflow_p);
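/* E.g., pow (x, 2.0) is treated as non-negative for any x because
   2.0 is an even-integer-valued REAL_CST, whereas pow (x, 3.0)
   requires x itself to be non-negative.  */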
15390 default:
15391 break;
15393 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15394 type);
15397 /* Return true if T is known to be non-negative. If the return
15398 value is based on the assumption that signed overflow is undefined,
15399 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15400 *STRICT_OVERFLOW_P. */
15402 bool
15403 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15405 enum tree_code code = TREE_CODE (t);
15406 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15407 return true;
15409 switch (code)
15411 case TARGET_EXPR:
15413 tree temp = TARGET_EXPR_SLOT (t);
15414 t = TARGET_EXPR_INITIAL (t);
15416 /* If the initializer is non-void, then it's a normal expression
15417 that will be assigned to the slot. */
15418 if (!VOID_TYPE_P (t))
15419 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15421 /* Otherwise, the initializer sets the slot in some way. One common
15422 way is an assignment statement at the end of the initializer. */
15423 while (1)
15425 if (TREE_CODE (t) == BIND_EXPR)
15426 t = expr_last (BIND_EXPR_BODY (t));
15427 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15428 || TREE_CODE (t) == TRY_CATCH_EXPR)
15429 t = expr_last (TREE_OPERAND (t, 0));
15430 else if (TREE_CODE (t) == STATEMENT_LIST)
15431 t = expr_last (t);
15432 else
15433 break;
15435 if (TREE_CODE (t) == MODIFY_EXPR
15436 && TREE_OPERAND (t, 0) == temp)
15437 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15438 strict_overflow_p);
15440 return false;
15443 case CALL_EXPR:
15445 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15446 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15448 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15449 get_callee_fndecl (t),
15450 arg0,
15451 arg1,
15452 strict_overflow_p);
15454 case COMPOUND_EXPR:
15455 case MODIFY_EXPR:
15456 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15457 strict_overflow_p);
15458 case BIND_EXPR:
15459 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15460 strict_overflow_p);
15461 case SAVE_EXPR:
15462 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15463 strict_overflow_p);
15465 default:
15466 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15467 TREE_TYPE (t));
15470 /* We don't know sign of `t', so be conservative and return false. */
15471 return false;
15474 /* Return true if T is known to be non-negative. If the return
15475 value is based on the assumption that signed overflow is undefined,
15476 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15477 *STRICT_OVERFLOW_P. */
15479 bool
15480 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15482 enum tree_code code;
15483 if (t == error_mark_node)
15484 return false;
15486 code = TREE_CODE (t);
15487 switch (TREE_CODE_CLASS (code))
15489 case tcc_binary:
15490 case tcc_comparison:
15491 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15492 TREE_TYPE (t),
15493 TREE_OPERAND (t, 0),
15494 TREE_OPERAND (t, 1),
15495 strict_overflow_p);
15497 case tcc_unary:
15498 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15499 TREE_TYPE (t),
15500 TREE_OPERAND (t, 0),
15501 strict_overflow_p);
15503 case tcc_constant:
15504 case tcc_declaration:
15505 case tcc_reference:
15506 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15508 default:
15509 break;
15512 switch (code)
15514 case TRUTH_AND_EXPR:
15515 case TRUTH_OR_EXPR:
15516 case TRUTH_XOR_EXPR:
15517 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15518 TREE_TYPE (t),
15519 TREE_OPERAND (t, 0),
15520 TREE_OPERAND (t, 1),
15521 strict_overflow_p);
15522 case TRUTH_NOT_EXPR:
15523 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15524 TREE_TYPE (t),
15525 TREE_OPERAND (t, 0),
15526 strict_overflow_p);
15528 case COND_EXPR:
15529 case CONSTRUCTOR:
15530 case OBJ_TYPE_REF:
15531 case ASSERT_EXPR:
15532 case ADDR_EXPR:
15533 case WITH_SIZE_EXPR:
15534 case SSA_NAME:
15535 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15537 default:
15538 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15542 /* Return true if `t' is known to be non-negative. Handle warnings
15543 about undefined signed overflow. */
15545 bool
15546 tree_expr_nonnegative_p (tree t)
15548 bool ret, strict_overflow_p;
15550 strict_overflow_p = false;
15551 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15552 if (strict_overflow_p)
15553 fold_overflow_warning (("assuming signed overflow does not occur when "
15554 "determining that expression is always "
15555 "non-negative"),
15556 WARN_STRICT_OVERFLOW_MISC);
15557 return ret;
15561 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15562 For floating point we further ensure that T is not denormal.
15563 Similar logic is present in nonzero_address in rtlanal.h.
15565 If the return value is based on the assumption that signed overflow
15566 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15567 change *STRICT_OVERFLOW_P. */
15569 bool
15570 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15571 bool *strict_overflow_p)
15573 switch (code)
15575 case ABS_EXPR:
15576 return tree_expr_nonzero_warnv_p (op0,
15577 strict_overflow_p);
15579 case NOP_EXPR:
15581 tree inner_type = TREE_TYPE (op0);
15582 tree outer_type = type;
15584 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15585 && tree_expr_nonzero_warnv_p (op0,
15586 strict_overflow_p));
15588 break;
15590 case NON_LVALUE_EXPR:
15591 return tree_expr_nonzero_warnv_p (op0,
15592 strict_overflow_p);
15594 default:
15595 break;
15598 return false;
15601 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15602 For floating point we further ensure that T is not denormal.
15603 Similar logic is present in nonzero_address in rtlanal.h.
15605 If the return value is based on the assumption that signed overflow
15606 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15607 change *STRICT_OVERFLOW_P. */
15609 bool
15610 tree_binary_nonzero_warnv_p (enum tree_code code,
15611 tree type,
15612 tree op0,
15613 tree op1, bool *strict_overflow_p)
15615 bool sub_strict_overflow_p;
15616 switch (code)
15618 case POINTER_PLUS_EXPR:
15619 case PLUS_EXPR:
15620 if (TYPE_OVERFLOW_UNDEFINED (type))
15622 /* In the presence of negative values it is hard
15623 to say anything definite. */
15624 sub_strict_overflow_p = false;
15625 if (!tree_expr_nonnegative_warnv_p (op0,
15626 &sub_strict_overflow_p)
15627 || !tree_expr_nonnegative_warnv_p (op1,
15628 &sub_strict_overflow_p))
15629 return false;
15630 /* One of the operands must be positive and the other non-negative. */
15631 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15632 overflows, on a twos-complement machine the sum of two
15633 nonnegative numbers can never be zero. */
15634 return (tree_expr_nonzero_warnv_p (op0,
15635 strict_overflow_p)
15636 || tree_expr_nonzero_warnv_p (op1,
15637 strict_overflow_p));
15639 break;
15641 case MULT_EXPR:
15642 if (TYPE_OVERFLOW_UNDEFINED (type))
15644 if (tree_expr_nonzero_warnv_p (op0,
15645 strict_overflow_p)
15646 && tree_expr_nonzero_warnv_p (op1,
15647 strict_overflow_p))
15649 *strict_overflow_p = true;
15650 return true;
15653 break;
15655 case MIN_EXPR:
15656 sub_strict_overflow_p = false;
15657 if (tree_expr_nonzero_warnv_p (op0,
15658 &sub_strict_overflow_p)
15659 && tree_expr_nonzero_warnv_p (op1,
15660 &sub_strict_overflow_p))
15662 if (sub_strict_overflow_p)
15663 *strict_overflow_p = true;
15665 break;
15667 case MAX_EXPR:
15668 sub_strict_overflow_p = false;
15669 if (tree_expr_nonzero_warnv_p (op0,
15670 &sub_strict_overflow_p))
15672 if (sub_strict_overflow_p)
15673 *strict_overflow_p = true;
15675 /* When both operands are nonzero, then MAX must be too. */
15676 if (tree_expr_nonzero_warnv_p (op1,
15677 strict_overflow_p))
15678 return true;
15680 /* MAX where operand 0 is positive is positive. */
15681 return tree_expr_nonnegative_warnv_p (op0,
15682 strict_overflow_p);
15684 /* MAX where operand 1 is positive is positive. */
15685 else if (tree_expr_nonzero_warnv_p (op1,
15686 &sub_strict_overflow_p)
15687 && tree_expr_nonnegative_warnv_p (op1,
15688 &sub_strict_overflow_p))
15690 if (sub_strict_overflow_p)
15691 *strict_overflow_p = true;
15692 return true;
15694 break;
15696 case BIT_IOR_EXPR:
15697 return (tree_expr_nonzero_warnv_p (op1,
15698 strict_overflow_p)
15699 || tree_expr_nonzero_warnv_p (op0,
15700 strict_overflow_p));
15702 default:
15703 break;
15706 return false;
15709 /* Return true when T is an address and is known to be nonzero.
15710 For floating point we further ensure that T is not denormal.
15711 Similar logic is present in nonzero_address in rtlanal.h.
15713 If the return value is based on the assumption that signed overflow
15714 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15715 change *STRICT_OVERFLOW_P. */
15717 bool
15718 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15720 bool sub_strict_overflow_p;
15721 switch (TREE_CODE (t))
15723 case INTEGER_CST:
15724 return !integer_zerop (t);
15726 case ADDR_EXPR:
15728 tree base = TREE_OPERAND (t, 0);
15729 if (!DECL_P (base))
15730 base = get_base_address (base);
15732 if (!base)
15733 return false;
15735 /* Weak declarations may link to NULL. Other things may also be NULL,
15736 so protect with -fdelete-null-pointer-checks; variables
15737 allocated on the stack, however, are never NULL. */
15738 if (DECL_P (base)
15739 && (flag_delete_null_pointer_checks
15740 || (DECL_CONTEXT (base)
15741 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15742 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15743 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15745 /* Constants are never weak. */
15746 if (CONSTANT_CLASS_P (base))
15747 return true;
15749 return false;
15752 case COND_EXPR:
15753 sub_strict_overflow_p = false;
15754 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15755 &sub_strict_overflow_p)
15756 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15757 &sub_strict_overflow_p))
15759 if (sub_strict_overflow_p)
15760 *strict_overflow_p = true;
15761 return true;
15763 break;
15765 default:
15766 break;
15768 return false;
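/* Editorial example for the ADDR_EXPR case above (illustration only):
   given

     int x;
     extern int w __attribute__ ((weak));

   "&x != 0" folds to true under -fdelete-null-pointer-checks (the
   default), while "&w != 0" does not fold, since a weak symbol may
   resolve to NULL.  Variables on the stack are provably nonzero even
   without the flag.  */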
15771 /* Return true when the expression T is known to be nonzero. Only
15772 integral and pointer types are handled. Similar logic is present
15773 in nonzero_address in rtlanal.c.
15775 If the return value is based on the assumption that signed overflow
15776 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15777 change *STRICT_OVERFLOW_P. */
15779 bool
15780 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15782 tree type = TREE_TYPE (t);
15783 enum tree_code code;
15785 /* Doing something useful for floating point would need more work. */
15786 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15787 return false;
15789 code = TREE_CODE (t);
15790 switch (TREE_CODE_CLASS (code))
15792 case tcc_unary:
15793 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15794 strict_overflow_p);
15795 case tcc_binary:
15796 case tcc_comparison:
15797 return tree_binary_nonzero_warnv_p (code, type,
15798 TREE_OPERAND (t, 0),
15799 TREE_OPERAND (t, 1),
15800 strict_overflow_p);
15801 case tcc_constant:
15802 case tcc_declaration:
15803 case tcc_reference:
15804 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15806 default:
15807 break;
15810 switch (code)
15812 case TRUTH_NOT_EXPR:
15813 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15814 strict_overflow_p);
15816 case TRUTH_AND_EXPR:
15817 case TRUTH_OR_EXPR:
15818 case TRUTH_XOR_EXPR:
15819 return tree_binary_nonzero_warnv_p (code, type,
15820 TREE_OPERAND (t, 0),
15821 TREE_OPERAND (t, 1),
15822 strict_overflow_p);
15824 case COND_EXPR:
15825 case CONSTRUCTOR:
15826 case OBJ_TYPE_REF:
15827 case ASSERT_EXPR:
15828 case ADDR_EXPR:
15829 case WITH_SIZE_EXPR:
15830 case SSA_NAME:
15831 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15833 case COMPOUND_EXPR:
15834 case MODIFY_EXPR:
15835 case BIND_EXPR:
15836 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15837 strict_overflow_p);
15839 case SAVE_EXPR:
15840 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15841 strict_overflow_p);
15843 case CALL_EXPR:
15844 return alloca_call_p (t);
15846 default:
15847 break;
15849 return false;
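/* Editorial note: the class dispatch above routes e.g. "x | 4"
   (tcc_binary, BIT_IOR_EXPR) to tree_binary_nonzero_warnv_p, which
   proves it nonzero from the nonzero constant operand alone.  */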
15852 /* Return true when T is known to be nonzero. Emits the -Wstrict-overflow
15853 warning when that determination assumes undefined signed overflow. */
15855 bool
15856 tree_expr_nonzero_p (tree t)
15858 bool ret, strict_overflow_p;
15860 strict_overflow_p = false;
15861 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15862 if (strict_overflow_p)
15863 fold_overflow_warning (("assuming signed overflow does not occur when "
15864 "determining that expression is always "
15865 "non-zero"),
15866 WARN_STRICT_OVERFLOW_MISC);
15867 return ret;
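/* Editorial usage sketch (a hypothetical caller, not code from this
   file): a fold of "arg0 != 0" might do

     if (tree_expr_nonzero_p (arg0))
       return constant_boolean_node (code == NE_EXPR, type);

   folding the comparison to true for NE_EXPR and false for EQ_EXPR,
   while tree_expr_nonzero_p itself emits the -Wstrict-overflow note
   whenever the proof relied on undefined signed overflow.  */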
15870 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15871 attempt to fold the expression to a constant without modifying TYPE,
15872 OP0 or OP1.
15874 If the expression could be simplified to a constant, then return
15875 the constant. If the expression cannot be simplified to a
15876 constant, then return NULL_TREE. */
15878 tree
15879 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15881 tree tem = fold_binary (code, type, op0, op1);
15882 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15885 /* Given the components of a unary expression CODE, TYPE and OP0,
15886 attempt to fold the expression to a constant without modifying
15887 TYPE or OP0.
15889 If the expression could be simplified to a constant, then return
15890 the constant. If the expression cannot be simplified to a
15891 constant, then return NULL_TREE. */
15893 tree
15894 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15896 tree tem = fold_unary (code, type, op0);
15897 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
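/* Editorial sketch exercising the two wrappers above; guarded out
   because it is illustration rather than part of GCC.  The entry
   points used are the usual ones from tree.h.  */
#if 0
static void
fold_to_constant_examples (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree seven = build_int_cst (integer_type_node, 7);

  /* 5 + 7 simplifies to the INTEGER_CST 12 ...  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				      five, seven);
  /* ... and -(5) to the INTEGER_CST -5.  An operand that is not
     constant (a VAR_DECL, say) would make either wrapper return
     NULL_TREE instead.  */
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, five);

  gcc_assert (sum != NULL_TREE && neg != NULL_TREE);
}
#endif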
15900 /* If EXP represents referencing an element in a constant string
15901 (either via pointer arithmetic or array indexing), return the
15902 tree representing the value accessed, otherwise return NULL. */
15904 tree
15905 fold_read_from_constant_string (tree exp)
15907 if ((TREE_CODE (exp) == INDIRECT_REF
15908 || TREE_CODE (exp) == ARRAY_REF)
15909 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15911 tree exp1 = TREE_OPERAND (exp, 0);
15912 tree index;
15913 tree string;
15914 location_t loc = EXPR_LOCATION (exp);
15916 if (TREE_CODE (exp) == INDIRECT_REF)
15917 string = string_constant (exp1, &index);
15918 else
15920 tree low_bound = array_ref_low_bound (exp);
15921 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15923 /* Optimize the special case of a zero lower bound.
15925 We convert the low_bound to sizetype to avoid some problems
15926 with constant folding. (E.g. suppose the lower bound is 1,
15927 and its mode is QI. Without the conversion, (ARRAY
15928 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15929 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15930 if (! integer_zerop (low_bound))
15931 index = size_diffop_loc (loc, index,
15932 fold_convert_loc (loc, sizetype, low_bound));
15934 string = exp1;
15937 if (string
15938 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15939 && TREE_CODE (string) == STRING_CST
15940 && TREE_CODE (index) == INTEGER_CST
15941 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15942 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15943 == MODE_INT)
15944 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15945 return build_int_cst_type (TREE_TYPE (exp),
15946 (TREE_STRING_POINTER (string)
15947 [TREE_INT_CST_LOW (index)]));
15949 return NULL;
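/* Editorial example: the expression "hello"[1] is an ARRAY_REF into a
   STRING_CST whose element type has single-byte integer mode, with a
   constant index below TREE_STRING_LENGTH, so the checks above succeed
   and the read folds to the character constant 'e'.  */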
15952 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15953 an integer, real, or fixed-point constant.
15955 TYPE is the type of the result. */
15957 static tree
15958 fold_negate_const (tree arg0, tree type)
15960 tree t = NULL_TREE;
15962 switch (TREE_CODE (arg0))
15964 case INTEGER_CST:
15966 double_int val = tree_to_double_int (arg0);
15967 bool overflow;
15968 val = val.neg_with_overflow (&overflow);
15969 t = force_fit_type_double (type, val, 1,
15970 (overflow | TREE_OVERFLOW (arg0))
15971 && !TYPE_UNSIGNED (type));
15972 break;
15975 case REAL_CST:
15976 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15977 break;
15979 case FIXED_CST:
15981 FIXED_VALUE_TYPE f;
15982 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15983 &(TREE_FIXED_CST (arg0)), NULL,
15984 TYPE_SATURATING (type));
15985 t = build_fixed (type, f);
15986 /* Propagate overflow flags. */
15987 if (overflow_p | TREE_OVERFLOW (arg0))
15988 TREE_OVERFLOW (t) = 1;
15989 break;
15992 default:
15993 gcc_unreachable ();
15996 return t;
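/* Editorial example: negating the most negative value of a signed type
   wraps in two's complement -- for 32-bit int, -(-2147483648) is
   -2147483648 again -- so neg_with_overflow reports overflow and, for
   signed types, force_fit_type_double marks the result with
   TREE_OVERFLOW.  */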
15999 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16000 an integer or a real constant.
16002 TYPE is the type of the result. */
16004 tree
16005 fold_abs_const (tree arg0, tree type)
16007 tree t = NULL_TREE;
16009 switch (TREE_CODE (arg0))
16011 case INTEGER_CST:
16013 double_int val = tree_to_double_int (arg0);
16015 /* If the value is unsigned or non-negative, then the absolute value
16016 is the same as the ordinary value. */
16017 if (TYPE_UNSIGNED (type)
16018 || !val.is_negative ())
16019 t = arg0;
16021 /* If the value is negative, then the absolute value is
16022 its negation. */
16023 else
16025 bool overflow;
16026 val = val.neg_with_overflow (&overflow);
16027 t = force_fit_type_double (type, val, -1,
16028 overflow | TREE_OVERFLOW (arg0));
16031 break;
16033 case REAL_CST:
16034 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16035 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16036 else
16037 t = arg0;
16038 break;
16040 default:
16041 gcc_unreachable ();
16044 return t;
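/* Editorial example: fold_abs_const maps the INTEGER_CST -5 to 5 via
   the negation path above, returns unsigned and nonnegative values
   unchanged, and flags abs of the most negative signed value with
   TREE_OVERFLOW, just as fold_negate_const does.  */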
16047 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16048 constant. TYPE is the type of the result. */
16050 static tree
16051 fold_not_const (const_tree arg0, tree type)
16053 double_int val;
16055 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16057 val = ~tree_to_double_int (arg0);
16058 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
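/* Editorial example: fold_not_const on the 32-bit INTEGER_CST 5
   (0x00000005) yields -6 (0xfffffffa): the double_int complement,
   refitted to TYPE.  */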
16061 /* Given CODE, a relational operator, the target type, TYPE and two
16062 constant operands OP0 and OP1, return the result of the
16063 relational operation. If the result is not a compile time
16064 constant, then return NULL_TREE. */
16066 static tree
16067 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16069 int result, invert;
16071 /* From here on, the only cases we handle are when the result is
16072 known to be a constant. */
16074 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16076 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16077 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16079 /* Handle the cases where either operand is a NaN. */
16080 if (real_isnan (c0) || real_isnan (c1))
16082 switch (code)
16084 case EQ_EXPR:
16085 case ORDERED_EXPR:
16086 result = 0;
16087 break;
16089 case NE_EXPR:
16090 case UNORDERED_EXPR:
16091 case UNLT_EXPR:
16092 case UNLE_EXPR:
16093 case UNGT_EXPR:
16094 case UNGE_EXPR:
16095 case UNEQ_EXPR:
16096 result = 1;
16097 break;
16099 case LT_EXPR:
16100 case LE_EXPR:
16101 case GT_EXPR:
16102 case GE_EXPR:
16103 case LTGT_EXPR:
16104 if (flag_trapping_math)
16105 return NULL_TREE;
16106 result = 0;
16107 break;
16109 default:
16110 gcc_unreachable ();
16113 return constant_boolean_node (result, type);
16116 return constant_boolean_node (real_compare (code, c0, c1), type);
16119 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16121 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16122 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16123 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16126 /* Handle equality/inequality of complex constants. */
16127 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16129 tree rcond = fold_relational_const (code, type,
16130 TREE_REALPART (op0),
16131 TREE_REALPART (op1));
16132 tree icond = fold_relational_const (code, type,
16133 TREE_IMAGPART (op0),
16134 TREE_IMAGPART (op1));
16135 if (code == EQ_EXPR)
16136 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16137 else if (code == NE_EXPR)
16138 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16139 else
16140 return NULL_TREE;
16143 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16145 unsigned count = VECTOR_CST_NELTS (op0);
16146 tree *elts = XALLOCAVEC (tree, count);
16147 gcc_assert (VECTOR_CST_NELTS (op1) == count
16148 && TYPE_VECTOR_SUBPARTS (type) == count);
16150 for (unsigned i = 0; i < count; i++)
16152 tree elem_type = TREE_TYPE (type);
16153 tree elem0 = VECTOR_CST_ELT (op0, i);
16154 tree elem1 = VECTOR_CST_ELT (op1, i);
16156 tree tem = fold_relational_const (code, elem_type,
16157 elem0, elem1);
16159 if (tem == NULL_TREE)
16160 return NULL_TREE;
16162 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16165 return build_vector (type, elts);
16168 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16170 To compute GT, swap the arguments and do LT.
16171 To compute GE, do LT and invert the result.
16172 To compute LE, swap the arguments, do LT and invert the result.
16173 To compute NE, do EQ and invert the result.
16175 Therefore, the code below must handle only EQ and LT. */
16177 if (code == LE_EXPR || code == GT_EXPR)
16179 tree tem = op0;
16180 op0 = op1;
16181 op1 = tem;
16182 code = swap_tree_comparison (code);
16185 /* Note that it is safe to invert for real values here because we
16186 have already handled the one case where it matters (NaN operands). */
16188 invert = 0;
16189 if (code == NE_EXPR || code == GE_EXPR)
16191 invert = 1;
16192 code = invert_tree_comparison (code, false);
16195 /* Compute a result for LT or EQ if the arguments permit;
16196 otherwise return NULL_TREE. */
16197 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16199 if (code == EQ_EXPR)
16200 result = tree_int_cst_equal (op0, op1);
16201 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16202 result = INT_CST_LT_UNSIGNED (op0, op1);
16203 else
16204 result = INT_CST_LT (op0, op1);
16206 else
16207 return NULL_TREE;
16209 if (invert)
16210 result ^= 1;
16211 return constant_boolean_node (result, type);
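/* Editorial example of the normalization above: folding "3 >= 5"
   rewrites GE_EXPR as LT_EXPR with the result inverted; INT_CST_LT on
   the constants 3 and 5 yields 1, which inverts to 0, so the
   comparison folds to boolean false.  */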
16214 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16215 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16216 itself. */
16218 tree
16219 fold_build_cleanup_point_expr (tree type, tree expr)
16221 /* If the expression does not have side effects then we don't have to wrap
16222 it with a cleanup point expression. */
16223 if (!TREE_SIDE_EFFECTS (expr))
16224 return expr;
16226 /* If the expression is a return, check whether the expression inside
16227 the return (or, when that is a modify expression, its right-hand
16228 side) has side effects. If it does not, we don't need to wrap the
16229 expression in a cleanup point expression. Note we don't check the
16230 left-hand side of the modify because it should always be the result decl. */
16231 if (TREE_CODE (expr) == RETURN_EXPR)
16233 tree op = TREE_OPERAND (expr, 0);
16234 if (!op || !TREE_SIDE_EFFECTS (op))
16235 return expr;
16236 op = TREE_OPERAND (op, 1);
16237 if (!TREE_SIDE_EFFECTS (op))
16238 return expr;
16241 return build1 (CLEANUP_POINT_EXPR, type, expr);
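/* Editorial example: for "return x;" the right-hand side of the
   implied modify has no side effects, so the RETURN_EXPR is returned
   unwrapped; for "return f ();" it does, so the expression is wrapped
   in a CLEANUP_POINT_EXPR.  */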
16244 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16245 of an indirection through OP0, or NULL_TREE if no simplification is
16246 possible. */
16248 tree
16249 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16251 tree sub = op0;
16252 tree subtype;
16254 STRIP_NOPS (sub);
16255 subtype = TREE_TYPE (sub);
16256 if (!POINTER_TYPE_P (subtype))
16257 return NULL_TREE;
16259 if (TREE_CODE (sub) == ADDR_EXPR)
16261 tree op = TREE_OPERAND (sub, 0);
16262 tree optype = TREE_TYPE (op);
16263 /* *&CONST_DECL folds to the value of the const decl. */
16264 if (TREE_CODE (op) == CONST_DECL)
16265 return DECL_INITIAL (op);
16266 /* *&p => p; make sure to handle *&"str"[cst] here. */
16267 if (type == optype)
16269 tree fop = fold_read_from_constant_string (op);
16270 if (fop)
16271 return fop;
16272 else
16273 return op;
16275 /* *(foo *)&fooarray => fooarray[0] */
16276 else if (TREE_CODE (optype) == ARRAY_TYPE
16277 && type == TREE_TYPE (optype)
16278 && (!in_gimple_form
16279 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16281 tree type_domain = TYPE_DOMAIN (optype);
16282 tree min_val = size_zero_node;
16283 if (type_domain && TYPE_MIN_VALUE (type_domain))
16284 min_val = TYPE_MIN_VALUE (type_domain);
16285 if (in_gimple_form
16286 && TREE_CODE (min_val) != INTEGER_CST)
16287 return NULL_TREE;
16288 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16289 NULL_TREE, NULL_TREE);
16291 /* *(foo *)&complexfoo => __real__ complexfoo */
16292 else if (TREE_CODE (optype) == COMPLEX_TYPE
16293 && type == TREE_TYPE (optype))
16294 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16295 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16296 else if (TREE_CODE (optype) == VECTOR_TYPE
16297 && type == TREE_TYPE (optype))
16299 tree part_width = TYPE_SIZE (type);
16300 tree index = bitsize_int (0);
16301 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16305 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16306 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16308 tree op00 = TREE_OPERAND (sub, 0);
16309 tree op01 = TREE_OPERAND (sub, 1);
16311 STRIP_NOPS (op00);
16312 if (TREE_CODE (op00) == ADDR_EXPR)
16314 tree op00type;
16315 op00 = TREE_OPERAND (op00, 0);
16316 op00type = TREE_TYPE (op00);
16318 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16319 if (TREE_CODE (op00type) == VECTOR_TYPE
16320 && type == TREE_TYPE (op00type))
16322 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16323 tree part_width = TYPE_SIZE (type);
16324 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16325 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16326 tree index = bitsize_int (indexi);
16328 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16329 return fold_build3_loc (loc,
16330 BIT_FIELD_REF, type, op00,
16331 part_width, index);
16334 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16335 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16336 && type == TREE_TYPE (op00type))
16338 tree size = TYPE_SIZE_UNIT (type);
16339 if (tree_int_cst_equal (size, op01))
16340 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16342 /* ((foo *)&fooarray)[1] => fooarray[1] */
16343 else if (TREE_CODE (op00type) == ARRAY_TYPE
16344 && type == TREE_TYPE (op00type))
16346 tree type_domain = TYPE_DOMAIN (op00type);
16347 tree min_val = size_zero_node;
16348 if (type_domain && TYPE_MIN_VALUE (type_domain))
16349 min_val = TYPE_MIN_VALUE (type_domain);
16350 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16351 TYPE_SIZE_UNIT (type));
16352 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16353 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16354 NULL_TREE, NULL_TREE);
16359 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16360 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16361 && type == TREE_TYPE (TREE_TYPE (subtype))
16362 && (!in_gimple_form
16363 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16365 tree type_domain;
16366 tree min_val = size_zero_node;
16367 sub = build_fold_indirect_ref_loc (loc, sub);
16368 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16369 if (type_domain && TYPE_MIN_VALUE (type_domain))
16370 min_val = TYPE_MIN_VALUE (type_domain);
16371 if (in_gimple_form
16372 && TREE_CODE (min_val) != INTEGER_CST)
16373 return NULL_TREE;
16374 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16375 NULL_TREE);
16378 return NULL_TREE;
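/* Editorial worked example for the ARRAY_TYPE case above: with
   "int a[4];" on a target with 4-byte int, dereferencing
   POINTER_PLUS_EXPR (ADDR_EXPR a, 8) divides the byte offset 8 by
   TYPE_SIZE_UNIT (int) == 4 (an EXACT_DIV_EXPR), giving index 2 and
   hence the ARRAY_REF a[2].  */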
16381 /* Builds an expression for an indirection through T, simplifying some
16382 cases. */
16384 tree
16385 build_fold_indirect_ref_loc (location_t loc, tree t)
16387 tree type = TREE_TYPE (TREE_TYPE (t));
16388 tree sub = fold_indirect_ref_1 (loc, type, t);
16390 if (sub)
16391 return sub;
16393 return build1_loc (loc, INDIRECT_REF, type, t);
16396 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16398 tree
16399 fold_indirect_ref_loc (location_t loc, tree t)
16401 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16403 if (sub)
16404 return sub;
16405 else
16406 return t;
16409 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16410 whose result is ignored. The type of the returned tree need not be
16411 the same as the original expression. */
16413 tree
16414 fold_ignored_result (tree t)
16416 if (!TREE_SIDE_EFFECTS (t))
16417 return integer_zero_node;
16419 for (;;)
16420 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16422 case tcc_unary:
16423 t = TREE_OPERAND (t, 0);
16424 break;
16426 case tcc_binary:
16427 case tcc_comparison:
16428 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16429 t = TREE_OPERAND (t, 0);
16430 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16431 t = TREE_OPERAND (t, 1);
16432 else
16433 return t;
16434 break;
16436 case tcc_expression:
16437 switch (TREE_CODE (t))
16439 case COMPOUND_EXPR:
16440 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16441 return t;
16442 t = TREE_OPERAND (t, 0);
16443 break;
16445 case COND_EXPR:
16446 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16447 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16448 return t;
16449 t = TREE_OPERAND (t, 0);
16450 break;
16452 default:
16453 return t;
16455 break;
16457 default:
16458 return t;
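/* Editorial example: when the value of "a + b++" is ignored, the
   tcc_binary case above keeps only the operand with side effects, so
   the result reduces to "b++"; likewise a COMPOUND_EXPR "(e1, e2)"
   with a side-effect-free e2 continues stripping through e1.  */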
16462 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16463 This can only be applied to objects of a sizetype. */
16465 tree
16466 round_up_loc (location_t loc, tree value, int divisor)
16468 tree div = NULL_TREE;
16470 gcc_assert (divisor > 0);
16471 if (divisor == 1)
16472 return value;
16474 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16475 have to do anything. Only do this when VALUE is not a constant,
16476 because for a constant this check is more expensive than simply
16477 performing the rounding. */
16478 if (TREE_CODE (value) != INTEGER_CST)
16480 div = build_int_cst (TREE_TYPE (value), divisor);
16482 if (multiple_of_p (TREE_TYPE (value), value, div))
16483 return value;
16486 /* If divisor is a power of two, simplify this to bit manipulation. */
16487 if (divisor == (divisor & -divisor))
16489 if (TREE_CODE (value) == INTEGER_CST)
16491 double_int val = tree_to_double_int (value);
16492 bool overflow_p;
16494 if ((val.low & (divisor - 1)) == 0)
16495 return value;
16497 overflow_p = TREE_OVERFLOW (value);
16498 val.low &= ~(divisor - 1);
16499 val.low += divisor;
16500 if (val.low == 0)
16502 val.high++;
16503 if (val.high == 0)
16504 overflow_p = true;
16507 return force_fit_type_double (TREE_TYPE (value), val,
16508 -1, overflow_p);
16510 else
16512 tree t;
16514 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16515 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16516 t = build_int_cst (TREE_TYPE (value), -divisor);
16517 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16520 else
16522 if (!div)
16523 div = build_int_cst (TREE_TYPE (value), divisor);
16524 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16525 value = size_binop_loc (loc, MULT_EXPR, value, div);
16528 return value;
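/* Editorial arithmetic check: rounding VALUE == 37 up to DIVISOR == 8
   via the power-of-two path gives (37 & ~7) + 8 == 40; the generic
   path computes CEIL_DIV (37, 8) * 8 == 5 * 8 == 40 as well.  */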
16531 /* Likewise, but round down. */
16533 tree
16534 round_down_loc (location_t loc, tree value, int divisor)
16536 tree div = NULL_TREE;
16538 gcc_assert (divisor > 0);
16539 if (divisor == 1)
16540 return value;
16542 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16543 have to do anything. Only do this when VALUE is not a constant,
16544 because for a constant this check is more expensive than simply
16545 performing the rounding. */
16546 if (TREE_CODE (value) != INTEGER_CST)
16548 div = build_int_cst (TREE_TYPE (value), divisor);
16550 if (multiple_of_p (TREE_TYPE (value), value, div))
16551 return value;
16554 /* If divisor is a power of two, simplify this to bit manipulation. */
16555 if (divisor == (divisor & -divisor))
16557 tree t;
16559 t = build_int_cst (TREE_TYPE (value), -divisor);
16560 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16562 else
16564 if (!div)
16565 div = build_int_cst (TREE_TYPE (value), divisor);
16566 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16567 value = size_binop_loc (loc, MULT_EXPR, value, div);
16570 return value;
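/* Editorial example: rounding 37 down to a multiple of 8 via the
   power-of-two path is simply 37 & -8 == 32.  */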
16573 /* Returns the pointer to the base of the object addressed by EXP and
16574 extracts information about the offset of the access, storing it
16575 in *PBITPOS and *POFFSET. */
16577 static tree
16578 split_address_to_core_and_offset (tree exp,
16579 HOST_WIDE_INT *pbitpos, tree *poffset)
16581 tree core;
16582 enum machine_mode mode;
16583 int unsignedp, volatilep;
16584 HOST_WIDE_INT bitsize;
16585 location_t loc = EXPR_LOCATION (exp);
16587 if (TREE_CODE (exp) == ADDR_EXPR)
16589 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16590 poffset, &mode, &unsignedp, &volatilep,
16591 false);
16592 core = build_fold_addr_expr_loc (loc, core);
16594 else
16596 core = exp;
16597 *pbitpos = 0;
16598 *poffset = NULL_TREE;
16601 return core;
16604 /* Returns true if the addresses E1 and E2 differ by a constant, false
16605 otherwise. If they do, the byte difference E1 - E2 is stored in *DIFF. */
16607 bool
16608 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16610 tree core1, core2;
16611 HOST_WIDE_INT bitpos1, bitpos2;
16612 tree toffset1, toffset2, tdiff, type;
16614 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16615 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16617 if (bitpos1 % BITS_PER_UNIT != 0
16618 || bitpos2 % BITS_PER_UNIT != 0
16619 || !operand_equal_p (core1, core2, 0))
16620 return false;
16622 if (toffset1 && toffset2)
16624 type = TREE_TYPE (toffset1);
16625 if (type != TREE_TYPE (toffset2))
16626 toffset2 = fold_convert (type, toffset2);
16628 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16629 if (!cst_and_fits_in_hwi (tdiff))
16630 return false;
16632 *diff = int_cst_value (tdiff);
16634 else if (toffset1 || toffset2)
16636 /* If only one of the offsets is non-constant, the difference cannot
16637 be a constant. */
16638 return false;
16640 else
16641 *diff = 0;
16643 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16644 return true;
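/* Editorial example: for "int a[10];" on a target with 4-byte int,
   ptr_difference_const on &a[3] and &a[1] finds the common core a with
   bit positions 96 and 32, and stores (96 - 32) / BITS_PER_UNIT == 8
   in *DIFF.  */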
16647 /* Simplify the floating point expression EXP when the sign of the
16648 result is not significant. Return NULL_TREE if no simplification
16649 is possible. */
16651 tree
16652 fold_strip_sign_ops (tree exp)
16654 tree arg0, arg1;
16655 location_t loc = EXPR_LOCATION (exp);
16657 switch (TREE_CODE (exp))
16659 case ABS_EXPR:
16660 case NEGATE_EXPR:
16661 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16662 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16664 case MULT_EXPR:
16665 case RDIV_EXPR:
16666 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16667 return NULL_TREE;
16668 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16669 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16670 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16671 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16672 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16673 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16674 break;
16676 case COMPOUND_EXPR:
16677 arg0 = TREE_OPERAND (exp, 0);
16678 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16679 if (arg1)
16680 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16681 break;
16683 case COND_EXPR:
16684 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16685 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16686 if (arg0 || arg1)
16687 return fold_build3_loc (loc,
16688 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16689 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16690 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16691 break;
16693 case CALL_EXPR:
16695 const enum built_in_function fcode = builtin_mathfn_code (exp);
16696 switch (fcode)
16698 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16699 /* Strip the copysign function call and return its first argument. */
16700 arg0 = CALL_EXPR_ARG (exp, 0);
16701 arg1 = CALL_EXPR_ARG (exp, 1);
16702 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16704 default:
16705 /* Strip sign ops from the argument of "odd" math functions. */
16706 if (negate_mathfn_p (fcode))
16708 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16709 if (arg0)
16710 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16712 break;
16715 break;
16717 default:
16718 break;
16720 return NULL_TREE;
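/* Editorial example: when only the magnitude of EXP is significant
   (say EXP sits under fabs) and sign-dependent rounding is not
   honored, "-x * copysign (y, z)" strips to "x * y" (z having no side
   effects): the NEGATE_EXPR and the copysign call influence only the
   sign of the result.  */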