/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
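
/* Illustrative sketch (an editorial example, not part of the original
   file): a middle-end caller might use the entry points above as

     tree four = size_int (4);
     tree eight = size_int (8);
     tree sz = size_binop (MULT_EXPR, four, eight);

   after which SZ is the `sizetype' constant 32, because both operands
   are constants and size_binop folds them immediately.  */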

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carry out the division
   of type CODE and return the quotient.
   Otherwise return NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
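
/* Illustrative sketch (an editorial example, not part of the original
   file): with ARG1 = 12 and ARG2 = 4 the divmod above yields quotient 3
   and remainder 0, so

     tree q = div_if_zero_remainder (TRUNC_DIV_EXPR, arg1, arg2);

   returns the INTEGER_CST 3; with ARG1 = 13 it returns NULL_TREE
   because the remainder is nonzero.  */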

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
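
/* Illustrative sketch (an editorial example, not part of the original
   file): a pass that folds speculatively would bracket the work as

     fold_defer_overflow_warnings ();
     tree folded = fold_build2 (PLUS_EXPR, type, op0, op1);
     bool used = result_is_actually_used_p (folded);  /＊ hypothetical ＊/
     fold_undefer_overflow_warnings (used, stmt, 0);

   so that any "assuming signed overflow does not occur" warning is
   issued only when the folded result is in fact used.
   result_is_actually_used_p stands in for whatever check the caller
   performs; it is not a real GCC function.  */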

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
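
/* Illustrative note (an editorial example, not part of the original
   file): because sin is odd, -sin(x) == sin(-x), so the folder may
   rewrite -__builtin_sin (x) as __builtin_sin (-x).  The rint family
   counts as odd only when -frounding-math is off, since negation can
   interact with a directed run-time rounding mode.  */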

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
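
/* Illustrative sketch (an editorial example, not part of the original
   file): for a 32-bit signed type the final comparison is against
   1 << 31, i.e. the bit pattern of the most negative value.  Hence

     may_negate_without_overflow_p (build_int_cst (integer_type_node,
                                                   INT_MIN))

   returns false (negating INT_MIN overflows), while any other value,
   e.g. -5, returns true.  */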

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
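
/* Illustrative sketch (an editorial example, not part of the original
   file): given a MINUS_EXPR T representing a - b in a type where
   signed zeros and sign-dependent rounding are not honored,

     tree neg = fold_negate_expr (loc, t);

   yields the tree for b - a, per the "- (A - B) -> B - A" case above.
   If no case applies, NEG is NULL_TREE and the caller must build an
   explicit NEGATE_EXPR (see negate_expr below).  */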

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
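
/* Illustrative sketch (an editorial example, not part of the original
   file): splitting IN = (x + 5) with CODE = PLUS_EXPR,

     tree con, lit, minus_lit;
     tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);

   leaves VAR = x, LIT = 5, and CON = MINUS_LIT = NULL; for
   IN = (x - 5) the literal moves to MINUS_LIT instead, because the
   MINUS_EXPR subtracts it.  */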

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
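
/* Illustrative sketch (an editorial example, not part of the original
   file): folding 2 + 3 at compile time,

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);

   returns the INTEGER_CST 5; an unsupported CODE instead returns
   NULL_TREE, which callers must be prepared for.  */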

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
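
/* Illustrative sketch (an editorial example, not part of the original
   file): for the complex integer constants (1 + 2i) and (3 + 4i),
   const_binop (MULT_EXPR, ...) applies the cross-product formula above:
   real = 1*3 - 2*4 = -5 and imag = 1*4 + 2*3 = 10, i.e. -5 + 10i.
   Floating-point complex types take the MPC (do_mpc_arg2) path
   instead.  */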

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
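
/* Illustrative sketch (an editorial example, not part of the original
   file): with ARG0 = (sizetype) 4 and ARG1 = (sizetype) 10, the
   constants compare 4 < 10, so the final branch computes 10 - 4 = 6,
   converts it to ssizetype, and subtracts it from zero, giving the
   ssizetype constant -6 without ever forming an out-of-range unsigned
   intermediate.  */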

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
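
/* Illustrative sketch (an editorial example, not part of the original
   file): converting the REAL_CST 3.75 to int truncates to 3 with no
   overflow; converting 1e30 saturates to INT_MAX with TREE_OVERFLOW
   set on the result; converting a NaN yields 0 with TREE_OVERFLOW set,
   per the Java-style rules described above.  */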

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0;
     we do so by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

1813 /* Convert expression ARG to type TYPE. Used by the middle-end for
1814 simple conversions in preference to calling the front-end's convert. */
1816 tree
1817 fold_convert_loc (location_t loc, tree type, tree arg)
1819 tree orig = TREE_TYPE (arg);
1820 tree tem;
1822 if (type == orig)
1823 return arg;
1825 if (TREE_CODE (arg) == ERROR_MARK
1826 || TREE_CODE (type) == ERROR_MARK
1827 || TREE_CODE (orig) == ERROR_MARK)
1828 return error_mark_node;
1830 switch (TREE_CODE (type))
1832 case POINTER_TYPE:
1833 case REFERENCE_TYPE:
1834 /* Handle conversions between pointers to different address spaces. */
1835 if (POINTER_TYPE_P (orig)
1836 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1837 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1838 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1839 /* fall through */
1841 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1842 case OFFSET_TYPE:
1843 if (TREE_CODE (arg) == INTEGER_CST)
1845 tem = fold_convert_const (NOP_EXPR, type, arg);
1846 if (tem != NULL_TREE)
1847 return tem;
1849 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1850 || TREE_CODE (orig) == OFFSET_TYPE)
1851 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1852 if (TREE_CODE (orig) == COMPLEX_TYPE)
1853 return fold_convert_loc (loc, type,
1854 fold_build1_loc (loc, REALPART_EXPR,
1855 TREE_TYPE (orig), arg));
1856 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1857 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1858 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1860 case REAL_TYPE:
1861 if (TREE_CODE (arg) == INTEGER_CST)
1863 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1864 if (tem != NULL_TREE)
1865 return tem;
1867 else if (TREE_CODE (arg) == REAL_CST)
1869 tem = fold_convert_const (NOP_EXPR, type, arg);
1870 if (tem != NULL_TREE)
1871 return tem;
1873 else if (TREE_CODE (arg) == FIXED_CST)
1875 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1876 if (tem != NULL_TREE)
1877 return tem;
1880 switch (TREE_CODE (orig))
1882 case INTEGER_TYPE:
1883 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1884 case POINTER_TYPE: case REFERENCE_TYPE:
1885 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1887 case REAL_TYPE:
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1890 case FIXED_POINT_TYPE:
1891 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1893 case COMPLEX_TYPE:
1894 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1895 return fold_convert_loc (loc, type, tem);
1897 default:
1898 gcc_unreachable ();
1901 case FIXED_POINT_TYPE:
1902 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1903 || TREE_CODE (arg) == REAL_CST)
1905 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 goto fold_convert_exit;
1910 switch (TREE_CODE (orig))
1912 case FIXED_POINT_TYPE:
1913 case INTEGER_TYPE:
1914 case ENUMERAL_TYPE:
1915 case BOOLEAN_TYPE:
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1919 case COMPLEX_TYPE:
1920 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1921 return fold_convert_loc (loc, type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 case FIXED_POINT_TYPE:
1935 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1936 fold_convert_loc (loc, TREE_TYPE (type), arg),
1937 fold_convert_loc (loc, TREE_TYPE (type),
1938 integer_zero_node));
1939 case COMPLEX_TYPE:
1941 tree rpart, ipart;
1943 if (TREE_CODE (arg) == COMPLEX_EXPR)
1945 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1946 TREE_OPERAND (arg, 0));
1947 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1948 TREE_OPERAND (arg, 1));
1949 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1952 arg = save_expr (arg);
1953 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1954 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1955 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1956 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1957 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1960 default:
1961 gcc_unreachable ();
1964 case VECTOR_TYPE:
1965 if (integer_zerop (arg))
1966 return build_zero_vector (type);
1967 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1968 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1969 || TREE_CODE (orig) == VECTOR_TYPE);
1970 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1972 case VOID_TYPE:
1973 tem = fold_ignored_result (arg);
1974 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1976 default:
1977 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1978 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1979 gcc_unreachable ();
1981 fold_convert_exit:
1982 protected_set_expr_location_unshare (tem, loc);
1983 return tem;
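/* Editorial example, not part of the original source: a minimal usage
   sketch of fold_convert_loc.  Converting an INTEGER_CST to a
   REAL_TYPE goes through the FLOAT_EXPR arm above and folds straight
   to a constant:

       tree two = build_int_cst (integer_type_node, 2);
       tree r = fold_convert_loc (loc, double_type_node, two);

   Here R is the REAL_CST 2.0 rather than a FLOAT_EXPR node, because
   fold_convert_const succeeds on constant operands.  */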
1986 /* Return false if expr can be assumed not to be an lvalue, true
1987 otherwise. */
1989 static bool
1990 maybe_lvalue_p (const_tree x)
1992 /* We only need to wrap lvalue tree codes. */
1993 switch (TREE_CODE (x))
1995 case VAR_DECL:
1996 case PARM_DECL:
1997 case RESULT_DECL:
1998 case LABEL_DECL:
1999 case FUNCTION_DECL:
2000 case SSA_NAME:
2002 case COMPONENT_REF:
2003 case MEM_REF:
2004 case INDIRECT_REF:
2005 case ARRAY_REF:
2006 case ARRAY_RANGE_REF:
2007 case BIT_FIELD_REF:
2008 case OBJ_TYPE_REF:
2010 case REALPART_EXPR:
2011 case IMAGPART_EXPR:
2012 case PREINCREMENT_EXPR:
2013 case PREDECREMENT_EXPR:
2014 case SAVE_EXPR:
2015 case TRY_CATCH_EXPR:
2016 case WITH_CLEANUP_EXPR:
2017 case COMPOUND_EXPR:
2018 case MODIFY_EXPR:
2019 case TARGET_EXPR:
2020 case COND_EXPR:
2021 case BIND_EXPR:
2022 break;
2024 default:
2025 /* Assume the worst for front-end tree codes. */
2026 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2027 break;
2028 return false;
2031 return true;
2034 /* Return an expr equal to X but certainly not valid as an lvalue. */
2036 tree
2037 non_lvalue_loc (location_t loc, tree x)
2039 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2040 us. */
2041 if (in_gimple_form)
2042 return x;
2044 if (! maybe_lvalue_p (x))
2045 return x;
2046 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
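/* Editorial example, not part of the original source: constants fall
   into maybe_lvalue_p's default case and come back unchanged, while a
   decl (outside GIMPLE) gets wrapped:

       non_lvalue_loc (loc, build_int_cst (integer_type_node, 1))
	   returns the INTEGER_CST itself;
       non_lvalue_loc (loc, parm)
	   returns NON_LVALUE_EXPR <parm>,

   where PARM stands for some PARM_DECL tree.  */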
2049 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2050 Zero means allow extended lvalues. */
2052 int pedantic_lvalues;
2054 /* When pedantic, return an expr equal to X but certainly not valid as a
2055 pedantic lvalue. Otherwise, return X. */
2057 static tree
2058 pedantic_non_lvalue_loc (location_t loc, tree x)
2060 if (pedantic_lvalues)
2061 return non_lvalue_loc (loc, x);
2063 return protected_set_expr_location_unshare (x, loc);
2066 /* Given a tree comparison code, return the code that is the logical inverse.
2067 It is generally not safe to do this for floating-point comparisons, except
2068 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2069 ERROR_MARK in this case. */
2071 enum tree_code
2072 invert_tree_comparison (enum tree_code code, bool honor_nans)
2074 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2075 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2076 return ERROR_MARK;
2078 switch (code)
2080 case EQ_EXPR:
2081 return NE_EXPR;
2082 case NE_EXPR:
2083 return EQ_EXPR;
2084 case GT_EXPR:
2085 return honor_nans ? UNLE_EXPR : LE_EXPR;
2086 case GE_EXPR:
2087 return honor_nans ? UNLT_EXPR : LT_EXPR;
2088 case LT_EXPR:
2089 return honor_nans ? UNGE_EXPR : GE_EXPR;
2090 case LE_EXPR:
2091 return honor_nans ? UNGT_EXPR : GT_EXPR;
2092 case LTGT_EXPR:
2093 return UNEQ_EXPR;
2094 case UNEQ_EXPR:
2095 return LTGT_EXPR;
2096 case UNGT_EXPR:
2097 return LE_EXPR;
2098 case UNGE_EXPR:
2099 return LT_EXPR;
2100 case UNLT_EXPR:
2101 return GE_EXPR;
2102 case UNLE_EXPR:
2103 return GT_EXPR;
2104 case ORDERED_EXPR:
2105 return UNORDERED_EXPR;
2106 case UNORDERED_EXPR:
2107 return ORDERED_EXPR;
2108 default:
2109 gcc_unreachable ();
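/* Editorial example, not part of the original source:

       invert_tree_comparison (LT_EXPR, false) == GE_EXPR
       invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   except that with flag_trapping_math set the second call returns
   ERROR_MARK via the early exit above, because LT traps on NaN
   operands while UNGE does not.  */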
2113 /* Similar, but return the comparison that results if the operands are
2114 swapped. This is safe for floating-point. */
2116 enum tree_code
2117 swap_tree_comparison (enum tree_code code)
2119 switch (code)
2121 case EQ_EXPR:
2122 case NE_EXPR:
2123 case ORDERED_EXPR:
2124 case UNORDERED_EXPR:
2125 case LTGT_EXPR:
2126 case UNEQ_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 case UNGT_EXPR:
2137 return UNLT_EXPR;
2138 case UNGE_EXPR:
2139 return UNLE_EXPR;
2140 case UNLT_EXPR:
2141 return UNGT_EXPR;
2142 case UNLE_EXPR:
2143 return UNGE_EXPR;
2144 default:
2145 gcc_unreachable ();
2150 /* Convert a comparison tree code from an enum tree_code representation
2151 into a compcode bit-based encoding. This function is the inverse of
2152 compcode_to_comparison. */
2154 static enum comparison_code
2155 comparison_to_compcode (enum tree_code code)
2157 switch (code)
2159 case LT_EXPR:
2160 return COMPCODE_LT;
2161 case EQ_EXPR:
2162 return COMPCODE_EQ;
2163 case LE_EXPR:
2164 return COMPCODE_LE;
2165 case GT_EXPR:
2166 return COMPCODE_GT;
2167 case NE_EXPR:
2168 return COMPCODE_NE;
2169 case GE_EXPR:
2170 return COMPCODE_GE;
2171 case ORDERED_EXPR:
2172 return COMPCODE_ORD;
2173 case UNORDERED_EXPR:
2174 return COMPCODE_UNORD;
2175 case UNLT_EXPR:
2176 return COMPCODE_UNLT;
2177 case UNEQ_EXPR:
2178 return COMPCODE_UNEQ;
2179 case UNLE_EXPR:
2180 return COMPCODE_UNLE;
2181 case UNGT_EXPR:
2182 return COMPCODE_UNGT;
2183 case LTGT_EXPR:
2184 return COMPCODE_LTGT;
2185 case UNGE_EXPR:
2186 return COMPCODE_UNGE;
2187 default:
2188 gcc_unreachable ();
2192 /* Convert a compcode bit-based encoding of a comparison operator back
2193 to GCC's enum tree_code representation. This function is the
2194 inverse of comparison_to_compcode. */
2196 static enum tree_code
2197 compcode_to_comparison (enum comparison_code code)
2199 switch (code)
2201 case COMPCODE_LT:
2202 return LT_EXPR;
2203 case COMPCODE_EQ:
2204 return EQ_EXPR;
2205 case COMPCODE_LE:
2206 return LE_EXPR;
2207 case COMPCODE_GT:
2208 return GT_EXPR;
2209 case COMPCODE_NE:
2210 return NE_EXPR;
2211 case COMPCODE_GE:
2212 return GE_EXPR;
2213 case COMPCODE_ORD:
2214 return ORDERED_EXPR;
2215 case COMPCODE_UNORD:
2216 return UNORDERED_EXPR;
2217 case COMPCODE_UNLT:
2218 return UNLT_EXPR;
2219 case COMPCODE_UNEQ:
2220 return UNEQ_EXPR;
2221 case COMPCODE_UNLE:
2222 return UNLE_EXPR;
2223 case COMPCODE_UNGT:
2224 return UNGT_EXPR;
2225 case COMPCODE_LTGT:
2226 return LTGT_EXPR;
2227 case COMPCODE_UNGE:
2228 return UNGE_EXPR;
2229 default:
2230 gcc_unreachable ();
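/* Editorial note, not part of the original source: the point of the
   bit encoding is that AND/OR of two predicates over the same
   operands is plain AND/OR of their compcodes.  For instance

       COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
       COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   so the disjunction of LT and EQ yields COMPCODE_LE, which maps back
   to LE_EXPR.  */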
2234 /* Return a tree for the comparison which is the combination of
2235 doing the AND or OR (depending on CODE) of the two operations LCODE
2236 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2237 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2238 if this makes the transformation invalid. */
2240 tree
2241 combine_comparisons (location_t loc,
2242 enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 int compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2316 enum tree_code tcode;
2318 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2319 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
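/* Editorial example, not part of the original source: combining
   "x < y || x == y" on integer operands,

       combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			    boolean_type_node, x, y)

   ORs COMPCODE_LT with COMPCODE_EQ into COMPCODE_LE and builds
   "x <= y".  For floating-point operands the NaN and trap checks
   above may force a NULL_TREE result instead.  */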
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2349 int
2350 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2354 || TREE_TYPE (arg0) == error_mark_node
2355 || TREE_TYPE (arg1) == error_mark_node)
2356 return 0;
2358 /* Similar, if either does not have a type (like a released SSA name),
2359 they aren't equal. */
2360 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2361 return 0;
2363 /* Check equality of integer constants before bailing out due to
2364 precision differences. */
2365 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2366 return tree_int_cst_equal (arg0, arg1);
2368 /* If both types don't have the same signedness, then we can't consider
2369 them equal. We must check this before the STRIP_NOPS calls
2370 because they may change the signedness of the arguments. As pointers
2371 strictly don't have a signedness, require either two pointers or
2372 two non-pointers as well. */
2373 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2374 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2375 return 0;
2377 /* We cannot consider pointers to different address space equal. */
2378 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2379 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2380 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2381 return 0;
2383 /* If both types don't have the same precision, then it is not safe
2384 to strip NOPs. */
2385 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2386 return 0;
2388 STRIP_NOPS (arg0);
2389 STRIP_NOPS (arg1);
2391 /* In case both args are comparisons but with different comparison
2392 code, try to swap the comparison operands of one arg to produce
2393 a match and compare that variant. */
2394 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2395 && COMPARISON_CLASS_P (arg0)
2396 && COMPARISON_CLASS_P (arg1))
2398 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2400 if (TREE_CODE (arg0) == swap_code)
2401 return operand_equal_p (TREE_OPERAND (arg0, 0),
2402 TREE_OPERAND (arg1, 1), flags)
2403 && operand_equal_p (TREE_OPERAND (arg0, 1),
2404 TREE_OPERAND (arg1, 0), flags);
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 /* This is needed for conversions and for COMPONENT_REF.
2409 Might as well play it safe and always test this. */
2410 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2411 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2412 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2413 return 0;
2415 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2416 We don't care about side effects in that case because the SAVE_EXPR
2417 takes care of that for us. In all other cases, two expressions are
2418 equal if they have no side effects. If we have two identical
2419 expressions with side effects that should be treated the same due
2420 to the only side effects being identical SAVE_EXPR's, that will
2421 be detected in the recursive calls below.
2422 If we are taking an invariant address of two identical objects
2423 they are necessarily equal as well. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (flags & OEP_CONSTANT_ADDRESS_OF)
2427 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2428 return 1;
2430 /* Next handle constant cases, those for which we can return 1 even
2431 if ONLY_CONST is set. */
2432 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2433 switch (TREE_CODE (arg0))
2435 case INTEGER_CST:
2436 return tree_int_cst_equal (arg0, arg1);
2438 case FIXED_CST:
2439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2440 TREE_FIXED_CST (arg1));
2442 case REAL_CST:
2443 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2444 TREE_REAL_CST (arg1)))
2445 return 1;
2448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2450 /* If we do not distinguish between signed and unsigned zero,
2451 consider them equal. */
2452 if (real_zerop (arg0) && real_zerop (arg1))
2453 return 1;
2455 return 0;
2457 case VECTOR_CST:
2459 unsigned i;
2461 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2462 return 0;
2464 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2466 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2467 VECTOR_CST_ELT (arg1, i), flags))
2468 return 0;
2470 return 1;
2473 case COMPLEX_CST:
2474 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2475 flags)
2476 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2477 flags));
2479 case STRING_CST:
2480 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2481 && ! memcmp (TREE_STRING_POINTER (arg0),
2482 TREE_STRING_POINTER (arg1),
2483 TREE_STRING_LENGTH (arg0)));
2485 case ADDR_EXPR:
2486 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2487 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2488 ? OEP_CONSTANT_ADDRESS_OF : 0);
2489 default:
2490 break;
2493 if (flags & OEP_ONLY_CONST)
2494 return 0;
2496 /* Define macros to test an operand from arg0 and arg1 for equality and a
2497 variant that allows null and views null as being different from any
2498 non-null value. In the latter case, if either is null, they both
2499 must be; otherwise, do the normal comparison. */
2500 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2501 TREE_OPERAND (arg1, N), flags)
2503 #define OP_SAME_WITH_NULL(N) \
2504 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2505 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2507 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2509 case tcc_unary:
2510 /* Two conversions are equal only if signedness and modes match. */
2511 switch (TREE_CODE (arg0))
2513 CASE_CONVERT:
2514 case FIX_TRUNC_EXPR:
2515 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2516 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2517 return 0;
2518 break;
2519 default:
2520 break;
2523 return OP_SAME (0);
2526 case tcc_comparison:
2527 case tcc_binary:
2528 if (OP_SAME (0) && OP_SAME (1))
2529 return 1;
2531 /* For commutative ops, allow the other order. */
2532 return (commutative_tree_code (TREE_CODE (arg0))
2533 && operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags));
2538 case tcc_reference:
2539 /* If either of the pointer (or reference) expressions we are
2540 dereferencing contain a side effect, these cannot be equal. */
2541 if (TREE_SIDE_EFFECTS (arg0)
2542 || TREE_SIDE_EFFECTS (arg1))
2543 return 0;
2545 switch (TREE_CODE (arg0))
2547 case INDIRECT_REF:
2548 case REALPART_EXPR:
2549 case IMAGPART_EXPR:
2550 return OP_SAME (0);
2552 case TARGET_MEM_REF:
2553 /* Require equal extra operands and then fall through to MEM_REF
2554 handling of the two common operands. */
2555 if (!OP_SAME_WITH_NULL (2)
2556 || !OP_SAME_WITH_NULL (3)
2557 || !OP_SAME_WITH_NULL (4))
2558 return 0;
2559 /* Fallthru. */
2560 case MEM_REF:
2561 /* Require equal access sizes, and similar pointer types.
2562 We can have incomplete types for array references of
2563 variable-sized arrays from the Fortran frontend
2564 though. */
2565 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2566 || (TYPE_SIZE (TREE_TYPE (arg0))
2567 && TYPE_SIZE (TREE_TYPE (arg1))
2568 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2569 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2570 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2571 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2572 && OP_SAME (0) && OP_SAME (1));
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null.
2577 Compare the array index by value if it is constant first as we
2578 may have different types but same value here. */
2579 return (OP_SAME (0)
2580 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 1))
2582 || OP_SAME (1))
2583 && OP_SAME_WITH_NULL (2)
2584 && OP_SAME_WITH_NULL (3));
2586 case COMPONENT_REF:
2587 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2588 may be NULL when we're called to compare MEM_EXPRs. */
2589 return OP_SAME_WITH_NULL (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2);
2593 case BIT_FIELD_REF:
2594 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 default:
2597 return 0;
2600 case tcc_expression:
2601 switch (TREE_CODE (arg0))
2603 case ADDR_EXPR:
2604 case TRUTH_NOT_EXPR:
2605 return OP_SAME (0);
2607 case TRUTH_ANDIF_EXPR:
2608 case TRUTH_ORIF_EXPR:
2609 return OP_SAME (0) && OP_SAME (1);
2611 case FMA_EXPR:
2612 case WIDEN_MULT_PLUS_EXPR:
2613 case WIDEN_MULT_MINUS_EXPR:
2614 if (!OP_SAME (2))
2615 return 0;
2616 /* The multiplication operands are commutative. */
2617 /* FALLTHRU */
2619 case TRUTH_AND_EXPR:
2620 case TRUTH_OR_EXPR:
2621 case TRUTH_XOR_EXPR:
2622 if (OP_SAME (0) && OP_SAME (1))
2623 return 1;
2625 /* Otherwise take into account this is a commutative operation. */
2626 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2627 TREE_OPERAND (arg1, 1), flags)
2628 && operand_equal_p (TREE_OPERAND (arg0, 1),
2629 TREE_OPERAND (arg1, 0), flags));
2631 case COND_EXPR:
2632 case VEC_COND_EXPR:
2633 case DOT_PROD_EXPR:
2634 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 default:
2637 return 0;
2640 case tcc_vl_exp:
2641 switch (TREE_CODE (arg0))
2643 case CALL_EXPR:
2644 /* If the CALL_EXPRs call different functions, then they
2645 clearly cannot be equal. */
2646 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2647 flags))
2648 return 0;
2651 unsigned int cef = call_expr_flags (arg0);
2652 if (flags & OEP_PURE_SAME)
2653 cef &= ECF_CONST | ECF_PURE;
2654 else
2655 cef &= ECF_CONST;
2656 if (!cef)
2657 return 0;
2660 /* Now see if all the arguments are the same. */
2662 const_call_expr_arg_iterator iter0, iter1;
2663 const_tree a0, a1;
2664 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2665 a1 = first_const_call_expr_arg (arg1, &iter1);
2666 a0 && a1;
2667 a0 = next_const_call_expr_arg (&iter0),
2668 a1 = next_const_call_expr_arg (&iter1))
2669 if (! operand_equal_p (a0, a1, flags))
2670 return 0;
2672 /* If we get here and both argument lists are exhausted
2673 then the CALL_EXPRs are equal. */
2674 return ! (a0 || a1);
2676 default:
2677 return 0;
2680 case tcc_declaration:
2681 /* Consider __builtin_sqrt equal to sqrt. */
2682 return (TREE_CODE (arg0) == FUNCTION_DECL
2683 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2684 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2685 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2687 default:
2688 return 0;
2691 #undef OP_SAME
2692 #undef OP_SAME_WITH_NULL
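/* Editorial example, not part of the original source: for shared,
   side-effect-free decl nodes A and B,

       tree s1 = build2 (PLUS_EXPR, integer_type_node, a, b);
       tree s2 = build2 (PLUS_EXPR, integer_type_node, b, a);
       operand_equal_p (s1, s2, 0)

   returns 1 through the tcc_binary commutativity case above, since
   each operand of S1 is pointer-identical to the swapped operand
   of S2.  */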
2695 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2696 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2698 When in doubt, return 0. */
2700 static int
2701 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2703 int unsignedp1, unsignedpo;
2704 tree primarg0, primarg1, primother;
2705 unsigned int correct_width;
2707 if (operand_equal_p (arg0, arg1, 0))
2708 return 1;
2710 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2711 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2712 return 0;
2714 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2715 and see if the inner values are the same. This removes any
2716 signedness comparison, which doesn't matter here. */
2717 primarg0 = arg0, primarg1 = arg1;
2718 STRIP_NOPS (primarg0);
2719 STRIP_NOPS (primarg1);
2720 if (operand_equal_p (primarg0, primarg1, 0))
2721 return 1;
2723 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2724 actual comparison operand, ARG0.
2726 First throw away any conversions to wider types
2727 already present in the operands. */
2729 primarg1 = get_narrower (arg1, &unsignedp1);
2730 primother = get_narrower (other, &unsignedpo);
2732 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2733 if (unsignedp1 == unsignedpo
2734 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2735 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2737 tree type = TREE_TYPE (arg0);
2739 /* Make sure shorter operand is extended the right way
2740 to match the longer operand. */
2741 primarg1 = fold_convert (signed_or_unsigned_type_for
2742 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2744 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2745 return 1;
2748 return 0;
2751 /* See if ARG is an expression that is either a comparison or is performing
2752 arithmetic on comparisons. The comparisons must only be comparing
2753 two different values, which will be stored in *CVAL1 and *CVAL2; if
2754 they are nonzero it means that some operands have already been found.
2755 No variables may be used anywhere else in the expression except in the
2756 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2757 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2759 If this is true, return 1. Otherwise, return zero. */
2761 static int
2762 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2769 tclass = tcc_unary;
2770 else if (tclass == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2772 || code == COMPOUND_EXPR))
2773 tclass = tcc_binary;
2775 else if (tclass == tcc_expression && code == SAVE_EXPR
2776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2778 /* If we've already found a CVAL1 or CVAL2, this expression is
2779 too complex to handle. */
2780 if (*cval1 || *cval2)
2781 return 0;
2783 tclass = tcc_unary;
2784 *save_p = 1;
2787 switch (tclass)
2789 case tcc_unary:
2790 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2792 case tcc_binary:
2793 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2794 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2795 cval1, cval2, save_p));
2797 case tcc_constant:
2798 return 1;
2800 case tcc_expression:
2801 if (code == COND_EXPR)
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2803 cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p)
2806 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2807 cval1, cval2, save_p));
2808 return 0;
2810 case tcc_comparison:
2811 /* First see if we can handle the first operand, then the second. For
2812 the second operand, we know *CVAL1 can't be zero. It must be that
2813 one side of the comparison is each of the values; test for the
2814 case where this isn't true by failing if the two operands
2815 are the same. */
2817 if (operand_equal_p (TREE_OPERAND (arg, 0),
2818 TREE_OPERAND (arg, 1), 0))
2819 return 0;
2821 if (*cval1 == 0)
2822 *cval1 = TREE_OPERAND (arg, 0);
2823 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2825 else if (*cval2 == 0)
2826 *cval2 = TREE_OPERAND (arg, 0);
2827 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2829 else
2830 return 0;
2832 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 1);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2838 else
2839 return 0;
2841 return 1;
2843 default:
2844 return 0;
2848 /* ARG is a tree that is known to contain just arithmetic operations and
2849 comparisons. Evaluate the operations in the tree substituting NEW0 for
2850 any occurrence of OLD0 as an operand of a comparison and likewise for
2851 NEW1 and OLD1. */
2853 static tree
2854 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2855 tree old1, tree new1)
2857 tree type = TREE_TYPE (arg);
2858 enum tree_code code = TREE_CODE (arg);
2859 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2861 /* We can handle some of the tcc_expression cases here. */
2862 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2863 tclass = tcc_unary;
2864 else if (tclass == tcc_expression
2865 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2866 tclass = tcc_binary;
2868 switch (tclass)
2870 case tcc_unary:
2871 return fold_build1_loc (loc, code, type,
2872 eval_subst (loc, TREE_OPERAND (arg, 0),
2873 old0, new0, old1, new1));
2875 case tcc_binary:
2876 return fold_build2_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1),
2879 eval_subst (loc, TREE_OPERAND (arg, 1),
2880 old0, new0, old1, new1));
2882 case tcc_expression:
2883 switch (code)
2885 case SAVE_EXPR:
2886 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2887 old1, new1);
2889 case COMPOUND_EXPR:
2890 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2891 old1, new1);
2893 case COND_EXPR:
2894 return fold_build3_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1),
2899 eval_subst (loc, TREE_OPERAND (arg, 2),
2900 old0, new0, old1, new1));
2901 default:
2902 break;
2904 /* Fall through - ??? */
2906 case tcc_comparison:
2908 tree arg0 = TREE_OPERAND (arg, 0);
2909 tree arg1 = TREE_OPERAND (arg, 1);
2911 /* We need to check both for exact equality and tree equality. The
2912 former will be true if the operand has a side-effect. In that
2913 case, we know the operand occurred exactly once. */
2915 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2916 arg0 = new0;
2917 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2918 arg0 = new1;
2920 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2921 arg1 = new0;
2922 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2923 arg1 = new1;
2925 return fold_build2_loc (loc, code, type, arg0, arg1);
2928 default:
2929 return arg;
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED was previously an operand of the expression
2935 but is now not needed (e.g., we folded OMITTED * 0).
2937 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2938 the conversion of RESULT to TYPE. */
2940 tree
2941 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2943 tree t = fold_convert_loc (loc, type, result);
2945 /* If the resulting operand is an empty statement, just return the omitted
2946 statement cast to void. */
2947 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2948 return build1_loc (loc, NOP_EXPR, void_type_node,
2949 fold_ignored_result (omitted));
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2_loc (loc, COMPOUND_EXPR, type,
2953 fold_ignored_result (omitted), t);
2955 return non_lvalue_loc (loc, t);
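/* Editorial example, not part of the original source: when folding
   "f () * 0" the result is 0 but the call must still be evaluated, so

       omit_one_operand_loc (loc, type, integer_zero_node, call)

   yields the COMPOUND_EXPR "(f (), 0)".  Had OMITTED been free of
   side effects, just the converted RESULT would come back.  */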
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2962 tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement cast to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2969 return build1_loc (loc, NOP_EXPR, void_type_node,
2970 fold_ignored_result (omitted));
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2_loc (loc, COMPOUND_EXPR, type,
2974 fold_ignored_result (omitted), t);
2976 return pedantic_non_lvalue_loc (loc, t);
2979 /* Return a tree for the case when the result of an expression is RESULT
2980 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2981 of the expression but are now not needed.
2983 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2984 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2985 evaluated before OMITTED2. Otherwise, if neither has side effects,
2986 just do the conversion of RESULT to TYPE. */
2988 tree
2989 omit_two_operands_loc (location_t loc, tree type, tree result,
2990 tree omitted1, tree omitted2)
2992 tree t = fold_convert_loc (loc, type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 tree
3011 fold_truth_not_expr (location_t loc, tree arg)
3013 tree type = TREE_TYPE (arg);
3014 enum tree_code code = TREE_CODE (arg);
3015 location_t loc1, loc2;
3017 /* If this is a comparison, we can simply invert it, except for
3018 floating-point non-equality comparisons, in which case we just
3019 enclose a TRUTH_NOT_EXPR around what we have. */
3021 if (TREE_CODE_CLASS (code) == tcc_comparison)
3023 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3024 if (FLOAT_TYPE_P (op_type)
3025 && flag_trapping_math
3026 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3027 && code != NE_EXPR && code != EQ_EXPR)
3028 return NULL_TREE;
3030 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3031 if (code == ERROR_MARK)
3032 return NULL_TREE;
3034 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3035 TREE_OPERAND (arg, 1));
3038 switch (code)
3040 case INTEGER_CST:
3041 return constant_boolean_node (integer_zerop (arg), type);
3043 case TRUTH_AND_EXPR:
3044 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3045 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3046 return build2_loc (loc, TRUTH_OR_EXPR, type,
3047 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3048 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3050 case TRUTH_OR_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_AND_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3066 else
3067 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3068 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3073 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3074 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3075 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3076 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 case TRUTH_ORIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 case TRUTH_NOT_EXPR:
3086 return TREE_OPERAND (arg, 0);
3088 case COND_EXPR:
3090 tree arg1 = TREE_OPERAND (arg, 1);
3091 tree arg2 = TREE_OPERAND (arg, 2);
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3096 /* A COND_EXPR may have a throw as one operand, which
3097 then has void type. Just leave void operands
3098 as they are. */
3099 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3100 VOID_TYPE_P (TREE_TYPE (arg1))
3101 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3102 VOID_TYPE_P (TREE_TYPE (arg2))
3103 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3106 case COMPOUND_EXPR:
3107 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 TREE_OPERAND (arg, 0),
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3112 case NON_LVALUE_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3116 CASE_CONVERT:
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3118 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3120 /* ... fall through ... */
3122 case FLOAT_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 return build1_loc (loc, TREE_CODE (arg), type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3127 case BIT_AND_EXPR:
3128 if (!integer_onep (TREE_OPERAND (arg, 1)))
3129 return NULL_TREE;
3130 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3132 case SAVE_EXPR:
3133 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3135 case CLEANUP_POINT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3140 default:
3141 return NULL_TREE;
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 tree
3153 invert_truthvalue_loc (location_t loc, tree arg)
3155 tree tem;
3157 if (TREE_CODE (arg) == ERROR_MARK)
3158 return arg;
3160 tem = fold_truth_not_expr (loc, arg);
3161 if (!tem)
3162 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3164 return tem;
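/* Editorial example, not part of the original source: on integer
   operands "a < b" inverts to "a >= b" via fold_truth_not_expr.  On
   floating-point operands with flag_trapping_math set the comparison
   cannot safely be inverted, fold_truth_not_expr returns NULL_TREE,
   and the fallback above produces

       TRUTH_NOT_EXPR <a < b>

   instead.  */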
3167 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3168 operands are another bit-wise operation with a common input. If so,
3169 distribute the bit operations to save an operation and possibly two if
3170 constants are involved. For example, convert
3171 (A | B) & (A | C) into A | (B & C)
3172 Further simplification will occur if B and C are constants.
3174 If this optimization cannot be done, 0 will be returned. */
3176 static tree
3177 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3178 tree arg0, tree arg1)
3180 tree common;
3181 tree left, right;
3183 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3184 || TREE_CODE (arg0) == code
3185 || (TREE_CODE (arg0) != BIT_AND_EXPR
3186 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3187 return 0;
3189 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3191 common = TREE_OPERAND (arg0, 0);
3192 left = TREE_OPERAND (arg0, 1);
3193 right = TREE_OPERAND (arg1, 1);
3195 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3197 common = TREE_OPERAND (arg0, 0);
3198 left = TREE_OPERAND (arg0, 1);
3199 right = TREE_OPERAND (arg1, 0);
3201 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3203 common = TREE_OPERAND (arg0, 1);
3204 left = TREE_OPERAND (arg0, 0);
3205 right = TREE_OPERAND (arg1, 1);
3207 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3209 common = TREE_OPERAND (arg0, 1);
3210 left = TREE_OPERAND (arg0, 0);
3211 right = TREE_OPERAND (arg1, 0);
3213 else
3214 return 0;
3216 common = fold_convert_loc (loc, type, common);
3217 left = fold_convert_loc (loc, type, left);
3218 right = fold_convert_loc (loc, type, right);
3219 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3220 fold_build2_loc (loc, code, type, left, right));
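/* Editorial example, not part of the original source: for
   "(x | 4) & (x | 6)" the function finds COMMON = x, LEFT = 4 and
   RIGHT = 6 and rewrites the expression as "x | (4 & 6)", which
   further constant folding reduces to "x | 4" -- one bit operation
   instead of three.  */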
3223 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3224 with code CODE. This optimization is unsafe. */
3225 static tree
3226 distribute_real_division (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3230 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3232 /* (A / C) +- (B / C) -> (A +- B) / C. */
3233 if (mul0 == mul1
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1), 0))
3236 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3237 fold_build2_loc (loc, code, type,
3238 TREE_OPERAND (arg0, 0),
3239 TREE_OPERAND (arg1, 0)),
3240 TREE_OPERAND (arg0, 1));
3242 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3243 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0), 0)
3245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3248 REAL_VALUE_TYPE r0, r1;
3249 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3250 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3251 if (!mul0)
3252 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3253 if (!mul1)
3254 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3255 real_arithmetic (&r0, code, &r0, &r1);
3256 return fold_build2_loc (loc, MULT_EXPR, type,
3257 TREE_OPERAND (arg0, 0),
3258 build_real (type, r0));
3261 return NULL_TREE;
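/* Editorial example, not part of the original source: with a common
   divisor, "(a / c) + (b / c)" becomes "(a + b) / c"; with a common
   dividend and constant divisors, "(a / 2.0) + (a / 4.0)" becomes
   "a * 0.75".  Both reassociate floating-point arithmetic, which is
   why the header above flags the optimization as unsafe; callers are
   expected to guard it accordingly.  */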
3264 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3265 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3267 static tree
3268 make_bit_field_ref (location_t loc, tree inner, tree type,
3269 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3271 tree result, bftype;
3273 if (bitpos == 0)
3275 tree size = TYPE_SIZE (TREE_TYPE (inner));
3276 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3277 || POINTER_TYPE_P (TREE_TYPE (inner)))
3278 && host_integerp (size, 0)
3279 && tree_low_cst (size, 0) == bitsize)
3280 return fold_convert_loc (loc, type, inner);
3283 bftype = type;
3284 if (TYPE_PRECISION (bftype) != bitsize
3285 || TYPE_UNSIGNED (bftype) == !unsignedp)
3286 bftype = build_nonstandard_integer_type (bitsize, 0);
3288 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 if (bftype != type)
3292 result = fold_convert_loc (loc, type, result);
3294 return result;
3297 /* Optimize a bit-field compare.
3299 There are two cases: First is a compare against a constant and the
3300 second is a comparison of two items where the fields are at the same
3301 bit position relative to the start of a chunk (byte, halfword, word)
3302 large enough to contain it. In these cases we can avoid the shift
3303 implicit in bitfield extractions.
3305 For constants, we emit a compare of the shifted constant with the
3306 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3307 compared. For two fields at the same position, we do the ANDs with the
3308 similar mask and compare the result of the ANDs.
3310 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3311 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3312 are the left and right operands of the comparison, respectively.
3314 If the optimization described above can be done, we return the resulting
3315 tree. Otherwise we return zero. */
3317 static tree
3318 optimize_bit_field_compare (location_t loc, enum tree_code code,
3319 tree compare_type, tree lhs, tree rhs)
3321 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3322 tree type = TREE_TYPE (lhs);
3323 tree signed_type, unsigned_type;
3324 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3325 enum machine_mode lmode, rmode, nmode;
3326 int lunsignedp, runsignedp;
3327 int lvolatilep = 0, rvolatilep = 0;
3328 tree linner, rinner = NULL_TREE;
3329 tree mask;
3330 tree offset;
3332 /* In the strict volatile bitfields case, doing code changes here may prevent
3333 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3334 if (flag_strict_volatile_bitfields > 0)
3335 return 0;
3337 /* Get all the information about the extractions being done. If the bit size
3338 is the same as the size of the underlying object, we aren't doing an
3339 extraction at all and so can do nothing. We also don't want to
3340 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3341 then will no longer be able to replace it. */
3342 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3343 &lunsignedp, &lvolatilep, false);
3344 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3345 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3346 return 0;
3348 if (!const_p)
3350 /* If this is not a constant, we can only do something if bit positions,
3351 sizes, and signedness are the same. */
3352 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3353 &runsignedp, &rvolatilep, false);
3355 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3356 || lunsignedp != runsignedp || offset != 0
3357 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3358 return 0;
3361 /* See if we can find a mode to refer to this field. We should be able to,
3362 but fail if we can't. */
3363 if (lvolatilep
3364 && GET_MODE_BITSIZE (lmode) > 0
3365 && flag_strict_volatile_bitfields > 0)
3366 nmode = lmode;
3367 else
3368 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3369 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3370 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3371 TYPE_ALIGN (TREE_TYPE (rinner))),
3372 word_mode, lvolatilep || rvolatilep);
3373 if (nmode == VOIDmode)
3374 return 0;
3376 /* Set signed and unsigned types of the precision of this mode for the
3377 shifts below. */
3378 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3379 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3381 /* Compute the bit position and size for the new reference and our offset
3382 within it. If the new reference is the same size as the original, we
3383 won't optimize anything, so return zero. */
3384 nbitsize = GET_MODE_BITSIZE (nmode);
3385 nbitpos = lbitpos & ~ (nbitsize - 1);
3386 lbitpos -= nbitpos;
3387 if (nbitsize == lbitsize)
3388 return 0;
3390 if (BYTES_BIG_ENDIAN)
3391 lbitpos = nbitsize - lbitsize - lbitpos;
3393 /* Make the mask to be used against the extracted field. */
3394 mask = build_int_cst_type (unsigned_type, -1);
3395 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3396 mask = const_binop (RSHIFT_EXPR, mask,
3397 size_int (nbitsize - lbitsize - lbitpos));
3399 if (! const_p)
3400 /* If not comparing with constant, just rework the comparison
3401 and return. */
3402 return fold_build2_loc (loc, code, compare_type,
3403 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3404 make_bit_field_ref (loc, linner,
3405 unsigned_type,
3406 nbitsize, nbitpos,
3407 1),
3408 mask),
3409 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3410 make_bit_field_ref (loc, rinner,
3411 unsigned_type,
3412 nbitsize, nbitpos,
3413 1),
3414 mask));
3416 /* Otherwise, we are handling the constant case. See if the constant is too
3417 big for the field. Warn and return a tree for 0 (false) if so. We do
3418 this not only for its own sake, but to avoid having to test for this
3419 error case below. If we didn't, we might generate wrong code.
3421 For unsigned fields, the constant shifted right by the field length should
3422 be all zero. For signed fields, the high-order bits should agree with
3423 the sign bit. */
3425 if (lunsignedp)
3427 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3428 fold_convert_loc (loc,
3429 unsigned_type, rhs),
3430 size_int (lbitsize))))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR,
3440 fold_convert_loc (loc, signed_type, rhs),
3441 size_int (lbitsize - 1));
3442 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3444 warning (0, "comparison is always %d due to width of bit-field",
3445 code == NE_EXPR);
3446 return constant_boolean_node (code == NE_EXPR, compare_type);
3450 /* Single-bit compares should always be against zero. */
3451 if (lbitsize == 1 && ! integer_zerop (rhs))
3453 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3454 rhs = build_int_cst (type, 0);
3457 /* Make a new bitfield reference, shift the constant over the
3458 appropriate number of bits and mask it with the computed mask
3459 (in case this was a signed field). If we changed it, make a new one. */
3460 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3461 if (lvolatilep)
3463 TREE_SIDE_EFFECTS (lhs) = 1;
3464 TREE_THIS_VOLATILE (lhs) = 1;
3467 rhs = const_binop (BIT_AND_EXPR,
3468 const_binop (LSHIFT_EXPR,
3469 fold_convert_loc (loc, unsigned_type, rhs),
3470 size_int (lbitpos)),
3471 mask);
3473 lhs = build2_loc (loc, code, compare_type,
3474 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3475 return lhs;
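/* Editorial example, not part of the original source: given

       struct s { unsigned f : 3; } x;  ...  x.f == 5

   the constant case above turns the bit-field extraction and compare
   into a single masked compare of the containing chunk, conceptually

       (WORD & MASK) == ((5 << LBITPOS) & MASK)

   where WORD is the mode-sized unit holding the field, avoiding the
   shift an actual bit-field read would need.  */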
3478 /* Subroutine for fold_truth_andor_1: decode a field reference.
3480 If EXP is a comparison reference, we return the innermost reference.
3482 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3483 set to the starting bit number.
3485 If the innermost field can be completely contained in a mode-sized
3486 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3488 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3489 otherwise it is not changed.
3491 *PUNSIGNEDP is set to the signedness of the field.
3493 *PMASK is set to the mask used. This is either contained in a
3494 BIT_AND_EXPR or derived from the width of the field.
3496 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3498 Return 0 if this is not a component reference or is one that we can't
3499 do anything with. */
3501 static tree
3502 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3503 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3504 int *punsignedp, int *pvolatilep,
3505 tree *pmask, tree *pand_mask)
3507 tree outer_type = 0;
3508 tree and_mask = 0;
3509 tree mask, inner, offset;
3510 tree unsigned_type;
3511 unsigned int precision;
3513 /* All the optimizations using this function assume integer fields.
3514 There are problems with FP fields since the type_for_size call
3515 below can fail for, e.g., XFmode. */
3516 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3517 return 0;
3519 /* We are interested in the bare arrangement of bits, so strip everything
3520 that doesn't affect the machine mode. However, record the type of the
3521 outermost expression if it may matter below. */
3522 if (CONVERT_EXPR_P (exp)
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst_type (unsigned_type, -1);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3556 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3558 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3559 if (and_mask != 0)
3560 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3561 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3563 *pmask = mask;
3564 *pand_mask = and_mask;
3565 return inner;
3568 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3569 bit positions. */
3571 static int
3572 all_ones_mask_p (const_tree mask, int size)
3574 tree type = TREE_TYPE (mask);
3575 unsigned int precision = TYPE_PRECISION (type);
3576 tree tmask;
3578 tmask = build_int_cst_type (signed_type_for (type), -1);
3580 return
3581 tree_int_cst_equal (mask,
3582 const_binop (RSHIFT_EXPR,
3583 const_binop (LSHIFT_EXPR, tmask,
3584 size_int (precision - size)),
3585 size_int (precision - size)));
3588 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3589 represents the sign bit of EXP's type. If EXP represents a sign
3590 or zero extension, also test VAL against the unextended type.
3591 The return value is the (sub)expression whose sign bit is VAL,
3592 or NULL_TREE otherwise. */
3594 static tree
3595 sign_bit_p (tree exp, const_tree val)
3597 unsigned HOST_WIDE_INT mask_lo, lo;
3598 HOST_WIDE_INT mask_hi, hi;
3599 int width;
3600 tree t;
3602 /* Tree EXP must have an integral type. */
3603 t = TREE_TYPE (exp);
3604 if (! INTEGRAL_TYPE_P (t))
3605 return NULL_TREE;
3607 /* Tree VAL must be an integer constant. */
3608 if (TREE_CODE (val) != INTEGER_CST
3609 || TREE_OVERFLOW (val))
3610 return NULL_TREE;
3612 width = TYPE_PRECISION (t);
3613 if (width > HOST_BITS_PER_WIDE_INT)
3615 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3616 lo = 0;
3618 mask_hi = ((unsigned HOST_WIDE_INT) -1
3619 >> (HOST_BITS_PER_DOUBLE_INT - width));
3620 mask_lo = -1;
3622 else
3624 hi = 0;
3625 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3627 mask_hi = 0;
3628 mask_lo = ((unsigned HOST_WIDE_INT) -1
3629 >> (HOST_BITS_PER_WIDE_INT - width));
3632 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3633 treat VAL as if it were unsigned. */
3634 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3635 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3636 return exp;
3638 /* Handle extension from a narrower type. */
3639 if (TREE_CODE (exp) == NOP_EXPR
3640 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3641 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3643 return NULL_TREE;
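/* Editorial example, not part of the original source: for a 32-bit
   signed EXP, sign_bit_p returns EXP when VAL is the constant with
   only bit 31 set (0x80000000).  For an extension such as
   "(int) short_expr" it recurses through the NOP_EXPR, so the 16-bit
   sign bit 0x8000 is also accepted and the narrower subexpression is
   returned.  */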
3646 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3647 to be evaluated unconditionally. */
3649 static int
3650 simple_operand_p (const_tree exp)
3652 /* Strip any conversions that don't change the machine mode. */
3653 STRIP_NOPS (exp);
3655 return (CONSTANT_CLASS_P (exp)
3656 || TREE_CODE (exp) == SSA_NAME
3657 || (DECL_P (exp)
3658 && ! TREE_ADDRESSABLE (exp)
3659 && ! TREE_THIS_VOLATILE (exp)
3660 && ! DECL_NONLOCAL (exp)
3661 /* Don't regard global variables as simple. They may be
3662 allocated in ways unknown to the compiler (shared memory,
3663 #pragma weak, etc). */
3664 && ! TREE_PUBLIC (exp)
3665 && ! DECL_EXTERNAL (exp)
3666 /* Loading a static variable is unduly expensive, but global
3667 registers aren't expensive. */
3668 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3671 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3672 to be evaluated unconditionally.
3673 In addition to simple_operand_p, we assume that comparisons, conversions,
3674 and logic-not operations are simple, if their operands are simple, too. */
3676 static bool
3677 simple_operand_p_2 (tree exp)
3679 enum tree_code code;
3681 if (TREE_SIDE_EFFECTS (exp)
3682 || tree_could_trap_p (exp))
3683 return false;
3685 while (CONVERT_EXPR_P (exp))
3686 exp = TREE_OPERAND (exp, 0);
3688 code = TREE_CODE (exp);
3690 if (TREE_CODE_CLASS (code) == tcc_comparison)
3691 return (simple_operand_p (TREE_OPERAND (exp, 0))
3692 && simple_operand_p (TREE_OPERAND (exp, 1)));
3694 if (code == TRUTH_NOT_EXPR)
3695 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3697 return simple_operand_p (exp);
3701 /* The following functions are subroutines to fold_range_test and allow it to
3702 try to change a logical combination of comparisons into a range test.
3704 For example, both
3705 X == 2 || X == 3 || X == 4 || X == 5
3706 and
3707 X >= 2 && X <= 5
3708 are converted to
3709 (unsigned) (X - 2) <= 3
3711 We describe each set of comparisons as being either inside or outside
3712 a range, using a variable named like IN_P, and then describe the
3713 range with a lower and upper bound. If one of the bounds is omitted,
3714 it represents either the highest or lowest value of the type.
3716 In the comments below, we represent a range by two numbers in brackets
3717 preceded by a "+" to designate being inside that range, or a "-" to
3718 designate being outside that range, so the condition can be inverted by
3719 flipping the prefix. An omitted bound is represented by a "-". For
3720 example, "- [-, 10]" means being outside the range starting at the lowest
3721 possible value and ending at 10, in other words, being greater than 10.
3722 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 always false.
3725 We set up things so that the missing bounds are handled in a consistent
3726 manner so neither a missing bound nor "true" and "false" need to be
3727 handled using a special case. */
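/* A minimal stand-alone sketch (illustrative only, not used by the
   folder) of the transformation above: for every int X the unsigned
   comparison below is equivalent to
   X == 2 || X == 3 || X == 4 || X == 5, because subtracting 2 maps
   the range [2, 5] onto [0, 3] while every other value wraps around
   to a large unsigned value that fails the <= test.  */

static int
range_test_sketch (int x)
{
  return ((unsigned int) x - 2u) <= 3u;
}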
3729 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3730 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3731 and UPPER1_P are nonzero if the respective argument is an upper bound
3732 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3733 must be specified for a comparison. ARG1 will be converted to ARG0's
3734 type if both are specified. */
3736 static tree
3737 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3738 tree arg1, int upper1_p)
3740 tree tem;
3741 int result;
3742 int sgn0, sgn1;
3744 /* If neither arg represents infinity, do the normal operation.
3745 Else, if not a comparison, return infinity. Else handle the special
3746 comparison rules. Note that most of the cases below won't occur, but
3747 are handled for consistency. */
3749 if (arg0 != 0 && arg1 != 0)
3751 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3752 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 STRIP_NOPS (tem);
3754 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3757 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 return 0;
3760 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3761 for neither. In real maths, we cannot assume open ended ranges are
3762 the same. But, this is computer arithmetic, where numbers are finite.
3763 We can therefore replace a missing bound with a value Z beyond every
3764 representable number (above for an upper bound, below for a lower one).
3765 This permits us to treat unbounded ranges as equal. */
3766 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3767 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3768 switch (code)
3770 case EQ_EXPR:
3771 result = sgn0 == sgn1;
3772 break;
3773 case NE_EXPR:
3774 result = sgn0 != sgn1;
3775 break;
3776 case LT_EXPR:
3777 result = sgn0 < sgn1;
3778 break;
3779 case LE_EXPR:
3780 result = sgn0 <= sgn1;
3781 break;
3782 case GT_EXPR:
3783 result = sgn0 > sgn1;
3784 break;
3785 case GE_EXPR:
3786 result = sgn0 >= sgn1;
3787 break;
3788 default:
3789 gcc_unreachable ();
3792 return constant_boolean_node (result, type);
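/* For illustration: comparing two omitted upper bounds gives
   SGN0 == SGN1 == 1, so EQ_EXPR yields true here; two unbounded
   upper bounds therefore compare equal, as described above.  */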
3795 /* Helper routine for make_range. Perform one step for it, return
3796 new expression if the loop should continue or NULL_TREE if it should
3797 stop. */
3799 tree
3800 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3801 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3802 bool *strict_overflow_p)
3804 tree arg0_type = TREE_TYPE (arg0);
3805 tree n_low, n_high, low = *p_low, high = *p_high;
3806 int in_p = *p_in_p, n_in_p;
3808 switch (code)
3810 case TRUTH_NOT_EXPR:
3811 *p_in_p = ! in_p;
3812 return arg0;
3814 case EQ_EXPR: case NE_EXPR:
3815 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3816 /* We can only do something if the range is testing for zero
3817 and if the second operand is an integer constant. Note that
3818 saying something is "in" the range we make is done by
3819 complementing IN_P, since IN_P is initially set for the case of
3820 being not equal to zero; "out" is leaving it alone. */
3821 if (low == NULL_TREE || high == NULL_TREE
3822 || ! integer_zerop (low) || ! integer_zerop (high)
3823 || TREE_CODE (arg1) != INTEGER_CST)
3824 return NULL_TREE;
3826 switch (code)
3828 case NE_EXPR: /* - [c, c] */
3829 low = high = arg1;
3830 break;
3831 case EQ_EXPR: /* + [c, c] */
3832 in_p = ! in_p, low = high = arg1;
3833 break;
3834 case GT_EXPR: /* - [-, c] */
3835 low = 0, high = arg1;
3836 break;
3837 case GE_EXPR: /* + [c, -] */
3838 in_p = ! in_p, low = arg1, high = 0;
3839 break;
3840 case LT_EXPR: /* - [c, -] */
3841 low = arg1, high = 0;
3842 break;
3843 case LE_EXPR: /* + [-, c] */
3844 in_p = ! in_p, low = 0, high = arg1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 /* If this is an unsigned comparison, we also know that EXP is
3851 greater than or equal to zero. We base the range tests we make
3852 on that fact, so we record it here so we can parse existing
3853 range tests. We test arg0_type since often the return type
3854 of, e.g. EQ_EXPR, is boolean. */
3855 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3857 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3858 in_p, low, high, 1,
3859 build_int_cst (arg0_type, 0),
3860 NULL_TREE))
3861 return NULL_TREE;
3863 in_p = n_in_p, low = n_low, high = n_high;
3865 /* If the high bound is missing, but we have a nonzero low
3866 bound, reverse the range so it goes from zero to the low bound
3867 minus 1. */
3868 if (high == 0 && low && ! integer_zerop (low))
3870 in_p = ! in_p;
3871 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3872 integer_one_node, 0);
3873 low = build_int_cst (arg0_type, 0);
3877 *p_low = low;
3878 *p_high = high;
3879 *p_in_p = in_p;
3880 return arg0;
3882 case NEGATE_EXPR:
3883 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3884 low and high are non-NULL, then normalize will DTRT. */
3885 if (!TYPE_UNSIGNED (arg0_type)
3886 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3888 if (low == NULL_TREE)
3889 low = TYPE_MIN_VALUE (arg0_type);
3890 if (high == NULL_TREE)
3891 high = TYPE_MAX_VALUE (arg0_type);
3894 /* (-x) IN [a,b] -> x in [-b, -a] */
3895 n_low = range_binop (MINUS_EXPR, exp_type,
3896 build_int_cst (exp_type, 0),
3897 0, high, 1);
3898 n_high = range_binop (MINUS_EXPR, exp_type,
3899 build_int_cst (exp_type, 0),
3900 0, low, 0);
3901 if (n_high != 0 && TREE_OVERFLOW (n_high))
3902 return NULL_TREE;
3903 goto normalize;
3905 case BIT_NOT_EXPR:
3906 /* ~ X -> -X - 1 */
3907 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3908 build_int_cst (exp_type, 1));
3910 case PLUS_EXPR:
3911 case MINUS_EXPR:
3912 if (TREE_CODE (arg1) != INTEGER_CST)
3913 return NULL_TREE;
3915 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3916 move a constant to the other side. */
3917 if (!TYPE_UNSIGNED (arg0_type)
3918 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3919 return NULL_TREE;
3921 /* If EXP is signed, any overflow in the computation is undefined,
3922 so we don't worry about it so long as our computations on
3923 the bounds don't overflow. For unsigned, overflow is defined
3924 and this is exactly the right thing. */
3925 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3926 arg0_type, low, 0, arg1, 0);
3927 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3928 arg0_type, high, 1, arg1, 0);
3929 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3930 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3931 return NULL_TREE;
3933 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3934 *strict_overflow_p = true;
3936 normalize:
3937 /* Check for an unsigned range which has wrapped around the maximum
3938 value thus making n_high < n_low, and normalize it. */
3939 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3941 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3942 integer_one_node, 0);
3943 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3944 integer_one_node, 0);
3946 /* If the range is of the form +/- [ x+1, x ], we won't
3947 be able to normalize it. But then, it represents the
3948 whole range or the empty set, so make it
3949 +/- [ -, - ]. */
3950 if (tree_int_cst_equal (n_low, low)
3951 && tree_int_cst_equal (n_high, high))
3952 low = high = 0;
3953 else
3954 in_p = ! in_p;
3956 else
3957 low = n_low, high = n_high;
3959 *p_low = low;
3960 *p_high = high;
3961 *p_in_p = in_p;
3962 return arg0;
3964 CASE_CONVERT:
3965 case NON_LVALUE_EXPR:
3966 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3967 return NULL_TREE;
3969 if (! INTEGRAL_TYPE_P (arg0_type)
3970 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3971 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3972 return NULL_TREE;
3974 n_low = low, n_high = high;
3976 if (n_low != 0)
3977 n_low = fold_convert_loc (loc, arg0_type, n_low);
3979 if (n_high != 0)
3980 n_high = fold_convert_loc (loc, arg0_type, n_high);
3982 /* If we're converting arg0 from an unsigned type to exp,
3983 a signed type, we will be doing the comparison as unsigned.
3984 The tests above have already verified that LOW and HIGH
3985 are both positive.
3987 So we have to ensure that we will handle large unsigned
3988 values the same way that the current signed bounds treat
3989 negative values. */
3991 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3993 tree high_positive;
3994 tree equiv_type;
3995 /* For fixed-point modes, we need to pass the saturating flag
3996 as the 2nd parameter. */
3997 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3998 equiv_type
3999 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4000 TYPE_SATURATING (arg0_type));
4001 else
4002 equiv_type
4003 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4005 /* A range without an upper bound is, naturally, unbounded.
4006 Since convert would have cropped a very large value, use
4007 the max value for the destination type. */
4008 high_positive
4009 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4010 : TYPE_MAX_VALUE (arg0_type);
4012 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4013 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4014 fold_convert_loc (loc, arg0_type,
4015 high_positive),
4016 build_int_cst (arg0_type, 1));
4018 /* If the low bound is specified, "and" the range with the
4019 range for which the original unsigned value will be
4020 positive. */
4021 if (low != 0)
4023 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4024 1, fold_convert_loc (loc, arg0_type,
4025 integer_zero_node),
4026 high_positive))
4027 return NULL_TREE;
4029 in_p = (n_in_p == in_p);
4031 else
4033 /* Otherwise, "or" the range with the range of the input
4034 that will be interpreted as negative. */
4035 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4036 1, fold_convert_loc (loc, arg0_type,
4037 integer_zero_node),
4038 high_positive))
4039 return NULL_TREE;
4041 in_p = (in_p != n_in_p);
4045 *p_low = n_low;
4046 *p_high = n_high;
4047 *p_in_p = in_p;
4048 return arg0;
4050 default:
4051 return NULL_TREE;
4055 /* Given EXP, a logical expression, set the range it is testing into
4056 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4057 actually being tested. *PLOW and *PHIGH will be made of the same
4058 type as the returned expression. If EXP is not a comparison, we
4059 will most likely not be returning a useful value and range. Set
4060 *STRICT_OVERFLOW_P to true if the return value is only valid
4061 because signed overflow is undefined; otherwise, do not change
4062 *STRICT_OVERFLOW_P. */
4064 tree
4065 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4066 bool *strict_overflow_p)
4068 enum tree_code code;
4069 tree arg0, arg1 = NULL_TREE;
4070 tree exp_type, nexp;
4071 int in_p;
4072 tree low, high;
4073 location_t loc = EXPR_LOCATION (exp);
4075 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4076 and see if we can refine the range. Some of the cases below may not
4077 happen, but it doesn't seem worth worrying about this. We "continue"
4078 the outer loop when we've changed something; otherwise we "break"
4079 the switch, which will "break" the while. */
4081 in_p = 0;
4082 low = high = build_int_cst (TREE_TYPE (exp), 0);
4084 while (1)
4086 code = TREE_CODE (exp);
4087 exp_type = TREE_TYPE (exp);
4088 arg0 = NULL_TREE;
4090 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4092 if (TREE_OPERAND_LENGTH (exp) > 0)
4093 arg0 = TREE_OPERAND (exp, 0);
4094 if (TREE_CODE_CLASS (code) == tcc_binary
4095 || TREE_CODE_CLASS (code) == tcc_comparison
4096 || (TREE_CODE_CLASS (code) == tcc_expression
4097 && TREE_OPERAND_LENGTH (exp) > 1))
4098 arg1 = TREE_OPERAND (exp, 1);
4100 if (arg0 == NULL_TREE)
4101 break;
4103 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4104 &high, &in_p, strict_overflow_p);
4105 if (nexp == NULL_TREE)
4106 break;
4107 exp = nexp;
4110 /* If EXP is a constant, we can evaluate whether this is true or false. */
4111 if (TREE_CODE (exp) == INTEGER_CST)
4113 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4114 exp, 0, low, 0))
4115 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4116 exp, 1, high, 1)));
4117 low = high = 0;
4118 exp = 0;
4121 *pin_p = in_p, *plow = low, *phigh = high;
4122 return exp;
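/* For illustration: for EXP "x > 10" this returns "x" with *PIN_P
   clear and bounds [-, 10]; in the notation above, that is the range
   "- [-, 10]", i.e. x is greater than 10.  */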
4125 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4126 type, TYPE, return an expression to test if EXP is in (or out of, depending
4127 on IN_P) the range. Return 0 if the test couldn't be created. */
4129 tree
4130 build_range_check (location_t loc, tree type, tree exp, int in_p,
4131 tree low, tree high)
4133 tree etype = TREE_TYPE (exp), value;
4135 #ifdef HAVE_canonicalize_funcptr_for_compare
4136 /* Disable this optimization for function pointer expressions
4137 on targets that require function pointer canonicalization. */
4138 if (HAVE_canonicalize_funcptr_for_compare
4139 && TREE_CODE (etype) == POINTER_TYPE
4140 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4141 return NULL_TREE;
4142 #endif
4144 if (! in_p)
4146 value = build_range_check (loc, type, exp, 1, low, high);
4147 if (value != 0)
4148 return invert_truthvalue_loc (loc, value);
4150 return 0;
4153 if (low == 0 && high == 0)
4154 return build_int_cst (type, 1);
4156 if (low == 0)
4157 return fold_build2_loc (loc, LE_EXPR, type, exp,
4158 fold_convert_loc (loc, etype, high));
4160 if (high == 0)
4161 return fold_build2_loc (loc, GE_EXPR, type, exp,
4162 fold_convert_loc (loc, etype, low));
4164 if (operand_equal_p (low, high, 0))
4165 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4166 fold_convert_loc (loc, etype, low));
4168 if (integer_zerop (low))
4170 if (! TYPE_UNSIGNED (etype))
4172 etype = unsigned_type_for (etype);
4173 high = fold_convert_loc (loc, etype, high);
4174 exp = fold_convert_loc (loc, etype, exp);
4176 return build_range_check (loc, type, exp, 1, 0, high);
4179 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4180 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4182 unsigned HOST_WIDE_INT lo;
4183 HOST_WIDE_INT hi;
4184 int prec;
4186 prec = TYPE_PRECISION (etype);
4187 if (prec <= HOST_BITS_PER_WIDE_INT)
4189 hi = 0;
4190 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4192 else
4194 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4195 lo = (unsigned HOST_WIDE_INT) -1;
4198 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4200 if (TYPE_UNSIGNED (etype))
4202 tree signed_etype = signed_type_for (etype);
4203 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4204 etype
4205 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4206 else
4207 etype = signed_etype;
4208 exp = fold_convert_loc (loc, etype, exp);
4210 return fold_build2_loc (loc, GT_EXPR, type, exp,
4211 build_int_cst (etype, 0));
4215 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4216 This requires wrap-around arithmetic for the type of the expression.
4217 First make sure that arithmetic in this type is valid, then make sure
4218 that it wraps around. */
4219 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4220 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4221 TYPE_UNSIGNED (etype));
4223 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4225 tree utype, minv, maxv;
4227 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4228 for the type in question, as we rely on this here. */
4229 utype = unsigned_type_for (etype);
4230 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4231 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4232 integer_one_node, 1);
4233 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4235 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4236 minv, 1, maxv, 1)))
4237 etype = utype;
4238 else
4239 return 0;
4242 high = fold_convert_loc (loc, etype, high);
4243 low = fold_convert_loc (loc, etype, low);
4244 exp = fold_convert_loc (loc, etype, exp);
4246 value = const_binop (MINUS_EXPR, high, low);
4249 if (POINTER_TYPE_P (etype))
4251 if (value != 0 && !TREE_OVERFLOW (value))
4253 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4254 return build_range_check (loc, type,
4255 fold_build_pointer_plus_loc (loc, exp, low),
4256 1, build_int_cst (etype, 0), value);
4258 return 0;
4261 if (value != 0 && !TREE_OVERFLOW (value))
4262 return build_range_check (loc, type,
4263 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4264 1, build_int_cst (etype, 0), value);
4266 return 0;
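/* A stand-alone sketch (illustrative only) of the final check built
   above when IN_P is set and both bounds are present: once everything
   has been converted to an unsigned type, the two comparisons
   low <= exp && exp <= high collapse into one subtraction and one
   compare, relying on unsigned wrap-around.  Assumes low <= high.  */

static int
range_check_sketch (unsigned int exp, unsigned int low, unsigned int high)
{
  return exp - low <= high - low;
}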
4269 /* Return the predecessor of VAL in its type, handling the infinite case. */
4271 static tree
4272 range_predecessor (tree val)
4274 tree type = TREE_TYPE (val);
4276 if (INTEGRAL_TYPE_P (type)
4277 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4278 return 0;
4279 else
4280 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4283 /* Return the successor of VAL in its type, handling the infinite case. */
4285 static tree
4286 range_successor (tree val)
4288 tree type = TREE_TYPE (val);
4290 if (INTEGRAL_TYPE_P (type)
4291 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4292 return 0;
4293 else
4294 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297 /* Given two ranges, see if we can merge them into one. Return 1 if we
4298 can, 0 if we can't. Set the output range into the specified parameters. */
4300 bool
4301 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4302 tree high0, int in1_p, tree low1, tree high1)
4304 int no_overlap;
4305 int subset;
4306 int temp;
4307 tree tem;
4308 int in_p;
4309 tree low, high;
4310 int lowequal = ((low0 == 0 && low1 == 0)
4311 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4312 low0, 0, low1, 0)));
4313 int highequal = ((high0 == 0 && high1 == 0)
4314 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4315 high0, 1, high1, 1)));
4317 /* Make range 0 be the range that starts first, or ends last if they
4318 start at the same value. Swap them if it isn't. */
4319 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4320 low0, 0, low1, 0))
4321 || (lowequal
4322 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4323 high1, 1, high0, 1))))
4325 temp = in0_p, in0_p = in1_p, in1_p = temp;
4326 tem = low0, low0 = low1, low1 = tem;
4327 tem = high0, high0 = high1, high1 = tem;
4330 /* Now flag two cases, whether the ranges are disjoint or whether the
4331 second range is totally subsumed in the first. Note that the tests
4332 below are simplified by the ones above. */
4333 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4334 high0, 1, low1, 0));
4335 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4336 high1, 1, high0, 1));
4338 /* We now have four cases, depending on whether we are including or
4339 excluding the two ranges. */
4340 if (in0_p && in1_p)
4342 /* If they don't overlap, the result is false. If the second range
4343 is a subset it is the result. Otherwise, the range is from the start
4344 of the second to the end of the first. */
4345 if (no_overlap)
4346 in_p = 0, low = high = 0;
4347 else if (subset)
4348 in_p = 1, low = low1, high = high1;
4349 else
4350 in_p = 1, low = low1, high = high0;
4353 else if (in0_p && ! in1_p)
4355 /* If they don't overlap, the result is the first range. If they are
4356 equal, the result is false. If the second range is a subset of the
4357 first, and the ranges begin at the same place, we go from just after
4358 the end of the second range to the end of the first. If the second
4359 range is not a subset of the first, or if it is a subset and both
4360 ranges end at the same place, the range starts at the start of the
4361 first range and ends just before the second range.
4362 Otherwise, we can't describe this as a single range. */
4363 if (no_overlap)
4364 in_p = 1, low = low0, high = high0;
4365 else if (lowequal && highequal)
4366 in_p = 0, low = high = 0;
4367 else if (subset && lowequal)
4369 low = range_successor (high1);
4370 high = high0;
4371 in_p = 1;
4372 if (low == 0)
4374 /* We are in the weird situation where high0 > high1 but
4375 high1 has no successor. Punt. */
4376 return 0;
4379 else if (! subset || highequal)
4381 low = low0;
4382 high = range_predecessor (low1);
4383 in_p = 1;
4384 if (high == 0)
4386 /* low0 < low1 but low1 has no predecessor. Punt. */
4387 return 0;
4390 else
4391 return 0;
4394 else if (! in0_p && in1_p)
4396 /* If they don't overlap, the result is the second range. If the second
4397 is a subset of the first, the result is false. Otherwise,
4398 the range starts just after the first range and ends at the
4399 end of the second. */
4400 if (no_overlap)
4401 in_p = 1, low = low1, high = high1;
4402 else if (subset || highequal)
4403 in_p = 0, low = high = 0;
4404 else
4406 low = range_successor (high0);
4407 high = high1;
4408 in_p = 1;
4409 if (low == 0)
4411 /* high1 > high0 but high0 has no successor. Punt. */
4412 return 0;
4417 else
4419 /* The case where we are excluding both ranges. Here the complex case
4420 is if they don't overlap. In that case, the only time we have a
4421 range is if they are adjacent. If the second is a subset of the
4422 first, the result is the first. Otherwise, the range to exclude
4423 starts at the beginning of the first range and ends at the end of the
4424 second. */
4425 if (no_overlap)
4427 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4428 range_successor (high0),
4429 1, low1, 0)))
4430 in_p = 0, low = low0, high = high1;
4431 else
4433 /* Canonicalize - [min, x] into - [-, x]. */
4434 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4435 switch (TREE_CODE (TREE_TYPE (low0)))
4437 case ENUMERAL_TYPE:
4438 if (TYPE_PRECISION (TREE_TYPE (low0))
4439 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4440 break;
4441 /* FALLTHROUGH */
4442 case INTEGER_TYPE:
4443 if (tree_int_cst_equal (low0,
4444 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4445 low0 = 0;
4446 break;
4447 case POINTER_TYPE:
4448 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4449 && integer_zerop (low0))
4450 low0 = 0;
4451 break;
4452 default:
4453 break;
4456 /* Canonicalize - [x, max] into - [x, -]. */
4457 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4458 switch (TREE_CODE (TREE_TYPE (high1)))
4460 case ENUMERAL_TYPE:
4461 if (TYPE_PRECISION (TREE_TYPE (high1))
4462 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4463 break;
4464 /* FALLTHROUGH */
4465 case INTEGER_TYPE:
4466 if (tree_int_cst_equal (high1,
4467 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4468 high1 = 0;
4469 break;
4470 case POINTER_TYPE:
4471 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4472 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4473 high1, 1,
4474 integer_one_node, 1)))
4475 high1 = 0;
4476 break;
4477 default:
4478 break;
4481 /* The ranges might also be adjacent between the maximum and
4482 minimum values of the given type. For
4483 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4484 return + [x + 1, y - 1]. */
4485 if (low0 == 0 && high1 == 0)
4487 low = range_successor (high0);
4488 high = range_predecessor (low1);
4489 if (low == 0 || high == 0)
4490 return 0;
4492 in_p = 1;
4494 else
4495 return 0;
4498 else if (subset)
4499 in_p = 0, low = low0, high = high0;
4500 else
4501 in_p = 0, low = low0, high = high1;
4504 *pin_p = in_p, *plow = low, *phigh = high;
4505 return 1;
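/* For illustration: merging + [2, 5] with + [4, 9] for an AND
   (IN0_P == IN1_P == 1) yields + [4, 5].  For an OR, the caller
   inverts both ranges first, merges - [2, 5] with - [4, 9] into
   - [2, 9], and inverts the result back to + [2, 9].  */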
4509 /* Subroutine of fold, looking inside expressions of the form
4510 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4511 of the COND_EXPR. This function is being used also to optimize
4512 A op B ? C : A, by reversing the comparison first.
4514 Return a folded expression whose code is not a COND_EXPR
4515 anymore, or NULL_TREE if no folding opportunity is found. */
4517 static tree
4518 fold_cond_expr_with_comparison (location_t loc, tree type,
4519 tree arg0, tree arg1, tree arg2)
4521 enum tree_code comp_code = TREE_CODE (arg0);
4522 tree arg00 = TREE_OPERAND (arg0, 0);
4523 tree arg01 = TREE_OPERAND (arg0, 1);
4524 tree arg1_type = TREE_TYPE (arg1);
4525 tree tem;
4527 STRIP_NOPS (arg1);
4528 STRIP_NOPS (arg2);
4530 /* If we have A op 0 ? A : -A, consider applying the following
4531 transformations:
4533 A == 0? A : -A same as -A
4534 A != 0? A : -A same as A
4535 A >= 0? A : -A same as abs (A)
4536 A > 0? A : -A same as abs (A)
4537 A <= 0? A : -A same as -abs (A)
4538 A < 0? A : -A same as -abs (A)
4540 None of these transformations work for modes with signed
4541 zeros. If A is +/-0, the first two transformations will
4542 change the sign of the result (from +0 to -0, or vice
4543 versa). The last four will fix the sign of the result,
4544 even though the original expressions could be positive or
4545 negative, depending on the sign of A.
4547 Note that all these transformations are correct if A is
4548 NaN, since the two alternatives (A and -A) are also NaNs. */
4549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4550 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4551 ? real_zerop (arg01)
4552 : integer_zerop (arg01))
4553 && ((TREE_CODE (arg2) == NEGATE_EXPR
4554 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4555 /* In the case that A is of the form X-Y, '-A' (arg2) may
4556 have already been folded to Y-X, check for that. */
4557 || (TREE_CODE (arg1) == MINUS_EXPR
4558 && TREE_CODE (arg2) == MINUS_EXPR
4559 && operand_equal_p (TREE_OPERAND (arg1, 0),
4560 TREE_OPERAND (arg2, 1), 0)
4561 && operand_equal_p (TREE_OPERAND (arg1, 1),
4562 TREE_OPERAND (arg2, 0), 0))))
4563 switch (comp_code)
4565 case EQ_EXPR:
4566 case UNEQ_EXPR:
4567 tem = fold_convert_loc (loc, arg1_type, arg1);
4568 return pedantic_non_lvalue_loc (loc,
4569 fold_convert_loc (loc, type,
4570 negate_expr (tem)));
4571 case NE_EXPR:
4572 case LTGT_EXPR:
4573 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4574 case UNGE_EXPR:
4575 case UNGT_EXPR:
4576 if (flag_trapping_math)
4577 break;
4578 /* Fall through. */
4579 case GE_EXPR:
4580 case GT_EXPR:
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4586 case UNLE_EXPR:
4587 case UNLT_EXPR:
4588 if (flag_trapping_math)
4589 break;
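/* Fall through. */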
4590 case LE_EXPR:
4591 case LT_EXPR:
4592 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4593 arg1 = fold_convert_loc (loc, signed_type_for
4594 (TREE_TYPE (arg1)), arg1);
4595 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4596 return negate_expr (fold_convert_loc (loc, type, tem));
4597 default:
4598 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4599 break;
4602 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4603 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4604 both transformations are correct when A is NaN: A != 0
4605 is then true, and A == 0 is false. */
4607 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4608 && integer_zerop (arg01) && integer_zerop (arg2))
4610 if (comp_code == NE_EXPR)
4611 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4612 else if (comp_code == EQ_EXPR)
4613 return build_int_cst (type, 0);
4616 /* Try some transformations of A op B ? A : B.
4618 A == B? A : B same as B
4619 A != B? A : B same as A
4620 A >= B? A : B same as max (A, B)
4621 A > B? A : B same as max (B, A)
4622 A <= B? A : B same as min (A, B)
4623 A < B? A : B same as min (B, A)
4625 As above, these transformations don't work in the presence
4626 of signed zeros. For example, if A and B are zeros of
4627 opposite sign, the first two transformations will change
4628 the sign of the result. In the last four, the original
4629 expressions give different results for (A=+0, B=-0) and
4630 (A=-0, B=+0), but the transformed expressions do not.
4632 The first two transformations are correct if either A or B
4633 is a NaN. In the first transformation, the condition will
4634 be false, and B will indeed be chosen. In the case of the
4635 second transformation, the condition A != B will be true,
4636 and A will be chosen.
4638 The conversions to max() and min() are not correct if B is
4639 a number and A is not. The conditions in the original
4640 expressions will be false, so all four give B. The min()
4641 and max() versions would give a NaN instead. */
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4644 /* Avoid these transformations if the COND_EXPR may be used
4645 as an lvalue in the C++ front-end. PR c++/19199. */
4646 && (in_gimple_form
4647 || (strcmp (lang_hooks.name, "GNU C++") != 0
4648 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4649 || ! maybe_lvalue_p (arg1)
4650 || ! maybe_lvalue_p (arg2)))
4652 tree comp_op0 = arg00;
4653 tree comp_op1 = arg01;
4654 tree comp_type = TREE_TYPE (comp_op0);
4656 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4657 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4659 comp_type = type;
4660 comp_op0 = arg1;
4661 comp_op1 = arg2;
4664 switch (comp_code)
4666 case EQ_EXPR:
4667 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4668 case NE_EXPR:
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4670 case LE_EXPR:
4671 case LT_EXPR:
4672 case UNLE_EXPR:
4673 case UNLT_EXPR:
4674 /* In C++ a ?: expression can be an lvalue, so put the
4675 operand which will be used if they are equal first
4676 so that we can convert this back to the
4677 corresponding COND_EXPR. */
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4681 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4682 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4683 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4685 comp_op1, comp_op0);
4686 return pedantic_non_lvalue_loc (loc,
4687 fold_convert_loc (loc, type, tem));
4689 break;
4690 case GE_EXPR:
4691 case GT_EXPR:
4692 case UNGE_EXPR:
4693 case UNGT_EXPR:
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4697 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4698 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4699 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4700 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4701 comp_op1, comp_op0);
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, tem));
4705 break;
4706 case UNEQ_EXPR:
4707 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4708 return pedantic_non_lvalue_loc (loc,
4709 fold_convert_loc (loc, type, arg2));
4710 break;
4711 case LTGT_EXPR:
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4713 return pedantic_non_lvalue_loc (loc,
4714 fold_convert_loc (loc, type, arg1));
4715 break;
4716 default:
4717 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4718 break;
4722 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4723 we might still be able to simplify this. For example,
4724 if C1 is one less or one more than C2, this might have started
4725 out as a MIN or MAX and been transformed by this function.
4726 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4728 if (INTEGRAL_TYPE_P (type)
4729 && TREE_CODE (arg01) == INTEGER_CST
4730 && TREE_CODE (arg2) == INTEGER_CST)
4731 switch (comp_code)
4733 case EQ_EXPR:
4734 if (TREE_CODE (arg1) == INTEGER_CST)
4735 break;
4736 /* We can replace A with C1 in this case. */
4737 arg1 = fold_convert_loc (loc, type, arg01);
4738 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4740 case LT_EXPR:
4741 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4742 MIN_EXPR, to preserve the signedness of the comparison. */
4743 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4744 OEP_ONLY_CONST)
4745 && operand_equal_p (arg01,
4746 const_binop (PLUS_EXPR, arg2,
4747 build_int_cst (type, 1)),
4748 OEP_ONLY_CONST))
4750 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4751 fold_convert_loc (loc, TREE_TYPE (arg00),
4752 arg2));
4753 return pedantic_non_lvalue_loc (loc,
4754 fold_convert_loc (loc, type, tem));
4756 break;
4758 case LE_EXPR:
4759 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4760 as above. */
4761 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4762 OEP_ONLY_CONST)
4763 && operand_equal_p (arg01,
4764 const_binop (MINUS_EXPR, arg2,
4765 build_int_cst (type, 1)),
4766 OEP_ONLY_CONST))
4768 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4769 fold_convert_loc (loc, TREE_TYPE (arg00),
4770 arg2));
4771 return pedantic_non_lvalue_loc (loc,
4772 fold_convert_loc (loc, type, tem));
4774 break;
4776 case GT_EXPR:
4777 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4778 MAX_EXPR, to preserve the signedness of the comparison. */
4779 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4780 OEP_ONLY_CONST)
4781 && operand_equal_p (arg01,
4782 const_binop (MINUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4784 OEP_ONLY_CONST))
4786 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 arg2));
4789 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4791 break;
4793 case GE_EXPR:
4794 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4795 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4796 OEP_ONLY_CONST)
4797 && operand_equal_p (arg01,
4798 const_binop (PLUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4800 OEP_ONLY_CONST))
4802 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4804 arg2));
4805 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4807 break;
4808 case NE_EXPR:
4809 break;
4810 default:
4811 gcc_unreachable ();
4814 return NULL_TREE;
4819 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4820 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4821 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4822 false) >= 2)
4823 #endif
4825 /* EXP is some logical combination of boolean tests. See if we can
4826 merge it into some range test. Return the new tree if so. */
4828 static tree
4829 fold_range_test (location_t loc, enum tree_code code, tree type,
4830 tree op0, tree op1)
4832 int or_op = (code == TRUTH_ORIF_EXPR
4833 || code == TRUTH_OR_EXPR);
4834 int in0_p, in1_p, in_p;
4835 tree low0, low1, low, high0, high1, high;
4836 bool strict_overflow_p = false;
4837 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4838 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4839 tree tem;
4840 const char * const warnmsg = G_("assuming signed overflow does not occur "
4841 "when simplifying range test");
4843 /* If this is an OR operation, invert both sides; we will invert
4844 again at the end. */
4845 if (or_op)
4846 in0_p = ! in0_p, in1_p = ! in1_p;
4848 /* If both expressions are the same, if we can merge the ranges, and we
4849 can build the range test, return it or it inverted. If one of the
4850 ranges is always true or always false, consider it to be the same
4851 expression as the other. */
4852 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4853 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4854 in1_p, low1, high1)
4855 && 0 != (tem = (build_range_check (loc, type,
4856 lhs != 0 ? lhs
4857 : rhs != 0 ? rhs : integer_zero_node,
4858 in_p, low, high))))
4860 if (strict_overflow_p)
4861 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4862 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4865 /* On machines where the branch cost is expensive, if this is a
4866 short-circuited branch and the underlying object on both sides
4867 is the same, make a non-short-circuit operation. */
4868 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4869 && lhs != 0 && rhs != 0
4870 && (code == TRUTH_ANDIF_EXPR
4871 || code == TRUTH_ORIF_EXPR)
4872 && operand_equal_p (lhs, rhs, 0))
4874 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4875 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4876 which cases we can't do this. */
4877 if (simple_operand_p (lhs))
4878 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4879 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4880 type, op0, op1);
4882 else if (!lang_hooks.decls.global_bindings_p ()
4883 && !CONTAINS_PLACEHOLDER_P (lhs))
4885 tree common = save_expr (lhs);
4887 if (0 != (lhs = build_range_check (loc, type, common,
4888 or_op ? ! in0_p : in0_p,
4889 low0, high0))
4890 && (0 != (rhs = build_range_check (loc, type, common,
4891 or_op ? ! in1_p : in1_p,
4892 low1, high1))))
4894 if (strict_overflow_p)
4895 fold_overflow_warning (warnmsg,
4896 WARN_STRICT_OVERFLOW_COMPARISON);
4897 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4898 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4899 type, lhs, rhs);
4904 return 0;
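/* For illustration: "ch >= '0' && ch <= '9'" is merged here into the
   single range + ['0', '9'], which build_range_check then emits, in
   effect, as the one unsigned comparison (unsigned) (ch - '0') <= 9.  */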
4907 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4908 bit value. Arrange things so the extra bits will be set to zero if and
4909 only if C is sign-extended to its full width. If MASK is nonzero,
4910 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4912 static tree
4913 unextend (tree c, int p, int unsignedp, tree mask)
4915 tree type = TREE_TYPE (c);
4916 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4917 tree temp;
4919 if (p == modesize || unsignedp)
4920 return c;
4922 /* We work by getting just the sign bit into the low-order bit, then
4923 into the high-order bit, then sign-extend. We then XOR that value
4924 with C. */
4925 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4926 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4928 /* We must use a signed type in order to get an arithmetic right shift.
4929 However, we must also avoid introducing accidental overflows, so that
4930 a subsequent call to integer_zerop will work. Hence we must
4931 do the type conversion here. At this point, the constant is either
4932 zero or one, and the conversion to a signed type can never overflow.
4933 We could get an overflow if this conversion is done anywhere else. */
4934 if (TYPE_UNSIGNED (type))
4935 temp = fold_convert (signed_type_for (type), temp);
4937 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4938 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4939 if (mask != 0)
4940 temp = const_binop (BIT_AND_EXPR, temp,
4941 fold_convert (TREE_TYPE (c), mask));
4942 /* If necessary, convert the type back to match the type of C. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (type, temp);
4946 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
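/* A stand-alone sketch (illustrative only; assumes 1 <= P < 32 and a
   32-bit unsigned int) of the trick above: XOR C with its P-bit sign
   bit replicated through the extra bits.  The extra bits of the
   result are zero if and only if C was sign-extended from P bits.  */

static unsigned int
unextend_sketch (unsigned int c, int p)
{
  unsigned int sign = (c >> (p - 1)) & 1;     /* the P-bit sign bit of C */
  unsigned int extra = sign ? ~0u << p : 0u;  /* its copies above bit P-1 */
  return c ^ extra;   /* extra bits zero iff C was sign-extended */
}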
4949 /* For an expression that has the form
4950 (A && B) || ~B
4952 (A || B) && ~B,
4953 we can drop one of the inner expressions and simplify to
4954 A || ~B
4956 A && ~B
4957 LOC is the location of the resulting expression. OP is the inner
4958 logical operation; the left-hand side in the examples above, while CMPOP
4959 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4960 removing a condition that guards another, as in
4961 (A != NULL && A->...) || A == NULL
4962 which we must not transform. If RHS_ONLY is true, only eliminate the
4963 right-most operand of the inner logical operation. */
4965 static tree
4966 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4967 bool rhs_only)
4969 tree type = TREE_TYPE (cmpop);
4970 enum tree_code code = TREE_CODE (cmpop);
4971 enum tree_code truthop_code = TREE_CODE (op);
4972 tree lhs = TREE_OPERAND (op, 0);
4973 tree rhs = TREE_OPERAND (op, 1);
4974 tree orig_lhs = lhs, orig_rhs = rhs;
4975 enum tree_code rhs_code = TREE_CODE (rhs);
4976 enum tree_code lhs_code = TREE_CODE (lhs);
4977 enum tree_code inv_code;
4979 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4980 return NULL_TREE;
4982 if (TREE_CODE_CLASS (code) != tcc_comparison)
4983 return NULL_TREE;
4985 if (rhs_code == truthop_code)
4987 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4988 if (newrhs != NULL_TREE)
4990 rhs = newrhs;
4991 rhs_code = TREE_CODE (rhs);
4994 if (lhs_code == truthop_code && !rhs_only)
4996 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4997 if (newlhs != NULL_TREE)
4999 lhs = newlhs;
5000 lhs_code = TREE_CODE (lhs);
5004 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5005 if (inv_code == rhs_code
5006 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5007 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5008 return lhs;
5009 if (!rhs_only && inv_code == lhs_code
5010 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5011 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5012 return rhs;
5013 if (rhs != orig_rhs || lhs != orig_lhs)
5014 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5015 lhs, rhs);
5016 return NULL_TREE;
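/* For illustration: given OP "x > 0 && y != 0" and CMPOP "y == 0",
   the inner "y != 0" is the inverse of CMPOP, so OP is reduced to
   "x > 0"; the caller can then fold (x > 0 && y != 0) || y == 0
   into x > 0 || y == 0.  */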
5019 /* Find ways of folding logical expressions of LHS and RHS:
5020 Try to merge two comparisons to the same innermost item.
5021 Look for range tests like "ch >= '0' && ch <= '9'".
5022 Look for combinations of simple terms on machines with expensive branches
5023 and evaluate the RHS unconditionally.
5025 For example, if we have p->a == 2 && p->b == 4 and we can make an
5026 object large enough to span both A and B, we can do this with a comparison
5027 against the object ANDed with a mask.
5029 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5030 operations to do this with one comparison.
5032 We check for both normal comparisons and the BIT_AND_EXPRs made by
5033 this function and the one above.
5035 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5036 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5038 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5039 two operands.
5041 We return the simplified tree or 0 if no optimization is possible. */
5043 static tree
5044 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5045 tree lhs, tree rhs)
5047 /* If this is the "or" of two comparisons, we can do something if
5048 the comparisons are NE_EXPR. If this is the "and", we can do something
5049 if the comparisons are EQ_EXPR. I.e.,
5050 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5052 WANTED_CODE is this operation code. For single bit fields, we can
5053 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5054 comparison for one-bit fields. */
5056 enum tree_code wanted_code;
5057 enum tree_code lcode, rcode;
5058 tree ll_arg, lr_arg, rl_arg, rr_arg;
5059 tree ll_inner, lr_inner, rl_inner, rr_inner;
5060 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5061 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5062 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5063 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5064 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5065 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5066 enum machine_mode lnmode, rnmode;
5067 tree ll_mask, lr_mask, rl_mask, rr_mask;
5068 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5069 tree l_const, r_const;
5070 tree lntype, rntype, result;
5071 HOST_WIDE_INT first_bit, end_bit;
5072 int volatilep;
5074 /* Start by getting the comparison codes. Fail if anything is volatile.
5075 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5076 it were surrounded with a NE_EXPR. */
5078 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5079 return 0;
5081 lcode = TREE_CODE (lhs);
5082 rcode = TREE_CODE (rhs);
5084 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5086 lhs = build2 (NE_EXPR, truth_type, lhs,
5087 build_int_cst (TREE_TYPE (lhs), 0));
5088 lcode = NE_EXPR;
5091 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5093 rhs = build2 (NE_EXPR, truth_type, rhs,
5094 build_int_cst (TREE_TYPE (rhs), 0));
5095 rcode = NE_EXPR;
5098 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5099 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5100 return 0;
5102 ll_arg = TREE_OPERAND (lhs, 0);
5103 lr_arg = TREE_OPERAND (lhs, 1);
5104 rl_arg = TREE_OPERAND (rhs, 0);
5105 rr_arg = TREE_OPERAND (rhs, 1);
5107 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5108 if (simple_operand_p (ll_arg)
5109 && simple_operand_p (lr_arg))
5111 if (operand_equal_p (ll_arg, rl_arg, 0)
5112 && operand_equal_p (lr_arg, rr_arg, 0))
5114 result = combine_comparisons (loc, code, lcode, rcode,
5115 truth_type, ll_arg, lr_arg);
5116 if (result)
5117 return result;
5119 else if (operand_equal_p (ll_arg, rr_arg, 0)
5120 && operand_equal_p (lr_arg, rl_arg, 0))
5122 result = combine_comparisons (loc, code, lcode,
5123 swap_tree_comparison (rcode),
5124 truth_type, ll_arg, lr_arg);
5125 if (result)
5126 return result;
5130 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5131 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5133 /* If the RHS can be evaluated unconditionally and its operands are
5134 simple, it wins to evaluate the RHS unconditionally on machines
5135 with expensive branches. In this case, this isn't a comparison
5136 that can be merged. */
5138 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5139 false) >= 2
5140 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5141 && simple_operand_p (rl_arg)
5142 && simple_operand_p (rr_arg))
5144 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5145 if (code == TRUTH_OR_EXPR
5146 && lcode == NE_EXPR && integer_zerop (lr_arg)
5147 && rcode == NE_EXPR && integer_zerop (rr_arg)
5148 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5149 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 return build2_loc (loc, NE_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5155 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5156 if (code == TRUTH_AND_EXPR
5157 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5158 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5159 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5160 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5161 return build2_loc (loc, EQ_EXPR, truth_type,
5162 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5163 ll_arg, rl_arg),
5164 build_int_cst (TREE_TYPE (ll_arg), 0));
5167 /* See if the comparisons can be merged. Then get all the parameters for
5168 each side. */
5170 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5171 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5172 return 0;
5174 volatilep = 0;
5175 ll_inner = decode_field_reference (loc, ll_arg,
5176 &ll_bitsize, &ll_bitpos, &ll_mode,
5177 &ll_unsignedp, &volatilep, &ll_mask,
5178 &ll_and_mask);
5179 lr_inner = decode_field_reference (loc, lr_arg,
5180 &lr_bitsize, &lr_bitpos, &lr_mode,
5181 &lr_unsignedp, &volatilep, &lr_mask,
5182 &lr_and_mask);
5183 rl_inner = decode_field_reference (loc, rl_arg,
5184 &rl_bitsize, &rl_bitpos, &rl_mode,
5185 &rl_unsignedp, &volatilep, &rl_mask,
5186 &rl_and_mask);
5187 rr_inner = decode_field_reference (loc, rr_arg,
5188 &rr_bitsize, &rr_bitpos, &rr_mode,
5189 &rr_unsignedp, &volatilep, &rr_mask,
5190 &rr_and_mask);
5192 /* The inner operation on the lhs of each comparison must be the
5193 same if we are to be able to do anything.
5194 Then see if we have constants. If not, the same must be true for
5195 the rhs's. */
5196 if (volatilep || ll_inner == 0 || rl_inner == 0
5197 || ! operand_equal_p (ll_inner, rl_inner, 0))
5198 return 0;
5200 if (TREE_CODE (lr_arg) == INTEGER_CST
5201 && TREE_CODE (rr_arg) == INTEGER_CST)
5202 l_const = lr_arg, r_const = rr_arg;
5203 else if (lr_inner == 0 || rr_inner == 0
5204 || ! operand_equal_p (lr_inner, rr_inner, 0))
5205 return 0;
5206 else
5207 l_const = r_const = 0;
5209 /* If either comparison code is not correct for our logical operation,
5210 fail. However, we can convert a one-bit comparison against zero into
5211 the opposite comparison against that bit being set in the field. */
5213 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5214 if (lcode != wanted_code)
5216 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5218 /* Make the left operand unsigned, since we are only interested
5219 in the value of one bit. Otherwise we are doing the wrong
5220 thing below. */
5221 ll_unsignedp = 1;
5222 l_const = ll_mask;
5224 else
5225 return 0;
5228 /* This is analogous to the code for l_const above. */
5229 if (rcode != wanted_code)
5231 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5233 rl_unsignedp = 1;
5234 r_const = rl_mask;
5236 else
5237 return 0;
5240 /* See if we can find a mode that contains both fields being compared on
5241 the left. If we can't, fail. Otherwise, update all constants and masks
5242 to be relative to a field of that size. */
5243 first_bit = MIN (ll_bitpos, rl_bitpos);
5244 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5245 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5246 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5247 volatilep);
5248 if (lnmode == VOIDmode)
5249 return 0;
5251 lnbitsize = GET_MODE_BITSIZE (lnmode);
5252 lnbitpos = first_bit & ~ (lnbitsize - 1);
5253 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5254 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5256 if (BYTES_BIG_ENDIAN)
5258 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5259 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5262 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5263 size_int (xll_bitpos));
5264 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5265 size_int (xrl_bitpos));
5267 if (l_const)
5269 l_const = fold_convert_loc (loc, lntype, l_const);
5270 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5271 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5272 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5273 fold_build1_loc (loc, BIT_NOT_EXPR,
5274 lntype, ll_mask))))
5276 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5278 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5281 if (r_const)
5283 r_const = fold_convert_loc (loc, lntype, r_const);
5284 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5285 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5286 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5287 fold_build1_loc (loc, BIT_NOT_EXPR,
5288 lntype, rl_mask))))
5290 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5292 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5296 /* If the right sides are not constant, do the same for them. Also,
5297 disallow this optimization if a size or signedness mismatch occurs
5298 between the left and right sides. */
5299 if (l_const == 0)
5301 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5302 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5303 /* Make sure the two fields on the right
5304 correspond to the left without being swapped. */
5305 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5306 return 0;
5308 first_bit = MIN (lr_bitpos, rr_bitpos);
5309 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5310 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5311 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5312 volatilep);
5313 if (rnmode == VOIDmode)
5314 return 0;
5316 rnbitsize = GET_MODE_BITSIZE (rnmode);
5317 rnbitpos = first_bit & ~ (rnbitsize - 1);
5318 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5319 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5321 if (BYTES_BIG_ENDIAN)
5323 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5324 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5327 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5328 rntype, lr_mask),
5329 size_int (xlr_bitpos));
5330 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5331 rntype, rr_mask),
5332 size_int (xrr_bitpos));
5334 /* Make a mask that corresponds to both fields being compared.
5335 Do this for both items being compared. If the operands are the
5336 same size and the bits being compared are in the same position
5337 then we can do this by masking both and comparing the masked
5338 results. */
5339 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5340 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5341 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5343 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5344 ll_unsignedp || rl_unsignedp);
5345 if (! all_ones_mask_p (ll_mask, lnbitsize))
5346 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5348 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5349 lr_unsignedp || rr_unsignedp);
5350 if (! all_ones_mask_p (lr_mask, rnbitsize))
5351 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5353 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5356 /* There is still another way we can do something: If both pairs of
5357 fields being compared are adjacent, we may be able to make a wider
5358 field containing them both.
5360 Note that we still must mask the lhs/rhs expressions. Furthermore,
5361 the mask must be shifted to account for the shift done by
5362 make_bit_field_ref. */
5363 if ((ll_bitsize + ll_bitpos == rl_bitpos
5364 && lr_bitsize + lr_bitpos == rr_bitpos)
5365 || (ll_bitpos == rl_bitpos + rl_bitsize
5366 && lr_bitpos == rr_bitpos + rr_bitsize))
5368 tree type;
5370 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5371 ll_bitsize + rl_bitsize,
5372 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5373 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5374 lr_bitsize + rr_bitsize,
5375 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5377 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5378 size_int (MIN (xll_bitpos, xrl_bitpos)));
5379 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5380 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5382 /* Convert to the smaller type before masking out unwanted bits. */
5383 type = lntype;
5384 if (lntype != rntype)
5386 if (lnbitsize > rnbitsize)
5388 lhs = fold_convert_loc (loc, rntype, lhs);
5389 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5390 type = rntype;
5392 else if (lnbitsize < rnbitsize)
5394 rhs = fold_convert_loc (loc, lntype, rhs);
5395 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5396 type = lntype;
5400 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5401 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5403 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5404 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5406 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5409 return 0;
5412 /* Handle the case of comparisons with constants. If there is something in
5413 common between the masks, those bits of the constants must be the same.
5414 If not, the condition is always false. Test for this to avoid generating
5415 incorrect code below. */
5416 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5417 if (! integer_zerop (result)
5418 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5419 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5421 if (wanted_code == NE_EXPR)
5423 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5424 return constant_boolean_node (true, truth_type);
5426 else
5428 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5429 return constant_boolean_node (false, truth_type);
5433 /* Construct the expression we will return. First get the component
5434 reference we will make. Unless the mask is all ones the width of
5435 that field, perform the mask operation. Then compare with the
5436 merged constant. */
5437 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5438 ll_unsignedp || rl_unsignedp);
5440 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5441 if (! all_ones_mask_p (ll_mask, lnbitsize))
5442 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5444 return build2_loc (loc, wanted_code, truth_type, result,
5445 const_binop (BIT_IOR_EXPR, l_const, r_const));
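/* A stand-alone sketch (hypothetical struct, illustrative only; the
   merged constant 0x0402 assumes a little-endian target, and memcpy
   comes from <string.h> via system.h) of the kind of rewrite performed
   above: both one-byte fields are fetched by a single load and
   compared against one merged constant.  */

struct sketch_pair { unsigned char a, b; };

static int
merged_compare_sketch (const struct sketch_pair *p)
{
  unsigned short word;
  memcpy (&word, p, sizeof word);  /* fetch A and B with one load */
  return word == 0x0402;           /* same as p->a == 2 && p->b == 4 */
}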
5448 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5449 constant. */
5451 static tree
5452 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5453 tree op0, tree op1)
5455 tree arg0 = op0;
5456 enum tree_code op_code;
5457 tree comp_const;
5458 tree minmax_const;
5459 int consts_equal, consts_lt;
5460 tree inner;
5462 STRIP_SIGN_NOPS (arg0);
5464 op_code = TREE_CODE (arg0);
5465 minmax_const = TREE_OPERAND (arg0, 1);
5466 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5467 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5468 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5469 inner = TREE_OPERAND (arg0, 0);
5471 /* If something does not permit us to optimize, return NULL_TREE. */
5472 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5473 || TREE_CODE (comp_const) != INTEGER_CST
5474 || TREE_OVERFLOW (comp_const)
5475 || TREE_CODE (minmax_const) != INTEGER_CST
5476 || TREE_OVERFLOW (minmax_const))
5477 return NULL_TREE;
5479 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5480 and GT_EXPR, doing the rest with recursive calls using logical
5481 simplifications. */
5482 switch (code)
5484 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5486 tree tem
5487 = optimize_minmax_comparison (loc,
5488 invert_tree_comparison (code, false),
5489 type, op0, op1);
5490 if (tem)
5491 return invert_truthvalue_loc (loc, tem);
5492 return NULL_TREE;
5495 case GE_EXPR:
5496 return
5497 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5498 optimize_minmax_comparison
5499 (loc, EQ_EXPR, type, arg0, comp_const),
5500 optimize_minmax_comparison
5501 (loc, GT_EXPR, type, arg0, comp_const));
5503 case EQ_EXPR:
5504 if (op_code == MAX_EXPR && consts_equal)
5505 /* MAX (X, 0) == 0 -> X <= 0 */
5506 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5508 else if (op_code == MAX_EXPR && consts_lt)
5509 /* MAX (X, 0) == 5 -> X == 5 */
5510 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5512 else if (op_code == MAX_EXPR)
5513 /* MAX (X, 0) == -1 -> false */
5514 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5516 else if (consts_equal)
5517 /* MIN (X, 0) == 0 -> X >= 0 */
5518 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5520 else if (consts_lt)
5521 /* MIN (X, 0) == 5 -> false */
5522 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5524 else
5525 /* MIN (X, 0) == -1 -> X == -1 */
5526 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5528 case GT_EXPR:
5529 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5530 /* MAX (X, 0) > 0 -> X > 0
5531 MAX (X, 0) > 5 -> X > 5 */
5532 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5534 else if (op_code == MAX_EXPR)
5535 /* MAX (X, 0) > -1 -> true */
5536 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5538 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5539 /* MIN (X, 0) > 0 -> false
5540 MIN (X, 0) > 5 -> false */
5541 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5543 else
5544 /* MIN (X, 0) > -1 -> X > -1 */
5545 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5547 default:
5548 return NULL_TREE;
5552 /* T is an integer expression that is being multiplied, divided, or taken a
5553 modulus (CODE says which and what kind of divide or modulus) by a
5554 constant C. See if we can eliminate that operation by folding it with
5555 other operations already in T. WIDE_TYPE, if non-null, is a type that
5556 should be used for the computation if wider than our type.
5558 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5559 (X * 2) + (Y * 4). We must, however, be assured that either the original
5560 expression would not overflow or that overflow is undefined for the type
5561 in the language in question.
5563 If we return a non-null expression, it is an equivalent form of the
5564 original computation, but need not be in the original type.
5566 We set *STRICT_OVERFLOW_P to true if the return value depends on
5567 signed overflow being undefined. Otherwise we do not change
5568 *STRICT_OVERFLOW_P. */
5570 static tree
5571 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5572 bool *strict_overflow_p)
5574 /* To avoid exponential search depth, refuse to allow recursion past
5575 three levels. Beyond that (1) it's highly unlikely that we'll find
5576 something interesting and (2) we've probably processed it before
5577 when we built the inner expression. */
5579 static int depth;
5580 tree ret;
5582 if (depth > 3)
5583 return NULL;
5585 depth++;
5586 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5587 depth--;
5589 return ret;
5592 static tree
5593 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5594 bool *strict_overflow_p)
5596 tree type = TREE_TYPE (t);
5597 enum tree_code tcode = TREE_CODE (t);
5598 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5599 > GET_MODE_SIZE (TYPE_MODE (type)))
5600 ? wide_type : type);
5601 tree t1, t2;
5602 int same_p = tcode == code;
5603 tree op0 = NULL_TREE, op1 = NULL_TREE;
5604 bool sub_strict_overflow_p;
5606 /* Don't deal with constants of zero here; they confuse the code below. */
5607 if (integer_zerop (c))
5608 return NULL_TREE;
5610 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5611 op0 = TREE_OPERAND (t, 0);
5613 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5614 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5616 /* Note that we need not handle conditional operations here since fold
5617 already handles those cases. So just do arithmetic here. */
5618 switch (tcode)
5620 case INTEGER_CST:
5621 /* For a constant, we can always simplify if we are a multiply
5622 or (for divide and modulus) if it is a multiple of our constant. */
5623 if (code == MULT_EXPR
5624 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5625 return const_binop (code, fold_convert (ctype, t),
5626 fold_convert (ctype, c));
5627 break;
5629 CASE_CONVERT: case NON_LVALUE_EXPR:
5630 /* If op0 is an expression ... */
5631 if ((COMPARISON_CLASS_P (op0)
5632 || UNARY_CLASS_P (op0)
5633 || BINARY_CLASS_P (op0)
5634 || VL_EXP_CLASS_P (op0)
5635 || EXPRESSION_CLASS_P (op0))
5636 /* ... and has wrapping overflow, and its type is smaller
5637 than ctype, then we cannot pass through as widening. */
5638 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5639 && (TYPE_PRECISION (ctype)
5640 > TYPE_PRECISION (TREE_TYPE (op0))))
5641 /* ... or this is a truncation (t is narrower than op0),
5642 then we cannot pass through this narrowing. */
5643 || (TYPE_PRECISION (type)
5644 < TYPE_PRECISION (TREE_TYPE (op0)))
5645 /* ... or signedness changes for division or modulus,
5646 then we cannot pass through this conversion. */
5647 || (code != MULT_EXPR
5648 && (TYPE_UNSIGNED (ctype)
5649 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5650 /* ... or has undefined overflow while the type it is converted
5651 to has not, then we cannot do the operation in the inner type
5652 as that would introduce undefined overflow. */
5653 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5654 && !TYPE_OVERFLOW_UNDEFINED (type))))
5655 break;
5657 /* Pass the constant down and see if we can make a simplification. If
5658 we can, replace this expression with the inner simplification for
5659 possible later conversion to our or some other type. */
5660 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5661 && TREE_CODE (t2) == INTEGER_CST
5662 && !TREE_OVERFLOW (t2)
5663 && (0 != (t1 = extract_muldiv (op0, t2, code,
5664 code == MULT_EXPR
5665 ? ctype : NULL_TREE,
5666 strict_overflow_p))))
5667 return t1;
5668 break;
5670 case ABS_EXPR:
5671 /* If widening the type changes it from signed to unsigned, then we
5672 must avoid building ABS_EXPR itself as unsigned. */
5673 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5675 tree cstype = (*signed_type_for) (ctype);
5676 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5677 != 0)
5679 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5680 return fold_convert (ctype, t1);
5682 break;
5684 /* If the constant is negative, we cannot simplify this. */
5685 if (tree_int_cst_sgn (c) == -1)
5686 break;
5687 /* FALLTHROUGH */
5688 case NEGATE_EXPR:
5689 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5690 != 0)
5691 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5692 break;
5694 case MIN_EXPR: case MAX_EXPR:
5695 /* If widening the type changes the signedness, then we can't perform
5696 this optimization as that changes the result. */
5697 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5698 break;
5700 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5701 sub_strict_overflow_p = false;
5702 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5703 &sub_strict_overflow_p)) != 0
5704 && (t2 = extract_muldiv (op1, c, code, wide_type,
5705 &sub_strict_overflow_p)) != 0)
5707 if (tree_int_cst_sgn (c) < 0)
5708 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5709 if (sub_strict_overflow_p)
5710 *strict_overflow_p = true;
5711 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5712 fold_convert (ctype, t2));
5714 break;
5716 case LSHIFT_EXPR: case RSHIFT_EXPR:
5717 /* If the second operand is constant, this is a multiplication
5718 or floor division, by a power of two, so we can treat it that
5719 way unless the multiplier or divisor overflows. Signed
5720 left-shift overflow is implementation-defined rather than
5721 undefined in C90, so do not convert signed left shift into
5722 multiplication. */
5723 if (TREE_CODE (op1) == INTEGER_CST
5724 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5725 /* const_binop may not detect overflow correctly,
5726 so check for it explicitly here. */
5727 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5728 && TREE_INT_CST_HIGH (op1) == 0
5729 && 0 != (t1 = fold_convert (ctype,
5730 const_binop (LSHIFT_EXPR,
5731 size_one_node,
5732 op1)))
5733 && !TREE_OVERFLOW (t1))
5734 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5735 ? MULT_EXPR : FLOOR_DIV_EXPR,
5736 ctype,
5737 fold_convert (ctype, op0),
5738 t1),
5739 c, code, wide_type, strict_overflow_p);
5740 break;
5742 case PLUS_EXPR: case MINUS_EXPR:
5743 /* See if we can eliminate the operation on both sides. If we can, we
5744 can return a new PLUS or MINUS. If we can't, the only remaining
5745 cases where we can do anything are if the second operand is a
5746 constant. */
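      /* An illustrative instance (assuming a signed type where overflow is
	 undefined): (X * 6 + Y * 10) / 2 can be rewritten as X * 3 + Y * 5,
	 because both addends are divisible by 2.  */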
5747 sub_strict_overflow_p = false;
5748 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5749 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5750 if (t1 != 0 && t2 != 0
5751 && (code == MULT_EXPR
5752 /* If not multiplication, we can only do this if both operands
5753 are divisible by c. */
5754 || (multiple_of_p (ctype, op0, c)
5755 && multiple_of_p (ctype, op1, c))))
5757 if (sub_strict_overflow_p)
5758 *strict_overflow_p = true;
5759 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5760 fold_convert (ctype, t2));
5763 /* If this was a subtraction, negate OP1 and set it to be an addition.
5764 This simplifies the logic below. */
5765 if (tcode == MINUS_EXPR)
5767 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5768 /* If OP1 was not easily negatable, the constant may be OP0. */
5769 if (TREE_CODE (op0) == INTEGER_CST)
5771 tree tem = op0;
5772 op0 = op1;
5773 op1 = tem;
5774 tem = t1;
5775 t1 = t2;
5776 t2 = tem;
5780 if (TREE_CODE (op1) != INTEGER_CST)
5781 break;
5783 /* If either OP1 or C are negative, this optimization is not safe for
5784 some of the division and remainder types while for others we need
5785 to change the code. */
5786 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5788 if (code == CEIL_DIV_EXPR)
5789 code = FLOOR_DIV_EXPR;
5790 else if (code == FLOOR_DIV_EXPR)
5791 code = CEIL_DIV_EXPR;
5792 else if (code != MULT_EXPR
5793 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5794 break;
5797 /* If it's a multiply or a division/modulus operation of a multiple
5798 of our constant, do the operation and verify it doesn't overflow. */
5799 if (code == MULT_EXPR
5800 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5802 op1 = const_binop (code, fold_convert (ctype, op1),
5803 fold_convert (ctype, c));
5804 /* We allow the constant to overflow with wrapping semantics. */
5805 if (op1 == 0
5806 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5807 break;
5809 else
5810 break;
5812 /* If we have an unsigned type that is not a sizetype, we cannot widen
5813 the operation since it will change the result if the original
5814 computation overflowed. */
5815 if (TYPE_UNSIGNED (ctype)
5816 && ctype != type)
5817 break;
5819 /* If we were able to eliminate our operation from the first side,
5820 apply our operation to the second side and reform the PLUS. */
5821 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5822 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5824 /* The last case is if we are a multiply. In that case, we can
5825 apply the distributive law to commute the multiply and addition
5826 if the multiplication of the constants doesn't overflow. */
5827 if (code == MULT_EXPR)
5828 return fold_build2 (tcode, ctype,
5829 fold_build2 (code, ctype,
5830 fold_convert (ctype, op0),
5831 fold_convert (ctype, c)),
5832 op1);
5834 break;
5836 case MULT_EXPR:
5837 /* We have a special case here if we are doing something like
5838 (C * 8) % 4 since we know that's zero. */
5839 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5840 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5841 /* If the multiplication can overflow we cannot optimize this. */
5842 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5843 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5844 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5846 *strict_overflow_p = true;
5847 return omit_one_operand (type, integer_zero_node, op0);
5850 /* ... fall through ... */
5852 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5853 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5854 /* If we can extract our operation from the LHS, do so and return a
5855 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5856 do something only if the second operand is a constant. */
5857 if (same_p
5858 && (t1 = extract_muldiv (op0, c, code, wide_type,
5859 strict_overflow_p)) != 0)
5860 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5861 fold_convert (ctype, op1));
5862 else if (tcode == MULT_EXPR && code == MULT_EXPR
5863 && (t1 = extract_muldiv (op1, c, code, wide_type,
5864 strict_overflow_p)) != 0)
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5866 fold_convert (ctype, t1));
5867 else if (TREE_CODE (op1) != INTEGER_CST)
5868 return 0;
5870 /* If these are the same operation types, we can associate them
5871 assuming no overflow. */
5872 if (tcode == code)
5874 double_int mul;
5875 bool overflow_p;
5876 unsigned prec = TYPE_PRECISION (ctype);
5877 bool uns = TYPE_UNSIGNED (ctype);
5878 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5879 double_int dic = tree_to_double_int (c).ext (prec, uns);
5880 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5881 overflow_p = ((!uns && overflow_p)
5882 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5883 if (!double_int_fits_to_tree_p (ctype, mul)
5884 && ((uns && tcode != MULT_EXPR) || !uns))
5885 overflow_p = 1;
5886 if (!overflow_p)
5887 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5888 double_int_to_tree (ctype, mul));
5891 /* If these operations "cancel" each other, we have the main
5892 optimizations of this pass, which occur when either constant is a
5893 multiple of the other, in which case we replace this with either an
5894 operation of CODE or TCODE.
5896 If we have an unsigned type, we cannot do this since it will change
5897 the result if the original computation overflowed. */
5898 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5899 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5900 || (tcode == MULT_EXPR
5901 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5902 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5903 && code != MULT_EXPR)))
5905 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5907 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5908 *strict_overflow_p = true;
5909 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5910 fold_convert (ctype,
5911 const_binop (TRUNC_DIV_EXPR,
5912 op1, c)));
5914 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5916 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5917 *strict_overflow_p = true;
5918 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5919 fold_convert (ctype,
5920 const_binop (TRUNC_DIV_EXPR,
5921 c, op1)));
5924 break;
5926 default:
5927 break;
5930 return 0;
5933 /* Return a node which has the indicated constant VALUE (either 0 or
5934 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5935 and is of the indicated TYPE. */
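/* For example (illustrative): for boolean_type_node this yields
   boolean_true_node or boolean_false_node, while for a vector type it
   yields an all-ones or all-zeros vector constant.  */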
5937 tree
5938 constant_boolean_node (bool value, tree type)
5940 if (type == integer_type_node)
5941 return value ? integer_one_node : integer_zero_node;
5942 else if (type == boolean_type_node)
5943 return value ? boolean_true_node : boolean_false_node;
5944 else if (TREE_CODE (type) == VECTOR_TYPE)
5945 return build_vector_from_val (type,
5946 build_int_cst (TREE_TYPE (type),
5947 value ? -1 : 0));
5948 else
5949 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5953 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5954 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5955 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5956 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5957 COND is the first argument to CODE; otherwise (as in the example
5958 given here), it is the second argument. TYPE is the type of the
5959 original expression. Return NULL_TREE if no simplification is
5960 possible. */
5962 static tree
5963 fold_binary_op_with_conditional_arg (location_t loc,
5964 enum tree_code code,
5965 tree type, tree op0, tree op1,
5966 tree cond, tree arg, int cond_first_p)
5968 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5969 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5970 tree test, true_value, false_value;
5971 tree lhs = NULL_TREE;
5972 tree rhs = NULL_TREE;
5973 enum tree_code cond_code = COND_EXPR;
5975 if (TREE_CODE (cond) == COND_EXPR
5976 || TREE_CODE (cond) == VEC_COND_EXPR)
5978 test = TREE_OPERAND (cond, 0);
5979 true_value = TREE_OPERAND (cond, 1);
5980 false_value = TREE_OPERAND (cond, 2);
5981 /* If this operand throws an exception, then it does not make
5982 sense to try to perform a logical or arithmetic operation
5983 involving it. */
5984 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5985 lhs = true_value;
5986 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5987 rhs = false_value;
5989 else
5991 tree testtype = TREE_TYPE (cond);
5992 test = cond;
5993 true_value = constant_boolean_node (true, testtype);
5994 false_value = constant_boolean_node (false, testtype);
5997 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
5998 cond_code = VEC_COND_EXPR;
6000 /* This transformation is only worthwhile if we don't have to wrap ARG
6001 in a SAVE_EXPR and the operation can be simplified without recursing
6002 on at least one of the branches once it is pushed inside the COND_EXPR. */
6003 if (!TREE_CONSTANT (arg)
6004 && (TREE_SIDE_EFFECTS (arg)
6005 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6006 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6007 return NULL_TREE;
6009 arg = fold_convert_loc (loc, arg_type, arg);
6010 if (lhs == 0)
6012 true_value = fold_convert_loc (loc, cond_type, true_value);
6013 if (cond_first_p)
6014 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6015 else
6016 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6018 if (rhs == 0)
6020 false_value = fold_convert_loc (loc, cond_type, false_value);
6021 if (cond_first_p)
6022 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6023 else
6024 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6027 /* Check that we have simplified at least one of the branches. */
6028 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6029 return NULL_TREE;
6031 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6035 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6037 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6038 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6039 ADDEND is the same as X.
6041 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6042 and finite. The problematic cases are when X is zero, and its mode
6043 has signed zeros. In the case of rounding towards -infinity,
6044 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6045 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6047 bool
6048 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6050 if (!real_zerop (addend))
6051 return false;
6053 /* Don't allow the fold with -fsignaling-nans. */
6054 if (HONOR_SNANS (TYPE_MODE (type)))
6055 return false;
6057 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6058 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6059 return true;
6061 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6062 if (TREE_CODE (addend) == REAL_CST
6063 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6064 negate = !negate;
6066 /* The mode has signed zeros, and we have to honor their sign.
6067 In this situation, there is only one case we can return true for.
6068 X - 0 is the same as X unless rounding towards -infinity is
6069 supported. */
6070 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6073 /* Subroutine of fold() that checks comparisons of built-in math
6074 functions against real constants.
6076 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6077 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6078 is the type of the result and ARG0 and ARG1 are the operands of the
6079 comparison. ARG1 must be a TREE_REAL_CST.
6081 The function returns the constant folded tree if a simplification
6082 can be made, and NULL_TREE otherwise. */
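/* A sketch of one case handled below (illustrative): with NaNs honored,
   sqrt(x) > -1.0 folds to x >= 0.0, since sqrt yields NaN for negative
   (and NaN) arguments and NaN comparisons are false.  */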
6084 static tree
6085 fold_mathfn_compare (location_t loc,
6086 enum built_in_function fcode, enum tree_code code,
6087 tree type, tree arg0, tree arg1)
6089 REAL_VALUE_TYPE c;
6091 if (BUILTIN_SQRT_P (fcode))
6093 tree arg = CALL_EXPR_ARG (arg0, 0);
6094 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6096 c = TREE_REAL_CST (arg1);
6097 if (REAL_VALUE_NEGATIVE (c))
6099 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
6100 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6101 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6103 /* sqrt(x) > y is always true, if y is negative and we
6104 don't care about NaNs, i.e. negative values of x. */
6105 if (code == NE_EXPR || !HONOR_NANS (mode))
6106 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6108 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6109 return fold_build2_loc (loc, GE_EXPR, type, arg,
6110 build_real (TREE_TYPE (arg), dconst0));
6112 else if (code == GT_EXPR || code == GE_EXPR)
6114 REAL_VALUE_TYPE c2;
6116 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6117 real_convert (&c2, mode, &c2);
6119 if (REAL_VALUE_ISINF (c2))
6121 /* sqrt(x) > y is x == +Inf, when y is very large. */
6122 if (HONOR_INFINITIES (mode))
6123 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6124 build_real (TREE_TYPE (arg), c2));
6126 /* sqrt(x) > y is always false, when y is very large
6127 and we don't care about infinities. */
6128 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6131 /* sqrt(x) > c is the same as x > c*c. */
6132 return fold_build2_loc (loc, code, type, arg,
6133 build_real (TREE_TYPE (arg), c2));
6135 else if (code == LT_EXPR || code == LE_EXPR)
6137 REAL_VALUE_TYPE c2;
6139 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6140 real_convert (&c2, mode, &c2);
6142 if (REAL_VALUE_ISINF (c2))
6144 /* sqrt(x) < y is always true, when y is a very large
6145 value and we don't care about NaNs or Infinities. */
6146 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6147 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6149 /* sqrt(x) < y is x != +Inf when y is very large and we
6150 don't care about NaNs. */
6151 if (! HONOR_NANS (mode))
6152 return fold_build2_loc (loc, NE_EXPR, type, arg,
6153 build_real (TREE_TYPE (arg), c2));
6155 /* sqrt(x) < y is x >= 0 when y is very large and we
6156 don't care about Infinities. */
6157 if (! HONOR_INFINITIES (mode))
6158 return fold_build2_loc (loc, GE_EXPR, type, arg,
6159 build_real (TREE_TYPE (arg), dconst0));
6161 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6162 arg = save_expr (arg);
6163 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6164 fold_build2_loc (loc, GE_EXPR, type, arg,
6165 build_real (TREE_TYPE (arg),
6166 dconst0)),
6167 fold_build2_loc (loc, NE_EXPR, type, arg,
6168 build_real (TREE_TYPE (arg),
6169 c2)));
6172 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6173 if (! HONOR_NANS (mode))
6174 return fold_build2_loc (loc, code, type, arg,
6175 build_real (TREE_TYPE (arg), c2));
6177 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6178 arg = save_expr (arg);
6179 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6180 fold_build2_loc (loc, GE_EXPR, type, arg,
6181 build_real (TREE_TYPE (arg),
6182 dconst0)),
6183 fold_build2_loc (loc, code, type, arg,
6184 build_real (TREE_TYPE (arg),
6185 c2)));
6189 return NULL_TREE;
6192 /* Subroutine of fold() that optimizes comparisons against Infinities,
6193 either +Inf or -Inf.
6195 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6196 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6197 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6199 The function returns the constant folded tree if a simplification
6200 can be made, and NULL_TREE otherwise. */
6202 static tree
6203 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6204 tree arg0, tree arg1)
6206 enum machine_mode mode;
6207 REAL_VALUE_TYPE max;
6208 tree temp;
6209 bool neg;
6211 mode = TYPE_MODE (TREE_TYPE (arg0));
6213 /* For negative infinity swap the sense of the comparison. */
6214 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6215 if (neg)
6216 code = swap_tree_comparison (code);
6218 switch (code)
6220 case GT_EXPR:
6221 /* x > +Inf is always false, if we ignore sNaNs. */
6222 if (HONOR_SNANS (mode))
6223 return NULL_TREE;
6224 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6226 case LE_EXPR:
6227 /* x <= +Inf is always true, if we don't care about NaNs. */
6228 if (! HONOR_NANS (mode))
6229 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6231 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6232 arg0 = save_expr (arg0);
6233 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6235 case EQ_EXPR:
6236 case GE_EXPR:
6237 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6238 real_maxval (&max, neg, mode);
6239 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6240 arg0, build_real (TREE_TYPE (arg0), max));
6242 case LT_EXPR:
6243 /* x < +Inf is always equal to x <= DBL_MAX. */
6244 real_maxval (&max, neg, mode);
6245 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6246 arg0, build_real (TREE_TYPE (arg0), max));
6248 case NE_EXPR:
6249 /* x != +Inf is always equal to !(x > DBL_MAX). */
6250 real_maxval (&max, neg, mode);
6251 if (! HONOR_NANS (mode))
6252 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6253 arg0, build_real (TREE_TYPE (arg0), max));
6255 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6257 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6259 default:
6260 break;
6263 return NULL_TREE;
6266 /* Subroutine of fold() that optimizes comparisons of a division by
6267 a nonzero integer constant against an integer constant, i.e.
6268 X/C1 op C2.
6270 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6271 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6272 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6274 The function returns the constant folded tree if a simplification
6275 can be made, and NULL_TREE otherwise. */
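/* For example (illustrative, assuming an unsigned type): X / 4 == 2
   becomes the range check 8 <= X && X <= 11, since exactly those values
   of X truncate to 2.  */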
6277 static tree
6278 fold_div_compare (location_t loc,
6279 enum tree_code code, tree type, tree arg0, tree arg1)
6281 tree prod, tmp, hi, lo;
6282 tree arg00 = TREE_OPERAND (arg0, 0);
6283 tree arg01 = TREE_OPERAND (arg0, 1);
6284 double_int val;
6285 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6286 bool neg_overflow;
6287 bool overflow;
6289 /* We have to do this the hard way to detect unsigned overflow.
6290 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6291 val = TREE_INT_CST (arg01)
6292 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6293 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6294 neg_overflow = false;
6296 if (unsigned_p)
6298 tmp = int_const_binop (MINUS_EXPR, arg01,
6299 build_int_cst (TREE_TYPE (arg01), 1));
6300 lo = prod;
6302 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6303 val = TREE_INT_CST (prod)
6304 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6305 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6306 -1, overflow | TREE_OVERFLOW (prod));
6308 else if (tree_int_cst_sgn (arg01) >= 0)
6310 tmp = int_const_binop (MINUS_EXPR, arg01,
6311 build_int_cst (TREE_TYPE (arg01), 1));
6312 switch (tree_int_cst_sgn (arg1))
6314 case -1:
6315 neg_overflow = true;
6316 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6317 hi = prod;
6318 break;
6320 case 0:
6321 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6322 hi = tmp;
6323 break;
6325 case 1:
6326 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6327 lo = prod;
6328 break;
6330 default:
6331 gcc_unreachable ();
6334 else
6336 /* A negative divisor reverses the relational operators. */
6337 code = swap_tree_comparison (code);
6339 tmp = int_const_binop (PLUS_EXPR, arg01,
6340 build_int_cst (TREE_TYPE (arg01), 1));
6341 switch (tree_int_cst_sgn (arg1))
6343 case -1:
6344 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6345 lo = prod;
6346 break;
6348 case 0:
6349 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6350 lo = tmp;
6351 break;
6353 case 1:
6354 neg_overflow = true;
6355 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6356 hi = prod;
6357 break;
6359 default:
6360 gcc_unreachable ();
6364 switch (code)
6366 case EQ_EXPR:
6367 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6368 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6369 if (TREE_OVERFLOW (hi))
6370 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6371 if (TREE_OVERFLOW (lo))
6372 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6373 return build_range_check (loc, type, arg00, 1, lo, hi);
6375 case NE_EXPR:
6376 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6377 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6378 if (TREE_OVERFLOW (hi))
6379 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6380 if (TREE_OVERFLOW (lo))
6381 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6382 return build_range_check (loc, type, arg00, 0, lo, hi);
6384 case LT_EXPR:
6385 if (TREE_OVERFLOW (lo))
6387 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6388 return omit_one_operand_loc (loc, type, tmp, arg00);
6390 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6392 case LE_EXPR:
6393 if (TREE_OVERFLOW (hi))
6395 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6396 return omit_one_operand_loc (loc, type, tmp, arg00);
6398 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6400 case GT_EXPR:
6401 if (TREE_OVERFLOW (hi))
6403 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6404 return omit_one_operand_loc (loc, type, tmp, arg00);
6406 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6408 case GE_EXPR:
6409 if (TREE_OVERFLOW (lo))
6411 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6412 return omit_one_operand_loc (loc, type, tmp, arg00);
6414 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6416 default:
6417 break;
6420 return NULL_TREE;
6424 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6425 equality/inequality test, then return a simplified form of the test
6426 using a sign test. Otherwise return NULL. TYPE is the desired
6427 result type. */
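/* For example (illustrative): if X has a 32-bit unsigned type, the test
   (X & 0x80000000) != 0 can be rewritten as (int) X < 0, turning the bit
   test into a cheaper sign test.  */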
6429 static tree
6430 fold_single_bit_test_into_sign_test (location_t loc,
6431 enum tree_code code, tree arg0, tree arg1,
6432 tree result_type)
6434 /* If this is testing a single bit, we can optimize the test. */
6435 if ((code == NE_EXPR || code == EQ_EXPR)
6436 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6437 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6439 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6440 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6441 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6443 if (arg00 != NULL_TREE
6444 /* This is only a win if casting to a signed type is cheap,
6445 i.e. when arg00's type is not a partial mode. */
6446 && TYPE_PRECISION (TREE_TYPE (arg00))
6447 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6449 tree stype = signed_type_for (TREE_TYPE (arg00));
6450 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6451 result_type,
6452 fold_convert_loc (loc, stype, arg00),
6453 build_int_cst (stype, 0));
6457 return NULL_TREE;
6460 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6461 equality/inequality test, then return a simplified form of
6462 the test using shifts and logical operations. Otherwise return
6463 NULL. TYPE is the desired result type. */
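/* For example (illustrative): (X & 8) != 0 becomes ((X >> 3) & 1), and
   (X & 8) == 0 becomes (((X >> 3) ^ 1) & 1), after first trying the
   sign-test transformation above.  */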
6465 tree
6466 fold_single_bit_test (location_t loc, enum tree_code code,
6467 tree arg0, tree arg1, tree result_type)
6469 /* If this is testing a single bit, we can optimize the test. */
6470 if ((code == NE_EXPR || code == EQ_EXPR)
6471 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6472 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6474 tree inner = TREE_OPERAND (arg0, 0);
6475 tree type = TREE_TYPE (arg0);
6476 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6477 enum machine_mode operand_mode = TYPE_MODE (type);
6478 int ops_unsigned;
6479 tree signed_type, unsigned_type, intermediate_type;
6480 tree tem, one;
6482 /* First, see if we can fold the single bit test into a sign-bit
6483 test. */
6484 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6485 result_type);
6486 if (tem)
6487 return tem;
6489 /* Otherwise we have (A & C) != 0 where C is a single bit,
6490 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6491 Similarly for (A & C) == 0. */
6493 /* If INNER is a right shift of a constant and it plus BITNUM does
6494 not overflow, adjust BITNUM and INNER. */
6495 if (TREE_CODE (inner) == RSHIFT_EXPR
6496 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6497 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6498 && bitnum < TYPE_PRECISION (type)
6499 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6500 bitnum - TYPE_PRECISION (type)))
6502 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6503 inner = TREE_OPERAND (inner, 0);
6506 /* If we are going to be able to omit the AND below, we must do our
6507 operations as unsigned. If we must use the AND, we have a choice.
6508 Normally unsigned is faster, but for some machines signed is. */
6509 #ifdef LOAD_EXTEND_OP
6510 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6511 && !flag_syntax_only) ? 0 : 1;
6512 #else
6513 ops_unsigned = 1;
6514 #endif
6516 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6517 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6518 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6519 inner = fold_convert_loc (loc, intermediate_type, inner);
6521 if (bitnum != 0)
6522 inner = build2 (RSHIFT_EXPR, intermediate_type,
6523 inner, size_int (bitnum));
6525 one = build_int_cst (intermediate_type, 1);
6527 if (code == EQ_EXPR)
6528 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6530 /* Put the AND last so it can combine with more things. */
6531 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6533 /* Make sure to return the proper type. */
6534 inner = fold_convert_loc (loc, result_type, inner);
6536 return inner;
6538 return NULL_TREE;
6541 /* Check whether we are allowed to reorder operands arg0 and arg1,
6542 such that the evaluation of arg1 occurs before arg0. */
6544 static bool
6545 reorder_operands_p (const_tree arg0, const_tree arg1)
6547 if (! flag_evaluation_order)
6548 return true;
6549 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6550 return true;
6551 return ! TREE_SIDE_EFFECTS (arg0)
6552 && ! TREE_SIDE_EFFECTS (arg1);
6555 /* Test whether it is preferable to swap two operands, ARG0 and
6556 ARG1, for example because ARG0 is an integer constant and ARG1
6557 isn't. If REORDER is true, only recommend swapping if we can
6558 evaluate the operands in reverse order. */
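/* Callers typically use this to canonicalize commutative expressions,
   e.g. rewriting 5 + x as x + 5 so that constants end up as the second
   operand (an illustrative use, not mandated here).  */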
6560 bool
6561 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6563 STRIP_SIGN_NOPS (arg0);
6564 STRIP_SIGN_NOPS (arg1);
6566 if (TREE_CODE (arg1) == INTEGER_CST)
6567 return 0;
6568 if (TREE_CODE (arg0) == INTEGER_CST)
6569 return 1;
6571 if (TREE_CODE (arg1) == REAL_CST)
6572 return 0;
6573 if (TREE_CODE (arg0) == REAL_CST)
6574 return 1;
6576 if (TREE_CODE (arg1) == FIXED_CST)
6577 return 0;
6578 if (TREE_CODE (arg0) == FIXED_CST)
6579 return 1;
6581 if (TREE_CODE (arg1) == COMPLEX_CST)
6582 return 0;
6583 if (TREE_CODE (arg0) == COMPLEX_CST)
6584 return 1;
6586 if (TREE_CONSTANT (arg1))
6587 return 0;
6588 if (TREE_CONSTANT (arg0))
6589 return 1;
6591 if (optimize_function_for_size_p (cfun))
6592 return 0;
6594 if (reorder && flag_evaluation_order
6595 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6596 return 0;
6598 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6599 for commutative and comparison operators. Ensuring a canonical
6600 form allows the optimizers to find additional redundancies without
6601 having to explicitly check for both orderings. */
6602 if (TREE_CODE (arg0) == SSA_NAME
6603 && TREE_CODE (arg1) == SSA_NAME
6604 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6605 return 1;
6607 /* Put SSA_NAMEs last. */
6608 if (TREE_CODE (arg1) == SSA_NAME)
6609 return 0;
6610 if (TREE_CODE (arg0) == SSA_NAME)
6611 return 1;
6613 /* Put variables last. */
6614 if (DECL_P (arg1))
6615 return 0;
6616 if (DECL_P (arg0))
6617 return 1;
6619 return 0;
6622 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6623 ARG0 is extended to a wider type. */
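/* For example (illustrative): if C has type unsigned char, the test
   (int) C == 300 is known to be false, since 300 does not fit in the
   narrower type; in-range constants are instead compared in that type.  */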
6625 static tree
6626 fold_widened_comparison (location_t loc, enum tree_code code,
6627 tree type, tree arg0, tree arg1)
6629 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6630 tree arg1_unw;
6631 tree shorter_type, outer_type;
6632 tree min, max;
6633 bool above, below;
6635 if (arg0_unw == arg0)
6636 return NULL_TREE;
6637 shorter_type = TREE_TYPE (arg0_unw);
6639 #ifdef HAVE_canonicalize_funcptr_for_compare
6640 /* Disable this optimization if we're casting a function pointer
6641 type on targets that require function pointer canonicalization. */
6642 if (HAVE_canonicalize_funcptr_for_compare
6643 && TREE_CODE (shorter_type) == POINTER_TYPE
6644 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6645 return NULL_TREE;
6646 #endif
6648 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6649 return NULL_TREE;
6651 arg1_unw = get_unwidened (arg1, NULL_TREE);
6653 /* If possible, express the comparison in the shorter mode. */
6654 if ((code == EQ_EXPR || code == NE_EXPR
6655 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6656 && (TREE_TYPE (arg1_unw) == shorter_type
6657 || ((TYPE_PRECISION (shorter_type)
6658 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6659 && (TYPE_UNSIGNED (shorter_type)
6660 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6661 || (TREE_CODE (arg1_unw) == INTEGER_CST
6662 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6663 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6664 && int_fits_type_p (arg1_unw, shorter_type))))
6665 return fold_build2_loc (loc, code, type, arg0_unw,
6666 fold_convert_loc (loc, shorter_type, arg1_unw));
6668 if (TREE_CODE (arg1_unw) != INTEGER_CST
6669 || TREE_CODE (shorter_type) != INTEGER_TYPE
6670 || !int_fits_type_p (arg1_unw, shorter_type))
6671 return NULL_TREE;
6673 /* If we are comparing with an integer that does not fit into the range
6674 of the shorter type, the result is known. */
6675 outer_type = TREE_TYPE (arg1_unw);
6676 min = lower_bound_in_type (outer_type, shorter_type);
6677 max = upper_bound_in_type (outer_type, shorter_type);
6679 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6680 max, arg1_unw));
6681 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6682 arg1_unw, min));
6684 switch (code)
6686 case EQ_EXPR:
6687 if (above || below)
6688 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6689 break;
6691 case NE_EXPR:
6692 if (above || below)
6693 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6694 break;
6696 case LT_EXPR:
6697 case LE_EXPR:
6698 if (above)
6699 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6700 else if (below)
6701 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6703 case GT_EXPR:
6704 case GE_EXPR:
6705 if (above)
6706 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6707 else if (below)
6708 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6710 default:
6711 break;
6714 return NULL_TREE;
6717 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6718 ARG0 just the signedness is changed. */
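/* For example (illustrative): with I of type int, the equality
   (unsigned int) I == 5U can be folded to I == 5, since equality does
   not depend on signedness when the precision is unchanged.  */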
6720 static tree
6721 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6722 tree arg0, tree arg1)
6724 tree arg0_inner;
6725 tree inner_type, outer_type;
6727 if (!CONVERT_EXPR_P (arg0))
6728 return NULL_TREE;
6730 outer_type = TREE_TYPE (arg0);
6731 arg0_inner = TREE_OPERAND (arg0, 0);
6732 inner_type = TREE_TYPE (arg0_inner);
6734 #ifdef HAVE_canonicalize_funcptr_for_compare
6735 /* Disable this optimization if we're casting a function pointer
6736 type on targets that require function pointer canonicalization. */
6737 if (HAVE_canonicalize_funcptr_for_compare
6738 && TREE_CODE (inner_type) == POINTER_TYPE
6739 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6740 return NULL_TREE;
6741 #endif
6743 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6744 return NULL_TREE;
6746 if (TREE_CODE (arg1) != INTEGER_CST
6747 && !(CONVERT_EXPR_P (arg1)
6748 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6749 return NULL_TREE;
6751 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6752 && code != NE_EXPR
6753 && code != EQ_EXPR)
6754 return NULL_TREE;
6756 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6757 return NULL_TREE;
6759 if (TREE_CODE (arg1) == INTEGER_CST)
6760 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6761 0, TREE_OVERFLOW (arg1));
6762 else
6763 arg1 = fold_convert_loc (loc, inner_type, arg1);
6765 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6768 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6769 the step of the array. Reconstructs s and delta in the case of s *
6770 delta being an integer constant (and thus already folded). ADDR is
6771 the address. OP1 is the multiplicative expression. If the
6772 function succeeds, the new address expression is returned.
6773 Otherwise NULL_TREE is returned. LOC is the location of the
6774 resulting expression. */
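/* For example (a sketch, assuming 4-byte int elements): given int A[10],
   the address &A[2] p+ 3 * 4 is rewritten as &A[5], folding the scaled
   offset back into the array index.  */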
6776 static tree
6777 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6779 tree s, delta, step;
6780 tree ref = TREE_OPERAND (addr, 0), pref;
6781 tree ret, pos;
6782 tree itype;
6783 bool mdim = false;
6785 /* Strip the nops that might be added when converting op1 to sizetype. */
6786 STRIP_NOPS (op1);
6788 /* Canonicalize op1 into a possibly non-constant delta
6789 and an INTEGER_CST s. */
6790 if (TREE_CODE (op1) == MULT_EXPR)
6792 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6794 STRIP_NOPS (arg0);
6795 STRIP_NOPS (arg1);
6797 if (TREE_CODE (arg0) == INTEGER_CST)
6799 s = arg0;
6800 delta = arg1;
6802 else if (TREE_CODE (arg1) == INTEGER_CST)
6804 s = arg1;
6805 delta = arg0;
6807 else
6808 return NULL_TREE;
6810 else if (TREE_CODE (op1) == INTEGER_CST)
6812 delta = op1;
6813 s = NULL_TREE;
6815 else
6817 /* Treat op1 as delta * 1. */
6818 delta = op1;
6819 s = integer_one_node;
6822 /* Handle &x.array the same as we would handle &x.array[0]. */
6823 if (TREE_CODE (ref) == COMPONENT_REF
6824 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6826 tree domain;
6828 /* Remember if this was a multi-dimensional array. */
6829 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6830 mdim = true;
6832 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6833 if (! domain)
6834 goto cont;
6835 itype = TREE_TYPE (domain);
6837 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6838 if (TREE_CODE (step) != INTEGER_CST)
6839 goto cont;
6841 if (s)
6843 if (! tree_int_cst_equal (step, s))
6844 goto cont;
6846 else
6848 /* Check whether delta is a multiple of step. */
6849 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6850 if (! tmp)
6851 goto cont;
6852 delta = tmp;
6855 /* Only fold here if we can verify we do not overflow one
6856 dimension of a multi-dimensional array. */
6857 if (mdim)
6859 tree tmp;
6861 if (!TYPE_MIN_VALUE (domain)
6862 || !TYPE_MAX_VALUE (domain)
6863 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6864 goto cont;
6866 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6867 fold_convert_loc (loc, itype,
6868 TYPE_MIN_VALUE (domain)),
6869 fold_convert_loc (loc, itype, delta));
6870 if (TREE_CODE (tmp) != INTEGER_CST
6871 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6872 goto cont;
6875 /* We found a suitable component reference. */
6877 pref = TREE_OPERAND (addr, 0);
6878 ret = copy_node (pref);
6879 SET_EXPR_LOCATION (ret, loc);
6881 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6882 fold_build2_loc
6883 (loc, PLUS_EXPR, itype,
6884 fold_convert_loc (loc, itype,
6885 TYPE_MIN_VALUE
6886 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6887 fold_convert_loc (loc, itype, delta)),
6888 NULL_TREE, NULL_TREE);
6889 return build_fold_addr_expr_loc (loc, ret);
6892 cont:
6894 for (;; ref = TREE_OPERAND (ref, 0))
6896 if (TREE_CODE (ref) == ARRAY_REF)
6898 tree domain;
6900 /* Remember if this was a multi-dimensional array. */
6901 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6902 mdim = true;
6904 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6905 if (! domain)
6906 continue;
6907 itype = TREE_TYPE (domain);
6909 step = array_ref_element_size (ref);
6910 if (TREE_CODE (step) != INTEGER_CST)
6911 continue;
6913 if (s)
6915 if (! tree_int_cst_equal (step, s))
6916 continue;
6918 else
6920 /* Check whether delta is a multiple of step. */
6921 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6922 if (! tmp)
6923 continue;
6924 delta = tmp;
6927 /* Only fold here if we can verify we do not overflow one
6928 dimension of a multi-dimensional array. */
6929 if (mdim)
6931 tree tmp;
6933 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6934 || !TYPE_MAX_VALUE (domain)
6935 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6936 continue;
6938 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6939 fold_convert_loc (loc, itype,
6940 TREE_OPERAND (ref, 1)),
6941 fold_convert_loc (loc, itype, delta));
6942 if (!tmp
6943 || TREE_CODE (tmp) != INTEGER_CST
6944 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6945 continue;
6948 break;
6950 else
6951 mdim = false;
6953 if (!handled_component_p (ref))
6954 return NULL_TREE;
6957 /* We found a suitable array reference. So copy everything up to it,
6958 and replace the index. */
6960 pref = TREE_OPERAND (addr, 0);
6961 ret = copy_node (pref);
6962 SET_EXPR_LOCATION (ret, loc);
6963 pos = ret;
6965 while (pref != ref)
6967 pref = TREE_OPERAND (pref, 0);
6968 TREE_OPERAND (pos, 0) = copy_node (pref);
6969 pos = TREE_OPERAND (pos, 0);
6972 TREE_OPERAND (pos, 1)
6973 = fold_build2_loc (loc, PLUS_EXPR, itype,
6974 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6975 fold_convert_loc (loc, itype, delta));
6976 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6980 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6981 means A >= Y && A != MAX, but in this case we know that
6982 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6984 static tree
6985 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6987 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6989 if (TREE_CODE (bound) == LT_EXPR)
6990 a = TREE_OPERAND (bound, 0);
6991 else if (TREE_CODE (bound) == GT_EXPR)
6992 a = TREE_OPERAND (bound, 1);
6993 else
6994 return NULL_TREE;
6996 typea = TREE_TYPE (a);
6997 if (!INTEGRAL_TYPE_P (typea)
6998 && !POINTER_TYPE_P (typea))
6999 return NULL_TREE;
7001 if (TREE_CODE (ineq) == LT_EXPR)
7003 a1 = TREE_OPERAND (ineq, 1);
7004 y = TREE_OPERAND (ineq, 0);
7006 else if (TREE_CODE (ineq) == GT_EXPR)
7008 a1 = TREE_OPERAND (ineq, 0);
7009 y = TREE_OPERAND (ineq, 1);
7011 else
7012 return NULL_TREE;
7014 if (TREE_TYPE (a1) != typea)
7015 return NULL_TREE;
7017 if (POINTER_TYPE_P (typea))
7019 /* Convert the pointer types into integer before taking the difference. */
7020 tree ta = fold_convert_loc (loc, ssizetype, a);
7021 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7022 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7024 else
7025 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7027 if (!diff || !integer_onep (diff))
7028 return NULL_TREE;
7030 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7033 /* Fold a sum or difference in which at least one operand is a multiplication.
7034 Returns the folded tree or NULL if no simplification could be made. */
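/* For example (illustrative): I * 4 + J * 4 folds to (I + J) * 4, and
   the power-of-two path below folds I * 12 + J * 4 to (I * 3 + J) * 4.  */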
7036 static tree
7037 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7038 tree arg0, tree arg1)
7040 tree arg00, arg01, arg10, arg11;
7041 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7043 /* (A * C) +- (B * C) -> (A+-B) * C.
7044 (A * C) +- A -> A * (C+-1).
7045 We are most concerned about the case where C is a constant,
7046 but other combinations show up during loop reduction. Since
7047 it is not difficult, try all four possibilities. */
7049 if (TREE_CODE (arg0) == MULT_EXPR)
7051 arg00 = TREE_OPERAND (arg0, 0);
7052 arg01 = TREE_OPERAND (arg0, 1);
7054 else if (TREE_CODE (arg0) == INTEGER_CST)
7056 arg00 = build_one_cst (type);
7057 arg01 = arg0;
7059 else
7061 /* We cannot generate constant 1 for fract. */
7062 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7063 return NULL_TREE;
7064 arg00 = arg0;
7065 arg01 = build_one_cst (type);
7067 if (TREE_CODE (arg1) == MULT_EXPR)
7069 arg10 = TREE_OPERAND (arg1, 0);
7070 arg11 = TREE_OPERAND (arg1, 1);
7072 else if (TREE_CODE (arg1) == INTEGER_CST)
7074 arg10 = build_one_cst (type);
7075 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7076 the purpose of this canonicalization. */
7077 if (TREE_INT_CST_HIGH (arg1) == -1
7078 && negate_expr_p (arg1)
7079 && code == PLUS_EXPR)
7081 arg11 = negate_expr (arg1);
7082 code = MINUS_EXPR;
7084 else
7085 arg11 = arg1;
7087 else
7089 /* We cannot generate constant 1 for fract. */
7090 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7091 return NULL_TREE;
7092 arg10 = arg1;
7093 arg11 = build_one_cst (type);
7095 same = NULL_TREE;
7097 if (operand_equal_p (arg01, arg11, 0))
7098 same = arg01, alt0 = arg00, alt1 = arg10;
7099 else if (operand_equal_p (arg00, arg10, 0))
7100 same = arg00, alt0 = arg01, alt1 = arg11;
7101 else if (operand_equal_p (arg00, arg11, 0))
7102 same = arg00, alt0 = arg01, alt1 = arg10;
7103 else if (operand_equal_p (arg01, arg10, 0))
7104 same = arg01, alt0 = arg00, alt1 = arg11;
7106 /* No identical multiplicands; see if we can find a common
7107 power-of-two factor in non-power-of-two multiplies. This
7108 can help in multi-dimensional array access. */
7109 else if (host_integerp (arg01, 0)
7110 && host_integerp (arg11, 0))
7112 HOST_WIDE_INT int01, int11, tmp;
7113 bool swap = false;
7114 tree maybe_same;
7115 int01 = TREE_INT_CST_LOW (arg01);
7116 int11 = TREE_INT_CST_LOW (arg11);
7118 /* Move min of absolute values to int11. */
7119 if (absu_hwi (int01) < absu_hwi (int11))
7121 tmp = int01, int01 = int11, int11 = tmp;
7122 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7123 maybe_same = arg01;
7124 swap = true;
7126 else
7127 maybe_same = arg11;
7129 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7130 /* The remainder should not be a constant, otherwise we
7131 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7132 increased the number of multiplications necessary. */
7133 && TREE_CODE (arg10) != INTEGER_CST)
7135 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7136 build_int_cst (TREE_TYPE (arg00),
7137 int01 / int11));
7138 alt1 = arg10;
7139 same = maybe_same;
7140 if (swap)
7141 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7145 if (same)
7146 return fold_build2_loc (loc, MULT_EXPR, type,
7147 fold_build2_loc (loc, code, type,
7148 fold_convert_loc (loc, type, alt0),
7149 fold_convert_loc (loc, type, alt1)),
7150 fold_convert_loc (loc, type, same));
7152 return NULL_TREE;
7155 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
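/* For example (illustrative, on a little-endian target): the 32-bit
   constant 0x01020304 is encoded as the bytes 04 03 02 01.  */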
7160 static int
7161 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7163 tree type = TREE_TYPE (expr);
7164 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7165 int byte, offset, word, words;
7166 unsigned char value;
7168 if (total_bytes > len)
7169 return 0;
7170 words = total_bytes / UNITS_PER_WORD;
7172 for (byte = 0; byte < total_bytes; byte++)
7174 int bitpos = byte * BITS_PER_UNIT;
7175 if (bitpos < HOST_BITS_PER_WIDE_INT)
7176 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7177 else
7178 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7179 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7181 if (total_bytes > UNITS_PER_WORD)
7183 word = byte / UNITS_PER_WORD;
7184 if (WORDS_BIG_ENDIAN)
7185 word = (words - 1) - word;
7186 offset = word * UNITS_PER_WORD;
7187 if (BYTES_BIG_ENDIAN)
7188 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7189 else
7190 offset += byte % UNITS_PER_WORD;
7192 else
7193 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7194 ptr[offset] = value;
7196 return total_bytes;
7200 /* Subroutine of native_encode_expr. Encode the REAL_CST
7201 specified by EXPR into the buffer PTR of length LEN bytes.
7202 Return the number of bytes placed in the buffer, or zero
7203 upon failure. */
7205 static int
7206 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7208 tree type = TREE_TYPE (expr);
7209 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7210 int byte, offset, word, words, bitpos;
7211 unsigned char value;
7213 /* There are always 32 bits in each long, no matter the size of
7214 the host's long. We handle floating point representations with
7215 up to 192 bits. */
7216 long tmp[6];
7218 if (total_bytes > len)
7219 return 0;
7220 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7222 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7224 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7225 bitpos += BITS_PER_UNIT)
7227 byte = (bitpos / BITS_PER_UNIT) & 3;
7228 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7230 if (UNITS_PER_WORD < 4)
7232 word = byte / UNITS_PER_WORD;
7233 if (WORDS_BIG_ENDIAN)
7234 word = (words - 1) - word;
7235 offset = word * UNITS_PER_WORD;
7236 if (BYTES_BIG_ENDIAN)
7237 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7238 else
7239 offset += byte % UNITS_PER_WORD;
7241 else
7242 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7243 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7245 return total_bytes;
7248 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7249 specified by EXPR into the buffer PTR of length LEN bytes.
7250 Return the number of bytes placed in the buffer, or zero
7251 upon failure. */
7253 static int
7254 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7256 int rsize, isize;
7257 tree part;
7259 part = TREE_REALPART (expr);
7260 rsize = native_encode_expr (part, ptr, len);
7261 if (rsize == 0)
7262 return 0;
7263 part = TREE_IMAGPART (expr);
7264 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7265 if (isize != rsize)
7266 return 0;
7267 return rsize + isize;
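/* For example (illustrative only): a complex double whose element mode is
   8 bytes wide is encoded as the real part's 8 bytes followed by the
   imaginary part's 8 bytes; if the two halves encode to different lengths
   (or the buffer runs out for the second half), the whole encoding is
   rejected with 0.  A host-side sketch, assuming the host's double layout
   matches the target's:  */
#if 0
#include <complex.h>
#include <string.h>
static void
sketch_complex_layout (unsigned char buf[16])
{
  double _Complex z = 1.0 + 2.0 * I;
  memcpy (buf, &z, 16);		/* buf[0..7] real, buf[8..15] imaginary.  */
}
#endif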
7271 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7272 specified by EXPR into the buffer PTR of length LEN bytes.
7273 Return the number of bytes placed in the buffer, or zero
7274 upon failure. */
7276 static int
7277 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7279 unsigned i, count;
7280 int size, offset;
7281 tree itype, elem;
7283 offset = 0;
7284 count = VECTOR_CST_NELTS (expr);
7285 itype = TREE_TYPE (TREE_TYPE (expr));
7286 size = GET_MODE_SIZE (TYPE_MODE (itype));
7287 for (i = 0; i < count; i++)
7289 elem = VECTOR_CST_ELT (expr, i);
7290 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7291 return 0;
7292 offset += size;
7294 return offset;
7298 /* Subroutine of native_encode_expr. Encode the STRING_CST
7299 specified by EXPR into the buffer PTR of length LEN bytes.
7300 Return the number of bytes placed in the buffer, or zero
7301 upon failure. */
7303 static int
7304 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7306 tree type = TREE_TYPE (expr);
7307 HOST_WIDE_INT total_bytes;
7309 if (TREE_CODE (type) != ARRAY_TYPE
7310 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7311 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7312 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7313 return 0;
7314 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7315 if (total_bytes > len)
7316 return 0;
7317 if (TREE_STRING_LENGTH (expr) < total_bytes)
7319 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7320 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7321 total_bytes - TREE_STRING_LENGTH (expr));
7323 else
7324 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7325 return total_bytes;
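/* Worked example (illustrative): for a "char buf[8]" initialized from the
   literal "hi", TREE_STRING_LENGTH is 3 (the two letters plus the
   terminating NUL) while TYPE_SIZE_UNIT is 8, so the three string bytes
   are copied, the remaining five are zero-filled, and 8 is returned.
   A stand-alone sketch of the copy-and-pad step:  */
#if 0
#include <string.h>
static int
sketch_encode_padded (const char *str, int str_len,
		      unsigned char *ptr, int total_bytes)
{
  if (str_len < total_bytes)
    {
      memcpy (ptr, str, str_len);
      memset (ptr + str_len, 0, total_bytes - str_len);
    }
  else
    memcpy (ptr, str, total_bytes);
  return total_bytes;
}
#endif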
7329 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7330 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7331 buffer PTR of length LEN bytes. Return the number of bytes
7332 placed in the buffer, or zero upon failure. */
7334 int
7335 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7337 switch (TREE_CODE (expr))
7339 case INTEGER_CST:
7340 return native_encode_int (expr, ptr, len);
7342 case REAL_CST:
7343 return native_encode_real (expr, ptr, len);
7345 case COMPLEX_CST:
7346 return native_encode_complex (expr, ptr, len);
7348 case VECTOR_CST:
7349 return native_encode_vector (expr, ptr, len);
7351 case STRING_CST:
7352 return native_encode_string (expr, ptr, len);
7354 default:
7355 return 0;
7360 /* Subroutine of native_interpret_expr. Interpret the contents of
7361 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7362 If the buffer cannot be interpreted, return NULL_TREE. */
7364 static tree
7365 native_interpret_int (tree type, const unsigned char *ptr, int len)
7367 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7368 int byte, offset, word, words;
7369 unsigned char value;
7370 double_int result;
7372 if (total_bytes > len)
7373 return NULL_TREE;
7374 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7375 return NULL_TREE;
7377 result = double_int_zero;
7378 words = total_bytes / UNITS_PER_WORD;
7380 for (byte = 0; byte < total_bytes; byte++)
7382 int bitpos = byte * BITS_PER_UNIT;
7383 if (total_bytes > UNITS_PER_WORD)
7385 word = byte / UNITS_PER_WORD;
7386 if (WORDS_BIG_ENDIAN)
7387 word = (words - 1) - word;
7388 offset = word * UNITS_PER_WORD;
7389 if (BYTES_BIG_ENDIAN)
7390 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7391 else
7392 offset += byte % UNITS_PER_WORD;
7394 else
7395 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7396 value = ptr[offset];
7398 if (bitpos < HOST_BITS_PER_WIDE_INT)
7399 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7400 else
7401 result.high |= (unsigned HOST_WIDE_INT) value
7402 << (bitpos - HOST_BITS_PER_WIDE_INT);
7405 return double_int_to_tree (type, result);
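/* Illustrative sketch (hypothetical): the inverse of the native_encode_int
   example -- reading { 0x44, 0x33, 0x22, 0x11 } back on a little-endian
   target reassembles 0x11223344 by OR-ing each byte into the accumulator
   at its bit position.  The code above does the same into a double_int so
   values wider than HOST_WIDE_INT keep their top half.  */
#if 0
static unsigned int
sketch_interpret_u32 (const unsigned char buf[4], int bytes_big_endian)
{
  unsigned int result = 0;
  int byte;
  for (byte = 0; byte < 4; byte++)
    {
      int offset = bytes_big_endian ? 3 - byte : byte;
      result |= (unsigned int) buf[offset] << (byte * 8);
    }
  return result;
}
#endif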
7409 /* Subroutine of native_interpret_expr. Interpret the contents of
7410 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7411 If the buffer cannot be interpreted, return NULL_TREE. */
7413 static tree
7414 native_interpret_real (tree type, const unsigned char *ptr, int len)
7416 enum machine_mode mode = TYPE_MODE (type);
7417 int total_bytes = GET_MODE_SIZE (mode);
7418 int byte, offset, word, words, bitpos;
7419 unsigned char value;
7420 /* There are always 32 bits in each long, no matter the size of
7421 the host's long. We handle floating point representations with
7422 up to 192 bits. */
7423 REAL_VALUE_TYPE r;
7424 long tmp[6];
7426 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7427 if (total_bytes > len || total_bytes > 24)
7428 return NULL_TREE;
7429 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7431 memset (tmp, 0, sizeof (tmp));
7432 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7433 bitpos += BITS_PER_UNIT)
7435 byte = (bitpos / BITS_PER_UNIT) & 3;
7436 if (UNITS_PER_WORD < 4)
7438 word = byte / UNITS_PER_WORD;
7439 if (WORDS_BIG_ENDIAN)
7440 word = (words - 1) - word;
7441 offset = word * UNITS_PER_WORD;
7442 if (BYTES_BIG_ENDIAN)
7443 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7444 else
7445 offset += byte % UNITS_PER_WORD;
7447 else
7448 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7449 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7451 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7454 real_from_target (&r, tmp, mode);
7455 return build_real (type, r);
7459 /* Subroutine of native_interpret_expr. Interpret the contents of
7460 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7461 If the buffer cannot be interpreted, return NULL_TREE. */
7463 static tree
7464 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7466 tree etype, rpart, ipart;
7467 int size;
7469 etype = TREE_TYPE (type);
7470 size = GET_MODE_SIZE (TYPE_MODE (etype));
7471 if (size * 2 > len)
7472 return NULL_TREE;
7473 rpart = native_interpret_expr (etype, ptr, size);
7474 if (!rpart)
7475 return NULL_TREE;
7476 ipart = native_interpret_expr (etype, ptr+size, size);
7477 if (!ipart)
7478 return NULL_TREE;
7479 return build_complex (type, rpart, ipart);
7483 /* Subroutine of native_interpret_expr. Interpret the contents of
7484 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7485 If the buffer cannot be interpreted, return NULL_TREE. */
7487 static tree
7488 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7490 tree etype, elem;
7491 int i, size, count;
7492 tree *elements;
7494 etype = TREE_TYPE (type);
7495 size = GET_MODE_SIZE (TYPE_MODE (etype));
7496 count = TYPE_VECTOR_SUBPARTS (type);
7497 if (size * count > len)
7498 return NULL_TREE;
7500 elements = XALLOCAVEC (tree, count);
7501 for (i = count - 1; i >= 0; i--)
7503 elem = native_interpret_expr (etype, ptr+(i*size), size);
7504 if (!elem)
7505 return NULL_TREE;
7506 elements[i] = elem;
7508 return build_vector (type, elements);
7512 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7513 the buffer PTR of length LEN as a constant of type TYPE. For
7514 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7515 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7516 return NULL_TREE. */
7518 tree
7519 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7521 switch (TREE_CODE (type))
7523 case INTEGER_TYPE:
7524 case ENUMERAL_TYPE:
7525 case BOOLEAN_TYPE:
7526 case POINTER_TYPE:
7527 case REFERENCE_TYPE:
7528 return native_interpret_int (type, ptr, len);
7530 case REAL_TYPE:
7531 return native_interpret_real (type, ptr, len);
7533 case COMPLEX_TYPE:
7534 return native_interpret_complex (type, ptr, len);
7536 case VECTOR_TYPE:
7537 return native_interpret_vector (type, ptr, len);
7539 default:
7540 return NULL_TREE;
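/* native_interpret_expr is the inverse of native_encode_expr for the kinds
   of constants both sides support, which is what makes fold_view_convert_expr
   below work: encode into target bytes, then reinterpret the bytes in the
   new type.  A hypothetical round-trip check for an INTEGER_CST (sketch
   only):  */
#if 0
static void
sketch_round_trip (tree expr)	/* EXPR: an INTEGER_CST.  */
{
  unsigned char buf[64];
  int len = native_encode_expr (expr, buf, sizeof (buf));
  tree back = len ? native_interpret_expr (TREE_TYPE (expr), buf, len)
		  : NULL_TREE;
  gcc_assert (back == NULL_TREE || tree_int_cst_equal (expr, back));
}
#endif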
7544 /* Returns true if we can interpret the contents of a native encoding
7545 as TYPE. */
7547 static bool
7548 can_native_interpret_type_p (tree type)
7550 switch (TREE_CODE (type))
7552 case INTEGER_TYPE:
7553 case ENUMERAL_TYPE:
7554 case BOOLEAN_TYPE:
7555 case POINTER_TYPE:
7556 case REFERENCE_TYPE:
7557 case REAL_TYPE:
7558 case COMPLEX_TYPE:
7559 case VECTOR_TYPE:
7560 return true;
7561 default:
7562 return false;
7566 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7567 TYPE at compile-time. If we're unable to perform the conversion
7568 return NULL_TREE. */
7570 static tree
7571 fold_view_convert_expr (tree type, tree expr)
7573 /* We support up to 512-bit values (for V8DFmode). */
7574 unsigned char buffer[64];
7575 int len;
7577 /* Check that the host and target are sane. */
7578 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7579 return NULL_TREE;
7581 len = native_encode_expr (expr, buffer, sizeof (buffer));
7582 if (len == 0)
7583 return NULL_TREE;
7585 return native_interpret_expr (type, buffer, len);
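/* Worked example (illustrative, IEEE targets): with 32-bit float and int,
   fold_view_convert_expr folds VIEW_CONVERT_EXPR<int>(1.0f) to the
   INTEGER_CST 0x3f800000 -- the REAL_CST encodes to the single-precision
   bit pattern, and those four bytes reinterpret as an int.  The same
   reinterpretation done host-side (sketch; assumes a 32-bit unsigned int
   and an IEEE host):  */
#if 0
#include <string.h>
static unsigned int
sketch_float_bits (float f)	/* sketch_float_bits (1.0f) == 0x3f800000 */
{
  unsigned int bits;
  memcpy (&bits, &f, sizeof bits);
  return bits;
}
#endif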
7588 /* Build an expression for the address of T. Folds away INDIRECT_REF
7589 to avoid confusing the gimplify process. */
7591 tree
7592 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7594 /* The size of the object is not relevant when talking about its address. */
7595 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7596 t = TREE_OPERAND (t, 0);
7598 if (TREE_CODE (t) == INDIRECT_REF)
7600 t = TREE_OPERAND (t, 0);
7602 if (TREE_TYPE (t) != ptrtype)
7603 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7605 else if (TREE_CODE (t) == MEM_REF
7606 && integer_zerop (TREE_OPERAND (t, 1)))
7607 return TREE_OPERAND (t, 0);
7608 else if (TREE_CODE (t) == MEM_REF
7609 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7610 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7611 TREE_OPERAND (t, 0),
7612 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7613 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7615 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7617 if (TREE_TYPE (t) != ptrtype)
7618 t = fold_convert_loc (loc, ptrtype, t);
7620 else
7621 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7623 return t;
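/* Examples of the folds above (illustrative): &*p becomes p, with a cast
   when the pointer types differ; &MEM_REF[p, 0] becomes p directly; and
   taking the address of a constant-based MEM_REF folds to a constant
   POINTER_PLUS_EXPR.  The VIEW_CONVERT_EXPR case recurses on the operand,
   because an object and its reinterpretation share one address.  */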
7626 /* Build an expression for the address of T. */
7628 tree
7629 build_fold_addr_expr_loc (location_t loc, tree t)
7631 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7633 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7636 static bool vec_cst_ctor_to_array (tree, tree *);
7638 /* Fold a unary expression of code CODE and type TYPE with operand
7639 OP0. Return the folded expression if folding is successful.
7640 Otherwise, return NULL_TREE. */
7642 tree
7643 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7645 tree tem;
7646 tree arg0;
7647 enum tree_code_class kind = TREE_CODE_CLASS (code);
7649 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7650 && TREE_CODE_LENGTH (code) == 1);
7652 arg0 = op0;
7653 if (arg0)
7655 if (CONVERT_EXPR_CODE_P (code)
7656 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7658 /* Don't use STRIP_NOPS, because signedness of argument type
7659 matters. */
7660 STRIP_SIGN_NOPS (arg0);
7662 else
7664 /* Strip any conversions that don't change the mode. This
7665 is safe for every expression, except for a comparison
7666 expression because its signedness is derived from its
7667 operands.
7669 Note that this is done as an internal manipulation within
7670 the constant folder, in order to find the simplest
7671 representation of the arguments so that their form can be
7672 studied. In any case, the appropriate type conversions
7673 should be put back in the tree that will get out of the
7674 constant folder. */
7675 STRIP_NOPS (arg0);
7679 if (TREE_CODE_CLASS (code) == tcc_unary)
7681 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7682 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7683 fold_build1_loc (loc, code, type,
7684 fold_convert_loc (loc, TREE_TYPE (op0),
7685 TREE_OPERAND (arg0, 1))));
7686 else if (TREE_CODE (arg0) == COND_EXPR)
7688 tree arg01 = TREE_OPERAND (arg0, 1);
7689 tree arg02 = TREE_OPERAND (arg0, 2);
7690 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7691 arg01 = fold_build1_loc (loc, code, type,
7692 fold_convert_loc (loc,
7693 TREE_TYPE (op0), arg01));
7694 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7695 arg02 = fold_build1_loc (loc, code, type,
7696 fold_convert_loc (loc,
7697 TREE_TYPE (op0), arg02));
7698 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7699 arg01, arg02);
7701 /* If this was a conversion, and all we did was to move into
7702 inside the COND_EXPR, bring it back out. But leave it if
7703 it is a conversion from integer to integer and the
7704 result precision is no wider than a word since such a
7705 conversion is cheap and may be optimized away by combine,
7706 while it couldn't if it were outside the COND_EXPR. Then return
7707 so we don't get into an infinite recursion loop taking the
7708 conversion out and then back in. */
7710 if ((CONVERT_EXPR_CODE_P (code)
7711 || code == NON_LVALUE_EXPR)
7712 && TREE_CODE (tem) == COND_EXPR
7713 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7714 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7715 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7716 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7717 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7718 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7719 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7720 && (INTEGRAL_TYPE_P
7721 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7722 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7723 || flag_syntax_only))
7724 tem = build1_loc (loc, code, type,
7725 build3 (COND_EXPR,
7726 TREE_TYPE (TREE_OPERAND
7727 (TREE_OPERAND (tem, 1), 0)),
7728 TREE_OPERAND (tem, 0),
7729 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7730 TREE_OPERAND (TREE_OPERAND (tem, 2),
7731 0)));
7732 return tem;
7736 switch (code)
7738 case PAREN_EXPR:
7739 /* Re-association barriers around constants and other re-association
7740 barriers can be removed. */
7741 if (CONSTANT_CLASS_P (op0)
7742 || TREE_CODE (op0) == PAREN_EXPR)
7743 return fold_convert_loc (loc, type, op0);
7744 return NULL_TREE;
7746 CASE_CONVERT:
7747 case FLOAT_EXPR:
7748 case FIX_TRUNC_EXPR:
7749 if (TREE_TYPE (op0) == type)
7750 return op0;
7752 if (COMPARISON_CLASS_P (op0))
7754 /* If we have (type) (a CMP b) and type is an integral type, return
7755 new expression involving the new type. Canonicalize
7756 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7757 non-integral type.
7758 Do not fold the result, as that would not simplify further;
7759 folding it again would also result in infinite recursion. */
7760 if (TREE_CODE (type) == BOOLEAN_TYPE)
7761 return build2_loc (loc, TREE_CODE (op0), type,
7762 TREE_OPERAND (op0, 0),
7763 TREE_OPERAND (op0, 1));
7764 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7765 && TREE_CODE (type) != VECTOR_TYPE)
7766 return build3_loc (loc, COND_EXPR, type, op0,
7767 constant_boolean_node (true, type),
7768 constant_boolean_node (false, type));
7771 /* Handle cases of two conversions in a row. */
7772 if (CONVERT_EXPR_P (op0))
7774 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7775 tree inter_type = TREE_TYPE (op0);
7776 int inside_int = INTEGRAL_TYPE_P (inside_type);
7777 int inside_ptr = POINTER_TYPE_P (inside_type);
7778 int inside_float = FLOAT_TYPE_P (inside_type);
7779 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7780 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7781 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7782 int inter_int = INTEGRAL_TYPE_P (inter_type);
7783 int inter_ptr = POINTER_TYPE_P (inter_type);
7784 int inter_float = FLOAT_TYPE_P (inter_type);
7785 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7786 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7787 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7788 int final_int = INTEGRAL_TYPE_P (type);
7789 int final_ptr = POINTER_TYPE_P (type);
7790 int final_float = FLOAT_TYPE_P (type);
7791 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7792 unsigned int final_prec = TYPE_PRECISION (type);
7793 int final_unsignedp = TYPE_UNSIGNED (type);
7795 /* Check for cases specific to UPC, involving pointer types. */
7796 if (final_ptr || inter_ptr || inside_ptr)
7798 int final_pts = final_ptr
7799 && upc_shared_type_p (TREE_TYPE (type));
7800 int inter_pts = inter_ptr
7801 && upc_shared_type_p (TREE_TYPE (inter_type));
7802 int inside_pts = inside_ptr
7803 && upc_shared_type_p (TREE_TYPE (inside_type));
7804 if (final_pts || inter_pts || inside_pts)
7806 if (!(((final_pts && inter_pts)
7807 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7808 || ((inter_pts && inside_pts)
7809 && (TREE_TYPE (inter_type)
7810 == TREE_TYPE (inside_type)))))
7811 return NULL_TREE;
7815 /* In addition to the cases of two conversions in a row
7816 handled below, if we are converting something to its own
7817 type via an object of identical or wider precision, neither
7818 conversion is needed. */
7819 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7820 && (((inter_int || inter_ptr) && final_int)
7821 || (inter_float && final_float))
7822 && inter_prec >= final_prec)
7823 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7825 /* Likewise, if the intermediate and initial types are either both
7826 float or both integer, we don't need the middle conversion if the
7827 former is wider than the latter and doesn't change the signedness
7828 (for integers). Avoid this if the final type is a pointer since
7829 then we sometimes need the middle conversion. Likewise if the
7830 final type has a precision not equal to the size of its mode. */
7831 if (((inter_int && inside_int)
7832 || (inter_float && inside_float)
7833 || (inter_vec && inside_vec))
7834 && inter_prec >= inside_prec
7835 && (inter_float || inter_vec
7836 || inter_unsignedp == inside_unsignedp)
7837 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7838 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7839 && ! final_ptr
7840 && (! final_vec || inter_prec == inside_prec))
7841 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7843 /* If we have a sign-extension of a zero-extended value, we can
7844 replace that by a single zero-extension. Likewise if the
7845 final conversion does not change precision we can drop the
7846 intermediate conversion. */
7847 if (inside_int && inter_int && final_int
7848 && ((inside_prec < inter_prec && inter_prec < final_prec
7849 && inside_unsignedp && !inter_unsignedp)
7850 || final_prec == inter_prec))
7851 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7853 /* Two conversions in a row are not needed unless:
7854 - some conversion is floating-point (overstrict for now), or
7855 - some conversion is a vector (overstrict for now), or
7856 - the intermediate type is narrower than both initial and
7857 final, or
7858 - the intermediate type and innermost type differ in signedness,
7859 and the outermost type is wider than the intermediate, or
7860 - the initial type is a pointer type and the precisions of the
7861 intermediate and final types differ, or
7862 - the final type is a pointer type and the precisions of the
7863 initial and intermediate types differ. */
7864 if (! inside_float && ! inter_float && ! final_float
7865 && ! inside_vec && ! inter_vec && ! final_vec
7866 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7867 && ! (inside_int && inter_int
7868 && inter_unsignedp != inside_unsignedp
7869 && inter_prec < final_prec)
7870 && ((inter_unsignedp && inter_prec > inside_prec)
7871 == (final_unsignedp && final_prec > inter_prec))
7872 && ! (inside_ptr && inter_prec != final_prec)
7873 && ! (final_ptr && inside_prec != inter_prec)
7874 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7875 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7876 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7879 /* Handle (T *)&A.B.C for A being of type T and B and C
7880 living at offset zero. This occurs frequently in
7881 C++ upcasting and then accessing the base. */
7882 if (TREE_CODE (op0) == ADDR_EXPR
7883 && POINTER_TYPE_P (type)
7884 && handled_component_p (TREE_OPERAND (op0, 0)))
7886 HOST_WIDE_INT bitsize, bitpos;
7887 tree offset;
7888 enum machine_mode mode;
7889 int unsignedp, volatilep;
7890 tree base = TREE_OPERAND (op0, 0);
7891 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7892 &mode, &unsignedp, &volatilep, false);
7893 /* If the reference was to a (constant) zero offset, we can use
7894 the address of the base if it has the same base type
7895 as the result type and the pointer type is unqualified. */
7896 if (! offset && bitpos == 0
7897 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7898 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7899 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7900 return fold_convert_loc (loc, type,
7901 build_fold_addr_expr_loc (loc, base));
7904 if (TREE_CODE (op0) == MODIFY_EXPR
7905 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7906 /* Detect assigning a bitfield. */
7907 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7908 && DECL_BIT_FIELD
7909 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7911 /* Don't leave an assignment inside a conversion
7912 unless assigning a bitfield. */
7913 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7914 /* First do the assignment, then return converted constant. */
7915 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7916 TREE_NO_WARNING (tem) = 1;
7917 TREE_USED (tem) = 1;
7918 return tem;
7921 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7922 constants (if x has signed type, the sign bit cannot be set
7923 in c). This folds extension into the BIT_AND_EXPR.
7924 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7925 very likely don't have maximal range for their precision and this
7926 transformation effectively doesn't preserve non-maximal ranges. */
7927 if (TREE_CODE (type) == INTEGER_TYPE
7928 && TREE_CODE (op0) == BIT_AND_EXPR
7929 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7931 tree and_expr = op0;
7932 tree and0 = TREE_OPERAND (and_expr, 0);
7933 tree and1 = TREE_OPERAND (and_expr, 1);
7934 int change = 0;
7936 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7937 || (TYPE_PRECISION (type)
7938 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7939 change = 1;
7940 else if (TYPE_PRECISION (TREE_TYPE (and1))
7941 <= HOST_BITS_PER_WIDE_INT
7942 && host_integerp (and1, 1))
7944 unsigned HOST_WIDE_INT cst;
7946 cst = tree_low_cst (and1, 1);
7947 cst &= (HOST_WIDE_INT) -1
7948 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7949 change = (cst == 0);
7950 #ifdef LOAD_EXTEND_OP
7951 if (change
7952 && !flag_syntax_only
7953 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7954 == ZERO_EXTEND))
7956 tree uns = unsigned_type_for (TREE_TYPE (and0));
7957 and0 = fold_convert_loc (loc, uns, and0);
7958 and1 = fold_convert_loc (loc, uns, and1);
7960 #endif
7962 if (change)
7964 tem = force_fit_type_double (type, tree_to_double_int (and1),
7965 0, TREE_OVERFLOW (and1));
7966 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7967 fold_convert_loc (loc, type, and0), tem);
7971 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7972 when one of the new casts will fold away. Conservatively we assume
7973 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7974 if (POINTER_TYPE_P (type)
7975 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7976 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7977 && !upc_shared_type_p (TREE_TYPE (type))
7978 && !upc_shared_type_p (TREE_TYPE (
7979 TREE_TYPE (TREE_OPERAND (arg0, 0))))
7980 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7981 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7982 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7984 tree arg00 = TREE_OPERAND (arg0, 0);
7985 tree arg01 = TREE_OPERAND (arg0, 1);
7987 return fold_build_pointer_plus_loc
7988 (loc, fold_convert_loc (loc, type, arg00), arg01);
7991 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7992 of the same precision, and X is an integer type not narrower than
7993 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7994 if (INTEGRAL_TYPE_P (type)
7995 && TREE_CODE (op0) == BIT_NOT_EXPR
7996 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7997 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7998 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8000 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8001 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8002 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8003 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8004 fold_convert_loc (loc, type, tem));
8007 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8008 type of X and Y (integer types only). */
8009 if (INTEGRAL_TYPE_P (type)
8010 && TREE_CODE (op0) == MULT_EXPR
8011 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8012 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8014 /* Be careful not to introduce new overflows. */
8015 tree mult_type;
8016 if (TYPE_OVERFLOW_WRAPS (type))
8017 mult_type = type;
8018 else
8019 mult_type = unsigned_type_for (type);
8021 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8023 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8024 fold_convert_loc (loc, mult_type,
8025 TREE_OPERAND (op0, 0)),
8026 fold_convert_loc (loc, mult_type,
8027 TREE_OPERAND (op0, 1)));
8028 return fold_convert_loc (loc, type, tem);
8032 tem = fold_convert_const (code, type, op0);
8033 return tem ? tem : NULL_TREE;
8035 case ADDR_SPACE_CONVERT_EXPR:
8036 if (integer_zerop (arg0))
8037 return fold_convert_const (code, type, arg0);
8038 return NULL_TREE;
8040 case FIXED_CONVERT_EXPR:
8041 tem = fold_convert_const (code, type, arg0);
8042 return tem ? tem : NULL_TREE;
8044 case VIEW_CONVERT_EXPR:
8045 if (TREE_TYPE (op0) == type)
8046 return op0;
8047 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8048 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8049 type, TREE_OPERAND (op0, 0));
8050 if (TREE_CODE (op0) == MEM_REF)
8051 return fold_build2_loc (loc, MEM_REF, type,
8052 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8054 /* For integral conversions with the same precision or pointer
8055 conversions use a NOP_EXPR instead. */
8056 if ((INTEGRAL_TYPE_P (type)
8057 || (POINTER_TYPE_P (type)
8058 && !upc_shared_type_p (TREE_TYPE (type))))
8059 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8060 || (POINTER_TYPE_P (TREE_TYPE (op0))
8061 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8062 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8063 return fold_convert_loc (loc, type, op0);
8065 /* Strip inner integral conversions that do not change the precision. */
8066 if (CONVERT_EXPR_P (op0)
8067 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8068 || (POINTER_TYPE_P (TREE_TYPE (op0))
8069 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8070 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8071 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8072 && !upc_shared_type_p (TREE_TYPE (
8073 TREE_TYPE (
8074 TREE_OPERAND (op0, 0))))))
8075 && (TYPE_PRECISION (TREE_TYPE (op0))
8076 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8077 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8078 type, TREE_OPERAND (op0, 0));
8080 return fold_view_convert_expr (type, op0);
8082 case NEGATE_EXPR:
8083 tem = fold_negate_expr (loc, arg0);
8084 if (tem)
8085 return fold_convert_loc (loc, type, tem);
8086 return NULL_TREE;
8088 case ABS_EXPR:
8089 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8090 return fold_abs_const (arg0, type);
8091 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8092 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8093 /* Convert fabs((double)float) into (double)fabsf(float). */
8094 else if (TREE_CODE (arg0) == NOP_EXPR
8095 && TREE_CODE (type) == REAL_TYPE)
8097 tree targ0 = strip_float_extensions (arg0);
8098 if (targ0 != arg0)
8099 return fold_convert_loc (loc, type,
8100 fold_build1_loc (loc, ABS_EXPR,
8101 TREE_TYPE (targ0),
8102 targ0));
8104 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8105 else if (TREE_CODE (arg0) == ABS_EXPR)
8106 return arg0;
8107 else if (tree_expr_nonnegative_p (arg0))
8108 return arg0;
8110 /* Strip sign ops from argument. */
8111 if (TREE_CODE (type) == REAL_TYPE)
8113 tem = fold_strip_sign_ops (arg0);
8114 if (tem)
8115 return fold_build1_loc (loc, ABS_EXPR, type,
8116 fold_convert_loc (loc, type, tem));
8118 return NULL_TREE;
8120 case CONJ_EXPR:
8121 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8122 return fold_convert_loc (loc, type, arg0);
8123 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8125 tree itype = TREE_TYPE (type);
8126 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8127 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8128 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8129 negate_expr (ipart));
8131 if (TREE_CODE (arg0) == COMPLEX_CST)
8133 tree itype = TREE_TYPE (type);
8134 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8135 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8136 return build_complex (type, rpart, negate_expr (ipart));
8138 if (TREE_CODE (arg0) == CONJ_EXPR)
8139 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8140 return NULL_TREE;
8142 case BIT_NOT_EXPR:
8143 if (TREE_CODE (arg0) == INTEGER_CST)
8144 return fold_not_const (arg0, type);
8145 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8146 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8147 /* Convert ~ (-A) to A - 1. */
8148 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8149 return fold_build2_loc (loc, MINUS_EXPR, type,
8150 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8151 build_int_cst (type, 1));
8152 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8153 else if (INTEGRAL_TYPE_P (type)
8154 && ((TREE_CODE (arg0) == MINUS_EXPR
8155 && integer_onep (TREE_OPERAND (arg0, 1)))
8156 || (TREE_CODE (arg0) == PLUS_EXPR
8157 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8158 return fold_build1_loc (loc, NEGATE_EXPR, type,
8159 fold_convert_loc (loc, type,
8160 TREE_OPERAND (arg0, 0)));
8161 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8162 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8163 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8164 fold_convert_loc (loc, type,
8165 TREE_OPERAND (arg0, 0)))))
8166 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8167 fold_convert_loc (loc, type,
8168 TREE_OPERAND (arg0, 1)));
8169 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8170 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8171 fold_convert_loc (loc, type,
8172 TREE_OPERAND (arg0, 1)))))
8173 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8174 fold_convert_loc (loc, type,
8175 TREE_OPERAND (arg0, 0)), tem);
8176 /* Perform BIT_NOT_EXPR on each element individually. */
8177 else if (TREE_CODE (arg0) == VECTOR_CST)
8179 tree *elements;
8180 tree elem;
8181 unsigned count = VECTOR_CST_NELTS (arg0), i;
8183 elements = XALLOCAVEC (tree, count);
8184 for (i = 0; i < count; i++)
8186 elem = VECTOR_CST_ELT (arg0, i);
8187 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8188 if (elem == NULL_TREE)
8189 break;
8190 elements[i] = elem;
8192 if (i == count)
8193 return build_vector (type, elements);
8196 return NULL_TREE;
8198 case TRUTH_NOT_EXPR:
8199 /* The argument to invert_truthvalue must have Boolean type. */
8200 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8201 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8203 /* Note that the operand of this must be an int
8204 and its values must be 0 or 1.
8205 ("true" is a fixed value perhaps depending on the language,
8206 but we don't handle values other than 1 correctly yet.) */
8207 tem = fold_truth_not_expr (loc, arg0);
8208 if (!tem)
8209 return NULL_TREE;
8210 return fold_convert_loc (loc, type, tem);
8212 case REALPART_EXPR:
8213 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8214 return fold_convert_loc (loc, type, arg0);
8215 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8216 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8217 TREE_OPERAND (arg0, 1));
8218 if (TREE_CODE (arg0) == COMPLEX_CST)
8219 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8220 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8222 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8223 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8224 fold_build1_loc (loc, REALPART_EXPR, itype,
8225 TREE_OPERAND (arg0, 0)),
8226 fold_build1_loc (loc, REALPART_EXPR, itype,
8227 TREE_OPERAND (arg0, 1)));
8228 return fold_convert_loc (loc, type, tem);
8230 if (TREE_CODE (arg0) == CONJ_EXPR)
8232 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8233 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8234 TREE_OPERAND (arg0, 0));
8235 return fold_convert_loc (loc, type, tem);
8237 if (TREE_CODE (arg0) == CALL_EXPR)
8239 tree fn = get_callee_fndecl (arg0);
8240 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8241 switch (DECL_FUNCTION_CODE (fn))
8243 CASE_FLT_FN (BUILT_IN_CEXPI):
8244 fn = mathfn_built_in (type, BUILT_IN_COS);
8245 if (fn)
8246 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8247 break;
8249 default:
8250 break;
8253 return NULL_TREE;
8255 case IMAGPART_EXPR:
8256 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8257 return build_zero_cst (type);
8258 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8259 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8260 TREE_OPERAND (arg0, 0));
8261 if (TREE_CODE (arg0) == COMPLEX_CST)
8262 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8263 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8265 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8266 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8267 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8268 TREE_OPERAND (arg0, 0)),
8269 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8270 TREE_OPERAND (arg0, 1)));
8271 return fold_convert_loc (loc, type, tem);
8273 if (TREE_CODE (arg0) == CONJ_EXPR)
8275 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8276 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8277 return fold_convert_loc (loc, type, negate_expr (tem));
8279 if (TREE_CODE (arg0) == CALL_EXPR)
8281 tree fn = get_callee_fndecl (arg0);
8282 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8283 switch (DECL_FUNCTION_CODE (fn))
8285 CASE_FLT_FN (BUILT_IN_CEXPI):
8286 fn = mathfn_built_in (type, BUILT_IN_SIN);
8287 if (fn)
8288 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8289 break;
8291 default:
8292 break;
8295 return NULL_TREE;
8297 case INDIRECT_REF:
8298 /* Fold *&X to X if X is an lvalue. */
8299 if (TREE_CODE (op0) == ADDR_EXPR)
8301 tree op00 = TREE_OPERAND (op0, 0);
8302 if ((TREE_CODE (op00) == VAR_DECL
8303 || TREE_CODE (op00) == PARM_DECL
8304 || TREE_CODE (op00) == RESULT_DECL)
8305 && !TREE_READONLY (op00))
8306 return op00;
8308 return NULL_TREE;
8310 case VEC_UNPACK_LO_EXPR:
8311 case VEC_UNPACK_HI_EXPR:
8312 case VEC_UNPACK_FLOAT_LO_EXPR:
8313 case VEC_UNPACK_FLOAT_HI_EXPR:
8315 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8316 tree *elts;
8317 enum tree_code subcode;
8319 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8320 if (TREE_CODE (arg0) != VECTOR_CST)
8321 return NULL_TREE;
8323 elts = XALLOCAVEC (tree, nelts * 2);
8324 if (!vec_cst_ctor_to_array (arg0, elts))
8325 return NULL_TREE;
8327 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8328 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8329 elts += nelts;
8331 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8332 subcode = NOP_EXPR;
8333 else
8334 subcode = FLOAT_EXPR;
8336 for (i = 0; i < nelts; i++)
8338 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8339 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8340 return NULL_TREE;
8343 return build_vector (type, elts);
8346 case REDUC_MIN_EXPR:
8347 case REDUC_MAX_EXPR:
8348 case REDUC_PLUS_EXPR:
8350 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8351 tree *elts;
8352 enum tree_code subcode;
8354 if (TREE_CODE (op0) != VECTOR_CST)
8355 return NULL_TREE;
8357 elts = XALLOCAVEC (tree, nelts);
8358 if (!vec_cst_ctor_to_array (op0, elts))
8359 return NULL_TREE;
8361 switch (code)
8363 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8364 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8365 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8366 default: gcc_unreachable ();
8369 for (i = 1; i < nelts; i++)
8371 elts[0] = const_binop (subcode, elts[0], elts[i]);
8372 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8373 return NULL_TREE;
8374 elts[i] = build_zero_cst (TREE_TYPE (type));
8377 return build_vector (type, elts);
8380 default:
8381 return NULL_TREE;
8382 } /* switch (code) */
8386 /* If the operation was a conversion do _not_ mark a resulting constant
8387 with TREE_OVERFLOW if the original constant was not. These conversions
8388 have implementation defined behavior and retaining the TREE_OVERFLOW
8389 flag here would confuse later passes such as VRP. */
8390 tree
8391 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8392 tree type, tree op0)
8394 tree res = fold_unary_loc (loc, code, type, op0);
8395 if (res
8396 && TREE_CODE (res) == INTEGER_CST
8397 && TREE_CODE (op0) == INTEGER_CST
8398 && CONVERT_EXPR_CODE_P (code))
8399 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8401 return res;
8404 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8405 operands OP0 and OP1. LOC is the location of the resulting expression.
8406 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8407 Return the folded expression if folding is successful. Otherwise,
8408 return NULL_TREE. */
8409 static tree
8410 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8411 tree arg0, tree arg1, tree op0, tree op1)
8413 tree tem;
8415 /* We only do these simplifications if we are optimizing. */
8416 if (!optimize)
8417 return NULL_TREE;
8419 /* Check for things like (A || B) && (A || C). We can convert this
8420 to A || (B && C). Note that either operator can be any of the four
8421 truth and/or operations and the transformation will still be
8422 valid. Also note that we only care about order for the
8423 ANDIF and ORIF operators. If B contains side effects, this
8424 might change the truth-value of A. */
8425 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8426 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8427 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8428 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8429 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8430 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8432 tree a00 = TREE_OPERAND (arg0, 0);
8433 tree a01 = TREE_OPERAND (arg0, 1);
8434 tree a10 = TREE_OPERAND (arg1, 0);
8435 tree a11 = TREE_OPERAND (arg1, 1);
8436 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8437 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8438 && (code == TRUTH_AND_EXPR
8439 || code == TRUTH_OR_EXPR));
8441 if (operand_equal_p (a00, a10, 0))
8442 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8443 fold_build2_loc (loc, code, type, a01, a11));
8444 else if (commutative && operand_equal_p (a00, a11, 0))
8445 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8446 fold_build2_loc (loc, code, type, a01, a10));
8447 else if (commutative && operand_equal_p (a01, a10, 0))
8448 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8449 fold_build2_loc (loc, code, type, a00, a11));
8451 /* This case is tricky because we must either have commutative
8452 operators or else A10 must not have side-effects. */
8454 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8455 && operand_equal_p (a01, a11, 0))
8456 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8457 fold_build2_loc (loc, code, type, a00, a10),
8458 a01);
8461 /* See if we can build a range comparison. */
8462 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8463 return tem;
8465 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8466 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8468 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8469 if (tem)
8470 return fold_build2_loc (loc, code, type, tem, arg1);
8473 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8474 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8476 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8477 if (tem)
8478 return fold_build2_loc (loc, code, type, arg0, tem);
8481 /* Check for the possibility of merging component references. If our
8482 lhs is another similar operation, try to merge its rhs with our
8483 rhs. Then try to merge our lhs and rhs. */
8484 if (TREE_CODE (arg0) == code
8485 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8486 TREE_OPERAND (arg0, 1), arg1)))
8487 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8489 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8490 return tem;
8492 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8493 && (code == TRUTH_AND_EXPR
8494 || code == TRUTH_ANDIF_EXPR
8495 || code == TRUTH_OR_EXPR
8496 || code == TRUTH_ORIF_EXPR))
8498 enum tree_code ncode, icode;
8500 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8501 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8502 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8504 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8505 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8506 We don't want to pack more than two leaves into a non-IF AND/OR
8507 expression.
8508 If the tree code of the left-hand operand isn't an AND/OR-IF code
8509 and isn't equal to IF-CODE, then we don't want to add the
8510 right-hand operand.  If the inner right-hand side of the left-hand
8511 operand has side effects, or isn't simple, then we can't add to it,
8512 as otherwise we might destroy the if-sequence.
8513 if (TREE_CODE (arg0) == icode
8514 && simple_operand_p_2 (arg1)
8515 /* Needed for sequence points to handle trappings, and
8516 side-effects. */
8517 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8519 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8520 arg1);
8521 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8522 tem);
8524 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8525 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8526 else if (TREE_CODE (arg1) == icode
8527 && simple_operand_p_2 (arg0)
8528 /* Needed for sequence points to handle trappings, and
8529 side-effects. */
8530 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8532 tem = fold_build2_loc (loc, ncode, type,
8533 arg0, TREE_OPERAND (arg1, 0));
8534 return fold_build2_loc (loc, icode, type, tem,
8535 TREE_OPERAND (arg1, 1));
8537 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8538 into (A OR B).
8539 For sequence point consistency, we need to check for trapping,
8540 and side-effects. */
8541 else if (code == icode && simple_operand_p_2 (arg0)
8542 && simple_operand_p_2 (arg1))
8543 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8546 return NULL_TREE;
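/* Worked example (illustrative): for code == TRUTH_AND_EXPR with
   arg0 == (a || b) and arg1 == (a || c), the first transformation above
   yields a || (b && c), so A is tested only once; B must be free of side
   effects since under ANDIF/ORIF it might no longer be evaluated.  A quick
   host-side check of the underlying identity (sketch):  */
#if 0
static int
sketch_distribute_check (void)
{
  int a, b, c, ok = 1;
  for (a = 0; a < 2; a++)
    for (b = 0; b < 2; b++)
      for (c = 0; c < 2; c++)
	ok &= ((a || b) && (a || c)) == (a || (b && c));
  return ok;	/* Always 1: || distributes over &&.  */
}
#endif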
8549 /* Fold a binary expression of code CODE and type TYPE with operands
8550 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8551 Return the folded expression if folding is successful. Otherwise,
8552 return NULL_TREE. */
8554 static tree
8555 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8557 enum tree_code compl_code;
8559 if (code == MIN_EXPR)
8560 compl_code = MAX_EXPR;
8561 else if (code == MAX_EXPR)
8562 compl_code = MIN_EXPR;
8563 else
8564 gcc_unreachable ();
8566 /* MIN (MAX (a, b), b) == b. */
8567 if (TREE_CODE (op0) == compl_code
8568 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8569 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8571 /* MIN (MAX (b, a), b) == b. */
8572 if (TREE_CODE (op0) == compl_code
8573 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8574 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8575 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8577 /* MIN (a, MAX (a, b)) == a. */
8578 if (TREE_CODE (op1) == compl_code
8579 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8580 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8581 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8583 /* MIN (a, MAX (b, a)) == a. */
8584 if (TREE_CODE (op1) == compl_code
8585 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8586 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8587 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8589 return NULL_TREE;
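/* Worked example (illustrative): with code == MIN_EXPR the complement code
   is MAX_EXPR, so MIN (MAX (a, b), b) folds to b -- the inner MAX is at
   least b, and the outer MIN clamps it back down.  omit_one_operand_loc
   preserves any side effects of the dropped operand a.  Host-side spot
   check (sketch; SK_MIN/SK_MAX are hypothetical helpers):  */
#if 0
#define SK_MIN(x, y) ((x) < (y) ? (x) : (y))
#define SK_MAX(x, y) ((x) > (y) ? (x) : (y))
/* For any a: SK_MIN (SK_MAX (a, 3), 3) == 3.  */
#endif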
8592 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8593 by changing CODE to reduce the magnitude of constants involved in
8594 ARG0 of the comparison.
8595 Returns a canonicalized comparison tree if a simplification was
8596 possible, otherwise returns NULL_TREE.
8597 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8598 valid if signed overflow is undefined. */
8600 static tree
8601 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8602 tree arg0, tree arg1,
8603 bool *strict_overflow_p)
8605 enum tree_code code0 = TREE_CODE (arg0);
8606 tree t, cst0 = NULL_TREE;
8607 int sgn0;
8608 bool swap = false;
8610 /* Match A +- CST code arg1 and CST code arg1. We can change the
8611 first form only if overflow is undefined. */
8612 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8613 /* In principle pointers also have undefined overflow behavior,
8614 but that causes problems elsewhere. */
8615 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8616 && (code0 == MINUS_EXPR
8617 || code0 == PLUS_EXPR)
8618 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8619 || code0 == INTEGER_CST))
8620 return NULL_TREE;
8622 /* Identify the constant in arg0 and its sign. */
8623 if (code0 == INTEGER_CST)
8624 cst0 = arg0;
8625 else
8626 cst0 = TREE_OPERAND (arg0, 1);
8627 sgn0 = tree_int_cst_sgn (cst0);
8629 /* Overflowed constants and zero will cause problems. */
8630 if (integer_zerop (cst0)
8631 || TREE_OVERFLOW (cst0))
8632 return NULL_TREE;
8634 /* See if we can reduce the magnitude of the constant in
8635 arg0 by changing the comparison code. */
8636 if (code0 == INTEGER_CST)
8638 /* CST <= arg1 -> CST-1 < arg1. */
8639 if (code == LE_EXPR && sgn0 == 1)
8640 code = LT_EXPR;
8641 /* -CST < arg1 -> -CST-1 <= arg1. */
8642 else if (code == LT_EXPR && sgn0 == -1)
8643 code = LE_EXPR;
8644 /* CST > arg1 -> CST-1 >= arg1. */
8645 else if (code == GT_EXPR && sgn0 == 1)
8646 code = GE_EXPR;
8647 /* -CST >= arg1 -> -CST-1 > arg1. */
8648 else if (code == GE_EXPR && sgn0 == -1)
8649 code = GT_EXPR;
8650 else
8651 return NULL_TREE;
8652 /* arg1 code' CST' might be more canonical. */
8653 swap = true;
8655 else
8657 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8658 if (code == LT_EXPR
8659 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8660 code = LE_EXPR;
8661 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8662 else if (code == GT_EXPR
8663 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8664 code = GE_EXPR;
8665 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8666 else if (code == LE_EXPR
8667 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8668 code = LT_EXPR;
8669 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8670 else if (code == GE_EXPR
8671 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8672 code = GT_EXPR;
8673 else
8674 return NULL_TREE;
8675 *strict_overflow_p = true;
8678 /* Now build the constant reduced in magnitude. But not if that
8679 would produce one outside of its type's range. */
8680 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8681 && ((sgn0 == 1
8682 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8683 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8684 || (sgn0 == -1
8685 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8686 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8687 /* We cannot swap the comparison here as that would cause us to
8688 endlessly recurse. */
8689 return NULL_TREE;
8691 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8692 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8693 if (code0 != INTEGER_CST)
8694 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8695 t = fold_convert (TREE_TYPE (arg1), t);
8697 /* If swapping might yield a more canonical form, do so. */
8698 if (swap)
8699 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8700 else
8701 return fold_build2_loc (loc, code, type, t, arg1);
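/* Worked examples (illustrative): for "x + 2 > y" with signed
   undefined-overflow x, code0 == PLUS_EXPR and sgn0 == 1, so the code
   becomes GE_EXPR and the constant shrinks, giving "x + 1 >= y"; that
   rewrite is only exact when x + 2 cannot wrap, which is why
   *STRICT_OVERFLOW_P is set for this form.  For a sole constant,
   "5 <= y" first becomes "4 < y" and is then swapped into the more
   canonical "y > 4".  */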
8704 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8705 overflow further. Try to decrease the magnitude of constants involved
8706 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8707 and put sole constants at the second argument position.
8708 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8710 static tree
8711 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8712 tree arg0, tree arg1)
8714 tree t;
8715 bool strict_overflow_p;
8716 const char * const warnmsg = G_("assuming signed overflow does not occur "
8717 "when reducing constant in comparison");
8719 /* Try canonicalization by simplifying arg0. */
8720 strict_overflow_p = false;
8721 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8722 &strict_overflow_p);
8723 if (t)
8725 if (strict_overflow_p)
8726 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8727 return t;
8730 /* Try canonicalization by simplifying arg1 using the swapped
8731 comparison. */
8732 code = swap_tree_comparison (code);
8733 strict_overflow_p = false;
8734 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8735 &strict_overflow_p);
8736 if (t && strict_overflow_p)
8737 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8738 return t;
8741 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8742 space. This is used to avoid issuing overflow warnings for
8743 expressions like &p->x, which cannot wrap. */
8745 static bool
8746 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8748 double_int di_offset, total;
8750 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8751 return true;
8753 if (bitpos < 0)
8754 return true;
8756 if (offset == NULL_TREE)
8757 di_offset = double_int_zero;
8758 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8759 return true;
8760 else
8761 di_offset = TREE_INT_CST (offset);
8763 bool overflow;
8764 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8765 total = di_offset.add_with_sign (units, true, &overflow);
8766 if (overflow)
8767 return true;
8769 if (total.high != 0)
8770 return true;
8772 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8773 if (size <= 0)
8774 return true;
8776 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8777 array. */
8778 if (TREE_CODE (base) == ADDR_EXPR)
8780 HOST_WIDE_INT base_size;
8782 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8783 if (base_size > 0 && size < base_size)
8784 size = base_size;
8787 return total.low > (unsigned HOST_WIDE_INT) size;
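/* Worked example (illustrative): for &p->x with p pointing to a 16-byte
   struct and x at byte offset 8, OFFSET is NULL_TREE and BITPOS is 64,
   so TOTAL becomes 8, which does not exceed the 16-byte object size:
   the address cannot wrap and no overflow warning is issued.  Any
   negative, non-constant, or overflowed piece makes the answer a
   conservative "true".  */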
8790 /* Subroutine of fold_binary. This routine performs all of the
8791 transformations that are common to the equality/inequality
8792 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8793 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8794 fold_binary should call fold_binary. Fold a comparison with
8795 tree code CODE and type TYPE with operands OP0 and OP1. Return
8796 the folded comparison or NULL_TREE. */
8798 static tree
8799 fold_comparison (location_t loc, enum tree_code code, tree type,
8800 tree op0, tree op1)
8802 tree arg0, arg1, tem;
8804 arg0 = op0;
8805 arg1 = op1;
8807 STRIP_SIGN_NOPS (arg0);
8808 STRIP_SIGN_NOPS (arg1);
8810 tem = fold_relational_const (code, type, arg0, arg1);
8811 if (tem != NULL_TREE)
8812 return tem;
8814 /* If one arg is a real or integer constant, put it last. */
8815 if (tree_swap_operands_p (arg0, arg1, true))
8816 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8818 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8819 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8820 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8821 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8822 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8823 && (TREE_CODE (arg1) == INTEGER_CST
8824 && !TREE_OVERFLOW (arg1)))
8826 tree const1 = TREE_OPERAND (arg0, 1);
8827 tree const2 = arg1;
8828 tree variable = TREE_OPERAND (arg0, 0);
8829 tree lhs;
8830 int lhs_add;
8831 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8833 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8834 TREE_TYPE (arg1), const2, const1);
8836 /* If the constant operation overflowed this can be
8837 simplified as a comparison against INT_MAX/INT_MIN. */
8838 if (TREE_CODE (lhs) == INTEGER_CST
8839 && TREE_OVERFLOW (lhs))
8841 int const1_sgn = tree_int_cst_sgn (const1);
8842 enum tree_code code2 = code;
8844 /* Get the sign of the constant on the lhs if the
8845 operation were VARIABLE + CONST1. */
8846 if (TREE_CODE (arg0) == MINUS_EXPR)
8847 const1_sgn = -const1_sgn;
8849 /* The sign of the constant determines if we overflowed
8850 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8851 Canonicalize to the INT_MIN overflow by swapping the comparison
8852 if necessary. */
8853 if (const1_sgn == -1)
8854 code2 = swap_tree_comparison (code);
8856 /* We now can look at the canonicalized case
8857 VARIABLE + 1 CODE2 INT_MIN
8858 and decide on the result. */
8859 if (code2 == LT_EXPR
8860 || code2 == LE_EXPR
8861 || code2 == EQ_EXPR)
8862 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8863 else if (code2 == NE_EXPR
8864 || code2 == GE_EXPR
8865 || code2 == GT_EXPR)
8866 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8869 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8870 && (TREE_CODE (lhs) != INTEGER_CST
8871 || !TREE_OVERFLOW (lhs)))
8873 if (code != EQ_EXPR && code != NE_EXPR)
8874 fold_overflow_warning ("assuming signed overflow does not occur "
8875 "when changing X +- C1 cmp C2 to "
8876 "X cmp C1 +- C2",
8877 WARN_STRICT_OVERFLOW_COMPARISON);
8878 return fold_build2_loc (loc, code, type, variable, lhs);
8882 /* For comparisons of pointers we can decompose it to a compile time
8883 comparison of the base objects and the offsets into the object.
8884 This requires at least one operand being an ADDR_EXPR or a
8885 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8886 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8887 && (TREE_CODE (arg0) == ADDR_EXPR
8888 || TREE_CODE (arg1) == ADDR_EXPR
8889 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8890 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8892 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8893 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8894 enum machine_mode mode;
8895 int volatilep, unsignedp;
8896 bool indirect_base0 = false, indirect_base1 = false;
8898 /* Get base and offset for the access. Strip ADDR_EXPR for
8899 get_inner_reference, but put it back by stripping INDIRECT_REF
8900 off the base object if possible. indirect_baseN will be true
8901 if baseN is not an address but refers to the object itself. */
8902 base0 = arg0;
8903 if (TREE_CODE (arg0) == ADDR_EXPR)
8905 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8906 &bitsize, &bitpos0, &offset0, &mode,
8907 &unsignedp, &volatilep, false);
8908 if (TREE_CODE (base0) == INDIRECT_REF)
8909 base0 = TREE_OPERAND (base0, 0);
8910 else
8911 indirect_base0 = true;
8913 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8915 base0 = TREE_OPERAND (arg0, 0);
8916 STRIP_SIGN_NOPS (base0);
8917 if (TREE_CODE (base0) == ADDR_EXPR)
8919 base0 = TREE_OPERAND (base0, 0);
8920 indirect_base0 = true;
8922 offset0 = TREE_OPERAND (arg0, 1);
8923 if (host_integerp (offset0, 0))
8925 HOST_WIDE_INT off = size_low_cst (offset0);
8926 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8927 * BITS_PER_UNIT)
8928 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8930 bitpos0 = off * BITS_PER_UNIT;
8931 offset0 = NULL_TREE;
8936 base1 = arg1;
8937 if (TREE_CODE (arg1) == ADDR_EXPR)
8939 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8940 &bitsize, &bitpos1, &offset1, &mode,
8941 &unsignedp, &volatilep, false);
8942 if (TREE_CODE (base1) == INDIRECT_REF)
8943 base1 = TREE_OPERAND (base1, 0);
8944 else
8945 indirect_base1 = true;
8947 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8949 base1 = TREE_OPERAND (arg1, 0);
8950 STRIP_SIGN_NOPS (base1);
8951 if (TREE_CODE (base1) == ADDR_EXPR)
8953 base1 = TREE_OPERAND (base1, 0);
8954 indirect_base1 = true;
8956 offset1 = TREE_OPERAND (arg1, 1);
8957 if (host_integerp (offset1, 0))
8959 HOST_WIDE_INT off = size_low_cst (offset1);
8960 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8961 * BITS_PER_UNIT)
8962 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8964 bitpos1 = off * BITS_PER_UNIT;
8965 offset1 = NULL_TREE;
8970 /* A local variable can never be pointed to by
8971 the default SSA name of an incoming parameter. */
8972 if ((TREE_CODE (arg0) == ADDR_EXPR
8973 && indirect_base0
8974 && TREE_CODE (base0) == VAR_DECL
8975 && auto_var_in_fn_p (base0, current_function_decl)
8976 && !indirect_base1
8977 && TREE_CODE (base1) == SSA_NAME
8978 && SSA_NAME_IS_DEFAULT_DEF (base1)
8979 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8980 || (TREE_CODE (arg1) == ADDR_EXPR
8981 && indirect_base1
8982 && TREE_CODE (base1) == VAR_DECL
8983 && auto_var_in_fn_p (base1, current_function_decl)
8984 && !indirect_base0
8985 && TREE_CODE (base0) == SSA_NAME
8986 && SSA_NAME_IS_DEFAULT_DEF (base0)
8987 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8989 if (code == NE_EXPR)
8990 return constant_boolean_node (1, type);
8991 else if (code == EQ_EXPR)
8992 return constant_boolean_node (0, type);
8994 /* If we have equivalent bases we might be able to simplify. */
8995 else if (indirect_base0 == indirect_base1
8996 && operand_equal_p (base0, base1, 0))
8998 /* We can fold this expression to a constant if the non-constant
8999 offset parts are equal. */
9000 if ((offset0 == offset1
9001 || (offset0 && offset1
9002 && operand_equal_p (offset0, offset1, 0)))
9003 && (code == EQ_EXPR
9004 || code == NE_EXPR
9005 || (indirect_base0 && DECL_P (base0))
9006 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9009 if (code != EQ_EXPR
9010 && code != NE_EXPR
9011 && bitpos0 != bitpos1
9012 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9013 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9014 fold_overflow_warning (("assuming pointer wraparound does not "
9015 "occur when comparing P +- C1 with "
9016 "P +- C2"),
9017 WARN_STRICT_OVERFLOW_CONDITIONAL);
9019 switch (code)
9021 case EQ_EXPR:
9022 return constant_boolean_node (bitpos0 == bitpos1, type);
9023 case NE_EXPR:
9024 return constant_boolean_node (bitpos0 != bitpos1, type);
9025 case LT_EXPR:
9026 return constant_boolean_node (bitpos0 < bitpos1, type);
9027 case LE_EXPR:
9028 return constant_boolean_node (bitpos0 <= bitpos1, type);
9029 case GE_EXPR:
9030 return constant_boolean_node (bitpos0 >= bitpos1, type);
9031 case GT_EXPR:
9032 return constant_boolean_node (bitpos0 > bitpos1, type);
9033 default:;
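	      /* E.g. given "struct s { int a; int b; } x;", the test
		 "&x.a != &x.b" compares the constant bit positions 0 and
		 32 (assuming 32-bit int) and folds to true.  */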
9036 /* We can simplify the comparison to a comparison of the variable
9037 offset parts if the constant offset parts are equal.
9038 Be careful to use signed size type here because otherwise we
9039 mess with array offsets in the wrong way. This is possible
9040 because pointer arithmetic is restricted to remain within an
9041 object and overflow on pointer differences is undefined as of
9042 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9043 else if (bitpos0 == bitpos1
9044 && ((code == EQ_EXPR || code == NE_EXPR)
9045 || (indirect_base0 && DECL_P (base0))
9046 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9048 /* By converting to signed size type we cover middle-end pointer
9049 arithmetic which operates on unsigned pointer types of size
9050 type size and ARRAY_REF offsets which are properly sign or
9051 zero extended from their type in case it is narrower than
9052 size type. */
9053 if (offset0 == NULL_TREE)
9054 offset0 = build_int_cst (ssizetype, 0);
9055 else
9056 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9057 if (offset1 == NULL_TREE)
9058 offset1 = build_int_cst (ssizetype, 0);
9059 else
9060 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9062 if (code != EQ_EXPR
9063 && code != NE_EXPR
9064 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9065 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9066 fold_overflow_warning (("assuming pointer wraparound does not "
9067 "occur when comparing P +- C1 with "
9068 "P +- C2"),
9069 WARN_STRICT_OVERFLOW_COMPARISON);
9071 return fold_build2_loc (loc, code, type, offset0, offset1);
9074 /* For non-equal bases we can simplify if they are addresses
9075 of decls with local binding, or of constants. */
9076 else if (indirect_base0 && indirect_base1
9077 /* We know that !operand_equal_p (base0, base1, 0)
9078 because the if condition was false. But make
9079 sure two decls are not the same. */
9080 && base0 != base1
9081 && TREE_CODE (arg0) == ADDR_EXPR
9082 && TREE_CODE (arg1) == ADDR_EXPR
9083 && (((TREE_CODE (base0) == VAR_DECL
9084 || TREE_CODE (base0) == PARM_DECL)
9085 && (targetm.binds_local_p (base0)
9086 || CONSTANT_CLASS_P (base1)))
9087 || CONSTANT_CLASS_P (base0))
9088 && (((TREE_CODE (base1) == VAR_DECL
9089 || TREE_CODE (base1) == PARM_DECL)
9090 && (targetm.binds_local_p (base1)
9091 || CONSTANT_CLASS_P (base0)))
9092 || CONSTANT_CLASS_P (base1)))
9094 if (code == EQ_EXPR)
9095 return omit_two_operands_loc (loc, type, boolean_false_node,
9096 arg0, arg1);
9097 else if (code == NE_EXPR)
9098 return omit_two_operands_loc (loc, type, boolean_true_node,
9099 arg0, arg1);
9101 /* For equal offsets we can simplify to a comparison of the
9102 base addresses. */
9103 else if (bitpos0 == bitpos1
9104 && (indirect_base0
9105 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9106 && (indirect_base1
9107 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9108 && ((offset0 == offset1)
9109 || (offset0 && offset1
9110 && operand_equal_p (offset0, offset1, 0))))
9112 if (indirect_base0)
9113 base0 = build_fold_addr_expr_loc (loc, base0);
9114 if (indirect_base1)
9115 base1 = build_fold_addr_expr_loc (loc, base1);
9116 return fold_build2_loc (loc, code, type, base0, base1);
9120 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9121 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9122 the resulting offset is smaller in absolute value than the
9123 original one. */
9124 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9125 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9126 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9127 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9128 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9129 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9130 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9132 tree const1 = TREE_OPERAND (arg0, 1);
9133 tree const2 = TREE_OPERAND (arg1, 1);
9134 tree variable1 = TREE_OPERAND (arg0, 0);
9135 tree variable2 = TREE_OPERAND (arg1, 0);
9136 tree cst;
9137 const char * const warnmsg = G_("assuming signed overflow does not "
9138 "occur when combining constants around "
9139 "a comparison");
9141 /* Put the constant on the side where it doesn't overflow and is
9142 of lower absolute value than before. */
9143 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9144 ? MINUS_EXPR : PLUS_EXPR,
9145 const2, const1);
9146 if (!TREE_OVERFLOW (cst)
9147 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9149 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9150 return fold_build2_loc (loc, code, type,
9151 variable1,
9152 fold_build2_loc (loc,
9153 TREE_CODE (arg1), TREE_TYPE (arg1),
9154 variable2, cst));
9157 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9158 ? MINUS_EXPR : PLUS_EXPR,
9159 const1, const2);
9160 if (!TREE_OVERFLOW (cst)
9161 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9163 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9164 return fold_build2_loc (loc, code, type,
9165 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9166 variable1, cst),
9167 variable2);
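	  /* For example, "x + 7 < y + 10" becomes "x < y + 3": the
	     combined constant 3 is smaller in absolute value than the
	     original 10, so no new overflow can be introduced.  */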
9171 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9172 signed arithmetic case. That form is created by the compiler
9173 often enough for folding it to be of value. One example is in
9174 computing loop trip counts after Operator Strength Reduction. */
9175 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9176 && TREE_CODE (arg0) == MULT_EXPR
9177 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9178 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9179 && integer_zerop (arg1))
9181 tree const1 = TREE_OPERAND (arg0, 1);
9182 tree const2 = arg1; /* zero */
9183 tree variable1 = TREE_OPERAND (arg0, 0);
9184 enum tree_code cmp_code = code;
9186 /* Handle unfolded multiplication by zero. */
9187 if (integer_zerop (const1))
9188 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9190 fold_overflow_warning (("assuming signed overflow does not occur when "
9191 "eliminating multiplication in comparison "
9192 "with zero"),
9193 WARN_STRICT_OVERFLOW_COMPARISON);
9195 /* If const1 is negative we swap the sense of the comparison. */
9196 if (tree_int_cst_sgn (const1) < 0)
9197 cmp_code = swap_tree_comparison (cmp_code);
9199 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
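      /* E.g. "x * 4 > 0" folds to "x > 0", and "x * -4 > 0" folds to
	 "x < 0" because the negative multiplier swaps the comparison;
	 both assume signed multiplication does not overflow.  */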
9202 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9203 if (tem)
9204 return tem;
9206 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9208 tree targ0 = strip_float_extensions (arg0);
9209 tree targ1 = strip_float_extensions (arg1);
9210 tree newtype = TREE_TYPE (targ0);
9212 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9213 newtype = TREE_TYPE (targ1);
9215 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9216 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9217 return fold_build2_loc (loc, code, type,
9218 fold_convert_loc (loc, newtype, targ0),
9219 fold_convert_loc (loc, newtype, targ1));
9221 /* (-a) CMP (-b) -> b CMP a */
9222 if (TREE_CODE (arg0) == NEGATE_EXPR
9223 && TREE_CODE (arg1) == NEGATE_EXPR)
9224 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9225 TREE_OPERAND (arg0, 0));
9227 if (TREE_CODE (arg1) == REAL_CST)
9229 REAL_VALUE_TYPE cst;
9230 cst = TREE_REAL_CST (arg1);
9232 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9233 if (TREE_CODE (arg0) == NEGATE_EXPR)
9234 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9235 TREE_OPERAND (arg0, 0),
9236 build_real (TREE_TYPE (arg1),
9237 real_value_negate (&cst)));
9239 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9240 /* a CMP (-0) -> a CMP 0 */
9241 if (REAL_VALUE_MINUS_ZERO (cst))
9242 return fold_build2_loc (loc, code, type, arg0,
9243 build_real (TREE_TYPE (arg1), dconst0));
9245 /* x != NaN is always true, other ops are always false. */
9246 if (REAL_VALUE_ISNAN (cst)
9247 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9249 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9250 return omit_one_operand_loc (loc, type, tem, arg0);
9253 /* Fold comparisons against infinity. */
9254 if (REAL_VALUE_ISINF (cst)
9255 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9257 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9258 if (tem != NULL_TREE)
9259 return tem;
9263 /* If this is a comparison of a real constant with a PLUS_EXPR
9264 or a MINUS_EXPR of a real constant, we can convert it into a
9265 comparison with a revised real constant, provided that
9266 unsafe_math_optimizations are enabled and no overflow occurs. */
9267 if (flag_unsafe_math_optimizations
9268 && TREE_CODE (arg1) == REAL_CST
9269 && (TREE_CODE (arg0) == PLUS_EXPR
9270 || TREE_CODE (arg0) == MINUS_EXPR)
9271 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9272 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9273 ? MINUS_EXPR : PLUS_EXPR,
9274 arg1, TREE_OPERAND (arg0, 1)))
9275 && !TREE_OVERFLOW (tem))
9276 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9278 /* Likewise, we can simplify a comparison of a real constant with
9279 a MINUS_EXPR whose first operand is also a real constant, i.e.
9280 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9281 floating-point types only if -fassociative-math is set. */
9282 if (flag_associative_math
9283 && TREE_CODE (arg1) == REAL_CST
9284 && TREE_CODE (arg0) == MINUS_EXPR
9285 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9286 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9287 arg1))
9288 && !TREE_OVERFLOW (tem))
9289 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9290 TREE_OPERAND (arg0, 1), tem);
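	  /* For instance, with -fassociative-math "(10.0 - x) < 4.0"
	     becomes "x > 6.0": 10.0 - 4.0 is folded at compile time and
	     the comparison is swapped.  */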
9292 /* Fold comparisons against built-in math functions. */
9293 if (TREE_CODE (arg1) == REAL_CST
9294 && flag_unsafe_math_optimizations
9295 && ! flag_errno_math)
9297 enum built_in_function fcode = builtin_mathfn_code (arg0);
9299 if (fcode != END_BUILTINS)
9301 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9302 if (tem != NULL_TREE)
9303 return tem;
9308 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9309 && CONVERT_EXPR_P (arg0))
9311 /* If we are widening one operand of an integer comparison,
9312 see if the other operand is similarly being widened. Perhaps we
9313 can do the comparison in the narrower type. */
9314 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9315 if (tem)
9316 return tem;
9318 /* Or if we are changing signedness. */
9319 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9320 if (tem)
9321 return tem;
9324 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9325 constant, we can simplify it. */
9326 if (TREE_CODE (arg1) == INTEGER_CST
9327 && (TREE_CODE (arg0) == MIN_EXPR
9328 || TREE_CODE (arg0) == MAX_EXPR)
9329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9331 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9332 if (tem)
9333 return tem;
9336 /* Simplify comparison of something with itself. (For IEEE
9337 floating-point, we can only do some of these simplifications.) */
9338 if (operand_equal_p (arg0, arg1, 0))
9340 switch (code)
9342 case EQ_EXPR:
9343 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9344 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9345 return constant_boolean_node (1, type);
9346 break;
9348 case GE_EXPR:
9349 case LE_EXPR:
9350 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9351 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9352 return constant_boolean_node (1, type);
9353 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9355 case NE_EXPR:
9356 /* For NE, we can only do this simplification if the operands
9357 are integral or we don't honor IEEE floating point NaNs. */
9358 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9359 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9360 break;
9361 /* ... fall through ... */
9362 case GT_EXPR:
9363 case LT_EXPR:
9364 return constant_boolean_node (0, type);
9365 default:
9366 gcc_unreachable ();
9370 /* If we are comparing an expression that just has comparisons
9371 of two integer values, arithmetic expressions of those comparisons,
9372 and constants, we can simplify it. There are only three cases
9373 to check: the two values can either be equal, the first can be
9374 greater, or the second can be greater. Fold the expression for
9375 those three values. Since each value must be 0 or 1, we have
9376 eight possibilities, each of which corresponds to the constant 0
9377 or 1 or one of the six possible comparisons.
9379 This handles common cases like (a > b) == 0 but also handles
9380 expressions like ((x > y) - (y > x)) > 0, which supposedly
9381 occur in macroized code. */
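  /* Worked example: for "((x > y) - (y > x)) > 0" the substitutions
     below yield high_result 1, equal_result 0 and low_result 0, i.e.
     the mask 4, so the whole expression folds to "x > y".  */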
9383 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9385 tree cval1 = 0, cval2 = 0;
9386 int save_p = 0;
9388 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9389 /* Don't handle degenerate cases here; they should already
9390 have been handled anyway. */
9391 && cval1 != 0 && cval2 != 0
9392 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9393 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9394 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9395 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9396 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9397 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9398 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9400 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9401 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9403 /* We can't just pass T to eval_subst in case cval1 or cval2
9404 was the same as ARG1. */
9406 tree high_result
9407 = fold_build2_loc (loc, code, type,
9408 eval_subst (loc, arg0, cval1, maxval,
9409 cval2, minval),
9410 arg1);
9411 tree equal_result
9412 = fold_build2_loc (loc, code, type,
9413 eval_subst (loc, arg0, cval1, maxval,
9414 cval2, maxval),
9415 arg1);
9416 tree low_result
9417 = fold_build2_loc (loc, code, type,
9418 eval_subst (loc, arg0, cval1, minval,
9419 cval2, maxval),
9420 arg1);
9422 /* All three of these results should be 0 or 1. Confirm they are.
9423 Then use those values to select the proper code to use. */
9425 if (TREE_CODE (high_result) == INTEGER_CST
9426 && TREE_CODE (equal_result) == INTEGER_CST
9427 && TREE_CODE (low_result) == INTEGER_CST)
9429 /* Make a 3-bit mask with the high-order bit being the
9430 value for `>', the next for '=', and the low for '<'. */
9431 switch ((integer_onep (high_result) * 4)
9432 + (integer_onep (equal_result) * 2)
9433 + integer_onep (low_result))
9435 case 0:
9436 /* Always false. */
9437 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9438 case 1:
9439 code = LT_EXPR;
9440 break;
9441 case 2:
9442 code = EQ_EXPR;
9443 break;
9444 case 3:
9445 code = LE_EXPR;
9446 break;
9447 case 4:
9448 code = GT_EXPR;
9449 break;
9450 case 5:
9451 code = NE_EXPR;
9452 break;
9453 case 6:
9454 code = GE_EXPR;
9455 break;
9456 case 7:
9457 /* Always true. */
9458 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9461 if (save_p)
9463 tem = save_expr (build2 (code, type, cval1, cval2));
9464 SET_EXPR_LOCATION (tem, loc);
9465 return tem;
9467 return fold_build2_loc (loc, code, type, cval1, cval2);
9472 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9473 into a single range test. */
9474 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9475 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9476 && TREE_CODE (arg1) == INTEGER_CST
9477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9478 && !integer_zerop (TREE_OPERAND (arg0, 1))
9479 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9480 && !TREE_OVERFLOW (arg1))
9482 tem = fold_div_compare (loc, code, type, arg0, arg1);
9483 if (tem != NULL_TREE)
9484 return tem;
9487 /* Fold ~X op ~Y as Y op X. */
9488 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9489 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9491 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9492 return fold_build2_loc (loc, code, type,
9493 fold_convert_loc (loc, cmp_type,
9494 TREE_OPERAND (arg1, 0)),
9495 TREE_OPERAND (arg0, 0));
9498 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9499 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9500 && TREE_CODE (arg1) == INTEGER_CST)
9502 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9503 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9504 TREE_OPERAND (arg0, 0),
9505 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9506 fold_convert_loc (loc, cmp_type, arg1)));
9509 return NULL_TREE;
9513 /* Subroutine of fold_binary. Optimize complex multiplications of the
9514 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9515 argument EXPR represents the expression "z" of type TYPE. */
9517 static tree
9518 fold_mult_zconjz (location_t loc, tree type, tree expr)
9520 tree itype = TREE_TYPE (type);
9521 tree rpart, ipart, tem;
9523 if (TREE_CODE (expr) == COMPLEX_EXPR)
9525 rpart = TREE_OPERAND (expr, 0);
9526 ipart = TREE_OPERAND (expr, 1);
9528 else if (TREE_CODE (expr) == COMPLEX_CST)
9530 rpart = TREE_REALPART (expr);
9531 ipart = TREE_IMAGPART (expr);
9533 else
9535 expr = save_expr (expr);
9536 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9537 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9540 rpart = save_expr (rpart);
9541 ipart = save_expr (ipart);
9542 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9543 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9544 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9545 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9546 build_zero_cst (itype));
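/* E.g. for z = a + b*i this builds (a*a + b*b) + 0*i, with save_expr
   ensuring each part is evaluated only once.  */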
9550 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9551 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9552 guarantees that P and N have the same least significant log2(M) bits.
9553 N is not otherwise constrained. In particular, N is not normalized to
9554 0 <= N < M as is common. In general, the precise value of P is unknown.
9555 M is chosen as large as possible such that constant N can be determined.
9557 Returns M and sets *RESIDUE to N.
9559 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9560 account. This is not always possible due to PR 35705.
9563 static unsigned HOST_WIDE_INT
9564 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9565 bool allow_func_align)
9567 enum tree_code code;
9569 *residue = 0;
9571 code = TREE_CODE (expr);
9572 if (code == ADDR_EXPR)
9574 unsigned int bitalign;
9575 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9576 *residue /= BITS_PER_UNIT;
9577 return bitalign / BITS_PER_UNIT;
9579 else if (code == POINTER_PLUS_EXPR)
9581 tree op0, op1;
9582 unsigned HOST_WIDE_INT modulus;
9583 enum tree_code inner_code;
9585 op0 = TREE_OPERAND (expr, 0);
9586 STRIP_NOPS (op0);
9587 modulus = get_pointer_modulus_and_residue (op0, residue,
9588 allow_func_align);
9590 op1 = TREE_OPERAND (expr, 1);
9591 STRIP_NOPS (op1);
9592 inner_code = TREE_CODE (op1);
9593 if (inner_code == INTEGER_CST)
9595 *residue += TREE_INT_CST_LOW (op1);
9596 return modulus;
9598 else if (inner_code == MULT_EXPR)
9600 op1 = TREE_OPERAND (op1, 1);
9601 if (TREE_CODE (op1) == INTEGER_CST)
9603 unsigned HOST_WIDE_INT align;
9605 /* Compute the greatest power-of-2 divisor of op1. */
9606 align = TREE_INT_CST_LOW (op1);
9607 align &= -align;
9609 /* If align is non-zero and less than modulus, replace
9610 modulus with align. If align is 0, then either op1 is 0
9611 or the greatest power-of-2 divisor of op1 doesn't fit in an
9612 unsigned HOST_WIDE_INT. In either case, no additional
9613 constraint is imposed. */
9614 if (align)
9615 modulus = MIN (modulus, align);
9617 return modulus;
9622 /* If we get here, we were unable to determine anything useful about the
9623 expression. */
9624 return 1;
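/* Illustrative example (with a hypothetical 8-byte-aligned BUF): for
   "&buf + i * 4 + 2" the ADDR_EXPR yields modulus 8 and residue 0, the
   MULT_EXPR narrows the modulus to 4, and the trailing constant sets
   the residue to 2, i.e. the pointer value is 2 modulo 4.  */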
9627 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9628 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9630 static bool
9631 vec_cst_ctor_to_array (tree arg, tree *elts)
9633 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9635 if (TREE_CODE (arg) == VECTOR_CST)
9637 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9638 elts[i] = VECTOR_CST_ELT (arg, i);
9640 else if (TREE_CODE (arg) == CONSTRUCTOR)
9642 constructor_elt *elt;
9644 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9645 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9646 return false;
9647 else
9648 elts[i] = elt->value;
9650 else
9651 return false;
9652 for (; i < nelts; i++)
9653 elts[i]
9654 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9655 return true;
9658 /* Attempt to fold a vector permutation of vectors ARG0 and ARG1 using the
9659 selector SEL. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9660 NULL_TREE otherwise. */
9662 static tree
9663 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9666 tree *elts;
9667 bool need_ctor = false;
9669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9670 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9671 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9672 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9673 return NULL_TREE;
9675 elts = XALLOCAVEC (tree, nelts * 3);
9676 if (!vec_cst_ctor_to_array (arg0, elts)
9677 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9678 return NULL_TREE;
9680 for (i = 0; i < nelts; i++)
9682 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9683 need_ctor = true;
9684 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9687 if (need_ctor)
9689 vec<constructor_elt, va_gc> *v;
9690 vec_alloc (v, nelts);
9691 for (i = 0; i < nelts; i++)
9692 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9693 return build_constructor (type, v);
9695 else
9696 return build_vector (type, &elts[2 * nelts]);
9699 /* Try to fold a pointer difference of type TYPE between two address
9700 expressions of array references AREF0 and AREF1 using location LOC.
9701 Return a simplified expression for the difference or NULL_TREE. */
9703 static tree
9704 fold_addr_of_array_ref_difference (location_t loc, tree type,
9705 tree aref0, tree aref1)
9707 tree base0 = TREE_OPERAND (aref0, 0);
9708 tree base1 = TREE_OPERAND (aref1, 0);
9709 tree base_offset = build_int_cst (type, 0);
9711 /* If the bases are array references as well, recurse. If the bases
9712 are pointer indirections compute the difference of the pointers.
9713 If the bases are equal, we are set. */
9714 if ((TREE_CODE (base0) == ARRAY_REF
9715 && TREE_CODE (base1) == ARRAY_REF
9716 && (base_offset
9717 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9718 || (INDIRECT_REF_P (base0)
9719 && INDIRECT_REF_P (base1)
9720 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9721 TREE_OPERAND (base0, 0),
9722 TREE_OPERAND (base1, 0))))
9723 || operand_equal_p (base0, base1, 0))
9725 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9726 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9727 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9728 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9729 return fold_build2_loc (loc, PLUS_EXPR, type,
9730 base_offset,
9731 fold_build2_loc (loc, MULT_EXPR, type,
9732 diff, esz));
9734 return NULL_TREE;
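/* E.g. "&a[i] - &a[j]" with a common base folds to a zero base offset
   plus "(i - j) * sizeof (a[0])", the index difference scaled by the
   element size.  */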
9737 /* If the real or vector real constant CST of type TYPE has an exact
9738 inverse, return it, else return NULL. */
9740 static tree
9741 exact_inverse (tree type, tree cst)
9743 REAL_VALUE_TYPE r;
9744 tree unit_type, *elts;
9745 enum machine_mode mode;
9746 unsigned vec_nelts, i;
9748 switch (TREE_CODE (cst))
9750 case REAL_CST:
9751 r = TREE_REAL_CST (cst);
9753 if (exact_real_inverse (TYPE_MODE (type), &r))
9754 return build_real (type, r);
9756 return NULL_TREE;
9758 case VECTOR_CST:
9759 vec_nelts = VECTOR_CST_NELTS (cst);
9760 elts = XALLOCAVEC (tree, vec_nelts);
9761 unit_type = TREE_TYPE (type);
9762 mode = TYPE_MODE (unit_type);
9764 for (i = 0; i < vec_nelts; i++)
9766 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9767 if (!exact_real_inverse (mode, &r))
9768 return NULL_TREE;
9769 elts[i] = build_real (unit_type, r);
9772 return build_vector (type, elts);
9774 default:
9775 return NULL_TREE;
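/* E.g. 4.0 has the exact inverse 0.25, so "x / 4.0" may become
   "x * 0.25", whereas 3.0 has no exact binary inverse and yields
   NULL_TREE.  */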
9779 /* Fold a binary expression of code CODE and type TYPE with operands
9780 OP0 and OP1. LOC is the location of the resulting expression.
9781 Return the folded expression if folding is successful. Otherwise,
9782 return NULL_TREE. */
9784 tree
9785 fold_binary_loc (location_t loc,
9786 enum tree_code code, tree type, tree op0, tree op1)
9788 enum tree_code_class kind = TREE_CODE_CLASS (code);
9789 tree arg0, arg1, tem;
9790 tree t1 = NULL_TREE;
9791 bool strict_overflow_p;
9793 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9794 && TREE_CODE_LENGTH (code) == 2
9795 && op0 != NULL_TREE
9796 && op1 != NULL_TREE);
9798 arg0 = op0;
9799 arg1 = op1;
9801 /* Strip any conversions that don't change the mode. This is
9802 safe for every expression, except for a comparison expression
9803 because its signedness is derived from its operands. So, in
9804 the latter case, only strip conversions that don't change the
9805 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9806 preserved.
9808 Note that this is done as an internal manipulation within the
9809 constant folder, in order to find the simplest representation
9810 of the arguments so that their form can be studied. In any
9811 case, the appropriate type conversions should be put back in
9812 the tree that will get out of the constant folder. */
9814 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9816 STRIP_SIGN_NOPS (arg0);
9817 STRIP_SIGN_NOPS (arg1);
9819 else
9821 STRIP_NOPS (arg0);
9822 STRIP_NOPS (arg1);
9825 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9826 constant but we can't do arithmetic on them. */
9827 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9828 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9829 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9830 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9831 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9832 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9834 if (kind == tcc_binary)
9836 /* Make sure type and arg0 have the same saturating flag. */
9837 gcc_assert (TYPE_SATURATING (type)
9838 == TYPE_SATURATING (TREE_TYPE (arg0)));
9839 tem = const_binop (code, arg0, arg1);
9841 else if (kind == tcc_comparison)
9842 tem = fold_relational_const (code, type, arg0, arg1);
9843 else
9844 tem = NULL_TREE;
9846 if (tem != NULL_TREE)
9848 if (TREE_TYPE (tem) != type)
9849 tem = fold_convert_loc (loc, type, tem);
9850 return tem;
9854 /* If this is a commutative operation, and ARG0 is a constant, move it
9855 to ARG1 to reduce the number of tests below. */
9856 if (commutative_tree_code (code)
9857 && tree_swap_operands_p (arg0, arg1, true))
9858 return fold_build2_loc (loc, code, type, op1, op0);
9860 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9862 First check for cases where an arithmetic operation is applied to a
9863 compound, conditional, or comparison operation. Push the arithmetic
9864 operation inside the compound or conditional to see if any folding
9865 can then be done. Convert comparison to conditional for this purpose.
9866 This also optimizes non-constant cases that used to be done in
9867 expand_expr.
9869 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9870 where one of the operands is a comparison and the other is a comparison, a
9871 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9872 code below would make the expression more complex. Change it to a
9873 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9874 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9876 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9877 || code == EQ_EXPR || code == NE_EXPR)
9878 && TREE_CODE (type) != VECTOR_TYPE
9879 && ((truth_value_p (TREE_CODE (arg0))
9880 && (truth_value_p (TREE_CODE (arg1))
9881 || (TREE_CODE (arg1) == BIT_AND_EXPR
9882 && integer_onep (TREE_OPERAND (arg1, 1)))))
9883 || (truth_value_p (TREE_CODE (arg1))
9884 && (truth_value_p (TREE_CODE (arg0))
9885 || (TREE_CODE (arg0) == BIT_AND_EXPR
9886 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9888 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9889 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9890 : TRUTH_XOR_EXPR,
9891 boolean_type_node,
9892 fold_convert_loc (loc, boolean_type_node, arg0),
9893 fold_convert_loc (loc, boolean_type_node, arg1));
9895 if (code == EQ_EXPR)
9896 tem = invert_truthvalue_loc (loc, tem);
9898 return fold_convert_loc (loc, type, tem);
9901 if (TREE_CODE_CLASS (code) == tcc_binary
9902 || TREE_CODE_CLASS (code) == tcc_comparison)
9904 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9906 tem = fold_build2_loc (loc, code, type,
9907 fold_convert_loc (loc, TREE_TYPE (op0),
9908 TREE_OPERAND (arg0, 1)), op1);
9909 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9910 tem);
9912 if (TREE_CODE (arg1) == COMPOUND_EXPR
9913 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9915 tem = fold_build2_loc (loc, code, type, op0,
9916 fold_convert_loc (loc, TREE_TYPE (op1),
9917 TREE_OPERAND (arg1, 1)));
9918 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9919 tem);
9922 if (TREE_CODE (arg0) == COND_EXPR
9923 || TREE_CODE (arg0) == VEC_COND_EXPR
9924 || COMPARISON_CLASS_P (arg0))
9926 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9927 arg0, arg1,
9928 /*cond_first_p=*/1);
9929 if (tem != NULL_TREE)
9930 return tem;
9933 if (TREE_CODE (arg1) == COND_EXPR
9934 || TREE_CODE (arg1) == VEC_COND_EXPR
9935 || COMPARISON_CLASS_P (arg1))
9937 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9938 arg1, arg0,
9939 /*cond_first_p=*/0);
9940 if (tem != NULL_TREE)
9941 return tem;
9945 switch (code)
9947 case MEM_REF:
9948 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9949 if (TREE_CODE (arg0) == ADDR_EXPR
9950 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9952 tree iref = TREE_OPERAND (arg0, 0);
9953 return fold_build2 (MEM_REF, type,
9954 TREE_OPERAND (iref, 0),
9955 int_const_binop (PLUS_EXPR, arg1,
9956 TREE_OPERAND (iref, 1)));
9959 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9960 if (TREE_CODE (arg0) == ADDR_EXPR
9961 && handled_component_p (TREE_OPERAND (arg0, 0)))
9963 tree base;
9964 HOST_WIDE_INT coffset;
9965 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9966 &coffset);
9967 if (!base)
9968 return NULL_TREE;
9969 return fold_build2 (MEM_REF, type,
9970 build_fold_addr_expr (base),
9971 int_const_binop (PLUS_EXPR, arg1,
9972 size_int (coffset)));
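      /* E.g. MEM[&MEM[p, 4], 8] becomes MEM[p, 12], and, for a
	 hypothetical "struct s { int a; int b; } x;" with B at byte
	 offset 4, MEM[&x.b, 4] becomes MEM[&x, 8].  */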
9975 return NULL_TREE;
9977 case POINTER_PLUS_EXPR:
9978 /* 0 +p index -> (type)index */
9979 if (integer_zerop (arg0))
9980 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9982 /* PTR +p 0 -> PTR */
9983 if (integer_zerop (arg1))
9984 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9986 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9987 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9988 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9989 return fold_convert_loc (loc, type,
9990 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9991 fold_convert_loc (loc, sizetype,
9992 arg1),
9993 fold_convert_loc (loc, sizetype,
9994 arg0)));
9996 /* (PTR +p B) +p A -> PTR +p (B + A) */
9997 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9998 && !upc_shared_type_p (TREE_TYPE (type)))
10000 tree inner;
10001 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10002 tree arg00 = TREE_OPERAND (arg0, 0);
10003 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10004 arg01, fold_convert_loc (loc, sizetype, arg1));
10005 return fold_convert_loc (loc, type,
10006 fold_build_pointer_plus_loc (loc,
10007 arg00, inner));
10010 /* PTR_CST +p CST -> CST1 */
10011 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10012 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10013 fold_convert_loc (loc, type, arg1));
10015 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10016 of the array. The loop optimizer sometimes produces this type of
10017 expression. */
10018 if (TREE_CODE (arg0) == ADDR_EXPR)
10020 tem = try_move_mult_to_index (loc, arg0,
10021 fold_convert_loc (loc,
10022 ssizetype, arg1));
10023 if (tem)
10024 return fold_convert_loc (loc, type, tem);
10027 return NULL_TREE;
10029 case PLUS_EXPR:
10030 /* A + (-B) -> A - B */
10031 if (TREE_CODE (arg1) == NEGATE_EXPR)
10032 return fold_build2_loc (loc, MINUS_EXPR, type,
10033 fold_convert_loc (loc, type, arg0),
10034 fold_convert_loc (loc, type,
10035 TREE_OPERAND (arg1, 0)));
10036 /* (-A) + B -> B - A */
10037 if (TREE_CODE (arg0) == NEGATE_EXPR
10038 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10039 return fold_build2_loc (loc, MINUS_EXPR, type,
10040 fold_convert_loc (loc, type, arg1),
10041 fold_convert_loc (loc, type,
10042 TREE_OPERAND (arg0, 0)));
10044 /* Disable further optimizations involving UPC shared pointers,
10045 because integers are not interoperable with shared pointers. */
10046 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10047 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10048 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10049 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10050 return NULL_TREE;
10052 if (INTEGRAL_TYPE_P (type))
10054 /* Convert ~A + 1 to -A. */
10055 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10056 && integer_onep (arg1))
10057 return fold_build1_loc (loc, NEGATE_EXPR, type,
10058 fold_convert_loc (loc, type,
10059 TREE_OPERAND (arg0, 0)));
10061 /* ~X + X is -1. */
10062 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10063 && !TYPE_OVERFLOW_TRAPS (type))
10065 tree tem = TREE_OPERAND (arg0, 0);
10067 STRIP_NOPS (tem);
10068 if (operand_equal_p (tem, arg1, 0))
10070 t1 = build_int_cst_type (type, -1);
10071 return omit_one_operand_loc (loc, type, t1, arg1);
10075 /* X + ~X is -1. */
10076 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10077 && !TYPE_OVERFLOW_TRAPS (type))
10079 tree tem = TREE_OPERAND (arg1, 0);
10081 STRIP_NOPS (tem);
10082 if (operand_equal_p (arg0, tem, 0))
10084 t1 = build_int_cst_type (type, -1);
10085 return omit_one_operand_loc (loc, type, t1, arg0);
10089 /* X + (X / CST) * -CST is X % CST. */
10090 if (TREE_CODE (arg1) == MULT_EXPR
10091 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10092 && operand_equal_p (arg0,
10093 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10095 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10096 tree cst1 = TREE_OPERAND (arg1, 1);
10097 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10098 cst1, cst0);
10099 if (sum && integer_zerop (sum))
10100 return fold_convert_loc (loc, type,
10101 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10102 TREE_TYPE (arg0), arg0,
10103 cst0));
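	  /* E.g. "x + (x / 4) * -4" folds to "x % 4"; the multiplier -4
	     and the divisor 4 sum to zero, which identifies the
	     pattern.  */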
10107 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10108 one. Make sure the type is not saturating and has the signedness of
10109 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10110 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10111 if ((TREE_CODE (arg0) == MULT_EXPR
10112 || TREE_CODE (arg1) == MULT_EXPR)
10113 && !TYPE_SATURATING (type)
10114 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10115 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10116 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10118 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10119 if (tem)
10120 return tem;
10123 if (! FLOAT_TYPE_P (type))
10125 if (integer_zerop (arg1))
10126 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10128 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10129 with a constant, and the two constants have no bits in common,
10130 we should treat this as a BIT_IOR_EXPR since this may produce more
10131 simplifications. */
10132 if (TREE_CODE (arg0) == BIT_AND_EXPR
10133 && TREE_CODE (arg1) == BIT_AND_EXPR
10134 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10135 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10136 && integer_zerop (const_binop (BIT_AND_EXPR,
10137 TREE_OPERAND (arg0, 1),
10138 TREE_OPERAND (arg1, 1))))
10140 code = BIT_IOR_EXPR;
10141 goto bit_ior;
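	  /* E.g. "(x & 0xf0) + (y & 0x0f)" uses disjoint masks, so it is
	     rewritten as "(x & 0xf0) | (y & 0x0f)" and handed to the
	     BIT_IOR_EXPR folders.  */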
10144 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10145 (plus (plus (mult) (mult)) (foo)) so that we can
10146 take advantage of the factoring cases below. */
10147 if (TYPE_OVERFLOW_WRAPS (type)
10148 && (((TREE_CODE (arg0) == PLUS_EXPR
10149 || TREE_CODE (arg0) == MINUS_EXPR)
10150 && TREE_CODE (arg1) == MULT_EXPR)
10151 || ((TREE_CODE (arg1) == PLUS_EXPR
10152 || TREE_CODE (arg1) == MINUS_EXPR)
10153 && TREE_CODE (arg0) == MULT_EXPR)))
10155 tree parg0, parg1, parg, marg;
10156 enum tree_code pcode;
10158 if (TREE_CODE (arg1) == MULT_EXPR)
10159 parg = arg0, marg = arg1;
10160 else
10161 parg = arg1, marg = arg0;
10162 pcode = TREE_CODE (parg);
10163 parg0 = TREE_OPERAND (parg, 0);
10164 parg1 = TREE_OPERAND (parg, 1);
10165 STRIP_NOPS (parg0);
10166 STRIP_NOPS (parg1);
10168 if (TREE_CODE (parg0) == MULT_EXPR
10169 && TREE_CODE (parg1) != MULT_EXPR)
10170 return fold_build2_loc (loc, pcode, type,
10171 fold_build2_loc (loc, PLUS_EXPR, type,
10172 fold_convert_loc (loc, type,
10173 parg0),
10174 fold_convert_loc (loc, type,
10175 marg)),
10176 fold_convert_loc (loc, type, parg1));
10177 if (TREE_CODE (parg0) != MULT_EXPR
10178 && TREE_CODE (parg1) == MULT_EXPR)
10179 return
10180 fold_build2_loc (loc, PLUS_EXPR, type,
10181 fold_convert_loc (loc, type, parg0),
10182 fold_build2_loc (loc, pcode, type,
10183 fold_convert_loc (loc, type, marg),
10184 fold_convert_loc (loc, type,
10185 parg1)));
10188 else
10190 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10191 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10194 /* Likewise if the operands are reversed. */
10195 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10196 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10198 /* Convert X + -C into X - C. */
10199 if (TREE_CODE (arg1) == REAL_CST
10200 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10202 tem = fold_negate_const (arg1, type);
10203 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10204 return fold_build2_loc (loc, MINUS_EXPR, type,
10205 fold_convert_loc (loc, type, arg0),
10206 fold_convert_loc (loc, type, tem));
10209 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10210 to __complex__ ( x, y ). This is not the same for SNaNs or
10211 if signed zeros are involved. */
10212 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10213 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10214 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10216 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10217 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10218 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10219 bool arg0rz = false, arg0iz = false;
10220 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10221 || (arg0i && (arg0iz = real_zerop (arg0i))))
10223 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10224 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10225 if (arg0rz && arg1i && real_zerop (arg1i))
10227 tree rp = arg1r ? arg1r
10228 : build1 (REALPART_EXPR, rtype, arg1);
10229 tree ip = arg0i ? arg0i
10230 : build1 (IMAGPART_EXPR, rtype, arg0);
10231 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10233 else if (arg0iz && arg1r && real_zerop (arg1r))
10235 tree rp = arg0r ? arg0r
10236 : build1 (REALPART_EXPR, rtype, arg0);
10237 tree ip = arg1i ? arg1i
10238 : build1 (IMAGPART_EXPR, rtype, arg1);
10239 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10244 if (flag_unsafe_math_optimizations
10245 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10246 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10247 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10248 return tem;
10250 /* Convert x+x into x*2.0. */
10251 if (operand_equal_p (arg0, arg1, 0)
10252 && SCALAR_FLOAT_TYPE_P (type))
10253 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10254 build_real (type, dconst2));
10256 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10257 We associate floats only if the user has specified
10258 -fassociative-math. */
10259 if (flag_associative_math
10260 && TREE_CODE (arg1) == PLUS_EXPR
10261 && TREE_CODE (arg0) != MULT_EXPR)
10263 tree tree10 = TREE_OPERAND (arg1, 0);
10264 tree tree11 = TREE_OPERAND (arg1, 1);
10265 if (TREE_CODE (tree11) == MULT_EXPR
10266 && TREE_CODE (tree10) == MULT_EXPR)
10268 tree tree0;
10269 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10270 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10273 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10274 We associate floats only if the user has specified
10275 -fassociative-math. */
10276 if (flag_associative_math
10277 && TREE_CODE (arg0) == PLUS_EXPR
10278 && TREE_CODE (arg1) != MULT_EXPR)
10280 tree tree00 = TREE_OPERAND (arg0, 0);
10281 tree tree01 = TREE_OPERAND (arg0, 1);
10282 if (TREE_CODE (tree01) == MULT_EXPR
10283 && TREE_CODE (tree00) == MULT_EXPR)
10285 tree tree0;
10286 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10287 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10292 bit_rotate:
10293 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10294 is a rotate of A by C1 bits. */
10295 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10296 is a rotate of A by B bits. */
10298 enum tree_code code0, code1;
10299 tree rtype;
10300 code0 = TREE_CODE (arg0);
10301 code1 = TREE_CODE (arg1);
10302 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10303 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10304 && operand_equal_p (TREE_OPERAND (arg0, 0),
10305 TREE_OPERAND (arg1, 0), 0)
10306 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10307 TYPE_UNSIGNED (rtype))
10308 /* Only create rotates in complete modes. Other cases are not
10309 expanded properly. */
10310 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10312 tree tree01, tree11;
10313 enum tree_code code01, code11;
10315 tree01 = TREE_OPERAND (arg0, 1);
10316 tree11 = TREE_OPERAND (arg1, 1);
10317 STRIP_NOPS (tree01);
10318 STRIP_NOPS (tree11);
10319 code01 = TREE_CODE (tree01);
10320 code11 = TREE_CODE (tree11);
10321 if (code01 == INTEGER_CST
10322 && code11 == INTEGER_CST
10323 && TREE_INT_CST_HIGH (tree01) == 0
10324 && TREE_INT_CST_HIGH (tree11) == 0
10325 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10326 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10328 tem = build2_loc (loc, LROTATE_EXPR,
10329 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10330 TREE_OPERAND (arg0, 0),
10331 code0 == LSHIFT_EXPR ? tree01 : tree11);
10332 return fold_convert_loc (loc, type, tem);
10334 else if (code11 == MINUS_EXPR)
10336 tree tree110, tree111;
10337 tree110 = TREE_OPERAND (tree11, 0);
10338 tree111 = TREE_OPERAND (tree11, 1);
10339 STRIP_NOPS (tree110);
10340 STRIP_NOPS (tree111);
10341 if (TREE_CODE (tree110) == INTEGER_CST
10342 && 0 == compare_tree_int (tree110,
10343 TYPE_PRECISION
10344 (TREE_TYPE (TREE_OPERAND
10345 (arg0, 0))))
10346 && operand_equal_p (tree01, tree111, 0))
10347 return
10348 fold_convert_loc (loc, type,
10349 build2 ((code0 == LSHIFT_EXPR
10350 ? LROTATE_EXPR
10351 : RROTATE_EXPR),
10352 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10353 TREE_OPERAND (arg0, 0), tree01));
10355 else if (code01 == MINUS_EXPR)
10357 tree tree010, tree011;
10358 tree010 = TREE_OPERAND (tree01, 0);
10359 tree011 = TREE_OPERAND (tree01, 1);
10360 STRIP_NOPS (tree010);
10361 STRIP_NOPS (tree011);
10362 if (TREE_CODE (tree010) == INTEGER_CST
10363 && 0 == compare_tree_int (tree010,
10364 TYPE_PRECISION
10365 (TREE_TYPE (TREE_OPERAND
10366 (arg0, 0))))
10367 && operand_equal_p (tree11, tree011, 0))
10368 return fold_convert_loc
10369 (loc, type,
10370 build2 ((code0 != LSHIFT_EXPR
10371 ? LROTATE_EXPR
10372 : RROTATE_EXPR),
10373 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10374 TREE_OPERAND (arg0, 0), tree11));
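	  /* E.g. for unsigned 32-bit A, "(a << 3) + (a >> 29)" becomes a
	     left-rotate of A by 3, and "(a << b) + (a >> (32 - b))"
	     becomes a left-rotate of A by B.  */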
10379 associate:
10380 /* In most languages, we can't associate operations on floats through
10381 parentheses. Rather than remember where the parentheses were, we
10382 don't associate floats at all, unless the user has specified
10383 -fassociative-math.
10384 And, we need to make sure type is not saturating. */
10386 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10387 && !TYPE_SATURATING (type))
10389 tree var0, con0, lit0, minus_lit0;
10390 tree var1, con1, lit1, minus_lit1;
10391 bool ok = true;
10393 /* Split both trees into variables, constants, and literals. Then
10394 associate each group together, the constants with literals,
10395 then the result with variables. This increases the chances of
10396 literals being recombined later and of generating relocatable
10397 expressions for the sum of a constant and literal. */
10398 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10399 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10400 code == MINUS_EXPR);
10402 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10403 if (code == MINUS_EXPR)
10404 code = PLUS_EXPR;
10406 /* With undefined overflow we can only associate constants with one
10407 variable, and constants whose association doesn't overflow. */
10408 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10409 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10411 if (var0 && var1)
10413 tree tmp0 = var0;
10414 tree tmp1 = var1;
10416 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10417 tmp0 = TREE_OPERAND (tmp0, 0);
10418 if (CONVERT_EXPR_P (tmp0)
10419 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10420 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10421 <= TYPE_PRECISION (type)))
10422 tmp0 = TREE_OPERAND (tmp0, 0);
10423 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10424 tmp1 = TREE_OPERAND (tmp1, 0);
10425 if (CONVERT_EXPR_P (tmp1)
10426 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10427 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10428 <= TYPE_PRECISION (type)))
10429 tmp1 = TREE_OPERAND (tmp1, 0);
10430 /* The only case we can still associate with two variables
10431 is if they are the same, modulo negation and bit-pattern
10432 preserving conversions. */
10433 if (!operand_equal_p (tmp0, tmp1, 0))
10434 ok = false;
10437 if (ok && lit0 && lit1)
10439 tree tmp0 = fold_convert (type, lit0);
10440 tree tmp1 = fold_convert (type, lit1);
10442 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10443 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10444 ok = false;
10448 /* Only do something if we found more than two objects. Otherwise,
10449 nothing has changed and we risk infinite recursion. */
10450 if (ok
10451 && (2 < ((var0 != 0) + (var1 != 0)
10452 + (con0 != 0) + (con1 != 0)
10453 + (lit0 != 0) + (lit1 != 0)
10454 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10456 var0 = associate_trees (loc, var0, var1, code, type);
10457 con0 = associate_trees (loc, con0, con1, code, type);
10458 lit0 = associate_trees (loc, lit0, lit1, code, type);
10459 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10461 /* Preserve the MINUS_EXPR if the negative part of the literal is
10462 greater than the positive part. Otherwise, the multiplicative
10463 folding code (i.e. extract_muldiv) may be fooled in case
10464 unsigned constants are subtracted, as in the following
10465 example: ((X*2 + 4) - 8U)/2. */
10466 if (minus_lit0 && lit0)
10468 if (TREE_CODE (lit0) == INTEGER_CST
10469 && TREE_CODE (minus_lit0) == INTEGER_CST
10470 && tree_int_cst_lt (lit0, minus_lit0))
10472 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10473 MINUS_EXPR, type);
10474 lit0 = 0;
10476 else
10478 lit0 = associate_trees (loc, lit0, minus_lit0,
10479 MINUS_EXPR, type);
10480 minus_lit0 = 0;
10483 if (minus_lit0)
10485 if (con0 == 0)
10486 return
10487 fold_convert_loc (loc, type,
10488 associate_trees (loc, var0, minus_lit0,
10489 MINUS_EXPR, type));
10490 else
10492 con0 = associate_trees (loc, con0, minus_lit0,
10493 MINUS_EXPR, type);
10494 return
10495 fold_convert_loc (loc, type,
10496 associate_trees (loc, var0, con0,
10497 PLUS_EXPR, type));
10501 con0 = associate_trees (loc, con0, lit0, code, type);
10502 return
10503 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10504 code, type));
10508 return NULL_TREE;
10510 case MINUS_EXPR:
10511 /* Pointer simplifications for subtraction, simple reassociations. */
10512 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10514 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10515 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10516 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10518 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10519 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10520 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10521 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10522 return fold_build2_loc (loc, PLUS_EXPR, type,
10523 fold_build2_loc (loc, MINUS_EXPR, type,
10524 arg00, arg10),
10525 fold_build2_loc (loc, MINUS_EXPR, type,
10526 arg01, arg11));
10528 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10529 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10531 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10532 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10533 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10534 fold_convert_loc (loc, type, arg1));
10535 if (tmp)
10536 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10539 /* A - (-B) -> A + B */
10540 if (TREE_CODE (arg1) == NEGATE_EXPR)
10541 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10542 fold_convert_loc (loc, type,
10543 TREE_OPERAND (arg1, 0)));
10545 /* Disable further optimizations involving UPC shared pointers,
10546 because integers are not interoperable with shared pointers.
10547 (The test below also detects pointer difference between
10548 shared pointers, which cannot be folded.) */
10550 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10551 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10552 return NULL_TREE;
10554 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10555 if (TREE_CODE (arg0) == NEGATE_EXPR
10556 && (FLOAT_TYPE_P (type)
10557 || INTEGRAL_TYPE_P (type))
10558 && negate_expr_p (arg1)
10559 && reorder_operands_p (arg0, arg1))
10560 return fold_build2_loc (loc, MINUS_EXPR, type,
10561 fold_convert_loc (loc, type,
10562 negate_expr (arg1)),
10563 fold_convert_loc (loc, type,
10564 TREE_OPERAND (arg0, 0)));
10565 /* Convert -A - 1 to ~A. */
10566 if (INTEGRAL_TYPE_P (type)
10567 && TREE_CODE (arg0) == NEGATE_EXPR
10568 && integer_onep (arg1)
10569 && !TYPE_OVERFLOW_TRAPS (type))
10570 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10571 fold_convert_loc (loc, type,
10572 TREE_OPERAND (arg0, 0)));
10574 /* Convert -1 - A to ~A. */
10575 if (INTEGRAL_TYPE_P (type)
10576 && integer_all_onesp (arg0))
10577 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10580 /* X - (X / CST) * CST is X % CST. */
10581 if (INTEGRAL_TYPE_P (type)
10582 && TREE_CODE (arg1) == MULT_EXPR
10583 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10584 && operand_equal_p (arg0,
10585 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10586 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10587 TREE_OPERAND (arg1, 1), 0))
10588 return
10589 fold_convert_loc (loc, type,
10590 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10591 arg0, TREE_OPERAND (arg1, 1)));
10593 if (! FLOAT_TYPE_P (type))
10595 if (integer_zerop (arg0))
10596 return negate_expr (fold_convert_loc (loc, type, arg1));
10597 if (integer_zerop (arg1))
10598 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10600 /* Fold A - (A & B) into ~B & A. */
10601 if (!TREE_SIDE_EFFECTS (arg0)
10602 && TREE_CODE (arg1) == BIT_AND_EXPR)
10604 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10606 tree arg10 = fold_convert_loc (loc, type,
10607 TREE_OPERAND (arg1, 0));
10608 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10609 fold_build1_loc (loc, BIT_NOT_EXPR,
10610 type, arg10),
10611 fold_convert_loc (loc, type, arg0));
10613 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10615 tree arg11 = fold_convert_loc (loc,
10616 type, TREE_OPERAND (arg1, 1));
10617 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10618 fold_build1_loc (loc, BIT_NOT_EXPR,
10619 type, arg11),
10620 fold_convert_loc (loc, type, arg0));
10624 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10625 any power of 2 minus 1. */
10626 if (TREE_CODE (arg0) == BIT_AND_EXPR
10627 && TREE_CODE (arg1) == BIT_AND_EXPR
10628 && operand_equal_p (TREE_OPERAND (arg0, 0),
10629 TREE_OPERAND (arg1, 0), 0))
10631 tree mask0 = TREE_OPERAND (arg0, 1);
10632 tree mask1 = TREE_OPERAND (arg1, 1);
10633 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10635 if (operand_equal_p (tem, mask1, 0))
10637 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10638 TREE_OPERAND (arg0, 0), mask1);
10639 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
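	      /* E.g. with B == 7 (a power of 2 minus 1),
		 "(a & ~7) - (a & 7)" becomes "(a ^ 7) - 7".  */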
10644 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10645 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10646 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10648 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10649 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10650 (-ARG1 + ARG0) reduces to -ARG1. */
10651 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10652 return negate_expr (fold_convert_loc (loc, type, arg1));
10654 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10655 __complex__ ( x, -y ). This is not the same for SNaNs or if
10656 signed zeros are involved. */
10657 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10658 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10659 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10661 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10662 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10663 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10664 bool arg0rz = false, arg0iz = false;
10665 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10666 || (arg0i && (arg0iz = real_zerop (arg0i))))
10668 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10669 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10670 if (arg0rz && arg1i && real_zerop (arg1i))
10672 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10673 arg1r ? arg1r
10674 : build1 (REALPART_EXPR, rtype, arg1));
10675 tree ip = arg0i ? arg0i
10676 : build1 (IMAGPART_EXPR, rtype, arg0);
10677 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10679 else if (arg0iz && arg1r && real_zerop (arg1r))
10681 tree rp = arg0r ? arg0r
10682 : build1 (REALPART_EXPR, rtype, arg0);
10683 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10684 arg1i ? arg1i
10685 : build1 (IMAGPART_EXPR, rtype, arg1));
10686 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10691 /* Fold &x - &x. This can happen from &x.foo - &x.
10692 This is unsafe for certain floats even in non-IEEE formats.
10693 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10694 Also note that operand_equal_p is always false if an operand
10695 is volatile. */
10697 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10698 && operand_equal_p (arg0, arg1, 0))
10699 return build_zero_cst (type);
10701 /* A - B -> A + (-B) if B is easily negatable. */
10702 if (negate_expr_p (arg1)
10703 && ((FLOAT_TYPE_P (type)
10704 /* Avoid this transformation if B is a positive REAL_CST. */
10705 && (TREE_CODE (arg1) != REAL_CST
10706 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10707 || INTEGRAL_TYPE_P (type)))
10708 return fold_build2_loc (loc, PLUS_EXPR, type,
10709 fold_convert_loc (loc, type, arg0),
10710 fold_convert_loc (loc, type,
10711 negate_expr (arg1)));
10713 /* Try folding difference of addresses. */
10715 HOST_WIDE_INT diff;
10717 if ((TREE_CODE (arg0) == ADDR_EXPR
10718 || TREE_CODE (arg1) == ADDR_EXPR)
10719 && ptr_difference_const (arg0, arg1, &diff))
10720 return build_int_cst_type (type, diff);
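/* Illustrative example (editorial): given "char buf[16]", the constant
   address difference "&buf[9] - &buf[2]" folds to 7.  */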
10723 /* Fold &a[i] - &a[j] to i-j. */
10724 if (TREE_CODE (arg0) == ADDR_EXPR
10725 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10726 && TREE_CODE (arg1) == ADDR_EXPR
10727 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10729 tree tem = fold_addr_of_array_ref_difference (loc, type,
10730 TREE_OPERAND (arg0, 0),
10731 TREE_OPERAND (arg1, 0));
10732 if (tem)
10733 return tem;
10736 if (FLOAT_TYPE_P (type)
10737 && flag_unsafe_math_optimizations
10738 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10739 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10740 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10741 return tem;
10743 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10744 one. Make sure the type is not saturating and has the signedness of
10745 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10746 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10747 if ((TREE_CODE (arg0) == MULT_EXPR
10748 || TREE_CODE (arg1) == MULT_EXPR)
10749 && !TYPE_SATURATING (type)
10750 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10751 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10752 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10754 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10755 if (tem)
10756 return tem;
10759 goto associate;
10761 case MULT_EXPR:
10762 /* (-A) * (-B) -> A * B */
10763 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10764 return fold_build2_loc (loc, MULT_EXPR, type,
10765 fold_convert_loc (loc, type,
10766 TREE_OPERAND (arg0, 0)),
10767 fold_convert_loc (loc, type,
10768 negate_expr (arg1)));
10769 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10770 return fold_build2_loc (loc, MULT_EXPR, type,
10771 fold_convert_loc (loc, type,
10772 negate_expr (arg0)),
10773 fold_convert_loc (loc, type,
10774 TREE_OPERAND (arg1, 0)));
10776 if (! FLOAT_TYPE_P (type))
10778 if (integer_zerop (arg1))
10779 return omit_one_operand_loc (loc, type, arg1, arg0);
10780 if (integer_onep (arg1))
10781 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10782 /* Transform x * -1 into -x. Make sure to do the negation
10783 on the original operand with conversions not stripped
10784 because we can only strip non-sign-changing conversions. */
10785 if (integer_all_onesp (arg1))
10786 return fold_convert_loc (loc, type, negate_expr (op0));
10787 /* Transform x * -C into -x * C if x is easily negatable. */
10788 if (TREE_CODE (arg1) == INTEGER_CST
10789 && tree_int_cst_sgn (arg1) == -1
10790 && negate_expr_p (arg0)
10791 && (tem = negate_expr (arg1)) != arg1
10792 && !TREE_OVERFLOW (tem))
10793 return fold_build2_loc (loc, MULT_EXPR, type,
10794 fold_convert_loc (loc, type,
10795 negate_expr (arg0)),
10796 tem);
10798 /* (a * (1 << b)) is (a << b). */
10799 if (TREE_CODE (arg1) == LSHIFT_EXPR
10800 && integer_onep (TREE_OPERAND (arg1, 0)))
10801 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10802 TREE_OPERAND (arg1, 1));
10803 if (TREE_CODE (arg0) == LSHIFT_EXPR
10804 && integer_onep (TREE_OPERAND (arg0, 0)))
10805 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10806 TREE_OPERAND (arg0, 1));
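/* Illustrative example (editorial): "a * (1 << n)" folds to "a << n",
   and symmetrically "(1 << n) * a" folds to "a << n" as well.  */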
10808 /* (A + A) * C -> A * 2 * C */
10809 if (TREE_CODE (arg0) == PLUS_EXPR
10810 && TREE_CODE (arg1) == INTEGER_CST
10811 && operand_equal_p (TREE_OPERAND (arg0, 0),
10812 TREE_OPERAND (arg0, 1), 0))
10813 return fold_build2_loc (loc, MULT_EXPR, type,
10814 omit_one_operand_loc (loc, type,
10815 TREE_OPERAND (arg0, 0),
10816 TREE_OPERAND (arg0, 1)),
10817 fold_build2_loc (loc, MULT_EXPR, type,
10818 build_int_cst (type, 2), arg1));
10820 strict_overflow_p = false;
10821 if (TREE_CODE (arg1) == INTEGER_CST
10822 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10823 &strict_overflow_p)))
10825 if (strict_overflow_p)
10826 fold_overflow_warning (("assuming signed overflow does not "
10827 "occur when simplifying "
10828 "multiplication"),
10829 WARN_STRICT_OVERFLOW_MISC);
10830 return fold_convert_loc (loc, type, tem);
10833 /* Optimize z * conj(z) for integer complex numbers. */
10834 if (TREE_CODE (arg0) == CONJ_EXPR
10835 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10836 return fold_mult_zconjz (loc, type, arg1);
10837 if (TREE_CODE (arg1) == CONJ_EXPR
10838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10839 return fold_mult_zconjz (loc, type, arg0);
10841 else
10843 /* Maybe fold x * 0 to 0. The expressions aren't the same
10844 when x is NaN, since x * 0 is also NaN. Nor are they the
10845 same in modes with signed zeros, since multiplying a
10846 negative value by 0 gives -0, not +0. */
10847 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10848 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10849 && real_zerop (arg1))
10850 return omit_one_operand_loc (loc, type, arg1, arg0);
10851 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10852 Likewise for complex arithmetic with signed zeros. */
10853 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10854 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10855 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10856 && real_onep (arg1))
10857 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10859 /* Transform x * -1.0 into -x. */
10860 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10861 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10862 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10863 && real_minus_onep (arg1))
10864 return fold_convert_loc (loc, type, negate_expr (arg0));
10866 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10867 the result for floating point types due to rounding, so it is applied
10868 only if -fassociative-math was specified. */
10869 if (flag_associative_math
10870 && TREE_CODE (arg0) == RDIV_EXPR
10871 && TREE_CODE (arg1) == REAL_CST
10872 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10874 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10875 arg1);
10876 if (tem)
10877 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10878 TREE_OPERAND (arg0, 1));
10881 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10882 if (operand_equal_p (arg0, arg1, 0))
10884 tree tem = fold_strip_sign_ops (arg0);
10885 if (tem != NULL_TREE)
10887 tem = fold_convert_loc (loc, type, tem);
10888 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10892 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10893 This is not the same for NaNs or if signed zeros are
10894 involved. */
10895 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10896 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10897 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10898 && TREE_CODE (arg1) == COMPLEX_CST
10899 && real_zerop (TREE_REALPART (arg1)))
10901 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10902 if (real_onep (TREE_IMAGPART (arg1)))
10903 return
10904 fold_build2_loc (loc, COMPLEX_EXPR, type,
10905 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10906 rtype, arg0)),
10907 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10908 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10909 return
10910 fold_build2_loc (loc, COMPLEX_EXPR, type,
10911 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10912 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10913 rtype, arg0)));
10916 /* Optimize z * conj(z) for floating point complex numbers.
10917 Guarded by flag_unsafe_math_optimizations as non-finite
10918 imaginary components don't produce scalar results. */
10919 if (flag_unsafe_math_optimizations
10920 && TREE_CODE (arg0) == CONJ_EXPR
10921 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10922 return fold_mult_zconjz (loc, type, arg1);
10923 if (flag_unsafe_math_optimizations
10924 && TREE_CODE (arg1) == CONJ_EXPR
10925 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10926 return fold_mult_zconjz (loc, type, arg0);
10928 if (flag_unsafe_math_optimizations)
10930 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10931 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10933 /* Optimizations of root(...)*root(...). */
10934 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10936 tree rootfn, arg;
10937 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10938 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10940 /* Optimize sqrt(x)*sqrt(x) as x. */
10941 if (BUILTIN_SQRT_P (fcode0)
10942 && operand_equal_p (arg00, arg10, 0)
10943 && ! HONOR_SNANS (TYPE_MODE (type)))
10944 return arg00;
10946 /* Optimize root(x)*root(y) as root(x*y). */
10947 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10948 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10949 return build_call_expr_loc (loc, rootfn, 1, arg);
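/* Illustrative example (editorial): under -funsafe-math-optimizations,
   "sqrt (a) * sqrt (b)" folds to "sqrt (a * b)", and "sqrt (a) *
   sqrt (a)" folds to plain "a" when SNaNs need not be honored.  */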
10952 /* Optimize expN(x)*expN(y) as expN(x+y). */
10953 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10955 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10956 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10957 CALL_EXPR_ARG (arg0, 0),
10958 CALL_EXPR_ARG (arg1, 0));
10959 return build_call_expr_loc (loc, expfn, 1, arg);
10962 /* Optimizations of pow(...)*pow(...). */
10963 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10964 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10965 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10967 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10968 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10969 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10970 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10972 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10973 if (operand_equal_p (arg01, arg11, 0))
10975 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10976 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10977 arg00, arg10);
10978 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10981 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10982 if (operand_equal_p (arg00, arg10, 0))
10984 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10985 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10986 arg01, arg11);
10987 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
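/* Illustrative example (editorial): "pow (x, 2.0) * pow (x, 3.0)"
   folds to "pow (x, 5.0)", and "pow (x, y) * pow (z, y)" folds to
   "pow (x * z, y)".  */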
10991 /* Optimize tan(x)*cos(x) as sin(x). */
10992 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10993 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10994 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10995 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10996 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10997 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10998 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10999 CALL_EXPR_ARG (arg1, 0), 0))
11001 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11003 if (sinfn != NULL_TREE)
11004 return build_call_expr_loc (loc, sinfn, 1,
11005 CALL_EXPR_ARG (arg0, 0));
11008 /* Optimize x*pow(x,c) as pow(x,c+1). */
11009 if (fcode1 == BUILT_IN_POW
11010 || fcode1 == BUILT_IN_POWF
11011 || fcode1 == BUILT_IN_POWL)
11013 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11014 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11015 if (TREE_CODE (arg11) == REAL_CST
11016 && !TREE_OVERFLOW (arg11)
11017 && operand_equal_p (arg0, arg10, 0))
11019 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11020 REAL_VALUE_TYPE c;
11021 tree arg;
11023 c = TREE_REAL_CST (arg11);
11024 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11025 arg = build_real (type, c);
11026 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11030 /* Optimize pow(x,c)*x as pow(x,c+1). */
11031 if (fcode0 == BUILT_IN_POW
11032 || fcode0 == BUILT_IN_POWF
11033 || fcode0 == BUILT_IN_POWL)
11035 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11036 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11037 if (TREE_CODE (arg01) == REAL_CST
11038 && !TREE_OVERFLOW (arg01)
11039 && operand_equal_p (arg1, arg00, 0))
11041 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11042 REAL_VALUE_TYPE c;
11043 tree arg;
11045 c = TREE_REAL_CST (arg01);
11046 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11047 arg = build_real (type, c);
11048 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11052 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11053 if (!in_gimple_form
11054 && optimize
11055 && operand_equal_p (arg0, arg1, 0))
11057 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11059 if (powfn)
11061 tree arg = build_real (type, dconst2);
11062 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
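/* Illustrative example (editorial): outside GIMPLE form, "x * x" is
   canonicalized to "pow (x, 2.0)"; RTL expansion turns that back into
   a single multiply, so no library call results.  */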
11067 goto associate;
11069 case BIT_IOR_EXPR:
11070 bit_ior:
11071 if (integer_all_onesp (arg1))
11072 return omit_one_operand_loc (loc, type, arg1, arg0);
11073 if (integer_zerop (arg1))
11074 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11075 if (operand_equal_p (arg0, arg1, 0))
11076 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11078 /* ~X | X is -1. */
11079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11082 t1 = build_zero_cst (type);
11083 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11084 return omit_one_operand_loc (loc, type, t1, arg1);
11087 /* X | ~X is -1. */
11088 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11091 t1 = build_zero_cst (type);
11092 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11093 return omit_one_operand_loc (loc, type, t1, arg0);
11096 /* Canonicalize (X & C1) | C2. */
11097 if (TREE_CODE (arg0) == BIT_AND_EXPR
11098 && TREE_CODE (arg1) == INTEGER_CST
11099 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11101 double_int c1, c2, c3, msk;
11102 int width = TYPE_PRECISION (type), w;
11103 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11104 c2 = tree_to_double_int (arg1);
11106 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11107 if ((c1 & c2) == c1)
11108 return omit_one_operand_loc (loc, type, arg1,
11109 TREE_OPERAND (arg0, 0));
11111 msk = double_int::mask (width);
11113 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11114 if (msk.and_not (c1 | c2).is_zero ())
11115 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11116 TREE_OPERAND (arg0, 0), arg1);
11118 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11119 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11120 mode which allows further optimizations. */
11121 c1 &= msk;
11122 c2 &= msk;
11123 c3 = c1.and_not (c2);
11124 for (w = BITS_PER_UNIT;
11125 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11126 w <<= 1)
11128 unsigned HOST_WIDE_INT mask
11129 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11130 if (((c1.low | c2.low) & mask) == mask
11131 && (c1.low & ~mask) == 0 && c1.high == 0)
11133 c3 = double_int::from_uhwi (mask);
11134 break;
11137 if (c3 != c1)
11138 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11139 fold_build2_loc (loc, BIT_AND_EXPR, type,
11140 TREE_OPERAND (arg0, 0),
11141 double_int_to_tree (type,
11142 c3)),
11143 arg1);
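/* Illustrative example (editorial, assuming 32-bit int): in
   "(x & 0x0f0f) | 0xff00", C1 & ~C2 == 0x000f, so the expression is
   canonicalized to "(x & 0x000f) | 0xff00".  */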
11146 /* (X & Y) | Y is (X, Y). */
11147 if (TREE_CODE (arg0) == BIT_AND_EXPR
11148 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11149 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11150 /* (X & Y) | X is (Y, X). */
11151 if (TREE_CODE (arg0) == BIT_AND_EXPR
11152 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11153 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11154 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11155 /* X | (X & Y) is (Y, X). */
11156 if (TREE_CODE (arg1) == BIT_AND_EXPR
11157 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11158 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11159 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11160 /* X | (Y & X) is (Y, X). */
11161 if (TREE_CODE (arg1) == BIT_AND_EXPR
11162 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11163 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11164 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11166 /* (X & ~Y) | (~X & Y) is X ^ Y */
11167 if (TREE_CODE (arg0) == BIT_AND_EXPR
11168 && TREE_CODE (arg1) == BIT_AND_EXPR)
11170 tree a0, a1, l0, l1, n0, n1;
11172 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11173 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11175 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11176 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11178 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11179 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11181 if ((operand_equal_p (n0, a0, 0)
11182 && operand_equal_p (n1, a1, 0))
11183 || (operand_equal_p (n0, a1, 0)
11184 && operand_equal_p (n1, a0, 0)))
11185 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
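/* Illustrative example (editorial): "(a & ~b) | (~a & b)" folds to
   the simpler "a ^ b".  */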
11188 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11189 if (t1 != NULL_TREE)
11190 return t1;
11192 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11194 This results in more efficient code for machines without a NAND
11195 instruction. Combine will canonicalize to the first form
11196 which will allow use of NAND instructions provided by the
11197 backend if they exist. */
11198 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11199 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11201 return
11202 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11203 build2 (BIT_AND_EXPR, type,
11204 fold_convert_loc (loc, type,
11205 TREE_OPERAND (arg0, 0)),
11206 fold_convert_loc (loc, type,
11207 TREE_OPERAND (arg1, 0))));
11210 /* See if this can be simplified into a rotate first. If that
11211 is unsuccessful, continue in the association code. */
11212 goto bit_rotate;
11214 case BIT_XOR_EXPR:
11215 if (integer_zerop (arg1))
11216 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11217 if (integer_all_onesp (arg1))
11218 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11219 if (operand_equal_p (arg0, arg1, 0))
11220 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11222 /* ~X ^ X is -1. */
11223 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11224 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11226 t1 = build_zero_cst (type);
11227 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11228 return omit_one_operand_loc (loc, type, t1, arg1);
11231 /* X ^ ~X is -1. */
11232 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11233 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11235 t1 = build_zero_cst (type);
11236 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11237 return omit_one_operand_loc (loc, type, t1, arg0);
11240 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11241 with a constant, and the two constants have no bits in common,
11242 we should treat this as a BIT_IOR_EXPR since this may produce more
11243 simplifications. */
11244 if (TREE_CODE (arg0) == BIT_AND_EXPR
11245 && TREE_CODE (arg1) == BIT_AND_EXPR
11246 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11247 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11248 && integer_zerop (const_binop (BIT_AND_EXPR,
11249 TREE_OPERAND (arg0, 1),
11250 TREE_OPERAND (arg1, 1))))
11252 code = BIT_IOR_EXPR;
11253 goto bit_ior;
11256 /* (X | Y) ^ X -> Y & ~X. */
11257 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11258 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11260 tree t2 = TREE_OPERAND (arg0, 1);
11261 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11262 arg1);
11263 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11264 fold_convert_loc (loc, type, t2),
11265 fold_convert_loc (loc, type, t1));
11266 return t1;
11269 /* (Y | X) ^ X -> Y & ~X. */
11270 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11271 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11273 tree t2 = TREE_OPERAND (arg0, 0);
11274 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11275 arg1);
11276 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11277 fold_convert_loc (loc, type, t2),
11278 fold_convert_loc (loc, type, t1));
11279 return t1;
11282 /* X ^ (X | Y) -> Y & ~X. */
11283 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11286 tree t2 = TREE_OPERAND (arg1, 1);
11287 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11288 arg0);
11289 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11290 fold_convert_loc (loc, type, t2),
11291 fold_convert_loc (loc, type, t1));
11292 return t1;
11295 /* X ^ (Y | X) -> Y & ~X. */
11296 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11299 tree t2 = TREE_OPERAND (arg1, 0);
11300 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11301 arg0);
11302 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11303 fold_convert_loc (loc, type, t2),
11304 fold_convert_loc (loc, type, t1));
11305 return t1;
11308 /* Convert ~X ^ ~Y to X ^ Y. */
11309 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11310 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11311 return fold_build2_loc (loc, code, type,
11312 fold_convert_loc (loc, type,
11313 TREE_OPERAND (arg0, 0)),
11314 fold_convert_loc (loc, type,
11315 TREE_OPERAND (arg1, 0)));
11317 /* Convert ~X ^ C to X ^ ~C. */
11318 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11319 && TREE_CODE (arg1) == INTEGER_CST)
11320 return fold_build2_loc (loc, code, type,
11321 fold_convert_loc (loc, type,
11322 TREE_OPERAND (arg0, 0)),
11323 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11325 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11326 if (TREE_CODE (arg0) == BIT_AND_EXPR
11327 && integer_onep (TREE_OPERAND (arg0, 1))
11328 && integer_onep (arg1))
11329 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11330 build_zero_cst (TREE_TYPE (arg0)));
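/* Illustrative example (editorial): "(x & 1) ^ 1" folds to
   "(x & 1) == 0", turning the XOR into a low-bit test.  */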
11332 /* Fold (X & Y) ^ Y as ~X & Y. */
11333 if (TREE_CODE (arg0) == BIT_AND_EXPR
11334 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11336 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11337 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11338 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11339 fold_convert_loc (loc, type, arg1));
11341 /* Fold (X & Y) ^ X as ~Y & X. */
11342 if (TREE_CODE (arg0) == BIT_AND_EXPR
11343 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11344 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11346 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11347 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11348 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11349 fold_convert_loc (loc, type, arg1));
11351 /* Fold X ^ (X & Y) as X & ~Y. */
11352 if (TREE_CODE (arg1) == BIT_AND_EXPR
11353 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11355 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11356 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11357 fold_convert_loc (loc, type, arg0),
11358 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11360 /* Fold X ^ (Y & X) as ~Y & X. */
11361 if (TREE_CODE (arg1) == BIT_AND_EXPR
11362 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11363 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11365 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11366 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11367 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11368 fold_convert_loc (loc, type, arg0));
11371 /* See if this can be simplified into a rotate first. If that
11372 is unsuccessful, continue in the association code. */
11373 goto bit_rotate;
11375 case BIT_AND_EXPR:
11376 if (integer_all_onesp (arg1))
11377 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11378 if (integer_zerop (arg1))
11379 return omit_one_operand_loc (loc, type, arg1, arg0);
11380 if (operand_equal_p (arg0, arg1, 0))
11381 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11383 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11384 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11385 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11386 || (TREE_CODE (arg0) == EQ_EXPR
11387 && integer_zerop (TREE_OPERAND (arg0, 1))))
11388 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11389 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11391 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11392 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11393 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11394 || (TREE_CODE (arg1) == EQ_EXPR
11395 && integer_zerop (TREE_OPERAND (arg1, 1))))
11396 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11397 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11399 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11400 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11401 && TREE_CODE (arg1) == INTEGER_CST
11402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11404 tree tmp1 = fold_convert_loc (loc, type, arg1);
11405 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11406 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11407 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11408 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11409 return
11410 fold_convert_loc (loc, type,
11411 fold_build2_loc (loc, BIT_IOR_EXPR,
11412 type, tmp2, tmp3));
11415 /* (X | Y) & Y is (X, Y). */
11416 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11417 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11418 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11419 /* (X | Y) & X is (Y, X). */
11420 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11421 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11422 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11423 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11424 /* X & (X | Y) is (Y, X). */
11425 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11426 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11427 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11428 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11429 /* X & (Y | X) is (Y, X). */
11430 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11431 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11432 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11433 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11435 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11436 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11437 && integer_onep (TREE_OPERAND (arg0, 1))
11438 && integer_onep (arg1))
11440 tree tem2;
11441 tem = TREE_OPERAND (arg0, 0);
11442 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11443 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11444 tem, tem2);
11445 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11446 build_zero_cst (TREE_TYPE (tem)));
11448 /* Fold ~X & 1 as (X & 1) == 0. */
11449 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11450 && integer_onep (arg1))
11452 tree tem2;
11453 tem = TREE_OPERAND (arg0, 0);
11454 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11455 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11456 tem, tem2);
11457 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11458 build_zero_cst (TREE_TYPE (tem)));
11460 /* Fold !X & 1 as X == 0. */
11461 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11462 && integer_onep (arg1))
11464 tem = TREE_OPERAND (arg0, 0);
11465 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11466 build_zero_cst (TREE_TYPE (tem)));
11469 /* Fold (X ^ Y) & Y as ~X & Y. */
11470 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11471 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11473 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11474 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11475 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11476 fold_convert_loc (loc, type, arg1));
11478 /* Fold (X ^ Y) & X as ~Y & X. */
11479 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11481 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11483 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11484 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11485 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11486 fold_convert_loc (loc, type, arg1));
11488 /* Fold X & (X ^ Y) as X & ~Y. */
11489 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11490 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11492 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11493 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11494 fold_convert_loc (loc, type, arg0),
11495 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11497 /* Fold X & (Y ^ X) as ~Y & X. */
11498 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11499 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11500 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11502 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11503 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11504 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11505 fold_convert_loc (loc, type, arg0));
11508 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11509 multiple of 1 << CST. */
11510 if (TREE_CODE (arg1) == INTEGER_CST)
11512 double_int cst1 = tree_to_double_int (arg1);
11513 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11514 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11515 if ((cst1 & ncst1) == ncst1
11516 && multiple_of_p (type, arg0,
11517 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11518 return fold_convert_loc (loc, type, arg0);
11521 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11522 bits from CST2. */
11523 if (TREE_CODE (arg1) == INTEGER_CST
11524 && TREE_CODE (arg0) == MULT_EXPR
11525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11527 int arg1tz
11528 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11529 if (arg1tz > 0)
11531 double_int arg1mask, masked;
11532 arg1mask = ~double_int::mask (arg1tz);
11533 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11534 TYPE_UNSIGNED (type));
11535 masked = arg1mask & tree_to_double_int (arg1);
11536 if (masked.is_zero ())
11537 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11538 arg0, arg1);
11539 else if (masked != tree_to_double_int (arg1))
11540 return fold_build2_loc (loc, code, type, op0,
11541 double_int_to_tree (type, masked));
11545 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11546 ((A & N) + B) & M -> (A + B) & M
11547 Similarly if (N & M) == 0,
11548 ((A | N) + B) & M -> (A + B) & M
11549 and for - instead of + (or unary - instead of +)
11550 and/or ^ instead of |.
11551 If B is constant and (B & M) == 0, fold into A & M. */
11552 if (host_integerp (arg1, 1))
11554 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11555 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11556 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11557 && (TREE_CODE (arg0) == PLUS_EXPR
11558 || TREE_CODE (arg0) == MINUS_EXPR
11559 || TREE_CODE (arg0) == NEGATE_EXPR)
11560 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11561 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11563 tree pmop[2];
11564 int which = 0;
11565 unsigned HOST_WIDE_INT cst0;
11567 /* Now we know that arg0 is (C + D) or (C - D) or
11568 -C and arg1 (M) equals (1LL << cst) - 1.
11569 Store C into PMOP[0] and D into PMOP[1]. */
11570 pmop[0] = TREE_OPERAND (arg0, 0);
11571 pmop[1] = NULL;
11572 if (TREE_CODE (arg0) != NEGATE_EXPR)
11574 pmop[1] = TREE_OPERAND (arg0, 1);
11575 which = 1;
11578 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11579 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11580 & cst1) != cst1)
11581 which = -1;
11583 for (; which >= 0; which--)
11584 switch (TREE_CODE (pmop[which]))
11586 case BIT_AND_EXPR:
11587 case BIT_IOR_EXPR:
11588 case BIT_XOR_EXPR:
11589 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11590 != INTEGER_CST)
11591 break;
11592 /* tree_low_cst not used, because we don't care about
11593 the upper bits. */
11594 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11595 cst0 &= cst1;
11596 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11598 if (cst0 != cst1)
11599 break;
11601 else if (cst0 != 0)
11602 break;
11603 /* If C or D is of the form (A & N) where
11604 (N & M) == M, or of the form (A | N) or
11605 (A ^ N) where (N & M) == 0, replace it with A. */
11606 pmop[which] = TREE_OPERAND (pmop[which], 0);
11607 break;
11608 case INTEGER_CST:
11609 /* If C or D is a constant N where (N & M) == 0, it can be
11610 omitted (assumed 0). */
11611 if ((TREE_CODE (arg0) == PLUS_EXPR
11612 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11613 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11614 pmop[which] = NULL;
11615 break;
11616 default:
11617 break;
11620 /* Only build anything new if we optimized one or both arguments
11621 above. */
11622 if (pmop[0] != TREE_OPERAND (arg0, 0)
11623 || (TREE_CODE (arg0) != NEGATE_EXPR
11624 && pmop[1] != TREE_OPERAND (arg0, 1)))
11626 tree utype = TREE_TYPE (arg0);
11627 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11629 /* Perform the operations in a type that has defined
11630 overflow behavior. */
11631 utype = unsigned_type_for (TREE_TYPE (arg0));
11632 if (pmop[0] != NULL)
11633 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11634 if (pmop[1] != NULL)
11635 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11638 if (TREE_CODE (arg0) == NEGATE_EXPR)
11639 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11640 else if (TREE_CODE (arg0) == PLUS_EXPR)
11642 if (pmop[0] != NULL && pmop[1] != NULL)
11643 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11644 pmop[0], pmop[1]);
11645 else if (pmop[0] != NULL)
11646 tem = pmop[0];
11647 else if (pmop[1] != NULL)
11648 tem = pmop[1];
11649 else
11650 return build_int_cst (type, 0);
11652 else if (pmop[0] == NULL)
11653 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11654 else
11655 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11656 pmop[0], pmop[1]);
11657 /* TEM is now the new binary +, - or unary - replacement. */
11658 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11659 fold_convert_loc (loc, utype, arg1));
11660 return fold_convert_loc (loc, type, tem);
11665 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11666 if (t1 != NULL_TREE)
11667 return t1;
11668 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11669 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11670 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11672 unsigned int prec
11673 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11675 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11676 && (~TREE_INT_CST_LOW (arg1)
11677 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11678 return
11679 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
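/* Illustrative example (editorial): for "unsigned char c", the
   expression "(int) c & 0377" folds to "(int) c", since the zero
   extension already clears the bits above the low eight.  */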
11682 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11684 This results in more efficient code for machines without a NOR
11685 instruction. Combine will canonicalize to the first form
11686 which will allow use of NOR instructions provided by the
11687 backend if they exist. */
11688 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11689 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11691 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11692 build2 (BIT_IOR_EXPR, type,
11693 fold_convert_loc (loc, type,
11694 TREE_OPERAND (arg0, 0)),
11695 fold_convert_loc (loc, type,
11696 TREE_OPERAND (arg1, 0))));
11699 /* If arg0 is derived from the address of an object or function, we may
11700 be able to fold this expression using the object or function's
11701 alignment. */
11702 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11704 unsigned HOST_WIDE_INT modulus, residue;
11705 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11707 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11708 integer_onep (arg1));
11710 /* This works because modulus is a power of 2. If this weren't the
11711 case, we'd have to replace it by its greatest power-of-2
11712 divisor: modulus & -modulus. */
11713 if (low < modulus)
11714 return build_int_cst (type, residue & low);
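/* Illustrative example (editorial): if "obj" is known to be 8-byte
   aligned, "(intptr_t) &obj & 7" folds to the constant 0.  */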
11717 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11718 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11719 if the new mask might be further optimized. */
11720 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11721 || TREE_CODE (arg0) == RSHIFT_EXPR)
11722 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11723 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11724 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11725 < TYPE_PRECISION (TREE_TYPE (arg0))
11726 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11727 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11729 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11730 unsigned HOST_WIDE_INT mask
11731 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11732 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11733 tree shift_type = TREE_TYPE (arg0);
11735 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11736 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11737 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11738 && TYPE_PRECISION (TREE_TYPE (arg0))
11739 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11741 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11742 tree arg00 = TREE_OPERAND (arg0, 0);
11743 /* See if more bits can be proven as zero because of
11744 zero extension. */
11745 if (TREE_CODE (arg00) == NOP_EXPR
11746 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11748 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11749 if (TYPE_PRECISION (inner_type)
11750 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11751 && TYPE_PRECISION (inner_type) < prec)
11753 prec = TYPE_PRECISION (inner_type);
11754 /* See if we can shorten the right shift. */
11755 if (shiftc < prec)
11756 shift_type = inner_type;
11759 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11760 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11761 zerobits <<= prec - shiftc;
11762 /* For an arithmetic shift, if the sign bit could be set, zerobits
11763 can actually contain sign bits, so no transformation is
11764 possible unless MASK masks them all away. In that
11765 case the shift needs to be converted into a logical shift. */
11766 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11767 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11769 if ((mask & zerobits) == 0)
11770 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11771 else
11772 zerobits = 0;
11776 /* ((X << 16) & 0xff00) is (X, 0). */
11777 if ((mask & zerobits) == mask)
11778 return omit_one_operand_loc (loc, type,
11779 build_int_cst (type, 0), arg0);
11781 newmask = mask | zerobits;
11782 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11784 unsigned int prec;
11786 /* Only do the transformation if NEWMASK is some integer
11787 mode's mask. */
11788 for (prec = BITS_PER_UNIT;
11789 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11790 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11791 break;
11792 if (prec < HOST_BITS_PER_WIDE_INT
11793 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11795 tree newmaskt;
11797 if (shift_type != TREE_TYPE (arg0))
11799 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11800 fold_convert_loc (loc, shift_type,
11801 TREE_OPERAND (arg0, 0)),
11802 TREE_OPERAND (arg0, 1));
11803 tem = fold_convert_loc (loc, type, tem);
11805 else
11806 tem = op0;
11807 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11808 if (!tree_int_cst_equal (newmaskt, arg1))
11809 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11814 goto associate;
11816 case RDIV_EXPR:
11817 /* Don't touch a floating-point divide by zero unless the mode
11818 of the constant can represent infinity. */
11819 if (TREE_CODE (arg1) == REAL_CST
11820 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11821 && real_zerop (arg1))
11822 return NULL_TREE;
11824 /* Optimize A / A to 1.0 if we don't care about
11825 NaNs or Infinities. Skip the transformation
11826 for non-real operands. */
11827 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11828 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11829 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11830 && operand_equal_p (arg0, arg1, 0))
11832 tree r = build_real (TREE_TYPE (arg0), dconst1);
11834 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11837 /* The complex version of the above A / A optimization. */
11838 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11839 && operand_equal_p (arg0, arg1, 0))
11841 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11842 if (! HONOR_NANS (TYPE_MODE (elem_type))
11843 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11845 tree r = build_real (elem_type, dconst1);
11846 /* omit_two_operands will call fold_convert for us. */
11847 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11851 /* (-A) / (-B) -> A / B */
11852 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11853 return fold_build2_loc (loc, RDIV_EXPR, type,
11854 TREE_OPERAND (arg0, 0),
11855 negate_expr (arg1));
11856 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11857 return fold_build2_loc (loc, RDIV_EXPR, type,
11858 negate_expr (arg0),
11859 TREE_OPERAND (arg1, 0));
11861 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11862 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11863 && real_onep (arg1))
11864 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11866 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11867 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11868 && real_minus_onep (arg1))
11869 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11870 negate_expr (arg0)));
11872 /* If ARG1 is a constant, we can convert this to a multiply by the
11873 reciprocal. This does not have the same rounding properties,
11874 so only do this if -freciprocal-math. We can actually
11875 always safely do it if ARG1 is a power of two, but it's hard to
11876 tell if it is or not in a portable manner. */
11877 if (optimize
11878 && (TREE_CODE (arg1) == REAL_CST
11879 || (TREE_CODE (arg1) == COMPLEX_CST
11880 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11881 || (TREE_CODE (arg1) == VECTOR_CST
11882 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11884 if (flag_reciprocal_math
11885 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11886 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11887 /* Find the reciprocal if optimizing and the result is exact.
11888 TODO: Complex reciprocal not implemented. */
11889 if (TREE_CODE (arg1) != COMPLEX_CST)
11891 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11893 if (inverse)
11894 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11897 /* Convert A/B/C to A/(B*C). */
11898 if (flag_reciprocal_math
11899 && TREE_CODE (arg0) == RDIV_EXPR)
11900 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11901 fold_build2_loc (loc, MULT_EXPR, type,
11902 TREE_OPERAND (arg0, 1), arg1));
11904 /* Convert A/(B/C) to (A/B)*C. */
11905 if (flag_reciprocal_math
11906 && TREE_CODE (arg1) == RDIV_EXPR)
11907 return fold_build2_loc (loc, MULT_EXPR, type,
11908 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11909 TREE_OPERAND (arg1, 0)),
11910 TREE_OPERAND (arg1, 1));
11912 /* Convert C1/(X*C2) into (C1/C2)/X. */
11913 if (flag_reciprocal_math
11914 && TREE_CODE (arg1) == MULT_EXPR
11915 && TREE_CODE (arg0) == REAL_CST
11916 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11918 tree tem = const_binop (RDIV_EXPR, arg0,
11919 TREE_OPERAND (arg1, 1));
11920 if (tem)
11921 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11922 TREE_OPERAND (arg1, 0));
11925 if (flag_unsafe_math_optimizations)
11927 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11928 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11930 /* Optimize sin(x)/cos(x) as tan(x). */
11931 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11932 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11933 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11934 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11935 CALL_EXPR_ARG (arg1, 0), 0))
11937 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11939 if (tanfn != NULL_TREE)
11940 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
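/* Illustrative example (editorial): under -funsafe-math-optimizations,
   "sin (x) / cos (x)" folds to "tan (x)".  */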
11943 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11944 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11945 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11946 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11947 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11948 CALL_EXPR_ARG (arg1, 0), 0))
11950 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11952 if (tanfn != NULL_TREE)
11954 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11955 CALL_EXPR_ARG (arg0, 0));
11956 return fold_build2_loc (loc, RDIV_EXPR, type,
11957 build_real (type, dconst1), tmp);
11961 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11962 NaNs or Infinities. */
11963 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11964 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11965 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11967 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11968 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11970 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11971 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11972 && operand_equal_p (arg00, arg01, 0))
11974 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11976 if (cosfn != NULL_TREE)
11977 return build_call_expr_loc (loc, cosfn, 1, arg00);
11981 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11982 NaNs or Infinities. */
11983 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11984 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11985 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11987 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11988 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11990 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11991 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11992 && operand_equal_p (arg00, arg01, 0))
11994 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11996 if (cosfn != NULL_TREE)
11998 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11999 return fold_build2_loc (loc, RDIV_EXPR, type,
12000 build_real (type, dconst1),
12001 tmp);
12006 /* Optimize pow(x,c)/x as pow(x,c-1). */
12007 if (fcode0 == BUILT_IN_POW
12008 || fcode0 == BUILT_IN_POWF
12009 || fcode0 == BUILT_IN_POWL)
12011 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12012 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12013 if (TREE_CODE (arg01) == REAL_CST
12014 && !TREE_OVERFLOW (arg01)
12015 && operand_equal_p (arg1, arg00, 0))
12017 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12018 REAL_VALUE_TYPE c;
12019 tree arg;
12021 c = TREE_REAL_CST (arg01);
12022 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12023 arg = build_real (type, c);
12024 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12028 /* Optimize a/root(b/c) into a*root(c/b). */
12029 if (BUILTIN_ROOT_P (fcode1))
12031 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12033 if (TREE_CODE (rootarg) == RDIV_EXPR)
12035 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12036 tree b = TREE_OPERAND (rootarg, 0);
12037 tree c = TREE_OPERAND (rootarg, 1);
12039 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12041 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12042 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12046 /* Optimize x/expN(y) into x*expN(-y). */
12047 if (BUILTIN_EXPONENT_P (fcode1))
12049 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12050 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12051 arg1 = build_call_expr_loc (loc,
12052 expfn, 1,
12053 fold_convert_loc (loc, type, arg));
12054 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12057 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12058 if (fcode1 == BUILT_IN_POW
12059 || fcode1 == BUILT_IN_POWF
12060 || fcode1 == BUILT_IN_POWL)
12062 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12063 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12064 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12065 tree neg11 = fold_convert_loc (loc, type,
12066 negate_expr (arg11));
12067 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12068 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12071 return NULL_TREE;
12073 case TRUNC_DIV_EXPR:
12074 /* Optimize (X & (-A)) / A where A is a power of 2,
12075 to X >> log2(A) */
12076 if (TREE_CODE (arg0) == BIT_AND_EXPR
12077 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12078 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12080 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12081 arg1, TREE_OPERAND (arg0, 1));
12082 if (sum && integer_zerop (sum)) {
12083 unsigned long pow2;
12085 if (TREE_INT_CST_LOW (arg1))
12086 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12087 else
12088 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12089 + HOST_BITS_PER_WIDE_INT;
12091 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12092 TREE_OPERAND (arg0, 0),
12093 build_int_cst (integer_type_node, pow2));
12097 /* Fall through */
12099 case FLOOR_DIV_EXPR:
12100 /* Simplify A / (B << N) where A and B are positive and B is
12101 a power of 2, to A >> (N + log2(B)). */
12102 strict_overflow_p = false;
12103 if (TREE_CODE (arg1) == LSHIFT_EXPR
12104 && (TYPE_UNSIGNED (type)
12105 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12107 tree sval = TREE_OPERAND (arg1, 0);
12108 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12110 tree sh_cnt = TREE_OPERAND (arg1, 1);
12111 unsigned long pow2;
12113 if (TREE_INT_CST_LOW (sval))
12114 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12115 else
12116 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12117 + HOST_BITS_PER_WIDE_INT;
12119 if (strict_overflow_p)
12120 fold_overflow_warning (("assuming signed overflow does not "
12121 "occur when simplifying A / (B << N)"),
12122 WARN_STRICT_OVERFLOW_MISC);
12124 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12125 sh_cnt,
12126 build_int_cst (TREE_TYPE (sh_cnt),
12127 pow2));
12128 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12129 fold_convert_loc (loc, type, arg0), sh_cnt);
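/* Illustrative example (editorial): for "unsigned a", the division
   "a / (4u << n)" folds to "a >> (n + 2)", since 4 == 1 << 2.  */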
12133 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12134 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12135 if (INTEGRAL_TYPE_P (type)
12136 && TYPE_UNSIGNED (type)
12137 && code == FLOOR_DIV_EXPR)
12138 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12140 /* Fall through */
12142 case ROUND_DIV_EXPR:
12143 case CEIL_DIV_EXPR:
12144 case EXACT_DIV_EXPR:
12145 if (integer_onep (arg1))
12146 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12147 if (integer_zerop (arg1))
12148 return NULL_TREE;
12149 /* X / -1 is -X. */
12150 if (!TYPE_UNSIGNED (type)
12151 && TREE_CODE (arg1) == INTEGER_CST
12152 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12153 && TREE_INT_CST_HIGH (arg1) == -1)
12154 return fold_convert_loc (loc, type, negate_expr (arg0));
12156 /* Convert -A / -B to A / B when the type is signed and overflow is
12157 undefined. */
12158 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12159 && TREE_CODE (arg0) == NEGATE_EXPR
12160 && negate_expr_p (arg1))
12162 if (INTEGRAL_TYPE_P (type))
12163 fold_overflow_warning (("assuming signed overflow does not occur "
12164 "when distributing negation across "
12165 "division"),
12166 WARN_STRICT_OVERFLOW_MISC);
12167 return fold_build2_loc (loc, code, type,
12168 fold_convert_loc (loc, type,
12169 TREE_OPERAND (arg0, 0)),
12170 fold_convert_loc (loc, type,
12171 negate_expr (arg1)));
12173 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12174 && TREE_CODE (arg1) == NEGATE_EXPR
12175 && negate_expr_p (arg0))
12177 if (INTEGRAL_TYPE_P (type))
12178 fold_overflow_warning (("assuming signed overflow does not occur "
12179 "when distributing negation across "
12180 "division"),
12181 WARN_STRICT_OVERFLOW_MISC);
12182 return fold_build2_loc (loc, code, type,
12183 fold_convert_loc (loc, type,
12184 negate_expr (arg0)),
12185 fold_convert_loc (loc, type,
12186 TREE_OPERAND (arg1, 0)));
12189 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12190 operation, EXACT_DIV_EXPR.
12192 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12193 At one time others generated faster code, but it's not clear whether
12194 they do after the last round of changes to the DIV code in expmed.c. */
12195 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12196 && multiple_of_p (type, arg0, arg1))
12197 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12199 strict_overflow_p = false;
12200 if (TREE_CODE (arg1) == INTEGER_CST
12201 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12202 &strict_overflow_p)))
12204 if (strict_overflow_p)
12205 fold_overflow_warning (("assuming signed overflow does not occur "
12206 "when simplifying division"),
12207 WARN_STRICT_OVERFLOW_MISC);
12208 return fold_convert_loc (loc, type, tem);
12211 return NULL_TREE;
12213 case CEIL_MOD_EXPR:
12214 case FLOOR_MOD_EXPR:
12215 case ROUND_MOD_EXPR:
12216 case TRUNC_MOD_EXPR:
12217 /* X % 1 is always zero, but be sure to preserve any side
12218 effects in X. */
12219 if (integer_onep (arg1))
12220 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12222 /* For X % 0, return X % 0 unchanged so that we can get the
12223 proper warnings and errors. */
12224 if (integer_zerop (arg1))
12225 return NULL_TREE;
12227 /* 0 % X is always zero, but be sure to preserve any side
12228 effects in X. Place this after checking for X == 0. */
12229 if (integer_zerop (arg0))
12230 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12232 /* X % -1 is zero. */
12233 if (!TYPE_UNSIGNED (type)
12234 && TREE_CODE (arg1) == INTEGER_CST
12235 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12236 && TREE_INT_CST_HIGH (arg1) == -1)
12237 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12239 /* X % -C is the same as X % C. */
12240 if (code == TRUNC_MOD_EXPR
12241 && !TYPE_UNSIGNED (type)
12242 && TREE_CODE (arg1) == INTEGER_CST
12243 && !TREE_OVERFLOW (arg1)
12244 && TREE_INT_CST_HIGH (arg1) < 0
12245 && !TYPE_OVERFLOW_TRAPS (type)
12246 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12247 && !sign_bit_p (arg1, arg1))
12248 return fold_build2_loc (loc, code, type,
12249 fold_convert_loc (loc, type, arg0),
12250 fold_convert_loc (loc, type,
12251 negate_expr (arg1)));
12253 /* X % -Y is the same as X % Y. */
12254 if (code == TRUNC_MOD_EXPR
12255 && !TYPE_UNSIGNED (type)
12256 && TREE_CODE (arg1) == NEGATE_EXPR
12257 && !TYPE_OVERFLOW_TRAPS (type))
12258 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12259 fold_convert_loc (loc, type,
12260 TREE_OPERAND (arg1, 0)));
12262 strict_overflow_p = false;
12263 if (TREE_CODE (arg1) == INTEGER_CST
12264 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12265 &strict_overflow_p)))
12267 if (strict_overflow_p)
12268 fold_overflow_warning (("assuming signed overflow does not occur "
12269 "when simplifying modulus"),
12270 WARN_STRICT_OVERFLOW_MISC);
12271 return fold_convert_loc (loc, type, tem);
12274 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12275 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12276 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12277 && (TYPE_UNSIGNED (type)
12278 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12280 tree c = arg1;
12281 /* Also optimize A % (C << N) where C is a power of 2,
12282 to A & ((C << N) - 1). */
12283 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12284 c = TREE_OPERAND (arg1, 0);
12286 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12288 tree mask
12289 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12290 build_int_cst (TREE_TYPE (arg1), 1));
12291 if (strict_overflow_p)
12292 fold_overflow_warning (("assuming signed overflow does not "
12293 "occur when simplifying "
12294 "X % (power of two)"),
12295 WARN_STRICT_OVERFLOW_MISC);
12296 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12297 fold_convert_loc (loc, type, arg0),
12298 fold_convert_loc (loc, type, mask));
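/* Example (editorial annotation, not in the GCC source): for unsigned
   or provably non-negative x, the fold above gives
       x % 8         ->  x & 7
       x % (4 << n)  ->  x & ((4 << n) - 1)
   replacing a division-class operation with a single AND.  */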
12302 return NULL_TREE;
12304 case LROTATE_EXPR:
12305 case RROTATE_EXPR:
12306 if (integer_all_onesp (arg0))
12307 return omit_one_operand_loc (loc, type, arg0, arg1);
12308 goto shift;
12310 case RSHIFT_EXPR:
12311 /* Optimize -1 >> x for arithmetic right shifts. */
12312 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12313 && tree_expr_nonnegative_p (arg1))
12314 return omit_one_operand_loc (loc, type, arg0, arg1);
12315 /* ... fall through ... */
12317 case LSHIFT_EXPR:
12318 shift:
12319 if (integer_zerop (arg1))
12320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12321 if (integer_zerop (arg0))
12322 return omit_one_operand_loc (loc, type, arg0, arg1);
12324 /* Since negative shift count is not well-defined,
12325 don't try to compute it in the compiler. */
12326 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12327 return NULL_TREE;
12329 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12330 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12331 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12332 && host_integerp (TREE_OPERAND (arg0, 1), false)
12333 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12335 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12336 + TREE_INT_CST_LOW (arg1));
12338 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12339 being well defined. */
12340 if (low >= TYPE_PRECISION (type))
12342 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12343 low = low % TYPE_PRECISION (type);
12344 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12345 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12346 TREE_OPERAND (arg0, 0));
12347 else
12348 low = TYPE_PRECISION (type) - 1;
12351 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12352 build_int_cst (type, low));
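/* Example (editorial annotation, not in the GCC source), assuming a
   32-bit type: (x << 3) << 4 combines to x << 7.  If the summed count
   reaches the precision, an unsigned or left shift folds to 0, a signed
   right shift is clamped to precision - 1, and rotate counts wrap
   modulo the precision.  */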
12355 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12356 into x & ((unsigned)-1 >> c) for unsigned types. */
12357 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12358 || (TYPE_UNSIGNED (type)
12359 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12360 && host_integerp (arg1, false)
12361 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12362 && host_integerp (TREE_OPERAND (arg0, 1), false)
12363 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12365 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12366 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12367 tree lshift;
12368 tree arg00;
12370 if (low0 == low1)
12372 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12374 lshift = build_int_cst (type, -1);
12375 lshift = int_const_binop (code, lshift, arg1);
12377 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12381 /* Rewrite an LROTATE_EXPR by a constant into an
12382 RROTATE_EXPR by a new constant. */
12383 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12385 tree tem = build_int_cst (TREE_TYPE (arg1),
12386 TYPE_PRECISION (type));
12387 tem = const_binop (MINUS_EXPR, tem, arg1);
12388 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
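/* Example (editorial annotation, not in the GCC source), assuming a
   32-bit type: a rotate-left by 8 becomes a rotate-right by 24, so
   later passes only ever see one rotate direction for constant
   counts.  */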
12391 /* If we have a rotate of a bit operation with the rotate count and
12392 the second operand of the bit operation both constant,
12393 permute the two operations. */
12394 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12395 && (TREE_CODE (arg0) == BIT_AND_EXPR
12396 || TREE_CODE (arg0) == BIT_IOR_EXPR
12397 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12399 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12400 fold_build2_loc (loc, code, type,
12401 TREE_OPERAND (arg0, 0), arg1),
12402 fold_build2_loc (loc, code, type,
12403 TREE_OPERAND (arg0, 1), arg1));
12405 /* Two consecutive rotates adding up to the precision of the
12406 type can be ignored. */
12407 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12408 && TREE_CODE (arg0) == RROTATE_EXPR
12409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12410 && TREE_INT_CST_HIGH (arg1) == 0
12411 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12412 && ((TREE_INT_CST_LOW (arg1)
12413 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12414 == (unsigned int) TYPE_PRECISION (type)))
12415 return TREE_OPERAND (arg0, 0);
12417 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12418 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12419 if the latter can be further optimized. */
12420 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12421 && TREE_CODE (arg0) == BIT_AND_EXPR
12422 && TREE_CODE (arg1) == INTEGER_CST
12423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12425 tree mask = fold_build2_loc (loc, code, type,
12426 fold_convert_loc (loc, type,
12427 TREE_OPERAND (arg0, 1)),
12428 arg1);
12429 tree shift = fold_build2_loc (loc, code, type,
12430 fold_convert_loc (loc, type,
12431 TREE_OPERAND (arg0, 0)),
12432 arg1);
12433 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12434 if (tem)
12435 return tem;
12438 return NULL_TREE;
12440 case MIN_EXPR:
12441 if (operand_equal_p (arg0, arg1, 0))
12442 return omit_one_operand_loc (loc, type, arg0, arg1);
12443 if (INTEGRAL_TYPE_P (type)
12444 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12445 return omit_one_operand_loc (loc, type, arg1, arg0);
12446 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12447 if (tem)
12448 return tem;
12449 goto associate;
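/* Example (editorial annotation, not in the GCC source): the MIN_EXPR
   folds above give MIN (x, x) -> x and, for int, MIN (x, INT_MIN) ->
   INT_MIN, preserving any side effects of the discarded operand.  */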
12451 case MAX_EXPR:
12452 if (operand_equal_p (arg0, arg1, 0))
12453 return omit_one_operand_loc (loc, type, arg0, arg1);
12454 if (INTEGRAL_TYPE_P (type)
12455 && TYPE_MAX_VALUE (type)
12456 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12457 return omit_one_operand_loc (loc, type, arg1, arg0);
12458 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12459 if (tem)
12460 return tem;
12461 goto associate;
12463 case TRUTH_ANDIF_EXPR:
12464 /* Note that the operands of this must be ints
12465 and their values must be 0 or 1.
12466 ("true" is a fixed value perhaps depending on the language.) */
12467 /* If first arg is constant zero, return it. */
12468 if (integer_zerop (arg0))
12469 return fold_convert_loc (loc, type, arg0);
12470 case TRUTH_AND_EXPR:
12471 /* If either arg is constant true, drop it. */
12472 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12473 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12474 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12475 /* Preserve sequence points. */
12476 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12477 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12478 /* If second arg is constant zero, result is zero, but first arg
12479 must be evaluated. */
12480 if (integer_zerop (arg1))
12481 return omit_one_operand_loc (loc, type, arg1, arg0);
12482 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12483 case will be handled here. */
12484 if (integer_zerop (arg0))
12485 return omit_one_operand_loc (loc, type, arg0, arg1);
12487 /* !X && X is always false. */
12488 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12489 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12490 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12491 /* X && !X is always false. */
12492 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12493 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12494 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12496 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12497 means A >= Y && A != MAX, but in this case we know that
12498 A < X <= MAX. */
12500 if (!TREE_SIDE_EFFECTS (arg0)
12501 && !TREE_SIDE_EFFECTS (arg1))
12503 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12504 if (tem && !operand_equal_p (tem, arg0, 0))
12505 return fold_build2_loc (loc, code, type, tem, arg1);
12507 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12508 if (tem && !operand_equal_p (tem, arg1, 0))
12509 return fold_build2_loc (loc, code, type, arg0, tem);
12512 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12513 != NULL_TREE)
12514 return tem;
12516 return NULL_TREE;
12518 case TRUTH_ORIF_EXPR:
12519 /* Note that the operands of this must be ints
12520 and their values must be 0 or true.
12521 ("true" is a fixed value perhaps depending on the language.) */
12522 /* If first arg is constant true, return it. */
12523 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12524 return fold_convert_loc (loc, type, arg0);
12525 case TRUTH_OR_EXPR:
12526 /* If either arg is constant zero, drop it. */
12527 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12528 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12529 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12530 /* Preserve sequence points. */
12531 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12532 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12533 /* If second arg is constant true, result is true, but we must
12534 evaluate first arg. */
12535 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12536 return omit_one_operand_loc (loc, type, arg1, arg0);
12537 /* Likewise for first arg, but note this only occurs here for
12538 TRUTH_OR_EXPR. */
12539 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12540 return omit_one_operand_loc (loc, type, arg0, arg1);
12542 /* !X || X is always true. */
12543 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12545 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12546 /* X || !X is always true. */
12547 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12548 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12549 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12551 /* (X && !Y) || (!X && Y) is X ^ Y */
12552 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12553 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12555 tree a0, a1, l0, l1, n0, n1;
12557 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12558 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12560 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12561 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12563 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12564 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12566 if ((operand_equal_p (n0, a0, 0)
12567 && operand_equal_p (n1, a1, 0))
12568 || (operand_equal_p (n0, a1, 0)
12569 && operand_equal_p (n1, a0, 0)))
12570 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
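/* Example (editorial annotation, not in the GCC source): the block
   above matches (x && !y) || (!x && y), in either operand order, and
   folds it to the single test x ^ y.  */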
12573 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12574 != NULL_TREE)
12575 return tem;
12577 return NULL_TREE;
12579 case TRUTH_XOR_EXPR:
12580 /* If the second arg is constant zero, drop it. */
12581 if (integer_zerop (arg1))
12582 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12583 /* If the second arg is constant true, this is a logical inversion. */
12584 if (integer_onep (arg1))
12586 /* Only call invert_truthvalue if operand is a truth value. */
12587 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12588 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12589 else
12590 tem = invert_truthvalue_loc (loc, arg0);
12591 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12593 /* Identical arguments cancel to zero. */
12594 if (operand_equal_p (arg0, arg1, 0))
12595 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12597 /* !X ^ X is always true. */
12598 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12599 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12600 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12602 /* X ^ !X is always true. */
12603 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12604 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12605 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12607 return NULL_TREE;
12609 case EQ_EXPR:
12610 case NE_EXPR:
12611 STRIP_NOPS (arg0);
12612 STRIP_NOPS (arg1);
12614 tem = fold_comparison (loc, code, type, op0, op1);
12615 if (tem != NULL_TREE)
12616 return tem;
12618 /* bool_var != 0 becomes bool_var. */
12619 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12620 && code == NE_EXPR)
12621 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12623 /* bool_var == 1 becomes bool_var. */
12624 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12625 && code == EQ_EXPR)
12626 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12628 /* bool_var != 1 becomes !bool_var. */
12629 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12630 && code == NE_EXPR)
12631 return fold_convert_loc (loc, type,
12632 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12633 TREE_TYPE (arg0), arg0));
12635 /* bool_var == 0 becomes !bool_var. */
12636 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12637 && code == EQ_EXPR)
12638 return fold_convert_loc (loc, type,
12639 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12640 TREE_TYPE (arg0), arg0));
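/* Example (editorial annotation, not in the GCC source): for a boolean
   b, the four folds above give
       b != 0  ->  b        b == 1  ->  b
       b != 1  ->  !b       b == 0  ->  !b
   so comparisons against boolean constants collapse to the variable or
   its negation.  */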
12642 /* !exp != 0 becomes !exp */
12643 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12644 && code == NE_EXPR)
12645 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12647 /* If this is an equality comparison of the address of two non-weak,
12648 unaliased symbols, neither of which is extern (since we do not
12649 have access to attributes for externs), then we know the result. */
12650 if (TREE_CODE (arg0) == ADDR_EXPR
12651 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12652 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12653 && ! lookup_attribute ("alias",
12654 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12655 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12656 && TREE_CODE (arg1) == ADDR_EXPR
12657 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12658 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12659 && ! lookup_attribute ("alias",
12660 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12661 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12663 /* We know that we're looking at the address of two
12664 non-weak, unaliased, static _DECL nodes.
12666 It is both wasteful and incorrect to call operand_equal_p
12667 to compare the two ADDR_EXPR nodes. It is wasteful in that
12668 all we need to do is test pointer equality for the arguments
12669 to the two ADDR_EXPR nodes. It is incorrect to use
12670 operand_equal_p as that function is NOT equivalent to a
12671 C equality test. It can in fact return false for two
12672 objects which would test as equal using the C equality
12673 operator. */
12674 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12675 return constant_boolean_node (equal
12676 ? code == EQ_EXPR : code != EQ_EXPR,
12677 type);
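/* Example (editorial annotation, not in the GCC source): given
       static int a, b;
   &a == &b folds to 0 and &a == &a folds to 1 at compile time; weak,
   aliased, and extern decls are excluded because their addresses may
   coincide or are unknown here.  */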
12680 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12681 a MINUS_EXPR of a constant, we can convert it into a comparison with
12682 a revised constant as long as no overflow occurs. */
12683 if (TREE_CODE (arg1) == INTEGER_CST
12684 && (TREE_CODE (arg0) == PLUS_EXPR
12685 || TREE_CODE (arg0) == MINUS_EXPR)
12686 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12687 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12688 ? MINUS_EXPR : PLUS_EXPR,
12689 fold_convert_loc (loc, TREE_TYPE (arg0),
12690 arg1),
12691 TREE_OPERAND (arg0, 1)))
12692 && !TREE_OVERFLOW (tem))
12693 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
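/* Example (editorial annotation, not in the GCC source): the fold above
   moves the constant across the comparison, e.g. x + 9 == 4 becomes
   x == -5, but only when computing the adjusted constant does not
   overflow.  */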
12695 /* Similarly for a NEGATE_EXPR. */
12696 if (TREE_CODE (arg0) == NEGATE_EXPR
12697 && TREE_CODE (arg1) == INTEGER_CST
12698 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12699 arg1)))
12700 && TREE_CODE (tem) == INTEGER_CST
12701 && !TREE_OVERFLOW (tem))
12702 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12704 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12705 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12706 && TREE_CODE (arg1) == INTEGER_CST
12707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12708 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12709 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12710 fold_convert_loc (loc,
12711 TREE_TYPE (arg0),
12712 arg1),
12713 TREE_OPERAND (arg0, 1)));
12715 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12716 if ((TREE_CODE (arg0) == PLUS_EXPR
12717 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12718 || TREE_CODE (arg0) == MINUS_EXPR)
12719 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12720 0)),
12721 arg1, 0)
12722 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12723 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12725 tree val = TREE_OPERAND (arg0, 1);
12726 return omit_two_operands_loc (loc, type,
12727 fold_build2_loc (loc, code, type,
12728 val,
12729 build_int_cst (TREE_TYPE (val),
12730 0)),
12731 TREE_OPERAND (arg0, 0), arg1);
12734 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12735 if (TREE_CODE (arg0) == MINUS_EXPR
12736 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12737 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12738 1)),
12739 arg1, 0)
12740 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12742 return omit_two_operands_loc (loc, type,
12743 code == NE_EXPR
12744 ? boolean_true_node : boolean_false_node,
12745 TREE_OPERAND (arg0, 1), arg1);
12748 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12749 for !=. Don't do this for ordered comparisons due to overflow. */
12750 if (TREE_CODE (arg0) == MINUS_EXPR
12751 && integer_zerop (arg1))
12752 return fold_build2_loc (loc, code, type,
12753 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12755 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12756 if (TREE_CODE (arg0) == ABS_EXPR
12757 && (integer_zerop (arg1) || real_zerop (arg1)))
12758 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12760 /* If this is an EQ or NE comparison with zero and ARG0 is
12761 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12762 two operations, but the latter can be done in one less insn
12763 on machines that have only two-operand insns or on which a
12764 constant cannot be the first operand. */
12765 if (TREE_CODE (arg0) == BIT_AND_EXPR
12766 && integer_zerop (arg1))
12768 tree arg00 = TREE_OPERAND (arg0, 0);
12769 tree arg01 = TREE_OPERAND (arg0, 1);
12770 if (TREE_CODE (arg00) == LSHIFT_EXPR
12771 && integer_onep (TREE_OPERAND (arg00, 0)))
12773 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12774 arg01, TREE_OPERAND (arg00, 1));
12775 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12776 build_int_cst (TREE_TYPE (arg0), 1));
12777 return fold_build2_loc (loc, code, type,
12778 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12779 arg1);
12781 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12782 && integer_onep (TREE_OPERAND (arg01, 0)))
12784 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12785 arg00, TREE_OPERAND (arg01, 1));
12786 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12787 build_int_cst (TREE_TYPE (arg0), 1));
12788 return fold_build2_loc (loc, code, type,
12789 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12790 arg1);
12794 /* If this is an NE or EQ comparison of zero against the result of a
12795 signed MOD operation whose second operand is a power of 2, make
12796 the MOD operation unsigned since it is simpler and equivalent. */
12797 if (integer_zerop (arg1)
12798 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12799 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12800 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12801 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12802 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12803 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12805 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12806 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12807 fold_convert_loc (loc, newtype,
12808 TREE_OPERAND (arg0, 0)),
12809 fold_convert_loc (loc, newtype,
12810 TREE_OPERAND (arg0, 1)));
12812 return fold_build2_loc (loc, code, type, newmod,
12813 fold_convert_loc (loc, newtype, arg1));
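/* Example (editorial annotation, not in the GCC source): for signed x,
   x % 4 == 0 is tested as (unsigned) x % 4U == 0; the zero test is
   unchanged and the unsigned modulus then lowers to a simple mask.  */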
12816 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12817 C1 is a valid shift constant, and C2 is a power of two, i.e.
12818 a single bit. */
12819 if (TREE_CODE (arg0) == BIT_AND_EXPR
12820 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12821 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12822 == INTEGER_CST
12823 && integer_pow2p (TREE_OPERAND (arg0, 1))
12824 && integer_zerop (arg1))
12826 tree itype = TREE_TYPE (arg0);
12827 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12828 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12830 /* Check for a valid shift count. */
12831 if (TREE_INT_CST_HIGH (arg001) == 0
12832 && TREE_INT_CST_LOW (arg001) < prec)
12834 tree arg01 = TREE_OPERAND (arg0, 1);
12835 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12836 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12837 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12838 can be rewritten as (X & (C2 << C1)) != 0. */
12839 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12841 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12842 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12843 return fold_build2_loc (loc, code, type, tem,
12844 fold_convert_loc (loc, itype, arg1));
12846 /* Otherwise, for signed (arithmetic) shifts,
12847 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12848 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12849 else if (!TYPE_UNSIGNED (itype))
12850 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12851 arg000, build_int_cst (itype, 0));
12852 /* Otherwise, for unsigned (logical) shifts,
12853 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12854 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12855 else
12856 return omit_one_operand_loc (loc, type,
12857 code == EQ_EXPR ? integer_one_node
12858 : integer_zero_node,
12859 arg000);
12863 /* If we have (A & C) == C where C is a power of 2, convert this into
12864 (A & C) != 0. Similarly for NE_EXPR. */
12865 if (TREE_CODE (arg0) == BIT_AND_EXPR
12866 && integer_pow2p (TREE_OPERAND (arg0, 1))
12867 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12868 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12869 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12870 integer_zero_node));
12872 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12873 bit, then fold the expression into A < 0 or A >= 0. */
12874 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12875 if (tem)
12876 return tem;
12878 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12879 Similarly for NE_EXPR. */
12880 if (TREE_CODE (arg0) == BIT_AND_EXPR
12881 && TREE_CODE (arg1) == INTEGER_CST
12882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12884 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12885 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12886 TREE_OPERAND (arg0, 1));
12887 tree dandnotc
12888 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12889 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12890 notc);
12891 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12892 if (integer_nonzerop (dandnotc))
12893 return omit_one_operand_loc (loc, type, rslt, arg0);
12896 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12897 Similarly for NE_EXPR. */
12898 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12899 && TREE_CODE (arg1) == INTEGER_CST
12900 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12902 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12903 tree candnotd
12904 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12905 TREE_OPERAND (arg0, 1),
12906 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12907 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12908 if (integer_nonzerop (candnotd))
12909 return omit_one_operand_loc (loc, type, rslt, arg0);
12912 /* If this is a comparison of a field, we may be able to simplify it. */
12913 if ((TREE_CODE (arg0) == COMPONENT_REF
12914 || TREE_CODE (arg0) == BIT_FIELD_REF)
12915 /* Handle the constant case even without -O
12916 to make sure the warnings are given. */
12917 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12919 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12920 if (t1)
12921 return t1;
12924 /* Optimize comparisons of strlen vs zero to a compare of the
12925 first character of the string vs zero. To wit,
12926 strlen(ptr) == 0 => *ptr == 0
12927 strlen(ptr) != 0 => *ptr != 0
12928 Other cases should reduce to one of these two (or a constant)
12929 due to the return value of strlen being unsigned. */
12930 if (TREE_CODE (arg0) == CALL_EXPR
12931 && integer_zerop (arg1))
12933 tree fndecl = get_callee_fndecl (arg0);
12935 if (fndecl
12936 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12937 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12938 && call_expr_nargs (arg0) == 1
12939 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12941 tree iref = build_fold_indirect_ref_loc (loc,
12942 CALL_EXPR_ARG (arg0, 0));
12943 return fold_build2_loc (loc, code, type, iref,
12944 build_int_cst (TREE_TYPE (iref), 0));
12948 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12949 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12950 if (TREE_CODE (arg0) == RSHIFT_EXPR
12951 && integer_zerop (arg1)
12952 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12954 tree arg00 = TREE_OPERAND (arg0, 0);
12955 tree arg01 = TREE_OPERAND (arg0, 1);
12956 tree itype = TREE_TYPE (arg00);
12957 if (TREE_INT_CST_HIGH (arg01) == 0
12958 && TREE_INT_CST_LOW (arg01)
12959 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12961 if (TYPE_UNSIGNED (itype))
12963 itype = signed_type_for (itype);
12964 arg00 = fold_convert_loc (loc, itype, arg00);
12966 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12967 type, arg00, build_zero_cst (itype));
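/* Example (editorial annotation, not in the GCC source), assuming
   32-bit int: (x >> 31) != 0 folds to x < 0 and (x >> 31) == 0 folds
   to x >= 0, first converting an unsigned x to the signed type.  */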
12971 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12972 if (integer_zerop (arg1)
12973 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12974 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12975 TREE_OPERAND (arg0, 1));
12977 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12978 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12979 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12980 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12981 build_zero_cst (TREE_TYPE (arg0)));
12982 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12983 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12984 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12985 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12986 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12987 build_zero_cst (TREE_TYPE (arg0)));
12989 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12990 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12991 && TREE_CODE (arg1) == INTEGER_CST
12992 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12993 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12994 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12995 TREE_OPERAND (arg0, 1), arg1));
12997 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12998 (X & C) == 0 when C is a single bit. */
12999 if (TREE_CODE (arg0) == BIT_AND_EXPR
13000 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13001 && integer_zerop (arg1)
13002 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13004 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13005 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13006 TREE_OPERAND (arg0, 1));
13007 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13008 type, tem,
13009 fold_convert_loc (loc, TREE_TYPE (arg0),
13010 arg1));
13013 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13014 constant C is a power of two, i.e. a single bit. */
13015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13016 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13017 && integer_zerop (arg1)
13018 && integer_pow2p (TREE_OPERAND (arg0, 1))
13019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13020 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13022 tree arg00 = TREE_OPERAND (arg0, 0);
13023 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13024 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13027 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13028 when C is a power of two, i.e. a single bit. */
13029 if (TREE_CODE (arg0) == BIT_AND_EXPR
13030 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13031 && integer_zerop (arg1)
13032 && integer_pow2p (TREE_OPERAND (arg0, 1))
13033 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13034 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13036 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13037 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13038 arg000, TREE_OPERAND (arg0, 1));
13039 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13040 tem, build_int_cst (TREE_TYPE (tem), 0));
13043 if (integer_zerop (arg1)
13044 && tree_expr_nonzero_p (arg0))
13046 tree res = constant_boolean_node (code == NE_EXPR, type);
13047 return omit_one_operand_loc (loc, type, res, arg0);
13050 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13051 if (TREE_CODE (arg0) == NEGATE_EXPR
13052 && TREE_CODE (arg1) == NEGATE_EXPR)
13053 return fold_build2_loc (loc, code, type,
13054 TREE_OPERAND (arg0, 0),
13055 fold_convert_loc (loc, TREE_TYPE (arg0),
13056 TREE_OPERAND (arg1, 0)));
13058 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
13059 if (TREE_CODE (arg0) == BIT_AND_EXPR
13060 && TREE_CODE (arg1) == BIT_AND_EXPR)
13062 tree arg00 = TREE_OPERAND (arg0, 0);
13063 tree arg01 = TREE_OPERAND (arg0, 1);
13064 tree arg10 = TREE_OPERAND (arg1, 0);
13065 tree arg11 = TREE_OPERAND (arg1, 1);
13066 tree itype = TREE_TYPE (arg0);
13068 if (operand_equal_p (arg01, arg11, 0))
13069 return fold_build2_loc (loc, code, type,
13070 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13071 fold_build2_loc (loc,
13072 BIT_XOR_EXPR, itype,
13073 arg00, arg10),
13074 arg01),
13075 build_zero_cst (itype));
13077 if (operand_equal_p (arg01, arg10, 0))
13078 return fold_build2_loc (loc, code, type,
13079 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13080 fold_build2_loc (loc,
13081 BIT_XOR_EXPR, itype,
13082 arg00, arg11),
13083 arg01),
13084 build_zero_cst (itype));
13086 if (operand_equal_p (arg00, arg11, 0))
13087 return fold_build2_loc (loc, code, type,
13088 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13089 fold_build2_loc (loc,
13090 BIT_XOR_EXPR, itype,
13091 arg01, arg10),
13092 arg00),
13093 build_zero_cst (itype));
13095 if (operand_equal_p (arg00, arg10, 0))
13096 return fold_build2_loc (loc, code, type,
13097 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13098 fold_build2_loc (loc,
13099 BIT_XOR_EXPR, itype,
13100 arg01, arg11),
13101 arg00),
13102 build_zero_cst (itype));
13105 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13106 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13108 tree arg00 = TREE_OPERAND (arg0, 0);
13109 tree arg01 = TREE_OPERAND (arg0, 1);
13110 tree arg10 = TREE_OPERAND (arg1, 0);
13111 tree arg11 = TREE_OPERAND (arg1, 1);
13112 tree itype = TREE_TYPE (arg0);
13114 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13115 operand_equal_p guarantees no side-effects so we don't need
13116 to use omit_one_operand on Z. */
13117 if (operand_equal_p (arg01, arg11, 0))
13118 return fold_build2_loc (loc, code, type, arg00,
13119 fold_convert_loc (loc, TREE_TYPE (arg00),
13120 arg10));
13121 if (operand_equal_p (arg01, arg10, 0))
13122 return fold_build2_loc (loc, code, type, arg00,
13123 fold_convert_loc (loc, TREE_TYPE (arg00),
13124 arg11));
13125 if (operand_equal_p (arg00, arg11, 0))
13126 return fold_build2_loc (loc, code, type, arg01,
13127 fold_convert_loc (loc, TREE_TYPE (arg01),
13128 arg10));
13129 if (operand_equal_p (arg00, arg10, 0))
13130 return fold_build2_loc (loc, code, type, arg01,
13131 fold_convert_loc (loc, TREE_TYPE (arg01),
13132 arg11));
13134 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13135 if (TREE_CODE (arg01) == INTEGER_CST
13136 && TREE_CODE (arg11) == INTEGER_CST)
13138 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13139 fold_convert_loc (loc, itype, arg11));
13140 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13141 return fold_build2_loc (loc, code, type, tem,
13142 fold_convert_loc (loc, itype, arg10));
13146 /* Attempt to simplify equality/inequality comparisons of complex
13147 values. Only lower the comparison if the result is known or
13148 can be simplified to a single scalar comparison. */
13149 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13150 || TREE_CODE (arg0) == COMPLEX_CST)
13151 && (TREE_CODE (arg1) == COMPLEX_EXPR
13152 || TREE_CODE (arg1) == COMPLEX_CST))
13154 tree real0, imag0, real1, imag1;
13155 tree rcond, icond;
13157 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13159 real0 = TREE_OPERAND (arg0, 0);
13160 imag0 = TREE_OPERAND (arg0, 1);
13162 else
13164 real0 = TREE_REALPART (arg0);
13165 imag0 = TREE_IMAGPART (arg0);
13168 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13170 real1 = TREE_OPERAND (arg1, 0);
13171 imag1 = TREE_OPERAND (arg1, 1);
13173 else
13175 real1 = TREE_REALPART (arg1);
13176 imag1 = TREE_IMAGPART (arg1);
13179 rcond = fold_binary_loc (loc, code, type, real0, real1);
13180 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13182 if (integer_zerop (rcond))
13184 if (code == EQ_EXPR)
13185 return omit_two_operands_loc (loc, type, boolean_false_node,
13186 imag0, imag1);
13187 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13189 else
13191 if (code == NE_EXPR)
13192 return omit_two_operands_loc (loc, type, boolean_true_node,
13193 imag0, imag1);
13194 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13198 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13199 if (icond && TREE_CODE (icond) == INTEGER_CST)
13201 if (integer_zerop (icond))
13203 if (code == EQ_EXPR)
13204 return omit_two_operands_loc (loc, type, boolean_false_node,
13205 real0, real1);
13206 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13208 else
13210 if (code == NE_EXPR)
13211 return omit_two_operands_loc (loc, type, boolean_true_node,
13212 real0, real1);
13213 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
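/* Example (editorial annotation, not in the GCC source): if the real
   parts are known equal, z1 == z2 reduces to comparing the imaginary
   parts alone; if they are known unequal, z1 == z2 folds to false (and
   z1 != z2 to true) outright.  */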
13218 return NULL_TREE;
13220 case LT_EXPR:
13221 case GT_EXPR:
13222 case LE_EXPR:
13223 case GE_EXPR:
13224 tem = fold_comparison (loc, code, type, op0, op1);
13225 if (tem != NULL_TREE)
13226 return tem;
13228 /* Transform comparisons of the form X +- C CMP X. */
13229 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13230 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13231 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13232 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13233 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13234 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13236 tree arg01 = TREE_OPERAND (arg0, 1);
13237 enum tree_code code0 = TREE_CODE (arg0);
13238 int is_positive;
13240 if (TREE_CODE (arg01) == REAL_CST)
13241 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13242 else
13243 is_positive = tree_int_cst_sgn (arg01);
13245 /* (X - c) > X becomes false. */
13246 if (code == GT_EXPR
13247 && ((code0 == MINUS_EXPR && is_positive >= 0)
13248 || (code0 == PLUS_EXPR && is_positive <= 0)))
13250 if (TREE_CODE (arg01) == INTEGER_CST
13251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13252 fold_overflow_warning (("assuming signed overflow does not "
13253 "occur when assuming that (X - c) > X "
13254 "is always false"),
13255 WARN_STRICT_OVERFLOW_ALL);
13256 return constant_boolean_node (0, type);
13259 /* Likewise (X + c) < X becomes false. */
13260 if (code == LT_EXPR
13261 && ((code0 == PLUS_EXPR && is_positive >= 0)
13262 || (code0 == MINUS_EXPR && is_positive <= 0)))
13264 if (TREE_CODE (arg01) == INTEGER_CST
13265 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13266 fold_overflow_warning (("assuming signed overflow does not "
13267 "occur when assuming that "
13268 "(X + c) < X is always false"),
13269 WARN_STRICT_OVERFLOW_ALL);
13270 return constant_boolean_node (0, type);
13273 /* Convert (X - c) <= X to true. */
13274 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13275 && code == LE_EXPR
13276 && ((code0 == MINUS_EXPR && is_positive >= 0)
13277 || (code0 == PLUS_EXPR && is_positive <= 0)))
13279 if (TREE_CODE (arg01) == INTEGER_CST
13280 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13281 fold_overflow_warning (("assuming signed overflow does not "
13282 "occur when assuming that "
13283 "(X - c) <= X is always true"),
13284 WARN_STRICT_OVERFLOW_ALL);
13285 return constant_boolean_node (1, type);
13288 /* Convert (X + c) >= X to true. */
13289 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13290 && code == GE_EXPR
13291 && ((code0 == PLUS_EXPR && is_positive >= 0)
13292 || (code0 == MINUS_EXPR && is_positive <= 0)))
13294 if (TREE_CODE (arg01) == INTEGER_CST
13295 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13296 fold_overflow_warning (("assuming signed overflow does not "
13297 "occur when assuming that "
13298 "(X + c) >= X is always true"),
13299 WARN_STRICT_OVERFLOW_ALL);
13300 return constant_boolean_node (1, type);
13303 if (TREE_CODE (arg01) == INTEGER_CST)
13305 /* Convert X + c > X and X - c < X to true for integers. */
13306 if (code == GT_EXPR
13307 && ((code0 == PLUS_EXPR && is_positive > 0)
13308 || (code0 == MINUS_EXPR && is_positive < 0)))
13310 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13311 fold_overflow_warning (("assuming signed overflow does "
13312 "not occur when assuming that "
13313 "(X + c) > X is always true"),
13314 WARN_STRICT_OVERFLOW_ALL);
13315 return constant_boolean_node (1, type);
13318 if (code == LT_EXPR
13319 && ((code0 == MINUS_EXPR && is_positive > 0)
13320 || (code0 == PLUS_EXPR && is_positive < 0)))
13322 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13323 fold_overflow_warning (("assuming signed overflow does "
13324 "not occur when assuming that "
13325 "(X - c) < X is always true"),
13326 WARN_STRICT_OVERFLOW_ALL);
13327 return constant_boolean_node (1, type);
13330 /* Convert X + c <= X and X - c >= X to false for integers. */
13331 if (code == LE_EXPR
13332 && ((code0 == PLUS_EXPR && is_positive > 0)
13333 || (code0 == MINUS_EXPR && is_positive < 0)))
13335 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13336 fold_overflow_warning (("assuming signed overflow does "
13337 "not occur when assuming that "
13338 "(X + c) <= X is always false"),
13339 WARN_STRICT_OVERFLOW_ALL);
13340 return constant_boolean_node (0, type);
13343 if (code == GE_EXPR
13344 && ((code0 == MINUS_EXPR && is_positive > 0)
13345 || (code0 == PLUS_EXPR && is_positive < 0)))
13347 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13348 fold_overflow_warning (("assuming signed overflow does "
13349 "not occur when assuming that "
13350 "(X - c) >= X is always false"),
13351 WARN_STRICT_OVERFLOW_ALL);
13352 return constant_boolean_node (0, type);
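/* Example (editorial annotation, not in the GCC source): with undefined
   signed overflow, the chain above decides, e.g., x + 1 > x -> true
   and x - 1 >= x -> false for positive constant offsets, emitting a
   -Wstrict-overflow note where the assumption matters.  */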
13357 /* Comparisons with the highest or lowest possible integer of
13358 the specified precision will have known values. */
13360 tree arg1_type = TREE_TYPE (arg1);
13361 unsigned int width = TYPE_PRECISION (arg1_type);
13363 if (TREE_CODE (arg1) == INTEGER_CST
13364 && width <= HOST_BITS_PER_DOUBLE_INT
13365 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13367 HOST_WIDE_INT signed_max_hi;
13368 unsigned HOST_WIDE_INT signed_max_lo;
13369 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13371 if (width <= HOST_BITS_PER_WIDE_INT)
13373 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13374 - 1;
13375 signed_max_hi = 0;
13376 max_hi = 0;
13378 if (TYPE_UNSIGNED (arg1_type))
13380 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13381 min_lo = 0;
13382 min_hi = 0;
13384 else
13386 max_lo = signed_max_lo;
13387 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13388 min_hi = -1;
13391 else
13393 width -= HOST_BITS_PER_WIDE_INT;
13394 signed_max_lo = -1;
13395 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13396 - 1;
13397 max_lo = -1;
13398 min_lo = 0;
13400 if (TYPE_UNSIGNED (arg1_type))
13402 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13403 min_hi = 0;
13405 else
13407 max_hi = signed_max_hi;
13408 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13412 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13413 && TREE_INT_CST_LOW (arg1) == max_lo)
13414 switch (code)
13416 case GT_EXPR:
13417 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13419 case GE_EXPR:
13420 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13422 case LE_EXPR:
13423 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13425 case LT_EXPR:
13426 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13428 /* The GE_EXPR and LT_EXPR cases above are not normally
13429 reached because of previous transformations. */
13431 default:
13432 break;
13434 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13435 == max_hi
13436 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13437 switch (code)
13439 case GT_EXPR:
13440 arg1 = const_binop (PLUS_EXPR, arg1,
13441 build_int_cst (TREE_TYPE (arg1), 1));
13442 return fold_build2_loc (loc, EQ_EXPR, type,
13443 fold_convert_loc (loc,
13444 TREE_TYPE (arg1), arg0),
13445 arg1);
13446 case LE_EXPR:
13447 arg1 = const_binop (PLUS_EXPR, arg1,
13448 build_int_cst (TREE_TYPE (arg1), 1));
13449 return fold_build2_loc (loc, NE_EXPR, type,
13450 fold_convert_loc (loc, TREE_TYPE (arg1),
13451 arg0),
13452 arg1);
13453 default:
13454 break;
13456 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13457 == min_hi
13458 && TREE_INT_CST_LOW (arg1) == min_lo)
13459 switch (code)
13461 case LT_EXPR:
13462 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13464 case LE_EXPR:
13465 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13467 case GE_EXPR:
13468 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13470 case GT_EXPR:
13471 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13473 default:
13474 break;
13476 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13477 == min_hi
13478 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13479 switch (code)
13481 case GE_EXPR:
13482 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13483 return fold_build2_loc (loc, NE_EXPR, type,
13484 fold_convert_loc (loc,
13485 TREE_TYPE (arg1), arg0),
13486 arg1);
13487 case LT_EXPR:
13488 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13489 return fold_build2_loc (loc, EQ_EXPR, type,
13490 fold_convert_loc (loc, TREE_TYPE (arg1),
13491 arg0),
13492 arg1);
13493 default:
13494 break;
13497 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13498 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13499 && TYPE_UNSIGNED (arg1_type)
13500 /* We will flip the signedness of the comparison operator
13501 associated with the mode of arg1, so the sign bit is
13502 specified by this mode. Check that arg1 is the signed
13503 max associated with this sign bit. */
13504 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13505 /* signed_type does not work on pointer types. */
13506 && INTEGRAL_TYPE_P (arg1_type))
13508 /* The following case also applies to X < signed_max+1
13509 and X >= signed_max+1 because of previous transformations. */
13510 if (code == LE_EXPR || code == GT_EXPR)
13512 tree st;
13513 st = signed_type_for (TREE_TYPE (arg1));
13514 return fold_build2_loc (loc,
13515 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13516 type, fold_convert_loc (loc, st, arg0),
13517 build_int_cst (st, 0));
13523 /* If we are comparing an ABS_EXPR with a constant, we can
13524 convert all the cases into explicit comparisons, but they may
13525 well not be faster than doing the ABS and one comparison.
13526 But ABS (X) <= C is a range comparison, which becomes a subtraction
13527 and a comparison, and is probably faster. */
13528 if (code == LE_EXPR
13529 && TREE_CODE (arg1) == INTEGER_CST
13530 && TREE_CODE (arg0) == ABS_EXPR
13531 && ! TREE_SIDE_EFFECTS (arg0)
13532 && (0 != (tem = negate_expr (arg1)))
13533 && TREE_CODE (tem) == INTEGER_CST
13534 && !TREE_OVERFLOW (tem))
13535 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13536 build2 (GE_EXPR, type,
13537 TREE_OPERAND (arg0, 0), tem),
13538 build2 (LE_EXPR, type,
13539 TREE_OPERAND (arg0, 0), arg1));
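/* Example (editorial annotation, not in the GCC source): ABS (x) <= 5
   becomes x >= -5 && x <= 5, a range test that never materializes the
   absolute value.  */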
13541 /* Convert ABS_EXPR<x> >= 0 to true. */
13542 strict_overflow_p = false;
13543 if (code == GE_EXPR
13544 && (integer_zerop (arg1)
13545 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13546 && real_zerop (arg1)))
13547 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13549 if (strict_overflow_p)
13550 fold_overflow_warning (("assuming signed overflow does not occur "
13551 "when simplifying comparison of "
13552 "absolute value and zero"),
13553 WARN_STRICT_OVERFLOW_CONDITIONAL);
13554 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13557 /* Convert ABS_EXPR<x> < 0 to false. */
13558 strict_overflow_p = false;
13559 if (code == LT_EXPR
13560 && (integer_zerop (arg1) || real_zerop (arg1))
13561 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13563 if (strict_overflow_p)
13564 fold_overflow_warning (("assuming signed overflow does not occur "
13565 "when simplifying comparison of "
13566 "absolute value and zero"),
13567 WARN_STRICT_OVERFLOW_CONDITIONAL);
13568 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13571 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13572 and similarly for >= into !=. */
13573 if ((code == LT_EXPR || code == GE_EXPR)
13574 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13575 && TREE_CODE (arg1) == LSHIFT_EXPR
13576 && integer_onep (TREE_OPERAND (arg1, 0)))
13577 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13578 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13579 TREE_OPERAND (arg1, 1)),
13580 build_zero_cst (TREE_TYPE (arg0)));
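/* Example (editorial annotation, not in the GCC source): for unsigned
   x, x < (1 << y) folds to (x >> y) == 0 and x >= (1 << y) folds to
   (x >> y) != 0, replacing the variable power of two with a
   shift-and-test.  */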
13582 if ((code == LT_EXPR || code == GE_EXPR)
13583 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13584 && CONVERT_EXPR_P (arg1)
13585 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13586 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13588 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13589 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13590 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13591 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13592 build_zero_cst (TREE_TYPE (arg0)));
13595 return NULL_TREE;
13597 case UNORDERED_EXPR:
13598 case ORDERED_EXPR:
13599 case UNLT_EXPR:
13600 case UNLE_EXPR:
13601 case UNGT_EXPR:
13602 case UNGE_EXPR:
13603 case UNEQ_EXPR:
13604 case LTGT_EXPR:
13605 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13607 t1 = fold_relational_const (code, type, arg0, arg1);
13608 if (t1 != NULL_TREE)
13609 return t1;
13612 /* If the first operand is NaN, the result is constant. */
13613 if (TREE_CODE (arg0) == REAL_CST
13614 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13615 && (code != LTGT_EXPR || ! flag_trapping_math))
13617 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13618 ? integer_zero_node
13619 : integer_one_node;
13620 return omit_one_operand_loc (loc, type, t1, arg1);
13623 /* If the second operand is NaN, the result is constant. */
13624 if (TREE_CODE (arg1) == REAL_CST
13625 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13626 && (code != LTGT_EXPR || ! flag_trapping_math))
13628 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13629 ? integer_zero_node
13630 : integer_one_node;
13631 return omit_one_operand_loc (loc, type, t1, arg0);
13634 /* Simplify unordered comparison of something with itself. */
13635 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13636 && operand_equal_p (arg0, arg1, 0))
13637 return constant_boolean_node (1, type);
13639 if (code == LTGT_EXPR
13640 && !flag_trapping_math
13641 && operand_equal_p (arg0, arg1, 0))
13642 return constant_boolean_node (0, type);
13644 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13646 tree targ0 = strip_float_extensions (arg0);
13647 tree targ1 = strip_float_extensions (arg1);
13648 tree newtype = TREE_TYPE (targ0);
13650 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13651 newtype = TREE_TYPE (targ1);
13653 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13654 return fold_build2_loc (loc, code, type,
13655 fold_convert_loc (loc, newtype, targ0),
13656 fold_convert_loc (loc, newtype, targ1));
13659 return NULL_TREE;
13661 case COMPOUND_EXPR:
13662 /* When pedantic, a compound expression can be neither an lvalue
13663 nor an integer constant expression. */
13664 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13665 return NULL_TREE;
13666 /* Don't let (0, 0) be a null pointer constant. */
13667 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13668 : fold_convert_loc (loc, type, arg1);
13669 return pedantic_non_lvalue_loc (loc, tem);
13671 case COMPLEX_EXPR:
13672 if ((TREE_CODE (arg0) == REAL_CST
13673 && TREE_CODE (arg1) == REAL_CST)
13674 || (TREE_CODE (arg0) == INTEGER_CST
13675 && TREE_CODE (arg1) == INTEGER_CST))
13676 return build_complex (type, arg0, arg1);
13677 if (TREE_CODE (arg0) == REALPART_EXPR
13678 && TREE_CODE (arg1) == IMAGPART_EXPR
13679 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13680 && operand_equal_p (TREE_OPERAND (arg0, 0),
13681 TREE_OPERAND (arg1, 0), 0))
13682 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13683 TREE_OPERAND (arg1, 0));
13684 return NULL_TREE;
13686 case ASSERT_EXPR:
13687 /* An ASSERT_EXPR should never be passed to fold_binary. */
13688 gcc_unreachable ();
13690 case VEC_PACK_TRUNC_EXPR:
13691 case VEC_PACK_FIX_TRUNC_EXPR:
13693 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13694 tree *elts;
13696 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13697 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13698 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13699 return NULL_TREE;
13701 elts = XALLOCAVEC (tree, nelts);
13702 if (!vec_cst_ctor_to_array (arg0, elts)
13703 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13704 return NULL_TREE;
13706 for (i = 0; i < nelts; i++)
13708 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13709 ? NOP_EXPR : FIX_TRUNC_EXPR,
13710 TREE_TYPE (type), elts[i]);
13711 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13712 return NULL_TREE;
13715 return build_vector (type, elts);
13718 case VEC_WIDEN_MULT_LO_EXPR:
13719 case VEC_WIDEN_MULT_HI_EXPR:
13720 case VEC_WIDEN_MULT_EVEN_EXPR:
13721 case VEC_WIDEN_MULT_ODD_EXPR:
13723 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13724 unsigned int out, ofs, scale;
13725 tree *elts;
13727 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13728 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13729 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13730 return NULL_TREE;
13732 elts = XALLOCAVEC (tree, nelts * 4);
13733 if (!vec_cst_ctor_to_array (arg0, elts)
13734 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13735 return NULL_TREE;
13737 if (code == VEC_WIDEN_MULT_LO_EXPR)
13738 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13739 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13740 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13741 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13742 scale = 1, ofs = 0;
13743 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13744 scale = 1, ofs = 1;
13746 for (out = 0; out < nelts; out++)
13748 unsigned int in1 = (out << scale) + ofs;
13749 unsigned int in2 = in1 + nelts * 2;
13750 tree t1, t2;
13752 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13753 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13755 if (t1 == NULL_TREE || t2 == NULL_TREE)
13756 return NULL_TREE;
13757 elts[out] = const_binop (MULT_EXPR, t1, t2);
13758 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13759 return NULL_TREE;
13762 return build_vector (type, elts);
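/* Example (editorial annotation, not in the GCC source): for two
   constant V4HI operands, VEC_WIDEN_MULT_EVEN_EXPR folds here to a
   constant V2SI holding the products of the even-numbered lanes; the
   LO/HI/ODD variants select the other lane subsets.  */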
13765 default:
13766 return NULL_TREE;
13767 } /* switch (code) */
13770 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13771 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13772 of GOTO_EXPR. */
13774 static tree
13775 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13777 switch (TREE_CODE (*tp))
13779 case LABEL_EXPR:
13780 return *tp;
13782 case GOTO_EXPR:
13783 *walk_subtrees = 0;
13785 /* ... fall through ... */
13787 default:
13788 return NULL_TREE;
13792 /* Return whether the sub-tree ST contains a label which is accessible from
13793 outside the sub-tree. */
13795 static bool
13796 contains_label_p (tree st)
13798 return
13799 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
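/* Example (editorial annotation, not in the GCC source): this guard
   keeps COND_EXPR folding from deleting a dead arm that still contains
   a label reachable from outside, e.g. the target of a goto elsewhere
   in the function.  */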
13802 /* Fold a ternary expression of code CODE and type TYPE with operands
13803 OP0, OP1, and OP2. Return the folded expression if folding is
13804 successful. Otherwise, return NULL_TREE. */
13806 tree
13807 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13808 tree op0, tree op1, tree op2)
13810 tree tem;
13811 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13812 enum tree_code_class kind = TREE_CODE_CLASS (code);
13814 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13815 && TREE_CODE_LENGTH (code) == 3);
13817 /* Strip any conversions that don't change the mode. This is safe
13818 for every expression, except for a comparison expression because
13819 its signedness is derived from its operands. So, in the latter
13820 case, only strip conversions that don't change the signedness.
13822 Note that this is done as an internal manipulation within the
13823 constant folder, in order to find the simplest representation of
13824 the arguments so that their form can be studied. In any case,
13825 the appropriate type conversions should be put back in the tree
13826 that will get out of the constant folder. */
13827 if (op0)
13829 arg0 = op0;
13830 STRIP_NOPS (arg0);
13833 if (op1)
13835 arg1 = op1;
13836 STRIP_NOPS (arg1);
13839 if (op2)
13841 arg2 = op2;
13842 STRIP_NOPS (arg2);
13845 switch (code)
13847 case COMPONENT_REF:
13848 if (TREE_CODE (arg0) == CONSTRUCTOR
13849 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13851 unsigned HOST_WIDE_INT idx;
13852 tree field, value;
13853 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13854 if (field == arg1)
13855 return value;
13857 return NULL_TREE;
13859 case COND_EXPR:
13860 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13861 so all simple results must be passed through pedantic_non_lvalue. */
13862 if (TREE_CODE (arg0) == INTEGER_CST)
13864 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13865 tem = integer_zerop (arg0) ? op2 : op1;
13866 /* Only optimize constant conditions when the selected branch
13867 has the same type as the COND_EXPR. This avoids optimizing
13868 away "c ? x : throw", where the throw has a void type.
13869 Also avoid throwing away an operand that contains a label. */
13870 if ((!TREE_SIDE_EFFECTS (unused_op)
13871 || !contains_label_p (unused_op))
13872 && (! VOID_TYPE_P (TREE_TYPE (tem))
13873 || VOID_TYPE_P (type)))
13874 return pedantic_non_lvalue_loc (loc, tem);
13875 return NULL_TREE;
13877 if (operand_equal_p (arg1, op2, 0))
13878 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13880 /* If we have A op B ? A : C, we may be able to convert this to a
13881 simpler expression, depending on the operation and the values
13882 of B and C. Signed zeros prevent all of these transformations,
13883 for reasons given above each one.
13885 Also try swapping the arguments and inverting the conditional. */
13886 if (COMPARISON_CLASS_P (arg0)
13887 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13888 arg1, TREE_OPERAND (arg0, 1))
13889 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13891 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13892 if (tem)
13893 return tem;
13896 if (COMPARISON_CLASS_P (arg0)
13897 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13898 op2,
13899 TREE_OPERAND (arg0, 1))
13900 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13902 location_t loc0 = expr_location_or (arg0, loc);
13903 tem = fold_truth_not_expr (loc0, arg0);
13904 if (tem && COMPARISON_CLASS_P (tem))
13906 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13907 if (tem)
13908 return tem;
13912 /* If the second operand is simpler than the third, swap them
13913 since that produces better jump optimization results. */
13914 if (truth_value_p (TREE_CODE (arg0))
13915 && tree_swap_operands_p (op1, op2, false))
13917 location_t loc0 = expr_location_or (arg0, loc);
13918 /* See if this can be inverted. If it can't, possibly because
13919 it was a floating-point inequality comparison, don't do
13920 anything. */
13921 tem = fold_truth_not_expr (loc0, arg0);
13922 if (tem)
13923 return fold_build3_loc (loc, code, type, tem, op2, op1);
13926 /* Convert A ? 1 : 0 to simply A. */
13927 if (integer_onep (op1)
13928 && integer_zerop (op2)
13929 /* If we try to convert OP0 to our type, the
13930 call to fold will try to move the conversion inside
13931 a COND, which will recurse. In that case, the COND_EXPR
13932 is probably the best choice, so leave it alone. */
13933 && type == TREE_TYPE (arg0))
13934 return pedantic_non_lvalue_loc (loc, arg0);
13936 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13937 over COND_EXPR in cases such as floating point comparisons. */
13938 if (integer_zerop (op1)
13939 && integer_onep (op2)
13940 && truth_value_p (TREE_CODE (arg0)))
13941 return pedantic_non_lvalue_loc (loc,
13942 fold_convert_loc (loc, type,
13943 invert_truthvalue_loc (loc,
13944 arg0)));
13946 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13947 if (TREE_CODE (arg0) == LT_EXPR
13948 && integer_zerop (TREE_OPERAND (arg0, 1))
13949 && integer_zerop (op2)
13950 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13952 /* sign_bit_p only checks ARG1 bits within A's precision.
13953 If <sign bit of A> has wider type than A, bits outside
13954 of A's precision in <sign bit of A> need to be checked.
13955 If they are all 0, this optimization needs to be done
13956 in unsigned A's type; if they are all 1, in signed A's type;
13957 otherwise this can't be done. */
13958 if (TYPE_PRECISION (TREE_TYPE (tem))
13959 < TYPE_PRECISION (TREE_TYPE (arg1))
13960 && TYPE_PRECISION (TREE_TYPE (tem))
13961 < TYPE_PRECISION (type))
13963 unsigned HOST_WIDE_INT mask_lo;
13964 HOST_WIDE_INT mask_hi;
13965 int inner_width, outer_width;
13966 tree tem_type;
13968 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13969 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13970 if (outer_width > TYPE_PRECISION (type))
13971 outer_width = TYPE_PRECISION (type);
13973 if (outer_width > HOST_BITS_PER_WIDE_INT)
13975 mask_hi = ((unsigned HOST_WIDE_INT) -1
13976 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13977 mask_lo = -1;
13979 else
13981 mask_hi = 0;
13982 mask_lo = ((unsigned HOST_WIDE_INT) -1
13983 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13985 if (inner_width > HOST_BITS_PER_WIDE_INT)
13987 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13988 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13989 mask_lo = 0;
13991 else
13992 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13993 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13995 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13996 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13998 tem_type = signed_type_for (TREE_TYPE (tem));
13999 tem = fold_convert_loc (loc, tem_type, tem);
14001 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14002 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14004 tem_type = unsigned_type_for (TREE_TYPE (tem));
14005 tem = fold_convert_loc (loc, tem_type, tem);
14007 else
14008 tem = NULL;
14011 if (tem)
14012 return
14013 fold_convert_loc (loc, type,
14014 fold_build2_loc (loc, BIT_AND_EXPR,
14015 TREE_TYPE (tem), tem,
14016 fold_convert_loc (loc,
14017 TREE_TYPE (tem),
14018 arg1)));
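/* A worked instance of the precision check above: take A of signed
   8-bit type compared in 32-bit arithmetic, so inner_width == 8 and
   outer_width == 32.  The computed mask (mask_hi == 0,
   mask_lo == 0xffffff00) covers the bits of ARG1 outside A's
   precision.  If ARG1 == 0xffffff80 those bits are all 1 and the
   BIT_AND is done on TEM converted to the signed 8-bit type, whose
   sign extension reproduces them; if ARG1 == 0x00000080 they are all
   0 and the unsigned 8-bit type is used.  A value like 0x00008080 is
   neither, and the transformation is abandoned.  */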
14021 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14022 already handled above. */
14023 if (TREE_CODE (arg0) == BIT_AND_EXPR
14024 && integer_onep (TREE_OPERAND (arg0, 1))
14025 && integer_zerop (op2)
14026 && integer_pow2p (arg1))
14028 tree tem = TREE_OPERAND (arg0, 0);
14029 STRIP_NOPS (tem);
14030 if (TREE_CODE (tem) == RSHIFT_EXPR
14031 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14032 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14033 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14034 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14035 TREE_OPERAND (tem, 0), arg1);
14038 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14039 is probably obsolete because the first operand should be a
14040 truth value (that's why we have the two cases above), but let's
14041 leave it in until we can confirm this for all front-ends. */
14042 if (integer_zerop (op2)
14043 && TREE_CODE (arg0) == NE_EXPR
14044 && integer_zerop (TREE_OPERAND (arg0, 1))
14045 && integer_pow2p (arg1)
14046 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14047 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14048 arg1, OEP_ONLY_CONST))
14049 return pedantic_non_lvalue_loc (loc,
14050 fold_convert_loc (loc, type,
14051 TREE_OPERAND (arg0, 0)));
14053 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14054 if (integer_zerop (op2)
14055 && truth_value_p (TREE_CODE (arg0))
14056 && truth_value_p (TREE_CODE (arg1)))
14057 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14058 fold_convert_loc (loc, type, arg0),
14059 arg1);
14061 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14062 if (integer_onep (op2)
14063 && truth_value_p (TREE_CODE (arg0))
14064 && truth_value_p (TREE_CODE (arg1)))
14066 location_t loc0 = expr_location_or (arg0, loc);
14067 /* Only perform transformation if ARG0 is easily inverted. */
14068 tem = fold_truth_not_expr (loc0, arg0);
14069 if (tem)
14070 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14071 fold_convert_loc (loc, type, tem),
14072 arg1);
14075 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14076 if (integer_zerop (arg1)
14077 && truth_value_p (TREE_CODE (arg0))
14078 && truth_value_p (TREE_CODE (op2)))
14080 location_t loc0 = expr_location_or (arg0, loc);
14081 /* Only perform transformation if ARG0 is easily inverted. */
14082 tem = fold_truth_not_expr (loc0, arg0);
14083 if (tem)
14084 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14085 fold_convert_loc (loc, type, tem),
14086 op2);
14089 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14090 if (integer_onep (arg1)
14091 && truth_value_p (TREE_CODE (arg0))
14092 && truth_value_p (TREE_CODE (op2)))
14093 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14094 fold_convert_loc (loc, type, arg0),
14095 op2);
14097 return NULL_TREE;
14099 case VEC_COND_EXPR:
14100 if (TREE_CODE (arg0) == VECTOR_CST)
14102 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14103 return pedantic_non_lvalue_loc (loc, op1);
14104 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14105 return pedantic_non_lvalue_loc (loc, op2);
14107 return NULL_TREE;
14109 case CALL_EXPR:
14110 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14111 of fold_ternary on them. */
14112 gcc_unreachable ();
14114 case BIT_FIELD_REF:
14115 if ((TREE_CODE (arg0) == VECTOR_CST
14116 || (TREE_CODE (arg0) == CONSTRUCTOR
14117 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14118 && (type == TREE_TYPE (TREE_TYPE (arg0))
14119 || (TREE_CODE (type) == VECTOR_TYPE
14120 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14122 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14123 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14124 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14125 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14127 if (n != 0
14128 && (idx % width) == 0
14129 && (n % width) == 0
14130 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14132 idx = idx / width;
14133 n = n / width;
14134 if (TREE_CODE (type) == VECTOR_TYPE)
14136 if (TREE_CODE (arg0) == VECTOR_CST)
14138 tree *vals = XALLOCAVEC (tree, n);
14139 unsigned i;
14140 for (i = 0; i < n; ++i)
14141 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14142 return build_vector (type, vals);
14144 else
14146 vec<constructor_elt, va_gc> *vals;
14147 unsigned i;
14148 if (CONSTRUCTOR_NELTS (arg0) == 0)
14149 return build_constructor (type,
14150 NULL);
14151 if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14152 0)->value))
14153 != VECTOR_TYPE)
14155 vec_alloc (vals, n);
14156 for (i = 0;
14157 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14158 ++i)
14159 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14160 CONSTRUCTOR_ELT
14161 (arg0, idx + i)->value);
14162 return build_constructor (type, vals);
14166 else if (n == 1)
14168 if (TREE_CODE (arg0) == VECTOR_CST)
14169 return VECTOR_CST_ELT (arg0, idx);
14170 else if (CONSTRUCTOR_NELTS (arg0) == 0)
14171 return build_zero_cst (type);
14172 else if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14173 0)->value))
14174 != VECTOR_TYPE)
14176 if (idx < CONSTRUCTOR_NELTS (arg0))
14177 return CONSTRUCTOR_ELT (arg0, idx)->value;
14178 return build_zero_cst (type);
14184 /* A bit-field-ref that references the full argument can be stripped. */
14185 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14186 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14187 && integer_zerop (op2))
14188 return fold_convert_loc (loc, type, arg0);
14190 /* On constants we can use native encode/interpret to constant
14191 fold (nearly) all BIT_FIELD_REFs. */
14192 if (CONSTANT_CLASS_P (arg0)
14193 && can_native_interpret_type_p (type)
14194 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14195 /* This limitation should not be necessary; we just need to
14196 round this up to the mode size. */
14197 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14198 /* Need bit-shifting of the buffer to relax the following. */
14199 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14201 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14202 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14203 unsigned HOST_WIDE_INT clen;
14204 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14205 /* ??? We cannot tell native_encode_expr to start at
14206 some random byte only. So limit ourselves to a reasonable amount
14207 of work. */
14208 if (clen <= 4096)
14210 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14211 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14212 if (len > 0
14213 && len * BITS_PER_UNIT >= bitpos + bitsize)
14215 tree v = native_interpret_expr (type,
14216 b + bitpos / BITS_PER_UNIT,
14217 bitsize / BITS_PER_UNIT);
14218 if (v)
14219 return v;
14224 return NULL_TREE;
14226 case FMA_EXPR:
14227 /* For integers we can decompose the FMA if possible. */
14228 if (TREE_CODE (arg0) == INTEGER_CST
14229 && TREE_CODE (arg1) == INTEGER_CST)
14230 return fold_build2_loc (loc, PLUS_EXPR, type,
14231 const_binop (MULT_EXPR, arg0, arg1), arg2);
14232 if (integer_zerop (arg2))
14233 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14235 return fold_fma (loc, type, arg0, arg1, arg2);
14237 case VEC_PERM_EXPR:
14238 if (TREE_CODE (arg2) == VECTOR_CST)
14240 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14241 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14242 tree t;
14243 bool need_mask_canon = false;
14244 bool all_in_vec0 = true;
14245 bool all_in_vec1 = true;
14246 bool maybe_identity = true;
14247 bool single_arg = (op0 == op1);
14248 bool changed = false;
14250 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14251 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14252 for (i = 0; i < nelts; i++)
14254 tree val = VECTOR_CST_ELT (arg2, i);
14255 if (TREE_CODE (val) != INTEGER_CST)
14256 return NULL_TREE;
14258 sel[i] = TREE_INT_CST_LOW (val) & mask;
14259 if (TREE_INT_CST_HIGH (val)
14260 || ((unsigned HOST_WIDE_INT)
14261 TREE_INT_CST_LOW (val) != sel[i]))
14262 need_mask_canon = true;
14264 if (sel[i] < nelts)
14265 all_in_vec1 = false;
14266 else
14267 all_in_vec0 = false;
14269 if ((sel[i] & (nelts-1)) != i)
14270 maybe_identity = false;
14273 if (maybe_identity)
14275 if (all_in_vec0)
14276 return op0;
14277 if (all_in_vec1)
14278 return op1;
14281 if (all_in_vec0)
14282 op1 = op0;
14283 else if (all_in_vec1)
14285 op0 = op1;
14286 for (i = 0; i < nelts; i++)
14287 sel[i] -= nelts;
14288 need_mask_canon = true;
14291 if ((TREE_CODE (op0) == VECTOR_CST
14292 || TREE_CODE (op0) == CONSTRUCTOR)
14293 && (TREE_CODE (op1) == VECTOR_CST
14294 || TREE_CODE (op1) == CONSTRUCTOR))
14296 t = fold_vec_perm (type, op0, op1, sel);
14297 if (t != NULL_TREE)
14298 return t;
14301 if (op0 == op1 && !single_arg)
14302 changed = true;
14304 if (need_mask_canon && arg2 == op2)
14306 tree *tsel = XALLOCAVEC (tree, nelts);
14307 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14308 for (i = 0; i < nelts; i++)
14309 tsel[i] = build_int_cst (eltype, sel[i]);
14310 op2 = build_vector (TREE_TYPE (arg2), tsel);
14311 changed = true;
14314 if (changed)
14315 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14317 return NULL_TREE;
14319 default:
14320 return NULL_TREE;
14321 } /* switch (code) */
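/* Small VEC_PERM_EXPR folding examples (illustrative), for V4SI
   operands: the constant selector {4,5,6,7} picks only from the
   second vector and is the identity on it, so the result is simply
   op1; {0,1,2,3} likewise yields op0.  A selector such as
   {9,12,10,15} on distinct operands is canonicalized modulo
   2*nelts == 8 into {1,4,2,7}, so later passes always see an
   in-range mask.  */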
14324 /* Perform constant folding and related simplification of EXPR.
14325 The related simplifications include x*1 => x, x*0 => 0, etc.,
14326 and application of the associative law.
14327 NOP_EXPR conversions may be removed freely (as long as we
14328 are careful not to change the type of the overall expression).
14329 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14330 but we can constant-fold them if they have constant operands. */
14332 #ifdef ENABLE_FOLD_CHECKING
14333 # define fold(x) fold_1 (x)
14334 static tree fold_1 (tree);
14335 static
14336 #endif
14337 tree
14338 fold (tree expr)
14340 const tree t = expr;
14341 enum tree_code code = TREE_CODE (t);
14342 enum tree_code_class kind = TREE_CODE_CLASS (code);
14343 tree tem;
14344 location_t loc = EXPR_LOCATION (expr);
14346 /* Return right away if a constant. */
14347 if (kind == tcc_constant)
14348 return t;
14350 /* CALL_EXPR-like objects with variable numbers of operands are
14351 treated specially. */
14352 if (kind == tcc_vl_exp)
14354 if (code == CALL_EXPR)
14356 tem = fold_call_expr (loc, expr, false);
14357 return tem ? tem : expr;
14359 return expr;
14362 if (IS_EXPR_CODE_CLASS (kind))
14364 tree type = TREE_TYPE (t);
14365 tree op0, op1, op2;
14367 switch (TREE_CODE_LENGTH (code))
14369 case 1:
14370 op0 = TREE_OPERAND (t, 0);
14371 tem = fold_unary_loc (loc, code, type, op0);
14372 return tem ? tem : expr;
14373 case 2:
14374 op0 = TREE_OPERAND (t, 0);
14375 op1 = TREE_OPERAND (t, 1);
14376 tem = fold_binary_loc (loc, code, type, op0, op1);
14377 return tem ? tem : expr;
14378 case 3:
14379 op0 = TREE_OPERAND (t, 0);
14380 op1 = TREE_OPERAND (t, 1);
14381 op2 = TREE_OPERAND (t, 2);
14382 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14383 return tem ? tem : expr;
14384 default:
14385 break;
14389 switch (code)
14391 case ARRAY_REF:
14393 tree op0 = TREE_OPERAND (t, 0);
14394 tree op1 = TREE_OPERAND (t, 1);
14396 if (TREE_CODE (op1) == INTEGER_CST
14397 && TREE_CODE (op0) == CONSTRUCTOR
14398 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14400 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14401 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14402 unsigned HOST_WIDE_INT begin = 0;
14404 /* Find a matching index by means of a binary search. */
14405 while (begin != end)
14407 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14408 tree index = (*elts)[middle].index;
14410 if (TREE_CODE (index) == INTEGER_CST
14411 && tree_int_cst_lt (index, op1))
14412 begin = middle + 1;
14413 else if (TREE_CODE (index) == INTEGER_CST
14414 && tree_int_cst_lt (op1, index))
14415 end = middle;
14416 else if (TREE_CODE (index) == RANGE_EXPR
14417 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14418 begin = middle + 1;
14419 else if (TREE_CODE (index) == RANGE_EXPR
14420 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14421 end = middle;
14422 else
14423 return (*elts)[middle].value;
14427 return t;
14430 case CONST_DECL:
14431 return fold (DECL_INITIAL (t));
14433 default:
14434 return t;
14435 } /* switch (code) */
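/* For instance (illustrative), folding the ARRAY_REF

     (int[]){ [0] = 10, [4 ... 7] = 20, [9] = 30 }[5]

   binary-searches the sorted constructor indices, matches the
   RANGE_EXPR [4, 7] and yields 20; an index that falls into a hole,
   such as 8, exhausts the search and the expression is returned
   unfolded.  */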
14438 #ifdef ENABLE_FOLD_CHECKING
14439 #undef fold
14441 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14442 hash_table <pointer_hash <tree_node> >);
14443 static void fold_check_failed (const_tree, const_tree);
14444 void print_fold_checksum (const_tree);
14446 /* When configured with --enable-checking=fold, compute a digest of
14447 expr before and after the actual fold call, to verify that fold did
14448 not accidentally change the original expr. */
14450 tree
14451 fold (tree expr)
14453 tree ret;
14454 struct md5_ctx ctx;
14455 unsigned char checksum_before[16], checksum_after[16];
14456 hash_table <pointer_hash <tree_node> > ht;
14458 ht.create (32);
14459 md5_init_ctx (&ctx);
14460 fold_checksum_tree (expr, &ctx, ht);
14461 md5_finish_ctx (&ctx, checksum_before);
14462 ht.empty ();
14464 ret = fold_1 (expr);
14466 md5_init_ctx (&ctx);
14467 fold_checksum_tree (expr, &ctx, ht);
14468 md5_finish_ctx (&ctx, checksum_after);
14469 ht.dispose ();
14471 if (memcmp (checksum_before, checksum_after, 16))
14472 fold_check_failed (expr, ret);
14474 return ret;
14477 void
14478 print_fold_checksum (const_tree expr)
14480 struct md5_ctx ctx;
14481 unsigned char checksum[16], cnt;
14482 hash_table <pointer_hash <tree_node> > ht;
14484 ht.create (32);
14485 md5_init_ctx (&ctx);
14486 fold_checksum_tree (expr, &ctx, ht);
14487 md5_finish_ctx (&ctx, checksum);
14488 ht.dispose ();
14489 for (cnt = 0; cnt < 16; ++cnt)
14490 fprintf (stderr, "%02x", checksum[cnt]);
14491 putc ('\n', stderr);
14494 static void
14495 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14497 internal_error ("fold check: original tree changed by fold");
14500 static void
14501 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14502 hash_table <pointer_hash <tree_node> > ht)
14504 tree_node **slot;
14505 enum tree_code code;
14506 union tree_node buf;
14507 int i, len;
14509 recursive_label:
14510 if (expr == NULL)
14511 return;
14512 slot = ht.find_slot (expr, INSERT);
14513 if (*slot != NULL)
14514 return;
14515 *slot = CONST_CAST_TREE (expr);
14516 code = TREE_CODE (expr);
14517 if (TREE_CODE_CLASS (code) == tcc_declaration
14518 && DECL_ASSEMBLER_NAME_SET_P (expr))
14520 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14521 memcpy ((char *) &buf, expr, tree_size (expr));
14522 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14523 expr = (tree) &buf;
14525 else if (TREE_CODE_CLASS (code) == tcc_type
14526 && (TYPE_POINTER_TO (expr)
14527 || TYPE_REFERENCE_TO (expr)
14528 || TYPE_CACHED_VALUES_P (expr)
14529 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14530 || TYPE_NEXT_VARIANT (expr)))
14532 /* Allow these fields to be modified. */
14533 tree tmp;
14534 memcpy ((char *) &buf, expr, tree_size (expr));
14535 expr = tmp = (tree) &buf;
14536 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14537 TYPE_POINTER_TO (tmp) = NULL;
14538 TYPE_REFERENCE_TO (tmp) = NULL;
14539 TYPE_NEXT_VARIANT (tmp) = NULL;
14540 if (TYPE_CACHED_VALUES_P (tmp))
14542 TYPE_CACHED_VALUES_P (tmp) = 0;
14543 TYPE_CACHED_VALUES (tmp) = NULL;
14546 md5_process_bytes (expr, tree_size (expr), ctx);
14547 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14548 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14549 if (TREE_CODE_CLASS (code) != tcc_type
14550 && TREE_CODE_CLASS (code) != tcc_declaration
14551 && code != TREE_LIST
14552 && code != SSA_NAME
14553 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14554 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14555 switch (TREE_CODE_CLASS (code))
14557 case tcc_constant:
14558 switch (code)
14560 case STRING_CST:
14561 md5_process_bytes (TREE_STRING_POINTER (expr),
14562 TREE_STRING_LENGTH (expr), ctx);
14563 break;
14564 case COMPLEX_CST:
14565 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14566 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14567 break;
14568 case VECTOR_CST:
14569 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14570 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14571 break;
14572 default:
14573 break;
14575 break;
14576 case tcc_exceptional:
14577 switch (code)
14579 case TREE_LIST:
14580 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14581 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14582 expr = TREE_CHAIN (expr);
14583 goto recursive_label;
14584 break;
14585 case TREE_VEC:
14586 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14587 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14588 break;
14589 default:
14590 break;
14592 break;
14593 case tcc_expression:
14594 case tcc_reference:
14595 case tcc_comparison:
14596 case tcc_unary:
14597 case tcc_binary:
14598 case tcc_statement:
14599 case tcc_vl_exp:
14600 len = TREE_OPERAND_LENGTH (expr);
14601 for (i = 0; i < len; ++i)
14602 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14603 break;
14604 case tcc_declaration:
14605 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14606 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14607 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14609 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14610 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14611 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14612 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14613 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14615 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14616 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14618 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14620 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14621 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14622 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14624 break;
14625 case tcc_type:
14626 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14627 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14628 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14629 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14630 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14631 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14632 if (INTEGRAL_TYPE_P (expr)
14633 || SCALAR_FLOAT_TYPE_P (expr))
14635 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14636 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14638 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14639 if (TREE_CODE (expr) == RECORD_TYPE
14640 || TREE_CODE (expr) == UNION_TYPE
14641 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14642 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14643 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14644 break;
14645 default:
14646 break;
14650 /* Helper function for outputting the checksum of a tree T. When
14651 debugging with gdb, you can "define mynext" to be "next" followed
14652 by "call debug_fold_checksum (op0)", then just trace down till the
14653 outputs differ. */
14655 DEBUG_FUNCTION void
14656 debug_fold_checksum (const_tree t)
14658 int i;
14659 unsigned char checksum[16];
14660 struct md5_ctx ctx;
14661 hash_table <pointer_hash <tree_node> > ht;
14662 ht.create (32);
14664 md5_init_ctx (&ctx);
14665 fold_checksum_tree (t, &ctx, ht);
14666 md5_finish_ctx (&ctx, checksum);
14667 ht.empty ();
14669 for (i = 0; i < 16; i++)
14670 fprintf (stderr, "%d ", checksum[i]);
14672 fprintf (stderr, "\n");
14675 #endif
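/* The checking variant above is only compiled in when GCC is
   configured with --enable-checking=fold.  A typical (hypothetical)
   session after hitting "fold check: original tree changed by fold"
   narrows down the culprit from gdb:

     (gdb) call print_fold_checksum (op0)
     ...step over the suspect fold call...
     (gdb) call print_fold_checksum (op0)

   The first fold call after which the two digests differ is the one
   that scribbled on its input.  */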
14677 /* Fold a unary tree expression with code CODE of type TYPE with an
14678 operand OP0. LOC is the location of the resulting expression.
14679 Return a folded expression if successful. Otherwise, return a tree
14680 expression with code CODE of type TYPE with an operand OP0. */
14682 tree
14683 fold_build1_stat_loc (location_t loc,
14684 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14686 tree tem;
14687 #ifdef ENABLE_FOLD_CHECKING
14688 unsigned char checksum_before[16], checksum_after[16];
14689 struct md5_ctx ctx;
14690 hash_table <pointer_hash <tree_node> > ht;
14692 ht.create (32);
14693 md5_init_ctx (&ctx);
14694 fold_checksum_tree (op0, &ctx, ht);
14695 md5_finish_ctx (&ctx, checksum_before);
14696 ht.empty ();
14697 #endif
14699 tem = fold_unary_loc (loc, code, type, op0);
14700 if (!tem)
14701 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14703 #ifdef ENABLE_FOLD_CHECKING
14704 md5_init_ctx (&ctx);
14705 fold_checksum_tree (op0, &ctx, ht);
14706 md5_finish_ctx (&ctx, checksum_after);
14707 ht.dispose ();
14709 if (memcmp (checksum_before, checksum_after, 16))
14710 fold_check_failed (op0, tem);
14711 #endif
14712 return tem;
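/* A usage sketch (assuming the usual fold_build1_loc convenience
   macro that supplies MEM_STAT_INFO):

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, type, x);

   returns a folded INTEGER_CST when X is constant (e.g. -5 for
   X == 5) and a freshly built NEGATE_EXPR node otherwise, so callers
   never have to check for foldability themselves.  */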
14715 /* Fold a binary tree expression with code CODE of type TYPE with
14716 operands OP0 and OP1. LOC is the location of the resulting
14717 expression. Return a folded expression if successful. Otherwise,
14718 return a tree expression with code CODE of type TYPE with operands
14719 OP0 and OP1. */
14721 tree
14722 fold_build2_stat_loc (location_t loc,
14723 enum tree_code code, tree type, tree op0, tree op1
14724 MEM_STAT_DECL)
14726 tree tem;
14727 #ifdef ENABLE_FOLD_CHECKING
14728 unsigned char checksum_before_op0[16],
14729 checksum_before_op1[16],
14730 checksum_after_op0[16],
14731 checksum_after_op1[16];
14732 struct md5_ctx ctx;
14733 hash_table <pointer_hash <tree_node> > ht;
14735 ht.create (32);
14736 md5_init_ctx (&ctx);
14737 fold_checksum_tree (op0, &ctx, ht);
14738 md5_finish_ctx (&ctx, checksum_before_op0);
14739 ht.empty ();
14741 md5_init_ctx (&ctx);
14742 fold_checksum_tree (op1, &ctx, ht);
14743 md5_finish_ctx (&ctx, checksum_before_op1);
14744 ht.empty ();
14745 #endif
14747 tem = fold_binary_loc (loc, code, type, op0, op1);
14748 if (!tem)
14749 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14751 #ifdef ENABLE_FOLD_CHECKING
14752 md5_init_ctx (&ctx);
14753 fold_checksum_tree (op0, &ctx, ht);
14754 md5_finish_ctx (&ctx, checksum_after_op0);
14755 ht.empty ();
14757 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14758 fold_check_failed (op0, tem);
14760 md5_init_ctx (&ctx);
14761 fold_checksum_tree (op1, &ctx, ht);
14762 md5_finish_ctx (&ctx, checksum_after_op1);
14763 ht.dispose ();
14765 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14766 fold_check_failed (op1, tem);
14767 #endif
14768 return tem;
14771 /* Fold a ternary tree expression with code CODE of type TYPE with
14772 operands OP0, OP1, and OP2. Return a folded expression if
14773 successful. Otherwise, return a tree expression with code CODE of
14774 type TYPE with operands OP0, OP1, and OP2. */
14776 tree
14777 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14778 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14780 tree tem;
14781 #ifdef ENABLE_FOLD_CHECKING
14782 unsigned char checksum_before_op0[16],
14783 checksum_before_op1[16],
14784 checksum_before_op2[16],
14785 checksum_after_op0[16],
14786 checksum_after_op1[16],
14787 checksum_after_op2[16];
14788 struct md5_ctx ctx;
14789 hash_table <pointer_hash <tree_node> > ht;
14791 ht.create (32);
14792 md5_init_ctx (&ctx);
14793 fold_checksum_tree (op0, &ctx, ht);
14794 md5_finish_ctx (&ctx, checksum_before_op0);
14795 ht.empty ();
14797 md5_init_ctx (&ctx);
14798 fold_checksum_tree (op1, &ctx, ht);
14799 md5_finish_ctx (&ctx, checksum_before_op1);
14800 ht.empty ();
14802 md5_init_ctx (&ctx);
14803 fold_checksum_tree (op2, &ctx, ht);
14804 md5_finish_ctx (&ctx, checksum_before_op2);
14805 ht.empty ();
14806 #endif
14808 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14809 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14810 if (!tem)
14811 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14813 #ifdef ENABLE_FOLD_CHECKING
14814 md5_init_ctx (&ctx);
14815 fold_checksum_tree (op0, &ctx, ht);
14816 md5_finish_ctx (&ctx, checksum_after_op0);
14817 ht.empty ();
14819 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14820 fold_check_failed (op0, tem);
14822 md5_init_ctx (&ctx);
14823 fold_checksum_tree (op1, &ctx, ht);
14824 md5_finish_ctx (&ctx, checksum_after_op1);
14825 ht.empty ();
14827 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14828 fold_check_failed (op1, tem);
14830 md5_init_ctx (&ctx);
14831 fold_checksum_tree (op2, &ctx, ht);
14832 md5_finish_ctx (&ctx, checksum_after_op2);
14833 ht.dispose ();
14835 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14836 fold_check_failed (op2, tem);
14837 #endif
14838 return tem;
14841 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14842 arguments in ARGARRAY, and a null static chain.
14843 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14844 of type TYPE from the given operands as constructed by build_call_array. */
14846 tree
14847 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14848 int nargs, tree *argarray)
14850 tree tem;
14851 #ifdef ENABLE_FOLD_CHECKING
14852 unsigned char checksum_before_fn[16],
14853 checksum_before_arglist[16],
14854 checksum_after_fn[16],
14855 checksum_after_arglist[16];
14856 struct md5_ctx ctx;
14857 hash_table <pointer_hash <tree_node> > ht;
14858 int i;
14860 ht.create (32);
14861 md5_init_ctx (&ctx);
14862 fold_checksum_tree (fn, &ctx, ht);
14863 md5_finish_ctx (&ctx, checksum_before_fn);
14864 ht.empty ();
14866 md5_init_ctx (&ctx);
14867 for (i = 0; i < nargs; i++)
14868 fold_checksum_tree (argarray[i], &ctx, ht);
14869 md5_finish_ctx (&ctx, checksum_before_arglist);
14870 ht.empty ();
14871 #endif
14873 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14875 #ifdef ENABLE_FOLD_CHECKING
14876 md5_init_ctx (&ctx);
14877 fold_checksum_tree (fn, &ctx, ht);
14878 md5_finish_ctx (&ctx, checksum_after_fn);
14879 ht.empty ();
14881 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14882 fold_check_failed (fn, tem);
14884 md5_init_ctx (&ctx);
14885 for (i = 0; i < nargs; i++)
14886 fold_checksum_tree (argarray[i], &ctx, ht);
14887 md5_finish_ctx (&ctx, checksum_after_arglist);
14888 ht.dispose ();
14890 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14891 fold_check_failed (NULL_TREE, tem);
14892 #endif
14893 return tem;
14896 /* Perform constant folding and related simplification of initializer
14897 expression EXPR. These behave identically to "fold_buildN" but ignore
14898 potential run-time traps and exceptions that fold must preserve. */
14900 #define START_FOLD_INIT \
14901 int saved_signaling_nans = flag_signaling_nans;\
14902 int saved_trapping_math = flag_trapping_math;\
14903 int saved_rounding_math = flag_rounding_math;\
14904 int saved_trapv = flag_trapv;\
14905 int saved_folding_initializer = folding_initializer;\
14906 flag_signaling_nans = 0;\
14907 flag_trapping_math = 0;\
14908 flag_rounding_math = 0;\
14909 flag_trapv = 0;\
14910 folding_initializer = 1;
14912 #define END_FOLD_INIT \
14913 flag_signaling_nans = saved_signaling_nans;\
14914 flag_trapping_math = saved_trapping_math;\
14915 flag_rounding_math = saved_rounding_math;\
14916 flag_trapv = saved_trapv;\
14917 folding_initializer = saved_folding_initializer;
14919 tree
14920 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14921 tree type, tree op)
14923 tree result;
14924 START_FOLD_INIT;
14926 result = fold_build1_loc (loc, code, type, op);
14928 END_FOLD_INIT;
14929 return result;
14932 tree
14933 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14934 tree type, tree op0, tree op1)
14936 tree result;
14937 START_FOLD_INIT;
14939 result = fold_build2_loc (loc, code, type, op0, op1);
14941 END_FOLD_INIT;
14942 return result;
14945 tree
14946 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14947 tree type, tree op0, tree op1, tree op2)
14949 tree result;
14950 START_FOLD_INIT;
14952 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14954 END_FOLD_INIT;
14955 return result;
14958 tree
14959 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14960 int nargs, tree *argarray)
14962 tree result;
14963 START_FOLD_INIT;
14965 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14967 END_FOLD_INIT;
14968 return result;
14971 #undef START_FOLD_INIT
14972 #undef END_FOLD_INIT
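/* As an example of why the flag juggling above matters: for

     static double d = 1.0 / 3.0;

   the division must be folded at compile time even under
   -frounding-math or -ftrapping-math, because a static initializer
   cannot execute at run time.  START_FOLD_INIT therefore clears those
   flags (along with flag_signaling_nans and flag_trapv) for the
   duration of the fold, and END_FOLD_INIT restores them; the same
   division in a function body would be left for run time.  */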
14974 /* Determine if the first argument is a multiple of the second argument.
14975 Return 0 if it is not, or if we cannot easily determine that it is.
14977 An example of the sort of thing we care about (at this point; this routine
14978 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14979 fold cases do now) is discovering that
14981 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14983 is a multiple of
14985 SAVE_EXPR (J * 8)
14987 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14989 This code also handles discovering that
14991 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14993 is a multiple of 8 so we don't have to worry about dealing with a
14994 possible remainder.
14996 Note that we *look* inside a SAVE_EXPR only to determine how it was
14997 calculated; it is not safe for fold to do much of anything else with the
14998 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14999 at run time. For example, the latter example above *cannot* be implemented
15000 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15001 evaluation time of the original SAVE_EXPR is not necessarily the same at
15002 the time the new expression is evaluated. The only optimization of this
15003 sort that would be valid is changing
15005 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15007 divided by 8 to
15009 SAVE_EXPR (I) * SAVE_EXPR (J)
15011 (where the same SAVE_EXPR (J) is used in the original and the
15012 transformed version). */
15014 int
15015 multiple_of_p (tree type, const_tree top, const_tree bottom)
15017 if (operand_equal_p (top, bottom, 0))
15018 return 1;
15020 if (TREE_CODE (type) != INTEGER_TYPE)
15021 return 0;
15023 switch (TREE_CODE (top))
15025 case BIT_AND_EXPR:
15026 /* Bitwise and provides a power of two multiple. If the mask is
15027 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15028 if (!integer_pow2p (bottom))
15029 return 0;
15030 /* FALLTHRU */
15032 case MULT_EXPR:
15033 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15034 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15036 case PLUS_EXPR:
15037 case MINUS_EXPR:
15038 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15039 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15041 case LSHIFT_EXPR:
15042 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15044 tree op1, t1;
15046 op1 = TREE_OPERAND (top, 1);
15047 /* const_binop may not detect overflow correctly,
15048 so check for it explicitly here. */
15049 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15050 > TREE_INT_CST_LOW (op1)
15051 && TREE_INT_CST_HIGH (op1) == 0
15052 && 0 != (t1 = fold_convert (type,
15053 const_binop (LSHIFT_EXPR,
15054 size_one_node,
15055 op1)))
15056 && !TREE_OVERFLOW (t1))
15057 return multiple_of_p (type, t1, bottom);
15059 return 0;
15061 case NOP_EXPR:
15062 /* Can't handle conversions from non-integral or wider integral types. */
15063 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15064 || (TYPE_PRECISION (type)
15065 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15066 return 0;
15068 /* ... fall through ... */
15070 case SAVE_EXPR:
15071 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15073 case COND_EXPR:
15074 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15075 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15077 case INTEGER_CST:
15078 if (TREE_CODE (bottom) != INTEGER_CST
15079 || integer_zerop (bottom)
15080 || (TYPE_UNSIGNED (type)
15081 && (tree_int_cst_sgn (top) < 0
15082 || tree_int_cst_sgn (bottom) < 0)))
15083 return 0;
15084 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15085 top, bottom));
15087 default:
15088 return 0;
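/* A worked example, following the comment above the function: for

     TOP    = SAVE_EXPR (I) * SAVE_EXPR (J * 8)
     BOTTOM = 8

   the MULT_EXPR case asks whether either factor is a multiple of 8;
   the SAVE_EXPR case forwards to J * 8, whose MULT_EXPR case in turn
   succeeds through the INTEGER_CST case (8 % 8 == 0).  The whole
   product is thus known to be a multiple of 8 without evaluating
   I or J.  */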
15092 /* Return true if CODE or TYPE is known to be non-negative. */
15094 static bool
15095 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15097 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15098 && truth_value_p (code))
15099 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15100 have a signed:1 type (where the values are -1 and 0). */
15101 return true;
15102 return false;
15105 /* Return true if (CODE OP0) is known to be non-negative. If the return
15106 value is based on the assumption that signed overflow is undefined,
15107 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15108 *STRICT_OVERFLOW_P. */
15110 bool
15111 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15112 bool *strict_overflow_p)
15114 if (TYPE_UNSIGNED (type))
15115 return true;
15117 switch (code)
15119 case ABS_EXPR:
15120 /* We can't return true when wrapping overflow is allowed, because
15121 ABS_EXPR<INT_MIN> = INT_MIN. */
15122 if (!INTEGRAL_TYPE_P (type))
15123 return true;
15124 if (TYPE_OVERFLOW_UNDEFINED (type))
15126 *strict_overflow_p = true;
15127 return true;
15129 break;
15131 case NON_LVALUE_EXPR:
15132 case FLOAT_EXPR:
15133 case FIX_TRUNC_EXPR:
15134 return tree_expr_nonnegative_warnv_p (op0,
15135 strict_overflow_p);
15137 case NOP_EXPR:
15139 tree inner_type = TREE_TYPE (op0);
15140 tree outer_type = type;
15142 if (TREE_CODE (outer_type) == REAL_TYPE)
15144 if (TREE_CODE (inner_type) == REAL_TYPE)
15145 return tree_expr_nonnegative_warnv_p (op0,
15146 strict_overflow_p);
15147 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15149 if (TYPE_UNSIGNED (inner_type))
15150 return true;
15151 return tree_expr_nonnegative_warnv_p (op0,
15152 strict_overflow_p);
15155 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15157 if (TREE_CODE (inner_type) == REAL_TYPE)
15158 return tree_expr_nonnegative_warnv_p (op0,
15159 strict_overflow_p);
15160 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15161 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15162 && TYPE_UNSIGNED (inner_type);
15165 break;
15167 default:
15168 return tree_simple_nonnegative_warnv_p (code, type);
15171 /* We don't know the sign of the result, so be conservative and return false. */
15172 return false;
15175 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15176 value is based on the assumption that signed overflow is undefined,
15177 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15178 *STRICT_OVERFLOW_P. */
15180 bool
15181 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15182 tree op1, bool *strict_overflow_p)
15184 if (TYPE_UNSIGNED (type))
15185 return true;
15187 switch (code)
15189 case POINTER_PLUS_EXPR:
15190 case PLUS_EXPR:
15191 if (FLOAT_TYPE_P (type))
15192 return (tree_expr_nonnegative_warnv_p (op0,
15193 strict_overflow_p)
15194 && tree_expr_nonnegative_warnv_p (op1,
15195 strict_overflow_p));
15197 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15198 both unsigned and at least 2 bits shorter than the result. */
15199 if (TREE_CODE (type) == INTEGER_TYPE
15200 && TREE_CODE (op0) == NOP_EXPR
15201 && TREE_CODE (op1) == NOP_EXPR)
15203 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15204 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15205 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15206 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15208 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15209 TYPE_PRECISION (inner2)) + 1;
15210 return prec < TYPE_PRECISION (type);
15213 break;
15215 case MULT_EXPR:
15216 if (FLOAT_TYPE_P (type))
15218 /* x * x for floating point x is always non-negative. */
15219 if (operand_equal_p (op0, op1, 0))
15220 return true;
15221 return (tree_expr_nonnegative_warnv_p (op0,
15222 strict_overflow_p)
15223 && tree_expr_nonnegative_warnv_p (op1,
15224 strict_overflow_p));
15227 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15228 both unsigned and their combined precision is less than the result's. */
15229 if (TREE_CODE (type) == INTEGER_TYPE
15230 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15231 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15233 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15234 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15235 : TREE_TYPE (op0);
15236 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15237 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15238 : TREE_TYPE (op1);
15240 bool unsigned0 = TYPE_UNSIGNED (inner0);
15241 bool unsigned1 = TYPE_UNSIGNED (inner1);
15243 if (TREE_CODE (op0) == INTEGER_CST)
15244 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15246 if (TREE_CODE (op1) == INTEGER_CST)
15247 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15249 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15250 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15252 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15253 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15254 : TYPE_PRECISION (inner0);
15256 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15257 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15258 : TYPE_PRECISION (inner1);
15260 return precision0 + precision1 < TYPE_PRECISION (type);
15263 return false;
15265 case BIT_AND_EXPR:
15266 case MAX_EXPR:
15267 return (tree_expr_nonnegative_warnv_p (op0,
15268 strict_overflow_p)
15269 || tree_expr_nonnegative_warnv_p (op1,
15270 strict_overflow_p));
15272 case BIT_IOR_EXPR:
15273 case BIT_XOR_EXPR:
15274 case MIN_EXPR:
15275 case RDIV_EXPR:
15276 case TRUNC_DIV_EXPR:
15277 case CEIL_DIV_EXPR:
15278 case FLOOR_DIV_EXPR:
15279 case ROUND_DIV_EXPR:
15280 return (tree_expr_nonnegative_warnv_p (op0,
15281 strict_overflow_p)
15282 && tree_expr_nonnegative_warnv_p (op1,
15283 strict_overflow_p));
15285 case TRUNC_MOD_EXPR:
15286 case CEIL_MOD_EXPR:
15287 case FLOOR_MOD_EXPR:
15288 case ROUND_MOD_EXPR:
15289 return tree_expr_nonnegative_warnv_p (op0,
15290 strict_overflow_p);
15291 default:
15292 return tree_simple_nonnegative_warnv_p (code, type);
15295 /* We don't know the sign of the result, so be conservative and return false. */
15296 return false;
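/* A concrete instance of the precision checks above: with

     unsigned char a, b;        -- each promoted to int, precision 8
     int p = a * b;

   8 + 8 < 32 holds, so the product is at most 255 * 255 == 65025 and
   provably nonnegative.  The PLUS_EXPR rule needs one more bit of
   headroom (MAX (8, 8) + 1 < 32): an 8-bit + 8-bit sum fits in 9 bits
   and leaves the 32-bit sign bit clear.  */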
15299 /* Return true if T is known to be non-negative. If the return
15300 value is based on the assumption that signed overflow is undefined,
15301 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15302 *STRICT_OVERFLOW_P. */
15304 bool
15305 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15307 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15308 return true;
15310 switch (TREE_CODE (t))
15312 case INTEGER_CST:
15313 return tree_int_cst_sgn (t) >= 0;
15315 case REAL_CST:
15316 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15318 case FIXED_CST:
15319 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15321 case COND_EXPR:
15322 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15323 strict_overflow_p)
15324 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15325 strict_overflow_p));
15326 default:
15327 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15328 TREE_TYPE (t));
15330 /* We don't know the sign of `t', so be conservative and return false. */
15331 return false;
15334 /* Return true if T is known to be non-negative. If the return
15335 value is based on the assumption that signed overflow is undefined,
15336 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15337 *STRICT_OVERFLOW_P. */
15339 bool
15340 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15341 tree arg0, tree arg1, bool *strict_overflow_p)
15343 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15344 switch (DECL_FUNCTION_CODE (fndecl))
15346 CASE_FLT_FN (BUILT_IN_ACOS):
15347 CASE_FLT_FN (BUILT_IN_ACOSH):
15348 CASE_FLT_FN (BUILT_IN_CABS):
15349 CASE_FLT_FN (BUILT_IN_COSH):
15350 CASE_FLT_FN (BUILT_IN_ERFC):
15351 CASE_FLT_FN (BUILT_IN_EXP):
15352 CASE_FLT_FN (BUILT_IN_EXP10):
15353 CASE_FLT_FN (BUILT_IN_EXP2):
15354 CASE_FLT_FN (BUILT_IN_FABS):
15355 CASE_FLT_FN (BUILT_IN_FDIM):
15356 CASE_FLT_FN (BUILT_IN_HYPOT):
15357 CASE_FLT_FN (BUILT_IN_POW10):
15358 CASE_INT_FN (BUILT_IN_FFS):
15359 CASE_INT_FN (BUILT_IN_PARITY):
15360 CASE_INT_FN (BUILT_IN_POPCOUNT):
15361 case BUILT_IN_BSWAP32:
15362 case BUILT_IN_BSWAP64:
15363 /* Always true. */
15364 return true;
15366 CASE_FLT_FN (BUILT_IN_SQRT):
15367 /* sqrt(-0.0) is -0.0. */
15368 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15369 return true;
15370 return tree_expr_nonnegative_warnv_p (arg0,
15371 strict_overflow_p);
15373 CASE_FLT_FN (BUILT_IN_ASINH):
15374 CASE_FLT_FN (BUILT_IN_ATAN):
15375 CASE_FLT_FN (BUILT_IN_ATANH):
15376 CASE_FLT_FN (BUILT_IN_CBRT):
15377 CASE_FLT_FN (BUILT_IN_CEIL):
15378 CASE_FLT_FN (BUILT_IN_ERF):
15379 CASE_FLT_FN (BUILT_IN_EXPM1):
15380 CASE_FLT_FN (BUILT_IN_FLOOR):
15381 CASE_FLT_FN (BUILT_IN_FMOD):
15382 CASE_FLT_FN (BUILT_IN_FREXP):
15383 CASE_FLT_FN (BUILT_IN_ICEIL):
15384 CASE_FLT_FN (BUILT_IN_IFLOOR):
15385 CASE_FLT_FN (BUILT_IN_IRINT):
15386 CASE_FLT_FN (BUILT_IN_IROUND):
15387 CASE_FLT_FN (BUILT_IN_LCEIL):
15388 CASE_FLT_FN (BUILT_IN_LDEXP):
15389 CASE_FLT_FN (BUILT_IN_LFLOOR):
15390 CASE_FLT_FN (BUILT_IN_LLCEIL):
15391 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15392 CASE_FLT_FN (BUILT_IN_LLRINT):
15393 CASE_FLT_FN (BUILT_IN_LLROUND):
15394 CASE_FLT_FN (BUILT_IN_LRINT):
15395 CASE_FLT_FN (BUILT_IN_LROUND):
15396 CASE_FLT_FN (BUILT_IN_MODF):
15397 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15398 CASE_FLT_FN (BUILT_IN_RINT):
15399 CASE_FLT_FN (BUILT_IN_ROUND):
15400 CASE_FLT_FN (BUILT_IN_SCALB):
15401 CASE_FLT_FN (BUILT_IN_SCALBLN):
15402 CASE_FLT_FN (BUILT_IN_SCALBN):
15403 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15404 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15405 CASE_FLT_FN (BUILT_IN_SINH):
15406 CASE_FLT_FN (BUILT_IN_TANH):
15407 CASE_FLT_FN (BUILT_IN_TRUNC):
15408 /* True if the 1st argument is nonnegative. */
15409 return tree_expr_nonnegative_warnv_p (arg0,
15410 strict_overflow_p);
15412 CASE_FLT_FN (BUILT_IN_FMAX):
15413 /* True if the 1st OR 2nd arguments are nonnegative. */
15414 return (tree_expr_nonnegative_warnv_p (arg0,
15415 strict_overflow_p)
15416 || (tree_expr_nonnegative_warnv_p (arg1,
15417 strict_overflow_p)));
15419 CASE_FLT_FN (BUILT_IN_FMIN):
15420 /* True if the 1st AND 2nd arguments are nonnegative. */
15421 return (tree_expr_nonnegative_warnv_p (arg0,
15422 strict_overflow_p)
15423 && (tree_expr_nonnegative_warnv_p (arg1,
15424 strict_overflow_p)));
15426 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15427 /* True if the 2nd argument is nonnegative. */
15428 return tree_expr_nonnegative_warnv_p (arg1,
15429 strict_overflow_p);
15431 CASE_FLT_FN (BUILT_IN_POWI):
15432 /* True if the 1st argument is nonnegative or the second
15433 argument is an even integer. */
15434 if (TREE_CODE (arg1) == INTEGER_CST
15435 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15436 return true;
15437 return tree_expr_nonnegative_warnv_p (arg0,
15438 strict_overflow_p);
15440 CASE_FLT_FN (BUILT_IN_POW):
15441 /* True if the 1st argument is nonnegative or the second
15442 argument is an even integer valued real. */
15443 if (TREE_CODE (arg1) == REAL_CST)
15445 REAL_VALUE_TYPE c;
15446 HOST_WIDE_INT n;
15448 c = TREE_REAL_CST (arg1);
15449 n = real_to_integer (&c);
15450 if ((n & 1) == 0)
15452 REAL_VALUE_TYPE cint;
15453 real_from_integer (&cint, VOIDmode, n,
15454 n < 0 ? -1 : 0, 0);
15455 if (real_identical (&c, &cint))
15456 return true;
15459 return tree_expr_nonnegative_warnv_p (arg0,
15460 strict_overflow_p);
15462 default:
15463 break;
15465 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15466 type);
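/* Two instances of the rules above (illustrative): pow (x, 2.0) is
   known nonnegative for every x, because 2.0 is an even integer
   valued real, whereas pow (x, 3.0) is only nonnegative when x is;
   similarly copysign (x, y) takes its nonnegativity from y alone.  */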
15469 /* Return true if T is known to be non-negative. If the return
15470 value is based on the assumption that signed overflow is undefined,
15471 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15472 *STRICT_OVERFLOW_P. */
15474 bool
15475 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15477 enum tree_code code = TREE_CODE (t);
15478 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15479 return true;
15481 switch (code)
15483 case TARGET_EXPR:
15485 tree temp = TARGET_EXPR_SLOT (t);
15486 t = TARGET_EXPR_INITIAL (t);
15488 /* If the initializer is non-void, then it's a normal expression
15489 that will be assigned to the slot. */
15490 if (!VOID_TYPE_P (t))
15491 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15493 /* Otherwise, the initializer sets the slot in some way. One common
15494 way is an assignment statement at the end of the initializer. */
15495 while (1)
15497 if (TREE_CODE (t) == BIND_EXPR)
15498 t = expr_last (BIND_EXPR_BODY (t));
15499 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15500 || TREE_CODE (t) == TRY_CATCH_EXPR)
15501 t = expr_last (TREE_OPERAND (t, 0));
15502 else if (TREE_CODE (t) == STATEMENT_LIST)
15503 t = expr_last (t);
15504 else
15505 break;
15507 if (TREE_CODE (t) == MODIFY_EXPR
15508 && TREE_OPERAND (t, 0) == temp)
15509 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15510 strict_overflow_p);
15512 return false;
15515 case CALL_EXPR:
15517 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15518 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15520 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15521 get_callee_fndecl (t),
15522 arg0,
15523 arg1,
15524 strict_overflow_p);
15526 case COMPOUND_EXPR:
15527 case MODIFY_EXPR:
15528 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15529 strict_overflow_p);
15530 case BIND_EXPR:
15531 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15532 strict_overflow_p);
15533 case SAVE_EXPR:
15534 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15535 strict_overflow_p);
15537 default:
15538 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15539 TREE_TYPE (t));
15542 /* We don't know the sign of `t', so be conservative and return false. */
15543 return false;
15546 /* Return true if T is known to be non-negative. If the return
15547 value is based on the assumption that signed overflow is undefined,
15548 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15549 *STRICT_OVERFLOW_P. */
15551 bool
15552 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15554 enum tree_code code;
15555 if (t == error_mark_node)
15556 return false;
15558 code = TREE_CODE (t);
15559 switch (TREE_CODE_CLASS (code))
15561 case tcc_binary:
15562 case tcc_comparison:
15563 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15564 TREE_TYPE (t),
15565 TREE_OPERAND (t, 0),
15566 TREE_OPERAND (t, 1),
15567 strict_overflow_p);
15569 case tcc_unary:
15570 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15571 TREE_TYPE (t),
15572 TREE_OPERAND (t, 0),
15573 strict_overflow_p);
15575 case tcc_constant:
15576 case tcc_declaration:
15577 case tcc_reference:
15578 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15580 default:
15581 break;
15584 switch (code)
15586 case TRUTH_AND_EXPR:
15587 case TRUTH_OR_EXPR:
15588 case TRUTH_XOR_EXPR:
15589 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15590 TREE_TYPE (t),
15591 TREE_OPERAND (t, 0),
15592 TREE_OPERAND (t, 1),
15593 strict_overflow_p);
15594 case TRUTH_NOT_EXPR:
15595 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15596 TREE_TYPE (t),
15597 TREE_OPERAND (t, 0),
15598 strict_overflow_p);
15600 case COND_EXPR:
15601 case CONSTRUCTOR:
15602 case OBJ_TYPE_REF:
15603 case ASSERT_EXPR:
15604 case ADDR_EXPR:
15605 case WITH_SIZE_EXPR:
15606 case SSA_NAME:
15607 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15609 default:
15610 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15614 /* Return true if `t' is known to be non-negative. Handle warnings
15615 about undefined signed overflow. */
15617 bool
15618 tree_expr_nonnegative_p (tree t)
15620 bool ret, strict_overflow_p;
15622 strict_overflow_p = false;
15623 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15624 if (strict_overflow_p)
15625 fold_overflow_warning (("assuming signed overflow does not occur when "
15626 "determining that expression is always "
15627 "non-negative"),
15628 WARN_STRICT_OVERFLOW_MISC);
15629 return ret;
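/* For example, for a signed int N the expression ABS_EXPR <N> is
   nonnegative only on the assumption that signed overflow is
   undefined (under -fwrapv, ABS_EXPR <INT_MIN> is INT_MIN), so this
   wrapper first emits the -Wstrict-overflow note "assuming signed
   overflow does not occur ..." and then answers true.  */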
15633 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15634 For floating point we further ensure that T is not denormal.
15635 Similar logic is present in nonzero_address in rtlanal.c.
15637 If the return value is based on the assumption that signed overflow
15638 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15639 change *STRICT_OVERFLOW_P. */
15641 bool
15642 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15643 bool *strict_overflow_p)
15645 switch (code)
15647 case ABS_EXPR:
15648 return tree_expr_nonzero_warnv_p (op0,
15649 strict_overflow_p);
15651 case NOP_EXPR:
15653 tree inner_type = TREE_TYPE (op0);
15654 tree outer_type = type;
15656 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15657 && tree_expr_nonzero_warnv_p (op0,
15658 strict_overflow_p));
15660 break;
15662 case NON_LVALUE_EXPR:
15663 return tree_expr_nonzero_warnv_p (op0,
15664 strict_overflow_p);
15666 default:
15667 break;
15670 return false;
15673 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15674 For floating point we further ensure that T is not denormal.
15675 Similar logic is present in nonzero_address in rtlanal.c.
15677 If the return value is based on the assumption that signed overflow
15678 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15679 change *STRICT_OVERFLOW_P. */
15681 bool
15682 tree_binary_nonzero_warnv_p (enum tree_code code,
15683 tree type,
15684 tree op0,
15685 tree op1, bool *strict_overflow_p)
15687 bool sub_strict_overflow_p;
15688 switch (code)
15690 case POINTER_PLUS_EXPR:
15691 case PLUS_EXPR:
15692 if (TYPE_OVERFLOW_UNDEFINED (type))
15694 /* In the presence of negative values it is hard
15695 to say anything definite. */
15696 sub_strict_overflow_p = false;
15697 if (!tree_expr_nonnegative_warnv_p (op0,
15698 &sub_strict_overflow_p)
15699 || !tree_expr_nonnegative_warnv_p (op1,
15700 &sub_strict_overflow_p))
15701 return false;
15702 /* One of the operands must be positive and the other non-negative. */
15703 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15704 overflows, on a twos-complement machine the sum of two
15705 nonnegative numbers can never be zero. */
15706 return (tree_expr_nonzero_warnv_p (op0,
15707 strict_overflow_p)
15708 || tree_expr_nonzero_warnv_p (op1,
15709 strict_overflow_p));
15710 }
15711 break;
15713 case MULT_EXPR:
15714 if (TYPE_OVERFLOW_UNDEFINED (type))
15715 {
15716 if (tree_expr_nonzero_warnv_p (op0,
15717 strict_overflow_p)
15718 && tree_expr_nonzero_warnv_p (op1,
15719 strict_overflow_p))
15720 {
15721 *strict_overflow_p = true;
15722 return true;
15723 }
15724 }
15725 break;
15727 case MIN_EXPR:
15728 sub_strict_overflow_p = false;
15729 if (tree_expr_nonzero_warnv_p (op0,
15730 &sub_strict_overflow_p)
15731 && tree_expr_nonzero_warnv_p (op1,
15732 &sub_strict_overflow_p))
15733 {
15734 if (sub_strict_overflow_p)
15735 *strict_overflow_p = true;
15736 }
15737 break;
15739 case MAX_EXPR:
15740 sub_strict_overflow_p = false;
15741 if (tree_expr_nonzero_warnv_p (op0,
15742 &sub_strict_overflow_p))
15743 {
15744 if (sub_strict_overflow_p)
15745 *strict_overflow_p = true;
15747 /* When both operands are nonzero, then MAX must be too. */
15748 if (tree_expr_nonzero_warnv_p (op1,
15749 strict_overflow_p))
15750 return true;
15752 /* MAX where operand 0 is positive is positive. */
15753 return tree_expr_nonnegative_warnv_p (op0,
15754 strict_overflow_p);
15755 }
15756 /* MAX where operand 1 is positive is positive. */
15757 else if (tree_expr_nonzero_warnv_p (op1,
15758 &sub_strict_overflow_p)
15759 && tree_expr_nonnegative_warnv_p (op1,
15760 &sub_strict_overflow_p))
15761 {
15762 if (sub_strict_overflow_p)
15763 *strict_overflow_p = true;
15764 return true;
15765 }
15766 break;
15768 case BIT_IOR_EXPR:
15769 return (tree_expr_nonzero_warnv_p (op1,
15770 strict_overflow_p)
15771 || tree_expr_nonzero_warnv_p (op0,
15772 strict_overflow_p));
15774 default:
15775 break;
15776 }
15778 return false;
15779 }
15781 /* Return true when the single expression T (a constant, an address,
15782 or a COND_EXPR) is known to be nonzero. Similar logic is present
15783 in nonzero_address in rtlanal.c.
15785 If the return value is based on the assumption that signed overflow
15786 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15787 change *STRICT_OVERFLOW_P. */
15789 bool
15790 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15791 {
15792 bool sub_strict_overflow_p;
15793 switch (TREE_CODE (t))
15794 {
15795 case INTEGER_CST:
15796 return !integer_zerop (t);
15798 case ADDR_EXPR:
15799 {
15800 tree base = TREE_OPERAND (t, 0);
15801 if (!DECL_P (base))
15802 base = get_base_address (base);
15804 if (!base)
15805 return false;
15807 /* Weak declarations may link to NULL. Other things may also be NULL
15808 so protect with -fdelete-null-pointer-checks; but not variables
15809 allocated on the stack. */
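/* (Editor's example of the weak case: given
   'extern int w __attribute__ ((weak));', &w may resolve to NULL at
   link time, so (&w != 0) must not be folded to true.) */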
15810 if (DECL_P (base)
15811 && (flag_delete_null_pointer_checks
15812 || (DECL_CONTEXT (base)
15813 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15814 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15815 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15817 /* Constants are never weak. */
15818 if (CONSTANT_CLASS_P (base))
15819 return true;
15821 return false;
15822 }
15824 case COND_EXPR:
15825 sub_strict_overflow_p = false;
15826 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15827 &sub_strict_overflow_p)
15828 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15829 &sub_strict_overflow_p))
15830 {
15831 if (sub_strict_overflow_p)
15832 *strict_overflow_p = true;
15833 return true;
15834 }
15835 break;
15837 default:
15838 break;
15839 }
15840 return false;
15841 }
15843 /* Return true when the expression T is known to be nonzero.
15844 Similar logic is present in nonzero_address in rtlanal.c.
15847 If the return value is based on the assumption that signed overflow
15848 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15849 change *STRICT_OVERFLOW_P. */
15851 bool
15852 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15853 {
15854 tree type = TREE_TYPE (t);
15855 enum tree_code code;
15857 /* Doing something useful for floating point would need more work. */
15858 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15859 return false;
15861 code = TREE_CODE (t);
15862 switch (TREE_CODE_CLASS (code))
15863 {
15864 case tcc_unary:
15865 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15866 strict_overflow_p);
15867 case tcc_binary:
15868 case tcc_comparison:
15869 return tree_binary_nonzero_warnv_p (code, type,
15870 TREE_OPERAND (t, 0),
15871 TREE_OPERAND (t, 1),
15872 strict_overflow_p);
15873 case tcc_constant:
15874 case tcc_declaration:
15875 case tcc_reference:
15876 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15878 default:
15879 break;
15880 }
15882 switch (code)
15883 {
15884 case TRUTH_NOT_EXPR:
15885 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15886 strict_overflow_p);
15888 case TRUTH_AND_EXPR:
15889 case TRUTH_OR_EXPR:
15890 case TRUTH_XOR_EXPR:
15891 return tree_binary_nonzero_warnv_p (code, type,
15892 TREE_OPERAND (t, 0),
15893 TREE_OPERAND (t, 1),
15894 strict_overflow_p);
15896 case COND_EXPR:
15897 case CONSTRUCTOR:
15898 case OBJ_TYPE_REF:
15899 case ASSERT_EXPR:
15900 case ADDR_EXPR:
15901 case WITH_SIZE_EXPR:
15902 case SSA_NAME:
15903 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15905 case COMPOUND_EXPR:
15906 case MODIFY_EXPR:
15907 case BIND_EXPR:
15908 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15909 strict_overflow_p);
15911 case SAVE_EXPR:
15912 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15913 strict_overflow_p);
15915 case CALL_EXPR:
15916 return alloca_call_p (t);
15918 default:
15919 break;
15920 }
15921 return false;
15922 }
15924 /* Return true when T is known to be nonzero. Handle warnings about
15925 undefined signed overflow. */
15927 bool
15928 tree_expr_nonzero_p (tree t)
15929 {
15930 bool ret, strict_overflow_p;
15932 strict_overflow_p = false;
15933 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15934 if (strict_overflow_p)
15935 fold_overflow_warning (("assuming signed overflow does not occur when "
15936 "determining that expression is always "
15937 "non-zero"),
15938 WARN_STRICT_OVERFLOW_MISC);
15939 return ret;
15940 }
15942 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15943 attempt to fold the expression to a constant without modifying TYPE,
15944 OP0 or OP1.
15946 If the expression could be simplified to a constant, then return
15947 the constant. If the expression would not be simplified to a
15948 constant, then return NULL_TREE. */
15950 tree
15951 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15952 {
15953 tree tem = fold_binary (code, type, op0, op1);
15954 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15955 }
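/* Editor's usage sketch (hypothetical caller, not part of this file):
     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
   yields the INTEGER_CST 5, whereas folding PLUS_EXPR of a constant
   and a VAR_DECL yields NULL_TREE because the result is not
   TREE_CONSTANT. */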
15957 /* Given the components of a unary expression CODE, TYPE and OP0,
15958 attempt to fold the expression to a constant without modifying
15959 TYPE or OP0.
15961 If the expression could be simplified to a constant, then return
15962 the constant. If the expression would not be simplified to a
15963 constant, then return NULL_TREE. */
15965 tree
15966 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15967 {
15968 tree tem = fold_unary (code, type, op0);
15969 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15970 }
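/* Likewise (editor's sketch): fold_unary_to_constant (NEGATE_EXPR,
   integer_type_node, build_int_cst (integer_type_node, 7)) folds to
   the INTEGER_CST -7. */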
15972 /* If EXP represents referencing an element in a constant string
15973 (either via pointer arithmetic or array indexing), return the
15974 tree representing the value accessed, otherwise return NULL. */
15976 tree
15977 fold_read_from_constant_string (tree exp)
15978 {
15979 if ((TREE_CODE (exp) == INDIRECT_REF
15980 || TREE_CODE (exp) == ARRAY_REF)
15981 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15982 {
15983 tree exp1 = TREE_OPERAND (exp, 0);
15984 tree index;
15985 tree string;
15986 location_t loc = EXPR_LOCATION (exp);
15988 if (TREE_CODE (exp) == INDIRECT_REF)
15989 string = string_constant (exp1, &index);
15990 else
15991 {
15992 tree low_bound = array_ref_low_bound (exp);
15993 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15995 /* Optimize the special case of a zero lower bound.
15997 We convert the low_bound to sizetype to avoid some problems
15998 with constant folding. (E.g. suppose the lower bound is 1,
15999 and its mode is QI. Without the conversion, (ARRAY
16000 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16001 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16002 if (! integer_zerop (low_bound))
16003 index = size_diffop_loc (loc, index,
16004 fold_convert_loc (loc, sizetype, low_bound));
16006 string = exp1;
16007 }
16009 if (string
16010 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16011 && TREE_CODE (string) == STRING_CST
16012 && TREE_CODE (index) == INTEGER_CST
16013 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16014 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16015 == MODE_INT)
16016 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16017 return build_int_cst_type (TREE_TYPE (exp),
16018 (TREE_STRING_POINTER (string)
16019 [TREE_INT_CST_LOW (index)]));
16020 }
16021 return NULL;
16022 }
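/* Editor's example (illustrative): for EXP representing "abc"[1] with
   a zero lower bound, the routine returns build_int_cst_type of the
   character 'b'; an out-of-range index such as "abc"[7] fails the
   TREE_STRING_LENGTH check and yields NULL. */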
16024 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16025 an integer constant, real, or fixed-point constant.
16027 TYPE is the type of the result. */
16029 static tree
16030 fold_negate_const (tree arg0, tree type)
16031 {
16032 tree t = NULL_TREE;
16034 switch (TREE_CODE (arg0))
16035 {
16036 case INTEGER_CST:
16037 {
16038 double_int val = tree_to_double_int (arg0);
16039 bool overflow;
16040 val = val.neg_with_overflow (&overflow);
16041 t = force_fit_type_double (type, val, 1,
16042 (overflow | TREE_OVERFLOW (arg0))
16043 && !TYPE_UNSIGNED (type));
16044 break;
16045 }
16047 case REAL_CST:
16048 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16049 break;
16051 case FIXED_CST:
16052 {
16053 FIXED_VALUE_TYPE f;
16054 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16055 &(TREE_FIXED_CST (arg0)), NULL,
16056 TYPE_SATURATING (type));
16057 t = build_fixed (type, f);
16058 /* Propagate overflow flags. */
16059 if (overflow_p | TREE_OVERFLOW (arg0))
16060 TREE_OVERFLOW (t) = 1;
16061 break;
16062 }
16064 default:
16065 gcc_unreachable ();
16066 }
16068 return t;
16069 }
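/* (Editor's note: for the most negative INTEGER_CST of a signed type,
   e.g. -128 in a signed 8-bit type, the negation wraps back to -128
   and force_fit_type_double marks the result with TREE_OVERFLOW.) */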
16071 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16072 an integer constant or real constant.
16074 TYPE is the type of the result. */
16076 tree
16077 fold_abs_const (tree arg0, tree type)
16078 {
16079 tree t = NULL_TREE;
16081 switch (TREE_CODE (arg0))
16082 {
16083 case INTEGER_CST:
16084 {
16085 double_int val = tree_to_double_int (arg0);
16087 /* If the value is unsigned or non-negative, then the absolute value
16088 is the same as the ordinary value. */
16089 if (TYPE_UNSIGNED (type)
16090 || !val.is_negative ())
16091 t = arg0;
16093 /* If the value is negative, then the absolute value is
16094 its negation. */
16095 else
16096 {
16097 bool overflow;
16098 val = val.neg_with_overflow (&overflow);
16099 t = force_fit_type_double (type, val, -1,
16100 overflow | TREE_OVERFLOW (arg0));
16101 }
16102 }
16103 break;
16105 case REAL_CST:
16106 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16107 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16108 else
16109 t = arg0;
16110 break;
16112 default:
16113 gcc_unreachable ();
16114 }
16116 return t;
16117 }
16119 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16120 constant. TYPE is the type of the result. */
16122 static tree
16123 fold_not_const (const_tree arg0, tree type)
16124 {
16125 double_int val;
16127 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16129 val = ~tree_to_double_int (arg0);
16130 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16131 }
16133 /* Given CODE, a relational operator, the target type, TYPE and two
16134 constant operands OP0 and OP1, return the result of the
16135 relational operation. If the result is not a compile time
16136 constant, then return NULL_TREE. */
16138 static tree
16139 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16140 {
16141 int result, invert;
16143 /* From here on, the only cases we handle are when the result is
16144 known to be a constant. */
16146 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16147 {
16148 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16149 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16151 /* Handle the cases where either operand is a NaN. */
16152 if (real_isnan (c0) || real_isnan (c1))
16153 {
16154 switch (code)
16155 {
16156 case EQ_EXPR:
16157 case ORDERED_EXPR:
16158 result = 0;
16159 break;
16161 case NE_EXPR:
16162 case UNORDERED_EXPR:
16163 case UNLT_EXPR:
16164 case UNLE_EXPR:
16165 case UNGT_EXPR:
16166 case UNGE_EXPR:
16167 case UNEQ_EXPR:
16168 result = 1;
16169 break;
16171 case LT_EXPR:
16172 case LE_EXPR:
16173 case GT_EXPR:
16174 case GE_EXPR:
16175 case LTGT_EXPR:
16176 if (flag_trapping_math)
16177 return NULL_TREE;
16178 result = 0;
16179 break;
16181 default:
16182 gcc_unreachable ();
16183 }
16185 return constant_boolean_node (result, type);
16186 }
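/* (Editor's example: with c0 a NaN, 'NaN == 1.0' folds to false and
   'NaN != 1.0' folds to true, while ordered comparisons such as
   'NaN < 1.0' are folded to false only when -ftrapping-math is off,
   since the comparison would raise an invalid-operand exception.) */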
16188 return constant_boolean_node (real_compare (code, c0, c1), type);
16189 }
16191 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16192 {
16193 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16194 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16195 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16196 }
16198 /* Handle equality/inequality of complex constants. */
16199 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16200 {
16201 tree rcond = fold_relational_const (code, type,
16202 TREE_REALPART (op0),
16203 TREE_REALPART (op1));
16204 tree icond = fold_relational_const (code, type,
16205 TREE_IMAGPART (op0),
16206 TREE_IMAGPART (op1));
16207 if (code == EQ_EXPR)
16208 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16209 else if (code == NE_EXPR)
16210 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16211 else
16212 return NULL_TREE;
16213 }
16215 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16216 {
16217 unsigned count = VECTOR_CST_NELTS (op0);
16218 tree *elts = XALLOCAVEC (tree, count);
16219 gcc_assert (VECTOR_CST_NELTS (op1) == count
16220 && TYPE_VECTOR_SUBPARTS (type) == count);
16222 for (unsigned i = 0; i < count; i++)
16223 {
16224 tree elem_type = TREE_TYPE (type);
16225 tree elem0 = VECTOR_CST_ELT (op0, i);
16226 tree elem1 = VECTOR_CST_ELT (op1, i);
16228 tree tem = fold_relational_const (code, elem_type,
16229 elem0, elem1);
16231 if (tem == NULL_TREE)
16232 return NULL_TREE;
16234 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16235 }
16237 return build_vector (type, elts);
16238 }
16240 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16242 To compute GT, swap the arguments and do LT.
16243 To compute GE, do LT and invert the result.
16244 To compute LE, swap the arguments, do LT and invert the result.
16245 To compute NE, do EQ and invert the result.
16247 Therefore, the code below must handle only EQ and LT. */
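/* (Editor's worked example: 3 <= 5 is first turned into 5 >= 3 by the
   argument swap, then GE becomes LT with INVERT set; 5 < 3 computes
   to 0 and the inversion yields 1, i.e. true.) */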
16249 if (code == LE_EXPR || code == GT_EXPR)
16250 {
16251 tree tem = op0;
16252 op0 = op1;
16253 op1 = tem;
16254 code = swap_tree_comparison (code);
16255 }
16257 /* Note that it is safe to invert for real values here because we
16258 have already handled the one case where it matters. */
16260 invert = 0;
16261 if (code == NE_EXPR || code == GE_EXPR)
16262 {
16263 invert = 1;
16264 code = invert_tree_comparison (code, false);
16265 }
16267 /* Compute a result for LT or EQ if args permit;
16268 otherwise return NULL_TREE. */
16269 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16270 {
16271 if (code == EQ_EXPR)
16272 result = tree_int_cst_equal (op0, op1);
16273 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16274 result = INT_CST_LT_UNSIGNED (op0, op1);
16275 else
16276 result = INT_CST_LT (op0, op1);
16277 }
16278 else
16279 return NULL_TREE;
16281 if (invert)
16282 result ^= 1;
16283 return constant_boolean_node (result, type);
16284 }
16286 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16287 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16288 itself. */
16290 tree
16291 fold_build_cleanup_point_expr (tree type, tree expr)
16292 {
16293 /* If the expression does not have side effects then we don't have to wrap
16294 it with a cleanup point expression. */
16295 if (!TREE_SIDE_EFFECTS (expr))
16296 return expr;
16298 /* If the expression is a return, check whether the expression inside the
16299 return, or the right-hand side of the MODIFY_EXPR inside the return, has
16300 no side effects. If either lacks side effects, we don't need to wrap
16301 the expression in a cleanup point expression. Note we don't check the
16302 left-hand side of the modify because it should always be the return decl. */
16303 if (TREE_CODE (expr) == RETURN_EXPR)
16304 {
16305 tree op = TREE_OPERAND (expr, 0);
16306 if (!op || !TREE_SIDE_EFFECTS (op))
16307 return expr;
16308 op = TREE_OPERAND (op, 1);
16309 if (!TREE_SIDE_EFFECTS (op))
16310 return expr;
16311 }
16313 return build1 (CLEANUP_POINT_EXPR, type, expr);
16314 }
16316 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16317 of an indirection through OP0, or NULL_TREE if no simplification is
16318 possible. */
16320 tree
16321 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16322 {
16323 tree sub = op0;
16324 tree subtype;
16326 STRIP_NOPS (sub);
16327 subtype = TREE_TYPE (sub);
16328 if (!POINTER_TYPE_P (subtype))
16329 return NULL_TREE;
16331 if (TREE_CODE (sub) == ADDR_EXPR)
16332 {
16333 tree op = TREE_OPERAND (sub, 0);
16334 tree optype = TREE_TYPE (op);
16335 /* *&CONST_DECL -> to the value of the const decl. */
16336 if (TREE_CODE (op) == CONST_DECL)
16337 return DECL_INITIAL (op);
16338 /* *&p => p; make sure to handle *&"str"[cst] here. */
16339 if (type == optype)
16340 {
16341 tree fop = fold_read_from_constant_string (op);
16342 if (fop)
16343 return fop;
16344 else
16345 return op;
16346 }
16347 /* *(foo *)&fooarray => fooarray[0] */
16348 else if (TREE_CODE (optype) == ARRAY_TYPE
16349 && type == TREE_TYPE (optype)
16350 && (!in_gimple_form
16351 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16352 {
16353 tree type_domain = TYPE_DOMAIN (optype);
16354 tree min_val = size_zero_node;
16355 if (type_domain && TYPE_MIN_VALUE (type_domain))
16356 min_val = TYPE_MIN_VALUE (type_domain);
16357 if (in_gimple_form
16358 && TREE_CODE (min_val) != INTEGER_CST)
16359 return NULL_TREE;
16360 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16361 NULL_TREE, NULL_TREE);
16362 }
16363 /* *(foo *)&complexfoo => __real__ complexfoo */
16364 else if (TREE_CODE (optype) == COMPLEX_TYPE
16365 && type == TREE_TYPE (optype))
16366 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16367 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16368 else if (TREE_CODE (optype) == VECTOR_TYPE
16369 && type == TREE_TYPE (optype))
16370 {
16371 tree part_width = TYPE_SIZE (type);
16372 tree index = bitsize_int (0);
16373 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16374 }
16375 }
16377 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16378 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16379 {
16380 tree op00 = TREE_OPERAND (sub, 0);
16381 tree op01 = TREE_OPERAND (sub, 1);
16383 STRIP_NOPS (op00);
16384 if (TREE_CODE (op00) == ADDR_EXPR)
16385 {
16386 tree op00type;
16387 op00 = TREE_OPERAND (op00, 0);
16388 op00type = TREE_TYPE (op00);
16390 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16391 if (TREE_CODE (op00type) == VECTOR_TYPE
16392 && type == TREE_TYPE (op00type))
16393 {
16394 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16395 tree part_width = TYPE_SIZE (type);
16396 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16397 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16398 tree index = bitsize_int (indexi);
16400 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16401 return fold_build3_loc (loc,
16402 BIT_FIELD_REF, type, op00,
16403 part_width, index);
16404 }
16406 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16407 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16408 && type == TREE_TYPE (op00type))
16409 {
16410 tree size = TYPE_SIZE_UNIT (type);
16411 if (tree_int_cst_equal (size, op01))
16412 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16413 }
16414 /* ((foo *)&fooarray)[1] => fooarray[1] */
16415 else if (TREE_CODE (op00type) == ARRAY_TYPE
16416 && type == TREE_TYPE (op00type))
16417 {
16418 tree type_domain = TYPE_DOMAIN (op00type);
16419 tree min_val = size_zero_node;
16420 if (type_domain && TYPE_MIN_VALUE (type_domain))
16421 min_val = TYPE_MIN_VALUE (type_domain);
16422 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16423 TYPE_SIZE_UNIT (type));
16424 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16425 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16426 NULL_TREE, NULL_TREE);
16427 }
16428 }
16429 }
16431 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16432 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16433 && type == TREE_TYPE (TREE_TYPE (subtype))
16434 && (!in_gimple_form
16435 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16436 {
16437 tree type_domain;
16438 tree min_val = size_zero_node;
16439 sub = build_fold_indirect_ref_loc (loc, sub);
16440 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16441 if (type_domain && TYPE_MIN_VALUE (type_domain))
16442 min_val = TYPE_MIN_VALUE (type_domain);
16443 if (in_gimple_form
16444 && TREE_CODE (min_val) != INTEGER_CST)
16445 return NULL_TREE;
16446 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16447 NULL_TREE);
16448 }
16450 return NULL_TREE;
16451 }
16453 /* Builds an expression for an indirection through T, simplifying some
16454 cases. */
16456 tree
16457 build_fold_indirect_ref_loc (location_t loc, tree t)
16458 {
16459 tree type = TREE_TYPE (TREE_TYPE (t));
16460 tree sub = fold_indirect_ref_1 (loc, type, t);
16462 if (sub)
16463 return sub;
16465 return build1_loc (loc, INDIRECT_REF, type, t);
16466 }
16468 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16470 tree
16471 fold_indirect_ref_loc (location_t loc, tree t)
16472 {
16473 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16475 if (sub)
16476 return sub;
16477 else
16478 return t;
16479 }
16481 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16482 whose result is ignored. The type of the returned tree need not be
16483 the same as the original expression. */
16485 tree
16486 fold_ignored_result (tree t)
16487 {
16488 if (!TREE_SIDE_EFFECTS (t))
16489 return integer_zero_node;
16491 for (;;)
16492 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16493 {
16494 case tcc_unary:
16495 t = TREE_OPERAND (t, 0);
16496 break;
16498 case tcc_binary:
16499 case tcc_comparison:
16500 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16501 t = TREE_OPERAND (t, 0);
16502 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16503 t = TREE_OPERAND (t, 1);
16504 else
16505 return t;
16506 break;
16508 case tcc_expression:
16509 switch (TREE_CODE (t))
16510 {
16511 case COMPOUND_EXPR:
16512 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16513 return t;
16514 t = TREE_OPERAND (t, 0);
16515 break;
16517 case COND_EXPR:
16518 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16519 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16520 return t;
16521 t = TREE_OPERAND (t, 0);
16522 break;
16524 default:
16525 return t;
16526 }
16527 break;
16529 default:
16530 return t;
16531 }
16532 }
16534 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16535 This can only be applied to objects of a sizetype. */
16537 tree
16538 round_up_loc (location_t loc, tree value, int divisor)
16539 {
16540 tree div = NULL_TREE;
16542 gcc_assert (divisor > 0);
16543 if (divisor == 1)
16544 return value;
16546 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16547 have to do anything. Only do this when we are not given a const,
16548 because in that case, this check is more expensive than just
16549 doing the rounding. */
16550 if (TREE_CODE (value) != INTEGER_CST)
16551 {
16552 div = build_int_cst (TREE_TYPE (value), divisor);
16554 if (multiple_of_p (TREE_TYPE (value), value, div))
16555 return value;
16556 }
16558 /* If divisor is a power of two, simplify this to bit manipulation. */
16559 if (divisor == (divisor & -divisor))
16560 {
16561 if (TREE_CODE (value) == INTEGER_CST)
16562 {
16563 double_int val = tree_to_double_int (value);
16564 bool overflow_p;
16566 if ((val.low & (divisor - 1)) == 0)
16567 return value;
16569 overflow_p = TREE_OVERFLOW (value);
16570 val.low &= ~(divisor - 1);
16571 val.low += divisor;
16572 if (val.low == 0)
16573 {
16574 val.high++;
16575 if (val.high == 0)
16576 overflow_p = true;
16577 }
16579 return force_fit_type_double (TREE_TYPE (value), val,
16580 -1, overflow_p);
16581 }
16582 else
16583 {
16584 tree t;
16586 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16587 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16588 t = build_int_cst (TREE_TYPE (value), -divisor);
16589 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16590 }
16591 }
16592 else
16593 {
16594 if (!div)
16595 div = build_int_cst (TREE_TYPE (value), divisor);
16596 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16597 value = size_binop_loc (loc, MULT_EXPR, value, div);
16598 }
16600 return value;
16601 }
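/* Editor's arithmetic sketch: round_up_loc (loc, size_int (37), 8)
   takes the power-of-two path, (37 + 7) & -8 = 40; a non-power-of-two
   divisor such as 12 instead computes CEIL_DIV then MULT:
   ceil (37 / 12) * 12 = 48. */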
16603 /* Likewise, but round down. */
16605 tree
16606 round_down_loc (location_t loc, tree value, int divisor)
16607 {
16608 tree div = NULL_TREE;
16610 gcc_assert (divisor > 0);
16611 if (divisor == 1)
16612 return value;
16614 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16615 have to do anything. Only do this when we are not given a const,
16616 because in that case, this check is more expensive than just
16617 doing the rounding. */
16618 if (TREE_CODE (value) != INTEGER_CST)
16619 {
16620 div = build_int_cst (TREE_TYPE (value), divisor);
16622 if (multiple_of_p (TREE_TYPE (value), value, div))
16623 return value;
16624 }
16626 /* If divisor is a power of two, simplify this to bit manipulation. */
16627 if (divisor == (divisor & -divisor))
16628 {
16629 tree t;
16631 t = build_int_cst (TREE_TYPE (value), -divisor);
16632 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16633 }
16634 else
16635 {
16636 if (!div)
16637 div = build_int_cst (TREE_TYPE (value), divisor);
16638 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16639 value = size_binop_loc (loc, MULT_EXPR, value, div);
16640 }
16642 return value;
16643 }
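/* Likewise (editor's sketch): round_down_loc (loc, size_int (37), 8)
   folds to 37 & -8 = 32, and with divisor 12 to (37 / 12) * 12 = 36. */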
16645 /* Returns the pointer to the base of the object addressed by EXP and
16646 extracts the information about the offset of the access, storing it
16647 in *PBITPOS and *POFFSET. */
16649 static tree
16650 split_address_to_core_and_offset (tree exp,
16651 HOST_WIDE_INT *pbitpos, tree *poffset)
16652 {
16653 tree core;
16654 enum machine_mode mode;
16655 int unsignedp, volatilep;
16656 HOST_WIDE_INT bitsize;
16657 location_t loc = EXPR_LOCATION (exp);
16659 if (TREE_CODE (exp) == ADDR_EXPR)
16660 {
16661 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16662 poffset, &mode, &unsignedp, &volatilep,
16663 false);
16664 core = build_fold_addr_expr_loc (loc, core);
16665 }
16666 else
16667 {
16668 core = exp;
16669 *pbitpos = 0;
16670 *poffset = NULL_TREE;
16671 }
16673 return core;
16674 }
16676 /* Returns true if addresses of E1 and E2 differ by a constant, false
16677 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16679 bool
16680 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16681 {
16682 tree core1, core2;
16683 HOST_WIDE_INT bitpos1, bitpos2;
16684 tree toffset1, toffset2, tdiff, type;
16686 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16687 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16689 if (bitpos1 % BITS_PER_UNIT != 0
16690 || bitpos2 % BITS_PER_UNIT != 0
16691 || !operand_equal_p (core1, core2, 0))
16692 return false;
16694 if (toffset1 && toffset2)
16695 {
16696 type = TREE_TYPE (toffset1);
16697 if (type != TREE_TYPE (toffset2))
16698 toffset2 = fold_convert (type, toffset2);
16700 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16701 if (!cst_and_fits_in_hwi (tdiff))
16702 return false;
16704 *diff = int_cst_value (tdiff);
16705 }
16706 else if (toffset1 || toffset2)
16707 {
16708 /* If only one of the offsets is non-constant, the difference cannot
16709 be a constant. */
16710 return false;
16711 }
16712 else
16713 *diff = 0;
16715 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16716 return true;
16717 }
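/* Editor's example (illustrative): given 'int a[10];' and ADDR_EXPR
   trees for &a[4] and &a[1], ptr_difference_const stores 12 in *DIFF
   on a target with 4-byte int: both cores are 'a' and the bit
   positions differ by 3 * 32 bits, i.e. 12 bytes. */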
16719 /* Simplify the floating point expression EXP when the sign of the
16720 result is not significant. Return NULL_TREE if no simplification
16721 is possible. */
16723 tree
16724 fold_strip_sign_ops (tree exp)
16725 {
16726 tree arg0, arg1;
16727 location_t loc = EXPR_LOCATION (exp);
16729 switch (TREE_CODE (exp))
16730 {
16731 case ABS_EXPR:
16732 case NEGATE_EXPR:
16733 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16734 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16736 case MULT_EXPR:
16737 case RDIV_EXPR:
16738 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16739 return NULL_TREE;
16740 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16741 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16742 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16743 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16744 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16745 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16746 break;
16748 case COMPOUND_EXPR:
16749 arg0 = TREE_OPERAND (exp, 0);
16750 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16751 if (arg1)
16752 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16753 break;
16755 case COND_EXPR:
16756 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16757 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16758 if (arg0 || arg1)
16759 return fold_build3_loc (loc,
16760 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16761 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16762 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16763 break;
16765 case CALL_EXPR:
16766 {
16767 const enum built_in_function fcode = builtin_mathfn_code (exp);
16768 switch (fcode)
16769 {
16770 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16771 /* Strip copysign function call, return the 1st argument. */
16772 arg0 = CALL_EXPR_ARG (exp, 0);
16773 arg1 = CALL_EXPR_ARG (exp, 1);
16774 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16776 default:
16777 /* Strip sign ops from the argument of "odd" math functions. */
16778 if (negate_mathfn_p (fcode))
16779 {
16780 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16781 if (arg0)
16782 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16783 }
16784 break;
16785 }
16787 break;
16789 default:
16790 break;
16791 }
16792 return NULL_TREE;
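/* (Editor's note: when only the magnitude of the result matters, this
   lets -x * y simplify to x * y; likewise copysign (x, y) reduces to
   x, and sin (-x) to sin (x), since sin is one of the "odd" functions
   negate_mathfn_p accepts.) */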