ChangeLog/
[official-gcc.git] / gcc / fold-const.c
blobe3e4151ae60a42ae1210500ad603d8606d71884d
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hash-table.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero
66 otherwise. */
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static bool simple_operand_p_2 (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
120 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
128 tree, tree,
129 tree, tree, int);
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
132 tree, tree, tree);
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
144 static location_t
145 expr_location_or (tree t, location_t loc)
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
154 static inline tree
155 protected_set_expr_location_unshare (tree x, location_t loc)
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
163 x = copy_node (x);
164 SET_EXPR_LOCATION (x, loc);
166 return x;
169 /* If ARG2 divides ARG1 with zero remainder, carries out the division
170 of type CODE and returns the quotient.
171 Otherwise returns NULL_TREE. */
173 tree
174 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
176 double_int quo, rem;
177 int uns;
179 /* The sign of the division is according to operand two, that
180 does the correct thing for POINTER_PLUS_EXPR where we want
181 a signed division. */
182 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
184 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
185 uns, code, &rem);
187 if (rem.is_zero ())
188 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
190 return NULL_TREE;
193 /* This is nonzero if we should defer warnings about undefined
194 overflow. This facility exists because these warnings are a
195 special case. The code to estimate loop iterations does not want
196 to issue any warnings, since it works with expressions which do not
197 occur in user code. Various bits of cleanup code call fold(), but
198 only use the result if it has certain characteristics (e.g., is a
199 constant); that code only wants to issue a warning if the result is
200 used. */
202 static int fold_deferring_overflow_warnings;
204 /* If a warning about undefined overflow is deferred, this is the
205 warning. Note that this may cause us to turn two warnings into
206 one, but that is fine since it is sufficient to only give one
207 warning per expression. */
209 static const char* fold_deferred_overflow_warning;
211 /* If a warning about undefined overflow is deferred, this is the
212 level at which the warning should be emitted. */
214 static enum warn_strict_overflow_code fold_deferred_overflow_code;
216 /* Start deferring overflow warnings. We could use a stack here to
217 permit nested calls, but at present it is not necessary. */
219 void
220 fold_defer_overflow_warnings (void)
222 ++fold_deferring_overflow_warnings;
225 /* Stop deferring overflow warnings. If there is a pending warning,
226 and ISSUE is true, then issue the warning if appropriate. STMT is
227 the statement with which the warning should be associated (used for
228 location information); STMT may be NULL. CODE is the level of the
229 warning--a warn_strict_overflow_code value. This function will use
230 the smaller of CODE and the deferred code when deciding whether to
231 issue the warning. CODE may be zero to mean to always use the
232 deferred code. */
234 void
235 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
237 const char *warnmsg;
238 location_t locus;
240 gcc_assert (fold_deferring_overflow_warnings > 0);
241 --fold_deferring_overflow_warnings;
242 if (fold_deferring_overflow_warnings > 0)
244 if (fold_deferred_overflow_warning != NULL
245 && code != 0
246 && code < (int) fold_deferred_overflow_code)
247 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
248 return;
251 warnmsg = fold_deferred_overflow_warning;
252 fold_deferred_overflow_warning = NULL;
254 if (!issue || warnmsg == NULL)
255 return;
257 if (gimple_no_warning_p (stmt))
258 return;
260 /* Use the smallest code level when deciding to issue the
261 warning. */
262 if (code == 0 || code > (int) fold_deferred_overflow_code)
263 code = fold_deferred_overflow_code;
265 if (!issue_strict_overflow_warning (code))
266 return;
268 if (stmt == NULL)
269 locus = input_location;
270 else
271 locus = gimple_location (stmt);
272 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
275 /* Stop deferring overflow warnings, ignoring any deferred
276 warnings. */
278 void
279 fold_undefer_and_ignore_overflow_warnings (void)
281 fold_undefer_overflow_warnings (false, NULL, 0);
284 /* Whether we are deferring overflow warnings. */
286 bool
287 fold_deferring_overflow_warnings_p (void)
289 return fold_deferring_overflow_warnings > 0;
292 /* This is called when we fold something based on the fact that signed
293 overflow is undefined. */
295 static void
296 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
298 if (fold_deferring_overflow_warnings > 0)
300 if (fold_deferred_overflow_warning == NULL
301 || wc < fold_deferred_overflow_code)
303 fold_deferred_overflow_warning = gmsgid;
304 fold_deferred_overflow_code = wc;
307 else if (issue_strict_overflow_warning (wc))
308 warning (OPT_Wstrict_overflow, gmsgid);
311 /* Return true if the built-in mathematical function specified by CODE
312 is odd, i.e. -f(x) == f(-x). */
314 static bool
315 negate_mathfn_p (enum built_in_function code)
317 switch (code)
319 CASE_FLT_FN (BUILT_IN_ASIN):
320 CASE_FLT_FN (BUILT_IN_ASINH):
321 CASE_FLT_FN (BUILT_IN_ATAN):
322 CASE_FLT_FN (BUILT_IN_ATANH):
323 CASE_FLT_FN (BUILT_IN_CASIN):
324 CASE_FLT_FN (BUILT_IN_CASINH):
325 CASE_FLT_FN (BUILT_IN_CATAN):
326 CASE_FLT_FN (BUILT_IN_CATANH):
327 CASE_FLT_FN (BUILT_IN_CBRT):
328 CASE_FLT_FN (BUILT_IN_CPROJ):
329 CASE_FLT_FN (BUILT_IN_CSIN):
330 CASE_FLT_FN (BUILT_IN_CSINH):
331 CASE_FLT_FN (BUILT_IN_CTAN):
332 CASE_FLT_FN (BUILT_IN_CTANH):
333 CASE_FLT_FN (BUILT_IN_ERF):
334 CASE_FLT_FN (BUILT_IN_LLROUND):
335 CASE_FLT_FN (BUILT_IN_LROUND):
336 CASE_FLT_FN (BUILT_IN_ROUND):
337 CASE_FLT_FN (BUILT_IN_SIN):
338 CASE_FLT_FN (BUILT_IN_SINH):
339 CASE_FLT_FN (BUILT_IN_TAN):
340 CASE_FLT_FN (BUILT_IN_TANH):
341 CASE_FLT_FN (BUILT_IN_TRUNC):
342 return true;
344 CASE_FLT_FN (BUILT_IN_LLRINT):
345 CASE_FLT_FN (BUILT_IN_LRINT):
346 CASE_FLT_FN (BUILT_IN_NEARBYINT):
347 CASE_FLT_FN (BUILT_IN_RINT):
348 return !flag_rounding_math;
350 default:
351 break;
353 return false;
356 /* Check whether we may negate an integer constant T without causing
357 overflow. */
359 bool
360 may_negate_without_overflow_p (const_tree t)
362 unsigned HOST_WIDE_INT val;
363 unsigned int prec;
364 tree type;
366 gcc_assert (TREE_CODE (t) == INTEGER_CST);
368 type = TREE_TYPE (t);
369 if (TYPE_UNSIGNED (type))
370 return false;
372 prec = TYPE_PRECISION (type);
373 if (prec > HOST_BITS_PER_WIDE_INT)
375 if (TREE_INT_CST_LOW (t) != 0)
376 return true;
377 prec -= HOST_BITS_PER_WIDE_INT;
378 val = TREE_INT_CST_HIGH (t);
380 else
381 val = TREE_INT_CST_LOW (t);
382 if (prec < HOST_BITS_PER_WIDE_INT)
383 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
384 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
387 /* Determine whether an expression T can be cheaply negated using
388 the function negate_expr without introducing undefined overflow. */
390 static bool
391 negate_expr_p (tree t)
393 tree type;
395 if (t == 0)
396 return false;
398 type = TREE_TYPE (t);
400 STRIP_SIGN_NOPS (t);
401 switch (TREE_CODE (t))
403 case INTEGER_CST:
404 if (TYPE_OVERFLOW_WRAPS (type))
405 return true;
407 /* Check that -CST will not overflow type. */
408 return may_negate_without_overflow_p (t);
409 case BIT_NOT_EXPR:
410 return (INTEGRAL_TYPE_P (type)
411 && TYPE_OVERFLOW_WRAPS (type));
413 case FIXED_CST:
414 case NEGATE_EXPR:
415 return true;
417 case REAL_CST:
418 /* We want to canonicalize to positive real constants. Pretend
419 that only negative ones can be easily negated. */
420 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
422 case COMPLEX_CST:
423 return negate_expr_p (TREE_REALPART (t))
424 && negate_expr_p (TREE_IMAGPART (t));
426 case COMPLEX_EXPR:
427 return negate_expr_p (TREE_OPERAND (t, 0))
428 && negate_expr_p (TREE_OPERAND (t, 1));
430 case CONJ_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0));
433 case PLUS_EXPR:
434 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
435 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1))
439 && reorder_operands_p (TREE_OPERAND (t, 0),
440 TREE_OPERAND (t, 1)))
441 return true;
442 /* -(A + B) -> (-A) - B. */
443 return negate_expr_p (TREE_OPERAND (t, 0));
445 case MINUS_EXPR:
446 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
447 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
448 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
449 && reorder_operands_p (TREE_OPERAND (t, 0),
450 TREE_OPERAND (t, 1));
452 case MULT_EXPR:
453 if (TYPE_UNSIGNED (TREE_TYPE (t)))
454 break;
456 /* Fall through. */
458 case RDIV_EXPR:
459 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
460 return negate_expr_p (TREE_OPERAND (t, 1))
461 || negate_expr_p (TREE_OPERAND (t, 0));
462 break;
464 case TRUNC_DIV_EXPR:
465 case ROUND_DIV_EXPR:
466 case FLOOR_DIV_EXPR:
467 case CEIL_DIV_EXPR:
468 case EXACT_DIV_EXPR:
469 /* In general we can't negate A / B, because if A is INT_MIN and
470 B is 1, we may turn this into INT_MIN / -1 which is undefined
471 and actually traps on some architectures. But if overflow is
472 undefined, we can negate, because - (INT_MIN / 1) is an
473 overflow. */
474 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
475 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
476 break;
477 return negate_expr_p (TREE_OPERAND (t, 1))
478 || negate_expr_p (TREE_OPERAND (t, 0));
480 case NOP_EXPR:
481 /* Negate -((double)float) as (double)(-float). */
482 if (TREE_CODE (type) == REAL_TYPE)
484 tree tem = strip_float_extensions (t);
485 if (tem != t)
486 return negate_expr_p (tem);
488 break;
490 case CALL_EXPR:
491 /* Negate -f(x) as f(-x). */
492 if (negate_mathfn_p (builtin_mathfn_code (t)))
493 return negate_expr_p (CALL_EXPR_ARG (t, 0));
494 break;
496 case RSHIFT_EXPR:
497 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
498 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
500 tree op1 = TREE_OPERAND (t, 1);
501 if (TREE_INT_CST_HIGH (op1) == 0
502 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
503 == TREE_INT_CST_LOW (op1))
504 return true;
506 break;
508 default:
509 break;
511 return false;
514 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
515 simplification is possible.
516 If negate_expr_p would return true for T, NULL_TREE will never be
517 returned. */
519 static tree
520 fold_negate_expr (location_t loc, tree t)
522 tree type = TREE_TYPE (t);
523 tree tem;
525 switch (TREE_CODE (t))
527 /* Convert - (~A) to A + 1. */
528 case BIT_NOT_EXPR:
529 if (INTEGRAL_TYPE_P (type))
530 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
531 build_int_cst (type, 1));
532 break;
534 case INTEGER_CST:
535 tem = fold_negate_const (t, type);
536 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
537 || !TYPE_OVERFLOW_TRAPS (type))
538 return tem;
539 break;
541 case REAL_CST:
542 tem = fold_negate_const (t, type);
543 /* Two's complement FP formats, such as c4x, may overflow. */
544 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
545 return tem;
546 break;
548 case FIXED_CST:
549 tem = fold_negate_const (t, type);
550 return tem;
552 case COMPLEX_CST:
554 tree rpart = negate_expr (TREE_REALPART (t));
555 tree ipart = negate_expr (TREE_IMAGPART (t));
557 if ((TREE_CODE (rpart) == REAL_CST
558 && TREE_CODE (ipart) == REAL_CST)
559 || (TREE_CODE (rpart) == INTEGER_CST
560 && TREE_CODE (ipart) == INTEGER_CST))
561 return build_complex (type, rpart, ipart);
563 break;
565 case COMPLEX_EXPR:
566 if (negate_expr_p (t))
567 return fold_build2_loc (loc, COMPLEX_EXPR, type,
568 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
569 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
570 break;
572 case CONJ_EXPR:
573 if (negate_expr_p (t))
574 return fold_build1_loc (loc, CONJ_EXPR, type,
575 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
576 break;
578 case NEGATE_EXPR:
579 return TREE_OPERAND (t, 0);
581 case PLUS_EXPR:
582 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
583 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
585 /* -(A + B) -> (-B) - A. */
586 if (negate_expr_p (TREE_OPERAND (t, 1))
587 && reorder_operands_p (TREE_OPERAND (t, 0),
588 TREE_OPERAND (t, 1)))
590 tem = negate_expr (TREE_OPERAND (t, 1));
591 return fold_build2_loc (loc, MINUS_EXPR, type,
592 tem, TREE_OPERAND (t, 0));
595 /* -(A + B) -> (-A) - B. */
596 if (negate_expr_p (TREE_OPERAND (t, 0)))
598 tem = negate_expr (TREE_OPERAND (t, 0));
599 return fold_build2_loc (loc, MINUS_EXPR, type,
600 tem, TREE_OPERAND (t, 1));
603 break;
605 case MINUS_EXPR:
606 /* - (A - B) -> B - A */
607 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
608 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
609 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
610 return fold_build2_loc (loc, MINUS_EXPR, type,
611 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
612 break;
614 case MULT_EXPR:
615 if (TYPE_UNSIGNED (type))
616 break;
618 /* Fall through. */
620 case RDIV_EXPR:
621 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
623 tem = TREE_OPERAND (t, 1);
624 if (negate_expr_p (tem))
625 return fold_build2_loc (loc, TREE_CODE (t), type,
626 TREE_OPERAND (t, 0), negate_expr (tem));
627 tem = TREE_OPERAND (t, 0);
628 if (negate_expr_p (tem))
629 return fold_build2_loc (loc, TREE_CODE (t), type,
630 negate_expr (tem), TREE_OPERAND (t, 1));
632 break;
634 case TRUNC_DIV_EXPR:
635 case ROUND_DIV_EXPR:
636 case FLOOR_DIV_EXPR:
637 case CEIL_DIV_EXPR:
638 case EXACT_DIV_EXPR:
639 /* In general we can't negate A / B, because if A is INT_MIN and
640 B is 1, we may turn this into INT_MIN / -1 which is undefined
641 and actually traps on some architectures. But if overflow is
642 undefined, we can negate, because - (INT_MIN / 1) is an
643 overflow. */
644 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
646 const char * const warnmsg = G_("assuming signed overflow does not "
647 "occur when negating a division");
648 tem = TREE_OPERAND (t, 1);
649 if (negate_expr_p (tem))
651 if (INTEGRAL_TYPE_P (type)
652 && (TREE_CODE (tem) != INTEGER_CST
653 || integer_onep (tem)))
654 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
655 return fold_build2_loc (loc, TREE_CODE (t), type,
656 TREE_OPERAND (t, 0), negate_expr (tem));
658 tem = TREE_OPERAND (t, 0);
659 if (negate_expr_p (tem))
661 if (INTEGRAL_TYPE_P (type)
662 && (TREE_CODE (tem) != INTEGER_CST
663 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
664 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
669 break;
671 case NOP_EXPR:
672 /* Convert -((double)float) into (double)(-float). */
673 if (TREE_CODE (type) == REAL_TYPE)
675 tem = strip_float_extensions (t);
676 if (tem != t && negate_expr_p (tem))
677 return fold_convert_loc (loc, type, negate_expr (tem));
679 break;
681 case CALL_EXPR:
682 /* Negate -f(x) as f(-x). */
683 if (negate_mathfn_p (builtin_mathfn_code (t))
684 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
686 tree fndecl, arg;
688 fndecl = get_callee_fndecl (t);
689 arg = negate_expr (CALL_EXPR_ARG (t, 0));
690 return build_call_expr_loc (loc, fndecl, 1, arg);
692 break;
694 case RSHIFT_EXPR:
695 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
696 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
698 tree op1 = TREE_OPERAND (t, 1);
699 if (TREE_INT_CST_HIGH (op1) == 0
700 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
701 == TREE_INT_CST_LOW (op1))
703 tree ntype = TYPE_UNSIGNED (type)
704 ? signed_type_for (type)
705 : unsigned_type_for (type);
706 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
707 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
708 return fold_convert_loc (loc, type, temp);
711 break;
713 default:
714 break;
717 return NULL_TREE;
720 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
721 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
722 return NULL_TREE. */
724 static tree
725 negate_expr (tree t)
727 tree type, tem;
728 location_t loc;
730 if (t == NULL_TREE)
731 return NULL_TREE;
733 loc = EXPR_LOCATION (t);
734 type = TREE_TYPE (t);
735 STRIP_SIGN_NOPS (t);
737 tem = fold_negate_expr (loc, t);
738 if (!tem)
739 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
740 return fold_convert_loc (loc, type, tem);
743 /* Split a tree IN into a constant, literal and variable parts that could be
744 combined with CODE to make IN. "constant" means an expression with
745 TREE_CONSTANT but that isn't an actual constant. CODE must be a
746 commutative arithmetic operation. Store the constant part into *CONP,
747 the literal in *LITP and return the variable part. If a part isn't
748 present, set it to null. If the tree does not decompose in this way,
749 return the entire tree as the variable part and the other parts as null.
751 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
752 case, we negate an operand that was subtracted. Except if it is a
753 literal for which we use *MINUS_LITP instead.
755 If NEGATE_P is true, we are negating all of IN, again except a literal
756 for which we use *MINUS_LITP instead.
758 If IN is itself a literal or constant, return it as appropriate.
760 Note that we do not guarantee that any of the three values will be the
761 same type as IN, but they will have the same signedness and mode. */
763 static tree
764 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
765 tree *minus_litp, int negate_p)
767 tree var = 0;
769 *conp = 0;
770 *litp = 0;
771 *minus_litp = 0;
773 /* Strip any conversions that don't change the machine mode or signedness. */
774 STRIP_SIGN_NOPS (in);
776 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
777 || TREE_CODE (in) == FIXED_CST)
778 *litp = in;
779 else if (TREE_CODE (in) == code
780 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
781 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
782 /* We can associate addition and subtraction together (even
783 though the C standard doesn't say so) for integers because
784 the value is not affected. For reals, the value might be
785 affected, so we can't. */
786 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
787 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
789 tree op0 = TREE_OPERAND (in, 0);
790 tree op1 = TREE_OPERAND (in, 1);
791 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
792 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
794 /* First see if either of the operands is a literal, then a constant. */
795 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
796 || TREE_CODE (op0) == FIXED_CST)
797 *litp = op0, op0 = 0;
798 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
799 || TREE_CODE (op1) == FIXED_CST)
800 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
802 if (op0 != 0 && TREE_CONSTANT (op0))
803 *conp = op0, op0 = 0;
804 else if (op1 != 0 && TREE_CONSTANT (op1))
805 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
807 /* If we haven't dealt with either operand, this is not a case we can
808 decompose. Otherwise, VAR is either of the ones remaining, if any. */
809 if (op0 != 0 && op1 != 0)
810 var = in;
811 else if (op0 != 0)
812 var = op0;
813 else
814 var = op1, neg_var_p = neg1_p;
816 /* Now do any needed negations. */
817 if (neg_litp_p)
818 *minus_litp = *litp, *litp = 0;
819 if (neg_conp_p)
820 *conp = negate_expr (*conp);
821 if (neg_var_p)
822 var = negate_expr (var);
824 else if (TREE_CONSTANT (in))
825 *conp = in;
826 else
827 var = in;
829 if (negate_p)
831 if (*litp)
832 *minus_litp = *litp, *litp = 0;
833 else if (*minus_litp)
834 *litp = *minus_litp, *minus_litp = 0;
835 *conp = negate_expr (*conp);
836 var = negate_expr (var);
839 return var;
842 /* Re-associate trees split by the above function. T1 and T2 are
843 either expressions to associate or null. Return the new
844 expression, if any. LOC is the location of the new expression. If
845 we build an operation, do it in TYPE and with CODE. */
847 static tree
848 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
850 if (t1 == 0)
851 return t2;
852 else if (t2 == 0)
853 return t1;
855 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
856 try to fold this since we will have infinite recursion. But do
857 deal with any NEGATE_EXPRs. */
858 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
859 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
861 if (code == PLUS_EXPR)
863 if (TREE_CODE (t1) == NEGATE_EXPR)
864 return build2_loc (loc, MINUS_EXPR, type,
865 fold_convert_loc (loc, type, t2),
866 fold_convert_loc (loc, type,
867 TREE_OPERAND (t1, 0)));
868 else if (TREE_CODE (t2) == NEGATE_EXPR)
869 return build2_loc (loc, MINUS_EXPR, type,
870 fold_convert_loc (loc, type, t1),
871 fold_convert_loc (loc, type,
872 TREE_OPERAND (t2, 0)));
873 else if (integer_zerop (t2))
874 return fold_convert_loc (loc, type, t1);
876 else if (code == MINUS_EXPR)
878 if (integer_zerop (t2))
879 return fold_convert_loc (loc, type, t1);
882 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
883 fold_convert_loc (loc, type, t2));
886 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
887 fold_convert_loc (loc, type, t2));
890 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
891 for use in int_const_binop, size_binop and size_diffop. */
893 static bool
894 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
896 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
897 return false;
898 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
899 return false;
901 switch (code)
903 case LSHIFT_EXPR:
904 case RSHIFT_EXPR:
905 case LROTATE_EXPR:
906 case RROTATE_EXPR:
907 return true;
909 default:
910 break;
913 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
914 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
915 && TYPE_MODE (type1) == TYPE_MODE (type2);
919 /* Combine two integer constants ARG1 and ARG2 under operation CODE
920 to produce a new constant. Return NULL_TREE if we don't know how
921 to evaluate CODE at compile-time. */
923 static tree
924 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
925 int overflowable)
927 double_int op1, op2, res, tmp;
928 tree t;
929 tree type = TREE_TYPE (arg1);
930 bool uns = TYPE_UNSIGNED (type);
931 bool overflow = false;
933 op1 = tree_to_double_int (arg1);
934 op2 = tree_to_double_int (arg2);
936 switch (code)
938 case BIT_IOR_EXPR:
939 res = op1 | op2;
940 break;
942 case BIT_XOR_EXPR:
943 res = op1 ^ op2;
944 break;
946 case BIT_AND_EXPR:
947 res = op1 & op2;
948 break;
950 case RSHIFT_EXPR:
951 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
952 break;
954 case LSHIFT_EXPR:
955 /* It's unclear from the C standard whether shifts can overflow.
956 The following code ignores overflow; perhaps a C standard
957 interpretation ruling is needed. */
958 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
959 break;
961 case RROTATE_EXPR:
962 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
963 break;
965 case LROTATE_EXPR:
966 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
967 break;
969 case PLUS_EXPR:
970 res = op1.add_with_sign (op2, false, &overflow);
971 break;
973 case MINUS_EXPR:
974 res = op1.sub_with_overflow (op2, &overflow);
975 break;
977 case MULT_EXPR:
978 res = op1.mul_with_sign (op2, false, &overflow);
979 break;
981 case MULT_HIGHPART_EXPR:
982 /* ??? Need quad precision, or an additional shift operand
983 to the multiply primitive, to handle very large highparts. */
984 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
985 return NULL_TREE;
986 tmp = op1 - op2;
987 res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
988 break;
990 case TRUNC_DIV_EXPR:
991 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
992 case EXACT_DIV_EXPR:
993 /* This is a shortcut for a common special case. */
994 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
995 && !TREE_OVERFLOW (arg1)
996 && !TREE_OVERFLOW (arg2)
997 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
999 if (code == CEIL_DIV_EXPR)
1000 op1.low += op2.low - 1;
1002 res.low = op1.low / op2.low, res.high = 0;
1003 break;
1006 /* ... fall through ... */
1008 case ROUND_DIV_EXPR:
1009 if (op2.is_zero ())
1010 return NULL_TREE;
1011 if (op2.is_one ())
1013 res = op1;
1014 break;
1016 if (op1 == op2 && !op1.is_zero ())
1018 res = double_int_one;
1019 break;
1021 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1022 break;
1024 case TRUNC_MOD_EXPR:
1025 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1026 /* This is a shortcut for a common special case. */
1027 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1028 && !TREE_OVERFLOW (arg1)
1029 && !TREE_OVERFLOW (arg2)
1030 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1032 if (code == CEIL_MOD_EXPR)
1033 op1.low += op2.low - 1;
1034 res.low = op1.low % op2.low, res.high = 0;
1035 break;
1038 /* ... fall through ... */
1040 case ROUND_MOD_EXPR:
1041 if (op2.is_zero ())
1042 return NULL_TREE;
1043 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1044 break;
1046 case MIN_EXPR:
1047 res = op1.min (op2, uns);
1048 break;
1050 case MAX_EXPR:
1051 res = op1.max (op2, uns);
1052 break;
1054 default:
1055 return NULL_TREE;
1058 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1059 (!uns && overflow)
1060 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1062 return t;
1065 tree
1066 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1068 return int_const_binop_1 (code, arg1, arg2, 1);
1071 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1072 constant. We assume ARG1 and ARG2 have the same data type, or at least
1073 are the same kind of constant and the same machine mode. Return zero if
1074 combining the constants is not allowed in the current operating mode. */
1076 static tree
1077 const_binop (enum tree_code code, tree arg1, tree arg2)
1079 /* Sanity check for the recursive cases. */
1080 if (!arg1 || !arg2)
1081 return NULL_TREE;
1083 STRIP_NOPS (arg1);
1084 STRIP_NOPS (arg2);
1086 if (TREE_CODE (arg1) == INTEGER_CST)
1087 return int_const_binop (code, arg1, arg2);
1089 if (TREE_CODE (arg1) == REAL_CST)
1091 enum machine_mode mode;
1092 REAL_VALUE_TYPE d1;
1093 REAL_VALUE_TYPE d2;
1094 REAL_VALUE_TYPE value;
1095 REAL_VALUE_TYPE result;
1096 bool inexact;
1097 tree t, type;
1099 /* The following codes are handled by real_arithmetic. */
1100 switch (code)
1102 case PLUS_EXPR:
1103 case MINUS_EXPR:
1104 case MULT_EXPR:
1105 case RDIV_EXPR:
1106 case MIN_EXPR:
1107 case MAX_EXPR:
1108 break;
1110 default:
1111 return NULL_TREE;
1114 d1 = TREE_REAL_CST (arg1);
1115 d2 = TREE_REAL_CST (arg2);
1117 type = TREE_TYPE (arg1);
1118 mode = TYPE_MODE (type);
1120 /* Don't perform operation if we honor signaling NaNs and
1121 either operand is a NaN. */
1122 if (HONOR_SNANS (mode)
1123 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1124 return NULL_TREE;
1126 /* Don't perform operation if it would raise a division
1127 by zero exception. */
1128 if (code == RDIV_EXPR
1129 && REAL_VALUES_EQUAL (d2, dconst0)
1130 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1131 return NULL_TREE;
1133 /* If either operand is a NaN, just return it. Otherwise, set up
1134 for floating-point trap; we return an overflow. */
1135 if (REAL_VALUE_ISNAN (d1))
1136 return arg1;
1137 else if (REAL_VALUE_ISNAN (d2))
1138 return arg2;
1140 inexact = real_arithmetic (&value, code, &d1, &d2);
1141 real_convert (&result, mode, &value);
1143 /* Don't constant fold this floating point operation if
1144 the result has overflowed and flag_trapping_math. */
1145 if (flag_trapping_math
1146 && MODE_HAS_INFINITIES (mode)
1147 && REAL_VALUE_ISINF (result)
1148 && !REAL_VALUE_ISINF (d1)
1149 && !REAL_VALUE_ISINF (d2))
1150 return NULL_TREE;
1152 /* Don't constant fold this floating point operation if the
1153 result may dependent upon the run-time rounding mode and
1154 flag_rounding_math is set, or if GCC's software emulation
1155 is unable to accurately represent the result. */
1156 if ((flag_rounding_math
1157 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1158 && (inexact || !real_identical (&result, &value)))
1159 return NULL_TREE;
1161 t = build_real (type, result);
1163 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1164 return t;
1167 if (TREE_CODE (arg1) == FIXED_CST)
1169 FIXED_VALUE_TYPE f1;
1170 FIXED_VALUE_TYPE f2;
1171 FIXED_VALUE_TYPE result;
1172 tree t, type;
1173 int sat_p;
1174 bool overflow_p;
1176 /* The following codes are handled by fixed_arithmetic. */
1177 switch (code)
1179 case PLUS_EXPR:
1180 case MINUS_EXPR:
1181 case MULT_EXPR:
1182 case TRUNC_DIV_EXPR:
1183 f2 = TREE_FIXED_CST (arg2);
1184 break;
1186 case LSHIFT_EXPR:
1187 case RSHIFT_EXPR:
1188 f2.data.high = TREE_INT_CST_HIGH (arg2);
1189 f2.data.low = TREE_INT_CST_LOW (arg2);
1190 f2.mode = SImode;
1191 break;
1193 default:
1194 return NULL_TREE;
1197 f1 = TREE_FIXED_CST (arg1);
1198 type = TREE_TYPE (arg1);
1199 sat_p = TYPE_SATURATING (type);
1200 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1201 t = build_fixed (type, result);
1202 /* Propagate overflow flags. */
1203 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1204 TREE_OVERFLOW (t) = 1;
1205 return t;
1208 if (TREE_CODE (arg1) == COMPLEX_CST)
1210 tree type = TREE_TYPE (arg1);
1211 tree r1 = TREE_REALPART (arg1);
1212 tree i1 = TREE_IMAGPART (arg1);
1213 tree r2 = TREE_REALPART (arg2);
1214 tree i2 = TREE_IMAGPART (arg2);
1215 tree real, imag;
1217 switch (code)
1219 case PLUS_EXPR:
1220 case MINUS_EXPR:
1221 real = const_binop (code, r1, r2);
1222 imag = const_binop (code, i1, i2);
1223 break;
1225 case MULT_EXPR:
1226 if (COMPLEX_FLOAT_TYPE_P (type))
1227 return do_mpc_arg2 (arg1, arg2, type,
1228 /* do_nonfinite= */ folding_initializer,
1229 mpc_mul);
1231 real = const_binop (MINUS_EXPR,
1232 const_binop (MULT_EXPR, r1, r2),
1233 const_binop (MULT_EXPR, i1, i2));
1234 imag = const_binop (PLUS_EXPR,
1235 const_binop (MULT_EXPR, r1, i2),
1236 const_binop (MULT_EXPR, i1, r2));
1237 break;
1239 case RDIV_EXPR:
1240 if (COMPLEX_FLOAT_TYPE_P (type))
1241 return do_mpc_arg2 (arg1, arg2, type,
1242 /* do_nonfinite= */ folding_initializer,
1243 mpc_div);
1244 /* Fallthru ... */
1245 case TRUNC_DIV_EXPR:
1246 case CEIL_DIV_EXPR:
1247 case FLOOR_DIV_EXPR:
1248 case ROUND_DIV_EXPR:
1249 if (flag_complex_method == 0)
1251 /* Keep this algorithm in sync with
1252 tree-complex.c:expand_complex_div_straight().
1254 Expand complex division to scalars, straightforward algorithm.
1255 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1256 t = br*br + bi*bi
1258 tree magsquared
1259 = const_binop (PLUS_EXPR,
1260 const_binop (MULT_EXPR, r2, r2),
1261 const_binop (MULT_EXPR, i2, i2));
1262 tree t1
1263 = const_binop (PLUS_EXPR,
1264 const_binop (MULT_EXPR, r1, r2),
1265 const_binop (MULT_EXPR, i1, i2));
1266 tree t2
1267 = const_binop (MINUS_EXPR,
1268 const_binop (MULT_EXPR, i1, r2),
1269 const_binop (MULT_EXPR, r1, i2));
1271 real = const_binop (code, t1, magsquared);
1272 imag = const_binop (code, t2, magsquared);
1274 else
1276 /* Keep this algorithm in sync with
1277 tree-complex.c:expand_complex_div_wide().
1279 Expand complex division to scalars, modified algorithm to minimize
1280 overflow with wide input ranges. */
1281 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1282 fold_abs_const (r2, TREE_TYPE (type)),
1283 fold_abs_const (i2, TREE_TYPE (type)));
1285 if (integer_nonzerop (compare))
1287 /* In the TRUE branch, we compute
1288 ratio = br/bi;
1289 div = (br * ratio) + bi;
1290 tr = (ar * ratio) + ai;
1291 ti = (ai * ratio) - ar;
1292 tr = tr / div;
1293 ti = ti / div; */
1294 tree ratio = const_binop (code, r2, i2);
1295 tree div = const_binop (PLUS_EXPR, i2,
1296 const_binop (MULT_EXPR, r2, ratio));
1297 real = const_binop (MULT_EXPR, r1, ratio);
1298 real = const_binop (PLUS_EXPR, real, i1);
1299 real = const_binop (code, real, div);
1301 imag = const_binop (MULT_EXPR, i1, ratio);
1302 imag = const_binop (MINUS_EXPR, imag, r1);
1303 imag = const_binop (code, imag, div);
1305 else
1307 /* In the FALSE branch, we compute
1308 ratio = d/c;
1309 divisor = (d * ratio) + c;
1310 tr = (b * ratio) + a;
1311 ti = b - (a * ratio);
1312 tr = tr / div;
1313 ti = ti / div; */
1314 tree ratio = const_binop (code, i2, r2);
1315 tree div = const_binop (PLUS_EXPR, r2,
1316 const_binop (MULT_EXPR, i2, ratio));
1318 real = const_binop (MULT_EXPR, i1, ratio);
1319 real = const_binop (PLUS_EXPR, real, r1);
1320 real = const_binop (code, real, div);
1322 imag = const_binop (MULT_EXPR, r1, ratio);
1323 imag = const_binop (MINUS_EXPR, i1, imag);
1324 imag = const_binop (code, imag, div);
1327 break;
1329 default:
1330 return NULL_TREE;
1333 if (real && imag)
1334 return build_complex (type, real, imag);
1337 if (TREE_CODE (arg1) == VECTOR_CST
1338 && TREE_CODE (arg2) == VECTOR_CST)
1340 tree type = TREE_TYPE(arg1);
1341 int count = TYPE_VECTOR_SUBPARTS (type), i;
1342 tree *elts = XALLOCAVEC (tree, count);
1344 for (i = 0; i < count; i++)
1346 tree elem1 = VECTOR_CST_ELT (arg1, i);
1347 tree elem2 = VECTOR_CST_ELT (arg2, i);
1349 elts[i] = const_binop (code, elem1, elem2);
1351 /* It is possible that const_binop cannot handle the given
1352 code and return NULL_TREE */
1353 if(elts[i] == NULL_TREE)
1354 return NULL_TREE;
1357 return build_vector (type, elts);
1359 return NULL_TREE;
1362 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1363 indicates which particular sizetype to create. */
1365 tree
1366 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1368 return build_int_cst (sizetype_tab[(int) kind], number);
1371 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1372 is a tree code. The type of the result is taken from the operands.
1373 Both must be equivalent integer types, ala int_binop_types_match_p.
1374 If the operands are constant, so is the result. */
1376 tree
1377 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1379 tree type = TREE_TYPE (arg0);
1381 if (arg0 == error_mark_node || arg1 == error_mark_node)
1382 return error_mark_node;
1384 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1385 TREE_TYPE (arg1)));
1387 /* Handle the special case of two integer constants faster. */
1388 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1390 /* And some specific cases even faster than that. */
1391 if (code == PLUS_EXPR)
1393 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1394 return arg1;
1395 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1396 return arg0;
1398 else if (code == MINUS_EXPR)
1400 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1401 return arg0;
1403 else if (code == MULT_EXPR)
1405 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1406 return arg1;
1409 /* Handle general case of two integer constants. For sizetype
1410 constant calculations we always want to know about overflow,
1411 even in the unsigned case. */
1412 return int_const_binop_1 (code, arg0, arg1, -1);
1415 return fold_build2_loc (loc, code, type, arg0, arg1);
1418 /* Given two values, either both of sizetype or both of bitsizetype,
1419 compute the difference between the two values. Return the value
1420 in signed type corresponding to the type of the operands. */
1422 tree
1423 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1425 tree type = TREE_TYPE (arg0);
1426 tree ctype;
1428 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1429 TREE_TYPE (arg1)));
1431 /* If the type is already signed, just do the simple thing. */
1432 if (!TYPE_UNSIGNED (type))
1433 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1435 if (type == sizetype)
1436 ctype = ssizetype;
1437 else if (type == bitsizetype)
1438 ctype = sbitsizetype;
1439 else
1440 ctype = signed_type_for (type);
1442 /* If either operand is not a constant, do the conversions to the signed
1443 type and subtract. The hardware will do the right thing with any
1444 overflow in the subtraction. */
1445 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1446 return size_binop_loc (loc, MINUS_EXPR,
1447 fold_convert_loc (loc, ctype, arg0),
1448 fold_convert_loc (loc, ctype, arg1));
1450 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1451 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1452 overflow) and negate (which can't either). Special-case a result
1453 of zero while we're here. */
1454 if (tree_int_cst_equal (arg0, arg1))
1455 return build_int_cst (ctype, 0);
1456 else if (tree_int_cst_lt (arg1, arg0))
1457 return fold_convert_loc (loc, ctype,
1458 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1459 else
1460 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1461 fold_convert_loc (loc, ctype,
1462 size_binop_loc (loc,
1463 MINUS_EXPR,
1464 arg1, arg0)));
1467 /* A subroutine of fold_convert_const handling conversions of an
1468 INTEGER_CST to another integer type. */
1470 static tree
1471 fold_convert_const_int_from_int (tree type, const_tree arg1)
1473 tree t;
1475 /* Given an integer constant, make new constant with new type,
1476 appropriately sign-extended or truncated. */
1477 t = force_fit_type_double (type, tree_to_double_int (arg1),
1478 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1479 (TREE_INT_CST_HIGH (arg1) < 0
1480 && (TYPE_UNSIGNED (type)
1481 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1482 | TREE_OVERFLOW (arg1));
1484 return t;
1487 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1488 to an integer type. */
1490 static tree
1491 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1493 int overflow = 0;
1494 tree t;
1496 /* The following code implements the floating point to integer
1497 conversion rules required by the Java Language Specification,
1498 that IEEE NaNs are mapped to zero and values that overflow
1499 the target precision saturate, i.e. values greater than
1500 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1501 are mapped to INT_MIN. These semantics are allowed by the
1502 C and C++ standards that simply state that the behavior of
1503 FP-to-integer conversion is unspecified upon overflow. */
1505 double_int val;
1506 REAL_VALUE_TYPE r;
1507 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1509 switch (code)
1511 case FIX_TRUNC_EXPR:
1512 real_trunc (&r, VOIDmode, &x);
1513 break;
1515 default:
1516 gcc_unreachable ();
1519 /* If R is NaN, return zero and show we have an overflow. */
1520 if (REAL_VALUE_ISNAN (r))
1522 overflow = 1;
1523 val = double_int_zero;
1526 /* See if R is less than the lower bound or greater than the
1527 upper bound. */
1529 if (! overflow)
1531 tree lt = TYPE_MIN_VALUE (type);
1532 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1533 if (REAL_VALUES_LESS (r, l))
1535 overflow = 1;
1536 val = tree_to_double_int (lt);
1540 if (! overflow)
1542 tree ut = TYPE_MAX_VALUE (type);
1543 if (ut)
1545 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1546 if (REAL_VALUES_LESS (u, r))
1548 overflow = 1;
1549 val = tree_to_double_int (ut);
1554 if (! overflow)
1555 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1557 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1558 return t;
1561 /* A subroutine of fold_convert_const handling conversions of a
1562 FIXED_CST to an integer type. */
1564 static tree
1565 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1567 tree t;
1568 double_int temp, temp_trunc;
1569 unsigned int mode;
1571 /* Right shift FIXED_CST to temp by fbit. */
1572 temp = TREE_FIXED_CST (arg1).data;
1573 mode = TREE_FIXED_CST (arg1).mode;
1574 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1576 temp = temp.rshift (GET_MODE_FBIT (mode),
1577 HOST_BITS_PER_DOUBLE_INT,
1578 SIGNED_FIXED_POINT_MODE_P (mode));
1580 /* Left shift temp to temp_trunc by fbit. */
1581 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1582 HOST_BITS_PER_DOUBLE_INT,
1583 SIGNED_FIXED_POINT_MODE_P (mode));
1585 else
1587 temp = double_int_zero;
1588 temp_trunc = double_int_zero;
1591 /* If FIXED_CST is negative, we need to round the value toward 0.
1592 By checking if the fractional bits are not zero to add 1 to temp. */
1593 if (SIGNED_FIXED_POINT_MODE_P (mode)
1594 && temp_trunc.is_negative ()
1595 && TREE_FIXED_CST (arg1).data != temp_trunc)
1596 temp += double_int_one;
1598 /* Given a fixed-point constant, make new constant with new type,
1599 appropriately sign-extended or truncated. */
1600 t = force_fit_type_double (type, temp, -1,
1601 (temp.is_negative ()
1602 && (TYPE_UNSIGNED (type)
1603 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1604 | TREE_OVERFLOW (arg1));
1606 return t;
1609 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1610 to another floating point type. */
1612 static tree
1613 fold_convert_const_real_from_real (tree type, const_tree arg1)
1615 REAL_VALUE_TYPE value;
1616 tree t;
1618 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1619 t = build_real (type, value);
1621 /* If converting an infinity or NAN to a representation that doesn't
1622 have one, set the overflow bit so that we can produce some kind of
1623 error message at the appropriate point if necessary. It's not the
1624 most user-friendly message, but it's better than nothing. */
1625 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1626 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1627 TREE_OVERFLOW (t) = 1;
1628 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1629 && !MODE_HAS_NANS (TYPE_MODE (type)))
1630 TREE_OVERFLOW (t) = 1;
1631 /* Regular overflow, conversion produced an infinity in a mode that
1632 can't represent them. */
1633 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1634 && REAL_VALUE_ISINF (value)
1635 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1636 TREE_OVERFLOW (t) = 1;
1637 else
1638 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1639 return t;
1642 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1643 to a floating point type. */
1645 static tree
1646 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1648 REAL_VALUE_TYPE value;
1649 tree t;
1651 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1652 t = build_real (type, value);
1654 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1655 return t;
1658 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1659 to another fixed-point type. */
1661 static tree
1662 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1664 FIXED_VALUE_TYPE value;
1665 tree t;
1666 bool overflow_p;
1668 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1669 TYPE_SATURATING (type));
1670 t = build_fixed (type, value);
1672 /* Propagate overflow flags. */
1673 if (overflow_p | TREE_OVERFLOW (arg1))
1674 TREE_OVERFLOW (t) = 1;
1675 return t;
1678 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1679 to a fixed-point type. */
1681 static tree
1682 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1684 FIXED_VALUE_TYPE value;
1685 tree t;
1686 bool overflow_p;
1688 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1689 TREE_INT_CST (arg1),
1690 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1691 TYPE_SATURATING (type));
1692 t = build_fixed (type, value);
1694 /* Propagate overflow flags. */
1695 if (overflow_p | TREE_OVERFLOW (arg1))
1696 TREE_OVERFLOW (t) = 1;
1697 return t;
1700 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1701 to a fixed-point type. */
1703 static tree
1704 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1706 FIXED_VALUE_TYPE value;
1707 tree t;
1708 bool overflow_p;
1710 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1711 &TREE_REAL_CST (arg1),
1712 TYPE_SATURATING (type));
1713 t = build_fixed (type, value);
1715 /* Propagate overflow flags. */
1716 if (overflow_p | TREE_OVERFLOW (arg1))
1717 TREE_OVERFLOW (t) = 1;
1718 return t;
1721 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1722 type TYPE. If no simplification can be done return NULL_TREE. */
1724 static tree
1725 fold_convert_const (enum tree_code code, tree type, tree arg1)
1727 if (TREE_TYPE (arg1) == type)
1728 return arg1;
1730 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1731 || TREE_CODE (type) == OFFSET_TYPE)
1733 if (TREE_CODE (arg1) == INTEGER_CST)
1734 return fold_convert_const_int_from_int (type, arg1);
1735 else if (TREE_CODE (arg1) == REAL_CST)
1736 return fold_convert_const_int_from_real (code, type, arg1);
1737 else if (TREE_CODE (arg1) == FIXED_CST)
1738 return fold_convert_const_int_from_fixed (type, arg1);
1740 else if (TREE_CODE (type) == REAL_TYPE)
1742 if (TREE_CODE (arg1) == INTEGER_CST)
1743 return build_real_from_int_cst (type, arg1);
1744 else if (TREE_CODE (arg1) == REAL_CST)
1745 return fold_convert_const_real_from_real (type, arg1);
1746 else if (TREE_CODE (arg1) == FIXED_CST)
1747 return fold_convert_const_real_from_fixed (type, arg1);
1749 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1751 if (TREE_CODE (arg1) == FIXED_CST)
1752 return fold_convert_const_fixed_from_fixed (type, arg1);
1753 else if (TREE_CODE (arg1) == INTEGER_CST)
1754 return fold_convert_const_fixed_from_int (type, arg1);
1755 else if (TREE_CODE (arg1) == REAL_CST)
1756 return fold_convert_const_fixed_from_real (type, arg1);
1758 return NULL_TREE;
1761 /* Construct a vector of zero elements of vector type TYPE. */
1763 static tree
1764 build_zero_vector (tree type)
1766 tree t;
1768 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1769 return build_vector_from_val (type, t);
1772 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1774 bool
1775 fold_convertible_p (const_tree type, const_tree arg)
1777 tree orig = TREE_TYPE (arg);
1779 if (type == orig)
1780 return true;
1782 if (TREE_CODE (arg) == ERROR_MARK
1783 || TREE_CODE (type) == ERROR_MARK
1784 || TREE_CODE (orig) == ERROR_MARK)
1785 return false;
1787 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1788 return true;
1790 switch (TREE_CODE (type))
1792 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1793 case POINTER_TYPE: case REFERENCE_TYPE:
1794 case OFFSET_TYPE:
1795 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1796 || TREE_CODE (orig) == OFFSET_TYPE)
1797 return true;
1798 return (TREE_CODE (orig) == VECTOR_TYPE
1799 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1801 case REAL_TYPE:
1802 case FIXED_POINT_TYPE:
1803 case COMPLEX_TYPE:
1804 case VECTOR_TYPE:
1805 case VOID_TYPE:
1806 return TREE_CODE (type) == TREE_CODE (orig);
1808 default:
1809 return false;
1813 /* Convert expression ARG to type TYPE. Used by the middle-end for
1814 simple conversions in preference to calling the front-end's convert. */
1816 tree
1817 fold_convert_loc (location_t loc, tree type, tree arg)
1819 tree orig = TREE_TYPE (arg);
1820 tree tem;
1822 if (type == orig)
1823 return arg;
1825 if (TREE_CODE (arg) == ERROR_MARK
1826 || TREE_CODE (type) == ERROR_MARK
1827 || TREE_CODE (orig) == ERROR_MARK)
1828 return error_mark_node;
1830 switch (TREE_CODE (type))
1832 case POINTER_TYPE:
1833 case REFERENCE_TYPE:
1834 /* Handle conversions between pointers to different address spaces. */
1835 if (POINTER_TYPE_P (orig)
1836 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1837 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1838 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1839 /* fall through */
1841 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1842 case OFFSET_TYPE:
1843 if (TREE_CODE (arg) == INTEGER_CST)
1845 tem = fold_convert_const (NOP_EXPR, type, arg);
1846 if (tem != NULL_TREE)
1847 return tem;
1849 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1850 || TREE_CODE (orig) == OFFSET_TYPE)
1851 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1852 if (TREE_CODE (orig) == COMPLEX_TYPE)
1853 return fold_convert_loc (loc, type,
1854 fold_build1_loc (loc, REALPART_EXPR,
1855 TREE_TYPE (orig), arg));
1856 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1857 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1858 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1860 case REAL_TYPE:
1861 if (TREE_CODE (arg) == INTEGER_CST)
1863 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1864 if (tem != NULL_TREE)
1865 return tem;
1867 else if (TREE_CODE (arg) == REAL_CST)
1869 tem = fold_convert_const (NOP_EXPR, type, arg);
1870 if (tem != NULL_TREE)
1871 return tem;
1873 else if (TREE_CODE (arg) == FIXED_CST)
1875 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1876 if (tem != NULL_TREE)
1877 return tem;
1880 switch (TREE_CODE (orig))
1882 case INTEGER_TYPE:
1883 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1884 case POINTER_TYPE: case REFERENCE_TYPE:
1885 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1887 case REAL_TYPE:
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1890 case FIXED_POINT_TYPE:
1891 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1893 case COMPLEX_TYPE:
1894 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1895 return fold_convert_loc (loc, type, tem);
1897 default:
1898 gcc_unreachable ();
1901 case FIXED_POINT_TYPE:
1902 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1903 || TREE_CODE (arg) == REAL_CST)
1905 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 goto fold_convert_exit;
1910 switch (TREE_CODE (orig))
1912 case FIXED_POINT_TYPE:
1913 case INTEGER_TYPE:
1914 case ENUMERAL_TYPE:
1915 case BOOLEAN_TYPE:
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1919 case COMPLEX_TYPE:
1920 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1921 return fold_convert_loc (loc, type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 case FIXED_POINT_TYPE:
1935 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1936 fold_convert_loc (loc, TREE_TYPE (type), arg),
1937 fold_convert_loc (loc, TREE_TYPE (type),
1938 integer_zero_node));
1939 case COMPLEX_TYPE:
1941 tree rpart, ipart;
1943 if (TREE_CODE (arg) == COMPLEX_EXPR)
1945 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1946 TREE_OPERAND (arg, 0));
1947 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1948 TREE_OPERAND (arg, 1));
1949 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1952 arg = save_expr (arg);
1953 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1954 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1955 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1956 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1957 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1960 default:
1961 gcc_unreachable ();
1964 case VECTOR_TYPE:
1965 if (integer_zerop (arg))
1966 return build_zero_vector (type);
1967 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1968 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1969 || TREE_CODE (orig) == VECTOR_TYPE);
1970 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1972 case VOID_TYPE:
1973 tem = fold_ignored_result (arg);
1974 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1976 default:
1977 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1978 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1979 gcc_unreachable ();
1981 fold_convert_exit:
1982 protected_set_expr_location_unshare (tem, loc);
1983 return tem;
1986 /* Return false if expr can be assumed not to be an lvalue, true
1987 otherwise. */
1989 static bool
1990 maybe_lvalue_p (const_tree x)
1992 /* We only need to wrap lvalue tree codes. */
1993 switch (TREE_CODE (x))
1995 case VAR_DECL:
1996 case PARM_DECL:
1997 case RESULT_DECL:
1998 case LABEL_DECL:
1999 case FUNCTION_DECL:
2000 case SSA_NAME:
2002 case COMPONENT_REF:
2003 case MEM_REF:
2004 case INDIRECT_REF:
2005 case ARRAY_REF:
2006 case ARRAY_RANGE_REF:
2007 case BIT_FIELD_REF:
2008 case OBJ_TYPE_REF:
2010 case REALPART_EXPR:
2011 case IMAGPART_EXPR:
2012 case PREINCREMENT_EXPR:
2013 case PREDECREMENT_EXPR:
2014 case SAVE_EXPR:
2015 case TRY_CATCH_EXPR:
2016 case WITH_CLEANUP_EXPR:
2017 case COMPOUND_EXPR:
2018 case MODIFY_EXPR:
2019 case TARGET_EXPR:
2020 case COND_EXPR:
2021 case BIND_EXPR:
2022 break;
2024 default:
2025 /* Assume the worst for front-end tree codes. */
2026 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2027 break;
2028 return false;
2031 return true;
2034 /* Return an expr equal to X but certainly not valid as an lvalue. */
2036 tree
2037 non_lvalue_loc (location_t loc, tree x)
2039 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2040 us. */
2041 if (in_gimple_form)
2042 return x;
2044 if (! maybe_lvalue_p (x))
2045 return x;
2046 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2049 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2050 Zero means allow extended lvalues. */
2052 int pedantic_lvalues;
2054 /* When pedantic, return an expr equal to X but certainly not valid as a
2055 pedantic lvalue. Otherwise, return X. */
2057 static tree
2058 pedantic_non_lvalue_loc (location_t loc, tree x)
2060 if (pedantic_lvalues)
2061 return non_lvalue_loc (loc, x);
2063 return protected_set_expr_location_unshare (x, loc);
2066 /* Given a tree comparison code, return the code that is the logical inverse.
2067 It is generally not safe to do this for floating-point comparisons, except
2068 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2069 ERROR_MARK in this case. */
2071 enum tree_code
2072 invert_tree_comparison (enum tree_code code, bool honor_nans)
2074 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2075 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2076 return ERROR_MARK;
2078 switch (code)
2080 case EQ_EXPR:
2081 return NE_EXPR;
2082 case NE_EXPR:
2083 return EQ_EXPR;
2084 case GT_EXPR:
2085 return honor_nans ? UNLE_EXPR : LE_EXPR;
2086 case GE_EXPR:
2087 return honor_nans ? UNLT_EXPR : LT_EXPR;
2088 case LT_EXPR:
2089 return honor_nans ? UNGE_EXPR : GE_EXPR;
2090 case LE_EXPR:
2091 return honor_nans ? UNGT_EXPR : GT_EXPR;
2092 case LTGT_EXPR:
2093 return UNEQ_EXPR;
2094 case UNEQ_EXPR:
2095 return LTGT_EXPR;
2096 case UNGT_EXPR:
2097 return LE_EXPR;
2098 case UNGE_EXPR:
2099 return LT_EXPR;
2100 case UNLT_EXPR:
2101 return GE_EXPR;
2102 case UNLE_EXPR:
2103 return GT_EXPR;
2104 case ORDERED_EXPR:
2105 return UNORDERED_EXPR;
2106 case UNORDERED_EXPR:
2107 return ORDERED_EXPR;
2108 default:
2109 gcc_unreachable ();
2113 /* Similar, but return the comparison that results if the operands are
2114 swapped. This is safe for floating-point. */
2116 enum tree_code
2117 swap_tree_comparison (enum tree_code code)
2119 switch (code)
2121 case EQ_EXPR:
2122 case NE_EXPR:
2123 case ORDERED_EXPR:
2124 case UNORDERED_EXPR:
2125 case LTGT_EXPR:
2126 case UNEQ_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 case UNGT_EXPR:
2137 return UNLT_EXPR;
2138 case UNGE_EXPR:
2139 return UNLE_EXPR;
2140 case UNLT_EXPR:
2141 return UNGT_EXPR;
2142 case UNLE_EXPR:
2143 return UNGE_EXPR;
2144 default:
2145 gcc_unreachable ();
2150 /* Convert a comparison tree code from an enum tree_code representation
2151 into a compcode bit-based encoding. This function is the inverse of
2152 compcode_to_comparison. */
2154 static enum comparison_code
2155 comparison_to_compcode (enum tree_code code)
2157 switch (code)
2159 case LT_EXPR:
2160 return COMPCODE_LT;
2161 case EQ_EXPR:
2162 return COMPCODE_EQ;
2163 case LE_EXPR:
2164 return COMPCODE_LE;
2165 case GT_EXPR:
2166 return COMPCODE_GT;
2167 case NE_EXPR:
2168 return COMPCODE_NE;
2169 case GE_EXPR:
2170 return COMPCODE_GE;
2171 case ORDERED_EXPR:
2172 return COMPCODE_ORD;
2173 case UNORDERED_EXPR:
2174 return COMPCODE_UNORD;
2175 case UNLT_EXPR:
2176 return COMPCODE_UNLT;
2177 case UNEQ_EXPR:
2178 return COMPCODE_UNEQ;
2179 case UNLE_EXPR:
2180 return COMPCODE_UNLE;
2181 case UNGT_EXPR:
2182 return COMPCODE_UNGT;
2183 case LTGT_EXPR:
2184 return COMPCODE_LTGT;
2185 case UNGE_EXPR:
2186 return COMPCODE_UNGE;
2187 default:
2188 gcc_unreachable ();
2192 /* Convert a compcode bit-based encoding of a comparison operator back
2193 to GCC's enum tree_code representation. This function is the
2194 inverse of comparison_to_compcode. */
2196 static enum tree_code
2197 compcode_to_comparison (enum comparison_code code)
2199 switch (code)
2201 case COMPCODE_LT:
2202 return LT_EXPR;
2203 case COMPCODE_EQ:
2204 return EQ_EXPR;
2205 case COMPCODE_LE:
2206 return LE_EXPR;
2207 case COMPCODE_GT:
2208 return GT_EXPR;
2209 case COMPCODE_NE:
2210 return NE_EXPR;
2211 case COMPCODE_GE:
2212 return GE_EXPR;
2213 case COMPCODE_ORD:
2214 return ORDERED_EXPR;
2215 case COMPCODE_UNORD:
2216 return UNORDERED_EXPR;
2217 case COMPCODE_UNLT:
2218 return UNLT_EXPR;
2219 case COMPCODE_UNEQ:
2220 return UNEQ_EXPR;
2221 case COMPCODE_UNLE:
2222 return UNLE_EXPR;
2223 case COMPCODE_UNGT:
2224 return UNGT_EXPR;
2225 case COMPCODE_LTGT:
2226 return LTGT_EXPR;
2227 case COMPCODE_UNGE:
2228 return UNGE_EXPR;
2229 default:
2230 gcc_unreachable ();
2234 /* Return a tree for the comparison which is the combination of
2235 doing the AND or OR (depending on CODE) of the two operations LCODE
2236 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2237 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2238 if this makes the transformation invalid. */
2240 tree
2241 combine_comparisons (location_t loc,
2242 enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 int compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2316 enum tree_code tcode;
2318 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2319 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2350 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2354 || TREE_TYPE (arg0) == error_mark_node
2355 || TREE_TYPE (arg1) == error_mark_node)
2356 return 0;
2358 /* Similar, if either does not have a type (like a released SSA name),
2359 they aren't equal. */
2360 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2361 return 0;
2363 /* Check equality of integer constants before bailing out due to
2364 precision differences. */
2365 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2366 return tree_int_cst_equal (arg0, arg1);
2368 /* If both types don't have the same signedness, then we can't consider
2369 them equal. We must check this before the STRIP_NOPS calls
2370 because they may change the signedness of the arguments. As pointers
2371 strictly don't have a signedness, require either two pointers or
2372 two non-pointers as well. */
2373 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2374 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2375 return 0;
2377 /* We cannot consider pointers to different address space equal. */
2378 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2379 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2380 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2381 return 0;
2383 /* If both types don't have the same precision, then it is not safe
2384 to strip NOPs. */
2385 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2386 return 0;
2388 STRIP_NOPS (arg0);
2389 STRIP_NOPS (arg1);
2391 /* In case both args are comparisons but with different comparison
2392 code, try to swap the comparison operands of one arg to produce
2393 a match and compare that variant. */
2394 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2395 && COMPARISON_CLASS_P (arg0)
2396 && COMPARISON_CLASS_P (arg1))
2398 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2400 if (TREE_CODE (arg0) == swap_code)
2401 return operand_equal_p (TREE_OPERAND (arg0, 0),
2402 TREE_OPERAND (arg1, 1), flags)
2403 && operand_equal_p (TREE_OPERAND (arg0, 1),
2404 TREE_OPERAND (arg1, 0), flags);
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 /* This is needed for conversions and for COMPONENT_REF.
2409 Might as well play it safe and always test this. */
2410 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2411 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2412 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2413 return 0;
2415 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2416 We don't care about side effects in that case because the SAVE_EXPR
2417 takes care of that for us. In all other cases, two expressions are
2418 equal if they have no side effects. If we have two identical
2419 expressions with side effects that should be treated the same due
2420 to the only side effects being identical SAVE_EXPR's, that will
2421 be detected in the recursive calls below.
2422 If we are taking an invariant address of two identical objects
2423 they are necessarily equal as well. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (flags & OEP_CONSTANT_ADDRESS_OF)
2427 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2428 return 1;
2430 /* Next handle constant cases, those for which we can return 1 even
2431 if ONLY_CONST is set. */
2432 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2433 switch (TREE_CODE (arg0))
2435 case INTEGER_CST:
2436 return tree_int_cst_equal (arg0, arg1);
2438 case FIXED_CST:
2439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2440 TREE_FIXED_CST (arg1));
2442 case REAL_CST:
2443 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2444 TREE_REAL_CST (arg1)))
2445 return 1;
2448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2450 /* If we do not distinguish between signed and unsigned zero,
2451 consider them equal. */
2452 if (real_zerop (arg0) && real_zerop (arg1))
2453 return 1;
2455 return 0;
2457 case VECTOR_CST:
2459 unsigned i;
2461 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2462 return 0;
2464 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2466 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2467 VECTOR_CST_ELT (arg1, i), flags))
2468 return 0;
2470 return 1;
2473 case COMPLEX_CST:
2474 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2475 flags)
2476 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2477 flags));
2479 case STRING_CST:
2480 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2481 && ! memcmp (TREE_STRING_POINTER (arg0),
2482 TREE_STRING_POINTER (arg1),
2483 TREE_STRING_LENGTH (arg0)));
2485 case ADDR_EXPR:
2486 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2487 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2488 ? OEP_CONSTANT_ADDRESS_OF : 0);
2489 default:
2490 break;
2493 if (flags & OEP_ONLY_CONST)
2494 return 0;
2496 /* Define macros to test an operand from arg0 and arg1 for equality and a
2497 variant that allows null and views null as being different from any
2498 non-null value. In the latter case, if either is null, the both
2499 must be; otherwise, do the normal comparison. */
2500 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2501 TREE_OPERAND (arg1, N), flags)
2503 #define OP_SAME_WITH_NULL(N) \
2504 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2505 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2507 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2509 case tcc_unary:
2510 /* Two conversions are equal only if signedness and modes match. */
2511 switch (TREE_CODE (arg0))
2513 CASE_CONVERT:
2514 case FIX_TRUNC_EXPR:
2515 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2516 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2517 return 0;
2518 break;
2519 default:
2520 break;
2523 return OP_SAME (0);
2526 case tcc_comparison:
2527 case tcc_binary:
2528 if (OP_SAME (0) && OP_SAME (1))
2529 return 1;
2531 /* For commutative ops, allow the other order. */
2532 return (commutative_tree_code (TREE_CODE (arg0))
2533 && operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags));
2538 case tcc_reference:
2539 /* If either of the pointer (or reference) expressions we are
2540 dereferencing contain a side effect, these cannot be equal. */
2541 if (TREE_SIDE_EFFECTS (arg0)
2542 || TREE_SIDE_EFFECTS (arg1))
2543 return 0;
2545 switch (TREE_CODE (arg0))
2547 case INDIRECT_REF:
2548 case REALPART_EXPR:
2549 case IMAGPART_EXPR:
2550 return OP_SAME (0);
2552 case TARGET_MEM_REF:
2553 /* Require equal extra operands and then fall through to MEM_REF
2554 handling of the two common operands. */
2555 if (!OP_SAME_WITH_NULL (2)
2556 || !OP_SAME_WITH_NULL (3)
2557 || !OP_SAME_WITH_NULL (4))
2558 return 0;
2559 /* Fallthru. */
2560 case MEM_REF:
2561 /* Require equal access sizes, and similar pointer types.
2562 We can have incomplete types for array references of
2563 variable-sized arrays from the Fortran frontent
2564 though. */
2565 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2566 || (TYPE_SIZE (TREE_TYPE (arg0))
2567 && TYPE_SIZE (TREE_TYPE (arg1))
2568 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2569 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2570 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2571 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2572 && OP_SAME (0) && OP_SAME (1));
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null.
2577 Compare the array index by value if it is constant first as we
2578 may have different types but same value here. */
2579 return (OP_SAME (0)
2580 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 1))
2582 || OP_SAME (1))
2583 && OP_SAME_WITH_NULL (2)
2584 && OP_SAME_WITH_NULL (3));
2586 case COMPONENT_REF:
2587 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2588 may be NULL when we're called to compare MEM_EXPRs. */
2589 return OP_SAME_WITH_NULL (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2);
2593 case BIT_FIELD_REF:
2594 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 default:
2597 return 0;
2600 case tcc_expression:
2601 switch (TREE_CODE (arg0))
2603 case ADDR_EXPR:
2604 case TRUTH_NOT_EXPR:
2605 return OP_SAME (0);
2607 case TRUTH_ANDIF_EXPR:
2608 case TRUTH_ORIF_EXPR:
2609 return OP_SAME (0) && OP_SAME (1);
2611 case FMA_EXPR:
2612 case WIDEN_MULT_PLUS_EXPR:
2613 case WIDEN_MULT_MINUS_EXPR:
2614 if (!OP_SAME (2))
2615 return 0;
2616 /* The multiplcation operands are commutative. */
2617 /* FALLTHRU */
2619 case TRUTH_AND_EXPR:
2620 case TRUTH_OR_EXPR:
2621 case TRUTH_XOR_EXPR:
2622 if (OP_SAME (0) && OP_SAME (1))
2623 return 1;
2625 /* Otherwise take into account this is a commutative operation. */
2626 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2627 TREE_OPERAND (arg1, 1), flags)
2628 && operand_equal_p (TREE_OPERAND (arg0, 1),
2629 TREE_OPERAND (arg1, 0), flags));
2631 case COND_EXPR:
2632 case VEC_COND_EXPR:
2633 case DOT_PROD_EXPR:
2634 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 default:
2637 return 0;
2640 case tcc_vl_exp:
2641 switch (TREE_CODE (arg0))
2643 case CALL_EXPR:
2644 /* If the CALL_EXPRs call different functions, then they
2645 clearly can not be equal. */
2646 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2647 flags))
2648 return 0;
2651 unsigned int cef = call_expr_flags (arg0);
2652 if (flags & OEP_PURE_SAME)
2653 cef &= ECF_CONST | ECF_PURE;
2654 else
2655 cef &= ECF_CONST;
2656 if (!cef)
2657 return 0;
2660 /* Now see if all the arguments are the same. */
2662 const_call_expr_arg_iterator iter0, iter1;
2663 const_tree a0, a1;
2664 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2665 a1 = first_const_call_expr_arg (arg1, &iter1);
2666 a0 && a1;
2667 a0 = next_const_call_expr_arg (&iter0),
2668 a1 = next_const_call_expr_arg (&iter1))
2669 if (! operand_equal_p (a0, a1, flags))
2670 return 0;
2672 /* If we get here and both argument lists are exhausted
2673 then the CALL_EXPRs are equal. */
2674 return ! (a0 || a1);
2676 default:
2677 return 0;
2680 case tcc_declaration:
2681 /* Consider __builtin_sqrt equal to sqrt. */
2682 return (TREE_CODE (arg0) == FUNCTION_DECL
2683 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2684 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2685 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2687 default:
2688 return 0;
2691 #undef OP_SAME
2692 #undef OP_SAME_WITH_NULL
2695 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2696 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2698 When in doubt, return 0. */
2700 static int
2701 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2703 int unsignedp1, unsignedpo;
2704 tree primarg0, primarg1, primother;
2705 unsigned int correct_width;
2707 if (operand_equal_p (arg0, arg1, 0))
2708 return 1;
2710 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2711 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2712 return 0;
2714 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2715 and see if the inner values are the same. This removes any
2716 signedness comparison, which doesn't matter here. */
2717 primarg0 = arg0, primarg1 = arg1;
2718 STRIP_NOPS (primarg0);
2719 STRIP_NOPS (primarg1);
2720 if (operand_equal_p (primarg0, primarg1, 0))
2721 return 1;
2723 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2724 actual comparison operand, ARG0.
2726 First throw away any conversions to wider types
2727 already present in the operands. */
2729 primarg1 = get_narrower (arg1, &unsignedp1);
2730 primother = get_narrower (other, &unsignedpo);
2732 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2733 if (unsignedp1 == unsignedpo
2734 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2735 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2737 tree type = TREE_TYPE (arg0);
2739 /* Make sure shorter operand is extended the right way
2740 to match the longer operand. */
2741 primarg1 = fold_convert (signed_or_unsigned_type_for
2742 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2744 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2745 return 1;
2748 return 0;
2751 /* See if ARG is an expression that is either a comparison or is performing
2752 arithmetic on comparisons. The comparisons must only be comparing
2753 two different values, which will be stored in *CVAL1 and *CVAL2; if
2754 they are nonzero it means that some operands have already been found.
2755 No variables may be used anywhere else in the expression except in the
2756 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2757 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2759 If this is true, return 1. Otherwise, return zero. */
2761 static int
2762 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2769 tclass = tcc_unary;
2770 else if (tclass == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2772 || code == COMPOUND_EXPR))
2773 tclass = tcc_binary;
2775 else if (tclass == tcc_expression && code == SAVE_EXPR
2776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2778 /* If we've already found a CVAL1 or CVAL2, this expression is
2779 two complex to handle. */
2780 if (*cval1 || *cval2)
2781 return 0;
2783 tclass = tcc_unary;
2784 *save_p = 1;
2787 switch (tclass)
2789 case tcc_unary:
2790 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2792 case tcc_binary:
2793 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2794 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2795 cval1, cval2, save_p));
2797 case tcc_constant:
2798 return 1;
2800 case tcc_expression:
2801 if (code == COND_EXPR)
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2803 cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p)
2806 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2807 cval1, cval2, save_p));
2808 return 0;
2810 case tcc_comparison:
2811 /* First see if we can handle the first operand, then the second. For
2812 the second operand, we know *CVAL1 can't be zero. It must be that
2813 one side of the comparison is each of the values; test for the
2814 case where this isn't true by failing if the two operands
2815 are the same. */
2817 if (operand_equal_p (TREE_OPERAND (arg, 0),
2818 TREE_OPERAND (arg, 1), 0))
2819 return 0;
2821 if (*cval1 == 0)
2822 *cval1 = TREE_OPERAND (arg, 0);
2823 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2825 else if (*cval2 == 0)
2826 *cval2 = TREE_OPERAND (arg, 0);
2827 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2829 else
2830 return 0;
2832 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 1);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2838 else
2839 return 0;
2841 return 1;
2843 default:
2844 return 0;
2848 /* ARG is a tree that is known to contain just arithmetic operations and
2849 comparisons. Evaluate the operations in the tree substituting NEW0 for
2850 any occurrence of OLD0 as an operand of a comparison and likewise for
2851 NEW1 and OLD1. */
2853 static tree
2854 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2855 tree old1, tree new1)
2857 tree type = TREE_TYPE (arg);
2858 enum tree_code code = TREE_CODE (arg);
2859 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2861 /* We can handle some of the tcc_expression cases here. */
2862 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2863 tclass = tcc_unary;
2864 else if (tclass == tcc_expression
2865 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2866 tclass = tcc_binary;
2868 switch (tclass)
2870 case tcc_unary:
2871 return fold_build1_loc (loc, code, type,
2872 eval_subst (loc, TREE_OPERAND (arg, 0),
2873 old0, new0, old1, new1));
2875 case tcc_binary:
2876 return fold_build2_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1),
2879 eval_subst (loc, TREE_OPERAND (arg, 1),
2880 old0, new0, old1, new1));
2882 case tcc_expression:
2883 switch (code)
2885 case SAVE_EXPR:
2886 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2887 old1, new1);
2889 case COMPOUND_EXPR:
2890 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2891 old1, new1);
2893 case COND_EXPR:
2894 return fold_build3_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1),
2899 eval_subst (loc, TREE_OPERAND (arg, 2),
2900 old0, new0, old1, new1));
2901 default:
2902 break;
2904 /* Fall through - ??? */
2906 case tcc_comparison:
2908 tree arg0 = TREE_OPERAND (arg, 0);
2909 tree arg1 = TREE_OPERAND (arg, 1);
2911 /* We need to check both for exact equality and tree equality. The
2912 former will be true if the operand has a side-effect. In that
2913 case, we know the operand occurred exactly once. */
2915 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2916 arg0 = new0;
2917 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2918 arg0 = new1;
2920 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2921 arg1 = new0;
2922 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2923 arg1 = new1;
2925 return fold_build2_loc (loc, code, type, arg0, arg1);
2928 default:
2929 return arg;
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED was previously an operand of the expression
2935 but is now not needed (e.g., we folded OMITTED * 0).
2937 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2938 the conversion of RESULT to TYPE. */
2940 tree
2941 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2943 tree t = fold_convert_loc (loc, type, result);
2945 /* If the resulting operand is an empty statement, just return the omitted
2946 statement casted to void. */
2947 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2948 return build1_loc (loc, NOP_EXPR, void_type_node,
2949 fold_ignored_result (omitted));
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2_loc (loc, COMPOUND_EXPR, type,
2953 fold_ignored_result (omitted), t);
2955 return non_lvalue_loc (loc, t);
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2962 tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement casted to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2969 return build1_loc (loc, NOP_EXPR, void_type_node,
2970 fold_ignored_result (omitted));
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2_loc (loc, COMPOUND_EXPR, type,
2974 fold_ignored_result (omitted), t);
2976 return pedantic_non_lvalue_loc (loc, t);
2979 /* Return a tree for the case when the result of an expression is RESULT
2980 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2981 of the expression but are now not needed.
2983 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2984 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2985 evaluated before OMITTED2. Otherwise, if neither has side effects,
2986 just do the conversion of RESULT to TYPE. */
2988 tree
2989 omit_two_operands_loc (location_t loc, tree type, tree result,
2990 tree omitted1, tree omitted2)
2992 tree t = fold_convert_loc (loc, type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 tree
3011 fold_truth_not_expr (location_t loc, tree arg)
3013 tree type = TREE_TYPE (arg);
3014 enum tree_code code = TREE_CODE (arg);
3015 location_t loc1, loc2;
3017 /* If this is a comparison, we can simply invert it, except for
3018 floating-point non-equality comparisons, in which case we just
3019 enclose a TRUTH_NOT_EXPR around what we have. */
3021 if (TREE_CODE_CLASS (code) == tcc_comparison)
3023 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3024 if (FLOAT_TYPE_P (op_type)
3025 && flag_trapping_math
3026 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3027 && code != NE_EXPR && code != EQ_EXPR)
3028 return NULL_TREE;
3030 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3031 if (code == ERROR_MARK)
3032 return NULL_TREE;
3034 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3035 TREE_OPERAND (arg, 1));
3038 switch (code)
3040 case INTEGER_CST:
3041 return constant_boolean_node (integer_zerop (arg), type);
3043 case TRUTH_AND_EXPR:
3044 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3045 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3046 return build2_loc (loc, TRUTH_OR_EXPR, type,
3047 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3048 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3050 case TRUTH_OR_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_AND_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3066 else
3067 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3068 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3073 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3074 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3075 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3076 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 case TRUTH_ORIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 case TRUTH_NOT_EXPR:
3086 return TREE_OPERAND (arg, 0);
3088 case COND_EXPR:
3090 tree arg1 = TREE_OPERAND (arg, 1);
3091 tree arg2 = TREE_OPERAND (arg, 2);
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3096 /* A COND_EXPR may have a throw as one operand, which
3097 then has void type. Just leave void operands
3098 as they are. */
3099 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3100 VOID_TYPE_P (TREE_TYPE (arg1))
3101 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3102 VOID_TYPE_P (TREE_TYPE (arg2))
3103 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3106 case COMPOUND_EXPR:
3107 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 TREE_OPERAND (arg, 0),
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3112 case NON_LVALUE_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3116 CASE_CONVERT:
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3118 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3120 /* ... fall through ... */
3122 case FLOAT_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 return build1_loc (loc, TREE_CODE (arg), type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3127 case BIT_AND_EXPR:
3128 if (!integer_onep (TREE_OPERAND (arg, 1)))
3129 return NULL_TREE;
3130 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3132 case SAVE_EXPR:
3133 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3135 case CLEANUP_POINT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3140 default:
3141 return NULL_TREE;
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 tree
3153 invert_truthvalue_loc (location_t loc, tree arg)
3155 tree tem;
3157 if (TREE_CODE (arg) == ERROR_MARK)
3158 return arg;
3160 tem = fold_truth_not_expr (loc, arg);
3161 if (!tem)
3162 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3164 return tem;
3167 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3168 operands are another bit-wise operation with a common input. If so,
3169 distribute the bit operations to save an operation and possibly two if
3170 constants are involved. For example, convert
3171 (A | B) & (A | C) into A | (B & C)
3172 Further simplification will occur if B and C are constants.
3174 If this optimization cannot be done, 0 will be returned. */
3176 static tree
3177 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3178 tree arg0, tree arg1)
3180 tree common;
3181 tree left, right;
3183 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3184 || TREE_CODE (arg0) == code
3185 || (TREE_CODE (arg0) != BIT_AND_EXPR
3186 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3187 return 0;
3189 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3191 common = TREE_OPERAND (arg0, 0);
3192 left = TREE_OPERAND (arg0, 1);
3193 right = TREE_OPERAND (arg1, 1);
3195 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3197 common = TREE_OPERAND (arg0, 0);
3198 left = TREE_OPERAND (arg0, 1);
3199 right = TREE_OPERAND (arg1, 0);
3201 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3203 common = TREE_OPERAND (arg0, 1);
3204 left = TREE_OPERAND (arg0, 0);
3205 right = TREE_OPERAND (arg1, 1);
3207 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3209 common = TREE_OPERAND (arg0, 1);
3210 left = TREE_OPERAND (arg0, 0);
3211 right = TREE_OPERAND (arg1, 0);
3213 else
3214 return 0;
3216 common = fold_convert_loc (loc, type, common);
3217 left = fold_convert_loc (loc, type, left);
3218 right = fold_convert_loc (loc, type, right);
3219 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3220 fold_build2_loc (loc, code, type, left, right));
3223 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3224 with code CODE. This optimization is unsafe. */
3225 static tree
3226 distribute_real_division (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3230 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3232 /* (A / C) +- (B / C) -> (A +- B) / C. */
3233 if (mul0 == mul1
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1), 0))
3236 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3237 fold_build2_loc (loc, code, type,
3238 TREE_OPERAND (arg0, 0),
3239 TREE_OPERAND (arg1, 0)),
3240 TREE_OPERAND (arg0, 1));
3242 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3243 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0), 0)
3245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3248 REAL_VALUE_TYPE r0, r1;
3249 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3250 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3251 if (!mul0)
3252 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3253 if (!mul1)
3254 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3255 real_arithmetic (&r0, code, &r0, &r1);
3256 return fold_build2_loc (loc, MULT_EXPR, type,
3257 TREE_OPERAND (arg0, 0),
3258 build_real (type, r0));
3261 return NULL_TREE;
3264 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3265 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3267 static tree
3268 make_bit_field_ref (location_t loc, tree inner, tree type,
3269 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3271 tree result, bftype;
3273 if (bitpos == 0)
3275 tree size = TYPE_SIZE (TREE_TYPE (inner));
3276 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3277 || POINTER_TYPE_P (TREE_TYPE (inner)))
3278 && host_integerp (size, 0)
3279 && tree_low_cst (size, 0) == bitsize)
3280 return fold_convert_loc (loc, type, inner);
3283 bftype = type;
3284 if (TYPE_PRECISION (bftype) != bitsize
3285 || TYPE_UNSIGNED (bftype) == !unsignedp)
3286 bftype = build_nonstandard_integer_type (bitsize, 0);
3288 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 if (bftype != type)
3292 result = fold_convert_loc (loc, type, result);
3294 return result;
3297 /* Optimize a bit-field compare.
3299 There are two cases: First is a compare against a constant and the
3300 second is a comparison of two items where the fields are at the same
3301 bit position relative to the start of a chunk (byte, halfword, word)
3302 large enough to contain it. In these cases we can avoid the shift
3303 implicit in bitfield extractions.
3305 For constants, we emit a compare of the shifted constant with the
3306 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3307 compared. For two fields at the same position, we do the ANDs with the
3308 similar mask and compare the result of the ANDs.
3310 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3311 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3312 are the left and right operands of the comparison, respectively.
3314 If the optimization described above can be done, we return the resulting
3315 tree. Otherwise we return zero. */
3317 static tree
3318 optimize_bit_field_compare (location_t loc, enum tree_code code,
3319 tree compare_type, tree lhs, tree rhs)
3321 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3322 tree type = TREE_TYPE (lhs);
3323 tree signed_type, unsigned_type;
3324 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3325 enum machine_mode lmode, rmode, nmode;
3326 int lunsignedp, runsignedp;
3327 int lvolatilep = 0, rvolatilep = 0;
3328 tree linner, rinner = NULL_TREE;
3329 tree mask;
3330 tree offset;
3332 /* In the strict volatile bitfields case, doing code changes here may prevent
3333 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3334 if (flag_strict_volatile_bitfields > 0)
3335 return 0;
3337 /* Get all the information about the extractions being done. If the bit size
3338 if the same as the size of the underlying object, we aren't doing an
3339 extraction at all and so can do nothing. We also don't want to
3340 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3341 then will no longer be able to replace it. */
3342 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3343 &lunsignedp, &lvolatilep, false);
3344 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3345 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3346 return 0;
3348 if (!const_p)
3350 /* If this is not a constant, we can only do something if bit positions,
3351 sizes, and signedness are the same. */
3352 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3353 &runsignedp, &rvolatilep, false);
3355 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3356 || lunsignedp != runsignedp || offset != 0
3357 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3358 return 0;
3361 /* See if we can find a mode to refer to this field. We should be able to,
3362 but fail if we can't. */
3363 if (lvolatilep
3364 && GET_MODE_BITSIZE (lmode) > 0
3365 && flag_strict_volatile_bitfields > 0)
3366 nmode = lmode;
3367 else
3368 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3369 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3370 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3371 TYPE_ALIGN (TREE_TYPE (rinner))),
3372 word_mode, lvolatilep || rvolatilep);
3373 if (nmode == VOIDmode)
3374 return 0;
3376 /* Set signed and unsigned types of the precision of this mode for the
3377 shifts below. */
3378 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3379 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3381 /* Compute the bit position and size for the new reference and our offset
3382 within it. If the new reference is the same size as the original, we
3383 won't optimize anything, so return zero. */
3384 nbitsize = GET_MODE_BITSIZE (nmode);
3385 nbitpos = lbitpos & ~ (nbitsize - 1);
3386 lbitpos -= nbitpos;
3387 if (nbitsize == lbitsize)
3388 return 0;
3390 if (BYTES_BIG_ENDIAN)
3391 lbitpos = nbitsize - lbitsize - lbitpos;
3393 /* Make the mask to be used against the extracted field. */
3394 mask = build_int_cst_type (unsigned_type, -1);
3395 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3396 mask = const_binop (RSHIFT_EXPR, mask,
3397 size_int (nbitsize - lbitsize - lbitpos));
3399 if (! const_p)
3400 /* If not comparing with constant, just rework the comparison
3401 and return. */
3402 return fold_build2_loc (loc, code, compare_type,
3403 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3404 make_bit_field_ref (loc, linner,
3405 unsigned_type,
3406 nbitsize, nbitpos,
3408 mask),
3409 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3410 make_bit_field_ref (loc, rinner,
3411 unsigned_type,
3412 nbitsize, nbitpos,
3414 mask));
3416 /* Otherwise, we are handling the constant case. See if the constant is too
3417 big for the field. Warn and return a tree of for 0 (false) if so. We do
3418 this not only for its own sake, but to avoid having to test for this
3419 error case below. If we didn't, we might generate wrong code.
3421 For unsigned fields, the constant shifted right by the field length should
3422 be all zero. For signed fields, the high-order bits should agree with
3423 the sign bit. */
3425 if (lunsignedp)
3427 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3428 fold_convert_loc (loc,
3429 unsigned_type, rhs),
3430 size_int (lbitsize))))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR,
3440 fold_convert_loc (loc, signed_type, rhs),
3441 size_int (lbitsize - 1));
3442 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3444 warning (0, "comparison is always %d due to width of bit-field",
3445 code == NE_EXPR);
3446 return constant_boolean_node (code == NE_EXPR, compare_type);
3450 /* Single-bit compares should always be against zero. */
3451 if (lbitsize == 1 && ! integer_zerop (rhs))
3453 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3454 rhs = build_int_cst (type, 0);
3457 /* Make a new bitfield reference, shift the constant over the
3458 appropriate number of bits and mask it with the computed mask
3459 (in case this was a signed field). If we changed it, make a new one. */
3460 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3461 if (lvolatilep)
3463 TREE_SIDE_EFFECTS (lhs) = 1;
3464 TREE_THIS_VOLATILE (lhs) = 1;
3467 rhs = const_binop (BIT_AND_EXPR,
3468 const_binop (LSHIFT_EXPR,
3469 fold_convert_loc (loc, unsigned_type, rhs),
3470 size_int (lbitpos)),
3471 mask);
3473 lhs = build2_loc (loc, code, compare_type,
3474 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3475 return lhs;
3478 /* Subroutine for fold_truth_andor_1: decode a field reference.
3480 If EXP is a comparison reference, we return the innermost reference.
3482 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3483 set to the starting bit number.
3485 If the innermost field can be completely contained in a mode-sized
3486 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3488 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3489 otherwise it is not changed.
3491 *PUNSIGNEDP is set to the signedness of the field.
3493 *PMASK is set to the mask used. This is either contained in a
3494 BIT_AND_EXPR or derived from the width of the field.
3496 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3498 Return 0 if this is not a component reference or is one that we can't
3499 do anything with. */
3501 static tree
3502 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3503 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3504 int *punsignedp, int *pvolatilep,
3505 tree *pmask, tree *pand_mask)
3507 tree outer_type = 0;
3508 tree and_mask = 0;
3509 tree mask, inner, offset;
3510 tree unsigned_type;
3511 unsigned int precision;
3513 /* All the optimizations using this function assume integer fields.
3514 There are problems with FP fields since the type_for_size call
3515 below can fail for, e.g., XFmode. */
3516 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3517 return 0;
3519 /* We are interested in the bare arrangement of bits, so strip everything
3520 that doesn't affect the machine mode. However, record the type of the
3521 outermost expression if it may matter below. */
3522 if (CONVERT_EXPR_P (exp)
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst_type (unsigned_type, -1);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3556 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3558 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3559 if (and_mask != 0)
3560 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3561 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3563 *pmask = mask;
3564 *pand_mask = and_mask;
3565 return inner;
3568 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3569 bit positions. */
3571 static int
3572 all_ones_mask_p (const_tree mask, int size)
3574 tree type = TREE_TYPE (mask);
3575 unsigned int precision = TYPE_PRECISION (type);
3576 tree tmask;
3578 tmask = build_int_cst_type (signed_type_for (type), -1);
3580 return
3581 tree_int_cst_equal (mask,
3582 const_binop (RSHIFT_EXPR,
3583 const_binop (LSHIFT_EXPR, tmask,
3584 size_int (precision - size)),
3585 size_int (precision - size)));
3588 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3589 represents the sign bit of EXP's type. If EXP represents a sign
3590 or zero extension, also test VAL against the unextended type.
3591 The return value is the (sub)expression whose sign bit is VAL,
3592 or NULL_TREE otherwise. */
3594 static tree
3595 sign_bit_p (tree exp, const_tree val)
3597 unsigned HOST_WIDE_INT mask_lo, lo;
3598 HOST_WIDE_INT mask_hi, hi;
3599 int width;
3600 tree t;
3602 /* Tree EXP must have an integral type. */
3603 t = TREE_TYPE (exp);
3604 if (! INTEGRAL_TYPE_P (t))
3605 return NULL_TREE;
3607 /* Tree VAL must be an integer constant. */
3608 if (TREE_CODE (val) != INTEGER_CST
3609 || TREE_OVERFLOW (val))
3610 return NULL_TREE;
3612 width = TYPE_PRECISION (t);
3613 if (width > HOST_BITS_PER_WIDE_INT)
3615 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3616 lo = 0;
3618 mask_hi = ((unsigned HOST_WIDE_INT) -1
3619 >> (HOST_BITS_PER_DOUBLE_INT - width));
3620 mask_lo = -1;
3622 else
3624 hi = 0;
3625 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3627 mask_hi = 0;
3628 mask_lo = ((unsigned HOST_WIDE_INT) -1
3629 >> (HOST_BITS_PER_WIDE_INT - width));
3632 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3633 treat VAL as if it were unsigned. */
3634 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3635 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3636 return exp;
3638 /* Handle extension from a narrower type. */
3639 if (TREE_CODE (exp) == NOP_EXPR
3640 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3641 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3643 return NULL_TREE;
3646 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3647 to be evaluated unconditionally. */
3649 static int
3650 simple_operand_p (const_tree exp)
3652 /* Strip any conversions that don't change the machine mode. */
3653 STRIP_NOPS (exp);
3655 return (CONSTANT_CLASS_P (exp)
3656 || TREE_CODE (exp) == SSA_NAME
3657 || (DECL_P (exp)
3658 && ! TREE_ADDRESSABLE (exp)
3659 && ! TREE_THIS_VOLATILE (exp)
3660 && ! DECL_NONLOCAL (exp)
3661 /* Don't regard global variables as simple. They may be
3662 allocated in ways unknown to the compiler (shared memory,
3663 #pragma weak, etc). */
3664 && ! TREE_PUBLIC (exp)
3665 && ! DECL_EXTERNAL (exp)
3666 /* Loading a static variable is unduly expensive, but global
3667 registers aren't expensive. */
3668 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3671 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3672 to be evaluated unconditionally.
3673 I addition to simple_operand_p, we assume that comparisons, conversions,
3674 and logic-not operations are simple, if their operands are simple, too. */
3676 static bool
3677 simple_operand_p_2 (tree exp)
3679 enum tree_code code;
3681 if (TREE_SIDE_EFFECTS (exp)
3682 || tree_could_trap_p (exp))
3683 return false;
3685 while (CONVERT_EXPR_P (exp))
3686 exp = TREE_OPERAND (exp, 0);
3688 code = TREE_CODE (exp);
3690 if (TREE_CODE_CLASS (code) == tcc_comparison)
3691 return (simple_operand_p (TREE_OPERAND (exp, 0))
3692 && simple_operand_p (TREE_OPERAND (exp, 1)));
3694 if (code == TRUTH_NOT_EXPR)
3695 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3697 return simple_operand_p (exp);
3701 /* The following functions are subroutines to fold_range_test and allow it to
3702 try to change a logical combination of comparisons into a range test.
3704 For example, both
3705 X == 2 || X == 3 || X == 4 || X == 5
3707 X >= 2 && X <= 5
3708 are converted to
3709 (unsigned) (X - 2) <= 3
3711 We describe each set of comparisons as being either inside or outside
3712 a range, using a variable named like IN_P, and then describe the
3713 range with a lower and upper bound. If one of the bounds is omitted,
3714 it represents either the highest or lowest value of the type.
3716 In the comments below, we represent a range by two numbers in brackets
3717 preceded by a "+" to designate being inside that range, or a "-" to
3718 designate being outside that range, so the condition can be inverted by
3719 flipping the prefix. An omitted bound is represented by a "-". For
3720 example, "- [-, 10]" means being outside the range starting at the lowest
3721 possible value and ending at 10, in other words, being greater than 10.
3722 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 always false.
3725 We set up things so that the missing bounds are handled in a consistent
3726 manner so neither a missing bound nor "true" and "false" need to be
3727 handled using a special case. */
3729 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3730 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3731 and UPPER1_P are nonzero if the respective argument is an upper bound
3732 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3733 must be specified for a comparison. ARG1 will be converted to ARG0's
3734 type if both are specified. */
3736 static tree
3737 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3738 tree arg1, int upper1_p)
3740 tree tem;
3741 int result;
3742 int sgn0, sgn1;
3744 /* If neither arg represents infinity, do the normal operation.
3745 Else, if not a comparison, return infinity. Else handle the special
3746 comparison rules. Note that most of the cases below won't occur, but
3747 are handled for consistency. */
3749 if (arg0 != 0 && arg1 != 0)
3751 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3752 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 STRIP_NOPS (tem);
3754 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3757 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 return 0;
3760 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3761 for neither. In real maths, we cannot assume open ended ranges are
3762 the same. But, this is computer arithmetic, where numbers are finite.
3763 We can therefore make the transformation of any unbounded range with
3764 the value Z, Z being greater than any representable number. This permits
3765 us to treat unbounded ranges as equal. */
3766 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3767 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3768 switch (code)
3770 case EQ_EXPR:
3771 result = sgn0 == sgn1;
3772 break;
3773 case NE_EXPR:
3774 result = sgn0 != sgn1;
3775 break;
3776 case LT_EXPR:
3777 result = sgn0 < sgn1;
3778 break;
3779 case LE_EXPR:
3780 result = sgn0 <= sgn1;
3781 break;
3782 case GT_EXPR:
3783 result = sgn0 > sgn1;
3784 break;
3785 case GE_EXPR:
3786 result = sgn0 >= sgn1;
3787 break;
3788 default:
3789 gcc_unreachable ();
3792 return constant_boolean_node (result, type);
3795 /* Helper routine for make_range. Perform one step for it, return
3796 new expression if the loop should continue or NULL_TREE if it should
3797 stop. */
3799 tree
3800 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3801 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3802 bool *strict_overflow_p)
3804 tree arg0_type = TREE_TYPE (arg0);
3805 tree n_low, n_high, low = *p_low, high = *p_high;
3806 int in_p = *p_in_p, n_in_p;
3808 switch (code)
3810 case TRUTH_NOT_EXPR:
3811 *p_in_p = ! in_p;
3812 return arg0;
3814 case EQ_EXPR: case NE_EXPR:
3815 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3816 /* We can only do something if the range is testing for zero
3817 and if the second operand is an integer constant. Note that
3818 saying something is "in" the range we make is done by
3819 complementing IN_P since it will set in the initial case of
3820 being not equal to zero; "out" is leaving it alone. */
3821 if (low == NULL_TREE || high == NULL_TREE
3822 || ! integer_zerop (low) || ! integer_zerop (high)
3823 || TREE_CODE (arg1) != INTEGER_CST)
3824 return NULL_TREE;
3826 switch (code)
3828 case NE_EXPR: /* - [c, c] */
3829 low = high = arg1;
3830 break;
3831 case EQ_EXPR: /* + [c, c] */
3832 in_p = ! in_p, low = high = arg1;
3833 break;
3834 case GT_EXPR: /* - [-, c] */
3835 low = 0, high = arg1;
3836 break;
3837 case GE_EXPR: /* + [c, -] */
3838 in_p = ! in_p, low = arg1, high = 0;
3839 break;
3840 case LT_EXPR: /* - [c, -] */
3841 low = arg1, high = 0;
3842 break;
3843 case LE_EXPR: /* + [-, c] */
3844 in_p = ! in_p, low = 0, high = arg1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 /* If this is an unsigned comparison, we also know that EXP is
3851 greater than or equal to zero. We base the range tests we make
3852 on that fact, so we record it here so we can parse existing
3853 range tests. We test arg0_type since often the return type
3854 of, e.g. EQ_EXPR, is boolean. */
3855 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3857 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3858 in_p, low, high, 1,
3859 build_int_cst (arg0_type, 0),
3860 NULL_TREE))
3861 return NULL_TREE;
3863 in_p = n_in_p, low = n_low, high = n_high;
3865 /* If the high bound is missing, but we have a nonzero low
3866 bound, reverse the range so it goes from zero to the low bound
3867 minus 1. */
3868 if (high == 0 && low && ! integer_zerop (low))
3870 in_p = ! in_p;
3871 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3872 integer_one_node, 0);
3873 low = build_int_cst (arg0_type, 0);
3877 *p_low = low;
3878 *p_high = high;
3879 *p_in_p = in_p;
3880 return arg0;
3882 case NEGATE_EXPR:
3883 /* (-x) IN [a,b] -> x in [-b, -a] */
3884 n_low = range_binop (MINUS_EXPR, exp_type,
3885 build_int_cst (exp_type, 0),
3886 0, high, 1);
3887 n_high = range_binop (MINUS_EXPR, exp_type,
3888 build_int_cst (exp_type, 0),
3889 0, low, 0);
3890 if (n_high != 0 && TREE_OVERFLOW (n_high))
3891 return NULL_TREE;
3892 goto normalize;
3894 case BIT_NOT_EXPR:
3895 /* ~ X -> -X - 1 */
3896 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3897 build_int_cst (exp_type, 1));
3899 case PLUS_EXPR:
3900 case MINUS_EXPR:
3901 if (TREE_CODE (arg1) != INTEGER_CST)
3902 return NULL_TREE;
3904 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3905 move a constant to the other side. */
3906 if (!TYPE_UNSIGNED (arg0_type)
3907 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3908 return NULL_TREE;
3910 /* If EXP is signed, any overflow in the computation is undefined,
3911 so we don't worry about it so long as our computations on
3912 the bounds don't overflow. For unsigned, overflow is defined
3913 and this is exactly the right thing. */
3914 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3915 arg0_type, low, 0, arg1, 0);
3916 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3917 arg0_type, high, 1, arg1, 0);
3918 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3919 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3920 return NULL_TREE;
3922 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3923 *strict_overflow_p = true;
3925 normalize:
3926 /* Check for an unsigned range which has wrapped around the maximum
3927 value thus making n_high < n_low, and normalize it. */
3928 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3930 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3931 integer_one_node, 0);
3932 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3933 integer_one_node, 0);
3935 /* If the range is of the form +/- [ x+1, x ], we won't
3936 be able to normalize it. But then, it represents the
3937 whole range or the empty set, so make it
3938 +/- [ -, - ]. */
3939 if (tree_int_cst_equal (n_low, low)
3940 && tree_int_cst_equal (n_high, high))
3941 low = high = 0;
3942 else
3943 in_p = ! in_p;
3945 else
3946 low = n_low, high = n_high;
3948 *p_low = low;
3949 *p_high = high;
3950 *p_in_p = in_p;
3951 return arg0;
3953 CASE_CONVERT:
3954 case NON_LVALUE_EXPR:
3955 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3956 return NULL_TREE;
3958 if (! INTEGRAL_TYPE_P (arg0_type)
3959 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3960 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3961 return NULL_TREE;
3963 n_low = low, n_high = high;
3965 if (n_low != 0)
3966 n_low = fold_convert_loc (loc, arg0_type, n_low);
3968 if (n_high != 0)
3969 n_high = fold_convert_loc (loc, arg0_type, n_high);
3971 /* If we're converting arg0 from an unsigned type, to exp,
3972 a signed type, we will be doing the comparison as unsigned.
3973 The tests above have already verified that LOW and HIGH
3974 are both positive.
3976 So we have to ensure that we will handle large unsigned
3977 values the same way that the current signed bounds treat
3978 negative values. */
3980 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3982 tree high_positive;
3983 tree equiv_type;
3984 /* For fixed-point modes, we need to pass the saturating flag
3985 as the 2nd parameter. */
3986 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3987 equiv_type
3988 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3989 TYPE_SATURATING (arg0_type));
3990 else
3991 equiv_type
3992 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
3994 /* A range without an upper bound is, naturally, unbounded.
3995 Since convert would have cropped a very large value, use
3996 the max value for the destination type. */
3997 high_positive
3998 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3999 : TYPE_MAX_VALUE (arg0_type);
4001 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4002 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4003 fold_convert_loc (loc, arg0_type,
4004 high_positive),
4005 build_int_cst (arg0_type, 1));
4007 /* If the low bound is specified, "and" the range with the
4008 range for which the original unsigned value will be
4009 positive. */
4010 if (low != 0)
4012 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4013 1, fold_convert_loc (loc, arg0_type,
4014 integer_zero_node),
4015 high_positive))
4016 return NULL_TREE;
4018 in_p = (n_in_p == in_p);
4020 else
4022 /* Otherwise, "or" the range with the range of the input
4023 that will be interpreted as negative. */
4024 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4025 1, fold_convert_loc (loc, arg0_type,
4026 integer_zero_node),
4027 high_positive))
4028 return NULL_TREE;
4030 in_p = (in_p != n_in_p);
4034 *p_low = n_low;
4035 *p_high = n_high;
4036 *p_in_p = in_p;
4037 return arg0;
4039 default:
4040 return NULL_TREE;
4044 /* Given EXP, a logical expression, set the range it is testing into
4045 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4046 actually being tested. *PLOW and *PHIGH will be made of the same
4047 type as the returned expression. If EXP is not a comparison, we
4048 will most likely not be returning a useful value and range. Set
4049 *STRICT_OVERFLOW_P to true if the return value is only valid
4050 because signed overflow is undefined; otherwise, do not change
4051 *STRICT_OVERFLOW_P. */
4053 tree
4054 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4055 bool *strict_overflow_p)
4057 enum tree_code code;
4058 tree arg0, arg1 = NULL_TREE;
4059 tree exp_type, nexp;
4060 int in_p;
4061 tree low, high;
4062 location_t loc = EXPR_LOCATION (exp);
4064 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4065 and see if we can refine the range. Some of the cases below may not
4066 happen, but it doesn't seem worth worrying about this. We "continue"
4067 the outer loop when we've changed something; otherwise we "break"
4068 the switch, which will "break" the while. */
4070 in_p = 0;
4071 low = high = build_int_cst (TREE_TYPE (exp), 0);
4073 while (1)
4075 code = TREE_CODE (exp);
4076 exp_type = TREE_TYPE (exp);
4077 arg0 = NULL_TREE;
4079 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4081 if (TREE_OPERAND_LENGTH (exp) > 0)
4082 arg0 = TREE_OPERAND (exp, 0);
4083 if (TREE_CODE_CLASS (code) == tcc_binary
4084 || TREE_CODE_CLASS (code) == tcc_comparison
4085 || (TREE_CODE_CLASS (code) == tcc_expression
4086 && TREE_OPERAND_LENGTH (exp) > 1))
4087 arg1 = TREE_OPERAND (exp, 1);
4089 if (arg0 == NULL_TREE)
4090 break;
4092 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4093 &high, &in_p, strict_overflow_p);
4094 if (nexp == NULL_TREE)
4095 break;
4096 exp = nexp;
4099 /* If EXP is a constant, we can evaluate whether this is true or false. */
4100 if (TREE_CODE (exp) == INTEGER_CST)
4102 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4103 exp, 0, low, 0))
4104 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4105 exp, 1, high, 1)));
4106 low = high = 0;
4107 exp = 0;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4111 return exp;
4114 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4115 type, TYPE, return an expression to test if EXP is in (or out of, depending
4116 on IN_P) the range. Return 0 if the test couldn't be created. */
4118 tree
4119 build_range_check (location_t loc, tree type, tree exp, int in_p,
4120 tree low, tree high)
4122 tree etype = TREE_TYPE (exp), value;
4124 #ifdef HAVE_canonicalize_funcptr_for_compare
4125 /* Disable this optimization for function pointer expressions
4126 on targets that require function pointer canonicalization. */
4127 if (HAVE_canonicalize_funcptr_for_compare
4128 && TREE_CODE (etype) == POINTER_TYPE
4129 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4130 return NULL_TREE;
4131 #endif
4133 if (! in_p)
4135 value = build_range_check (loc, type, exp, 1, low, high);
4136 if (value != 0)
4137 return invert_truthvalue_loc (loc, value);
4139 return 0;
4142 if (low == 0 && high == 0)
4143 return build_int_cst (type, 1);
4145 if (low == 0)
4146 return fold_build2_loc (loc, LE_EXPR, type, exp,
4147 fold_convert_loc (loc, etype, high));
4149 if (high == 0)
4150 return fold_build2_loc (loc, GE_EXPR, type, exp,
4151 fold_convert_loc (loc, etype, low));
4153 if (operand_equal_p (low, high, 0))
4154 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4155 fold_convert_loc (loc, etype, low));
4157 if (integer_zerop (low))
4159 if (! TYPE_UNSIGNED (etype))
4161 etype = unsigned_type_for (etype);
4162 high = fold_convert_loc (loc, etype, high);
4163 exp = fold_convert_loc (loc, etype, exp);
4165 return build_range_check (loc, type, exp, 1, 0, high);
4168 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4169 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4171 unsigned HOST_WIDE_INT lo;
4172 HOST_WIDE_INT hi;
4173 int prec;
4175 prec = TYPE_PRECISION (etype);
4176 if (prec <= HOST_BITS_PER_WIDE_INT)
4178 hi = 0;
4179 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4181 else
4183 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4184 lo = (unsigned HOST_WIDE_INT) -1;
4187 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4189 if (TYPE_UNSIGNED (etype))
4191 tree signed_etype = signed_type_for (etype);
4192 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4193 etype
4194 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4195 else
4196 etype = signed_etype;
4197 exp = fold_convert_loc (loc, etype, exp);
4199 return fold_build2_loc (loc, GT_EXPR, type, exp,
4200 build_int_cst (etype, 0));
4204 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4205 This requires wrap-around arithmetics for the type of the expression.
4206 First make sure that arithmetics in this type is valid, then make sure
4207 that it wraps around. */
4208 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4209 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4210 TYPE_UNSIGNED (etype));
4212 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4214 tree utype, minv, maxv;
4216 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4217 for the type in question, as we rely on this here. */
4218 utype = unsigned_type_for (etype);
4219 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4220 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4221 integer_one_node, 1);
4222 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4224 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4225 minv, 1, maxv, 1)))
4226 etype = utype;
4227 else
4228 return 0;
4231 high = fold_convert_loc (loc, etype, high);
4232 low = fold_convert_loc (loc, etype, low);
4233 exp = fold_convert_loc (loc, etype, exp);
4235 value = const_binop (MINUS_EXPR, high, low);
4238 if (POINTER_TYPE_P (etype))
4240 if (value != 0 && !TREE_OVERFLOW (value))
4242 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4243 return build_range_check (loc, type,
4244 fold_build_pointer_plus_loc (loc, exp, low),
4245 1, build_int_cst (etype, 0), value);
4247 return 0;
4250 if (value != 0 && !TREE_OVERFLOW (value))
4251 return build_range_check (loc, type,
4252 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4253 1, build_int_cst (etype, 0), value);
4255 return 0;
4258 /* Return the predecessor of VAL in its type, handling the infinite case. */
4260 static tree
4261 range_predecessor (tree val)
4263 tree type = TREE_TYPE (val);
4265 if (INTEGRAL_TYPE_P (type)
4266 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4267 return 0;
4268 else
4269 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4272 /* Return the successor of VAL in its type, handling the infinite case. */
4274 static tree
4275 range_successor (tree val)
4277 tree type = TREE_TYPE (val);
4279 if (INTEGRAL_TYPE_P (type)
4280 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4281 return 0;
4282 else
4283 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4286 /* Given two ranges, see if we can merge them into one. Return 1 if we
4287 can, 0 if we can't. Set the output range into the specified parameters. */
4289 bool
4290 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4291 tree high0, int in1_p, tree low1, tree high1)
4293 int no_overlap;
4294 int subset;
4295 int temp;
4296 tree tem;
4297 int in_p;
4298 tree low, high;
4299 int lowequal = ((low0 == 0 && low1 == 0)
4300 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4301 low0, 0, low1, 0)));
4302 int highequal = ((high0 == 0 && high1 == 0)
4303 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4304 high0, 1, high1, 1)));
4306 /* Make range 0 be the range that starts first, or ends last if they
4307 start at the same value. Swap them if it isn't. */
4308 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4309 low0, 0, low1, 0))
4310 || (lowequal
4311 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4312 high1, 1, high0, 1))))
4314 temp = in0_p, in0_p = in1_p, in1_p = temp;
4315 tem = low0, low0 = low1, low1 = tem;
4316 tem = high0, high0 = high1, high1 = tem;
4319 /* Now flag two cases, whether the ranges are disjoint or whether the
4320 second range is totally subsumed in the first. Note that the tests
4321 below are simplified by the ones above. */
4322 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4323 high0, 1, low1, 0));
4324 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4325 high1, 1, high0, 1));
4327 /* We now have four cases, depending on whether we are including or
4328 excluding the two ranges. */
4329 if (in0_p && in1_p)
4331 /* If they don't overlap, the result is false. If the second range
4332 is a subset it is the result. Otherwise, the range is from the start
4333 of the second to the end of the first. */
4334 if (no_overlap)
4335 in_p = 0, low = high = 0;
4336 else if (subset)
4337 in_p = 1, low = low1, high = high1;
4338 else
4339 in_p = 1, low = low1, high = high0;
4342 else if (in0_p && ! in1_p)
4344 /* If they don't overlap, the result is the first range. If they are
4345 equal, the result is false. If the second range is a subset of the
4346 first, and the ranges begin at the same place, we go from just after
4347 the end of the second range to the end of the first. If the second
4348 range is not a subset of the first, or if it is a subset and both
4349 ranges end at the same place, the range starts at the start of the
4350 first range and ends just before the second range.
4351 Otherwise, we can't describe this as a single range. */
4352 if (no_overlap)
4353 in_p = 1, low = low0, high = high0;
4354 else if (lowequal && highequal)
4355 in_p = 0, low = high = 0;
4356 else if (subset && lowequal)
4358 low = range_successor (high1);
4359 high = high0;
4360 in_p = 1;
4361 if (low == 0)
4363 /* We are in the weird situation where high0 > high1 but
4364 high1 has no successor. Punt. */
4365 return 0;
4368 else if (! subset || highequal)
4370 low = low0;
4371 high = range_predecessor (low1);
4372 in_p = 1;
4373 if (high == 0)
4375 /* low0 < low1 but low1 has no predecessor. Punt. */
4376 return 0;
4379 else
4380 return 0;
4383 else if (! in0_p && in1_p)
4385 /* If they don't overlap, the result is the second range. If the second
4386 is a subset of the first, the result is false. Otherwise,
4387 the range starts just after the first range and ends at the
4388 end of the second. */
4389 if (no_overlap)
4390 in_p = 1, low = low1, high = high1;
4391 else if (subset || highequal)
4392 in_p = 0, low = high = 0;
4393 else
4395 low = range_successor (high0);
4396 high = high1;
4397 in_p = 1;
4398 if (low == 0)
4400 /* high1 > high0 but high0 has no successor. Punt. */
4401 return 0;
4406 else
4408 /* The case where we are excluding both ranges. Here the complex case
4409 is if they don't overlap. In that case, the only time we have a
4410 range is if they are adjacent. If the second is a subset of the
4411 first, the result is the first. Otherwise, the range to exclude
4412 starts at the beginning of the first range and ends at the end of the
4413 second. */
4414 if (no_overlap)
4416 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4417 range_successor (high0),
4418 1, low1, 0)))
4419 in_p = 0, low = low0, high = high1;
4420 else
4422 /* Canonicalize - [min, x] into - [-, x]. */
4423 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4424 switch (TREE_CODE (TREE_TYPE (low0)))
4426 case ENUMERAL_TYPE:
4427 if (TYPE_PRECISION (TREE_TYPE (low0))
4428 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4429 break;
4430 /* FALLTHROUGH */
4431 case INTEGER_TYPE:
4432 if (tree_int_cst_equal (low0,
4433 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4434 low0 = 0;
4435 break;
4436 case POINTER_TYPE:
4437 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4438 && integer_zerop (low0))
4439 low0 = 0;
4440 break;
4441 default:
4442 break;
4445 /* Canonicalize - [x, max] into - [x, -]. */
4446 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4447 switch (TREE_CODE (TREE_TYPE (high1)))
4449 case ENUMERAL_TYPE:
4450 if (TYPE_PRECISION (TREE_TYPE (high1))
4451 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4452 break;
4453 /* FALLTHROUGH */
4454 case INTEGER_TYPE:
4455 if (tree_int_cst_equal (high1,
4456 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4457 high1 = 0;
4458 break;
4459 case POINTER_TYPE:
4460 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4461 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4462 high1, 1,
4463 integer_one_node, 1)))
4464 high1 = 0;
4465 break;
4466 default:
4467 break;
4470 /* The ranges might be also adjacent between the maximum and
4471 minimum values of the given type. For
4472 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4473 return + [x + 1, y - 1]. */
4474 if (low0 == 0 && high1 == 0)
4476 low = range_successor (high0);
4477 high = range_predecessor (low1);
4478 if (low == 0 || high == 0)
4479 return 0;
4481 in_p = 1;
4483 else
4484 return 0;
4487 else if (subset)
4488 in_p = 0, low = low0, high = high0;
4489 else
4490 in_p = 0, low = low0, high = high1;
4493 *pin_p = in_p, *plow = low, *phigh = high;
4494 return 1;
4498 /* Subroutine of fold, looking inside expressions of the form
4499 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4500 of the COND_EXPR. This function is being used also to optimize
4501 A op B ? C : A, by reversing the comparison first.
4503 Return a folded expression whose code is not a COND_EXPR
4504 anymore, or NULL_TREE if no folding opportunity is found. */
4506 static tree
4507 fold_cond_expr_with_comparison (location_t loc, tree type,
4508 tree arg0, tree arg1, tree arg2)
4510 enum tree_code comp_code = TREE_CODE (arg0);
4511 tree arg00 = TREE_OPERAND (arg0, 0);
4512 tree arg01 = TREE_OPERAND (arg0, 1);
4513 tree arg1_type = TREE_TYPE (arg1);
4514 tree tem;
4516 STRIP_NOPS (arg1);
4517 STRIP_NOPS (arg2);
4519 /* If we have A op 0 ? A : -A, consider applying the following
4520 transformations:
4522 A == 0? A : -A same as -A
4523 A != 0? A : -A same as A
4524 A >= 0? A : -A same as abs (A)
4525 A > 0? A : -A same as abs (A)
4526 A <= 0? A : -A same as -abs (A)
4527 A < 0? A : -A same as -abs (A)
4529 None of these transformations work for modes with signed
4530 zeros. If A is +/-0, the first two transformations will
4531 change the sign of the result (from +0 to -0, or vice
4532 versa). The last four will fix the sign of the result,
4533 even though the original expressions could be positive or
4534 negative, depending on the sign of A.
4536 Note that all these transformations are correct if A is
4537 NaN, since the two alternatives (A and -A) are also NaNs. */
4538 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4539 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4540 ? real_zerop (arg01)
4541 : integer_zerop (arg01))
4542 && ((TREE_CODE (arg2) == NEGATE_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4544 /* In the case that A is of the form X-Y, '-A' (arg2) may
4545 have already been folded to Y-X, check for that. */
4546 || (TREE_CODE (arg1) == MINUS_EXPR
4547 && TREE_CODE (arg2) == MINUS_EXPR
4548 && operand_equal_p (TREE_OPERAND (arg1, 0),
4549 TREE_OPERAND (arg2, 1), 0)
4550 && operand_equal_p (TREE_OPERAND (arg1, 1),
4551 TREE_OPERAND (arg2, 0), 0))))
4552 switch (comp_code)
4554 case EQ_EXPR:
4555 case UNEQ_EXPR:
4556 tem = fold_convert_loc (loc, arg1_type, arg1);
4557 return pedantic_non_lvalue_loc (loc,
4558 fold_convert_loc (loc, type,
4559 negate_expr (tem)));
4560 case NE_EXPR:
4561 case LTGT_EXPR:
4562 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4563 case UNGE_EXPR:
4564 case UNGT_EXPR:
4565 if (flag_trapping_math)
4566 break;
4567 /* Fall through. */
4568 case GE_EXPR:
4569 case GT_EXPR:
4570 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4571 arg1 = fold_convert_loc (loc, signed_type_for
4572 (TREE_TYPE (arg1)), arg1);
4573 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4574 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4575 case UNLE_EXPR:
4576 case UNLT_EXPR:
4577 if (flag_trapping_math)
4578 break;
4579 case LE_EXPR:
4580 case LT_EXPR:
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return negate_expr (fold_convert_loc (loc, type, tem));
4586 default:
4587 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4588 break;
4591 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4592 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4593 both transformations are correct when A is NaN: A != 0
4594 is then true, and A == 0 is false. */
4596 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4597 && integer_zerop (arg01) && integer_zerop (arg2))
4599 if (comp_code == NE_EXPR)
4600 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4601 else if (comp_code == EQ_EXPR)
4602 return build_int_cst (type, 0);
4605 /* Try some transformations of A op B ? A : B.
4607 A == B? A : B same as B
4608 A != B? A : B same as A
4609 A >= B? A : B same as max (A, B)
4610 A > B? A : B same as max (B, A)
4611 A <= B? A : B same as min (A, B)
4612 A < B? A : B same as min (B, A)
4614 As above, these transformations don't work in the presence
4615 of signed zeros. For example, if A and B are zeros of
4616 opposite sign, the first two transformations will change
4617 the sign of the result. In the last four, the original
4618 expressions give different results for (A=+0, B=-0) and
4619 (A=-0, B=+0), but the transformed expressions do not.
4621 The first two transformations are correct if either A or B
4622 is a NaN. In the first transformation, the condition will
4623 be false, and B will indeed be chosen. In the case of the
4624 second transformation, the condition A != B will be true,
4625 and A will be chosen.
4627 The conversions to max() and min() are not correct if B is
4628 a number and A is not. The conditions in the original
4629 expressions will be false, so all four give B. The min()
4630 and max() versions would give a NaN instead. */
4631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4633 /* Avoid these transformations if the COND_EXPR may be used
4634 as an lvalue in the C++ front-end. PR c++/19199. */
4635 && (in_gimple_form
4636 || (strcmp (lang_hooks.name, "GNU C++") != 0
4637 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4638 || ! maybe_lvalue_p (arg1)
4639 || ! maybe_lvalue_p (arg2)))
4641 tree comp_op0 = arg00;
4642 tree comp_op1 = arg01;
4643 tree comp_type = TREE_TYPE (comp_op0);
4645 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4646 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4648 comp_type = type;
4649 comp_op0 = arg1;
4650 comp_op1 = arg2;
4653 switch (comp_code)
4655 case EQ_EXPR:
4656 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4657 case NE_EXPR:
4658 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4659 case LE_EXPR:
4660 case LT_EXPR:
4661 case UNLE_EXPR:
4662 case UNLT_EXPR:
4663 /* In C++ a ?: expression can be an lvalue, so put the
4664 operand which will be used if they are equal first
4665 so that we can convert this back to the
4666 corresponding COND_EXPR. */
4667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4669 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4670 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4671 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4672 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4673 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4674 comp_op1, comp_op0);
4675 return pedantic_non_lvalue_loc (loc,
4676 fold_convert_loc (loc, type, tem));
4678 break;
4679 case GE_EXPR:
4680 case GT_EXPR:
4681 case UNGE_EXPR:
4682 case UNGT_EXPR:
4683 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4685 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4686 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4687 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4688 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4689 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4690 comp_op1, comp_op0);
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, tem));
4694 break;
4695 case UNEQ_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4697 return pedantic_non_lvalue_loc (loc,
4698 fold_convert_loc (loc, type, arg2));
4699 break;
4700 case LTGT_EXPR:
4701 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, arg1));
4704 break;
4705 default:
4706 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4707 break;
4711 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4712 we might still be able to simplify this. For example,
4713 if C1 is one less or one more than C2, this might have started
4714 out as a MIN or MAX and been transformed by this function.
4715 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4717 if (INTEGRAL_TYPE_P (type)
4718 && TREE_CODE (arg01) == INTEGER_CST
4719 && TREE_CODE (arg2) == INTEGER_CST)
4720 switch (comp_code)
4722 case EQ_EXPR:
4723 if (TREE_CODE (arg1) == INTEGER_CST)
4724 break;
4725 /* We can replace A with C1 in this case. */
4726 arg1 = fold_convert_loc (loc, type, arg01);
4727 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4729 case LT_EXPR:
4730 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4731 MIN_EXPR, to preserve the signedness of the comparison. */
4732 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4733 OEP_ONLY_CONST)
4734 && operand_equal_p (arg01,
4735 const_binop (PLUS_EXPR, arg2,
4736 build_int_cst (type, 1)),
4737 OEP_ONLY_CONST))
4739 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4740 fold_convert_loc (loc, TREE_TYPE (arg00),
4741 arg2));
4742 return pedantic_non_lvalue_loc (loc,
4743 fold_convert_loc (loc, type, tem));
4745 break;
4747 case LE_EXPR:
4748 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4749 as above. */
4750 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4751 OEP_ONLY_CONST)
4752 && operand_equal_p (arg01,
4753 const_binop (MINUS_EXPR, arg2,
4754 build_int_cst (type, 1)),
4755 OEP_ONLY_CONST))
4757 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4758 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 arg2));
4760 return pedantic_non_lvalue_loc (loc,
4761 fold_convert_loc (loc, type, tem));
4763 break;
4765 case GT_EXPR:
4766 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4767 MAX_EXPR, to preserve the signedness of the comparison. */
4768 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4769 OEP_ONLY_CONST)
4770 && operand_equal_p (arg01,
4771 const_binop (MINUS_EXPR, arg2,
4772 build_int_cst (type, 1)),
4773 OEP_ONLY_CONST))
4775 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4776 fold_convert_loc (loc, TREE_TYPE (arg00),
4777 arg2));
4778 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4780 break;
4782 case GE_EXPR:
4783 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4784 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (PLUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4796 break;
4797 case NE_EXPR:
4798 break;
4799 default:
4800 gcc_unreachable ();
4803 return NULL_TREE;
4808 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4809 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4810 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4811 false) >= 2)
4812 #endif
4814 /* EXP is some logical combination of boolean tests. See if we can
4815 merge it into some range test. Return the new tree if so. */
4817 static tree
4818 fold_range_test (location_t loc, enum tree_code code, tree type,
4819 tree op0, tree op1)
4821 int or_op = (code == TRUTH_ORIF_EXPR
4822 || code == TRUTH_OR_EXPR);
4823 int in0_p, in1_p, in_p;
4824 tree low0, low1, low, high0, high1, high;
4825 bool strict_overflow_p = false;
4826 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4827 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4828 tree tem;
4829 const char * const warnmsg = G_("assuming signed overflow does not occur "
4830 "when simplifying range test");
4832 /* If this is an OR operation, invert both sides; we will invert
4833 again at the end. */
4834 if (or_op)
4835 in0_p = ! in0_p, in1_p = ! in1_p;
4837 /* If both expressions are the same, if we can merge the ranges, and we
4838 can build the range test, return it or it inverted. If one of the
4839 ranges is always true or always false, consider it to be the same
4840 expression as the other. */
4841 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4842 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4843 in1_p, low1, high1)
4844 && 0 != (tem = (build_range_check (loc, type,
4845 lhs != 0 ? lhs
4846 : rhs != 0 ? rhs : integer_zero_node,
4847 in_p, low, high))))
4849 if (strict_overflow_p)
4850 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4851 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4854 /* On machines where the branch cost is expensive, if this is a
4855 short-circuited branch and the underlying object on both sides
4856 is the same, make a non-short-circuit operation. */
4857 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4858 && lhs != 0 && rhs != 0
4859 && (code == TRUTH_ANDIF_EXPR
4860 || code == TRUTH_ORIF_EXPR)
4861 && operand_equal_p (lhs, rhs, 0))
4863 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4864 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4865 which cases we can't do this. */
4866 if (simple_operand_p (lhs))
4867 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4868 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4869 type, op0, op1);
4871 else if (!lang_hooks.decls.global_bindings_p ()
4872 && !CONTAINS_PLACEHOLDER_P (lhs))
4874 tree common = save_expr (lhs);
4876 if (0 != (lhs = build_range_check (loc, type, common,
4877 or_op ? ! in0_p : in0_p,
4878 low0, high0))
4879 && (0 != (rhs = build_range_check (loc, type, common,
4880 or_op ? ! in1_p : in1_p,
4881 low1, high1))))
4883 if (strict_overflow_p)
4884 fold_overflow_warning (warnmsg,
4885 WARN_STRICT_OVERFLOW_COMPARISON);
4886 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4887 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4888 type, lhs, rhs);
4893 return 0;
4896 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4897 bit value. Arrange things so the extra bits will be set to zero if and
4898 only if C is signed-extended to its full width. If MASK is nonzero,
4899 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4901 static tree
4902 unextend (tree c, int p, int unsignedp, tree mask)
4904 tree type = TREE_TYPE (c);
4905 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4906 tree temp;
4908 if (p == modesize || unsignedp)
4909 return c;
4911 /* We work by getting just the sign bit into the low-order bit, then
4912 into the high-order bit, then sign-extend. We then XOR that value
4913 with C. */
4914 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4915 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4917 /* We must use a signed type in order to get an arithmetic right shift.
4918 However, we must also avoid introducing accidental overflows, so that
4919 a subsequent call to integer_zerop will work. Hence we must
4920 do the type conversion here. At this point, the constant is either
4921 zero or one, and the conversion to a signed type can never overflow.
4922 We could get an overflow if this conversion is done anywhere else. */
4923 if (TYPE_UNSIGNED (type))
4924 temp = fold_convert (signed_type_for (type), temp);
4926 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4927 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4928 if (mask != 0)
4929 temp = const_binop (BIT_AND_EXPR, temp,
4930 fold_convert (TREE_TYPE (c), mask));
4931 /* If necessary, convert the type back to match the type of C. */
4932 if (TYPE_UNSIGNED (type))
4933 temp = fold_convert (type, temp);
4935 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4938 /* For an expression that has the form
4939 (A && B) || ~B
4941 (A || B) && ~B,
4942 we can drop one of the inner expressions and simplify to
4943 A || ~B
4945 A && ~B
4946 LOC is the location of the resulting expression. OP is the inner
4947 logical operation; the left-hand side in the examples above, while CMPOP
4948 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4949 removing a condition that guards another, as in
4950 (A != NULL && A->...) || A == NULL
4951 which we must not transform. If RHS_ONLY is true, only eliminate the
4952 right-most operand of the inner logical operation. */
4954 static tree
4955 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4956 bool rhs_only)
4958 tree type = TREE_TYPE (cmpop);
4959 enum tree_code code = TREE_CODE (cmpop);
4960 enum tree_code truthop_code = TREE_CODE (op);
4961 tree lhs = TREE_OPERAND (op, 0);
4962 tree rhs = TREE_OPERAND (op, 1);
4963 tree orig_lhs = lhs, orig_rhs = rhs;
4964 enum tree_code rhs_code = TREE_CODE (rhs);
4965 enum tree_code lhs_code = TREE_CODE (lhs);
4966 enum tree_code inv_code;
4968 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4969 return NULL_TREE;
4971 if (TREE_CODE_CLASS (code) != tcc_comparison)
4972 return NULL_TREE;
4974 if (rhs_code == truthop_code)
4976 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4977 if (newrhs != NULL_TREE)
4979 rhs = newrhs;
4980 rhs_code = TREE_CODE (rhs);
4983 if (lhs_code == truthop_code && !rhs_only)
4985 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4986 if (newlhs != NULL_TREE)
4988 lhs = newlhs;
4989 lhs_code = TREE_CODE (lhs);
4993 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4994 if (inv_code == rhs_code
4995 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4996 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4997 return lhs;
4998 if (!rhs_only && inv_code == lhs_code
4999 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5000 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5001 return rhs;
5002 if (rhs != orig_rhs || lhs != orig_lhs)
5003 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5004 lhs, rhs);
5005 return NULL_TREE;
5008 /* Find ways of folding logical expressions of LHS and RHS:
5009 Try to merge two comparisons to the same innermost item.
5010 Look for range tests like "ch >= '0' && ch <= '9'".
5011 Look for combinations of simple terms on machines with expensive branches
5012 and evaluate the RHS unconditionally.
5014 For example, if we have p->a == 2 && p->b == 4 and we can make an
5015 object large enough to span both A and B, we can do this with a comparison
5016 against the object ANDed with the a mask.
5018 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5019 operations to do this with one comparison.
5021 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5022 function and the one above.
5024 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5025 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5027 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5028 two operands.
5030 We return the simplified tree or 0 if no optimization is possible. */
5032 static tree
5033 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5034 tree lhs, tree rhs)
5036 /* If this is the "or" of two comparisons, we can do something if
5037 the comparisons are NE_EXPR. If this is the "and", we can do something
5038 if the comparisons are EQ_EXPR. I.e.,
5039 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5041 WANTED_CODE is this operation code. For single bit fields, we can
5042 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5043 comparison for one-bit fields. */
5045 enum tree_code wanted_code;
5046 enum tree_code lcode, rcode;
5047 tree ll_arg, lr_arg, rl_arg, rr_arg;
5048 tree ll_inner, lr_inner, rl_inner, rr_inner;
5049 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5050 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5051 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5052 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5053 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5054 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5055 enum machine_mode lnmode, rnmode;
5056 tree ll_mask, lr_mask, rl_mask, rr_mask;
5057 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5058 tree l_const, r_const;
5059 tree lntype, rntype, result;
5060 HOST_WIDE_INT first_bit, end_bit;
5061 int volatilep;
5063 /* Start by getting the comparison codes. Fail if anything is volatile.
5064 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5065 it were surrounded with a NE_EXPR. */
5067 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5068 return 0;
5070 lcode = TREE_CODE (lhs);
5071 rcode = TREE_CODE (rhs);
5073 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5075 lhs = build2 (NE_EXPR, truth_type, lhs,
5076 build_int_cst (TREE_TYPE (lhs), 0));
5077 lcode = NE_EXPR;
5080 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5082 rhs = build2 (NE_EXPR, truth_type, rhs,
5083 build_int_cst (TREE_TYPE (rhs), 0));
5084 rcode = NE_EXPR;
5087 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5088 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5089 return 0;
5091 ll_arg = TREE_OPERAND (lhs, 0);
5092 lr_arg = TREE_OPERAND (lhs, 1);
5093 rl_arg = TREE_OPERAND (rhs, 0);
5094 rr_arg = TREE_OPERAND (rhs, 1);
5096 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5097 if (simple_operand_p (ll_arg)
5098 && simple_operand_p (lr_arg))
5100 if (operand_equal_p (ll_arg, rl_arg, 0)
5101 && operand_equal_p (lr_arg, rr_arg, 0))
5103 result = combine_comparisons (loc, code, lcode, rcode,
5104 truth_type, ll_arg, lr_arg);
5105 if (result)
5106 return result;
5108 else if (operand_equal_p (ll_arg, rr_arg, 0)
5109 && operand_equal_p (lr_arg, rl_arg, 0))
5111 result = combine_comparisons (loc, code, lcode,
5112 swap_tree_comparison (rcode),
5113 truth_type, ll_arg, lr_arg);
5114 if (result)
5115 return result;
5119 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5120 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5122 /* If the RHS can be evaluated unconditionally and its operands are
5123 simple, it wins to evaluate the RHS unconditionally on machines
5124 with expensive branches. In this case, this isn't a comparison
5125 that can be merged. */
5127 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5128 false) >= 2
5129 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5130 && simple_operand_p (rl_arg)
5131 && simple_operand_p (rr_arg))
5133 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5134 if (code == TRUTH_OR_EXPR
5135 && lcode == NE_EXPR && integer_zerop (lr_arg)
5136 && rcode == NE_EXPR && integer_zerop (rr_arg)
5137 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5138 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5139 return build2_loc (loc, NE_EXPR, truth_type,
5140 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5141 ll_arg, rl_arg),
5142 build_int_cst (TREE_TYPE (ll_arg), 0));
5144 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5145 if (code == TRUTH_AND_EXPR
5146 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5147 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5148 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5149 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 return build2_loc (loc, EQ_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5156 /* See if the comparisons can be merged. Then get all the parameters for
5157 each side. */
5159 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5160 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5161 return 0;
5163 volatilep = 0;
5164 ll_inner = decode_field_reference (loc, ll_arg,
5165 &ll_bitsize, &ll_bitpos, &ll_mode,
5166 &ll_unsignedp, &volatilep, &ll_mask,
5167 &ll_and_mask);
5168 lr_inner = decode_field_reference (loc, lr_arg,
5169 &lr_bitsize, &lr_bitpos, &lr_mode,
5170 &lr_unsignedp, &volatilep, &lr_mask,
5171 &lr_and_mask);
5172 rl_inner = decode_field_reference (loc, rl_arg,
5173 &rl_bitsize, &rl_bitpos, &rl_mode,
5174 &rl_unsignedp, &volatilep, &rl_mask,
5175 &rl_and_mask);
5176 rr_inner = decode_field_reference (loc, rr_arg,
5177 &rr_bitsize, &rr_bitpos, &rr_mode,
5178 &rr_unsignedp, &volatilep, &rr_mask,
5179 &rr_and_mask);
5181 /* It must be true that the inner operation on the lhs of each
5182 comparison must be the same if we are to be able to do anything.
5183 Then see if we have constants. If not, the same must be true for
5184 the rhs's. */
5185 if (volatilep || ll_inner == 0 || rl_inner == 0
5186 || ! operand_equal_p (ll_inner, rl_inner, 0))
5187 return 0;
5189 if (TREE_CODE (lr_arg) == INTEGER_CST
5190 && TREE_CODE (rr_arg) == INTEGER_CST)
5191 l_const = lr_arg, r_const = rr_arg;
5192 else if (lr_inner == 0 || rr_inner == 0
5193 || ! operand_equal_p (lr_inner, rr_inner, 0))
5194 return 0;
5195 else
5196 l_const = r_const = 0;
5198 /* If either comparison code is not correct for our logical operation,
5199 fail. However, we can convert a one-bit comparison against zero into
5200 the opposite comparison against that bit being set in the field. */
5202 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5203 if (lcode != wanted_code)
5205 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5207 /* Make the left operand unsigned, since we are only interested
5208 in the value of one bit. Otherwise we are doing the wrong
5209 thing below. */
5210 ll_unsignedp = 1;
5211 l_const = ll_mask;
5213 else
5214 return 0;
5217 /* This is analogous to the code for l_const above. */
5218 if (rcode != wanted_code)
5220 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5222 rl_unsignedp = 1;
5223 r_const = rl_mask;
5225 else
5226 return 0;
5229 /* See if we can find a mode that contains both fields being compared on
5230 the left. If we can't, fail. Otherwise, update all constants and masks
5231 to be relative to a field of that size. */
5232 first_bit = MIN (ll_bitpos, rl_bitpos);
5233 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5234 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5235 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5236 volatilep);
5237 if (lnmode == VOIDmode)
5238 return 0;
5240 lnbitsize = GET_MODE_BITSIZE (lnmode);
5241 lnbitpos = first_bit & ~ (lnbitsize - 1);
5242 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5243 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5245 if (BYTES_BIG_ENDIAN)
5247 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5248 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5251 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5252 size_int (xll_bitpos));
5253 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5254 size_int (xrl_bitpos));
5256 if (l_const)
5258 l_const = fold_convert_loc (loc, lntype, l_const);
5259 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5260 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5261 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5262 fold_build1_loc (loc, BIT_NOT_EXPR,
5263 lntype, ll_mask))))
5265 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5267 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5270 if (r_const)
5272 r_const = fold_convert_loc (loc, lntype, r_const);
5273 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5274 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5275 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5276 fold_build1_loc (loc, BIT_NOT_EXPR,
5277 lntype, rl_mask))))
5279 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5281 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5285 /* If the right sides are not constant, do the same for it. Also,
5286 disallow this optimization if a size or signedness mismatch occurs
5287 between the left and right sides. */
5288 if (l_const == 0)
5290 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5291 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5292 /* Make sure the two fields on the right
5293 correspond to the left without being swapped. */
5294 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5295 return 0;
5297 first_bit = MIN (lr_bitpos, rr_bitpos);
5298 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5299 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5300 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5301 volatilep);
5302 if (rnmode == VOIDmode)
5303 return 0;
5305 rnbitsize = GET_MODE_BITSIZE (rnmode);
5306 rnbitpos = first_bit & ~ (rnbitsize - 1);
5307 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5308 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5310 if (BYTES_BIG_ENDIAN)
5312 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5313 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5316 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5317 rntype, lr_mask),
5318 size_int (xlr_bitpos));
5319 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5320 rntype, rr_mask),
5321 size_int (xrr_bitpos));
5323 /* Make a mask that corresponds to both fields being compared.
5324 Do this for both items being compared. If the operands are the
5325 same size and the bits being compared are in the same position
5326 then we can do this by masking both and comparing the masked
5327 results. */
5328 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5329 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5330 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5332 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5333 ll_unsignedp || rl_unsignedp);
5334 if (! all_ones_mask_p (ll_mask, lnbitsize))
5335 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5337 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5338 lr_unsignedp || rr_unsignedp);
5339 if (! all_ones_mask_p (lr_mask, rnbitsize))
5340 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5342 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5345 /* There is still another way we can do something: If both pairs of
5346 fields being compared are adjacent, we may be able to make a wider
5347 field containing them both.
5349 Note that we still must mask the lhs/rhs expressions. Furthermore,
5350 the mask must be shifted to account for the shift done by
5351 make_bit_field_ref. */
5352 if ((ll_bitsize + ll_bitpos == rl_bitpos
5353 && lr_bitsize + lr_bitpos == rr_bitpos)
5354 || (ll_bitpos == rl_bitpos + rl_bitsize
5355 && lr_bitpos == rr_bitpos + rr_bitsize))
5357 tree type;
5359 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5360 ll_bitsize + rl_bitsize,
5361 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5362 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5363 lr_bitsize + rr_bitsize,
5364 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5366 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5367 size_int (MIN (xll_bitpos, xrl_bitpos)));
5368 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5369 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5371 /* Convert to the smaller type before masking out unwanted bits. */
5372 type = lntype;
5373 if (lntype != rntype)
5375 if (lnbitsize > rnbitsize)
5377 lhs = fold_convert_loc (loc, rntype, lhs);
5378 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5379 type = rntype;
5381 else if (lnbitsize < rnbitsize)
5383 rhs = fold_convert_loc (loc, lntype, rhs);
5384 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5385 type = lntype;
5389 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5390 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5392 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5393 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5395 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5398 return 0;
5401 /* Handle the case of comparisons with constants. If there is something in
5402 common between the masks, those bits of the constants must be the same.
5403 If not, the condition is always false. Test for this to avoid generating
5404 incorrect code below. */
5405 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5406 if (! integer_zerop (result)
5407 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5408 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5410 if (wanted_code == NE_EXPR)
5412 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5413 return constant_boolean_node (true, truth_type);
5415 else
5417 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5418 return constant_boolean_node (false, truth_type);
5422 /* Construct the expression we will return. First get the component
5423 reference we will make. Unless the mask is all ones the width of
5424 that field, perform the mask operation. Then compare with the
5425 merged constant. */
5426 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5427 ll_unsignedp || rl_unsignedp);
5429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5430 if (! all_ones_mask_p (ll_mask, lnbitsize))
5431 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5433 return build2_loc (loc, wanted_code, truth_type, result,
5434 const_binop (BIT_IOR_EXPR, l_const, r_const));
5437 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5438 constant. */
5440 static tree
5441 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5442 tree op0, tree op1)
5444 tree arg0 = op0;
5445 enum tree_code op_code;
5446 tree comp_const;
5447 tree minmax_const;
5448 int consts_equal, consts_lt;
5449 tree inner;
5451 STRIP_SIGN_NOPS (arg0);
5453 op_code = TREE_CODE (arg0);
5454 minmax_const = TREE_OPERAND (arg0, 1);
5455 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5456 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5457 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5458 inner = TREE_OPERAND (arg0, 0);
5460 /* If something does not permit us to optimize, return the original tree. */
5461 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5462 || TREE_CODE (comp_const) != INTEGER_CST
5463 || TREE_OVERFLOW (comp_const)
5464 || TREE_CODE (minmax_const) != INTEGER_CST
5465 || TREE_OVERFLOW (minmax_const))
5466 return NULL_TREE;
5468 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5469 and GT_EXPR, doing the rest with recursive calls using logical
5470 simplifications. */
5471 switch (code)
5473 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5475 tree tem
5476 = optimize_minmax_comparison (loc,
5477 invert_tree_comparison (code, false),
5478 type, op0, op1);
5479 if (tem)
5480 return invert_truthvalue_loc (loc, tem);
5481 return NULL_TREE;
5484 case GE_EXPR:
5485 return
5486 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5487 optimize_minmax_comparison
5488 (loc, EQ_EXPR, type, arg0, comp_const),
5489 optimize_minmax_comparison
5490 (loc, GT_EXPR, type, arg0, comp_const));
5492 case EQ_EXPR:
5493 if (op_code == MAX_EXPR && consts_equal)
5494 /* MAX (X, 0) == 0 -> X <= 0 */
5495 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5497 else if (op_code == MAX_EXPR && consts_lt)
5498 /* MAX (X, 0) == 5 -> X == 5 */
5499 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5501 else if (op_code == MAX_EXPR)
5502 /* MAX (X, 0) == -1 -> false */
5503 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5505 else if (consts_equal)
5506 /* MIN (X, 0) == 0 -> X >= 0 */
5507 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5509 else if (consts_lt)
5510 /* MIN (X, 0) == 5 -> false */
5511 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5513 else
5514 /* MIN (X, 0) == -1 -> X == -1 */
5515 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5517 case GT_EXPR:
5518 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5519 /* MAX (X, 0) > 0 -> X > 0
5520 MAX (X, 0) > 5 -> X > 5 */
5521 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5523 else if (op_code == MAX_EXPR)
5524 /* MAX (X, 0) > -1 -> true */
5525 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5527 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5528 /* MIN (X, 0) > 0 -> false
5529 MIN (X, 0) > 5 -> false */
5530 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5532 else
5533 /* MIN (X, 0) > -1 -> X > -1 */
5534 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5536 default:
5537 return NULL_TREE;
5541 /* T is an integer expression that is being multiplied, divided, or taken a
5542 modulus (CODE says which and what kind of divide or modulus) by a
5543 constant C. See if we can eliminate that operation by folding it with
5544 other operations already in T. WIDE_TYPE, if non-null, is a type that
5545 should be used for the computation if wider than our type.
5547 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5548 (X * 2) + (Y * 4). We must, however, be assured that either the original
5549 expression would not overflow or that overflow is undefined for the type
5550 in the language in question.
5552 If we return a non-null expression, it is an equivalent form of the
5553 original computation, but need not be in the original type.
5555 We set *STRICT_OVERFLOW_P to true if the return values depends on
5556 signed overflow being undefined. Otherwise we do not change
5557 *STRICT_OVERFLOW_P. */
5559 static tree
5560 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5561 bool *strict_overflow_p)
5563 /* To avoid exponential search depth, refuse to allow recursion past
5564 three levels. Beyond that (1) it's highly unlikely that we'll find
5565 something interesting and (2) we've probably processed it before
5566 when we built the inner expression. */
5568 static int depth;
5569 tree ret;
5571 if (depth > 3)
5572 return NULL;
5574 depth++;
5575 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5576 depth--;
5578 return ret;
5581 static tree
5582 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5583 bool *strict_overflow_p)
5585 tree type = TREE_TYPE (t);
5586 enum tree_code tcode = TREE_CODE (t);
5587 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5588 > GET_MODE_SIZE (TYPE_MODE (type)))
5589 ? wide_type : type);
5590 tree t1, t2;
5591 int same_p = tcode == code;
5592 tree op0 = NULL_TREE, op1 = NULL_TREE;
5593 bool sub_strict_overflow_p;
5595 /* Don't deal with constants of zero here; they confuse the code below. */
5596 if (integer_zerop (c))
5597 return NULL_TREE;
5599 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5600 op0 = TREE_OPERAND (t, 0);
5602 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5603 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5605 /* Note that we need not handle conditional operations here since fold
5606 already handles those cases. So just do arithmetic here. */
5607 switch (tcode)
5609 case INTEGER_CST:
5610 /* For a constant, we can always simplify if we are a multiply
5611 or (for divide and modulus) if it is a multiple of our constant. */
5612 if (code == MULT_EXPR
5613 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5614 return const_binop (code, fold_convert (ctype, t),
5615 fold_convert (ctype, c));
5616 break;
5618 CASE_CONVERT: case NON_LVALUE_EXPR:
5619 /* If op0 is an expression ... */
5620 if ((COMPARISON_CLASS_P (op0)
5621 || UNARY_CLASS_P (op0)
5622 || BINARY_CLASS_P (op0)
5623 || VL_EXP_CLASS_P (op0)
5624 || EXPRESSION_CLASS_P (op0))
5625 /* ... and has wrapping overflow, and its type is smaller
5626 than ctype, then we cannot pass through as widening. */
5627 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5628 && (TYPE_PRECISION (ctype)
5629 > TYPE_PRECISION (TREE_TYPE (op0))))
5630 /* ... or this is a truncation (t is narrower than op0),
5631 then we cannot pass through this narrowing. */
5632 || (TYPE_PRECISION (type)
5633 < TYPE_PRECISION (TREE_TYPE (op0)))
5634 /* ... or signedness changes for division or modulus,
5635 then we cannot pass through this conversion. */
5636 || (code != MULT_EXPR
5637 && (TYPE_UNSIGNED (ctype)
5638 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5639 /* ... or has undefined overflow while the converted to
5640 type has not, we cannot do the operation in the inner type
5641 as that would introduce undefined overflow. */
5642 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5643 && !TYPE_OVERFLOW_UNDEFINED (type))))
5644 break;
5646 /* Pass the constant down and see if we can make a simplification. If
5647 we can, replace this expression with the inner simplification for
5648 possible later conversion to our or some other type. */
5649 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5650 && TREE_CODE (t2) == INTEGER_CST
5651 && !TREE_OVERFLOW (t2)
5652 && (0 != (t1 = extract_muldiv (op0, t2, code,
5653 code == MULT_EXPR
5654 ? ctype : NULL_TREE,
5655 strict_overflow_p))))
5656 return t1;
5657 break;
5659 case ABS_EXPR:
5660 /* If widening the type changes it from signed to unsigned, then we
5661 must avoid building ABS_EXPR itself as unsigned. */
5662 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5664 tree cstype = (*signed_type_for) (ctype);
5665 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5666 != 0)
5668 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5669 return fold_convert (ctype, t1);
5671 break;
5673 /* If the constant is negative, we cannot simplify this. */
5674 if (tree_int_cst_sgn (c) == -1)
5675 break;
5676 /* FALLTHROUGH */
5677 case NEGATE_EXPR:
5678 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5679 != 0)
5680 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5681 break;
5683 case MIN_EXPR: case MAX_EXPR:
5684 /* If widening the type changes the signedness, then we can't perform
5685 this optimization as that changes the result. */
5686 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5687 break;
5689 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5690 sub_strict_overflow_p = false;
5691 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5692 &sub_strict_overflow_p)) != 0
5693 && (t2 = extract_muldiv (op1, c, code, wide_type,
5694 &sub_strict_overflow_p)) != 0)
5696 if (tree_int_cst_sgn (c) < 0)
5697 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5698 if (sub_strict_overflow_p)
5699 *strict_overflow_p = true;
5700 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5701 fold_convert (ctype, t2));
5703 break;
5705 case LSHIFT_EXPR: case RSHIFT_EXPR:
5706 /* If the second operand is constant, this is a multiplication
5707 or floor division, by a power of two, so we can treat it that
5708 way unless the multiplier or divisor overflows. Signed
5709 left-shift overflow is implementation-defined rather than
5710 undefined in C90, so do not convert signed left shift into
5711 multiplication. */
5712 if (TREE_CODE (op1) == INTEGER_CST
5713 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5714 /* const_binop may not detect overflow correctly,
5715 so check for it explicitly here. */
5716 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5717 && TREE_INT_CST_HIGH (op1) == 0
5718 && 0 != (t1 = fold_convert (ctype,
5719 const_binop (LSHIFT_EXPR,
5720 size_one_node,
5721 op1)))
5722 && !TREE_OVERFLOW (t1))
5723 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5724 ? MULT_EXPR : FLOOR_DIV_EXPR,
5725 ctype,
5726 fold_convert (ctype, op0),
5727 t1),
5728 c, code, wide_type, strict_overflow_p);
5729 break;
5731 case PLUS_EXPR: case MINUS_EXPR:
5732 /* See if we can eliminate the operation on both sides. If we can, we
5733 can return a new PLUS or MINUS. If we can't, the only remaining
5734 cases where we can do anything are if the second operand is a
5735 constant. */
5736 sub_strict_overflow_p = false;
5737 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5738 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5739 if (t1 != 0 && t2 != 0
5740 && (code == MULT_EXPR
5741 /* If not multiplication, we can only do this if both operands
5742 are divisible by c. */
5743 || (multiple_of_p (ctype, op0, c)
5744 && multiple_of_p (ctype, op1, c))))
5746 if (sub_strict_overflow_p)
5747 *strict_overflow_p = true;
5748 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5749 fold_convert (ctype, t2));
5752 /* If this was a subtraction, negate OP1 and set it to be an addition.
5753 This simplifies the logic below. */
5754 if (tcode == MINUS_EXPR)
5756 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5757 /* If OP1 was not easily negatable, the constant may be OP0. */
5758 if (TREE_CODE (op0) == INTEGER_CST)
5760 tree tem = op0;
5761 op0 = op1;
5762 op1 = tem;
5763 tem = t1;
5764 t1 = t2;
5765 t2 = tem;
5769 if (TREE_CODE (op1) != INTEGER_CST)
5770 break;
5772 /* If either OP1 or C are negative, this optimization is not safe for
5773 some of the division and remainder types while for others we need
5774 to change the code. */
5775 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5777 if (code == CEIL_DIV_EXPR)
5778 code = FLOOR_DIV_EXPR;
5779 else if (code == FLOOR_DIV_EXPR)
5780 code = CEIL_DIV_EXPR;
5781 else if (code != MULT_EXPR
5782 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5783 break;
5786 /* If it's a multiply or a division/modulus operation of a multiple
5787 of our constant, do the operation and verify it doesn't overflow. */
5788 if (code == MULT_EXPR
5789 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5791 op1 = const_binop (code, fold_convert (ctype, op1),
5792 fold_convert (ctype, c));
5793 /* We allow the constant to overflow with wrapping semantics. */
5794 if (op1 == 0
5795 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5796 break;
5798 else
5799 break;
5801 /* If we have an unsigned type is not a sizetype, we cannot widen
5802 the operation since it will change the result if the original
5803 computation overflowed. */
5804 if (TYPE_UNSIGNED (ctype)
5805 && ctype != type)
5806 break;
5808 /* If we were able to eliminate our operation from the first side,
5809 apply our operation to the second side and reform the PLUS. */
5810 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5811 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5813 /* The last case is if we are a multiply. In that case, we can
5814 apply the distributive law to commute the multiply and addition
5815 if the multiplication of the constants doesn't overflow. */
5816 if (code == MULT_EXPR)
5817 return fold_build2 (tcode, ctype,
5818 fold_build2 (code, ctype,
5819 fold_convert (ctype, op0),
5820 fold_convert (ctype, c)),
5821 op1);
5823 break;
5825 case MULT_EXPR:
5826 /* We have a special case here if we are doing something like
5827 (C * 8) % 4 since we know that's zero. */
5828 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5829 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5830 /* If the multiplication can overflow we cannot optimize this. */
5831 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5832 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5833 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5835 *strict_overflow_p = true;
5836 return omit_one_operand (type, integer_zero_node, op0);
5839 /* ... fall through ... */
5841 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5842 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5843 /* If we can extract our operation from the LHS, do so and return a
5844 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5845 do something only if the second operand is a constant. */
5846 if (same_p
5847 && (t1 = extract_muldiv (op0, c, code, wide_type,
5848 strict_overflow_p)) != 0)
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, op1));
5851 else if (tcode == MULT_EXPR && code == MULT_EXPR
5852 && (t1 = extract_muldiv (op1, c, code, wide_type,
5853 strict_overflow_p)) != 0)
5854 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5855 fold_convert (ctype, t1));
5856 else if (TREE_CODE (op1) != INTEGER_CST)
5857 return 0;
5859 /* If these are the same operation types, we can associate them
5860 assuming no overflow. */
5861 if (tcode == code)
5863 double_int mul;
5864 bool overflow_p;
5865 unsigned prec = TYPE_PRECISION (ctype);
5866 bool uns = TYPE_UNSIGNED (ctype);
5867 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5868 double_int dic = tree_to_double_int (c).ext (prec, uns);
5869 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5870 overflow_p = ((!uns && overflow_p)
5871 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5872 if (!double_int_fits_to_tree_p (ctype, mul)
5873 && ((uns && tcode != MULT_EXPR) || !uns))
5874 overflow_p = 1;
5875 if (!overflow_p)
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5877 double_int_to_tree (ctype, mul));
5880 /* If these operations "cancel" each other, we have the main
5881 optimizations of this pass, which occur when either constant is a
5882 multiple of the other, in which case we replace this with either an
5883 operation or CODE or TCODE.
5885 If we have an unsigned type, we cannot do this since it will change
5886 the result if the original computation overflowed. */
5887 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5888 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5889 || (tcode == MULT_EXPR
5890 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5891 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5892 && code != MULT_EXPR)))
5894 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5896 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5897 *strict_overflow_p = true;
5898 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5899 fold_convert (ctype,
5900 const_binop (TRUNC_DIV_EXPR,
5901 op1, c)));
5903 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5910 c, op1)));
5913 break;
5915 default:
5916 break;
5919 return 0;
5922 /* Return a node which has the indicated constant VALUE (either 0 or
5923 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5924 and is of the indicated TYPE. */
5926 tree
5927 constant_boolean_node (bool value, tree type)
5929 if (type == integer_type_node)
5930 return value ? integer_one_node : integer_zero_node;
5931 else if (type == boolean_type_node)
5932 return value ? boolean_true_node : boolean_false_node;
5933 else if (TREE_CODE (type) == VECTOR_TYPE)
5934 return build_vector_from_val (type,
5935 build_int_cst (TREE_TYPE (type),
5936 value ? -1 : 0));
5937 else
5938 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5942 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5943 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5944 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5945 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5946 COND is the first argument to CODE; otherwise (as in the example
5947 given here), it is the second argument. TYPE is the type of the
5948 original expression. Return NULL_TREE if no simplification is
5949 possible. */
5951 static tree
5952 fold_binary_op_with_conditional_arg (location_t loc,
5953 enum tree_code code,
5954 tree type, tree op0, tree op1,
5955 tree cond, tree arg, int cond_first_p)
5957 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5958 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5959 tree test, true_value, false_value;
5960 tree lhs = NULL_TREE;
5961 tree rhs = NULL_TREE;
5963 if (TREE_CODE (cond) == COND_EXPR)
5965 test = TREE_OPERAND (cond, 0);
5966 true_value = TREE_OPERAND (cond, 1);
5967 false_value = TREE_OPERAND (cond, 2);
5968 /* If this operand throws an expression, then it does not make
5969 sense to try to perform a logical or arithmetic operation
5970 involving it. */
5971 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5972 lhs = true_value;
5973 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5974 rhs = false_value;
5976 else
5978 tree testtype = TREE_TYPE (cond);
5979 test = cond;
5980 true_value = constant_boolean_node (true, testtype);
5981 false_value = constant_boolean_node (false, testtype);
5984 /* This transformation is only worthwhile if we don't have to wrap ARG
5985 in a SAVE_EXPR and the operation can be simplified on at least one
5986 of the branches once its pushed inside the COND_EXPR. */
5987 if (!TREE_CONSTANT (arg)
5988 && (TREE_SIDE_EFFECTS (arg)
5989 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5990 return NULL_TREE;
5992 arg = fold_convert_loc (loc, arg_type, arg);
5993 if (lhs == 0)
5995 true_value = fold_convert_loc (loc, cond_type, true_value);
5996 if (cond_first_p)
5997 lhs = fold_build2_loc (loc, code, type, true_value, arg);
5998 else
5999 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6001 if (rhs == 0)
6003 false_value = fold_convert_loc (loc, cond_type, false_value);
6004 if (cond_first_p)
6005 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6006 else
6007 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6010 /* Check that we have simplified at least one of the branches. */
6011 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6012 return NULL_TREE;
6014 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6018 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6020 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6021 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6022 ADDEND is the same as X.
6024 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6025 and finite. The problematic cases are when X is zero, and its mode
6026 has signed zeros. In the case of rounding towards -infinity,
6027 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6028 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6030 bool
6031 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6033 if (!real_zerop (addend))
6034 return false;
6036 /* Don't allow the fold with -fsignaling-nans. */
6037 if (HONOR_SNANS (TYPE_MODE (type)))
6038 return false;
6040 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6041 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6042 return true;
6044 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6045 if (TREE_CODE (addend) == REAL_CST
6046 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6047 negate = !negate;
6049 /* The mode has signed zeros, and we have to honor their sign.
6050 In this situation, there is only one case we can return true for.
6051 X - 0 is the same as X unless rounding towards -infinity is
6052 supported. */
6053 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6056 /* Subroutine of fold() that checks comparisons of built-in math
6057 functions against real constants.
6059 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6060 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6061 is the type of the result and ARG0 and ARG1 are the operands of the
6062 comparison. ARG1 must be a TREE_REAL_CST.
6064 The function returns the constant folded tree if a simplification
6065 can be made, and NULL_TREE otherwise. */
6067 static tree
6068 fold_mathfn_compare (location_t loc,
6069 enum built_in_function fcode, enum tree_code code,
6070 tree type, tree arg0, tree arg1)
6072 REAL_VALUE_TYPE c;
6074 if (BUILTIN_SQRT_P (fcode))
6076 tree arg = CALL_EXPR_ARG (arg0, 0);
6077 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6079 c = TREE_REAL_CST (arg1);
6080 if (REAL_VALUE_NEGATIVE (c))
6082 /* sqrt(x) < y is always false, if y is negative. */
6083 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6084 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6086 /* sqrt(x) > y is always true, if y is negative and we
6087 don't care about NaNs, i.e. negative values of x. */
6088 if (code == NE_EXPR || !HONOR_NANS (mode))
6089 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6091 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6092 return fold_build2_loc (loc, GE_EXPR, type, arg,
6093 build_real (TREE_TYPE (arg), dconst0));
6095 else if (code == GT_EXPR || code == GE_EXPR)
6097 REAL_VALUE_TYPE c2;
6099 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6100 real_convert (&c2, mode, &c2);
6102 if (REAL_VALUE_ISINF (c2))
6104 /* sqrt(x) > y is x == +Inf, when y is very large. */
6105 if (HONOR_INFINITIES (mode))
6106 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6107 build_real (TREE_TYPE (arg), c2));
6109 /* sqrt(x) > y is always false, when y is very large
6110 and we don't care about infinities. */
6111 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6114 /* sqrt(x) > c is the same as x > c*c. */
6115 return fold_build2_loc (loc, code, type, arg,
6116 build_real (TREE_TYPE (arg), c2));
6118 else if (code == LT_EXPR || code == LE_EXPR)
6120 REAL_VALUE_TYPE c2;
6122 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6123 real_convert (&c2, mode, &c2);
6125 if (REAL_VALUE_ISINF (c2))
6127 /* sqrt(x) < y is always true, when y is a very large
6128 value and we don't care about NaNs or Infinities. */
6129 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6130 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6132 /* sqrt(x) < y is x != +Inf when y is very large and we
6133 don't care about NaNs. */
6134 if (! HONOR_NANS (mode))
6135 return fold_build2_loc (loc, NE_EXPR, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 /* sqrt(x) < y is x >= 0 when y is very large and we
6139 don't care about Infinities. */
6140 if (! HONOR_INFINITIES (mode))
6141 return fold_build2_loc (loc, GE_EXPR, type, arg,
6142 build_real (TREE_TYPE (arg), dconst0));
6144 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6145 arg = save_expr (arg);
6146 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6147 fold_build2_loc (loc, GE_EXPR, type, arg,
6148 build_real (TREE_TYPE (arg),
6149 dconst0)),
6150 fold_build2_loc (loc, NE_EXPR, type, arg,
6151 build_real (TREE_TYPE (arg),
6152 c2)));
6155 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6156 if (! HONOR_NANS (mode))
6157 return fold_build2_loc (loc, code, type, arg,
6158 build_real (TREE_TYPE (arg), c2));
6160 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6161 arg = save_expr (arg);
6162 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6163 fold_build2_loc (loc, GE_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg),
6165 dconst0)),
6166 fold_build2_loc (loc, code, type, arg,
6167 build_real (TREE_TYPE (arg),
6168 c2)));
6172 return NULL_TREE;
6175 /* Subroutine of fold() that optimizes comparisons against Infinities,
6176 either +Inf or -Inf.
6178 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6179 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6180 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6182 The function returns the constant folded tree if a simplification
6183 can be made, and NULL_TREE otherwise. */
6185 static tree
6186 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6187 tree arg0, tree arg1)
6189 enum machine_mode mode;
6190 REAL_VALUE_TYPE max;
6191 tree temp;
6192 bool neg;
6194 mode = TYPE_MODE (TREE_TYPE (arg0));
6196 /* For negative infinity swap the sense of the comparison. */
6197 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6198 if (neg)
6199 code = swap_tree_comparison (code);
6201 switch (code)
6203 case GT_EXPR:
6204 /* x > +Inf is always false, if with ignore sNANs. */
6205 if (HONOR_SNANS (mode))
6206 return NULL_TREE;
6207 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6209 case LE_EXPR:
6210 /* x <= +Inf is always true, if we don't case about NaNs. */
6211 if (! HONOR_NANS (mode))
6212 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6214 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6215 arg0 = save_expr (arg0);
6216 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6218 case EQ_EXPR:
6219 case GE_EXPR:
6220 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6221 real_maxval (&max, neg, mode);
6222 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6223 arg0, build_real (TREE_TYPE (arg0), max));
6225 case LT_EXPR:
6226 /* x < +Inf is always equal to x <= DBL_MAX. */
6227 real_maxval (&max, neg, mode);
6228 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6229 arg0, build_real (TREE_TYPE (arg0), max));
6231 case NE_EXPR:
6232 /* x != +Inf is always equal to !(x > DBL_MAX). */
6233 real_maxval (&max, neg, mode);
6234 if (! HONOR_NANS (mode))
6235 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6236 arg0, build_real (TREE_TYPE (arg0), max));
6238 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6239 arg0, build_real (TREE_TYPE (arg0), max));
6240 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6242 default:
6243 break;
6246 return NULL_TREE;
6249 /* Subroutine of fold() that optimizes comparisons of a division by
6250 a nonzero integer constant against an integer constant, i.e.
6251 X/C1 op C2.
6253 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6254 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6255 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6257 The function returns the constant folded tree if a simplification
6258 can be made, and NULL_TREE otherwise. */
6260 static tree
6261 fold_div_compare (location_t loc,
6262 enum tree_code code, tree type, tree arg0, tree arg1)
6264 tree prod, tmp, hi, lo;
6265 tree arg00 = TREE_OPERAND (arg0, 0);
6266 tree arg01 = TREE_OPERAND (arg0, 1);
6267 double_int val;
6268 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6269 bool neg_overflow;
6270 bool overflow;
6272 /* We have to do this the hard way to detect unsigned overflow.
6273 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6274 val = TREE_INT_CST (arg01)
6275 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6276 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6277 neg_overflow = false;
6279 if (unsigned_p)
6281 tmp = int_const_binop (MINUS_EXPR, arg01,
6282 build_int_cst (TREE_TYPE (arg01), 1));
6283 lo = prod;
6285 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6286 val = TREE_INT_CST (prod)
6287 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6288 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6289 -1, overflow | TREE_OVERFLOW (prod));
6291 else if (tree_int_cst_sgn (arg01) >= 0)
6293 tmp = int_const_binop (MINUS_EXPR, arg01,
6294 build_int_cst (TREE_TYPE (arg01), 1));
6295 switch (tree_int_cst_sgn (arg1))
6297 case -1:
6298 neg_overflow = true;
6299 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6300 hi = prod;
6301 break;
6303 case 0:
6304 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6305 hi = tmp;
6306 break;
6308 case 1:
6309 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6310 lo = prod;
6311 break;
6313 default:
6314 gcc_unreachable ();
6317 else
6319 /* A negative divisor reverses the relational operators. */
6320 code = swap_tree_comparison (code);
6322 tmp = int_const_binop (PLUS_EXPR, arg01,
6323 build_int_cst (TREE_TYPE (arg01), 1));
6324 switch (tree_int_cst_sgn (arg1))
6326 case -1:
6327 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6328 lo = prod;
6329 break;
6331 case 0:
6332 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6333 lo = tmp;
6334 break;
6336 case 1:
6337 neg_overflow = true;
6338 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6339 hi = prod;
6340 break;
6342 default:
6343 gcc_unreachable ();
6347 switch (code)
6349 case EQ_EXPR:
6350 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6351 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6352 if (TREE_OVERFLOW (hi))
6353 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6354 if (TREE_OVERFLOW (lo))
6355 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6356 return build_range_check (loc, type, arg00, 1, lo, hi);
6358 case NE_EXPR:
6359 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6360 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6361 if (TREE_OVERFLOW (hi))
6362 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6363 if (TREE_OVERFLOW (lo))
6364 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6365 return build_range_check (loc, type, arg00, 0, lo, hi);
6367 case LT_EXPR:
6368 if (TREE_OVERFLOW (lo))
6370 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6371 return omit_one_operand_loc (loc, type, tmp, arg00);
6373 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6375 case LE_EXPR:
6376 if (TREE_OVERFLOW (hi))
6378 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6379 return omit_one_operand_loc (loc, type, tmp, arg00);
6381 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6383 case GT_EXPR:
6384 if (TREE_OVERFLOW (hi))
6386 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6387 return omit_one_operand_loc (loc, type, tmp, arg00);
6389 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6391 case GE_EXPR:
6392 if (TREE_OVERFLOW (lo))
6394 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6395 return omit_one_operand_loc (loc, type, tmp, arg00);
6397 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6399 default:
6400 break;
6403 return NULL_TREE;
6407 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6408 equality/inequality test, then return a simplified form of the test
6409 using a sign testing. Otherwise return NULL. TYPE is the desired
6410 result type. */
6412 static tree
6413 fold_single_bit_test_into_sign_test (location_t loc,
6414 enum tree_code code, tree arg0, tree arg1,
6415 tree result_type)
6417 /* If this is testing a single bit, we can optimize the test. */
6418 if ((code == NE_EXPR || code == EQ_EXPR)
6419 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6420 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6422 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6423 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6424 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6426 if (arg00 != NULL_TREE
6427 /* This is only a win if casting to a signed type is cheap,
6428 i.e. when arg00's type is not a partial mode. */
6429 && TYPE_PRECISION (TREE_TYPE (arg00))
6430 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6432 tree stype = signed_type_for (TREE_TYPE (arg00));
6433 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6434 result_type,
6435 fold_convert_loc (loc, stype, arg00),
6436 build_int_cst (stype, 0));
6440 return NULL_TREE;
6443 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6444 equality/inequality test, then return a simplified form of
6445 the test using shifts and logical operations. Otherwise return
6446 NULL. TYPE is the desired result type. */
6448 tree
6449 fold_single_bit_test (location_t loc, enum tree_code code,
6450 tree arg0, tree arg1, tree result_type)
6452 /* If this is testing a single bit, we can optimize the test. */
6453 if ((code == NE_EXPR || code == EQ_EXPR)
6454 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6455 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6457 tree inner = TREE_OPERAND (arg0, 0);
6458 tree type = TREE_TYPE (arg0);
6459 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6460 enum machine_mode operand_mode = TYPE_MODE (type);
6461 int ops_unsigned;
6462 tree signed_type, unsigned_type, intermediate_type;
6463 tree tem, one;
6465 /* First, see if we can fold the single bit test into a sign-bit
6466 test. */
6467 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6468 result_type);
6469 if (tem)
6470 return tem;
6472 /* Otherwise we have (A & C) != 0 where C is a single bit,
6473 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6474 Similarly for (A & C) == 0. */
6476 /* If INNER is a right shift of a constant and it plus BITNUM does
6477 not overflow, adjust BITNUM and INNER. */
6478 if (TREE_CODE (inner) == RSHIFT_EXPR
6479 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6480 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6481 && bitnum < TYPE_PRECISION (type)
6482 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6483 bitnum - TYPE_PRECISION (type)))
6485 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6486 inner = TREE_OPERAND (inner, 0);
6489 /* If we are going to be able to omit the AND below, we must do our
6490 operations as unsigned. If we must use the AND, we have a choice.
6491 Normally unsigned is faster, but for some machines signed is. */
6492 #ifdef LOAD_EXTEND_OP
6493 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6494 && !flag_syntax_only) ? 0 : 1;
6495 #else
6496 ops_unsigned = 1;
6497 #endif
6499 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6500 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6501 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6502 inner = fold_convert_loc (loc, intermediate_type, inner);
6504 if (bitnum != 0)
6505 inner = build2 (RSHIFT_EXPR, intermediate_type,
6506 inner, size_int (bitnum));
6508 one = build_int_cst (intermediate_type, 1);
6510 if (code == EQ_EXPR)
6511 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6513 /* Put the AND last so it can combine with more things. */
6514 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6516 /* Make sure to return the proper type. */
6517 inner = fold_convert_loc (loc, result_type, inner);
6519 return inner;
6521 return NULL_TREE;
6524 /* Check whether we are allowed to reorder operands arg0 and arg1,
6525 such that the evaluation of arg1 occurs before arg0. */
6527 static bool
6528 reorder_operands_p (const_tree arg0, const_tree arg1)
6530 if (! flag_evaluation_order)
6531 return true;
6532 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6533 return true;
6534 return ! TREE_SIDE_EFFECTS (arg0)
6535 && ! TREE_SIDE_EFFECTS (arg1);
6538 /* Test whether it is preferable two swap two operands, ARG0 and
6539 ARG1, for example because ARG0 is an integer constant and ARG1
6540 isn't. If REORDER is true, only recommend swapping if we can
6541 evaluate the operands in reverse order. */
6543 bool
6544 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6546 STRIP_SIGN_NOPS (arg0);
6547 STRIP_SIGN_NOPS (arg1);
6549 if (TREE_CODE (arg1) == INTEGER_CST)
6550 return 0;
6551 if (TREE_CODE (arg0) == INTEGER_CST)
6552 return 1;
6554 if (TREE_CODE (arg1) == REAL_CST)
6555 return 0;
6556 if (TREE_CODE (arg0) == REAL_CST)
6557 return 1;
6559 if (TREE_CODE (arg1) == FIXED_CST)
6560 return 0;
6561 if (TREE_CODE (arg0) == FIXED_CST)
6562 return 1;
6564 if (TREE_CODE (arg1) == COMPLEX_CST)
6565 return 0;
6566 if (TREE_CODE (arg0) == COMPLEX_CST)
6567 return 1;
6569 if (TREE_CONSTANT (arg1))
6570 return 0;
6571 if (TREE_CONSTANT (arg0))
6572 return 1;
6574 if (optimize_function_for_size_p (cfun))
6575 return 0;
6577 if (reorder && flag_evaluation_order
6578 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6579 return 0;
6581 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6582 for commutative and comparison operators. Ensuring a canonical
6583 form allows the optimizers to find additional redundancies without
6584 having to explicitly check for both orderings. */
6585 if (TREE_CODE (arg0) == SSA_NAME
6586 && TREE_CODE (arg1) == SSA_NAME
6587 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6588 return 1;
6590 /* Put SSA_NAMEs last. */
6591 if (TREE_CODE (arg1) == SSA_NAME)
6592 return 0;
6593 if (TREE_CODE (arg0) == SSA_NAME)
6594 return 1;
6596 /* Put variables last. */
6597 if (DECL_P (arg1))
6598 return 0;
6599 if (DECL_P (arg0))
6600 return 1;
6602 return 0;
6605 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6606 ARG0 is extended to a wider type. */
6608 static tree
6609 fold_widened_comparison (location_t loc, enum tree_code code,
6610 tree type, tree arg0, tree arg1)
6612 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6613 tree arg1_unw;
6614 tree shorter_type, outer_type;
6615 tree min, max;
6616 bool above, below;
6618 if (arg0_unw == arg0)
6619 return NULL_TREE;
6620 shorter_type = TREE_TYPE (arg0_unw);
6622 #ifdef HAVE_canonicalize_funcptr_for_compare
6623 /* Disable this optimization if we're casting a function pointer
6624 type on targets that require function pointer canonicalization. */
6625 if (HAVE_canonicalize_funcptr_for_compare
6626 && TREE_CODE (shorter_type) == POINTER_TYPE
6627 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6628 return NULL_TREE;
6629 #endif
6631 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6632 return NULL_TREE;
6634 arg1_unw = get_unwidened (arg1, NULL_TREE);
6636 /* If possible, express the comparison in the shorter mode. */
6637 if ((code == EQ_EXPR || code == NE_EXPR
6638 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6639 && (TREE_TYPE (arg1_unw) == shorter_type
6640 || ((TYPE_PRECISION (shorter_type)
6641 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6642 && (TYPE_UNSIGNED (shorter_type)
6643 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6644 || (TREE_CODE (arg1_unw) == INTEGER_CST
6645 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6646 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6647 && int_fits_type_p (arg1_unw, shorter_type))))
6648 return fold_build2_loc (loc, code, type, arg0_unw,
6649 fold_convert_loc (loc, shorter_type, arg1_unw));
6651 if (TREE_CODE (arg1_unw) != INTEGER_CST
6652 || TREE_CODE (shorter_type) != INTEGER_TYPE
6653 || !int_fits_type_p (arg1_unw, shorter_type))
6654 return NULL_TREE;
6656 /* If we are comparing with the integer that does not fit into the range
6657 of the shorter type, the result is known. */
6658 outer_type = TREE_TYPE (arg1_unw);
6659 min = lower_bound_in_type (outer_type, shorter_type);
6660 max = upper_bound_in_type (outer_type, shorter_type);
6662 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6663 max, arg1_unw));
6664 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6665 arg1_unw, min));
6667 switch (code)
6669 case EQ_EXPR:
6670 if (above || below)
6671 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6672 break;
6674 case NE_EXPR:
6675 if (above || below)
6676 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6677 break;
6679 case LT_EXPR:
6680 case LE_EXPR:
6681 if (above)
6682 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6683 else if (below)
6684 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6686 case GT_EXPR:
6687 case GE_EXPR:
6688 if (above)
6689 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6690 else if (below)
6691 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6693 default:
6694 break;
6697 return NULL_TREE;
6700 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6701 ARG0 just the signedness is changed. */
6703 static tree
6704 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6705 tree arg0, tree arg1)
6707 tree arg0_inner;
6708 tree inner_type, outer_type;
6710 if (!CONVERT_EXPR_P (arg0))
6711 return NULL_TREE;
6713 outer_type = TREE_TYPE (arg0);
6714 arg0_inner = TREE_OPERAND (arg0, 0);
6715 inner_type = TREE_TYPE (arg0_inner);
6717 #ifdef HAVE_canonicalize_funcptr_for_compare
6718 /* Disable this optimization if we're casting a function pointer
6719 type on targets that require function pointer canonicalization. */
6720 if (HAVE_canonicalize_funcptr_for_compare
6721 && TREE_CODE (inner_type) == POINTER_TYPE
6722 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6723 return NULL_TREE;
6724 #endif
6726 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6727 return NULL_TREE;
6729 if (TREE_CODE (arg1) != INTEGER_CST
6730 && !(CONVERT_EXPR_P (arg1)
6731 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6732 return NULL_TREE;
6734 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6735 && code != NE_EXPR
6736 && code != EQ_EXPR)
6737 return NULL_TREE;
6739 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6740 return NULL_TREE;
6742 if (TREE_CODE (arg1) == INTEGER_CST)
6743 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6744 0, TREE_OVERFLOW (arg1));
6745 else
6746 arg1 = fold_convert_loc (loc, inner_type, arg1);
6748 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6751 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6752 step of the array. Reconstructs s and delta in the case of s *
6753 delta being an integer constant (and thus already folded). ADDR is
6754 the address. MULT is the multiplicative expression. If the
6755 function succeeds, the new address expression is returned.
6756 Otherwise NULL_TREE is returned. LOC is the location of the
6757 resulting expression. */
6759 static tree
6760 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6762 tree s, delta, step;
6763 tree ref = TREE_OPERAND (addr, 0), pref;
6764 tree ret, pos;
6765 tree itype;
6766 bool mdim = false;
6768 /* Strip the nops that might be added when converting op1 to sizetype. */
6769 STRIP_NOPS (op1);
6771 /* Canonicalize op1 into a possibly non-constant delta
6772 and an INTEGER_CST s. */
6773 if (TREE_CODE (op1) == MULT_EXPR)
6775 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6777 STRIP_NOPS (arg0);
6778 STRIP_NOPS (arg1);
6780 if (TREE_CODE (arg0) == INTEGER_CST)
6782 s = arg0;
6783 delta = arg1;
6785 else if (TREE_CODE (arg1) == INTEGER_CST)
6787 s = arg1;
6788 delta = arg0;
6790 else
6791 return NULL_TREE;
6793 else if (TREE_CODE (op1) == INTEGER_CST)
6795 delta = op1;
6796 s = NULL_TREE;
6798 else
6800 /* Simulate we are delta * 1. */
6801 delta = op1;
6802 s = integer_one_node;
6805 /* Handle &x.array the same as we would handle &x.array[0]. */
6806 if (TREE_CODE (ref) == COMPONENT_REF
6807 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6809 tree domain;
6811 /* Remember if this was a multi-dimensional array. */
6812 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6813 mdim = true;
6815 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6816 if (! domain)
6817 goto cont;
6818 itype = TREE_TYPE (domain);
6820 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6821 if (TREE_CODE (step) != INTEGER_CST)
6822 goto cont;
6824 if (s)
6826 if (! tree_int_cst_equal (step, s))
6827 goto cont;
6829 else
6831 /* Try if delta is a multiple of step. */
6832 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6833 if (! tmp)
6834 goto cont;
6835 delta = tmp;
6838 /* Only fold here if we can verify we do not overflow one
6839 dimension of a multi-dimensional array. */
6840 if (mdim)
6842 tree tmp;
6844 if (!TYPE_MIN_VALUE (domain)
6845 || !TYPE_MAX_VALUE (domain)
6846 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6847 goto cont;
6849 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6850 fold_convert_loc (loc, itype,
6851 TYPE_MIN_VALUE (domain)),
6852 fold_convert_loc (loc, itype, delta));
6853 if (TREE_CODE (tmp) != INTEGER_CST
6854 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6855 goto cont;
6858 /* We found a suitable component reference. */
6860 pref = TREE_OPERAND (addr, 0);
6861 ret = copy_node (pref);
6862 SET_EXPR_LOCATION (ret, loc);
6864 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6865 fold_build2_loc
6866 (loc, PLUS_EXPR, itype,
6867 fold_convert_loc (loc, itype,
6868 TYPE_MIN_VALUE
6869 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6870 fold_convert_loc (loc, itype, delta)),
6871 NULL_TREE, NULL_TREE);
6872 return build_fold_addr_expr_loc (loc, ret);
6875 cont:
6877 for (;; ref = TREE_OPERAND (ref, 0))
6879 if (TREE_CODE (ref) == ARRAY_REF)
6881 tree domain;
6883 /* Remember if this was a multi-dimensional array. */
6884 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6885 mdim = true;
6887 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6888 if (! domain)
6889 continue;
6890 itype = TREE_TYPE (domain);
6892 step = array_ref_element_size (ref);
6893 if (TREE_CODE (step) != INTEGER_CST)
6894 continue;
6896 if (s)
6898 if (! tree_int_cst_equal (step, s))
6899 continue;
6901 else
6903 /* Try if delta is a multiple of step. */
6904 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6905 if (! tmp)
6906 continue;
6907 delta = tmp;
6910 /* Only fold here if we can verify we do not overflow one
6911 dimension of a multi-dimensional array. */
6912 if (mdim)
6914 tree tmp;
6916 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6917 || !TYPE_MAX_VALUE (domain)
6918 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6919 continue;
6921 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6922 fold_convert_loc (loc, itype,
6923 TREE_OPERAND (ref, 1)),
6924 fold_convert_loc (loc, itype, delta));
6925 if (!tmp
6926 || TREE_CODE (tmp) != INTEGER_CST
6927 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6928 continue;
6931 break;
6933 else
6934 mdim = false;
6936 if (!handled_component_p (ref))
6937 return NULL_TREE;
6940 /* We found the suitable array reference. So copy everything up to it,
6941 and replace the index. */
6943 pref = TREE_OPERAND (addr, 0);
6944 ret = copy_node (pref);
6945 SET_EXPR_LOCATION (ret, loc);
6946 pos = ret;
6948 while (pref != ref)
6950 pref = TREE_OPERAND (pref, 0);
6951 TREE_OPERAND (pos, 0) = copy_node (pref);
6952 pos = TREE_OPERAND (pos, 0);
6955 TREE_OPERAND (pos, 1)
6956 = fold_build2_loc (loc, PLUS_EXPR, itype,
6957 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6958 fold_convert_loc (loc, itype, delta));
6959 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6963 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6964 means A >= Y && A != MAX, but in this case we know that
6965 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6967 static tree
6968 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6970 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6972 if (TREE_CODE (bound) == LT_EXPR)
6973 a = TREE_OPERAND (bound, 0);
6974 else if (TREE_CODE (bound) == GT_EXPR)
6975 a = TREE_OPERAND (bound, 1);
6976 else
6977 return NULL_TREE;
6979 typea = TREE_TYPE (a);
6980 if (!INTEGRAL_TYPE_P (typea)
6981 && !POINTER_TYPE_P (typea))
6982 return NULL_TREE;
6984 if (TREE_CODE (ineq) == LT_EXPR)
6986 a1 = TREE_OPERAND (ineq, 1);
6987 y = TREE_OPERAND (ineq, 0);
6989 else if (TREE_CODE (ineq) == GT_EXPR)
6991 a1 = TREE_OPERAND (ineq, 0);
6992 y = TREE_OPERAND (ineq, 1);
6994 else
6995 return NULL_TREE;
6997 if (TREE_TYPE (a1) != typea)
6998 return NULL_TREE;
7000 if (POINTER_TYPE_P (typea))
7002 /* Convert the pointer types into integer before taking the difference. */
7003 tree ta = fold_convert_loc (loc, ssizetype, a);
7004 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7005 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7007 else
7008 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7010 if (!diff || !integer_onep (diff))
7011 return NULL_TREE;
7013 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7016 /* Fold a sum or difference of at least one multiplication.
7017 Returns the folded tree or NULL if no simplification could be made. */
7019 static tree
7020 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7021 tree arg0, tree arg1)
7023 tree arg00, arg01, arg10, arg11;
7024 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7026 /* (A * C) +- (B * C) -> (A+-B) * C.
7027 (A * C) +- A -> A * (C+-1).
7028 We are most concerned about the case where C is a constant,
7029 but other combinations show up during loop reduction. Since
7030 it is not difficult, try all four possibilities. */
7032 if (TREE_CODE (arg0) == MULT_EXPR)
7034 arg00 = TREE_OPERAND (arg0, 0);
7035 arg01 = TREE_OPERAND (arg0, 1);
7037 else if (TREE_CODE (arg0) == INTEGER_CST)
7039 arg00 = build_one_cst (type);
7040 arg01 = arg0;
7042 else
7044 /* We cannot generate constant 1 for fract. */
7045 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7046 return NULL_TREE;
7047 arg00 = arg0;
7048 arg01 = build_one_cst (type);
7050 if (TREE_CODE (arg1) == MULT_EXPR)
7052 arg10 = TREE_OPERAND (arg1, 0);
7053 arg11 = TREE_OPERAND (arg1, 1);
7055 else if (TREE_CODE (arg1) == INTEGER_CST)
7057 arg10 = build_one_cst (type);
7058 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7059 the purpose of this canonicalization. */
7060 if (TREE_INT_CST_HIGH (arg1) == -1
7061 && negate_expr_p (arg1)
7062 && code == PLUS_EXPR)
7064 arg11 = negate_expr (arg1);
7065 code = MINUS_EXPR;
7067 else
7068 arg11 = arg1;
7070 else
7072 /* We cannot generate constant 1 for fract. */
7073 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7074 return NULL_TREE;
7075 arg10 = arg1;
7076 arg11 = build_one_cst (type);
7078 same = NULL_TREE;
7080 if (operand_equal_p (arg01, arg11, 0))
7081 same = arg01, alt0 = arg00, alt1 = arg10;
7082 else if (operand_equal_p (arg00, arg10, 0))
7083 same = arg00, alt0 = arg01, alt1 = arg11;
7084 else if (operand_equal_p (arg00, arg11, 0))
7085 same = arg00, alt0 = arg01, alt1 = arg10;
7086 else if (operand_equal_p (arg01, arg10, 0))
7087 same = arg01, alt0 = arg00, alt1 = arg11;
7089 /* No identical multiplicands; see if we can find a common
7090 power-of-two factor in non-power-of-two multiplies. This
7091 can help in multi-dimensional array access. */
7092 else if (host_integerp (arg01, 0)
7093 && host_integerp (arg11, 0))
7095 HOST_WIDE_INT int01, int11, tmp;
7096 bool swap = false;
7097 tree maybe_same;
7098 int01 = TREE_INT_CST_LOW (arg01);
7099 int11 = TREE_INT_CST_LOW (arg11);
7101 /* Move min of absolute values to int11. */
7102 if (absu_hwi (int01) < absu_hwi (int11))
7104 tmp = int01, int01 = int11, int11 = tmp;
7105 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7106 maybe_same = arg01;
7107 swap = true;
7109 else
7110 maybe_same = arg11;
7112 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7113 /* The remainder should not be a constant, otherwise we
7114 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7115 increased the number of multiplications necessary. */
7116 && TREE_CODE (arg10) != INTEGER_CST)
7118 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7119 build_int_cst (TREE_TYPE (arg00),
7120 int01 / int11));
7121 alt1 = arg10;
7122 same = maybe_same;
7123 if (swap)
7124 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7128 if (same)
7129 return fold_build2_loc (loc, MULT_EXPR, type,
7130 fold_build2_loc (loc, code, type,
7131 fold_convert_loc (loc, type, alt0),
7132 fold_convert_loc (loc, type, alt1)),
7133 fold_convert_loc (loc, type, same));
7135 return NULL_TREE;
7138 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7139 specified by EXPR into the buffer PTR of length LEN bytes.
7140 Return the number of bytes placed in the buffer, or zero
7141 upon failure. */
7143 static int
7144 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7146 tree type = TREE_TYPE (expr);
7147 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7148 int byte, offset, word, words;
7149 unsigned char value;
7151 if (total_bytes > len)
7152 return 0;
7153 words = total_bytes / UNITS_PER_WORD;
7155 for (byte = 0; byte < total_bytes; byte++)
7157 int bitpos = byte * BITS_PER_UNIT;
7158 if (bitpos < HOST_BITS_PER_WIDE_INT)
7159 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7160 else
7161 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7162 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7164 if (total_bytes > UNITS_PER_WORD)
7166 word = byte / UNITS_PER_WORD;
7167 if (WORDS_BIG_ENDIAN)
7168 word = (words - 1) - word;
7169 offset = word * UNITS_PER_WORD;
7170 if (BYTES_BIG_ENDIAN)
7171 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7172 else
7173 offset += byte % UNITS_PER_WORD;
7175 else
7176 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7177 ptr[offset] = value;
7179 return total_bytes;
7183 /* Subroutine of native_encode_expr. Encode the REAL_CST
7184 specified by EXPR into the buffer PTR of length LEN bytes.
7185 Return the number of bytes placed in the buffer, or zero
7186 upon failure. */
7188 static int
7189 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7191 tree type = TREE_TYPE (expr);
7192 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7193 int byte, offset, word, words, bitpos;
7194 unsigned char value;
7196 /* There are always 32 bits in each long, no matter the size of
7197 the hosts long. We handle floating point representations with
7198 up to 192 bits. */
7199 long tmp[6];
7201 if (total_bytes > len)
7202 return 0;
7203 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7205 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7207 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7208 bitpos += BITS_PER_UNIT)
7210 byte = (bitpos / BITS_PER_UNIT) & 3;
7211 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7213 if (UNITS_PER_WORD < 4)
7215 word = byte / UNITS_PER_WORD;
7216 if (WORDS_BIG_ENDIAN)
7217 word = (words - 1) - word;
7218 offset = word * UNITS_PER_WORD;
7219 if (BYTES_BIG_ENDIAN)
7220 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7221 else
7222 offset += byte % UNITS_PER_WORD;
7224 else
7225 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7226 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7228 return total_bytes;
7231 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7232 specified by EXPR into the buffer PTR of length LEN bytes.
7233 Return the number of bytes placed in the buffer, or zero
7234 upon failure. */
7236 static int
7237 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7239 int rsize, isize;
7240 tree part;
7242 part = TREE_REALPART (expr);
7243 rsize = native_encode_expr (part, ptr, len);
7244 if (rsize == 0)
7245 return 0;
7246 part = TREE_IMAGPART (expr);
7247 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7248 if (isize != rsize)
7249 return 0;
7250 return rsize + isize;
7254 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7255 specified by EXPR into the buffer PTR of length LEN bytes.
7256 Return the number of bytes placed in the buffer, or zero
7257 upon failure. */
7259 static int
7260 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7262 unsigned i, count;
7263 int size, offset;
7264 tree itype, elem;
7266 offset = 0;
7267 count = VECTOR_CST_NELTS (expr);
7268 itype = TREE_TYPE (TREE_TYPE (expr));
7269 size = GET_MODE_SIZE (TYPE_MODE (itype));
7270 for (i = 0; i < count; i++)
7272 elem = VECTOR_CST_ELT (expr, i);
7273 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7274 return 0;
7275 offset += size;
7277 return offset;
7281 /* Subroutine of native_encode_expr. Encode the STRING_CST
7282 specified by EXPR into the buffer PTR of length LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero
7284 upon failure. */
7286 static int
7287 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7289 tree type = TREE_TYPE (expr);
7290 HOST_WIDE_INT total_bytes;
7292 if (TREE_CODE (type) != ARRAY_TYPE
7293 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7294 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7295 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7296 return 0;
7297 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7298 if (total_bytes > len)
7299 return 0;
7300 if (TREE_STRING_LENGTH (expr) < total_bytes)
7302 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7303 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7304 total_bytes - TREE_STRING_LENGTH (expr));
7306 else
7307 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7308 return total_bytes;
7312 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7313 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7314 buffer PTR of length LEN bytes. Return the number of bytes
7315 placed in the buffer, or zero upon failure. */
7318 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7320 switch (TREE_CODE (expr))
7322 case INTEGER_CST:
7323 return native_encode_int (expr, ptr, len);
7325 case REAL_CST:
7326 return native_encode_real (expr, ptr, len);
7328 case COMPLEX_CST:
7329 return native_encode_complex (expr, ptr, len);
7331 case VECTOR_CST:
7332 return native_encode_vector (expr, ptr, len);
7334 case STRING_CST:
7335 return native_encode_string (expr, ptr, len);
7337 default:
7338 return 0;
7343 /* Subroutine of native_interpret_expr. Interpret the contents of
7344 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7345 If the buffer cannot be interpreted, return NULL_TREE. */
7347 static tree
7348 native_interpret_int (tree type, const unsigned char *ptr, int len)
7350 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7351 int byte, offset, word, words;
7352 unsigned char value;
7353 double_int result;
7355 if (total_bytes > len)
7356 return NULL_TREE;
7357 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7358 return NULL_TREE;
7360 result = double_int_zero;
7361 words = total_bytes / UNITS_PER_WORD;
7363 for (byte = 0; byte < total_bytes; byte++)
7365 int bitpos = byte * BITS_PER_UNIT;
7366 if (total_bytes > UNITS_PER_WORD)
7368 word = byte / UNITS_PER_WORD;
7369 if (WORDS_BIG_ENDIAN)
7370 word = (words - 1) - word;
7371 offset = word * UNITS_PER_WORD;
7372 if (BYTES_BIG_ENDIAN)
7373 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7374 else
7375 offset += byte % UNITS_PER_WORD;
7377 else
7378 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7379 value = ptr[offset];
7381 if (bitpos < HOST_BITS_PER_WIDE_INT)
7382 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7383 else
7384 result.high |= (unsigned HOST_WIDE_INT) value
7385 << (bitpos - HOST_BITS_PER_WIDE_INT);
7388 return double_int_to_tree (type, result);
7392 /* Subroutine of native_interpret_expr. Interpret the contents of
7393 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7394 If the buffer cannot be interpreted, return NULL_TREE. */
7396 static tree
7397 native_interpret_real (tree type, const unsigned char *ptr, int len)
7399 enum machine_mode mode = TYPE_MODE (type);
7400 int total_bytes = GET_MODE_SIZE (mode);
7401 int byte, offset, word, words, bitpos;
7402 unsigned char value;
7403 /* There are always 32 bits in each long, no matter the size of
7404 the hosts long. We handle floating point representations with
7405 up to 192 bits. */
7406 REAL_VALUE_TYPE r;
7407 long tmp[6];
7409 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7410 if (total_bytes > len || total_bytes > 24)
7411 return NULL_TREE;
7412 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7414 memset (tmp, 0, sizeof (tmp));
7415 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7416 bitpos += BITS_PER_UNIT)
7418 byte = (bitpos / BITS_PER_UNIT) & 3;
7419 if (UNITS_PER_WORD < 4)
7421 word = byte / UNITS_PER_WORD;
7422 if (WORDS_BIG_ENDIAN)
7423 word = (words - 1) - word;
7424 offset = word * UNITS_PER_WORD;
7425 if (BYTES_BIG_ENDIAN)
7426 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7427 else
7428 offset += byte % UNITS_PER_WORD;
7430 else
7431 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7432 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7434 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7437 real_from_target (&r, tmp, mode);
7438 return build_real (type, r);
7442 /* Subroutine of native_interpret_expr. Interpret the contents of
7443 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7444 If the buffer cannot be interpreted, return NULL_TREE. */
7446 static tree
7447 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7449 tree etype, rpart, ipart;
7450 int size;
7452 etype = TREE_TYPE (type);
7453 size = GET_MODE_SIZE (TYPE_MODE (etype));
7454 if (size * 2 > len)
7455 return NULL_TREE;
7456 rpart = native_interpret_expr (etype, ptr, size);
7457 if (!rpart)
7458 return NULL_TREE;
7459 ipart = native_interpret_expr (etype, ptr+size, size);
7460 if (!ipart)
7461 return NULL_TREE;
7462 return build_complex (type, rpart, ipart);
7466 /* Subroutine of native_interpret_expr. Interpret the contents of
7467 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7468 If the buffer cannot be interpreted, return NULL_TREE. */
7470 static tree
7471 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7473 tree etype, elem;
7474 int i, size, count;
7475 tree *elements;
7477 etype = TREE_TYPE (type);
7478 size = GET_MODE_SIZE (TYPE_MODE (etype));
7479 count = TYPE_VECTOR_SUBPARTS (type);
7480 if (size * count > len)
7481 return NULL_TREE;
7483 elements = XALLOCAVEC (tree, count);
7484 for (i = count - 1; i >= 0; i--)
7486 elem = native_interpret_expr (etype, ptr+(i*size), size);
7487 if (!elem)
7488 return NULL_TREE;
7489 elements[i] = elem;
7491 return build_vector (type, elements);
7495 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7496 the buffer PTR of length LEN as a constant of type TYPE. For
7497 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7498 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7499 return NULL_TREE. */
7501 tree
7502 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7504 switch (TREE_CODE (type))
7506 case INTEGER_TYPE:
7507 case ENUMERAL_TYPE:
7508 case BOOLEAN_TYPE:
7509 case POINTER_TYPE:
7510 case REFERENCE_TYPE:
7511 return native_interpret_int (type, ptr, len);
7513 case REAL_TYPE:
7514 return native_interpret_real (type, ptr, len);
7516 case COMPLEX_TYPE:
7517 return native_interpret_complex (type, ptr, len);
7519 case VECTOR_TYPE:
7520 return native_interpret_vector (type, ptr, len);
7522 default:
7523 return NULL_TREE;
7527 /* Returns true if we can interpret the contents of a native encoding
7528 as TYPE. */
7530 static bool
7531 can_native_interpret_type_p (tree type)
7533 switch (TREE_CODE (type))
7535 case INTEGER_TYPE:
7536 case ENUMERAL_TYPE:
7537 case BOOLEAN_TYPE:
7538 case POINTER_TYPE:
7539 case REFERENCE_TYPE:
7540 case REAL_TYPE:
7541 case COMPLEX_TYPE:
7542 case VECTOR_TYPE:
7543 return true;
7544 default:
7545 return false;
7549 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7550 TYPE at compile-time. If we're unable to perform the conversion
7551 return NULL_TREE. */
7553 static tree
7554 fold_view_convert_expr (tree type, tree expr)
7556 /* We support up to 512-bit values (for V8DFmode). */
7557 unsigned char buffer[64];
7558 int len;
7560 /* Check that the host and target are sane. */
7561 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7562 return NULL_TREE;
7564 len = native_encode_expr (expr, buffer, sizeof (buffer));
7565 if (len == 0)
7566 return NULL_TREE;
7568 return native_interpret_expr (type, buffer, len);
7571 /* Build an expression for the address of T. Folds away INDIRECT_REF
7572 to avoid confusing the gimplify process. */
7574 tree
7575 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7577 /* The size of the object is not relevant when talking about its address. */
7578 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7579 t = TREE_OPERAND (t, 0);
7581 if (TREE_CODE (t) == INDIRECT_REF)
7583 t = TREE_OPERAND (t, 0);
7585 if (TREE_TYPE (t) != ptrtype)
7586 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7588 else if (TREE_CODE (t) == MEM_REF
7589 && integer_zerop (TREE_OPERAND (t, 1)))
7590 return TREE_OPERAND (t, 0);
7591 else if (TREE_CODE (t) == MEM_REF
7592 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7593 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7594 TREE_OPERAND (t, 0),
7595 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7596 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7598 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7600 if (TREE_TYPE (t) != ptrtype)
7601 t = fold_convert_loc (loc, ptrtype, t);
7603 else
7604 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7606 return t;
7609 /* Build an expression for the address of T. */
7611 tree
7612 build_fold_addr_expr_loc (location_t loc, tree t)
7614 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7616 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7619 static bool vec_cst_ctor_to_array (tree, tree *);
7621 /* Fold a unary expression of code CODE and type TYPE with operand
7622 OP0. Return the folded expression if folding is successful.
7623 Otherwise, return NULL_TREE. */
7625 tree
7626 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7628 tree tem;
7629 tree arg0;
7630 enum tree_code_class kind = TREE_CODE_CLASS (code);
7632 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7633 && TREE_CODE_LENGTH (code) == 1);
7635 arg0 = op0;
7636 if (arg0)
7638 if (CONVERT_EXPR_CODE_P (code)
7639 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7641 /* Don't use STRIP_NOPS, because signedness of argument type
7642 matters. */
7643 STRIP_SIGN_NOPS (arg0);
7645 else
7647 /* Strip any conversions that don't change the mode. This
7648 is safe for every expression, except for a comparison
7649 expression because its signedness is derived from its
7650 operands.
7652 Note that this is done as an internal manipulation within
7653 the constant folder, in order to find the simplest
7654 representation of the arguments so that their form can be
7655 studied. In any cases, the appropriate type conversions
7656 should be put back in the tree that will get out of the
7657 constant folder. */
7658 STRIP_NOPS (arg0);
7662 if (TREE_CODE_CLASS (code) == tcc_unary)
7664 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7665 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7666 fold_build1_loc (loc, code, type,
7667 fold_convert_loc (loc, TREE_TYPE (op0),
7668 TREE_OPERAND (arg0, 1))));
7669 else if (TREE_CODE (arg0) == COND_EXPR)
7671 tree arg01 = TREE_OPERAND (arg0, 1);
7672 tree arg02 = TREE_OPERAND (arg0, 2);
7673 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7674 arg01 = fold_build1_loc (loc, code, type,
7675 fold_convert_loc (loc,
7676 TREE_TYPE (op0), arg01));
7677 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7678 arg02 = fold_build1_loc (loc, code, type,
7679 fold_convert_loc (loc,
7680 TREE_TYPE (op0), arg02));
7681 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7682 arg01, arg02);
7684 /* If this was a conversion, and all we did was to move into
7685 inside the COND_EXPR, bring it back out. But leave it if
7686 it is a conversion from integer to integer and the
7687 result precision is no wider than a word since such a
7688 conversion is cheap and may be optimized away by combine,
7689 while it couldn't if it were outside the COND_EXPR. Then return
7690 so we don't get into an infinite recursion loop taking the
7691 conversion out and then back in. */
7693 if ((CONVERT_EXPR_CODE_P (code)
7694 || code == NON_LVALUE_EXPR)
7695 && TREE_CODE (tem) == COND_EXPR
7696 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7697 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7698 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7699 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7700 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7701 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7702 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7703 && (INTEGRAL_TYPE_P
7704 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7705 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7706 || flag_syntax_only))
7707 tem = build1_loc (loc, code, type,
7708 build3 (COND_EXPR,
7709 TREE_TYPE (TREE_OPERAND
7710 (TREE_OPERAND (tem, 1), 0)),
7711 TREE_OPERAND (tem, 0),
7712 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7713 TREE_OPERAND (TREE_OPERAND (tem, 2),
7714 0)));
7715 return tem;
7719 switch (code)
7721 case PAREN_EXPR:
7722 /* Re-association barriers around constants and other re-association
7723 barriers can be removed. */
7724 if (CONSTANT_CLASS_P (op0)
7725 || TREE_CODE (op0) == PAREN_EXPR)
7726 return fold_convert_loc (loc, type, op0);
7727 return NULL_TREE;
7729 CASE_CONVERT:
7730 case FLOAT_EXPR:
7731 case FIX_TRUNC_EXPR:
7732 if (TREE_TYPE (op0) == type)
7733 return op0;
7735 if (COMPARISON_CLASS_P (op0))
7737 /* If we have (type) (a CMP b) and type is an integral type, return
7738 new expression involving the new type. Canonicalize
7739 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7740 non-integral type.
7741 Do not fold the result as that would not simplify further, also
7742 folding again results in recursions. */
7743 if (TREE_CODE (type) == BOOLEAN_TYPE)
7744 return build2_loc (loc, TREE_CODE (op0), type,
7745 TREE_OPERAND (op0, 0),
7746 TREE_OPERAND (op0, 1));
7747 else if (!INTEGRAL_TYPE_P (type) && TREE_CODE (type) != VECTOR_TYPE)
7748 return build3_loc (loc, COND_EXPR, type, op0,
7749 constant_boolean_node (true, type),
7750 constant_boolean_node (false, type));
7753 /* Handle cases of two conversions in a row. */
7754 if (CONVERT_EXPR_P (op0))
7756 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7757 tree inter_type = TREE_TYPE (op0);
7758 int inside_int = INTEGRAL_TYPE_P (inside_type);
7759 int inside_ptr = POINTER_TYPE_P (inside_type);
7760 int inside_float = FLOAT_TYPE_P (inside_type);
7761 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7762 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7763 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7764 int inter_int = INTEGRAL_TYPE_P (inter_type);
7765 int inter_ptr = POINTER_TYPE_P (inter_type);
7766 int inter_float = FLOAT_TYPE_P (inter_type);
7767 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7768 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7769 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7770 int final_int = INTEGRAL_TYPE_P (type);
7771 int final_ptr = POINTER_TYPE_P (type);
7772 int final_float = FLOAT_TYPE_P (type);
7773 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7774 unsigned int final_prec = TYPE_PRECISION (type);
7775 int final_unsignedp = TYPE_UNSIGNED (type);
7777 /* In addition to the cases of two conversions in a row
7778 handled below, if we are converting something to its own
7779 type via an object of identical or wider precision, neither
7780 conversion is needed. */
7781 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7782 && (((inter_int || inter_ptr) && final_int)
7783 || (inter_float && final_float))
7784 && inter_prec >= final_prec)
7785 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7787 /* Likewise, if the intermediate and initial types are either both
7788 float or both integer, we don't need the middle conversion if the
7789 former is wider than the latter and doesn't change the signedness
7790 (for integers). Avoid this if the final type is a pointer since
7791 then we sometimes need the middle conversion. Likewise if the
7792 final type has a precision not equal to the size of its mode. */
7793 if (((inter_int && inside_int)
7794 || (inter_float && inside_float)
7795 || (inter_vec && inside_vec))
7796 && inter_prec >= inside_prec
7797 && (inter_float || inter_vec
7798 || inter_unsignedp == inside_unsignedp)
7799 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7800 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7801 && ! final_ptr
7802 && (! final_vec || inter_prec == inside_prec))
7803 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7805 /* If we have a sign-extension of a zero-extended value, we can
7806 replace that by a single zero-extension. Likewise if the
7807 final conversion does not change precision we can drop the
7808 intermediate conversion. */
7809 if (inside_int && inter_int && final_int
7810 && ((inside_prec < inter_prec && inter_prec < final_prec
7811 && inside_unsignedp && !inter_unsignedp)
7812 || final_prec == inter_prec))
7813 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7815 /* Two conversions in a row are not needed unless:
7816 - some conversion is floating-point (overstrict for now), or
7817 - some conversion is a vector (overstrict for now), or
7818 - the intermediate type is narrower than both initial and
7819 final, or
7820 - the intermediate type and innermost type differ in signedness,
7821 and the outermost type is wider than the intermediate, or
7822 - the initial type is a pointer type and the precisions of the
7823 intermediate and final types differ, or
7824 - the final type is a pointer type and the precisions of the
7825 initial and intermediate types differ. */
7826 if (! inside_float && ! inter_float && ! final_float
7827 && ! inside_vec && ! inter_vec && ! final_vec
7828 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7829 && ! (inside_int && inter_int
7830 && inter_unsignedp != inside_unsignedp
7831 && inter_prec < final_prec)
7832 && ((inter_unsignedp && inter_prec > inside_prec)
7833 == (final_unsignedp && final_prec > inter_prec))
7834 && ! (inside_ptr && inter_prec != final_prec)
7835 && ! (final_ptr && inside_prec != inter_prec)
7836 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7837 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7838 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7841 /* Handle (T *)&A.B.C for A being of type T and B and C
7842 living at offset zero. This occurs frequently in
7843 C++ upcasting and then accessing the base. */
7844 if (TREE_CODE (op0) == ADDR_EXPR
7845 && POINTER_TYPE_P (type)
7846 && handled_component_p (TREE_OPERAND (op0, 0)))
7848 HOST_WIDE_INT bitsize, bitpos;
7849 tree offset;
7850 enum machine_mode mode;
7851 int unsignedp, volatilep;
7852 tree base = TREE_OPERAND (op0, 0);
7853 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7854 &mode, &unsignedp, &volatilep, false);
7855 /* If the reference was to a (constant) zero offset, we can use
7856 the address of the base if it has the same base type
7857 as the result type and the pointer type is unqualified. */
7858 if (! offset && bitpos == 0
7859 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7860 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7861 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7862 return fold_convert_loc (loc, type,
7863 build_fold_addr_expr_loc (loc, base));
7866 if (TREE_CODE (op0) == MODIFY_EXPR
7867 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7868 /* Detect assigning a bitfield. */
7869 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7870 && DECL_BIT_FIELD
7871 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7873 /* Don't leave an assignment inside a conversion
7874 unless assigning a bitfield. */
7875 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7876 /* First do the assignment, then return converted constant. */
7877 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7878 TREE_NO_WARNING (tem) = 1;
7879 TREE_USED (tem) = 1;
7880 return tem;
7883 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7884 constants (if x has signed type, the sign bit cannot be set
7885 in c). This folds extension into the BIT_AND_EXPR.
7886 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7887 very likely don't have maximal range for their precision and this
7888 transformation effectively doesn't preserve non-maximal ranges. */
7889 if (TREE_CODE (type) == INTEGER_TYPE
7890 && TREE_CODE (op0) == BIT_AND_EXPR
7891 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7893 tree and_expr = op0;
7894 tree and0 = TREE_OPERAND (and_expr, 0);
7895 tree and1 = TREE_OPERAND (and_expr, 1);
7896 int change = 0;
7898 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7899 || (TYPE_PRECISION (type)
7900 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7901 change = 1;
7902 else if (TYPE_PRECISION (TREE_TYPE (and1))
7903 <= HOST_BITS_PER_WIDE_INT
7904 && host_integerp (and1, 1))
7906 unsigned HOST_WIDE_INT cst;
7908 cst = tree_low_cst (and1, 1);
7909 cst &= (HOST_WIDE_INT) -1
7910 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7911 change = (cst == 0);
7912 #ifdef LOAD_EXTEND_OP
7913 if (change
7914 && !flag_syntax_only
7915 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7916 == ZERO_EXTEND))
7918 tree uns = unsigned_type_for (TREE_TYPE (and0));
7919 and0 = fold_convert_loc (loc, uns, and0);
7920 and1 = fold_convert_loc (loc, uns, and1);
7922 #endif
7924 if (change)
7926 tem = force_fit_type_double (type, tree_to_double_int (and1),
7927 0, TREE_OVERFLOW (and1));
7928 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7929 fold_convert_loc (loc, type, and0), tem);
7933 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7934 when one of the new casts will fold away. Conservatively we assume
7935 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7936 if (POINTER_TYPE_P (type)
7937 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7938 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7939 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7940 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7941 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7943 tree arg00 = TREE_OPERAND (arg0, 0);
7944 tree arg01 = TREE_OPERAND (arg0, 1);
7946 return fold_build_pointer_plus_loc
7947 (loc, fold_convert_loc (loc, type, arg00), arg01);
7950 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7951 of the same precision, and X is an integer type not narrower than
7952 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7953 if (INTEGRAL_TYPE_P (type)
7954 && TREE_CODE (op0) == BIT_NOT_EXPR
7955 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7956 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7957 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7959 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7960 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7961 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7962 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7963 fold_convert_loc (loc, type, tem));
7966 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7967 type of X and Y (integer types only). */
7968 if (INTEGRAL_TYPE_P (type)
7969 && TREE_CODE (op0) == MULT_EXPR
7970 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7971 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7973 /* Be careful not to introduce new overflows. */
7974 tree mult_type;
7975 if (TYPE_OVERFLOW_WRAPS (type))
7976 mult_type = type;
7977 else
7978 mult_type = unsigned_type_for (type);
7980 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7982 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7983 fold_convert_loc (loc, mult_type,
7984 TREE_OPERAND (op0, 0)),
7985 fold_convert_loc (loc, mult_type,
7986 TREE_OPERAND (op0, 1)));
7987 return fold_convert_loc (loc, type, tem);
7991 tem = fold_convert_const (code, type, op0);
7992 return tem ? tem : NULL_TREE;
7994 case ADDR_SPACE_CONVERT_EXPR:
7995 if (integer_zerop (arg0))
7996 return fold_convert_const (code, type, arg0);
7997 return NULL_TREE;
7999 case FIXED_CONVERT_EXPR:
8000 tem = fold_convert_const (code, type, arg0);
8001 return tem ? tem : NULL_TREE;
8003 case VIEW_CONVERT_EXPR:
8004 if (TREE_TYPE (op0) == type)
8005 return op0;
8006 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8007 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8008 type, TREE_OPERAND (op0, 0));
8009 if (TREE_CODE (op0) == MEM_REF)
8010 return fold_build2_loc (loc, MEM_REF, type,
8011 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8013 /* For integral conversions with the same precision or pointer
8014 conversions use a NOP_EXPR instead. */
8015 if ((INTEGRAL_TYPE_P (type)
8016 || POINTER_TYPE_P (type))
8017 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8018 || POINTER_TYPE_P (TREE_TYPE (op0)))
8019 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8020 return fold_convert_loc (loc, type, op0);
8022 /* Strip inner integral conversions that do not change the precision. */
8023 if (CONVERT_EXPR_P (op0)
8024 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8025 || POINTER_TYPE_P (TREE_TYPE (op0)))
8026 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8027 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8028 && (TYPE_PRECISION (TREE_TYPE (op0))
8029 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8030 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8031 type, TREE_OPERAND (op0, 0));
8033 return fold_view_convert_expr (type, op0);
8035 case NEGATE_EXPR:
8036 tem = fold_negate_expr (loc, arg0);
8037 if (tem)
8038 return fold_convert_loc (loc, type, tem);
8039 return NULL_TREE;
8041 case ABS_EXPR:
8042 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8043 return fold_abs_const (arg0, type);
8044 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8045 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8046 /* Convert fabs((double)float) into (double)fabsf(float). */
8047 else if (TREE_CODE (arg0) == NOP_EXPR
8048 && TREE_CODE (type) == REAL_TYPE)
8050 tree targ0 = strip_float_extensions (arg0);
8051 if (targ0 != arg0)
8052 return fold_convert_loc (loc, type,
8053 fold_build1_loc (loc, ABS_EXPR,
8054 TREE_TYPE (targ0),
8055 targ0));
8057 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8058 else if (TREE_CODE (arg0) == ABS_EXPR)
8059 return arg0;
8060 else if (tree_expr_nonnegative_p (arg0))
8061 return arg0;
8063 /* Strip sign ops from argument. */
8064 if (TREE_CODE (type) == REAL_TYPE)
8066 tem = fold_strip_sign_ops (arg0);
8067 if (tem)
8068 return fold_build1_loc (loc, ABS_EXPR, type,
8069 fold_convert_loc (loc, type, tem));
8071 return NULL_TREE;
8073 case CONJ_EXPR:
8074 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8075 return fold_convert_loc (loc, type, arg0);
8076 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8078 tree itype = TREE_TYPE (type);
8079 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8080 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8081 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8082 negate_expr (ipart));
8084 if (TREE_CODE (arg0) == COMPLEX_CST)
8086 tree itype = TREE_TYPE (type);
8087 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8088 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8089 return build_complex (type, rpart, negate_expr (ipart));
8091 if (TREE_CODE (arg0) == CONJ_EXPR)
8092 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8093 return NULL_TREE;
8095 case BIT_NOT_EXPR:
8096 if (TREE_CODE (arg0) == INTEGER_CST)
8097 return fold_not_const (arg0, type);
8098 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8099 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8100 /* Convert ~ (-A) to A - 1. */
8101 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8102 return fold_build2_loc (loc, MINUS_EXPR, type,
8103 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8104 build_int_cst (type, 1));
8105 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8106 else if (INTEGRAL_TYPE_P (type)
8107 && ((TREE_CODE (arg0) == MINUS_EXPR
8108 && integer_onep (TREE_OPERAND (arg0, 1)))
8109 || (TREE_CODE (arg0) == PLUS_EXPR
8110 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8111 return fold_build1_loc (loc, NEGATE_EXPR, type,
8112 fold_convert_loc (loc, type,
8113 TREE_OPERAND (arg0, 0)));
8114 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8115 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8116 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8117 fold_convert_loc (loc, type,
8118 TREE_OPERAND (arg0, 0)))))
8119 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8120 fold_convert_loc (loc, type,
8121 TREE_OPERAND (arg0, 1)));
8122 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8123 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8124 fold_convert_loc (loc, type,
8125 TREE_OPERAND (arg0, 1)))))
8126 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8127 fold_convert_loc (loc, type,
8128 TREE_OPERAND (arg0, 0)), tem);
8129 /* Perform BIT_NOT_EXPR on each element individually. */
8130 else if (TREE_CODE (arg0) == VECTOR_CST)
8132 tree *elements;
8133 tree elem;
8134 unsigned count = VECTOR_CST_NELTS (arg0), i;
8136 elements = XALLOCAVEC (tree, count);
8137 for (i = 0; i < count; i++)
8139 elem = VECTOR_CST_ELT (arg0, i);
8140 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8141 if (elem == NULL_TREE)
8142 break;
8143 elements[i] = elem;
8145 if (i == count)
8146 return build_vector (type, elements);
8149 return NULL_TREE;
8151 case TRUTH_NOT_EXPR:
8152 /* The argument to invert_truthvalue must have Boolean type. */
8153 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8154 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8156 /* Note that the operand of this must be an int
8157 and its values must be 0 or 1.
8158 ("true" is a fixed value perhaps depending on the language,
8159 but we don't handle values other than 1 correctly yet.) */
8160 tem = fold_truth_not_expr (loc, arg0);
8161 if (!tem)
8162 return NULL_TREE;
8163 return fold_convert_loc (loc, type, tem);
8165 case REALPART_EXPR:
8166 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8167 return fold_convert_loc (loc, type, arg0);
8168 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8169 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8170 TREE_OPERAND (arg0, 1));
8171 if (TREE_CODE (arg0) == COMPLEX_CST)
8172 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8173 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8175 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8176 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8177 fold_build1_loc (loc, REALPART_EXPR, itype,
8178 TREE_OPERAND (arg0, 0)),
8179 fold_build1_loc (loc, REALPART_EXPR, itype,
8180 TREE_OPERAND (arg0, 1)));
8181 return fold_convert_loc (loc, type, tem);
8183 if (TREE_CODE (arg0) == CONJ_EXPR)
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8187 TREE_OPERAND (arg0, 0));
8188 return fold_convert_loc (loc, type, tem);
8190 if (TREE_CODE (arg0) == CALL_EXPR)
8192 tree fn = get_callee_fndecl (arg0);
8193 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8194 switch (DECL_FUNCTION_CODE (fn))
8196 CASE_FLT_FN (BUILT_IN_CEXPI):
8197 fn = mathfn_built_in (type, BUILT_IN_COS);
8198 if (fn)
8199 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8200 break;
8202 default:
8203 break;
8206 return NULL_TREE;
8208 case IMAGPART_EXPR:
8209 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8210 return build_zero_cst (type);
8211 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8212 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8213 TREE_OPERAND (arg0, 0));
8214 if (TREE_CODE (arg0) == COMPLEX_CST)
8215 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8216 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8218 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8219 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8220 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8221 TREE_OPERAND (arg0, 0)),
8222 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8223 TREE_OPERAND (arg0, 1)));
8224 return fold_convert_loc (loc, type, tem);
8226 if (TREE_CODE (arg0) == CONJ_EXPR)
8228 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8229 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8230 return fold_convert_loc (loc, type, negate_expr (tem));
8232 if (TREE_CODE (arg0) == CALL_EXPR)
8234 tree fn = get_callee_fndecl (arg0);
8235 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8236 switch (DECL_FUNCTION_CODE (fn))
8238 CASE_FLT_FN (BUILT_IN_CEXPI):
8239 fn = mathfn_built_in (type, BUILT_IN_SIN);
8240 if (fn)
8241 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8242 break;
8244 default:
8245 break;
8248 return NULL_TREE;
8250 case INDIRECT_REF:
8251 /* Fold *&X to X if X is an lvalue. */
8252 if (TREE_CODE (op0) == ADDR_EXPR)
8254 tree op00 = TREE_OPERAND (op0, 0);
8255 if ((TREE_CODE (op00) == VAR_DECL
8256 || TREE_CODE (op00) == PARM_DECL
8257 || TREE_CODE (op00) == RESULT_DECL)
8258 && !TREE_READONLY (op00))
8259 return op00;
8261 return NULL_TREE;
8263 case VEC_UNPACK_LO_EXPR:
8264 case VEC_UNPACK_HI_EXPR:
8265 case VEC_UNPACK_FLOAT_LO_EXPR:
8266 case VEC_UNPACK_FLOAT_HI_EXPR:
8268 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8269 tree *elts;
8270 enum tree_code subcode;
8272 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8273 if (TREE_CODE (arg0) != VECTOR_CST)
8274 return NULL_TREE;
8276 elts = XALLOCAVEC (tree, nelts * 2);
8277 if (!vec_cst_ctor_to_array (arg0, elts))
8278 return NULL_TREE;
8280 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8281 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8282 elts += nelts;
8284 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8285 subcode = NOP_EXPR;
8286 else
8287 subcode = FLOAT_EXPR;
8289 for (i = 0; i < nelts; i++)
8291 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8292 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8293 return NULL_TREE;
8296 return build_vector (type, elts);
8299 case REDUC_MIN_EXPR:
8300 case REDUC_MAX_EXPR:
8301 case REDUC_PLUS_EXPR:
8303 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8304 tree *elts;
8305 enum tree_code subcode;
8307 if (TREE_CODE (op0) != VECTOR_CST)
8308 return NULL_TREE;
8310 elts = XALLOCAVEC (tree, nelts);
8311 if (!vec_cst_ctor_to_array (op0, elts))
8312 return NULL_TREE;
8314 switch (code)
8316 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8317 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8318 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8319 default: gcc_unreachable ();
8322 for (i = 1; i < nelts; i++)
8324 elts[0] = const_binop (subcode, elts[0], elts[i]);
8325 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8326 return NULL_TREE;
8327 elts[i] = build_zero_cst (TREE_TYPE (type));
8330 return build_vector (type, elts);
8333 default:
8334 return NULL_TREE;
8335 } /* switch (code) */
8339 /* If the operation was a conversion do _not_ mark a resulting constant
8340 with TREE_OVERFLOW if the original constant was not. These conversions
8341 have implementation defined behavior and retaining the TREE_OVERFLOW
8342 flag here would confuse later passes such as VRP. */
8343 tree
8344 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8345 tree type, tree op0)
8347 tree res = fold_unary_loc (loc, code, type, op0);
8348 if (res
8349 && TREE_CODE (res) == INTEGER_CST
8350 && TREE_CODE (op0) == INTEGER_CST
8351 && CONVERT_EXPR_CODE_P (code))
8352 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8354 return res;
8357 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8358 operands OP0 and OP1. LOC is the location of the resulting expression.
8359 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8360 Return the folded expression if folding is successful. Otherwise,
8361 return NULL_TREE. */
8362 static tree
8363 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8364 tree arg0, tree arg1, tree op0, tree op1)
8366 tree tem;
8368 /* We only do these simplifications if we are optimizing. */
8369 if (!optimize)
8370 return NULL_TREE;
8372 /* Check for things like (A || B) && (A || C). We can convert this
8373 to A || (B && C). Note that either operator can be any of the four
8374 truth and/or operations and the transformation will still be
8375 valid. Also note that we only care about order for the
8376 ANDIF and ORIF operators. If B contains side effects, this
8377 might change the truth-value of A. */
8378 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8379 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8380 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8381 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8382 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8383 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8385 tree a00 = TREE_OPERAND (arg0, 0);
8386 tree a01 = TREE_OPERAND (arg0, 1);
8387 tree a10 = TREE_OPERAND (arg1, 0);
8388 tree a11 = TREE_OPERAND (arg1, 1);
8389 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8390 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8391 && (code == TRUTH_AND_EXPR
8392 || code == TRUTH_OR_EXPR));
8394 if (operand_equal_p (a00, a10, 0))
8395 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8396 fold_build2_loc (loc, code, type, a01, a11));
8397 else if (commutative && operand_equal_p (a00, a11, 0))
8398 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8399 fold_build2_loc (loc, code, type, a01, a10));
8400 else if (commutative && operand_equal_p (a01, a10, 0))
8401 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8402 fold_build2_loc (loc, code, type, a00, a11));
8404 /* This case if tricky because we must either have commutative
8405 operators or else A10 must not have side-effects. */
8407 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8408 && operand_equal_p (a01, a11, 0))
8409 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8410 fold_build2_loc (loc, code, type, a00, a10),
8411 a01);
8414 /* See if we can build a range comparison. */
8415 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8416 return tem;
8418 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8419 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8421 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8422 if (tem)
8423 return fold_build2_loc (loc, code, type, tem, arg1);
8426 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8427 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8429 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8430 if (tem)
8431 return fold_build2_loc (loc, code, type, arg0, tem);
8434 /* Check for the possibility of merging component references. If our
8435 lhs is another similar operation, try to merge its rhs with our
8436 rhs. Then try to merge our lhs and rhs. */
8437 if (TREE_CODE (arg0) == code
8438 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8439 TREE_OPERAND (arg0, 1), arg1)))
8440 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8442 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8443 return tem;
8445 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8446 && (code == TRUTH_AND_EXPR
8447 || code == TRUTH_ANDIF_EXPR
8448 || code == TRUTH_OR_EXPR
8449 || code == TRUTH_ORIF_EXPR))
8451 enum tree_code ncode, icode;
8453 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8454 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8455 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8457 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8458 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8459 We don't want to pack more than two leafs to a non-IF AND/OR
8460 expression.
8461 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8462 equal to IF-CODE, then we don't want to add right-hand operand.
8463 If the inner right-hand side of left-hand operand has
8464 side-effects, or isn't simple, then we can't add to it,
8465 as otherwise we might destroy if-sequence. */
8466 if (TREE_CODE (arg0) == icode
8467 && simple_operand_p_2 (arg1)
8468 /* Needed for sequence points to handle trappings, and
8469 side-effects. */
8470 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8472 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8473 arg1);
8474 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8475 tem);
8477 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8478 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8479 else if (TREE_CODE (arg1) == icode
8480 && simple_operand_p_2 (arg0)
8481 /* Needed for sequence points to handle trappings, and
8482 side-effects. */
8483 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8485 tem = fold_build2_loc (loc, ncode, type,
8486 arg0, TREE_OPERAND (arg1, 0));
8487 return fold_build2_loc (loc, icode, type, tem,
8488 TREE_OPERAND (arg1, 1));
8490 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8491 into (A OR B).
8492 For sequence point consistancy, we need to check for trapping,
8493 and side-effects. */
8494 else if (code == icode && simple_operand_p_2 (arg0)
8495 && simple_operand_p_2 (arg1))
8496 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8499 return NULL_TREE;
8502 /* Fold a binary expression of code CODE and type TYPE with operands
8503 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8504 Return the folded expression if folding is successful. Otherwise,
8505 return NULL_TREE. */
8507 static tree
8508 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8510 enum tree_code compl_code;
8512 if (code == MIN_EXPR)
8513 compl_code = MAX_EXPR;
8514 else if (code == MAX_EXPR)
8515 compl_code = MIN_EXPR;
8516 else
8517 gcc_unreachable ();
8519 /* MIN (MAX (a, b), b) == b. */
8520 if (TREE_CODE (op0) == compl_code
8521 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8522 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8524 /* MIN (MAX (b, a), b) == b. */
8525 if (TREE_CODE (op0) == compl_code
8526 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8527 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8528 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8530 /* MIN (a, MAX (a, b)) == a. */
8531 if (TREE_CODE (op1) == compl_code
8532 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8533 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8534 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8536 /* MIN (a, MAX (b, a)) == a. */
8537 if (TREE_CODE (op1) == compl_code
8538 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8539 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8540 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8542 return NULL_TREE;
8545 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8546 by changing CODE to reduce the magnitude of constants involved in
8547 ARG0 of the comparison.
8548 Returns a canonicalized comparison tree if a simplification was
8549 possible, otherwise returns NULL_TREE.
8550 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8551 valid if signed overflow is undefined. */
8553 static tree
8554 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8555 tree arg0, tree arg1,
8556 bool *strict_overflow_p)
8558 enum tree_code code0 = TREE_CODE (arg0);
8559 tree t, cst0 = NULL_TREE;
8560 int sgn0;
8561 bool swap = false;
8563 /* Match A +- CST code arg1 and CST code arg1. We can change the
8564 first form only if overflow is undefined. */
8565 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8566 /* In principle pointers also have undefined overflow behavior,
8567 but that causes problems elsewhere. */
8568 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8569 && (code0 == MINUS_EXPR
8570 || code0 == PLUS_EXPR)
8571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8572 || code0 == INTEGER_CST))
8573 return NULL_TREE;
8575 /* Identify the constant in arg0 and its sign. */
8576 if (code0 == INTEGER_CST)
8577 cst0 = arg0;
8578 else
8579 cst0 = TREE_OPERAND (arg0, 1);
8580 sgn0 = tree_int_cst_sgn (cst0);
8582 /* Overflowed constants and zero will cause problems. */
8583 if (integer_zerop (cst0)
8584 || TREE_OVERFLOW (cst0))
8585 return NULL_TREE;
8587 /* See if we can reduce the magnitude of the constant in
8588 arg0 by changing the comparison code. */
8589 if (code0 == INTEGER_CST)
8591 /* CST <= arg1 -> CST-1 < arg1. */
8592 if (code == LE_EXPR && sgn0 == 1)
8593 code = LT_EXPR;
8594 /* -CST < arg1 -> -CST-1 <= arg1. */
8595 else if (code == LT_EXPR && sgn0 == -1)
8596 code = LE_EXPR;
8597 /* CST > arg1 -> CST-1 >= arg1. */
8598 else if (code == GT_EXPR && sgn0 == 1)
8599 code = GE_EXPR;
8600 /* -CST >= arg1 -> -CST-1 > arg1. */
8601 else if (code == GE_EXPR && sgn0 == -1)
8602 code = GT_EXPR;
8603 else
8604 return NULL_TREE;
8605 /* arg1 code' CST' might be more canonical. */
8606 swap = true;
8608 else
8610 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8611 if (code == LT_EXPR
8612 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8613 code = LE_EXPR;
8614 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8615 else if (code == GT_EXPR
8616 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8617 code = GE_EXPR;
8618 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8619 else if (code == LE_EXPR
8620 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8621 code = LT_EXPR;
8622 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8623 else if (code == GE_EXPR
8624 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8625 code = GT_EXPR;
8626 else
8627 return NULL_TREE;
8628 *strict_overflow_p = true;
8631 /* Now build the constant reduced in magnitude. But not if that
8632 would produce one outside of its types range. */
8633 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8634 && ((sgn0 == 1
8635 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8636 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8637 || (sgn0 == -1
8638 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8639 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8640 /* We cannot swap the comparison here as that would cause us to
8641 endlessly recurse. */
8642 return NULL_TREE;
8644 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8645 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8646 if (code0 != INTEGER_CST)
8647 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8648 t = fold_convert (TREE_TYPE (arg1), t);
8650 /* If swapping might yield to a more canonical form, do so. */
8651 if (swap)
8652 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8653 else
8654 return fold_build2_loc (loc, code, type, t, arg1);
8657 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8658 overflow further. Try to decrease the magnitude of constants involved
8659 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8660 and put sole constants at the second argument position.
8661 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8663 static tree
8664 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8665 tree arg0, tree arg1)
8667 tree t;
8668 bool strict_overflow_p;
8669 const char * const warnmsg = G_("assuming signed overflow does not occur "
8670 "when reducing constant in comparison");
8672 /* Try canonicalization by simplifying arg0. */
8673 strict_overflow_p = false;
8674 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8675 &strict_overflow_p);
8676 if (t)
8678 if (strict_overflow_p)
8679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8680 return t;
8683 /* Try canonicalization by simplifying arg1 using the swapped
8684 comparison. */
8685 code = swap_tree_comparison (code);
8686 strict_overflow_p = false;
8687 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8688 &strict_overflow_p);
8689 if (t && strict_overflow_p)
8690 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8691 return t;
8694 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8695 space. This is used to avoid issuing overflow warnings for
8696 expressions like &p->x which can not wrap. */
8698 static bool
8699 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8701 double_int di_offset, total;
8703 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8704 return true;
8706 if (bitpos < 0)
8707 return true;
8709 if (offset == NULL_TREE)
8710 di_offset = double_int_zero;
8711 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8712 return true;
8713 else
8714 di_offset = TREE_INT_CST (offset);
8716 bool overflow;
8717 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8718 total = di_offset.add_with_sign (units, true, &overflow);
8719 if (overflow)
8720 return true;
8722 if (total.high != 0)
8723 return true;
8725 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8726 if (size <= 0)
8727 return true;
8729 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8730 array. */
8731 if (TREE_CODE (base) == ADDR_EXPR)
8733 HOST_WIDE_INT base_size;
8735 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8736 if (base_size > 0 && size < base_size)
8737 size = base_size;
8740 return total.low > (unsigned HOST_WIDE_INT) size;
8743 /* Subroutine of fold_binary. This routine performs all of the
8744 transformations that are common to the equality/inequality
8745 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8746 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8747 fold_binary should call fold_binary. Fold a comparison with
8748 tree code CODE and type TYPE with operands OP0 and OP1. Return
8749 the folded comparison or NULL_TREE. */
8751 static tree
8752 fold_comparison (location_t loc, enum tree_code code, tree type,
8753 tree op0, tree op1)
8755 tree arg0, arg1, tem;
8757 arg0 = op0;
8758 arg1 = op1;
8760 STRIP_SIGN_NOPS (arg0);
8761 STRIP_SIGN_NOPS (arg1);
8763 tem = fold_relational_const (code, type, arg0, arg1);
8764 if (tem != NULL_TREE)
8765 return tem;
8767 /* If one arg is a real or integer constant, put it last. */
8768 if (tree_swap_operands_p (arg0, arg1, true))
8769 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8771 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8772 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8773 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8774 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8775 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8776 && (TREE_CODE (arg1) == INTEGER_CST
8777 && !TREE_OVERFLOW (arg1)))
8779 tree const1 = TREE_OPERAND (arg0, 1);
8780 tree const2 = arg1;
8781 tree variable = TREE_OPERAND (arg0, 0);
8782 tree lhs;
8783 int lhs_add;
8784 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8786 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8787 TREE_TYPE (arg1), const2, const1);
8789 /* If the constant operation overflowed this can be
8790 simplified as a comparison against INT_MAX/INT_MIN. */
8791 if (TREE_CODE (lhs) == INTEGER_CST
8792 && TREE_OVERFLOW (lhs))
8794 int const1_sgn = tree_int_cst_sgn (const1);
8795 enum tree_code code2 = code;
8797 /* Get the sign of the constant on the lhs if the
8798 operation were VARIABLE + CONST1. */
8799 if (TREE_CODE (arg0) == MINUS_EXPR)
8800 const1_sgn = -const1_sgn;
8802 /* The sign of the constant determines if we overflowed
8803 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8804 Canonicalize to the INT_MIN overflow by swapping the comparison
8805 if necessary. */
8806 if (const1_sgn == -1)
8807 code2 = swap_tree_comparison (code);
8809 /* We now can look at the canonicalized case
8810 VARIABLE + 1 CODE2 INT_MIN
8811 and decide on the result. */
8812 if (code2 == LT_EXPR
8813 || code2 == LE_EXPR
8814 || code2 == EQ_EXPR)
8815 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8816 else if (code2 == NE_EXPR
8817 || code2 == GE_EXPR
8818 || code2 == GT_EXPR)
8819 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8822 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8823 && (TREE_CODE (lhs) != INTEGER_CST
8824 || !TREE_OVERFLOW (lhs)))
8826 if (code != EQ_EXPR && code != NE_EXPR)
8827 fold_overflow_warning ("assuming signed overflow does not occur "
8828 "when changing X +- C1 cmp C2 to "
8829 "X cmp C1 +- C2",
8830 WARN_STRICT_OVERFLOW_COMPARISON);
8831 return fold_build2_loc (loc, code, type, variable, lhs);
8835 /* For comparisons of pointers we can decompose it to a compile time
8836 comparison of the base objects and the offsets into the object.
8837 This requires at least one operand being an ADDR_EXPR or a
8838 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8839 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8840 && (TREE_CODE (arg0) == ADDR_EXPR
8841 || TREE_CODE (arg1) == ADDR_EXPR
8842 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8843 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8845 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8846 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8847 enum machine_mode mode;
8848 int volatilep, unsignedp;
8849 bool indirect_base0 = false, indirect_base1 = false;
8851 /* Get base and offset for the access. Strip ADDR_EXPR for
8852 get_inner_reference, but put it back by stripping INDIRECT_REF
8853 off the base object if possible. indirect_baseN will be true
8854 if baseN is not an address but refers to the object itself. */
8855 base0 = arg0;
8856 if (TREE_CODE (arg0) == ADDR_EXPR)
8858 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8859 &bitsize, &bitpos0, &offset0, &mode,
8860 &unsignedp, &volatilep, false);
8861 if (TREE_CODE (base0) == INDIRECT_REF)
8862 base0 = TREE_OPERAND (base0, 0);
8863 else
8864 indirect_base0 = true;
8866 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8868 base0 = TREE_OPERAND (arg0, 0);
8869 STRIP_SIGN_NOPS (base0);
8870 if (TREE_CODE (base0) == ADDR_EXPR)
8872 base0 = TREE_OPERAND (base0, 0);
8873 indirect_base0 = true;
8875 offset0 = TREE_OPERAND (arg0, 1);
8876 if (host_integerp (offset0, 0))
8878 HOST_WIDE_INT off = size_low_cst (offset0);
8879 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8880 * BITS_PER_UNIT)
8881 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8883 bitpos0 = off * BITS_PER_UNIT;
8884 offset0 = NULL_TREE;
8889 base1 = arg1;
8890 if (TREE_CODE (arg1) == ADDR_EXPR)
8892 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8893 &bitsize, &bitpos1, &offset1, &mode,
8894 &unsignedp, &volatilep, false);
8895 if (TREE_CODE (base1) == INDIRECT_REF)
8896 base1 = TREE_OPERAND (base1, 0);
8897 else
8898 indirect_base1 = true;
8900 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8902 base1 = TREE_OPERAND (arg1, 0);
8903 STRIP_SIGN_NOPS (base1);
8904 if (TREE_CODE (base1) == ADDR_EXPR)
8906 base1 = TREE_OPERAND (base1, 0);
8907 indirect_base1 = true;
8909 offset1 = TREE_OPERAND (arg1, 1);
8910 if (host_integerp (offset1, 0))
8912 HOST_WIDE_INT off = size_low_cst (offset1);
8913 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8914 * BITS_PER_UNIT)
8915 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8917 bitpos1 = off * BITS_PER_UNIT;
8918 offset1 = NULL_TREE;
8923 /* A local variable can never be pointed to by
8924 the default SSA name of an incoming parameter. */
8925 if ((TREE_CODE (arg0) == ADDR_EXPR
8926 && indirect_base0
8927 && TREE_CODE (base0) == VAR_DECL
8928 && auto_var_in_fn_p (base0, current_function_decl)
8929 && !indirect_base1
8930 && TREE_CODE (base1) == SSA_NAME
8931 && SSA_NAME_IS_DEFAULT_DEF (base1)
8932 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8933 || (TREE_CODE (arg1) == ADDR_EXPR
8934 && indirect_base1
8935 && TREE_CODE (base1) == VAR_DECL
8936 && auto_var_in_fn_p (base1, current_function_decl)
8937 && !indirect_base0
8938 && TREE_CODE (base0) == SSA_NAME
8939 && SSA_NAME_IS_DEFAULT_DEF (base0)
8940 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8942 if (code == NE_EXPR)
8943 return constant_boolean_node (1, type);
8944 else if (code == EQ_EXPR)
8945 return constant_boolean_node (0, type);
8947 /* If we have equivalent bases we might be able to simplify. */
8948 else if (indirect_base0 == indirect_base1
8949 && operand_equal_p (base0, base1, 0))
8951 /* We can fold this expression to a constant if the non-constant
8952 offset parts are equal. */
8953 if ((offset0 == offset1
8954 || (offset0 && offset1
8955 && operand_equal_p (offset0, offset1, 0)))
8956 && (code == EQ_EXPR
8957 || code == NE_EXPR
8958 || (indirect_base0 && DECL_P (base0))
8959 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8962 if (code != EQ_EXPR
8963 && code != NE_EXPR
8964 && bitpos0 != bitpos1
8965 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8966 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8967 fold_overflow_warning (("assuming pointer wraparound does not "
8968 "occur when comparing P +- C1 with "
8969 "P +- C2"),
8970 WARN_STRICT_OVERFLOW_CONDITIONAL);
8972 switch (code)
8974 case EQ_EXPR:
8975 return constant_boolean_node (bitpos0 == bitpos1, type);
8976 case NE_EXPR:
8977 return constant_boolean_node (bitpos0 != bitpos1, type);
8978 case LT_EXPR:
8979 return constant_boolean_node (bitpos0 < bitpos1, type);
8980 case LE_EXPR:
8981 return constant_boolean_node (bitpos0 <= bitpos1, type);
8982 case GE_EXPR:
8983 return constant_boolean_node (bitpos0 >= bitpos1, type);
8984 case GT_EXPR:
8985 return constant_boolean_node (bitpos0 > bitpos1, type);
8986 default:;
8989 /* We can simplify the comparison to a comparison of the variable
8990 offset parts if the constant offset parts are equal.
8991 Be careful to use signed size type here because otherwise we
8992 mess with array offsets in the wrong way. This is possible
8993 because pointer arithmetic is restricted to retain within an
8994 object and overflow on pointer differences is undefined as of
8995 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8996 else if (bitpos0 == bitpos1
8997 && ((code == EQ_EXPR || code == NE_EXPR)
8998 || (indirect_base0 && DECL_P (base0))
8999 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9001 /* By converting to signed size type we cover middle-end pointer
9002 arithmetic which operates on unsigned pointer types of size
9003 type size and ARRAY_REF offsets which are properly sign or
9004 zero extended from their type in case it is narrower than
9005 size type. */
9006 if (offset0 == NULL_TREE)
9007 offset0 = build_int_cst (ssizetype, 0);
9008 else
9009 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9010 if (offset1 == NULL_TREE)
9011 offset1 = build_int_cst (ssizetype, 0);
9012 else
9013 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9015 if (code != EQ_EXPR
9016 && code != NE_EXPR
9017 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9018 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9019 fold_overflow_warning (("assuming pointer wraparound does not "
9020 "occur when comparing P +- C1 with "
9021 "P +- C2"),
9022 WARN_STRICT_OVERFLOW_COMPARISON);
9024 return fold_build2_loc (loc, code, type, offset0, offset1);
9027 /* For non-equal bases we can simplify if they are addresses
9028 of local binding decls or constants. */
9029 else if (indirect_base0 && indirect_base1
9030 /* We know that !operand_equal_p (base0, base1, 0)
9031 because the if condition was false. But make
9032 sure two decls are not the same. */
9033 && base0 != base1
9034 && TREE_CODE (arg0) == ADDR_EXPR
9035 && TREE_CODE (arg1) == ADDR_EXPR
9036 && (((TREE_CODE (base0) == VAR_DECL
9037 || TREE_CODE (base0) == PARM_DECL)
9038 && (targetm.binds_local_p (base0)
9039 || CONSTANT_CLASS_P (base1)))
9040 || CONSTANT_CLASS_P (base0))
9041 && (((TREE_CODE (base1) == VAR_DECL
9042 || TREE_CODE (base1) == PARM_DECL)
9043 && (targetm.binds_local_p (base1)
9044 || CONSTANT_CLASS_P (base0)))
9045 || CONSTANT_CLASS_P (base1)))
9047 if (code == EQ_EXPR)
9048 return omit_two_operands_loc (loc, type, boolean_false_node,
9049 arg0, arg1);
9050 else if (code == NE_EXPR)
9051 return omit_two_operands_loc (loc, type, boolean_true_node,
9052 arg0, arg1);
9054 /* For equal offsets we can simplify to a comparison of the
9055 base addresses. */
9056 else if (bitpos0 == bitpos1
9057 && (indirect_base0
9058 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9059 && (indirect_base1
9060 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9061 && ((offset0 == offset1)
9062 || (offset0 && offset1
9063 && operand_equal_p (offset0, offset1, 0))))
9065 if (indirect_base0)
9066 base0 = build_fold_addr_expr_loc (loc, base0);
9067 if (indirect_base1)
9068 base1 = build_fold_addr_expr_loc (loc, base1);
9069 return fold_build2_loc (loc, code, type, base0, base1);
9073 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9074 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9075 the resulting offset is smaller in absolute value than the
9076 original one. */
9077 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9078 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9079 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9080 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9081 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9082 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9083 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9085 tree const1 = TREE_OPERAND (arg0, 1);
9086 tree const2 = TREE_OPERAND (arg1, 1);
9087 tree variable1 = TREE_OPERAND (arg0, 0);
9088 tree variable2 = TREE_OPERAND (arg1, 0);
9089 tree cst;
9090 const char * const warnmsg = G_("assuming signed overflow does not "
9091 "occur when combining constants around "
9092 "a comparison");
9094 /* Put the constant on the side where it doesn't overflow and is
9095 of lower absolute value than before. */
9096 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9097 ? MINUS_EXPR : PLUS_EXPR,
9098 const2, const1);
9099 if (!TREE_OVERFLOW (cst)
9100 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9102 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9103 return fold_build2_loc (loc, code, type,
9104 variable1,
9105 fold_build2_loc (loc,
9106 TREE_CODE (arg1), TREE_TYPE (arg1),
9107 variable2, cst));
9110 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9111 ? MINUS_EXPR : PLUS_EXPR,
9112 const1, const2);
9113 if (!TREE_OVERFLOW (cst)
9114 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9116 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9117 return fold_build2_loc (loc, code, type,
9118 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9119 variable1, cst),
9120 variable2);
9124 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9125 signed arithmetic case. That form is created by the compiler
9126 often enough for folding it to be of value. One example is in
9127 computing loop trip counts after Operator Strength Reduction. */
9128 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9129 && TREE_CODE (arg0) == MULT_EXPR
9130 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9131 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9132 && integer_zerop (arg1))
9134 tree const1 = TREE_OPERAND (arg0, 1);
9135 tree const2 = arg1; /* zero */
9136 tree variable1 = TREE_OPERAND (arg0, 0);
9137 enum tree_code cmp_code = code;
9139 /* Handle unfolded multiplication by zero. */
9140 if (integer_zerop (const1))
9141 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9143 fold_overflow_warning (("assuming signed overflow does not occur when "
9144 "eliminating multiplication in comparison "
9145 "with zero"),
9146 WARN_STRICT_OVERFLOW_COMPARISON);
9148 /* If const1 is negative we swap the sense of the comparison. */
9149 if (tree_int_cst_sgn (const1) < 0)
9150 cmp_code = swap_tree_comparison (cmp_code);
9152 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9155 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9156 if (tem)
9157 return tem;
9159 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9161 tree targ0 = strip_float_extensions (arg0);
9162 tree targ1 = strip_float_extensions (arg1);
9163 tree newtype = TREE_TYPE (targ0);
9165 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9166 newtype = TREE_TYPE (targ1);
9168 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9169 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9170 return fold_build2_loc (loc, code, type,
9171 fold_convert_loc (loc, newtype, targ0),
9172 fold_convert_loc (loc, newtype, targ1));
9174 /* (-a) CMP (-b) -> b CMP a */
9175 if (TREE_CODE (arg0) == NEGATE_EXPR
9176 && TREE_CODE (arg1) == NEGATE_EXPR)
9177 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9178 TREE_OPERAND (arg0, 0));
9180 if (TREE_CODE (arg1) == REAL_CST)
9182 REAL_VALUE_TYPE cst;
9183 cst = TREE_REAL_CST (arg1);
9185 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9186 if (TREE_CODE (arg0) == NEGATE_EXPR)
9187 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9188 TREE_OPERAND (arg0, 0),
9189 build_real (TREE_TYPE (arg1),
9190 real_value_negate (&cst)));
9192 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9193 /* a CMP (-0) -> a CMP 0 */
9194 if (REAL_VALUE_MINUS_ZERO (cst))
9195 return fold_build2_loc (loc, code, type, arg0,
9196 build_real (TREE_TYPE (arg1), dconst0));
9198 /* x != NaN is always true, other ops are always false. */
9199 if (REAL_VALUE_ISNAN (cst)
9200 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9202 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9203 return omit_one_operand_loc (loc, type, tem, arg0);
9206 /* Fold comparisons against infinity. */
9207 if (REAL_VALUE_ISINF (cst)
9208 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9210 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9211 if (tem != NULL_TREE)
9212 return tem;
9216 /* If this is a comparison of a real constant with a PLUS_EXPR
9217 or a MINUS_EXPR of a real constant, we can convert it into a
9218 comparison with a revised real constant as long as no overflow
9219 occurs when unsafe_math_optimizations are enabled. */
9220 if (flag_unsafe_math_optimizations
9221 && TREE_CODE (arg1) == REAL_CST
9222 && (TREE_CODE (arg0) == PLUS_EXPR
9223 || TREE_CODE (arg0) == MINUS_EXPR)
9224 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9225 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9226 ? MINUS_EXPR : PLUS_EXPR,
9227 arg1, TREE_OPERAND (arg0, 1)))
9228 && !TREE_OVERFLOW (tem))
9229 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9231 /* Likewise, we can simplify a comparison of a real constant with
9232 a MINUS_EXPR whose first operand is also a real constant, i.e.
9233 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9234 floating-point types only if -fassociative-math is set. */
9235 if (flag_associative_math
9236 && TREE_CODE (arg1) == REAL_CST
9237 && TREE_CODE (arg0) == MINUS_EXPR
9238 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9239 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9240 arg1))
9241 && !TREE_OVERFLOW (tem))
9242 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9243 TREE_OPERAND (arg0, 1), tem);
9245 /* Fold comparisons against built-in math functions. */
9246 if (TREE_CODE (arg1) == REAL_CST
9247 && flag_unsafe_math_optimizations
9248 && ! flag_errno_math)
9250 enum built_in_function fcode = builtin_mathfn_code (arg0);
9252 if (fcode != END_BUILTINS)
9254 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9255 if (tem != NULL_TREE)
9256 return tem;
9261 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9262 && CONVERT_EXPR_P (arg0))
9264 /* If we are widening one operand of an integer comparison,
9265 see if the other operand is similarly being widened. Perhaps we
9266 can do the comparison in the narrower type. */
9267 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9268 if (tem)
9269 return tem;
9271 /* Or if we are changing signedness. */
9272 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9273 if (tem)
9274 return tem;
9277 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9278 constant, we can simplify it. */
9279 if (TREE_CODE (arg1) == INTEGER_CST
9280 && (TREE_CODE (arg0) == MIN_EXPR
9281 || TREE_CODE (arg0) == MAX_EXPR)
9282 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9284 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9285 if (tem)
9286 return tem;
9289 /* Simplify comparison of something with itself. (For IEEE
9290 floating-point, we can only do some of these simplifications.) */
9291 if (operand_equal_p (arg0, arg1, 0))
9293 switch (code)
9295 case EQ_EXPR:
9296 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9297 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9298 return constant_boolean_node (1, type);
9299 break;
9301 case GE_EXPR:
9302 case LE_EXPR:
9303 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9304 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9305 return constant_boolean_node (1, type);
9306 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9308 case NE_EXPR:
9309 /* For NE, we can only do this simplification if integer
9310 or we don't honor IEEE floating point NaNs. */
9311 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9312 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9313 break;
9314 /* ... fall through ... */
9315 case GT_EXPR:
9316 case LT_EXPR:
9317 return constant_boolean_node (0, type);
9318 default:
9319 gcc_unreachable ();
9323 /* If we are comparing an expression that just has comparisons
9324 of two integer values, arithmetic expressions of those comparisons,
9325 and constants, we can simplify it. There are only three cases
9326 to check: the two values can either be equal, the first can be
9327 greater, or the second can be greater. Fold the expression for
9328 those three values. Since each value must be 0 or 1, we have
9329 eight possibilities, each of which corresponds to the constant 0
9330 or 1 or one of the six possible comparisons.
9332 This handles common cases like (a > b) == 0 but also handles
9333 expressions like ((x > y) - (y > x)) > 0, which supposedly
9334 occur in macroized code. */
9336 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9338 tree cval1 = 0, cval2 = 0;
9339 int save_p = 0;
9341 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9342 /* Don't handle degenerate cases here; they should already
9343 have been handled anyway. */
9344 && cval1 != 0 && cval2 != 0
9345 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9346 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9347 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9348 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9349 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9350 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9351 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9353 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9354 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9356 /* We can't just pass T to eval_subst in case cval1 or cval2
9357 was the same as ARG1. */
9359 tree high_result
9360 = fold_build2_loc (loc, code, type,
9361 eval_subst (loc, arg0, cval1, maxval,
9362 cval2, minval),
9363 arg1);
9364 tree equal_result
9365 = fold_build2_loc (loc, code, type,
9366 eval_subst (loc, arg0, cval1, maxval,
9367 cval2, maxval),
9368 arg1);
9369 tree low_result
9370 = fold_build2_loc (loc, code, type,
9371 eval_subst (loc, arg0, cval1, minval,
9372 cval2, maxval),
9373 arg1);
9375 /* All three of these results should be 0 or 1. Confirm they are.
9376 Then use those values to select the proper code to use. */
9378 if (TREE_CODE (high_result) == INTEGER_CST
9379 && TREE_CODE (equal_result) == INTEGER_CST
9380 && TREE_CODE (low_result) == INTEGER_CST)
9382 /* Make a 3-bit mask with the high-order bit being the
9383 value for `>', the next for '=', and the low for '<'. */
9384 switch ((integer_onep (high_result) * 4)
9385 + (integer_onep (equal_result) * 2)
9386 + integer_onep (low_result))
9388 case 0:
9389 /* Always false. */
9390 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9391 case 1:
9392 code = LT_EXPR;
9393 break;
9394 case 2:
9395 code = EQ_EXPR;
9396 break;
9397 case 3:
9398 code = LE_EXPR;
9399 break;
9400 case 4:
9401 code = GT_EXPR;
9402 break;
9403 case 5:
9404 code = NE_EXPR;
9405 break;
9406 case 6:
9407 code = GE_EXPR;
9408 break;
9409 case 7:
9410 /* Always true. */
9411 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9414 if (save_p)
9416 tem = save_expr (build2 (code, type, cval1, cval2));
9417 SET_EXPR_LOCATION (tem, loc);
9418 return tem;
9420 return fold_build2_loc (loc, code, type, cval1, cval2);
9425 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9426 into a single range test. */
9427 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9428 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9429 && TREE_CODE (arg1) == INTEGER_CST
9430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9431 && !integer_zerop (TREE_OPERAND (arg0, 1))
9432 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9433 && !TREE_OVERFLOW (arg1))
9435 tem = fold_div_compare (loc, code, type, arg0, arg1);
9436 if (tem != NULL_TREE)
9437 return tem;
9440 /* Fold ~X op ~Y as Y op X. */
9441 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9442 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9444 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9445 return fold_build2_loc (loc, code, type,
9446 fold_convert_loc (loc, cmp_type,
9447 TREE_OPERAND (arg1, 0)),
9448 TREE_OPERAND (arg0, 0));
9451 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9452 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9453 && TREE_CODE (arg1) == INTEGER_CST)
9455 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9456 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9457 TREE_OPERAND (arg0, 0),
9458 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9459 fold_convert_loc (loc, cmp_type, arg1)));
9462 return NULL_TREE;
9466 /* Subroutine of fold_binary. Optimize complex multiplications of the
9467 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9468 argument EXPR represents the expression "z" of type TYPE. */
9470 static tree
9471 fold_mult_zconjz (location_t loc, tree type, tree expr)
9473 tree itype = TREE_TYPE (type);
9474 tree rpart, ipart, tem;
9476 if (TREE_CODE (expr) == COMPLEX_EXPR)
9478 rpart = TREE_OPERAND (expr, 0);
9479 ipart = TREE_OPERAND (expr, 1);
9481 else if (TREE_CODE (expr) == COMPLEX_CST)
9483 rpart = TREE_REALPART (expr);
9484 ipart = TREE_IMAGPART (expr);
9486 else
9488 expr = save_expr (expr);
9489 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9490 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9493 rpart = save_expr (rpart);
9494 ipart = save_expr (ipart);
9495 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9496 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9497 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9498 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9499 build_zero_cst (itype));
9503 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9504 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9505 guarantees that P and N have the same least significant log2(M) bits.
9506 N is not otherwise constrained. In particular, N is not normalized to
9507 0 <= N < M as is common. In general, the precise value of P is unknown.
9508 M is chosen as large as possible such that constant N can be determined.
9510 Returns M and sets *RESIDUE to N.
9512 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9513 account. This is not always possible due to PR 35705.
9516 static unsigned HOST_WIDE_INT
9517 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9518 bool allow_func_align)
9520 enum tree_code code;
9522 *residue = 0;
9524 code = TREE_CODE (expr);
9525 if (code == ADDR_EXPR)
9527 unsigned int bitalign;
9528 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9529 *residue /= BITS_PER_UNIT;
9530 return bitalign / BITS_PER_UNIT;
9532 else if (code == POINTER_PLUS_EXPR)
9534 tree op0, op1;
9535 unsigned HOST_WIDE_INT modulus;
9536 enum tree_code inner_code;
9538 op0 = TREE_OPERAND (expr, 0);
9539 STRIP_NOPS (op0);
9540 modulus = get_pointer_modulus_and_residue (op0, residue,
9541 allow_func_align);
9543 op1 = TREE_OPERAND (expr, 1);
9544 STRIP_NOPS (op1);
9545 inner_code = TREE_CODE (op1);
9546 if (inner_code == INTEGER_CST)
9548 *residue += TREE_INT_CST_LOW (op1);
9549 return modulus;
9551 else if (inner_code == MULT_EXPR)
9553 op1 = TREE_OPERAND (op1, 1);
9554 if (TREE_CODE (op1) == INTEGER_CST)
9556 unsigned HOST_WIDE_INT align;
9558 /* Compute the greatest power-of-2 divisor of op1. */
9559 align = TREE_INT_CST_LOW (op1);
9560 align &= -align;
9562 /* If align is non-zero and less than *modulus, replace
9563 *modulus with align., If align is 0, then either op1 is 0
9564 or the greatest power-of-2 divisor of op1 doesn't fit in an
9565 unsigned HOST_WIDE_INT. In either case, no additional
9566 constraint is imposed. */
9567 if (align)
9568 modulus = MIN (modulus, align);
9570 return modulus;
9575 /* If we get here, we were unable to determine anything useful about the
9576 expression. */
9577 return 1;
9580 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9581 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9583 static bool
9584 vec_cst_ctor_to_array (tree arg, tree *elts)
9586 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9588 if (TREE_CODE (arg) == VECTOR_CST)
9590 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9591 elts[i] = VECTOR_CST_ELT (arg, i);
9593 else if (TREE_CODE (arg) == CONSTRUCTOR)
9595 constructor_elt *elt;
9597 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9598 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9599 return false;
9600 else
9601 elts[i] = elt->value;
9603 else
9604 return false;
9605 for (; i < nelts; i++)
9606 elts[i]
9607 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9608 return true;
9611 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9612 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9613 NULL_TREE otherwise. */
9615 static tree
9616 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9618 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9619 tree *elts;
9620 bool need_ctor = false;
9622 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9623 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9624 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9625 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9626 return NULL_TREE;
9628 elts = XALLOCAVEC (tree, nelts * 3);
9629 if (!vec_cst_ctor_to_array (arg0, elts)
9630 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9631 return NULL_TREE;
9633 for (i = 0; i < nelts; i++)
9635 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9636 need_ctor = true;
9637 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9640 if (need_ctor)
9642 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9643 for (i = 0; i < nelts; i++)
9644 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9645 return build_constructor (type, v);
9647 else
9648 return build_vector (type, &elts[2 * nelts]);
9651 /* Try to fold a pointer difference of type TYPE two address expressions of
9652 array references AREF0 and AREF1 using location LOC. Return a
9653 simplified expression for the difference or NULL_TREE. */
9655 static tree
9656 fold_addr_of_array_ref_difference (location_t loc, tree type,
9657 tree aref0, tree aref1)
9659 tree base0 = TREE_OPERAND (aref0, 0);
9660 tree base1 = TREE_OPERAND (aref1, 0);
9661 tree base_offset = build_int_cst (type, 0);
9663 /* If the bases are array references as well, recurse. If the bases
9664 are pointer indirections compute the difference of the pointers.
9665 If the bases are equal, we are set. */
9666 if ((TREE_CODE (base0) == ARRAY_REF
9667 && TREE_CODE (base1) == ARRAY_REF
9668 && (base_offset
9669 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9670 || (INDIRECT_REF_P (base0)
9671 && INDIRECT_REF_P (base1)
9672 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9673 TREE_OPERAND (base0, 0),
9674 TREE_OPERAND (base1, 0))))
9675 || operand_equal_p (base0, base1, 0))
9677 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9678 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9679 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9680 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9681 return fold_build2_loc (loc, PLUS_EXPR, type,
9682 base_offset,
9683 fold_build2_loc (loc, MULT_EXPR, type,
9684 diff, esz));
9686 return NULL_TREE;
9689 /* If the real or vector real constant CST of type TYPE has an exact
9690 inverse, return it, else return NULL. */
9692 static tree
9693 exact_inverse (tree type, tree cst)
9695 REAL_VALUE_TYPE r;
9696 tree unit_type, *elts;
9697 enum machine_mode mode;
9698 unsigned vec_nelts, i;
9700 switch (TREE_CODE (cst))
9702 case REAL_CST:
9703 r = TREE_REAL_CST (cst);
9705 if (exact_real_inverse (TYPE_MODE (type), &r))
9706 return build_real (type, r);
9708 return NULL_TREE;
9710 case VECTOR_CST:
9711 vec_nelts = VECTOR_CST_NELTS (cst);
9712 elts = XALLOCAVEC (tree, vec_nelts);
9713 unit_type = TREE_TYPE (type);
9714 mode = TYPE_MODE (unit_type);
9716 for (i = 0; i < vec_nelts; i++)
9718 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9719 if (!exact_real_inverse (mode, &r))
9720 return NULL_TREE;
9721 elts[i] = build_real (unit_type, r);
9724 return build_vector (type, elts);
9726 default:
9727 return NULL_TREE;
9731 /* Fold a binary expression of code CODE and type TYPE with operands
9732 OP0 and OP1. LOC is the location of the resulting expression.
9733 Return the folded expression if folding is successful. Otherwise,
9734 return NULL_TREE. */
9736 tree
9737 fold_binary_loc (location_t loc,
9738 enum tree_code code, tree type, tree op0, tree op1)
9740 enum tree_code_class kind = TREE_CODE_CLASS (code);
9741 tree arg0, arg1, tem;
9742 tree t1 = NULL_TREE;
9743 bool strict_overflow_p;
9745 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9746 && TREE_CODE_LENGTH (code) == 2
9747 && op0 != NULL_TREE
9748 && op1 != NULL_TREE);
9750 arg0 = op0;
9751 arg1 = op1;
9753 /* Strip any conversions that don't change the mode. This is
9754 safe for every expression, except for a comparison expression
9755 because its signedness is derived from its operands. So, in
9756 the latter case, only strip conversions that don't change the
9757 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9758 preserved.
9760 Note that this is done as an internal manipulation within the
9761 constant folder, in order to find the simplest representation
9762 of the arguments so that their form can be studied. In any
9763 cases, the appropriate type conversions should be put back in
9764 the tree that will get out of the constant folder. */
9766 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9768 STRIP_SIGN_NOPS (arg0);
9769 STRIP_SIGN_NOPS (arg1);
9771 else
9773 STRIP_NOPS (arg0);
9774 STRIP_NOPS (arg1);
9777 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9778 constant but we can't do arithmetic on them. */
9779 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9780 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9781 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9782 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9783 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9784 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9786 if (kind == tcc_binary)
9788 /* Make sure type and arg0 have the same saturating flag. */
9789 gcc_assert (TYPE_SATURATING (type)
9790 == TYPE_SATURATING (TREE_TYPE (arg0)));
9791 tem = const_binop (code, arg0, arg1);
9793 else if (kind == tcc_comparison)
9794 tem = fold_relational_const (code, type, arg0, arg1);
9795 else
9796 tem = NULL_TREE;
9798 if (tem != NULL_TREE)
9800 if (TREE_TYPE (tem) != type)
9801 tem = fold_convert_loc (loc, type, tem);
9802 return tem;
9806 /* If this is a commutative operation, and ARG0 is a constant, move it
9807 to ARG1 to reduce the number of tests below. */
9808 if (commutative_tree_code (code)
9809 && tree_swap_operands_p (arg0, arg1, true))
9810 return fold_build2_loc (loc, code, type, op1, op0);
9812 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9814 First check for cases where an arithmetic operation is applied to a
9815 compound, conditional, or comparison operation. Push the arithmetic
9816 operation inside the compound or conditional to see if any folding
9817 can then be done. Convert comparison to conditional for this purpose.
9818 The also optimizes non-constant cases that used to be done in
9819 expand_expr.
9821 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9822 one of the operands is a comparison and the other is a comparison, a
9823 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9824 code below would make the expression more complex. Change it to a
9825 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9826 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9828 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9829 || code == EQ_EXPR || code == NE_EXPR)
9830 && TREE_CODE (type) != VECTOR_TYPE
9831 && ((truth_value_p (TREE_CODE (arg0))
9832 && (truth_value_p (TREE_CODE (arg1))
9833 || (TREE_CODE (arg1) == BIT_AND_EXPR
9834 && integer_onep (TREE_OPERAND (arg1, 1)))))
9835 || (truth_value_p (TREE_CODE (arg1))
9836 && (truth_value_p (TREE_CODE (arg0))
9837 || (TREE_CODE (arg0) == BIT_AND_EXPR
9838 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9840 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9841 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9842 : TRUTH_XOR_EXPR,
9843 boolean_type_node,
9844 fold_convert_loc (loc, boolean_type_node, arg0),
9845 fold_convert_loc (loc, boolean_type_node, arg1));
9847 if (code == EQ_EXPR)
9848 tem = invert_truthvalue_loc (loc, tem);
9850 return fold_convert_loc (loc, type, tem);
9853 if (TREE_CODE_CLASS (code) == tcc_binary
9854 || TREE_CODE_CLASS (code) == tcc_comparison)
9856 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9858 tem = fold_build2_loc (loc, code, type,
9859 fold_convert_loc (loc, TREE_TYPE (op0),
9860 TREE_OPERAND (arg0, 1)), op1);
9861 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9862 tem);
9864 if (TREE_CODE (arg1) == COMPOUND_EXPR
9865 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9867 tem = fold_build2_loc (loc, code, type, op0,
9868 fold_convert_loc (loc, TREE_TYPE (op1),
9869 TREE_OPERAND (arg1, 1)));
9870 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9871 tem);
9874 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9876 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9877 arg0, arg1,
9878 /*cond_first_p=*/1);
9879 if (tem != NULL_TREE)
9880 return tem;
9883 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9885 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9886 arg1, arg0,
9887 /*cond_first_p=*/0);
9888 if (tem != NULL_TREE)
9889 return tem;
9893 switch (code)
9895 case MEM_REF:
9896 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9897 if (TREE_CODE (arg0) == ADDR_EXPR
9898 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9900 tree iref = TREE_OPERAND (arg0, 0);
9901 return fold_build2 (MEM_REF, type,
9902 TREE_OPERAND (iref, 0),
9903 int_const_binop (PLUS_EXPR, arg1,
9904 TREE_OPERAND (iref, 1)));
9907 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9908 if (TREE_CODE (arg0) == ADDR_EXPR
9909 && handled_component_p (TREE_OPERAND (arg0, 0)))
9911 tree base;
9912 HOST_WIDE_INT coffset;
9913 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9914 &coffset);
9915 if (!base)
9916 return NULL_TREE;
9917 return fold_build2 (MEM_REF, type,
9918 build_fold_addr_expr (base),
9919 int_const_binop (PLUS_EXPR, arg1,
9920 size_int (coffset)));
9923 return NULL_TREE;
9925 case POINTER_PLUS_EXPR:
9926 /* 0 +p index -> (type)index */
9927 if (integer_zerop (arg0))
9928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9930 /* PTR +p 0 -> PTR */
9931 if (integer_zerop (arg1))
9932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9934 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9935 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9936 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9937 return fold_convert_loc (loc, type,
9938 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9939 fold_convert_loc (loc, sizetype,
9940 arg1),
9941 fold_convert_loc (loc, sizetype,
9942 arg0)));
9944 /* (PTR +p B) +p A -> PTR +p (B + A) */
9945 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9947 tree inner;
9948 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9949 tree arg00 = TREE_OPERAND (arg0, 0);
9950 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9951 arg01, fold_convert_loc (loc, sizetype, arg1));
9952 return fold_convert_loc (loc, type,
9953 fold_build_pointer_plus_loc (loc,
9954 arg00, inner));
9957 /* PTR_CST +p CST -> CST1 */
9958 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9959 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9960 fold_convert_loc (loc, type, arg1));
9962 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9963 of the array. Loop optimizer sometimes produce this type of
9964 expressions. */
9965 if (TREE_CODE (arg0) == ADDR_EXPR)
9967 tem = try_move_mult_to_index (loc, arg0,
9968 fold_convert_loc (loc,
9969 ssizetype, arg1));
9970 if (tem)
9971 return fold_convert_loc (loc, type, tem);
9974 return NULL_TREE;
9976 case PLUS_EXPR:
9977 /* A + (-B) -> A - B */
9978 if (TREE_CODE (arg1) == NEGATE_EXPR)
9979 return fold_build2_loc (loc, MINUS_EXPR, type,
9980 fold_convert_loc (loc, type, arg0),
9981 fold_convert_loc (loc, type,
9982 TREE_OPERAND (arg1, 0)));
9983 /* (-A) + B -> B - A */
9984 if (TREE_CODE (arg0) == NEGATE_EXPR
9985 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9986 return fold_build2_loc (loc, MINUS_EXPR, type,
9987 fold_convert_loc (loc, type, arg1),
9988 fold_convert_loc (loc, type,
9989 TREE_OPERAND (arg0, 0)));
9991 if (INTEGRAL_TYPE_P (type))
9993 /* Convert ~A + 1 to -A. */
9994 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9995 && integer_onep (arg1))
9996 return fold_build1_loc (loc, NEGATE_EXPR, type,
9997 fold_convert_loc (loc, type,
9998 TREE_OPERAND (arg0, 0)));
10000 /* ~X + X is -1. */
10001 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10002 && !TYPE_OVERFLOW_TRAPS (type))
10004 tree tem = TREE_OPERAND (arg0, 0);
10006 STRIP_NOPS (tem);
10007 if (operand_equal_p (tem, arg1, 0))
10009 t1 = build_int_cst_type (type, -1);
10010 return omit_one_operand_loc (loc, type, t1, arg1);
10014 /* X + ~X is -1. */
10015 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10016 && !TYPE_OVERFLOW_TRAPS (type))
10018 tree tem = TREE_OPERAND (arg1, 0);
10020 STRIP_NOPS (tem);
10021 if (operand_equal_p (arg0, tem, 0))
10023 t1 = build_int_cst_type (type, -1);
10024 return omit_one_operand_loc (loc, type, t1, arg0);
10028 /* X + (X / CST) * -CST is X % CST. */
10029 if (TREE_CODE (arg1) == MULT_EXPR
10030 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10031 && operand_equal_p (arg0,
10032 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10034 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10035 tree cst1 = TREE_OPERAND (arg1, 1);
10036 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10037 cst1, cst0);
10038 if (sum && integer_zerop (sum))
10039 return fold_convert_loc (loc, type,
10040 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10041 TREE_TYPE (arg0), arg0,
10042 cst0));
10046 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10047 one. Make sure the type is not saturating and has the signedness of
10048 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10049 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10050 if ((TREE_CODE (arg0) == MULT_EXPR
10051 || TREE_CODE (arg1) == MULT_EXPR)
10052 && !TYPE_SATURATING (type)
10053 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10054 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10055 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10057 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10058 if (tem)
10059 return tem;
10062 if (! FLOAT_TYPE_P (type))
10064 if (integer_zerop (arg1))
10065 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10067 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10068 with a constant, and the two constants have no bits in common,
10069 we should treat this as a BIT_IOR_EXPR since this may produce more
10070 simplifications. */
10071 if (TREE_CODE (arg0) == BIT_AND_EXPR
10072 && TREE_CODE (arg1) == BIT_AND_EXPR
10073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10074 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10075 && integer_zerop (const_binop (BIT_AND_EXPR,
10076 TREE_OPERAND (arg0, 1),
10077 TREE_OPERAND (arg1, 1))))
10079 code = BIT_IOR_EXPR;
10080 goto bit_ior;
10083 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10084 (plus (plus (mult) (mult)) (foo)) so that we can
10085 take advantage of the factoring cases below. */
10086 if (TYPE_OVERFLOW_WRAPS (type)
10087 && (((TREE_CODE (arg0) == PLUS_EXPR
10088 || TREE_CODE (arg0) == MINUS_EXPR)
10089 && TREE_CODE (arg1) == MULT_EXPR)
10090 || ((TREE_CODE (arg1) == PLUS_EXPR
10091 || TREE_CODE (arg1) == MINUS_EXPR)
10092 && TREE_CODE (arg0) == MULT_EXPR)))
10094 tree parg0, parg1, parg, marg;
10095 enum tree_code pcode;
10097 if (TREE_CODE (arg1) == MULT_EXPR)
10098 parg = arg0, marg = arg1;
10099 else
10100 parg = arg1, marg = arg0;
10101 pcode = TREE_CODE (parg);
10102 parg0 = TREE_OPERAND (parg, 0);
10103 parg1 = TREE_OPERAND (parg, 1);
10104 STRIP_NOPS (parg0);
10105 STRIP_NOPS (parg1);
10107 if (TREE_CODE (parg0) == MULT_EXPR
10108 && TREE_CODE (parg1) != MULT_EXPR)
10109 return fold_build2_loc (loc, pcode, type,
10110 fold_build2_loc (loc, PLUS_EXPR, type,
10111 fold_convert_loc (loc, type,
10112 parg0),
10113 fold_convert_loc (loc, type,
10114 marg)),
10115 fold_convert_loc (loc, type, parg1));
10116 if (TREE_CODE (parg0) != MULT_EXPR
10117 && TREE_CODE (parg1) == MULT_EXPR)
10118 return
10119 fold_build2_loc (loc, PLUS_EXPR, type,
10120 fold_convert_loc (loc, type, parg0),
10121 fold_build2_loc (loc, pcode, type,
10122 fold_convert_loc (loc, type, marg),
10123 fold_convert_loc (loc, type,
10124 parg1)));
10127 else
10129 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10130 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10131 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10133 /* Likewise if the operands are reversed. */
10134 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10135 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10137 /* Convert X + -C into X - C. */
10138 if (TREE_CODE (arg1) == REAL_CST
10139 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10141 tem = fold_negate_const (arg1, type);
10142 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10143 return fold_build2_loc (loc, MINUS_EXPR, type,
10144 fold_convert_loc (loc, type, arg0),
10145 fold_convert_loc (loc, type, tem));
10148 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10149 to __complex__ ( x, y ). This is not the same for SNaNs or
10150 if signed zeros are involved. */
10151 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10152 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10153 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10155 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10156 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10157 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10158 bool arg0rz = false, arg0iz = false;
10159 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10160 || (arg0i && (arg0iz = real_zerop (arg0i))))
10162 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10163 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10164 if (arg0rz && arg1i && real_zerop (arg1i))
10166 tree rp = arg1r ? arg1r
10167 : build1 (REALPART_EXPR, rtype, arg1);
10168 tree ip = arg0i ? arg0i
10169 : build1 (IMAGPART_EXPR, rtype, arg0);
10170 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10172 else if (arg0iz && arg1r && real_zerop (arg1r))
10174 tree rp = arg0r ? arg0r
10175 : build1 (REALPART_EXPR, rtype, arg0);
10176 tree ip = arg1i ? arg1i
10177 : build1 (IMAGPART_EXPR, rtype, arg1);
10178 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10183 if (flag_unsafe_math_optimizations
10184 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10185 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10186 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10187 return tem;
10189 /* Convert x+x into x*2.0. */
10190 if (operand_equal_p (arg0, arg1, 0)
10191 && SCALAR_FLOAT_TYPE_P (type))
10192 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10193 build_real (type, dconst2));
10195 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10196 We associate floats only if the user has specified
10197 -fassociative-math. */
10198 if (flag_associative_math
10199 && TREE_CODE (arg1) == PLUS_EXPR
10200 && TREE_CODE (arg0) != MULT_EXPR)
10202 tree tree10 = TREE_OPERAND (arg1, 0);
10203 tree tree11 = TREE_OPERAND (arg1, 1);
10204 if (TREE_CODE (tree11) == MULT_EXPR
10205 && TREE_CODE (tree10) == MULT_EXPR)
10207 tree tree0;
10208 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10209 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10212 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10213 We associate floats only if the user has specified
10214 -fassociative-math. */
10215 if (flag_associative_math
10216 && TREE_CODE (arg0) == PLUS_EXPR
10217 && TREE_CODE (arg1) != MULT_EXPR)
10219 tree tree00 = TREE_OPERAND (arg0, 0);
10220 tree tree01 = TREE_OPERAND (arg0, 1);
10221 if (TREE_CODE (tree01) == MULT_EXPR
10222 && TREE_CODE (tree00) == MULT_EXPR)
10224 tree tree0;
10225 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10226 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10231 bit_rotate:
10232 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10233 is a rotate of A by C1 bits. */
10234 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10235 is a rotate of A by B bits. */
10237 enum tree_code code0, code1;
10238 tree rtype;
10239 code0 = TREE_CODE (arg0);
10240 code1 = TREE_CODE (arg1);
10241 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10242 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10243 && operand_equal_p (TREE_OPERAND (arg0, 0),
10244 TREE_OPERAND (arg1, 0), 0)
10245 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10246 TYPE_UNSIGNED (rtype))
10247 /* Only create rotates in complete modes. Other cases are not
10248 expanded properly. */
10249 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10251 tree tree01, tree11;
10252 enum tree_code code01, code11;
10254 tree01 = TREE_OPERAND (arg0, 1);
10255 tree11 = TREE_OPERAND (arg1, 1);
10256 STRIP_NOPS (tree01);
10257 STRIP_NOPS (tree11);
10258 code01 = TREE_CODE (tree01);
10259 code11 = TREE_CODE (tree11);
10260 if (code01 == INTEGER_CST
10261 && code11 == INTEGER_CST
10262 && TREE_INT_CST_HIGH (tree01) == 0
10263 && TREE_INT_CST_HIGH (tree11) == 0
10264 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10265 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10267 tem = build2_loc (loc, LROTATE_EXPR,
10268 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10269 TREE_OPERAND (arg0, 0),
10270 code0 == LSHIFT_EXPR ? tree01 : tree11);
10271 return fold_convert_loc (loc, type, tem);
10273 else if (code11 == MINUS_EXPR)
10275 tree tree110, tree111;
10276 tree110 = TREE_OPERAND (tree11, 0);
10277 tree111 = TREE_OPERAND (tree11, 1);
10278 STRIP_NOPS (tree110);
10279 STRIP_NOPS (tree111);
10280 if (TREE_CODE (tree110) == INTEGER_CST
10281 && 0 == compare_tree_int (tree110,
10282 TYPE_PRECISION
10283 (TREE_TYPE (TREE_OPERAND
10284 (arg0, 0))))
10285 && operand_equal_p (tree01, tree111, 0))
10286 return
10287 fold_convert_loc (loc, type,
10288 build2 ((code0 == LSHIFT_EXPR
10289 ? LROTATE_EXPR
10290 : RROTATE_EXPR),
10291 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10292 TREE_OPERAND (arg0, 0), tree01));
10294 else if (code01 == MINUS_EXPR)
10296 tree tree010, tree011;
10297 tree010 = TREE_OPERAND (tree01, 0);
10298 tree011 = TREE_OPERAND (tree01, 1);
10299 STRIP_NOPS (tree010);
10300 STRIP_NOPS (tree011);
10301 if (TREE_CODE (tree010) == INTEGER_CST
10302 && 0 == compare_tree_int (tree010,
10303 TYPE_PRECISION
10304 (TREE_TYPE (TREE_OPERAND
10305 (arg0, 0))))
10306 && operand_equal_p (tree11, tree011, 0))
10307 return fold_convert_loc
10308 (loc, type,
10309 build2 ((code0 != LSHIFT_EXPR
10310 ? LROTATE_EXPR
10311 : RROTATE_EXPR),
10312 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10313 TREE_OPERAND (arg0, 0), tree11));
10318 associate:
10319 /* In most languages, can't associate operations on floats through
10320 parentheses. Rather than remember where the parentheses were, we
10321 don't associate floats at all, unless the user has specified
10322 -fassociative-math.
10323 And, we need to make sure type is not saturating. */
10325 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10326 && !TYPE_SATURATING (type))
10328 tree var0, con0, lit0, minus_lit0;
10329 tree var1, con1, lit1, minus_lit1;
10330 bool ok = true;
10332 /* Split both trees into variables, constants, and literals. Then
10333 associate each group together, the constants with literals,
10334 then the result with variables. This increases the chances of
10335 literals being recombined later and of generating relocatable
10336 expressions for the sum of a constant and literal. */
10337 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10338 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10339 code == MINUS_EXPR);
10341 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10342 if (code == MINUS_EXPR)
10343 code = PLUS_EXPR;
10345 /* With undefined overflow we can only associate constants with one
10346 variable, and constants whose association doesn't overflow. */
10347 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10348 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10350 if (var0 && var1)
10352 tree tmp0 = var0;
10353 tree tmp1 = var1;
10355 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10356 tmp0 = TREE_OPERAND (tmp0, 0);
10357 if (CONVERT_EXPR_P (tmp0)
10358 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10359 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10360 <= TYPE_PRECISION (type)))
10361 tmp0 = TREE_OPERAND (tmp0, 0);
10362 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10363 tmp1 = TREE_OPERAND (tmp1, 0);
10364 if (CONVERT_EXPR_P (tmp1)
10365 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10366 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10367 <= TYPE_PRECISION (type)))
10368 tmp1 = TREE_OPERAND (tmp1, 0);
10369 /* The only case we can still associate with two variables
10370 is if they are the same, modulo negation and bit-pattern
10371 preserving conversions. */
10372 if (!operand_equal_p (tmp0, tmp1, 0))
10373 ok = false;
10376 if (ok && lit0 && lit1)
10378 tree tmp0 = fold_convert (type, lit0);
10379 tree tmp1 = fold_convert (type, lit1);
10381 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10382 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10383 ok = false;
10387 /* Only do something if we found more than two objects. Otherwise,
10388 nothing has changed and we risk infinite recursion. */
10389 if (ok
10390 && (2 < ((var0 != 0) + (var1 != 0)
10391 + (con0 != 0) + (con1 != 0)
10392 + (lit0 != 0) + (lit1 != 0)
10393 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10395 var0 = associate_trees (loc, var0, var1, code, type);
10396 con0 = associate_trees (loc, con0, con1, code, type);
10397 lit0 = associate_trees (loc, lit0, lit1, code, type);
10398 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10400 /* Preserve the MINUS_EXPR if the negative part of the literal is
10401 greater than the positive part. Otherwise, the multiplicative
10402 folding code (i.e extract_muldiv) may be fooled in case
10403 unsigned constants are subtracted, like in the following
10404 example: ((X*2 + 4) - 8U)/2. */
10405 if (minus_lit0 && lit0)
10407 if (TREE_CODE (lit0) == INTEGER_CST
10408 && TREE_CODE (minus_lit0) == INTEGER_CST
10409 && tree_int_cst_lt (lit0, minus_lit0))
10411 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10412 MINUS_EXPR, type);
10413 lit0 = 0;
10415 else
10417 lit0 = associate_trees (loc, lit0, minus_lit0,
10418 MINUS_EXPR, type);
10419 minus_lit0 = 0;
10422 if (minus_lit0)
10424 if (con0 == 0)
10425 return
10426 fold_convert_loc (loc, type,
10427 associate_trees (loc, var0, minus_lit0,
10428 MINUS_EXPR, type));
10429 else
10431 con0 = associate_trees (loc, con0, minus_lit0,
10432 MINUS_EXPR, type);
10433 return
10434 fold_convert_loc (loc, type,
10435 associate_trees (loc, var0, con0,
10436 PLUS_EXPR, type));
10440 con0 = associate_trees (loc, con0, lit0, code, type);
10441 return
10442 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10443 code, type));
10447 return NULL_TREE;
10449 case MINUS_EXPR:
10450 /* Pointer simplifications for subtraction, simple reassociations. */
10451 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10453 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10454 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10455 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10457 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10458 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10459 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10460 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10461 return fold_build2_loc (loc, PLUS_EXPR, type,
10462 fold_build2_loc (loc, MINUS_EXPR, type,
10463 arg00, arg10),
10464 fold_build2_loc (loc, MINUS_EXPR, type,
10465 arg01, arg11));
10467 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10468 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10470 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10471 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10472 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10473 fold_convert_loc (loc, type, arg1));
10474 if (tmp)
10475 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10478 /* A - (-B) -> A + B */
10479 if (TREE_CODE (arg1) == NEGATE_EXPR)
10480 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10481 fold_convert_loc (loc, type,
10482 TREE_OPERAND (arg1, 0)));
10483 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10484 if (TREE_CODE (arg0) == NEGATE_EXPR
10485 && (FLOAT_TYPE_P (type)
10486 || INTEGRAL_TYPE_P (type))
10487 && negate_expr_p (arg1)
10488 && reorder_operands_p (arg0, arg1))
10489 return fold_build2_loc (loc, MINUS_EXPR, type,
10490 fold_convert_loc (loc, type,
10491 negate_expr (arg1)),
10492 fold_convert_loc (loc, type,
10493 TREE_OPERAND (arg0, 0)));
10494 /* Convert -A - 1 to ~A. */
10495 if (INTEGRAL_TYPE_P (type)
10496 && TREE_CODE (arg0) == NEGATE_EXPR
10497 && integer_onep (arg1)
10498 && !TYPE_OVERFLOW_TRAPS (type))
10499 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10500 fold_convert_loc (loc, type,
10501 TREE_OPERAND (arg0, 0)));
10503 /* Convert -1 - A to ~A. */
10504 if (INTEGRAL_TYPE_P (type)
10505 && integer_all_onesp (arg0))
10506 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10509 /* X - (X / CST) * CST is X % CST. */
10510 if (INTEGRAL_TYPE_P (type)
10511 && TREE_CODE (arg1) == MULT_EXPR
10512 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10513 && operand_equal_p (arg0,
10514 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10515 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10516 TREE_OPERAND (arg1, 1), 0))
10517 return
10518 fold_convert_loc (loc, type,
10519 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10520 arg0, TREE_OPERAND (arg1, 1)));
10522 if (! FLOAT_TYPE_P (type))
10524 if (integer_zerop (arg0))
10525 return negate_expr (fold_convert_loc (loc, type, arg1));
10526 if (integer_zerop (arg1))
10527 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10529 /* Fold A - (A & B) into ~B & A. */
10530 if (!TREE_SIDE_EFFECTS (arg0)
10531 && TREE_CODE (arg1) == BIT_AND_EXPR)
10533 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10535 tree arg10 = fold_convert_loc (loc, type,
10536 TREE_OPERAND (arg1, 0));
10537 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10538 fold_build1_loc (loc, BIT_NOT_EXPR,
10539 type, arg10),
10540 fold_convert_loc (loc, type, arg0));
10542 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10544 tree arg11 = fold_convert_loc (loc,
10545 type, TREE_OPERAND (arg1, 1));
10546 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10547 fold_build1_loc (loc, BIT_NOT_EXPR,
10548 type, arg11),
10549 fold_convert_loc (loc, type, arg0));
10553 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10554 any power of 2 minus 1. */
10555 if (TREE_CODE (arg0) == BIT_AND_EXPR
10556 && TREE_CODE (arg1) == BIT_AND_EXPR
10557 && operand_equal_p (TREE_OPERAND (arg0, 0),
10558 TREE_OPERAND (arg1, 0), 0))
10560 tree mask0 = TREE_OPERAND (arg0, 1);
10561 tree mask1 = TREE_OPERAND (arg1, 1);
10562 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10564 if (operand_equal_p (tem, mask1, 0))
10566 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10567 TREE_OPERAND (arg0, 0), mask1);
10568 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10573 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10574 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10575 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10577 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10578 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10579 (-ARG1 + ARG0) reduces to -ARG1. */
10580 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10581 return negate_expr (fold_convert_loc (loc, type, arg1));
10583 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10584 __complex__ ( x, -y ). This is not the same for SNaNs or if
10585 signed zeros are involved. */
10586 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10587 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10588 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10590 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10591 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10592 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10593 bool arg0rz = false, arg0iz = false;
10594 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10595 || (arg0i && (arg0iz = real_zerop (arg0i))))
10597 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10598 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10599 if (arg0rz && arg1i && real_zerop (arg1i))
10601 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10602 arg1r ? arg1r
10603 : build1 (REALPART_EXPR, rtype, arg1));
10604 tree ip = arg0i ? arg0i
10605 : build1 (IMAGPART_EXPR, rtype, arg0);
10606 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10608 else if (arg0iz && arg1r && real_zerop (arg1r))
10610 tree rp = arg0r ? arg0r
10611 : build1 (REALPART_EXPR, rtype, arg0);
10612 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10613 arg1i ? arg1i
10614 : build1 (IMAGPART_EXPR, rtype, arg1));
10615 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10620 /* Fold &x - &x. This can happen from &x.foo - &x.
10621 This is unsafe for certain floats even in non-IEEE formats.
10622 In IEEE, it is unsafe because it does wrong for NaNs.
10623 Also note that operand_equal_p is always false if an operand
10624 is volatile. */
10626 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10627 && operand_equal_p (arg0, arg1, 0))
10628 return build_zero_cst (type);
10630 /* A - B -> A + (-B) if B is easily negatable. */
10631 if (negate_expr_p (arg1)
10632 && ((FLOAT_TYPE_P (type)
10633 /* Avoid this transformation if B is a positive REAL_CST. */
10634 && (TREE_CODE (arg1) != REAL_CST
10635 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10636 || INTEGRAL_TYPE_P (type)))
10637 return fold_build2_loc (loc, PLUS_EXPR, type,
10638 fold_convert_loc (loc, type, arg0),
10639 fold_convert_loc (loc, type,
10640 negate_expr (arg1)));
10642 /* Try folding difference of addresses. */
10644 HOST_WIDE_INT diff;
10646 if ((TREE_CODE (arg0) == ADDR_EXPR
10647 || TREE_CODE (arg1) == ADDR_EXPR)
10648 && ptr_difference_const (arg0, arg1, &diff))
10649 return build_int_cst_type (type, diff);
10652 /* Fold &a[i] - &a[j] to i-j. */
10653 if (TREE_CODE (arg0) == ADDR_EXPR
10654 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10655 && TREE_CODE (arg1) == ADDR_EXPR
10656 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10658 tree tem = fold_addr_of_array_ref_difference (loc, type,
10659 TREE_OPERAND (arg0, 0),
10660 TREE_OPERAND (arg1, 0));
10661 if (tem)
10662 return tem;
10665 if (FLOAT_TYPE_P (type)
10666 && flag_unsafe_math_optimizations
10667 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10668 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10669 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10670 return tem;
10672 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10673 one. Make sure the type is not saturating and has the signedness of
10674 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10675 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10676 if ((TREE_CODE (arg0) == MULT_EXPR
10677 || TREE_CODE (arg1) == MULT_EXPR)
10678 && !TYPE_SATURATING (type)
10679 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10680 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10681 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10683 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10684 if (tem)
10685 return tem;
10688 goto associate;
10690 case MULT_EXPR:
10691 /* (-A) * (-B) -> A * B */
10692 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10693 return fold_build2_loc (loc, MULT_EXPR, type,
10694 fold_convert_loc (loc, type,
10695 TREE_OPERAND (arg0, 0)),
10696 fold_convert_loc (loc, type,
10697 negate_expr (arg1)));
10698 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10699 return fold_build2_loc (loc, MULT_EXPR, type,
10700 fold_convert_loc (loc, type,
10701 negate_expr (arg0)),
10702 fold_convert_loc (loc, type,
10703 TREE_OPERAND (arg1, 0)));
10705 if (! FLOAT_TYPE_P (type))
10707 if (integer_zerop (arg1))
10708 return omit_one_operand_loc (loc, type, arg1, arg0);
10709 if (integer_onep (arg1))
10710 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10711 /* Transform x * -1 into -x. Make sure to do the negation
10712 on the original operand with conversions not stripped
10713 because we can only strip non-sign-changing conversions. */
10714 if (integer_all_onesp (arg1))
10715 return fold_convert_loc (loc, type, negate_expr (op0));
10716 /* Transform x * -C into -x * C if x is easily negatable. */
10717 if (TREE_CODE (arg1) == INTEGER_CST
10718 && tree_int_cst_sgn (arg1) == -1
10719 && negate_expr_p (arg0)
10720 && (tem = negate_expr (arg1)) != arg1
10721 && !TREE_OVERFLOW (tem))
10722 return fold_build2_loc (loc, MULT_EXPR, type,
10723 fold_convert_loc (loc, type,
10724 negate_expr (arg0)),
10725 tem);
10727 /* (a * (1 << b)) is (a << b) */
10728 if (TREE_CODE (arg1) == LSHIFT_EXPR
10729 && integer_onep (TREE_OPERAND (arg1, 0)))
10730 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10731 TREE_OPERAND (arg1, 1));
10732 if (TREE_CODE (arg0) == LSHIFT_EXPR
10733 && integer_onep (TREE_OPERAND (arg0, 0)))
10734 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10735 TREE_OPERAND (arg0, 1));
10737 /* (A + A) * C -> A * 2 * C */
10738 if (TREE_CODE (arg0) == PLUS_EXPR
10739 && TREE_CODE (arg1) == INTEGER_CST
10740 && operand_equal_p (TREE_OPERAND (arg0, 0),
10741 TREE_OPERAND (arg0, 1), 0))
10742 return fold_build2_loc (loc, MULT_EXPR, type,
10743 omit_one_operand_loc (loc, type,
10744 TREE_OPERAND (arg0, 0),
10745 TREE_OPERAND (arg0, 1)),
10746 fold_build2_loc (loc, MULT_EXPR, type,
10747 build_int_cst (type, 2) , arg1));
10749 strict_overflow_p = false;
10750 if (TREE_CODE (arg1) == INTEGER_CST
10751 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10752 &strict_overflow_p)))
10754 if (strict_overflow_p)
10755 fold_overflow_warning (("assuming signed overflow does not "
10756 "occur when simplifying "
10757 "multiplication"),
10758 WARN_STRICT_OVERFLOW_MISC);
10759 return fold_convert_loc (loc, type, tem);
10762 /* Optimize z * conj(z) for integer complex numbers. */
10763 if (TREE_CODE (arg0) == CONJ_EXPR
10764 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10765 return fold_mult_zconjz (loc, type, arg1);
10766 if (TREE_CODE (arg1) == CONJ_EXPR
10767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10768 return fold_mult_zconjz (loc, type, arg0);
10770 else
10772 /* Maybe fold x * 0 to 0. The expressions aren't the same
10773 when x is NaN, since x * 0 is also NaN. Nor are they the
10774 same in modes with signed zeros, since multiplying a
10775 negative value by 0 gives -0, not +0. */
10776 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10777 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10778 && real_zerop (arg1))
10779 return omit_one_operand_loc (loc, type, arg1, arg0);
10780 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10781 Likewise for complex arithmetic with signed zeros. */
10782 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10783 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10784 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10785 && real_onep (arg1))
10786 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10788 /* Transform x * -1.0 into -x. */
10789 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10790 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10791 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10792 && real_minus_onep (arg1))
10793 return fold_convert_loc (loc, type, negate_expr (arg0));
10795 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10796 the result for floating point types due to rounding so it is applied
10797 only if -fassociative-math was specify. */
10798 if (flag_associative_math
10799 && TREE_CODE (arg0) == RDIV_EXPR
10800 && TREE_CODE (arg1) == REAL_CST
10801 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10803 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10804 arg1);
10805 if (tem)
10806 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10807 TREE_OPERAND (arg0, 1));
10810 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10811 if (operand_equal_p (arg0, arg1, 0))
10813 tree tem = fold_strip_sign_ops (arg0);
10814 if (tem != NULL_TREE)
10816 tem = fold_convert_loc (loc, type, tem);
10817 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10821 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10822 This is not the same for NaNs or if signed zeros are
10823 involved. */
10824 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10825 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10826 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10827 && TREE_CODE (arg1) == COMPLEX_CST
10828 && real_zerop (TREE_REALPART (arg1)))
10830 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10831 if (real_onep (TREE_IMAGPART (arg1)))
10832 return
10833 fold_build2_loc (loc, COMPLEX_EXPR, type,
10834 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10835 rtype, arg0)),
10836 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10837 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10838 return
10839 fold_build2_loc (loc, COMPLEX_EXPR, type,
10840 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10841 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10842 rtype, arg0)));
10845 /* Optimize z * conj(z) for floating point complex numbers.
10846 Guarded by flag_unsafe_math_optimizations as non-finite
10847 imaginary components don't produce scalar results. */
10848 if (flag_unsafe_math_optimizations
10849 && TREE_CODE (arg0) == CONJ_EXPR
10850 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10851 return fold_mult_zconjz (loc, type, arg1);
10852 if (flag_unsafe_math_optimizations
10853 && TREE_CODE (arg1) == CONJ_EXPR
10854 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10855 return fold_mult_zconjz (loc, type, arg0);
10857 if (flag_unsafe_math_optimizations)
10859 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10860 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10862 /* Optimizations of root(...)*root(...). */
10863 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10865 tree rootfn, arg;
10866 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10867 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10869 /* Optimize sqrt(x)*sqrt(x) as x. */
10870 if (BUILTIN_SQRT_P (fcode0)
10871 && operand_equal_p (arg00, arg10, 0)
10872 && ! HONOR_SNANS (TYPE_MODE (type)))
10873 return arg00;
10875 /* Optimize root(x)*root(y) as root(x*y). */
10876 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10877 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10878 return build_call_expr_loc (loc, rootfn, 1, arg);
10881 /* Optimize expN(x)*expN(y) as expN(x+y). */
10882 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10884 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10885 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10886 CALL_EXPR_ARG (arg0, 0),
10887 CALL_EXPR_ARG (arg1, 0));
10888 return build_call_expr_loc (loc, expfn, 1, arg);
10891 /* Optimizations of pow(...)*pow(...). */
10892 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10893 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10894 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10896 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10897 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10898 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10899 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10901 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10902 if (operand_equal_p (arg01, arg11, 0))
10904 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10905 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10906 arg00, arg10);
10907 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10910 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10911 if (operand_equal_p (arg00, arg10, 0))
10913 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10914 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10915 arg01, arg11);
10916 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10920 /* Optimize tan(x)*cos(x) as sin(x). */
10921 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10922 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10923 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10924 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10925 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10926 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10927 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10928 CALL_EXPR_ARG (arg1, 0), 0))
10930 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10932 if (sinfn != NULL_TREE)
10933 return build_call_expr_loc (loc, sinfn, 1,
10934 CALL_EXPR_ARG (arg0, 0));
10937 /* Optimize x*pow(x,c) as pow(x,c+1). */
10938 if (fcode1 == BUILT_IN_POW
10939 || fcode1 == BUILT_IN_POWF
10940 || fcode1 == BUILT_IN_POWL)
10942 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10943 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10944 if (TREE_CODE (arg11) == REAL_CST
10945 && !TREE_OVERFLOW (arg11)
10946 && operand_equal_p (arg0, arg10, 0))
10948 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10949 REAL_VALUE_TYPE c;
10950 tree arg;
10952 c = TREE_REAL_CST (arg11);
10953 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10954 arg = build_real (type, c);
10955 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10959 /* Optimize pow(x,c)*x as pow(x,c+1). */
10960 if (fcode0 == BUILT_IN_POW
10961 || fcode0 == BUILT_IN_POWF
10962 || fcode0 == BUILT_IN_POWL)
10964 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10965 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10966 if (TREE_CODE (arg01) == REAL_CST
10967 && !TREE_OVERFLOW (arg01)
10968 && operand_equal_p (arg1, arg00, 0))
10970 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10971 REAL_VALUE_TYPE c;
10972 tree arg;
10974 c = TREE_REAL_CST (arg01);
10975 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10976 arg = build_real (type, c);
10977 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10981 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10982 if (!in_gimple_form
10983 && optimize
10984 && operand_equal_p (arg0, arg1, 0))
10986 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10988 if (powfn)
10990 tree arg = build_real (type, dconst2);
10991 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10996 goto associate;
10998 case BIT_IOR_EXPR:
10999 bit_ior:
11000 if (integer_all_onesp (arg1))
11001 return omit_one_operand_loc (loc, type, arg1, arg0);
11002 if (integer_zerop (arg1))
11003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11004 if (operand_equal_p (arg0, arg1, 0))
11005 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11007 /* ~X | X is -1. */
11008 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11009 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11011 t1 = build_zero_cst (type);
11012 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11013 return omit_one_operand_loc (loc, type, t1, arg1);
11016 /* X | ~X is -1. */
11017 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11018 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11020 t1 = build_zero_cst (type);
11021 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11022 return omit_one_operand_loc (loc, type, t1, arg0);
11025 /* Canonicalize (X & C1) | C2. */
11026 if (TREE_CODE (arg0) == BIT_AND_EXPR
11027 && TREE_CODE (arg1) == INTEGER_CST
11028 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11030 double_int c1, c2, c3, msk;
11031 int width = TYPE_PRECISION (type), w;
11032 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11033 c2 = tree_to_double_int (arg1);
11035 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11036 if ((c1 & c2) == c1)
11037 return omit_one_operand_loc (loc, type, arg1,
11038 TREE_OPERAND (arg0, 0));
11040 msk = double_int::mask (width);
11042 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11043 if (msk.and_not (c1 | c2).is_zero ())
11044 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11045 TREE_OPERAND (arg0, 0), arg1);
11047 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11048 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11049 mode which allows further optimizations. */
11050 c1 &= msk;
11051 c2 &= msk;
11052 c3 = c1.and_not (c2);
11053 for (w = BITS_PER_UNIT;
11054 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11055 w <<= 1)
11057 unsigned HOST_WIDE_INT mask
11058 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11059 if (((c1.low | c2.low) & mask) == mask
11060 && (c1.low & ~mask) == 0 && c1.high == 0)
11062 c3 = double_int::from_uhwi (mask);
11063 break;
11066 if (c3 != c1)
11067 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11068 fold_build2_loc (loc, BIT_AND_EXPR, type,
11069 TREE_OPERAND (arg0, 0),
11070 double_int_to_tree (type,
11071 c3)),
11072 arg1);
11075 /* (X & Y) | Y is (X, Y). */
11076 if (TREE_CODE (arg0) == BIT_AND_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11078 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11079 /* (X & Y) | X is (Y, X). */
11080 if (TREE_CODE (arg0) == BIT_AND_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11082 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11083 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11084 /* X | (X & Y) is (Y, X). */
11085 if (TREE_CODE (arg1) == BIT_AND_EXPR
11086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11087 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11088 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11089 /* X | (Y & X) is (Y, X). */
11090 if (TREE_CODE (arg1) == BIT_AND_EXPR
11091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11092 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11093 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11095 /* (X & ~Y) | (~X & Y) is X ^ Y */
11096 if (TREE_CODE (arg0) == BIT_AND_EXPR
11097 && TREE_CODE (arg1) == BIT_AND_EXPR)
11099 tree a0, a1, l0, l1, n0, n1;
11101 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11102 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11104 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11105 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11107 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11108 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11110 if ((operand_equal_p (n0, a0, 0)
11111 && operand_equal_p (n1, a1, 0))
11112 || (operand_equal_p (n0, a1, 0)
11113 && operand_equal_p (n1, a0, 0)))
11114 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11117 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11118 if (t1 != NULL_TREE)
11119 return t1;
11121 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11123 This results in more efficient code for machines without a NAND
11124 instruction. Combine will canonicalize to the first form
11125 which will allow use of NAND instructions provided by the
11126 backend if they exist. */
11127 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11128 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11130 return
11131 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11132 build2 (BIT_AND_EXPR, type,
11133 fold_convert_loc (loc, type,
11134 TREE_OPERAND (arg0, 0)),
11135 fold_convert_loc (loc, type,
11136 TREE_OPERAND (arg1, 0))));
11139 /* See if this can be simplified into a rotate first. If that
11140 is unsuccessful continue in the association code. */
11141 goto bit_rotate;
11143 case BIT_XOR_EXPR:
11144 if (integer_zerop (arg1))
11145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11146 if (integer_all_onesp (arg1))
11147 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11148 if (operand_equal_p (arg0, arg1, 0))
11149 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11151 /* ~X ^ X is -1. */
11152 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11155 t1 = build_zero_cst (type);
11156 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11157 return omit_one_operand_loc (loc, type, t1, arg1);
11160 /* X ^ ~X is -1. */
11161 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11162 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11164 t1 = build_zero_cst (type);
11165 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11166 return omit_one_operand_loc (loc, type, t1, arg0);
11169 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11170 with a constant, and the two constants have no bits in common,
11171 we should treat this as a BIT_IOR_EXPR since this may produce more
11172 simplifications. */
11173 if (TREE_CODE (arg0) == BIT_AND_EXPR
11174 && TREE_CODE (arg1) == BIT_AND_EXPR
11175 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11176 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11177 && integer_zerop (const_binop (BIT_AND_EXPR,
11178 TREE_OPERAND (arg0, 1),
11179 TREE_OPERAND (arg1, 1))))
11181 code = BIT_IOR_EXPR;
11182 goto bit_ior;
11185 /* (X | Y) ^ X -> Y & ~ X*/
11186 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11187 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11189 tree t2 = TREE_OPERAND (arg0, 1);
11190 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11191 arg1);
11192 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11193 fold_convert_loc (loc, type, t2),
11194 fold_convert_loc (loc, type, t1));
11195 return t1;
11198 /* (Y | X) ^ X -> Y & ~ X*/
11199 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11200 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11202 tree t2 = TREE_OPERAND (arg0, 0);
11203 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11204 arg1);
11205 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11206 fold_convert_loc (loc, type, t2),
11207 fold_convert_loc (loc, type, t1));
11208 return t1;
11211 /* X ^ (X | Y) -> Y & ~ X*/
11212 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11215 tree t2 = TREE_OPERAND (arg1, 1);
11216 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11217 arg0);
11218 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11219 fold_convert_loc (loc, type, t2),
11220 fold_convert_loc (loc, type, t1));
11221 return t1;
11224 /* X ^ (Y | X) -> Y & ~ X*/
11225 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11226 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11228 tree t2 = TREE_OPERAND (arg1, 0);
11229 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11230 arg0);
11231 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11232 fold_convert_loc (loc, type, t2),
11233 fold_convert_loc (loc, type, t1));
11234 return t1;
11237 /* Convert ~X ^ ~Y to X ^ Y. */
11238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11239 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11240 return fold_build2_loc (loc, code, type,
11241 fold_convert_loc (loc, type,
11242 TREE_OPERAND (arg0, 0)),
11243 fold_convert_loc (loc, type,
11244 TREE_OPERAND (arg1, 0)));
11246 /* Convert ~X ^ C to X ^ ~C. */
11247 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11248 && TREE_CODE (arg1) == INTEGER_CST)
11249 return fold_build2_loc (loc, code, type,
11250 fold_convert_loc (loc, type,
11251 TREE_OPERAND (arg0, 0)),
11252 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11254 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11255 if (TREE_CODE (arg0) == BIT_AND_EXPR
11256 && integer_onep (TREE_OPERAND (arg0, 1))
11257 && integer_onep (arg1))
11258 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11259 build_zero_cst (TREE_TYPE (arg0)));
11261 /* Fold (X & Y) ^ Y as ~X & Y. */
11262 if (TREE_CODE (arg0) == BIT_AND_EXPR
11263 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11265 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11266 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11267 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11268 fold_convert_loc (loc, type, arg1));
11270 /* Fold (X & Y) ^ X as ~Y & X. */
11271 if (TREE_CODE (arg0) == BIT_AND_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11273 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11275 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11276 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11278 fold_convert_loc (loc, type, arg1));
11280 /* Fold X ^ (X & Y) as X & ~Y. */
11281 if (TREE_CODE (arg1) == BIT_AND_EXPR
11282 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11284 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11285 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11286 fold_convert_loc (loc, type, arg0),
11287 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11289 /* Fold X ^ (Y & X) as ~Y & X. */
11290 if (TREE_CODE (arg1) == BIT_AND_EXPR
11291 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11292 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11294 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11295 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11296 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11297 fold_convert_loc (loc, type, arg0));
11300 /* See if this can be simplified into a rotate first. If that
11301 is unsuccessful continue in the association code. */
11302 goto bit_rotate;
11304 case BIT_AND_EXPR:
11305 if (integer_all_onesp (arg1))
11306 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11307 if (integer_zerop (arg1))
11308 return omit_one_operand_loc (loc, type, arg1, arg0);
11309 if (operand_equal_p (arg0, arg1, 0))
11310 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11312 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11313 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11314 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11315 || (TREE_CODE (arg0) == EQ_EXPR
11316 && integer_zerop (TREE_OPERAND (arg0, 1))))
11317 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11318 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11320 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11321 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11322 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11323 || (TREE_CODE (arg1) == EQ_EXPR
11324 && integer_zerop (TREE_OPERAND (arg1, 1))))
11325 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11326 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11328 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11329 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11330 && TREE_CODE (arg1) == INTEGER_CST
11331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11333 tree tmp1 = fold_convert_loc (loc, type, arg1);
11334 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11335 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11336 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11337 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11338 return
11339 fold_convert_loc (loc, type,
11340 fold_build2_loc (loc, BIT_IOR_EXPR,
11341 type, tmp2, tmp3));
11344 /* (X | Y) & Y is (X, Y). */
11345 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11346 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11347 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11348 /* (X | Y) & X is (Y, X). */
11349 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11351 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11352 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11353 /* X & (X | Y) is (Y, X). */
11354 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11355 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11356 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11357 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11358 /* X & (Y | X) is (Y, X). */
11359 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11360 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11361 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11362 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11364 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11365 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11366 && integer_onep (TREE_OPERAND (arg0, 1))
11367 && integer_onep (arg1))
11369 tree tem2;
11370 tem = TREE_OPERAND (arg0, 0);
11371 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11372 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11373 tem, tem2);
11374 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11375 build_zero_cst (TREE_TYPE (tem)));
11377 /* Fold ~X & 1 as (X & 1) == 0. */
11378 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11379 && integer_onep (arg1))
11381 tree tem2;
11382 tem = TREE_OPERAND (arg0, 0);
11383 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11384 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11385 tem, tem2);
11386 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11387 build_zero_cst (TREE_TYPE (tem)));
11389 /* Fold !X & 1 as X == 0. */
11390 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11391 && integer_onep (arg1))
11393 tem = TREE_OPERAND (arg0, 0);
11394 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11395 build_zero_cst (TREE_TYPE (tem)));
11398 /* Fold (X ^ Y) & Y as ~X & Y. */
11399 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11400 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11402 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11403 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11404 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11405 fold_convert_loc (loc, type, arg1));
11407 /* Fold (X ^ Y) & X as ~Y & X. */
11408 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11410 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11412 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11413 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11415 fold_convert_loc (loc, type, arg1));
11417 /* Fold X & (X ^ Y) as X & ~Y. */
11418 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11419 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11421 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11422 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11423 fold_convert_loc (loc, type, arg0),
11424 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11426 /* Fold X & (Y ^ X) as ~Y & X. */
11427 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11428 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11429 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11431 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11432 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11433 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11434 fold_convert_loc (loc, type, arg0));
11437 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11438 multiple of 1 << CST. */
11439 if (TREE_CODE (arg1) == INTEGER_CST)
11441 double_int cst1 = tree_to_double_int (arg1);
11442 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11443 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11444 if ((cst1 & ncst1) == ncst1
11445 && multiple_of_p (type, arg0,
11446 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11447 return fold_convert_loc (loc, type, arg0);
11450 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11451 bits from CST2. */
11452 if (TREE_CODE (arg1) == INTEGER_CST
11453 && TREE_CODE (arg0) == MULT_EXPR
11454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11456 int arg1tz
11457 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11458 if (arg1tz > 0)
11460 double_int arg1mask, masked;
11461 arg1mask = ~double_int::mask (arg1tz);
11462 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11463 TYPE_UNSIGNED (type));
11464 masked = arg1mask & tree_to_double_int (arg1);
11465 if (masked.is_zero ())
11466 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11467 arg0, arg1);
11468 else if (masked != tree_to_double_int (arg1))
11469 return fold_build2_loc (loc, code, type, op0,
11470 double_int_to_tree (type, masked));
11474 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11475 ((A & N) + B) & M -> (A + B) & M
11476 Similarly if (N & M) == 0,
11477 ((A | N) + B) & M -> (A + B) & M
11478 and for - instead of + (or unary - instead of +)
11479 and/or ^ instead of |.
11480 If B is constant and (B & M) == 0, fold into A & M. */
11481 if (host_integerp (arg1, 1))
11483 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11484 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11485 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11486 && (TREE_CODE (arg0) == PLUS_EXPR
11487 || TREE_CODE (arg0) == MINUS_EXPR
11488 || TREE_CODE (arg0) == NEGATE_EXPR)
11489 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11490 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11492 tree pmop[2];
11493 int which = 0;
11494 unsigned HOST_WIDE_INT cst0;
11496 /* Now we know that arg0 is (C + D) or (C - D) or
11497 -C and arg1 (M) is == (1LL << cst) - 1.
11498 Store C into PMOP[0] and D into PMOP[1]. */
11499 pmop[0] = TREE_OPERAND (arg0, 0);
11500 pmop[1] = NULL;
11501 if (TREE_CODE (arg0) != NEGATE_EXPR)
11503 pmop[1] = TREE_OPERAND (arg0, 1);
11504 which = 1;
11507 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11508 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11509 & cst1) != cst1)
11510 which = -1;
11512 for (; which >= 0; which--)
11513 switch (TREE_CODE (pmop[which]))
11515 case BIT_AND_EXPR:
11516 case BIT_IOR_EXPR:
11517 case BIT_XOR_EXPR:
11518 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11519 != INTEGER_CST)
11520 break;
11521 /* tree_low_cst not used, because we don't care about
11522 the upper bits. */
11523 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11524 cst0 &= cst1;
11525 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11527 if (cst0 != cst1)
11528 break;
11530 else if (cst0 != 0)
11531 break;
11532 /* If C or D is of the form (A & N) where
11533 (N & M) == M, or of the form (A | N) or
11534 (A ^ N) where (N & M) == 0, replace it with A. */
11535 pmop[which] = TREE_OPERAND (pmop[which], 0);
11536 break;
11537 case INTEGER_CST:
11538 /* If C or D is a N where (N & M) == 0, it can be
11539 omitted (assumed 0). */
11540 if ((TREE_CODE (arg0) == PLUS_EXPR
11541 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11542 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11543 pmop[which] = NULL;
11544 break;
11545 default:
11546 break;
11549 /* Only build anything new if we optimized one or both arguments
11550 above. */
11551 if (pmop[0] != TREE_OPERAND (arg0, 0)
11552 || (TREE_CODE (arg0) != NEGATE_EXPR
11553 && pmop[1] != TREE_OPERAND (arg0, 1)))
11555 tree utype = TREE_TYPE (arg0);
11556 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11558 /* Perform the operations in a type that has defined
11559 overflow behavior. */
11560 utype = unsigned_type_for (TREE_TYPE (arg0));
11561 if (pmop[0] != NULL)
11562 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11563 if (pmop[1] != NULL)
11564 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11567 if (TREE_CODE (arg0) == NEGATE_EXPR)
11568 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11569 else if (TREE_CODE (arg0) == PLUS_EXPR)
11571 if (pmop[0] != NULL && pmop[1] != NULL)
11572 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11573 pmop[0], pmop[1]);
11574 else if (pmop[0] != NULL)
11575 tem = pmop[0];
11576 else if (pmop[1] != NULL)
11577 tem = pmop[1];
11578 else
11579 return build_int_cst (type, 0);
11581 else if (pmop[0] == NULL)
11582 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11583 else
11584 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11585 pmop[0], pmop[1]);
11586 /* TEM is now the new binary +, - or unary - replacement. */
11587 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11588 fold_convert_loc (loc, utype, arg1));
11589 return fold_convert_loc (loc, type, tem);
11594 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11595 if (t1 != NULL_TREE)
11596 return t1;
11597 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11598 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11599 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11601 unsigned int prec
11602 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11604 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11605 && (~TREE_INT_CST_LOW (arg1)
11606 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11607 return
11608 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11611 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11613 This results in more efficient code for machines without a NOR
11614 instruction. Combine will canonicalize to the first form
11615 which will allow use of NOR instructions provided by the
11616 backend if they exist. */
11617 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11618 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11620 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11621 build2 (BIT_IOR_EXPR, type,
11622 fold_convert_loc (loc, type,
11623 TREE_OPERAND (arg0, 0)),
11624 fold_convert_loc (loc, type,
11625 TREE_OPERAND (arg1, 0))));
11628 /* If arg0 is derived from the address of an object or function, we may
11629 be able to fold this expression using the object or function's
11630 alignment. */
11631 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11633 unsigned HOST_WIDE_INT modulus, residue;
11634 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11636 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11637 integer_onep (arg1));
11639 /* This works because modulus is a power of 2. If this weren't the
11640 case, we'd have to replace it by its greatest power-of-2
11641 divisor: modulus & -modulus. */
11642 if (low < modulus)
11643 return build_int_cst (type, residue & low);
11646 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11647 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11648 if the new mask might be further optimized. */
11649 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11650 || TREE_CODE (arg0) == RSHIFT_EXPR)
11651 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11652 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11653 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11654 < TYPE_PRECISION (TREE_TYPE (arg0))
11655 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11656 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11658 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11659 unsigned HOST_WIDE_INT mask
11660 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11661 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11662 tree shift_type = TREE_TYPE (arg0);
11664 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11665 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11666 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11667 && TYPE_PRECISION (TREE_TYPE (arg0))
11668 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11670 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11671 tree arg00 = TREE_OPERAND (arg0, 0);
11672 /* See if more bits can be proven as zero because of
11673 zero extension. */
11674 if (TREE_CODE (arg00) == NOP_EXPR
11675 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11677 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11678 if (TYPE_PRECISION (inner_type)
11679 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11680 && TYPE_PRECISION (inner_type) < prec)
11682 prec = TYPE_PRECISION (inner_type);
11683 /* See if we can shorten the right shift. */
11684 if (shiftc < prec)
11685 shift_type = inner_type;
11688 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11689 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11690 zerobits <<= prec - shiftc;
11691 /* For arithmetic shift if sign bit could be set, zerobits
11692 can contain actually sign bits, so no transformation is
11693 possible, unless MASK masks them all away. In that
11694 case the shift needs to be converted into logical shift. */
11695 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11696 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11698 if ((mask & zerobits) == 0)
11699 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11700 else
11701 zerobits = 0;
11705 /* ((X << 16) & 0xff00) is (X, 0). */
11706 if ((mask & zerobits) == mask)
11707 return omit_one_operand_loc (loc, type,
11708 build_int_cst (type, 0), arg0);
11710 newmask = mask | zerobits;
11711 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11713 unsigned int prec;
11715 /* Only do the transformation if NEWMASK is some integer
11716 mode's mask. */
11717 for (prec = BITS_PER_UNIT;
11718 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11719 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11720 break;
11721 if (prec < HOST_BITS_PER_WIDE_INT
11722 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11724 tree newmaskt;
11726 if (shift_type != TREE_TYPE (arg0))
11728 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11729 fold_convert_loc (loc, shift_type,
11730 TREE_OPERAND (arg0, 0)),
11731 TREE_OPERAND (arg0, 1));
11732 tem = fold_convert_loc (loc, type, tem);
11734 else
11735 tem = op0;
11736 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11737 if (!tree_int_cst_equal (newmaskt, arg1))
11738 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11743 goto associate;
11745 case RDIV_EXPR:
11746 /* Don't touch a floating-point divide by zero unless the mode
11747 of the constant can represent infinity. */
11748 if (TREE_CODE (arg1) == REAL_CST
11749 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11750 && real_zerop (arg1))
11751 return NULL_TREE;
11753 /* Optimize A / A to 1.0 if we don't care about
11754 NaNs or Infinities. Skip the transformation
11755 for non-real operands. */
11756 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11757 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11758 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11759 && operand_equal_p (arg0, arg1, 0))
11761 tree r = build_real (TREE_TYPE (arg0), dconst1);
11763 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11766 /* The complex version of the above A / A optimization. */
11767 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11768 && operand_equal_p (arg0, arg1, 0))
11770 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11771 if (! HONOR_NANS (TYPE_MODE (elem_type))
11772 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11774 tree r = build_real (elem_type, dconst1);
11775 /* omit_two_operands will call fold_convert for us. */
11776 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11780 /* (-A) / (-B) -> A / B */
11781 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11782 return fold_build2_loc (loc, RDIV_EXPR, type,
11783 TREE_OPERAND (arg0, 0),
11784 negate_expr (arg1));
11785 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11786 return fold_build2_loc (loc, RDIV_EXPR, type,
11787 negate_expr (arg0),
11788 TREE_OPERAND (arg1, 0));
11790 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11791 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11792 && real_onep (arg1))
11793 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11795 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11796 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11797 && real_minus_onep (arg1))
11798 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11799 negate_expr (arg0)));
11801 /* If ARG1 is a constant, we can convert this to a multiply by the
11802 reciprocal. This does not have the same rounding properties,
11803 so only do this if -freciprocal-math. We can actually
11804 always safely do it if ARG1 is a power of two, but it's hard to
11805 tell if it is or not in a portable manner. */
11806 if (optimize
11807 && (TREE_CODE (arg1) == REAL_CST
11808 || (TREE_CODE (arg1) == COMPLEX_CST
11809 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11810 || (TREE_CODE (arg1) == VECTOR_CST
11811 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11813 if (flag_reciprocal_math
11814 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11815 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11816 /* Find the reciprocal if optimizing and the result is exact.
11817 TODO: Complex reciprocal not implemented. */
11818 if (TREE_CODE (arg1) != COMPLEX_CST)
11820 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11822 if (inverse)
11823 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11826 /* Convert A/B/C to A/(B*C). */
11827 if (flag_reciprocal_math
11828 && TREE_CODE (arg0) == RDIV_EXPR)
11829 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11830 fold_build2_loc (loc, MULT_EXPR, type,
11831 TREE_OPERAND (arg0, 1), arg1));
11833 /* Convert A/(B/C) to (A/B)*C. */
11834 if (flag_reciprocal_math
11835 && TREE_CODE (arg1) == RDIV_EXPR)
11836 return fold_build2_loc (loc, MULT_EXPR, type,
11837 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11838 TREE_OPERAND (arg1, 0)),
11839 TREE_OPERAND (arg1, 1));
11841 /* Convert C1/(X*C2) into (C1/C2)/X. */
11842 if (flag_reciprocal_math
11843 && TREE_CODE (arg1) == MULT_EXPR
11844 && TREE_CODE (arg0) == REAL_CST
11845 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11847 tree tem = const_binop (RDIV_EXPR, arg0,
11848 TREE_OPERAND (arg1, 1));
11849 if (tem)
11850 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11851 TREE_OPERAND (arg1, 0));
11854 if (flag_unsafe_math_optimizations)
11856 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11857 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11859 /* Optimize sin(x)/cos(x) as tan(x). */
11860 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11861 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11862 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11863 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11864 CALL_EXPR_ARG (arg1, 0), 0))
11866 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11868 if (tanfn != NULL_TREE)
11869 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11872 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11873 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11874 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11875 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11876 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11877 CALL_EXPR_ARG (arg1, 0), 0))
11879 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11881 if (tanfn != NULL_TREE)
11883 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11884 CALL_EXPR_ARG (arg0, 0));
11885 return fold_build2_loc (loc, RDIV_EXPR, type,
11886 build_real (type, dconst1), tmp);
11890 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11891 NaNs or Infinities. */
11892 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11893 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11894 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11896 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11897 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11899 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11900 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11901 && operand_equal_p (arg00, arg01, 0))
11903 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11905 if (cosfn != NULL_TREE)
11906 return build_call_expr_loc (loc, cosfn, 1, arg00);
11910 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11911 NaNs or Infinities. */
11912 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11913 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11914 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11916 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11917 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11919 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11920 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11921 && operand_equal_p (arg00, arg01, 0))
11923 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11925 if (cosfn != NULL_TREE)
11927 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11928 return fold_build2_loc (loc, RDIV_EXPR, type,
11929 build_real (type, dconst1),
11930 tmp);
11935 /* Optimize pow(x,c)/x as pow(x,c-1). */
11936 if (fcode0 == BUILT_IN_POW
11937 || fcode0 == BUILT_IN_POWF
11938 || fcode0 == BUILT_IN_POWL)
11940 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11941 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11942 if (TREE_CODE (arg01) == REAL_CST
11943 && !TREE_OVERFLOW (arg01)
11944 && operand_equal_p (arg1, arg00, 0))
11946 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11947 REAL_VALUE_TYPE c;
11948 tree arg;
11950 c = TREE_REAL_CST (arg01);
11951 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11952 arg = build_real (type, c);
11953 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11957 /* Optimize a/root(b/c) into a*root(c/b). */
11958 if (BUILTIN_ROOT_P (fcode1))
11960 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11962 if (TREE_CODE (rootarg) == RDIV_EXPR)
11964 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11965 tree b = TREE_OPERAND (rootarg, 0);
11966 tree c = TREE_OPERAND (rootarg, 1);
11968 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11970 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11971 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11975 /* Optimize x/expN(y) into x*expN(-y). */
11976 if (BUILTIN_EXPONENT_P (fcode1))
11978 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11979 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11980 arg1 = build_call_expr_loc (loc,
11981 expfn, 1,
11982 fold_convert_loc (loc, type, arg));
11983 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11986 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11987 if (fcode1 == BUILT_IN_POW
11988 || fcode1 == BUILT_IN_POWF
11989 || fcode1 == BUILT_IN_POWL)
11991 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11992 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11993 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11994 tree neg11 = fold_convert_loc (loc, type,
11995 negate_expr (arg11));
11996 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11997 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12000 return NULL_TREE;
12002 case TRUNC_DIV_EXPR:
12003 /* Optimize (X & (-A)) / A where A is a power of 2,
12004 to X >> log2(A) */
12005 if (TREE_CODE (arg0) == BIT_AND_EXPR
12006 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12007 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12009 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12010 arg1, TREE_OPERAND (arg0, 1));
12011 if (sum && integer_zerop (sum)) {
12012 unsigned long pow2;
12014 if (TREE_INT_CST_LOW (arg1))
12015 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12016 else
12017 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12018 + HOST_BITS_PER_WIDE_INT;
12020 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12021 TREE_OPERAND (arg0, 0),
12022 build_int_cst (integer_type_node, pow2));
12026 /* Fall through */
12028 case FLOOR_DIV_EXPR:
12029 /* Simplify A / (B << N) where A and B are positive and B is
12030 a power of 2, to A >> (N + log2(B)). */
12031 strict_overflow_p = false;
12032 if (TREE_CODE (arg1) == LSHIFT_EXPR
12033 && (TYPE_UNSIGNED (type)
12034 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12036 tree sval = TREE_OPERAND (arg1, 0);
12037 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12039 tree sh_cnt = TREE_OPERAND (arg1, 1);
12040 unsigned long pow2;
12042 if (TREE_INT_CST_LOW (sval))
12043 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12044 else
12045 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12046 + HOST_BITS_PER_WIDE_INT;
12048 if (strict_overflow_p)
12049 fold_overflow_warning (("assuming signed overflow does not "
12050 "occur when simplifying A / (B << N)"),
12051 WARN_STRICT_OVERFLOW_MISC);
12053 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12054 sh_cnt,
12055 build_int_cst (TREE_TYPE (sh_cnt),
12056 pow2));
12057 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12058 fold_convert_loc (loc, type, arg0), sh_cnt);
12062 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12063 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12064 if (INTEGRAL_TYPE_P (type)
12065 && TYPE_UNSIGNED (type)
12066 && code == FLOOR_DIV_EXPR)
12067 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12069 /* Fall through */
12071 case ROUND_DIV_EXPR:
12072 case CEIL_DIV_EXPR:
12073 case EXACT_DIV_EXPR:
12074 if (integer_onep (arg1))
12075 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12076 if (integer_zerop (arg1))
12077 return NULL_TREE;
12078 /* X / -1 is -X. */
12079 if (!TYPE_UNSIGNED (type)
12080 && TREE_CODE (arg1) == INTEGER_CST
12081 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12082 && TREE_INT_CST_HIGH (arg1) == -1)
12083 return fold_convert_loc (loc, type, negate_expr (arg0));
12085 /* Convert -A / -B to A / B when the type is signed and overflow is
12086 undefined. */
12087 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12088 && TREE_CODE (arg0) == NEGATE_EXPR
12089 && negate_expr_p (arg1))
12091 if (INTEGRAL_TYPE_P (type))
12092 fold_overflow_warning (("assuming signed overflow does not occur "
12093 "when distributing negation across "
12094 "division"),
12095 WARN_STRICT_OVERFLOW_MISC);
12096 return fold_build2_loc (loc, code, type,
12097 fold_convert_loc (loc, type,
12098 TREE_OPERAND (arg0, 0)),
12099 fold_convert_loc (loc, type,
12100 negate_expr (arg1)));
12102 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12103 && TREE_CODE (arg1) == NEGATE_EXPR
12104 && negate_expr_p (arg0))
12106 if (INTEGRAL_TYPE_P (type))
12107 fold_overflow_warning (("assuming signed overflow does not occur "
12108 "when distributing negation across "
12109 "division"),
12110 WARN_STRICT_OVERFLOW_MISC);
12111 return fold_build2_loc (loc, code, type,
12112 fold_convert_loc (loc, type,
12113 negate_expr (arg0)),
12114 fold_convert_loc (loc, type,
12115 TREE_OPERAND (arg1, 0)));
12118 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12119 operation, EXACT_DIV_EXPR.
12121 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12122 At one time others generated faster code, it's not clear if they do
12123 after the last round to changes to the DIV code in expmed.c. */
12124 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12125 && multiple_of_p (type, arg0, arg1))
12126 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12128 strict_overflow_p = false;
12129 if (TREE_CODE (arg1) == INTEGER_CST
12130 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12131 &strict_overflow_p)))
12133 if (strict_overflow_p)
12134 fold_overflow_warning (("assuming signed overflow does not occur "
12135 "when simplifying division"),
12136 WARN_STRICT_OVERFLOW_MISC);
12137 return fold_convert_loc (loc, type, tem);
12140 return NULL_TREE;
12142 case CEIL_MOD_EXPR:
12143 case FLOOR_MOD_EXPR:
12144 case ROUND_MOD_EXPR:
12145 case TRUNC_MOD_EXPR:
12146 /* X % 1 is always zero, but be sure to preserve any side
12147 effects in X. */
12148 if (integer_onep (arg1))
12149 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12151 /* X % 0, return X % 0 unchanged so that we can get the
12152 proper warnings and errors. */
12153 if (integer_zerop (arg1))
12154 return NULL_TREE;
12156 /* 0 % X is always zero, but be sure to preserve any side
12157 effects in X. Place this after checking for X == 0. */
12158 if (integer_zerop (arg0))
12159 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12161 /* X % -1 is zero. */
12162 if (!TYPE_UNSIGNED (type)
12163 && TREE_CODE (arg1) == INTEGER_CST
12164 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12165 && TREE_INT_CST_HIGH (arg1) == -1)
12166 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12168 /* X % -C is the same as X % C. */
12169 if (code == TRUNC_MOD_EXPR
12170 && !TYPE_UNSIGNED (type)
12171 && TREE_CODE (arg1) == INTEGER_CST
12172 && !TREE_OVERFLOW (arg1)
12173 && TREE_INT_CST_HIGH (arg1) < 0
12174 && !TYPE_OVERFLOW_TRAPS (type)
12175 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12176 && !sign_bit_p (arg1, arg1))
12177 return fold_build2_loc (loc, code, type,
12178 fold_convert_loc (loc, type, arg0),
12179 fold_convert_loc (loc, type,
12180 negate_expr (arg1)));
12182 /* X % -Y is the same as X % Y. */
12183 if (code == TRUNC_MOD_EXPR
12184 && !TYPE_UNSIGNED (type)
12185 && TREE_CODE (arg1) == NEGATE_EXPR
12186 && !TYPE_OVERFLOW_TRAPS (type))
12187 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12188 fold_convert_loc (loc, type,
12189 TREE_OPERAND (arg1, 0)));
12191 strict_overflow_p = false;
12192 if (TREE_CODE (arg1) == INTEGER_CST
12193 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12194 &strict_overflow_p)))
12196 if (strict_overflow_p)
12197 fold_overflow_warning (("assuming signed overflow does not occur "
12198 "when simplifying modulus"),
12199 WARN_STRICT_OVERFLOW_MISC);
12200 return fold_convert_loc (loc, type, tem);
12203 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12204 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12205 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12206 && (TYPE_UNSIGNED (type)
12207 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12209 tree c = arg1;
12210 /* Also optimize A % (C << N) where C is a power of 2,
12211 to A & ((C << N) - 1). */
12212 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12213 c = TREE_OPERAND (arg1, 0);
12215 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12217 tree mask
12218 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12219 build_int_cst (TREE_TYPE (arg1), 1));
12220 if (strict_overflow_p)
12221 fold_overflow_warning (("assuming signed overflow does not "
12222 "occur when simplifying "
12223 "X % (power of two)"),
12224 WARN_STRICT_OVERFLOW_MISC);
12225 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12226 fold_convert_loc (loc, type, arg0),
12227 fold_convert_loc (loc, type, mask));
12231 return NULL_TREE;
12233 case LROTATE_EXPR:
12234 case RROTATE_EXPR:
12235 if (integer_all_onesp (arg0))
12236 return omit_one_operand_loc (loc, type, arg0, arg1);
12237 goto shift;
12239 case RSHIFT_EXPR:
12240 /* Optimize -1 >> x for arithmetic right shifts. */
12241 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12242 && tree_expr_nonnegative_p (arg1))
12243 return omit_one_operand_loc (loc, type, arg0, arg1);
12244 /* ... fall through ... */
12246 case LSHIFT_EXPR:
12247 shift:
12248 if (integer_zerop (arg1))
12249 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12250 if (integer_zerop (arg0))
12251 return omit_one_operand_loc (loc, type, arg0, arg1);
12253 /* Since negative shift count is not well-defined,
12254 don't try to compute it in the compiler. */
12255 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12256 return NULL_TREE;
12258 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12259 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12260 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12261 && host_integerp (TREE_OPERAND (arg0, 1), false)
12262 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12264 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12265 + TREE_INT_CST_LOW (arg1));
12267 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12268 being well defined. */
12269 if (low >= TYPE_PRECISION (type))
12271 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12272 low = low % TYPE_PRECISION (type);
12273 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12274 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12275 TREE_OPERAND (arg0, 0));
12276 else
12277 low = TYPE_PRECISION (type) - 1;
12280 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12281 build_int_cst (type, low));
12284 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12285 into x & ((unsigned)-1 >> c) for unsigned types. */
12286 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12287 || (TYPE_UNSIGNED (type)
12288 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12289 && host_integerp (arg1, false)
12290 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12291 && host_integerp (TREE_OPERAND (arg0, 1), false)
12292 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12294 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12295 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12296 tree lshift;
12297 tree arg00;
12299 if (low0 == low1)
12301 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12303 lshift = build_int_cst (type, -1);
12304 lshift = int_const_binop (code, lshift, arg1);
12306 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12310 /* Rewrite an LROTATE_EXPR by a constant into an
12311 RROTATE_EXPR by a new constant. */
12312 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12314 tree tem = build_int_cst (TREE_TYPE (arg1),
12315 TYPE_PRECISION (type));
12316 tem = const_binop (MINUS_EXPR, tem, arg1);
12317 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12320 /* If we have a rotate of a bit operation with the rotate count and
12321 the second operand of the bit operation both constant,
12322 permute the two operations. */
12323 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12324 && (TREE_CODE (arg0) == BIT_AND_EXPR
12325 || TREE_CODE (arg0) == BIT_IOR_EXPR
12326 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12328 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12329 fold_build2_loc (loc, code, type,
12330 TREE_OPERAND (arg0, 0), arg1),
12331 fold_build2_loc (loc, code, type,
12332 TREE_OPERAND (arg0, 1), arg1));
12334 /* Two consecutive rotates adding up to the precision of the
12335 type can be ignored. */
12336 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12337 && TREE_CODE (arg0) == RROTATE_EXPR
12338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12339 && TREE_INT_CST_HIGH (arg1) == 0
12340 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12341 && ((TREE_INT_CST_LOW (arg1)
12342 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12343 == (unsigned int) TYPE_PRECISION (type)))
12344 return TREE_OPERAND (arg0, 0);
12346 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12347 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12348 if the latter can be further optimized. */
12349 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12350 && TREE_CODE (arg0) == BIT_AND_EXPR
12351 && TREE_CODE (arg1) == INTEGER_CST
12352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12354 tree mask = fold_build2_loc (loc, code, type,
12355 fold_convert_loc (loc, type,
12356 TREE_OPERAND (arg0, 1)),
12357 arg1);
12358 tree shift = fold_build2_loc (loc, code, type,
12359 fold_convert_loc (loc, type,
12360 TREE_OPERAND (arg0, 0)),
12361 arg1);
12362 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12363 if (tem)
12364 return tem;
12367 return NULL_TREE;
12369 case MIN_EXPR:
12370 if (operand_equal_p (arg0, arg1, 0))
12371 return omit_one_operand_loc (loc, type, arg0, arg1);
12372 if (INTEGRAL_TYPE_P (type)
12373 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12374 return omit_one_operand_loc (loc, type, arg1, arg0);
12375 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12376 if (tem)
12377 return tem;
12378 goto associate;
12380 case MAX_EXPR:
12381 if (operand_equal_p (arg0, arg1, 0))
12382 return omit_one_operand_loc (loc, type, arg0, arg1);
12383 if (INTEGRAL_TYPE_P (type)
12384 && TYPE_MAX_VALUE (type)
12385 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12386 return omit_one_operand_loc (loc, type, arg1, arg0);
12387 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12388 if (tem)
12389 return tem;
12390 goto associate;
12392 case TRUTH_ANDIF_EXPR:
12393 /* Note that the operands of this must be ints
12394 and their values must be 0 or 1.
12395 ("true" is a fixed value perhaps depending on the language.) */
12396 /* If first arg is constant zero, return it. */
12397 if (integer_zerop (arg0))
12398 return fold_convert_loc (loc, type, arg0);
12399 case TRUTH_AND_EXPR:
12400 /* If either arg is constant true, drop it. */
12401 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12403 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12404 /* Preserve sequence points. */
12405 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12407 /* If second arg is constant zero, result is zero, but first arg
12408 must be evaluated. */
12409 if (integer_zerop (arg1))
12410 return omit_one_operand_loc (loc, type, arg1, arg0);
12411 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12412 case will be handled here. */
12413 if (integer_zerop (arg0))
12414 return omit_one_operand_loc (loc, type, arg0, arg1);
12416 /* !X && X is always false. */
12417 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12419 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12420 /* X && !X is always false. */
12421 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12423 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12425 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12426 means A >= Y && A != MAX, but in this case we know that
12427 A < X <= MAX. */
12429 if (!TREE_SIDE_EFFECTS (arg0)
12430 && !TREE_SIDE_EFFECTS (arg1))
12432 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12433 if (tem && !operand_equal_p (tem, arg0, 0))
12434 return fold_build2_loc (loc, code, type, tem, arg1);
12436 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12437 if (tem && !operand_equal_p (tem, arg1, 0))
12438 return fold_build2_loc (loc, code, type, arg0, tem);
12441 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12442 != NULL_TREE)
12443 return tem;
12445 return NULL_TREE;
12447 case TRUTH_ORIF_EXPR:
12448 /* Note that the operands of this must be ints
12449 and their values must be 0 or true.
12450 ("true" is a fixed value perhaps depending on the language.) */
12451 /* If first arg is constant true, return it. */
12452 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12453 return fold_convert_loc (loc, type, arg0);
12454 case TRUTH_OR_EXPR:
12455 /* If either arg is constant zero, drop it. */
12456 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12457 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12458 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12459 /* Preserve sequence points. */
12460 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12461 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12462 /* If second arg is constant true, result is true, but we must
12463 evaluate first arg. */
12464 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12465 return omit_one_operand_loc (loc, type, arg1, arg0);
12466 /* Likewise for first arg, but note this only occurs here for
12467 TRUTH_OR_EXPR. */
12468 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12469 return omit_one_operand_loc (loc, type, arg0, arg1);
12471 /* !X || X is always true. */
12472 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12474 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12475 /* X || !X is always true. */
12476 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12478 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12480 /* (X && !Y) || (!X && Y) is X ^ Y */
12481 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12482 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12484 tree a0, a1, l0, l1, n0, n1;
12486 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12487 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12489 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12490 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12492 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12493 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12495 if ((operand_equal_p (n0, a0, 0)
12496 && operand_equal_p (n1, a1, 0))
12497 || (operand_equal_p (n0, a1, 0)
12498 && operand_equal_p (n1, a0, 0)))
12499 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12502 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12503 != NULL_TREE)
12504 return tem;
12506 return NULL_TREE;
12508 case TRUTH_XOR_EXPR:
12509 /* If the second arg is constant zero, drop it. */
12510 if (integer_zerop (arg1))
12511 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12512 /* If the second arg is constant true, this is a logical inversion. */
12513 if (integer_onep (arg1))
12515 /* Only call invert_truthvalue if operand is a truth value. */
12516 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12517 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12518 else
12519 tem = invert_truthvalue_loc (loc, arg0);
12520 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12522 /* Identical arguments cancel to zero. */
12523 if (operand_equal_p (arg0, arg1, 0))
12524 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12526 /* !X ^ X is always true. */
12527 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12528 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12529 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12531 /* X ^ !X is always true. */
12532 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12533 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12534 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12536 return NULL_TREE;
12538 case EQ_EXPR:
12539 case NE_EXPR:
12540 STRIP_NOPS (arg0);
12541 STRIP_NOPS (arg1);
12543 tem = fold_comparison (loc, code, type, op0, op1);
12544 if (tem != NULL_TREE)
12545 return tem;
12547 /* bool_var != 0 becomes bool_var. */
12548 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12549 && code == NE_EXPR)
12550 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12552 /* bool_var == 1 becomes bool_var. */
12553 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12554 && code == EQ_EXPR)
12555 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12557 /* bool_var != 1 becomes !bool_var. */
12558 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12559 && code == NE_EXPR)
12560 return fold_convert_loc (loc, type,
12561 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12562 TREE_TYPE (arg0), arg0));
12564 /* bool_var == 0 becomes !bool_var. */
12565 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12566 && code == EQ_EXPR)
12567 return fold_convert_loc (loc, type,
12568 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12569 TREE_TYPE (arg0), arg0));
12571 /* !exp != 0 becomes !exp */
12572 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12573 && code == NE_EXPR)
12574 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12576 /* If this is an equality comparison of the address of two non-weak,
12577 unaliased symbols neither of which are extern (since we do not
12578 have access to attributes for externs), then we know the result. */
12579 if (TREE_CODE (arg0) == ADDR_EXPR
12580 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12581 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12582 && ! lookup_attribute ("alias",
12583 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12584 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12585 && TREE_CODE (arg1) == ADDR_EXPR
12586 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12587 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12588 && ! lookup_attribute ("alias",
12589 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12590 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12592 /* We know that we're looking at the address of two
12593 non-weak, unaliased, static _DECL nodes.
12595 It is both wasteful and incorrect to call operand_equal_p
12596 to compare the two ADDR_EXPR nodes. It is wasteful in that
12597 all we need to do is test pointer equality for the arguments
12598 to the two ADDR_EXPR nodes. It is incorrect to use
12599 operand_equal_p as that function is NOT equivalent to a
12600 C equality test. It can in fact return false for two
12601 objects which would test as equal using the C equality
12602 operator. */
12603 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12604 return constant_boolean_node (equal
12605 ? code == EQ_EXPR : code != EQ_EXPR,
12606 type);
12609 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12610 a MINUS_EXPR of a constant, we can convert it into a comparison with
12611 a revised constant as long as no overflow occurs. */
12612 if (TREE_CODE (arg1) == INTEGER_CST
12613 && (TREE_CODE (arg0) == PLUS_EXPR
12614 || TREE_CODE (arg0) == MINUS_EXPR)
12615 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12616 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12617 ? MINUS_EXPR : PLUS_EXPR,
12618 fold_convert_loc (loc, TREE_TYPE (arg0),
12619 arg1),
12620 TREE_OPERAND (arg0, 1)))
12621 && !TREE_OVERFLOW (tem))
12622 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12624 /* Similarly for a NEGATE_EXPR. */
12625 if (TREE_CODE (arg0) == NEGATE_EXPR
12626 && TREE_CODE (arg1) == INTEGER_CST
12627 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12628 arg1)))
12629 && TREE_CODE (tem) == INTEGER_CST
12630 && !TREE_OVERFLOW (tem))
12631 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12633 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12634 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12635 && TREE_CODE (arg1) == INTEGER_CST
12636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12637 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12638 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12639 fold_convert_loc (loc,
12640 TREE_TYPE (arg0),
12641 arg1),
12642 TREE_OPERAND (arg0, 1)));
12644 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12645 if ((TREE_CODE (arg0) == PLUS_EXPR
12646 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12647 || TREE_CODE (arg0) == MINUS_EXPR)
12648 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12649 0)),
12650 arg1, 0)
12651 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12652 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12654 tree val = TREE_OPERAND (arg0, 1);
12655 return omit_two_operands_loc (loc, type,
12656 fold_build2_loc (loc, code, type,
12657 val,
12658 build_int_cst (TREE_TYPE (val),
12659 0)),
12660 TREE_OPERAND (arg0, 0), arg1);
12663 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12664 if (TREE_CODE (arg0) == MINUS_EXPR
12665 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12666 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12667 1)),
12668 arg1, 0)
12669 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12671 return omit_two_operands_loc (loc, type,
12672 code == NE_EXPR
12673 ? boolean_true_node : boolean_false_node,
12674 TREE_OPERAND (arg0, 1), arg1);
12677 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12678 for !=. Don't do this for ordered comparisons due to overflow. */
12679 if (TREE_CODE (arg0) == MINUS_EXPR
12680 && integer_zerop (arg1))
12681 return fold_build2_loc (loc, code, type,
12682 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12684 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12685 if (TREE_CODE (arg0) == ABS_EXPR
12686 && (integer_zerop (arg1) || real_zerop (arg1)))
12687 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12689 /* If this is an EQ or NE comparison with zero and ARG0 is
12690 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12691 two operations, but the latter can be done in one less insn
12692 on machines that have only two-operand insns or on which a
12693 constant cannot be the first operand. */
12694 if (TREE_CODE (arg0) == BIT_AND_EXPR
12695 && integer_zerop (arg1))
12697 tree arg00 = TREE_OPERAND (arg0, 0);
12698 tree arg01 = TREE_OPERAND (arg0, 1);
12699 if (TREE_CODE (arg00) == LSHIFT_EXPR
12700 && integer_onep (TREE_OPERAND (arg00, 0)))
12702 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12703 arg01, TREE_OPERAND (arg00, 1));
12704 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12705 build_int_cst (TREE_TYPE (arg0), 1));
12706 return fold_build2_loc (loc, code, type,
12707 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12708 arg1);
12710 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12711 && integer_onep (TREE_OPERAND (arg01, 0)))
12713 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12714 arg00, TREE_OPERAND (arg01, 1));
12715 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12716 build_int_cst (TREE_TYPE (arg0), 1));
12717 return fold_build2_loc (loc, code, type,
12718 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12719 arg1);
12723 /* If this is an NE or EQ comparison of zero against the result of a
12724 signed MOD operation whose second operand is a power of 2, make
12725 the MOD operation unsigned since it is simpler and equivalent. */
12726 if (integer_zerop (arg1)
12727 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12728 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12729 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12730 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12731 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12732 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12734 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12735 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12736 fold_convert_loc (loc, newtype,
12737 TREE_OPERAND (arg0, 0)),
12738 fold_convert_loc (loc, newtype,
12739 TREE_OPERAND (arg0, 1)));
12741 return fold_build2_loc (loc, code, type, newmod,
12742 fold_convert_loc (loc, newtype, arg1));
12745 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12746 C1 is a valid shift constant, and C2 is a power of two, i.e.
12747 a single bit. */
12748 if (TREE_CODE (arg0) == BIT_AND_EXPR
12749 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12750 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12751 == INTEGER_CST
12752 && integer_pow2p (TREE_OPERAND (arg0, 1))
12753 && integer_zerop (arg1))
12755 tree itype = TREE_TYPE (arg0);
12756 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12757 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12759 /* Check for a valid shift count. */
12760 if (TREE_INT_CST_HIGH (arg001) == 0
12761 && TREE_INT_CST_LOW (arg001) < prec)
12763 tree arg01 = TREE_OPERAND (arg0, 1);
12764 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12765 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12766 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12767 can be rewritten as (X & (C2 << C1)) != 0. */
12768 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12770 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12771 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12772 return fold_build2_loc (loc, code, type, tem,
12773 fold_convert_loc (loc, itype, arg1));
12775 /* Otherwise, for signed (arithmetic) shifts,
12776 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12777 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12778 else if (!TYPE_UNSIGNED (itype))
12779 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12780 arg000, build_int_cst (itype, 0));
12781 /* Otherwise, of unsigned (logical) shifts,
12782 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12783 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12784 else
12785 return omit_one_operand_loc (loc, type,
12786 code == EQ_EXPR ? integer_one_node
12787 : integer_zero_node,
12788 arg000);
12792 /* If we have (A & C) == C where C is a power of 2, convert this into
12793 (A & C) != 0. Similarly for NE_EXPR. */
12794 if (TREE_CODE (arg0) == BIT_AND_EXPR
12795 && integer_pow2p (TREE_OPERAND (arg0, 1))
12796 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12797 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12798 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12799 integer_zero_node));
12801 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12802 bit, then fold the expression into A < 0 or A >= 0. */
12803 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12804 if (tem)
12805 return tem;
12807 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12808 Similarly for NE_EXPR. */
12809 if (TREE_CODE (arg0) == BIT_AND_EXPR
12810 && TREE_CODE (arg1) == INTEGER_CST
12811 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12813 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12814 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12815 TREE_OPERAND (arg0, 1));
12816 tree dandnotc
12817 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12818 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12819 notc);
12820 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12821 if (integer_nonzerop (dandnotc))
12822 return omit_one_operand_loc (loc, type, rslt, arg0);
12825 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12826 Similarly for NE_EXPR. */
12827 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12828 && TREE_CODE (arg1) == INTEGER_CST
12829 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12831 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12832 tree candnotd
12833 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12834 TREE_OPERAND (arg0, 1),
12835 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12836 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12837 if (integer_nonzerop (candnotd))
12838 return omit_one_operand_loc (loc, type, rslt, arg0);
12841 /* If this is a comparison of a field, we may be able to simplify it. */
12842 if ((TREE_CODE (arg0) == COMPONENT_REF
12843 || TREE_CODE (arg0) == BIT_FIELD_REF)
12844 /* Handle the constant case even without -O
12845 to make sure the warnings are given. */
12846 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12848 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12849 if (t1)
12850 return t1;
12853 /* Optimize comparisons of strlen vs zero to a compare of the
12854 first character of the string vs zero. To wit,
12855 strlen(ptr) == 0 => *ptr == 0
12856 strlen(ptr) != 0 => *ptr != 0
12857 Other cases should reduce to one of these two (or a constant)
12858 due to the return value of strlen being unsigned. */
12859 if (TREE_CODE (arg0) == CALL_EXPR
12860 && integer_zerop (arg1))
12862 tree fndecl = get_callee_fndecl (arg0);
12864 if (fndecl
12865 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12866 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12867 && call_expr_nargs (arg0) == 1
12868 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12870 tree iref = build_fold_indirect_ref_loc (loc,
12871 CALL_EXPR_ARG (arg0, 0));
12872 return fold_build2_loc (loc, code, type, iref,
12873 build_int_cst (TREE_TYPE (iref), 0));
12877 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12878 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12879 if (TREE_CODE (arg0) == RSHIFT_EXPR
12880 && integer_zerop (arg1)
12881 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12883 tree arg00 = TREE_OPERAND (arg0, 0);
12884 tree arg01 = TREE_OPERAND (arg0, 1);
12885 tree itype = TREE_TYPE (arg00);
12886 if (TREE_INT_CST_HIGH (arg01) == 0
12887 && TREE_INT_CST_LOW (arg01)
12888 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12890 if (TYPE_UNSIGNED (itype))
12892 itype = signed_type_for (itype);
12893 arg00 = fold_convert_loc (loc, itype, arg00);
12895 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12896 type, arg00, build_zero_cst (itype));
12900 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12901 if (integer_zerop (arg1)
12902 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12903 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12904 TREE_OPERAND (arg0, 1));
12906 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12907 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12908 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12909 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12910 build_zero_cst (TREE_TYPE (arg0)));
12911 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12912 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12913 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12914 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12915 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12916 build_zero_cst (TREE_TYPE (arg0)));
12918 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12920 && TREE_CODE (arg1) == INTEGER_CST
12921 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12922 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12923 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12924 TREE_OPERAND (arg0, 1), arg1));
12926 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12927 (X & C) == 0 when C is a single bit. */
12928 if (TREE_CODE (arg0) == BIT_AND_EXPR
12929 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12930 && integer_zerop (arg1)
12931 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12933 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12934 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12935 TREE_OPERAND (arg0, 1));
12936 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12937 type, tem,
12938 fold_convert_loc (loc, TREE_TYPE (arg0),
12939 arg1));
12942 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12943 constant C is a power of two, i.e. a single bit. */
12944 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12945 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12946 && integer_zerop (arg1)
12947 && integer_pow2p (TREE_OPERAND (arg0, 1))
12948 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12949 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12951 tree arg00 = TREE_OPERAND (arg0, 0);
12952 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12953 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12956 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12957 when is C is a power of two, i.e. a single bit. */
12958 if (TREE_CODE (arg0) == BIT_AND_EXPR
12959 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12960 && integer_zerop (arg1)
12961 && integer_pow2p (TREE_OPERAND (arg0, 1))
12962 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12963 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12965 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12966 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12967 arg000, TREE_OPERAND (arg0, 1));
12968 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12969 tem, build_int_cst (TREE_TYPE (tem), 0));
12972 if (integer_zerop (arg1)
12973 && tree_expr_nonzero_p (arg0))
12975 tree res = constant_boolean_node (code==NE_EXPR, type);
12976 return omit_one_operand_loc (loc, type, res, arg0);
12979 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12980 if (TREE_CODE (arg0) == NEGATE_EXPR
12981 && TREE_CODE (arg1) == NEGATE_EXPR)
12982 return fold_build2_loc (loc, code, type,
12983 TREE_OPERAND (arg0, 0),
12984 fold_convert_loc (loc, TREE_TYPE (arg0),
12985 TREE_OPERAND (arg1, 0)));
12987 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12988 if (TREE_CODE (arg0) == BIT_AND_EXPR
12989 && TREE_CODE (arg1) == BIT_AND_EXPR)
12991 tree arg00 = TREE_OPERAND (arg0, 0);
12992 tree arg01 = TREE_OPERAND (arg0, 1);
12993 tree arg10 = TREE_OPERAND (arg1, 0);
12994 tree arg11 = TREE_OPERAND (arg1, 1);
12995 tree itype = TREE_TYPE (arg0);
12997 if (operand_equal_p (arg01, arg11, 0))
12998 return fold_build2_loc (loc, code, type,
12999 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13000 fold_build2_loc (loc,
13001 BIT_XOR_EXPR, itype,
13002 arg00, arg10),
13003 arg01),
13004 build_zero_cst (itype));
13006 if (operand_equal_p (arg01, arg10, 0))
13007 return fold_build2_loc (loc, code, type,
13008 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13009 fold_build2_loc (loc,
13010 BIT_XOR_EXPR, itype,
13011 arg00, arg11),
13012 arg01),
13013 build_zero_cst (itype));
13015 if (operand_equal_p (arg00, arg11, 0))
13016 return fold_build2_loc (loc, code, type,
13017 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13018 fold_build2_loc (loc,
13019 BIT_XOR_EXPR, itype,
13020 arg01, arg10),
13021 arg00),
13022 build_zero_cst (itype));
13024 if (operand_equal_p (arg00, arg10, 0))
13025 return fold_build2_loc (loc, code, type,
13026 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13027 fold_build2_loc (loc,
13028 BIT_XOR_EXPR, itype,
13029 arg01, arg11),
13030 arg00),
13031 build_zero_cst (itype));
13034 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13035 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13037 tree arg00 = TREE_OPERAND (arg0, 0);
13038 tree arg01 = TREE_OPERAND (arg0, 1);
13039 tree arg10 = TREE_OPERAND (arg1, 0);
13040 tree arg11 = TREE_OPERAND (arg1, 1);
13041 tree itype = TREE_TYPE (arg0);
13043 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13044 operand_equal_p guarantees no side-effects so we don't need
13045 to use omit_one_operand on Z. */
13046 if (operand_equal_p (arg01, arg11, 0))
13047 return fold_build2_loc (loc, code, type, arg00,
13048 fold_convert_loc (loc, TREE_TYPE (arg00),
13049 arg10));
13050 if (operand_equal_p (arg01, arg10, 0))
13051 return fold_build2_loc (loc, code, type, arg00,
13052 fold_convert_loc (loc, TREE_TYPE (arg00),
13053 arg11));
13054 if (operand_equal_p (arg00, arg11, 0))
13055 return fold_build2_loc (loc, code, type, arg01,
13056 fold_convert_loc (loc, TREE_TYPE (arg01),
13057 arg10));
13058 if (operand_equal_p (arg00, arg10, 0))
13059 return fold_build2_loc (loc, code, type, arg01,
13060 fold_convert_loc (loc, TREE_TYPE (arg01),
13061 arg11));
13063 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13064 if (TREE_CODE (arg01) == INTEGER_CST
13065 && TREE_CODE (arg11) == INTEGER_CST)
13067 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13068 fold_convert_loc (loc, itype, arg11));
13069 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13070 return fold_build2_loc (loc, code, type, tem,
13071 fold_convert_loc (loc, itype, arg10));
13075 /* Attempt to simplify equality/inequality comparisons of complex
13076 values. Only lower the comparison if the result is known or
13077 can be simplified to a single scalar comparison. */
13078 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13079 || TREE_CODE (arg0) == COMPLEX_CST)
13080 && (TREE_CODE (arg1) == COMPLEX_EXPR
13081 || TREE_CODE (arg1) == COMPLEX_CST))
13083 tree real0, imag0, real1, imag1;
13084 tree rcond, icond;
13086 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13088 real0 = TREE_OPERAND (arg0, 0);
13089 imag0 = TREE_OPERAND (arg0, 1);
13091 else
13093 real0 = TREE_REALPART (arg0);
13094 imag0 = TREE_IMAGPART (arg0);
13097 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13099 real1 = TREE_OPERAND (arg1, 0);
13100 imag1 = TREE_OPERAND (arg1, 1);
13102 else
13104 real1 = TREE_REALPART (arg1);
13105 imag1 = TREE_IMAGPART (arg1);
13108 rcond = fold_binary_loc (loc, code, type, real0, real1);
13109 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13111 if (integer_zerop (rcond))
13113 if (code == EQ_EXPR)
13114 return omit_two_operands_loc (loc, type, boolean_false_node,
13115 imag0, imag1);
13116 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13118 else
13120 if (code == NE_EXPR)
13121 return omit_two_operands_loc (loc, type, boolean_true_node,
13122 imag0, imag1);
13123 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13127 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13128 if (icond && TREE_CODE (icond) == INTEGER_CST)
13130 if (integer_zerop (icond))
13132 if (code == EQ_EXPR)
13133 return omit_two_operands_loc (loc, type, boolean_false_node,
13134 real0, real1);
13135 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13137 else
13139 if (code == NE_EXPR)
13140 return omit_two_operands_loc (loc, type, boolean_true_node,
13141 real0, real1);
13142 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13147 return NULL_TREE;
13149 case LT_EXPR:
13150 case GT_EXPR:
13151 case LE_EXPR:
13152 case GE_EXPR:
13153 tem = fold_comparison (loc, code, type, op0, op1);
13154 if (tem != NULL_TREE)
13155 return tem;
13157 /* Transform comparisons of the form X +- C CMP X. */
13158 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13159 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13160 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13161 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13162 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13163 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13165 tree arg01 = TREE_OPERAND (arg0, 1);
13166 enum tree_code code0 = TREE_CODE (arg0);
13167 int is_positive;
13169 if (TREE_CODE (arg01) == REAL_CST)
13170 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13171 else
13172 is_positive = tree_int_cst_sgn (arg01);
13174 /* (X - c) > X becomes false. */
13175 if (code == GT_EXPR
13176 && ((code0 == MINUS_EXPR && is_positive >= 0)
13177 || (code0 == PLUS_EXPR && is_positive <= 0)))
13179 if (TREE_CODE (arg01) == INTEGER_CST
13180 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13181 fold_overflow_warning (("assuming signed overflow does not "
13182 "occur when assuming that (X - c) > X "
13183 "is always false"),
13184 WARN_STRICT_OVERFLOW_ALL);
13185 return constant_boolean_node (0, type);
13188 /* Likewise (X + c) < X becomes false. */
13189 if (code == LT_EXPR
13190 && ((code0 == PLUS_EXPR && is_positive >= 0)
13191 || (code0 == MINUS_EXPR && is_positive <= 0)))
13193 if (TREE_CODE (arg01) == INTEGER_CST
13194 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13195 fold_overflow_warning (("assuming signed overflow does not "
13196 "occur when assuming that "
13197 "(X + c) < X is always false"),
13198 WARN_STRICT_OVERFLOW_ALL);
13199 return constant_boolean_node (0, type);
13202 /* Convert (X - c) <= X to true. */
13203 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13204 && code == LE_EXPR
13205 && ((code0 == MINUS_EXPR && is_positive >= 0)
13206 || (code0 == PLUS_EXPR && is_positive <= 0)))
13208 if (TREE_CODE (arg01) == INTEGER_CST
13209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13210 fold_overflow_warning (("assuming signed overflow does not "
13211 "occur when assuming that "
13212 "(X - c) <= X is always true"),
13213 WARN_STRICT_OVERFLOW_ALL);
13214 return constant_boolean_node (1, type);
13217 /* Convert (X + c) >= X to true. */
13218 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13219 && code == GE_EXPR
13220 && ((code0 == PLUS_EXPR && is_positive >= 0)
13221 || (code0 == MINUS_EXPR && is_positive <= 0)))
13223 if (TREE_CODE (arg01) == INTEGER_CST
13224 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13225 fold_overflow_warning (("assuming signed overflow does not "
13226 "occur when assuming that "
13227 "(X + c) >= X is always true"),
13228 WARN_STRICT_OVERFLOW_ALL);
13229 return constant_boolean_node (1, type);
13232 if (TREE_CODE (arg01) == INTEGER_CST)
13234 /* Convert X + c > X and X - c < X to true for integers. */
13235 if (code == GT_EXPR
13236 && ((code0 == PLUS_EXPR && is_positive > 0)
13237 || (code0 == MINUS_EXPR && is_positive < 0)))
13239 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13240 fold_overflow_warning (("assuming signed overflow does "
13241 "not occur when assuming that "
13242 "(X + c) > X is always true"),
13243 WARN_STRICT_OVERFLOW_ALL);
13244 return constant_boolean_node (1, type);
13247 if (code == LT_EXPR
13248 && ((code0 == MINUS_EXPR && is_positive > 0)
13249 || (code0 == PLUS_EXPR && is_positive < 0)))
13251 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13252 fold_overflow_warning (("assuming signed overflow does "
13253 "not occur when assuming that "
13254 "(X - c) < X is always true"),
13255 WARN_STRICT_OVERFLOW_ALL);
13256 return constant_boolean_node (1, type);
13259 /* Convert X + c <= X and X - c >= X to false for integers. */
13260 if (code == LE_EXPR
13261 && ((code0 == PLUS_EXPR && is_positive > 0)
13262 || (code0 == MINUS_EXPR && is_positive < 0)))
13264 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13265 fold_overflow_warning (("assuming signed overflow does "
13266 "not occur when assuming that "
13267 "(X + c) <= X is always false"),
13268 WARN_STRICT_OVERFLOW_ALL);
13269 return constant_boolean_node (0, type);
13272 if (code == GE_EXPR
13273 && ((code0 == MINUS_EXPR && is_positive > 0)
13274 || (code0 == PLUS_EXPR && is_positive < 0)))
13276 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13277 fold_overflow_warning (("assuming signed overflow does "
13278 "not occur when assuming that "
13279 "(X - c) >= X is always false"),
13280 WARN_STRICT_OVERFLOW_ALL);
13281 return constant_boolean_node (0, type);
13286 /* Comparisons with the highest or lowest possible integer of
13287 the specified precision will have known values. */
13289 tree arg1_type = TREE_TYPE (arg1);
13290 unsigned int width = TYPE_PRECISION (arg1_type);
13292 if (TREE_CODE (arg1) == INTEGER_CST
13293 && width <= HOST_BITS_PER_DOUBLE_INT
13294 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13296 HOST_WIDE_INT signed_max_hi;
13297 unsigned HOST_WIDE_INT signed_max_lo;
13298 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13300 if (width <= HOST_BITS_PER_WIDE_INT)
13302 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13303 - 1;
13304 signed_max_hi = 0;
13305 max_hi = 0;
13307 if (TYPE_UNSIGNED (arg1_type))
13309 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13310 min_lo = 0;
13311 min_hi = 0;
13313 else
13315 max_lo = signed_max_lo;
13316 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13317 min_hi = -1;
13320 else
13322 width -= HOST_BITS_PER_WIDE_INT;
13323 signed_max_lo = -1;
13324 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13325 - 1;
13326 max_lo = -1;
13327 min_lo = 0;
13329 if (TYPE_UNSIGNED (arg1_type))
13331 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13332 min_hi = 0;
13334 else
13336 max_hi = signed_max_hi;
13337 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13341 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13342 && TREE_INT_CST_LOW (arg1) == max_lo)
13343 switch (code)
13345 case GT_EXPR:
13346 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13348 case GE_EXPR:
13349 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13351 case LE_EXPR:
13352 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13354 case LT_EXPR:
13355 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13357 /* The GE_EXPR and LT_EXPR cases above are not normally
13358 reached because of previous transformations. */
13360 default:
13361 break;
13363 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13364 == max_hi
13365 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13366 switch (code)
13368 case GT_EXPR:
13369 arg1 = const_binop (PLUS_EXPR, arg1,
13370 build_int_cst (TREE_TYPE (arg1), 1));
13371 return fold_build2_loc (loc, EQ_EXPR, type,
13372 fold_convert_loc (loc,
13373 TREE_TYPE (arg1), arg0),
13374 arg1);
13375 case LE_EXPR:
13376 arg1 = const_binop (PLUS_EXPR, arg1,
13377 build_int_cst (TREE_TYPE (arg1), 1));
13378 return fold_build2_loc (loc, NE_EXPR, type,
13379 fold_convert_loc (loc, TREE_TYPE (arg1),
13380 arg0),
13381 arg1);
13382 default:
13383 break;
13385 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13386 == min_hi
13387 && TREE_INT_CST_LOW (arg1) == min_lo)
13388 switch (code)
13390 case LT_EXPR:
13391 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13393 case LE_EXPR:
13394 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13396 case GE_EXPR:
13397 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13399 case GT_EXPR:
13400 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13402 default:
13403 break;
13405 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13406 == min_hi
13407 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13408 switch (code)
13410 case GE_EXPR:
13411 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13412 return fold_build2_loc (loc, NE_EXPR, type,
13413 fold_convert_loc (loc,
13414 TREE_TYPE (arg1), arg0),
13415 arg1);
13416 case LT_EXPR:
13417 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13418 return fold_build2_loc (loc, EQ_EXPR, type,
13419 fold_convert_loc (loc, TREE_TYPE (arg1),
13420 arg0),
13421 arg1);
13422 default:
13423 break;
13426 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13427 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13428 && TYPE_UNSIGNED (arg1_type)
13429 /* We will flip the signedness of the comparison operator
13430 associated with the mode of arg1, so the sign bit is
13431 specified by this mode. Check that arg1 is the signed
13432 max associated with this sign bit. */
13433 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13434 /* signed_type does not work on pointer types. */
13435 && INTEGRAL_TYPE_P (arg1_type))
13437 /* The following case also applies to X < signed_max+1
13438 and X >= signed_max+1 because previous transformations. */
13439 if (code == LE_EXPR || code == GT_EXPR)
13441 tree st;
13442 st = signed_type_for (TREE_TYPE (arg1));
13443 return fold_build2_loc (loc,
13444 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13445 type, fold_convert_loc (loc, st, arg0),
13446 build_int_cst (st, 0));
13452 /* If we are comparing an ABS_EXPR with a constant, we can
13453 convert all the cases into explicit comparisons, but they may
13454 well not be faster than doing the ABS and one comparison.
13455 But ABS (X) <= C is a range comparison, which becomes a subtraction
13456 and a comparison, and is probably faster. */
13457 if (code == LE_EXPR
13458 && TREE_CODE (arg1) == INTEGER_CST
13459 && TREE_CODE (arg0) == ABS_EXPR
13460 && ! TREE_SIDE_EFFECTS (arg0)
13461 && (0 != (tem = negate_expr (arg1)))
13462 && TREE_CODE (tem) == INTEGER_CST
13463 && !TREE_OVERFLOW (tem))
13464 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13465 build2 (GE_EXPR, type,
13466 TREE_OPERAND (arg0, 0), tem),
13467 build2 (LE_EXPR, type,
13468 TREE_OPERAND (arg0, 0), arg1));
13470 /* Convert ABS_EXPR<x> >= 0 to true. */
13471 strict_overflow_p = false;
13472 if (code == GE_EXPR
13473 && (integer_zerop (arg1)
13474 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13475 && real_zerop (arg1)))
13476 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13478 if (strict_overflow_p)
13479 fold_overflow_warning (("assuming signed overflow does not occur "
13480 "when simplifying comparison of "
13481 "absolute value and zero"),
13482 WARN_STRICT_OVERFLOW_CONDITIONAL);
13483 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13486 /* Convert ABS_EXPR<x> < 0 to false. */
13487 strict_overflow_p = false;
13488 if (code == LT_EXPR
13489 && (integer_zerop (arg1) || real_zerop (arg1))
13490 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13492 if (strict_overflow_p)
13493 fold_overflow_warning (("assuming signed overflow does not occur "
13494 "when simplifying comparison of "
13495 "absolute value and zero"),
13496 WARN_STRICT_OVERFLOW_CONDITIONAL);
13497 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13500 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13501 and similarly for >= into !=. */
13502 if ((code == LT_EXPR || code == GE_EXPR)
13503 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13504 && TREE_CODE (arg1) == LSHIFT_EXPR
13505 && integer_onep (TREE_OPERAND (arg1, 0)))
13506 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13507 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13508 TREE_OPERAND (arg1, 1)),
13509 build_zero_cst (TREE_TYPE (arg0)));
13511 if ((code == LT_EXPR || code == GE_EXPR)
13512 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13513 && CONVERT_EXPR_P (arg1)
13514 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13515 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13517 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13518 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13519 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13520 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13521 build_zero_cst (TREE_TYPE (arg0)));
13524 return NULL_TREE;
13526 case UNORDERED_EXPR:
13527 case ORDERED_EXPR:
13528 case UNLT_EXPR:
13529 case UNLE_EXPR:
13530 case UNGT_EXPR:
13531 case UNGE_EXPR:
13532 case UNEQ_EXPR:
13533 case LTGT_EXPR:
13534 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13536 t1 = fold_relational_const (code, type, arg0, arg1);
13537 if (t1 != NULL_TREE)
13538 return t1;
13541 /* If the first operand is NaN, the result is constant. */
13542 if (TREE_CODE (arg0) == REAL_CST
13543 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13544 && (code != LTGT_EXPR || ! flag_trapping_math))
13546 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13547 ? integer_zero_node
13548 : integer_one_node;
13549 return omit_one_operand_loc (loc, type, t1, arg1);
13552 /* If the second operand is NaN, the result is constant. */
13553 if (TREE_CODE (arg1) == REAL_CST
13554 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13555 && (code != LTGT_EXPR || ! flag_trapping_math))
13557 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13558 ? integer_zero_node
13559 : integer_one_node;
13560 return omit_one_operand_loc (loc, type, t1, arg0);
13563 /* Simplify unordered comparison of something with itself. */
13564 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13565 && operand_equal_p (arg0, arg1, 0))
13566 return constant_boolean_node (1, type);
13568 if (code == LTGT_EXPR
13569 && !flag_trapping_math
13570 && operand_equal_p (arg0, arg1, 0))
13571 return constant_boolean_node (0, type);
13573 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13575 tree targ0 = strip_float_extensions (arg0);
13576 tree targ1 = strip_float_extensions (arg1);
13577 tree newtype = TREE_TYPE (targ0);
13579 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13580 newtype = TREE_TYPE (targ1);
13582 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13583 return fold_build2_loc (loc, code, type,
13584 fold_convert_loc (loc, newtype, targ0),
13585 fold_convert_loc (loc, newtype, targ1));
13588 return NULL_TREE;
13590 case COMPOUND_EXPR:
13591 /* When pedantic, a compound expression can be neither an lvalue
13592 nor an integer constant expression. */
13593 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13594 return NULL_TREE;
13595 /* Don't let (0, 0) be null pointer constant. */
13596 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13597 : fold_convert_loc (loc, type, arg1);
13598 return pedantic_non_lvalue_loc (loc, tem);
13600 case COMPLEX_EXPR:
13601 if ((TREE_CODE (arg0) == REAL_CST
13602 && TREE_CODE (arg1) == REAL_CST)
13603 || (TREE_CODE (arg0) == INTEGER_CST
13604 && TREE_CODE (arg1) == INTEGER_CST))
13605 return build_complex (type, arg0, arg1);
13606 if (TREE_CODE (arg0) == REALPART_EXPR
13607 && TREE_CODE (arg1) == IMAGPART_EXPR
13608 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13609 && operand_equal_p (TREE_OPERAND (arg0, 0),
13610 TREE_OPERAND (arg1, 0), 0))
13611 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13612 TREE_OPERAND (arg1, 0));
13613 return NULL_TREE;
13615 case ASSERT_EXPR:
13616 /* An ASSERT_EXPR should never be passed to fold_binary. */
13617 gcc_unreachable ();
13619 case VEC_PACK_TRUNC_EXPR:
13620 case VEC_PACK_FIX_TRUNC_EXPR:
13622 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13623 tree *elts;
13625 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13626 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13627 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13628 return NULL_TREE;
13630 elts = XALLOCAVEC (tree, nelts);
13631 if (!vec_cst_ctor_to_array (arg0, elts)
13632 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13633 return NULL_TREE;
13635 for (i = 0; i < nelts; i++)
13637 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13638 ? NOP_EXPR : FIX_TRUNC_EXPR,
13639 TREE_TYPE (type), elts[i]);
13640 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13641 return NULL_TREE;
13644 return build_vector (type, elts);
13647 case VEC_WIDEN_MULT_LO_EXPR:
13648 case VEC_WIDEN_MULT_HI_EXPR:
13649 case VEC_WIDEN_MULT_EVEN_EXPR:
13650 case VEC_WIDEN_MULT_ODD_EXPR:
13652 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13653 unsigned int out, ofs, scale;
13654 tree *elts;
13656 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13657 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13658 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13659 return NULL_TREE;
13661 elts = XALLOCAVEC (tree, nelts * 4);
13662 if (!vec_cst_ctor_to_array (arg0, elts)
13663 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13664 return NULL_TREE;
13666 if (code == VEC_WIDEN_MULT_LO_EXPR)
13667 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13668 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13669 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13670 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13671 scale = 1, ofs = 0;
13672 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13673 scale = 1, ofs = 1;
13675 for (out = 0; out < nelts; out++)
13677 unsigned int in1 = (out << scale) + ofs;
13678 unsigned int in2 = in1 + nelts * 2;
13679 tree t1, t2;
13681 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13682 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13684 if (t1 == NULL_TREE || t2 == NULL_TREE)
13685 return NULL_TREE;
13686 elts[out] = const_binop (MULT_EXPR, t1, t2);
13687 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13688 return NULL_TREE;
13691 return build_vector (type, elts);
13694 default:
13695 return NULL_TREE;
13696 } /* switch (code) */
13699 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13700 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13701 of GOTO_EXPR. */
13703 static tree
13704 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13706 switch (TREE_CODE (*tp))
13708 case LABEL_EXPR:
13709 return *tp;
13711 case GOTO_EXPR:
13712 *walk_subtrees = 0;
13714 /* ... fall through ... */
13716 default:
13717 return NULL_TREE;
13721 /* Return whether the sub-tree ST contains a label which is accessible from
13722 outside the sub-tree. */
13724 static bool
13725 contains_label_p (tree st)
13727 return
13728 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13731 /* Fold a ternary expression of code CODE and type TYPE with operands
13732 OP0, OP1, and OP2. Return the folded expression if folding is
13733 successful. Otherwise, return NULL_TREE. */
13735 tree
13736 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13737 tree op0, tree op1, tree op2)
13739 tree tem;
13740 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13741 enum tree_code_class kind = TREE_CODE_CLASS (code);
13743 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13744 && TREE_CODE_LENGTH (code) == 3);
13746 /* Strip any conversions that don't change the mode. This is safe
13747 for every expression, except for a comparison expression because
13748 its signedness is derived from its operands. So, in the latter
13749 case, only strip conversions that don't change the signedness.
13751 Note that this is done as an internal manipulation within the
13752 constant folder, in order to find the simplest representation of
13753 the arguments so that their form can be studied. In any cases,
13754 the appropriate type conversions should be put back in the tree
13755 that will get out of the constant folder. */
13756 if (op0)
13758 arg0 = op0;
13759 STRIP_NOPS (arg0);
13762 if (op1)
13764 arg1 = op1;
13765 STRIP_NOPS (arg1);
13768 if (op2)
13770 arg2 = op2;
13771 STRIP_NOPS (arg2);
13774 switch (code)
13776 case COMPONENT_REF:
13777 if (TREE_CODE (arg0) == CONSTRUCTOR
13778 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13780 unsigned HOST_WIDE_INT idx;
13781 tree field, value;
13782 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13783 if (field == arg1)
13784 return value;
13786 return NULL_TREE;
13788 case COND_EXPR:
13789 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13790 so all simple results must be passed through pedantic_non_lvalue. */
13791 if (TREE_CODE (arg0) == INTEGER_CST)
13793 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13794 tem = integer_zerop (arg0) ? op2 : op1;
13795 /* Only optimize constant conditions when the selected branch
13796 has the same type as the COND_EXPR. This avoids optimizing
13797 away "c ? x : throw", where the throw has a void type.
13798 Avoid throwing away that operand which contains label. */
13799 if ((!TREE_SIDE_EFFECTS (unused_op)
13800 || !contains_label_p (unused_op))
13801 && (! VOID_TYPE_P (TREE_TYPE (tem))
13802 || VOID_TYPE_P (type)))
13803 return pedantic_non_lvalue_loc (loc, tem);
13804 return NULL_TREE;
13806 if (operand_equal_p (arg1, op2, 0))
13807 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13809 /* If we have A op B ? A : C, we may be able to convert this to a
13810 simpler expression, depending on the operation and the values
13811 of B and C. Signed zeros prevent all of these transformations,
13812 for reasons given above each one.
13814 Also try swapping the arguments and inverting the conditional. */
13815 if (COMPARISON_CLASS_P (arg0)
13816 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13817 arg1, TREE_OPERAND (arg0, 1))
13818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13820 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13821 if (tem)
13822 return tem;
13825 if (COMPARISON_CLASS_P (arg0)
13826 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13827 op2,
13828 TREE_OPERAND (arg0, 1))
13829 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13831 location_t loc0 = expr_location_or (arg0, loc);
13832 tem = fold_truth_not_expr (loc0, arg0);
13833 if (tem && COMPARISON_CLASS_P (tem))
13835 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13836 if (tem)
13837 return tem;
13841 /* If the second operand is simpler than the third, swap them
13842 since that produces better jump optimization results. */
13843 if (truth_value_p (TREE_CODE (arg0))
13844 && tree_swap_operands_p (op1, op2, false))
13846 location_t loc0 = expr_location_or (arg0, loc);
13847 /* See if this can be inverted. If it can't, possibly because
13848 it was a floating-point inequality comparison, don't do
13849 anything. */
13850 tem = fold_truth_not_expr (loc0, arg0);
13851 if (tem)
13852 return fold_build3_loc (loc, code, type, tem, op2, op1);
13855 /* Convert A ? 1 : 0 to simply A. */
13856 if (integer_onep (op1)
13857 && integer_zerop (op2)
13858 /* If we try to convert OP0 to our type, the
13859 call to fold will try to move the conversion inside
13860 a COND, which will recurse. In that case, the COND_EXPR
13861 is probably the best choice, so leave it alone. */
13862 && type == TREE_TYPE (arg0))
13863 return pedantic_non_lvalue_loc (loc, arg0);
13865 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13866 over COND_EXPR in cases such as floating point comparisons. */
13867 if (integer_zerop (op1)
13868 && integer_onep (op2)
13869 && truth_value_p (TREE_CODE (arg0)))
13870 return pedantic_non_lvalue_loc (loc,
13871 fold_convert_loc (loc, type,
13872 invert_truthvalue_loc (loc,
13873 arg0)));
13875 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13876 if (TREE_CODE (arg0) == LT_EXPR
13877 && integer_zerop (TREE_OPERAND (arg0, 1))
13878 && integer_zerop (op2)
13879 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13881 /* sign_bit_p only checks ARG1 bits within A's precision.
13882 If <sign bit of A> has wider type than A, bits outside
13883 of A's precision in <sign bit of A> need to be checked.
13884 If they are all 0, this optimization needs to be done
13885 in unsigned A's type, if they are all 1 in signed A's type,
13886 otherwise this can't be done. */
13887 if (TYPE_PRECISION (TREE_TYPE (tem))
13888 < TYPE_PRECISION (TREE_TYPE (arg1))
13889 && TYPE_PRECISION (TREE_TYPE (tem))
13890 < TYPE_PRECISION (type))
13892 unsigned HOST_WIDE_INT mask_lo;
13893 HOST_WIDE_INT mask_hi;
13894 int inner_width, outer_width;
13895 tree tem_type;
13897 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13898 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13899 if (outer_width > TYPE_PRECISION (type))
13900 outer_width = TYPE_PRECISION (type);
13902 if (outer_width > HOST_BITS_PER_WIDE_INT)
13904 mask_hi = ((unsigned HOST_WIDE_INT) -1
13905 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13906 mask_lo = -1;
13908 else
13910 mask_hi = 0;
13911 mask_lo = ((unsigned HOST_WIDE_INT) -1
13912 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13914 if (inner_width > HOST_BITS_PER_WIDE_INT)
13916 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13917 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13918 mask_lo = 0;
13920 else
13921 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13922 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13924 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13925 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13927 tem_type = signed_type_for (TREE_TYPE (tem));
13928 tem = fold_convert_loc (loc, tem_type, tem);
13930 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13931 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13933 tem_type = unsigned_type_for (TREE_TYPE (tem));
13934 tem = fold_convert_loc (loc, tem_type, tem);
13936 else
13937 tem = NULL;
13940 if (tem)
13941 return
13942 fold_convert_loc (loc, type,
13943 fold_build2_loc (loc, BIT_AND_EXPR,
13944 TREE_TYPE (tem), tem,
13945 fold_convert_loc (loc,
13946 TREE_TYPE (tem),
13947 arg1)));
13950 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13951 already handled above. */
13952 if (TREE_CODE (arg0) == BIT_AND_EXPR
13953 && integer_onep (TREE_OPERAND (arg0, 1))
13954 && integer_zerop (op2)
13955 && integer_pow2p (arg1))
13957 tree tem = TREE_OPERAND (arg0, 0);
13958 STRIP_NOPS (tem);
13959 if (TREE_CODE (tem) == RSHIFT_EXPR
13960 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13961 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13962 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13963 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13964 TREE_OPERAND (tem, 0), arg1);
13967 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13968 is probably obsolete because the first operand should be a
13969 truth value (that's why we have the two cases above), but let's
13970 leave it in until we can confirm this for all front-ends. */
13971 if (integer_zerop (op2)
13972 && TREE_CODE (arg0) == NE_EXPR
13973 && integer_zerop (TREE_OPERAND (arg0, 1))
13974 && integer_pow2p (arg1)
13975 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13976 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13977 arg1, OEP_ONLY_CONST))
13978 return pedantic_non_lvalue_loc (loc,
13979 fold_convert_loc (loc, type,
13980 TREE_OPERAND (arg0, 0)));
13982 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13983 if (integer_zerop (op2)
13984 && truth_value_p (TREE_CODE (arg0))
13985 && truth_value_p (TREE_CODE (arg1)))
13986 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13987 fold_convert_loc (loc, type, arg0),
13988 arg1);
13990 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13991 if (integer_onep (op2)
13992 && truth_value_p (TREE_CODE (arg0))
13993 && truth_value_p (TREE_CODE (arg1)))
13995 location_t loc0 = expr_location_or (arg0, loc);
13996 /* Only perform transformation if ARG0 is easily inverted. */
13997 tem = fold_truth_not_expr (loc0, arg0);
13998 if (tem)
13999 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14000 fold_convert_loc (loc, type, tem),
14001 arg1);
14004 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14005 if (integer_zerop (arg1)
14006 && truth_value_p (TREE_CODE (arg0))
14007 && truth_value_p (TREE_CODE (op2)))
14009 location_t loc0 = expr_location_or (arg0, loc);
14010 /* Only perform transformation if ARG0 is easily inverted. */
14011 tem = fold_truth_not_expr (loc0, arg0);
14012 if (tem)
14013 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14014 fold_convert_loc (loc, type, tem),
14015 op2);
14018 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14019 if (integer_onep (arg1)
14020 && truth_value_p (TREE_CODE (arg0))
14021 && truth_value_p (TREE_CODE (op2)))
14022 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14023 fold_convert_loc (loc, type, arg0),
14024 op2);
14026 return NULL_TREE;
14028 case CALL_EXPR:
14029 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14030 of fold_ternary on them. */
14031 gcc_unreachable ();
14033 case BIT_FIELD_REF:
14034 if ((TREE_CODE (arg0) == VECTOR_CST
14035 || (TREE_CODE (arg0) == CONSTRUCTOR
14036 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14037 && (type == TREE_TYPE (TREE_TYPE (arg0))
14038 || (TREE_CODE (type) == VECTOR_TYPE
14039 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14041 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14042 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14043 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14044 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14046 if (n != 0
14047 && (idx % width) == 0
14048 && (n % width) == 0
14049 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14051 idx = idx / width;
14052 n = n / width;
14053 if (TREE_CODE (type) == VECTOR_TYPE)
14055 if (TREE_CODE (arg0) == VECTOR_CST)
14057 tree *vals = XALLOCAVEC (tree, n);
14058 unsigned i;
14059 for (i = 0; i < n; ++i)
14060 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14061 return build_vector (type, vals);
14063 else
14065 VEC(constructor_elt, gc) *vals;
14066 unsigned i;
14067 if (CONSTRUCTOR_NELTS (arg0) == 0)
14068 return build_constructor (type, NULL);
14069 if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14070 0)->value))
14071 != VECTOR_TYPE)
14073 vals = VEC_alloc (constructor_elt, gc, n);
14074 for (i = 0;
14075 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14076 ++i)
14077 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14078 CONSTRUCTOR_ELT
14079 (arg0, idx + i)->value);
14080 return build_constructor (type, vals);
14084 else if (n == 1)
14086 if (TREE_CODE (arg0) == VECTOR_CST)
14087 return VECTOR_CST_ELT (arg0, idx);
14088 else if (CONSTRUCTOR_NELTS (arg0) == 0)
14089 return build_zero_cst (type);
14090 else if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14091 0)->value))
14092 != VECTOR_TYPE)
14094 if (idx < CONSTRUCTOR_NELTS (arg0))
14095 return CONSTRUCTOR_ELT (arg0, idx)->value;
14096 return build_zero_cst (type);
14102 /* A bit-field-ref that referenced the full argument can be stripped. */
14103 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14104 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14105 && integer_zerop (op2))
14106 return fold_convert_loc (loc, type, arg0);
14108 /* On constants we can use native encode/interpret to constant
14109 fold (nearly) all BIT_FIELD_REFs. */
14110 if (CONSTANT_CLASS_P (arg0)
14111 && can_native_interpret_type_p (type)
14112 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14113 /* This limitation should not be necessary, we just need to
14114 round this up to mode size. */
14115 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14116 /* Need bit-shifting of the buffer to relax the following. */
14117 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14119 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14120 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14121 unsigned HOST_WIDE_INT clen;
14122 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14123 /* ??? We cannot tell native_encode_expr to start at
14124 some random byte only. So limit us to a reasonable amount
14125 of work. */
14126 if (clen <= 4096)
14128 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14129 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14130 if (len > 0
14131 && len * BITS_PER_UNIT >= bitpos + bitsize)
14133 tree v = native_interpret_expr (type,
14134 b + bitpos / BITS_PER_UNIT,
14135 bitsize / BITS_PER_UNIT);
14136 if (v)
14137 return v;
14142 return NULL_TREE;
14144 case FMA_EXPR:
14145 /* For integers we can decompose the FMA if possible. */
14146 if (TREE_CODE (arg0) == INTEGER_CST
14147 && TREE_CODE (arg1) == INTEGER_CST)
14148 return fold_build2_loc (loc, PLUS_EXPR, type,
14149 const_binop (MULT_EXPR, arg0, arg1), arg2);
14150 if (integer_zerop (arg2))
14151 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14153 return fold_fma (loc, type, arg0, arg1, arg2);
14155 case VEC_PERM_EXPR:
14156 if (TREE_CODE (arg2) == VECTOR_CST)
14158 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14159 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14160 tree t;
14161 bool need_mask_canon = false;
14162 bool all_in_vec0 = true;
14163 bool all_in_vec1 = true;
14164 bool maybe_identity = true;
14165 bool single_arg = (op0 == op1);
14166 bool changed = false;
14168 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14169 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14170 for (i = 0; i < nelts; i++)
14172 tree val = VECTOR_CST_ELT (arg2, i);
14173 if (TREE_CODE (val) != INTEGER_CST)
14174 return NULL_TREE;
14176 sel[i] = TREE_INT_CST_LOW (val) & mask;
14177 if (TREE_INT_CST_HIGH (val)
14178 || ((unsigned HOST_WIDE_INT)
14179 TREE_INT_CST_LOW (val) != sel[i]))
14180 need_mask_canon = true;
14182 if (sel[i] < nelts)
14183 all_in_vec1 = false;
14184 else
14185 all_in_vec0 = false;
14187 if ((sel[i] & (nelts-1)) != i)
14188 maybe_identity = false;
14191 if (maybe_identity)
14193 if (all_in_vec0)
14194 return op0;
14195 if (all_in_vec1)
14196 return op1;
14199 if (all_in_vec0)
14200 op1 = op0;
14201 else if (all_in_vec1)
14203 op0 = op1;
14204 for (i = 0; i < nelts; i++)
14205 sel[i] -= nelts;
14206 need_mask_canon = true;
14209 if ((TREE_CODE (op0) == VECTOR_CST
14210 || TREE_CODE (op0) == CONSTRUCTOR)
14211 && (TREE_CODE (op1) == VECTOR_CST
14212 || TREE_CODE (op1) == CONSTRUCTOR))
14214 t = fold_vec_perm (type, op0, op1, sel);
14215 if (t != NULL_TREE)
14216 return t;
14219 if (op0 == op1 && !single_arg)
14220 changed = true;
14222 if (need_mask_canon && arg2 == op2)
14224 tree *tsel = XALLOCAVEC (tree, nelts);
14225 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14226 for (i = 0; i < nelts; i++)
14227 tsel[i] = build_int_cst (eltype, sel[i]);
14228 op2 = build_vector (TREE_TYPE (arg2), tsel);
14229 changed = true;
14232 if (changed)
14233 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14235 return NULL_TREE;
14237 default:
14238 return NULL_TREE;
14239 } /* switch (code) */
14242 /* Perform constant folding and related simplification of EXPR.
14243 The related simplifications include x*1 => x, x*0 => 0, etc.,
14244 and application of the associative law.
14245 NOP_EXPR conversions may be removed freely (as long as we
14246 are careful not to change the type of the overall expression).
14247 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14248 but we can constant-fold them if they have constant operands. */
14250 #ifdef ENABLE_FOLD_CHECKING
14251 # define fold(x) fold_1 (x)
14252 static tree fold_1 (tree);
14253 static
14254 #endif
14255 tree
14256 fold (tree expr)
14258 const tree t = expr;
14259 enum tree_code code = TREE_CODE (t);
14260 enum tree_code_class kind = TREE_CODE_CLASS (code);
14261 tree tem;
14262 location_t loc = EXPR_LOCATION (expr);
14264 /* Return right away if a constant. */
14265 if (kind == tcc_constant)
14266 return t;
14268 /* CALL_EXPR-like objects with variable numbers of operands are
14269 treated specially. */
14270 if (kind == tcc_vl_exp)
14272 if (code == CALL_EXPR)
14274 tem = fold_call_expr (loc, expr, false);
14275 return tem ? tem : expr;
14277 return expr;
14280 if (IS_EXPR_CODE_CLASS (kind))
14282 tree type = TREE_TYPE (t);
14283 tree op0, op1, op2;
14285 switch (TREE_CODE_LENGTH (code))
14287 case 1:
14288 op0 = TREE_OPERAND (t, 0);
14289 tem = fold_unary_loc (loc, code, type, op0);
14290 return tem ? tem : expr;
14291 case 2:
14292 op0 = TREE_OPERAND (t, 0);
14293 op1 = TREE_OPERAND (t, 1);
14294 tem = fold_binary_loc (loc, code, type, op0, op1);
14295 return tem ? tem : expr;
14296 case 3:
14297 op0 = TREE_OPERAND (t, 0);
14298 op1 = TREE_OPERAND (t, 1);
14299 op2 = TREE_OPERAND (t, 2);
14300 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14301 return tem ? tem : expr;
14302 default:
14303 break;
14307 switch (code)
14309 case ARRAY_REF:
14311 tree op0 = TREE_OPERAND (t, 0);
14312 tree op1 = TREE_OPERAND (t, 1);
14314 if (TREE_CODE (op1) == INTEGER_CST
14315 && TREE_CODE (op0) == CONSTRUCTOR
14316 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14318 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14319 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14320 unsigned HOST_WIDE_INT begin = 0;
14322 /* Find a matching index by means of a binary search. */
14323 while (begin != end)
14325 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14326 tree index = VEC_index (constructor_elt, elts, middle).index;
14328 if (TREE_CODE (index) == INTEGER_CST
14329 && tree_int_cst_lt (index, op1))
14330 begin = middle + 1;
14331 else if (TREE_CODE (index) == INTEGER_CST
14332 && tree_int_cst_lt (op1, index))
14333 end = middle;
14334 else if (TREE_CODE (index) == RANGE_EXPR
14335 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14336 begin = middle + 1;
14337 else if (TREE_CODE (index) == RANGE_EXPR
14338 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14339 end = middle;
14340 else
14341 return VEC_index (constructor_elt, elts, middle).value;
14345 return t;
14348 case CONST_DECL:
14349 return fold (DECL_INITIAL (t));
14351 default:
14352 return t;
14353 } /* switch (code) */
14356 #ifdef ENABLE_FOLD_CHECKING
14357 #undef fold
14359 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14360 hash_table <pointer_hash <tree_node> >);
14361 static void fold_check_failed (const_tree, const_tree);
14362 void print_fold_checksum (const_tree);
14364 /* When --enable-checking=fold, compute a digest of expr before
14365 and after actual fold call to see if fold did not accidentally
14366 change original expr. */
14368 tree
14369 fold (tree expr)
14371 tree ret;
14372 struct md5_ctx ctx;
14373 unsigned char checksum_before[16], checksum_after[16];
14374 hash_table <pointer_hash <tree_node> > ht;
14376 ht.create (32);
14377 md5_init_ctx (&ctx);
14378 fold_checksum_tree (expr, &ctx, ht);
14379 md5_finish_ctx (&ctx, checksum_before);
14380 ht.empty ();
14382 ret = fold_1 (expr);
14384 md5_init_ctx (&ctx);
14385 fold_checksum_tree (expr, &ctx, ht);
14386 md5_finish_ctx (&ctx, checksum_after);
14387 ht.dispose ();
14389 if (memcmp (checksum_before, checksum_after, 16))
14390 fold_check_failed (expr, ret);
14392 return ret;
14395 void
14396 print_fold_checksum (const_tree expr)
14398 struct md5_ctx ctx;
14399 unsigned char checksum[16], cnt;
14400 hash_table <pointer_hash <tree_node> > ht;
14402 ht.create (32);
14403 md5_init_ctx (&ctx);
14404 fold_checksum_tree (expr, &ctx, ht);
14405 md5_finish_ctx (&ctx, checksum);
14406 ht.dispose ();
14407 for (cnt = 0; cnt < 16; ++cnt)
14408 fprintf (stderr, "%02x", checksum[cnt]);
14409 putc ('\n', stderr);
14412 static void
14413 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14415 internal_error ("fold check: original tree changed by fold");
14418 static void
14419 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14420 hash_table <pointer_hash <tree_node> > ht)
14422 tree_node **slot;
14423 enum tree_code code;
14424 union tree_node buf;
14425 int i, len;
14427 recursive_label:
14428 if (expr == NULL)
14429 return;
14430 slot = ht.find_slot (expr, INSERT);
14431 if (*slot != NULL)
14432 return;
14433 *slot = CONST_CAST_TREE (expr);
14434 code = TREE_CODE (expr);
14435 if (TREE_CODE_CLASS (code) == tcc_declaration
14436 && DECL_ASSEMBLER_NAME_SET_P (expr))
14438 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14439 memcpy ((char *) &buf, expr, tree_size (expr));
14440 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14441 expr = (tree) &buf;
14443 else if (TREE_CODE_CLASS (code) == tcc_type
14444 && (TYPE_POINTER_TO (expr)
14445 || TYPE_REFERENCE_TO (expr)
14446 || TYPE_CACHED_VALUES_P (expr)
14447 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14448 || TYPE_NEXT_VARIANT (expr)))
14450 /* Allow these fields to be modified. */
14451 tree tmp;
14452 memcpy ((char *) &buf, expr, tree_size (expr));
14453 expr = tmp = (tree) &buf;
14454 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14455 TYPE_POINTER_TO (tmp) = NULL;
14456 TYPE_REFERENCE_TO (tmp) = NULL;
14457 TYPE_NEXT_VARIANT (tmp) = NULL;
14458 if (TYPE_CACHED_VALUES_P (tmp))
14460 TYPE_CACHED_VALUES_P (tmp) = 0;
14461 TYPE_CACHED_VALUES (tmp) = NULL;
14464 md5_process_bytes (expr, tree_size (expr), ctx);
14465 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14466 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14467 if (TREE_CODE_CLASS (code) != tcc_type
14468 && TREE_CODE_CLASS (code) != tcc_declaration
14469 && code != TREE_LIST
14470 && code != SSA_NAME
14471 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14472 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14473 switch (TREE_CODE_CLASS (code))
14475 case tcc_constant:
14476 switch (code)
14478 case STRING_CST:
14479 md5_process_bytes (TREE_STRING_POINTER (expr),
14480 TREE_STRING_LENGTH (expr), ctx);
14481 break;
14482 case COMPLEX_CST:
14483 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14484 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14485 break;
14486 case VECTOR_CST:
14487 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14488 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14489 break;
14490 default:
14491 break;
14493 break;
14494 case tcc_exceptional:
14495 switch (code)
14497 case TREE_LIST:
14498 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14499 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14500 expr = TREE_CHAIN (expr);
14501 goto recursive_label;
14502 break;
14503 case TREE_VEC:
14504 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14505 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14506 break;
14507 default:
14508 break;
14510 break;
14511 case tcc_expression:
14512 case tcc_reference:
14513 case tcc_comparison:
14514 case tcc_unary:
14515 case tcc_binary:
14516 case tcc_statement:
14517 case tcc_vl_exp:
14518 len = TREE_OPERAND_LENGTH (expr);
14519 for (i = 0; i < len; ++i)
14520 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14521 break;
14522 case tcc_declaration:
14523 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14524 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14525 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14527 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14528 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14529 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14530 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14531 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14533 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14534 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14536 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14538 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14539 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14540 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14542 break;
14543 case tcc_type:
14544 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14545 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14546 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14547 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14548 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14549 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14550 if (INTEGRAL_TYPE_P (expr)
14551 || SCALAR_FLOAT_TYPE_P (expr))
14553 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14554 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14556 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14557 if (TREE_CODE (expr) == RECORD_TYPE
14558 || TREE_CODE (expr) == UNION_TYPE
14559 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14560 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14561 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14562 break;
14563 default:
14564 break;
14568 /* Helper function for outputting the checksum of a tree T. When
14569 debugging with gdb, you can "define mynext" to be "next" followed
14570 by "call debug_fold_checksum (op0)", then just trace down till the
14571 outputs differ. */
14573 DEBUG_FUNCTION void
14574 debug_fold_checksum (const_tree t)
14576 int i;
14577 unsigned char checksum[16];
14578 struct md5_ctx ctx;
14579 hash_table <pointer_hash <tree_node> > ht;
14580 ht.create (32);
14582 md5_init_ctx (&ctx);
14583 fold_checksum_tree (t, &ctx, ht);
14584 md5_finish_ctx (&ctx, checksum);
14585 ht.empty ();
14587 for (i = 0; i < 16; i++)
14588 fprintf (stderr, "%d ", checksum[i]);
14590 fprintf (stderr, "\n");
14593 #endif
14595 /* Fold a unary tree expression with code CODE of type TYPE with an
14596 operand OP0. LOC is the location of the resulting expression.
14597 Return a folded expression if successful. Otherwise, return a tree
14598 expression with code CODE of type TYPE with an operand OP0. */
14600 tree
14601 fold_build1_stat_loc (location_t loc,
14602 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14604 tree tem;
14605 #ifdef ENABLE_FOLD_CHECKING
14606 unsigned char checksum_before[16], checksum_after[16];
14607 struct md5_ctx ctx;
14608 hash_table <pointer_hash <tree_node> > ht;
14610 ht.create (32);
14611 md5_init_ctx (&ctx);
14612 fold_checksum_tree (op0, &ctx, ht);
14613 md5_finish_ctx (&ctx, checksum_before);
14614 ht.empty ();
14615 #endif
14617 tem = fold_unary_loc (loc, code, type, op0);
14618 if (!tem)
14619 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14621 #ifdef ENABLE_FOLD_CHECKING
14622 md5_init_ctx (&ctx);
14623 fold_checksum_tree (op0, &ctx, ht);
14624 md5_finish_ctx (&ctx, checksum_after);
14625 ht.dispose ();
14627 if (memcmp (checksum_before, checksum_after, 16))
14628 fold_check_failed (op0, tem);
14629 #endif
14630 return tem;
14633 /* Fold a binary tree expression with code CODE of type TYPE with
14634 operands OP0 and OP1. LOC is the location of the resulting
14635 expression. Return a folded expression if successful. Otherwise,
14636 return a tree expression with code CODE of type TYPE with operands
14637 OP0 and OP1. */
14639 tree
14640 fold_build2_stat_loc (location_t loc,
14641 enum tree_code code, tree type, tree op0, tree op1
14642 MEM_STAT_DECL)
14644 tree tem;
14645 #ifdef ENABLE_FOLD_CHECKING
14646 unsigned char checksum_before_op0[16],
14647 checksum_before_op1[16],
14648 checksum_after_op0[16],
14649 checksum_after_op1[16];
14650 struct md5_ctx ctx;
14651 hash_table <pointer_hash <tree_node> > ht;
14653 ht.create (32);
14654 md5_init_ctx (&ctx);
14655 fold_checksum_tree (op0, &ctx, ht);
14656 md5_finish_ctx (&ctx, checksum_before_op0);
14657 ht.empty ();
14659 md5_init_ctx (&ctx);
14660 fold_checksum_tree (op1, &ctx, ht);
14661 md5_finish_ctx (&ctx, checksum_before_op1);
14662 ht.empty ();
14663 #endif
14665 tem = fold_binary_loc (loc, code, type, op0, op1);
14666 if (!tem)
14667 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14669 #ifdef ENABLE_FOLD_CHECKING
14670 md5_init_ctx (&ctx);
14671 fold_checksum_tree (op0, &ctx, ht);
14672 md5_finish_ctx (&ctx, checksum_after_op0);
14673 ht.empty ();
14675 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14676 fold_check_failed (op0, tem);
14678 md5_init_ctx (&ctx);
14679 fold_checksum_tree (op1, &ctx, ht);
14680 md5_finish_ctx (&ctx, checksum_after_op1);
14681 ht.dispose ();
14683 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14684 fold_check_failed (op1, tem);
14685 #endif
14686 return tem;
14689 /* Fold a ternary tree expression with code CODE of type TYPE with
14690 operands OP0, OP1, and OP2. Return a folded expression if
14691 successful. Otherwise, return a tree expression with code CODE of
14692 type TYPE with operands OP0, OP1, and OP2. */
14694 tree
14695 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14696 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14698 tree tem;
14699 #ifdef ENABLE_FOLD_CHECKING
14700 unsigned char checksum_before_op0[16],
14701 checksum_before_op1[16],
14702 checksum_before_op2[16],
14703 checksum_after_op0[16],
14704 checksum_after_op1[16],
14705 checksum_after_op2[16];
14706 struct md5_ctx ctx;
14707 hash_table <pointer_hash <tree_node> > ht;
14709 ht.create (32);
14710 md5_init_ctx (&ctx);
14711 fold_checksum_tree (op0, &ctx, ht);
14712 md5_finish_ctx (&ctx, checksum_before_op0);
14713 ht.empty ();
14715 md5_init_ctx (&ctx);
14716 fold_checksum_tree (op1, &ctx, ht);
14717 md5_finish_ctx (&ctx, checksum_before_op1);
14718 ht.empty ();
14720 md5_init_ctx (&ctx);
14721 fold_checksum_tree (op2, &ctx, ht);
14722 md5_finish_ctx (&ctx, checksum_before_op2);
14723 ht.empty ();
14724 #endif
14726 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14727 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14728 if (!tem)
14729 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14731 #ifdef ENABLE_FOLD_CHECKING
14732 md5_init_ctx (&ctx);
14733 fold_checksum_tree (op0, &ctx, ht);
14734 md5_finish_ctx (&ctx, checksum_after_op0);
14735 ht.empty ();
14737 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14738 fold_check_failed (op0, tem);
14740 md5_init_ctx (&ctx);
14741 fold_checksum_tree (op1, &ctx, ht);
14742 md5_finish_ctx (&ctx, checksum_after_op1);
14743 ht.empty ();
14745 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14746 fold_check_failed (op1, tem);
14748 md5_init_ctx (&ctx);
14749 fold_checksum_tree (op2, &ctx, ht);
14750 md5_finish_ctx (&ctx, checksum_after_op2);
14751 ht.dispose ();
14753 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14754 fold_check_failed (op2, tem);
14755 #endif
14756 return tem;
14759 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14760 arguments in ARGARRAY, and a null static chain.
14761 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14762 of type TYPE from the given operands as constructed by build_call_array. */
14764 tree
14765 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14766 int nargs, tree *argarray)
14768 tree tem;
14769 #ifdef ENABLE_FOLD_CHECKING
14770 unsigned char checksum_before_fn[16],
14771 checksum_before_arglist[16],
14772 checksum_after_fn[16],
14773 checksum_after_arglist[16];
14774 struct md5_ctx ctx;
14775 hash_table <pointer_hash <tree_node> > ht;
14776 int i;
14778 ht.create (32);
14779 md5_init_ctx (&ctx);
14780 fold_checksum_tree (fn, &ctx, ht);
14781 md5_finish_ctx (&ctx, checksum_before_fn);
14782 ht.empty ();
14784 md5_init_ctx (&ctx);
14785 for (i = 0; i < nargs; i++)
14786 fold_checksum_tree (argarray[i], &ctx, ht);
14787 md5_finish_ctx (&ctx, checksum_before_arglist);
14788 ht.empty ();
14789 #endif
14791 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14793 #ifdef ENABLE_FOLD_CHECKING
14794 md5_init_ctx (&ctx);
14795 fold_checksum_tree (fn, &ctx, ht);
14796 md5_finish_ctx (&ctx, checksum_after_fn);
14797 ht.empty ();
14799 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14800 fold_check_failed (fn, tem);
14802 md5_init_ctx (&ctx);
14803 for (i = 0; i < nargs; i++)
14804 fold_checksum_tree (argarray[i], &ctx, ht);
14805 md5_finish_ctx (&ctx, checksum_after_arglist);
14806 ht.dispose ();
14808 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14809 fold_check_failed (NULL_TREE, tem);
14810 #endif
14811 return tem;
14814 /* Perform constant folding and related simplification of initializer
14815 expression EXPR. These behave identically to "fold_buildN" but ignore
14816 potential run-time traps and exceptions that fold must preserve. */
14818 #define START_FOLD_INIT \
14819 int saved_signaling_nans = flag_signaling_nans;\
14820 int saved_trapping_math = flag_trapping_math;\
14821 int saved_rounding_math = flag_rounding_math;\
14822 int saved_trapv = flag_trapv;\
14823 int saved_folding_initializer = folding_initializer;\
14824 flag_signaling_nans = 0;\
14825 flag_trapping_math = 0;\
14826 flag_rounding_math = 0;\
14827 flag_trapv = 0;\
14828 folding_initializer = 1;
14830 #define END_FOLD_INIT \
14831 flag_signaling_nans = saved_signaling_nans;\
14832 flag_trapping_math = saved_trapping_math;\
14833 flag_rounding_math = saved_rounding_math;\
14834 flag_trapv = saved_trapv;\
14835 folding_initializer = saved_folding_initializer;
14837 tree
14838 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14839 tree type, tree op)
14841 tree result;
14842 START_FOLD_INIT;
14844 result = fold_build1_loc (loc, code, type, op);
14846 END_FOLD_INIT;
14847 return result;
14850 tree
14851 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14852 tree type, tree op0, tree op1)
14854 tree result;
14855 START_FOLD_INIT;
14857 result = fold_build2_loc (loc, code, type, op0, op1);
14859 END_FOLD_INIT;
14860 return result;
14863 tree
14864 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14865 tree type, tree op0, tree op1, tree op2)
14867 tree result;
14868 START_FOLD_INIT;
14870 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14872 END_FOLD_INIT;
14873 return result;
14876 tree
14877 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14878 int nargs, tree *argarray)
14880 tree result;
14881 START_FOLD_INIT;
14883 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14885 END_FOLD_INIT;
14886 return result;
14889 #undef START_FOLD_INIT
14890 #undef END_FOLD_INIT
14892 /* Determine if first argument is a multiple of second argument. Return 0 if
14893 it is not, or we cannot easily determined it to be.
14895 An example of the sort of thing we care about (at this point; this routine
14896 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14897 fold cases do now) is discovering that
14899 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14901 is a multiple of
14903 SAVE_EXPR (J * 8)
14905 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14907 This code also handles discovering that
14909 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14911 is a multiple of 8 so we don't have to worry about dealing with a
14912 possible remainder.
14914 Note that we *look* inside a SAVE_EXPR only to determine how it was
14915 calculated; it is not safe for fold to do much of anything else with the
14916 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14917 at run time. For example, the latter example above *cannot* be implemented
14918 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14919 evaluation time of the original SAVE_EXPR is not necessarily the same at
14920 the time the new expression is evaluated. The only optimization of this
14921 sort that would be valid is changing
14923 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14925 divided by 8 to
14927 SAVE_EXPR (I) * SAVE_EXPR (J)
14929 (where the same SAVE_EXPR (J) is used in the original and the
14930 transformed version). */
14933 multiple_of_p (tree type, const_tree top, const_tree bottom)
14935 if (operand_equal_p (top, bottom, 0))
14936 return 1;
14938 if (TREE_CODE (type) != INTEGER_TYPE)
14939 return 0;
14941 switch (TREE_CODE (top))
14943 case BIT_AND_EXPR:
14944 /* Bitwise and provides a power of two multiple. If the mask is
14945 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14946 if (!integer_pow2p (bottom))
14947 return 0;
14948 /* FALLTHRU */
14950 case MULT_EXPR:
14951 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14952 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14954 case PLUS_EXPR:
14955 case MINUS_EXPR:
14956 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14957 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14959 case LSHIFT_EXPR:
14960 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14962 tree op1, t1;
14964 op1 = TREE_OPERAND (top, 1);
14965 /* const_binop may not detect overflow correctly,
14966 so check for it explicitly here. */
14967 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14968 > TREE_INT_CST_LOW (op1)
14969 && TREE_INT_CST_HIGH (op1) == 0
14970 && 0 != (t1 = fold_convert (type,
14971 const_binop (LSHIFT_EXPR,
14972 size_one_node,
14973 op1)))
14974 && !TREE_OVERFLOW (t1))
14975 return multiple_of_p (type, t1, bottom);
14977 return 0;
14979 case NOP_EXPR:
14980 /* Can't handle conversions from non-integral or wider integral type. */
14981 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14982 || (TYPE_PRECISION (type)
14983 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14984 return 0;
14986 /* .. fall through ... */
14988 case SAVE_EXPR:
14989 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14991 case COND_EXPR:
14992 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14993 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14995 case INTEGER_CST:
14996 if (TREE_CODE (bottom) != INTEGER_CST
14997 || integer_zerop (bottom)
14998 || (TYPE_UNSIGNED (type)
14999 && (tree_int_cst_sgn (top) < 0
15000 || tree_int_cst_sgn (bottom) < 0)))
15001 return 0;
15002 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15003 top, bottom));
15005 default:
15006 return 0;
15010 /* Return true if CODE or TYPE is known to be non-negative. */
15012 static bool
15013 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15015 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15016 && truth_value_p (code))
15017 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15018 have a signed:1 type (where the value is -1 and 0). */
15019 return true;
15020 return false;
15023 /* Return true if (CODE OP0) is known to be non-negative. If the return
15024 value is based on the assumption that signed overflow is undefined,
15025 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15026 *STRICT_OVERFLOW_P. */
15028 bool
15029 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15030 bool *strict_overflow_p)
15032 if (TYPE_UNSIGNED (type))
15033 return true;
15035 switch (code)
15037 case ABS_EXPR:
15038 /* We can't return 1 if flag_wrapv is set because
15039 ABS_EXPR<INT_MIN> = INT_MIN. */
15040 if (!INTEGRAL_TYPE_P (type))
15041 return true;
15042 if (TYPE_OVERFLOW_UNDEFINED (type))
15044 *strict_overflow_p = true;
15045 return true;
15047 break;
15049 case NON_LVALUE_EXPR:
15050 case FLOAT_EXPR:
15051 case FIX_TRUNC_EXPR:
15052 return tree_expr_nonnegative_warnv_p (op0,
15053 strict_overflow_p);
15055 case NOP_EXPR:
15057 tree inner_type = TREE_TYPE (op0);
15058 tree outer_type = type;
15060 if (TREE_CODE (outer_type) == REAL_TYPE)
15062 if (TREE_CODE (inner_type) == REAL_TYPE)
15063 return tree_expr_nonnegative_warnv_p (op0,
15064 strict_overflow_p);
15065 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15067 if (TYPE_UNSIGNED (inner_type))
15068 return true;
15069 return tree_expr_nonnegative_warnv_p (op0,
15070 strict_overflow_p);
15073 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15075 if (TREE_CODE (inner_type) == REAL_TYPE)
15076 return tree_expr_nonnegative_warnv_p (op0,
15077 strict_overflow_p);
15078 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15079 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15080 && TYPE_UNSIGNED (inner_type);
15083 break;
15085 default:
15086 return tree_simple_nonnegative_warnv_p (code, type);
15089 /* We don't know sign of `t', so be conservative and return false. */
15090 return false;
15093 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15094 value is based on the assumption that signed overflow is undefined,
15095 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15096 *STRICT_OVERFLOW_P. */
15098 bool
15099 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15100 tree op1, bool *strict_overflow_p)
15102 if (TYPE_UNSIGNED (type))
15103 return true;
15105 switch (code)
15107 case POINTER_PLUS_EXPR:
15108 case PLUS_EXPR:
15109 if (FLOAT_TYPE_P (type))
15110 return (tree_expr_nonnegative_warnv_p (op0,
15111 strict_overflow_p)
15112 && tree_expr_nonnegative_warnv_p (op1,
15113 strict_overflow_p));
15115 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15116 both unsigned and at least 2 bits shorter than the result. */
15117 if (TREE_CODE (type) == INTEGER_TYPE
15118 && TREE_CODE (op0) == NOP_EXPR
15119 && TREE_CODE (op1) == NOP_EXPR)
15121 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15122 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15123 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15124 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15126 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15127 TYPE_PRECISION (inner2)) + 1;
15128 return prec < TYPE_PRECISION (type);
15131 break;
15133 case MULT_EXPR:
15134 if (FLOAT_TYPE_P (type))
15136 /* x * x for floating point x is always non-negative. */
15137 if (operand_equal_p (op0, op1, 0))
15138 return true;
15139 return (tree_expr_nonnegative_warnv_p (op0,
15140 strict_overflow_p)
15141 && tree_expr_nonnegative_warnv_p (op1,
15142 strict_overflow_p));
15145 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15146 both unsigned and their total bits is shorter than the result. */
15147 if (TREE_CODE (type) == INTEGER_TYPE
15148 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15149 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15151 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15152 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15153 : TREE_TYPE (op0);
15154 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15155 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15156 : TREE_TYPE (op1);
15158 bool unsigned0 = TYPE_UNSIGNED (inner0);
15159 bool unsigned1 = TYPE_UNSIGNED (inner1);
15161 if (TREE_CODE (op0) == INTEGER_CST)
15162 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15164 if (TREE_CODE (op1) == INTEGER_CST)
15165 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15167 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15168 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15170 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15171 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15172 : TYPE_PRECISION (inner0);
15174 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15175 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15176 : TYPE_PRECISION (inner1);
15178 return precision0 + precision1 < TYPE_PRECISION (type);
15181 return false;
15183 case BIT_AND_EXPR:
15184 case MAX_EXPR:
15185 return (tree_expr_nonnegative_warnv_p (op0,
15186 strict_overflow_p)
15187 || tree_expr_nonnegative_warnv_p (op1,
15188 strict_overflow_p));
15190 case BIT_IOR_EXPR:
15191 case BIT_XOR_EXPR:
15192 case MIN_EXPR:
15193 case RDIV_EXPR:
15194 case TRUNC_DIV_EXPR:
15195 case CEIL_DIV_EXPR:
15196 case FLOOR_DIV_EXPR:
15197 case ROUND_DIV_EXPR:
15198 return (tree_expr_nonnegative_warnv_p (op0,
15199 strict_overflow_p)
15200 && tree_expr_nonnegative_warnv_p (op1,
15201 strict_overflow_p));
15203 case TRUNC_MOD_EXPR:
15204 case CEIL_MOD_EXPR:
15205 case FLOOR_MOD_EXPR:
15206 case ROUND_MOD_EXPR:
15207 return tree_expr_nonnegative_warnv_p (op0,
15208 strict_overflow_p);
15209 default:
15210 return tree_simple_nonnegative_warnv_p (code, type);
15213 /* We don't know sign of `t', so be conservative and return false. */
15214 return false;
15217 /* Return true if T is known to be non-negative. If the return
15218 value is based on the assumption that signed overflow is undefined,
15219 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15220 *STRICT_OVERFLOW_P. */
15222 bool
15223 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15225 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15226 return true;
15228 switch (TREE_CODE (t))
15230 case INTEGER_CST:
15231 return tree_int_cst_sgn (t) >= 0;
15233 case REAL_CST:
15234 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15236 case FIXED_CST:
15237 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15239 case COND_EXPR:
15240 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15241 strict_overflow_p)
15242 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15243 strict_overflow_p));
15244 default:
15245 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15246 TREE_TYPE (t));
15248 /* We don't know sign of `t', so be conservative and return false. */
15249 return false;
15252 /* Return true if T is known to be non-negative. If the return
15253 value is based on the assumption that signed overflow is undefined,
15254 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15255 *STRICT_OVERFLOW_P. */
15257 bool
15258 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15259 tree arg0, tree arg1, bool *strict_overflow_p)
15261 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15262 switch (DECL_FUNCTION_CODE (fndecl))
15264 CASE_FLT_FN (BUILT_IN_ACOS):
15265 CASE_FLT_FN (BUILT_IN_ACOSH):
15266 CASE_FLT_FN (BUILT_IN_CABS):
15267 CASE_FLT_FN (BUILT_IN_COSH):
15268 CASE_FLT_FN (BUILT_IN_ERFC):
15269 CASE_FLT_FN (BUILT_IN_EXP):
15270 CASE_FLT_FN (BUILT_IN_EXP10):
15271 CASE_FLT_FN (BUILT_IN_EXP2):
15272 CASE_FLT_FN (BUILT_IN_FABS):
15273 CASE_FLT_FN (BUILT_IN_FDIM):
15274 CASE_FLT_FN (BUILT_IN_HYPOT):
15275 CASE_FLT_FN (BUILT_IN_POW10):
15276 CASE_INT_FN (BUILT_IN_FFS):
15277 CASE_INT_FN (BUILT_IN_PARITY):
15278 CASE_INT_FN (BUILT_IN_POPCOUNT):
15279 case BUILT_IN_BSWAP32:
15280 case BUILT_IN_BSWAP64:
15281 /* Always true. */
15282 return true;
15284 CASE_FLT_FN (BUILT_IN_SQRT):
15285 /* sqrt(-0.0) is -0.0. */
15286 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15287 return true;
15288 return tree_expr_nonnegative_warnv_p (arg0,
15289 strict_overflow_p);
15291 CASE_FLT_FN (BUILT_IN_ASINH):
15292 CASE_FLT_FN (BUILT_IN_ATAN):
15293 CASE_FLT_FN (BUILT_IN_ATANH):
15294 CASE_FLT_FN (BUILT_IN_CBRT):
15295 CASE_FLT_FN (BUILT_IN_CEIL):
15296 CASE_FLT_FN (BUILT_IN_ERF):
15297 CASE_FLT_FN (BUILT_IN_EXPM1):
15298 CASE_FLT_FN (BUILT_IN_FLOOR):
15299 CASE_FLT_FN (BUILT_IN_FMOD):
15300 CASE_FLT_FN (BUILT_IN_FREXP):
15301 CASE_FLT_FN (BUILT_IN_ICEIL):
15302 CASE_FLT_FN (BUILT_IN_IFLOOR):
15303 CASE_FLT_FN (BUILT_IN_IRINT):
15304 CASE_FLT_FN (BUILT_IN_IROUND):
15305 CASE_FLT_FN (BUILT_IN_LCEIL):
15306 CASE_FLT_FN (BUILT_IN_LDEXP):
15307 CASE_FLT_FN (BUILT_IN_LFLOOR):
15308 CASE_FLT_FN (BUILT_IN_LLCEIL):
15309 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15310 CASE_FLT_FN (BUILT_IN_LLRINT):
15311 CASE_FLT_FN (BUILT_IN_LLROUND):
15312 CASE_FLT_FN (BUILT_IN_LRINT):
15313 CASE_FLT_FN (BUILT_IN_LROUND):
15314 CASE_FLT_FN (BUILT_IN_MODF):
15315 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15316 CASE_FLT_FN (BUILT_IN_RINT):
15317 CASE_FLT_FN (BUILT_IN_ROUND):
15318 CASE_FLT_FN (BUILT_IN_SCALB):
15319 CASE_FLT_FN (BUILT_IN_SCALBLN):
15320 CASE_FLT_FN (BUILT_IN_SCALBN):
15321 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15322 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15323 CASE_FLT_FN (BUILT_IN_SINH):
15324 CASE_FLT_FN (BUILT_IN_TANH):
15325 CASE_FLT_FN (BUILT_IN_TRUNC):
15326 /* True if the 1st argument is nonnegative. */
15327 return tree_expr_nonnegative_warnv_p (arg0,
15328 strict_overflow_p);
15330 CASE_FLT_FN (BUILT_IN_FMAX):
15331 /* True if the 1st OR 2nd arguments are nonnegative. */
15332 return (tree_expr_nonnegative_warnv_p (arg0,
15333 strict_overflow_p)
15334 || (tree_expr_nonnegative_warnv_p (arg1,
15335 strict_overflow_p)));
15337 CASE_FLT_FN (BUILT_IN_FMIN):
15338 /* True if the 1st AND 2nd arguments are nonnegative. */
15339 return (tree_expr_nonnegative_warnv_p (arg0,
15340 strict_overflow_p)
15341 && (tree_expr_nonnegative_warnv_p (arg1,
15342 strict_overflow_p)));
15344 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15345 /* True if the 2nd argument is nonnegative. */
15346 return tree_expr_nonnegative_warnv_p (arg1,
15347 strict_overflow_p);
15349 CASE_FLT_FN (BUILT_IN_POWI):
15350 /* True if the 1st argument is nonnegative or the second
15351 argument is an even integer. */
15352 if (TREE_CODE (arg1) == INTEGER_CST
15353 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15354 return true;
15355 return tree_expr_nonnegative_warnv_p (arg0,
15356 strict_overflow_p);
15358 CASE_FLT_FN (BUILT_IN_POW):
15359 /* True if the 1st argument is nonnegative or the second
15360 argument is an even integer valued real. */
15361 if (TREE_CODE (arg1) == REAL_CST)
15363 REAL_VALUE_TYPE c;
15364 HOST_WIDE_INT n;
15366 c = TREE_REAL_CST (arg1);
15367 n = real_to_integer (&c);
15368 if ((n & 1) == 0)
15370 REAL_VALUE_TYPE cint;
15371 real_from_integer (&cint, VOIDmode, n,
15372 n < 0 ? -1 : 0, 0);
15373 if (real_identical (&c, &cint))
15374 return true;
15377 return tree_expr_nonnegative_warnv_p (arg0,
15378 strict_overflow_p);
15380 default:
15381 break;
15383 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15384 type);
15387 /* Return true if T is known to be non-negative. If the return
15388 value is based on the assumption that signed overflow is undefined,
15389 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15390 *STRICT_OVERFLOW_P. */
15392 bool
15393 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15395 enum tree_code code = TREE_CODE (t);
15396 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15397 return true;
15399 switch (code)
15401 case TARGET_EXPR:
15403 tree temp = TARGET_EXPR_SLOT (t);
15404 t = TARGET_EXPR_INITIAL (t);
15406 /* If the initializer is non-void, then it's a normal expression
15407 that will be assigned to the slot. */
15408 if (!VOID_TYPE_P (t))
15409 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15411 /* Otherwise, the initializer sets the slot in some way. One common
15412 way is an assignment statement at the end of the initializer. */
15413 while (1)
15415 if (TREE_CODE (t) == BIND_EXPR)
15416 t = expr_last (BIND_EXPR_BODY (t));
15417 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15418 || TREE_CODE (t) == TRY_CATCH_EXPR)
15419 t = expr_last (TREE_OPERAND (t, 0));
15420 else if (TREE_CODE (t) == STATEMENT_LIST)
15421 t = expr_last (t);
15422 else
15423 break;
15425 if (TREE_CODE (t) == MODIFY_EXPR
15426 && TREE_OPERAND (t, 0) == temp)
15427 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15428 strict_overflow_p);
15430 return false;
15433 case CALL_EXPR:
15435 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15436 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15438 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15439 get_callee_fndecl (t),
15440 arg0,
15441 arg1,
15442 strict_overflow_p);
15444 case COMPOUND_EXPR:
15445 case MODIFY_EXPR:
15446 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15447 strict_overflow_p);
15448 case BIND_EXPR:
15449 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15450 strict_overflow_p);
15451 case SAVE_EXPR:
15452 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15453 strict_overflow_p);
15455 default:
15456 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15457 TREE_TYPE (t));
15460 /* We don't know sign of `t', so be conservative and return false. */
15461 return false;
15464 /* Return true if T is known to be non-negative. If the return
15465 value is based on the assumption that signed overflow is undefined,
15466 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15467 *STRICT_OVERFLOW_P. */
15469 bool
15470 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15472 enum tree_code code;
15473 if (t == error_mark_node)
15474 return false;
15476 code = TREE_CODE (t);
15477 switch (TREE_CODE_CLASS (code))
15479 case tcc_binary:
15480 case tcc_comparison:
15481 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15482 TREE_TYPE (t),
15483 TREE_OPERAND (t, 0),
15484 TREE_OPERAND (t, 1),
15485 strict_overflow_p);
15487 case tcc_unary:
15488 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15489 TREE_TYPE (t),
15490 TREE_OPERAND (t, 0),
15491 strict_overflow_p);
15493 case tcc_constant:
15494 case tcc_declaration:
15495 case tcc_reference:
15496 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15498 default:
15499 break;
15502 switch (code)
15504 case TRUTH_AND_EXPR:
15505 case TRUTH_OR_EXPR:
15506 case TRUTH_XOR_EXPR:
15507 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15508 TREE_TYPE (t),
15509 TREE_OPERAND (t, 0),
15510 TREE_OPERAND (t, 1),
15511 strict_overflow_p);
15512 case TRUTH_NOT_EXPR:
15513 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15514 TREE_TYPE (t),
15515 TREE_OPERAND (t, 0),
15516 strict_overflow_p);
15518 case COND_EXPR:
15519 case CONSTRUCTOR:
15520 case OBJ_TYPE_REF:
15521 case ASSERT_EXPR:
15522 case ADDR_EXPR:
15523 case WITH_SIZE_EXPR:
15524 case SSA_NAME:
15525 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15527 default:
15528 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15532 /* Return true if `t' is known to be non-negative. Handle warnings
15533 about undefined signed overflow. */
15535 bool
15536 tree_expr_nonnegative_p (tree t)
15538 bool ret, strict_overflow_p;
15540 strict_overflow_p = false;
15541 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15542 if (strict_overflow_p)
15543 fold_overflow_warning (("assuming signed overflow does not occur when "
15544 "determining that expression is always "
15545 "non-negative"),
15546 WARN_STRICT_OVERFLOW_MISC);
15547 return ret;
15551 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15552 For floating point we further ensure that T is not denormal.
15553 Similar logic is present in nonzero_address in rtlanal.h.
15555 If the return value is based on the assumption that signed overflow
15556 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15557 change *STRICT_OVERFLOW_P. */
15559 bool
15560 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15561 bool *strict_overflow_p)
15563 switch (code)
15565 case ABS_EXPR:
15566 return tree_expr_nonzero_warnv_p (op0,
15567 strict_overflow_p);
15569 case NOP_EXPR:
15571 tree inner_type = TREE_TYPE (op0);
15572 tree outer_type = type;
15574 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15575 && tree_expr_nonzero_warnv_p (op0,
15576 strict_overflow_p));
15578 break;
15580 case NON_LVALUE_EXPR:
15581 return tree_expr_nonzero_warnv_p (op0,
15582 strict_overflow_p);
15584 default:
15585 break;
15588 return false;
15591 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15592 For floating point we further ensure that T is not denormal.
15593 Similar logic is present in nonzero_address in rtlanal.h.
15595 If the return value is based on the assumption that signed overflow
15596 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15597 change *STRICT_OVERFLOW_P. */
15599 bool
15600 tree_binary_nonzero_warnv_p (enum tree_code code,
15601 tree type,
15602 tree op0,
15603 tree op1, bool *strict_overflow_p)
15605 bool sub_strict_overflow_p;
15606 switch (code)
15608 case POINTER_PLUS_EXPR:
15609 case PLUS_EXPR:
15610 if (TYPE_OVERFLOW_UNDEFINED (type))
15612 /* With the presence of negative values it is hard
15613 to say something. */
15614 sub_strict_overflow_p = false;
15615 if (!tree_expr_nonnegative_warnv_p (op0,
15616 &sub_strict_overflow_p)
15617 || !tree_expr_nonnegative_warnv_p (op1,
15618 &sub_strict_overflow_p))
15619 return false;
15620 /* One of operands must be positive and the other non-negative. */
15621 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15622 overflows, on a twos-complement machine the sum of two
15623 nonnegative numbers can never be zero. */
15624 return (tree_expr_nonzero_warnv_p (op0,
15625 strict_overflow_p)
15626 || tree_expr_nonzero_warnv_p (op1,
15627 strict_overflow_p));
15629 break;
15631 case MULT_EXPR:
15632 if (TYPE_OVERFLOW_UNDEFINED (type))
15634 if (tree_expr_nonzero_warnv_p (op0,
15635 strict_overflow_p)
15636 && tree_expr_nonzero_warnv_p (op1,
15637 strict_overflow_p))
15639 *strict_overflow_p = true;
15640 return true;
15643 break;
15645 case MIN_EXPR:
15646 sub_strict_overflow_p = false;
15647 if (tree_expr_nonzero_warnv_p (op0,
15648 &sub_strict_overflow_p)
15649 && tree_expr_nonzero_warnv_p (op1,
15650 &sub_strict_overflow_p))
15652 if (sub_strict_overflow_p)
15653 *strict_overflow_p = true;
15655 break;
15657 case MAX_EXPR:
15658 sub_strict_overflow_p = false;
15659 if (tree_expr_nonzero_warnv_p (op0,
15660 &sub_strict_overflow_p))
15662 if (sub_strict_overflow_p)
15663 *strict_overflow_p = true;
15665 /* When both operands are nonzero, then MAX must be too. */
15666 if (tree_expr_nonzero_warnv_p (op1,
15667 strict_overflow_p))
15668 return true;
15670 /* MAX where operand 0 is positive is positive. */
15671 return tree_expr_nonnegative_warnv_p (op0,
15672 strict_overflow_p);
15674 /* MAX where operand 1 is positive is positive. */
15675 else if (tree_expr_nonzero_warnv_p (op1,
15676 &sub_strict_overflow_p)
15677 && tree_expr_nonnegative_warnv_p (op1,
15678 &sub_strict_overflow_p))
15680 if (sub_strict_overflow_p)
15681 *strict_overflow_p = true;
15682 return true;
15684 break;
15686 case BIT_IOR_EXPR:
15687 return (tree_expr_nonzero_warnv_p (op1,
15688 strict_overflow_p)
15689 || tree_expr_nonzero_warnv_p (op0,
15690 strict_overflow_p));
15692 default:
15693 break;
15696 return false;
15699 /* Return true when T is an address and is known to be nonzero.
15700 For floating point we further ensure that T is not denormal.
15701 Similar logic is present in nonzero_address in rtlanal.h.
15703 If the return value is based on the assumption that signed overflow
15704 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15705 change *STRICT_OVERFLOW_P. */
15707 bool
15708 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15710 bool sub_strict_overflow_p;
15711 switch (TREE_CODE (t))
15713 case INTEGER_CST:
15714 return !integer_zerop (t);
15716 case ADDR_EXPR:
15718 tree base = TREE_OPERAND (t, 0);
15719 if (!DECL_P (base))
15720 base = get_base_address (base);
15722 if (!base)
15723 return false;
15725 /* Weak declarations may link to NULL. Other things may also be NULL
15726 so protect with -fdelete-null-pointer-checks; but not variables
15727 allocated on the stack. */
15728 if (DECL_P (base)
15729 && (flag_delete_null_pointer_checks
15730 || (DECL_CONTEXT (base)
15731 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15732 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15733 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15735 /* Constants are never weak. */
15736 if (CONSTANT_CLASS_P (base))
15737 return true;
15739 return false;
15742 case COND_EXPR:
15743 sub_strict_overflow_p = false;
15744 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15745 &sub_strict_overflow_p)
15746 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15747 &sub_strict_overflow_p))
15749 if (sub_strict_overflow_p)
15750 *strict_overflow_p = true;
15751 return true;
15753 break;
15755 default:
15756 break;
15758 return false;
15761 /* Return true when T is an address and is known to be nonzero.
15762 For floating point we further ensure that T is not denormal.
15763 Similar logic is present in nonzero_address in rtlanal.h.
15765 If the return value is based on the assumption that signed overflow
15766 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15767 change *STRICT_OVERFLOW_P. */
15769 bool
15770 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15772 tree type = TREE_TYPE (t);
15773 enum tree_code code;
15775 /* Doing something useful for floating point would need more work. */
15776 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15777 return false;
15779 code = TREE_CODE (t);
15780 switch (TREE_CODE_CLASS (code))
15782 case tcc_unary:
15783 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15784 strict_overflow_p);
15785 case tcc_binary:
15786 case tcc_comparison:
15787 return tree_binary_nonzero_warnv_p (code, type,
15788 TREE_OPERAND (t, 0),
15789 TREE_OPERAND (t, 1),
15790 strict_overflow_p);
15791 case tcc_constant:
15792 case tcc_declaration:
15793 case tcc_reference:
15794 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15796 default:
15797 break;
15800 switch (code)
15802 case TRUTH_NOT_EXPR:
15803 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15804 strict_overflow_p);
15806 case TRUTH_AND_EXPR:
15807 case TRUTH_OR_EXPR:
15808 case TRUTH_XOR_EXPR:
15809 return tree_binary_nonzero_warnv_p (code, type,
15810 TREE_OPERAND (t, 0),
15811 TREE_OPERAND (t, 1),
15812 strict_overflow_p);
15814 case COND_EXPR:
15815 case CONSTRUCTOR:
15816 case OBJ_TYPE_REF:
15817 case ASSERT_EXPR:
15818 case ADDR_EXPR:
15819 case WITH_SIZE_EXPR:
15820 case SSA_NAME:
15821 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15823 case COMPOUND_EXPR:
15824 case MODIFY_EXPR:
15825 case BIND_EXPR:
15826 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15827 strict_overflow_p);
15829 case SAVE_EXPR:
15830 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15831 strict_overflow_p);
15833 case CALL_EXPR:
15834 return alloca_call_p (t);
15836 default:
15837 break;
15839 return false;
15842 /* Return true when T is an address and is known to be nonzero.
15843 Handle warnings about undefined signed overflow. */
15845 bool
15846 tree_expr_nonzero_p (tree t)
15848 bool ret, strict_overflow_p;
15850 strict_overflow_p = false;
15851 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15852 if (strict_overflow_p)
15853 fold_overflow_warning (("assuming signed overflow does not occur when "
15854 "determining that expression is always "
15855 "non-zero"),
15856 WARN_STRICT_OVERFLOW_MISC);
15857 return ret;
15860 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15861 attempt to fold the expression to a constant without modifying TYPE,
15862 OP0 or OP1.
15864 If the expression could be simplified to a constant, then return
15865 the constant. If the expression would not be simplified to a
15866 constant, then return NULL_TREE. */
15868 tree
15869 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15871 tree tem = fold_binary (code, type, op0, op1);
15872 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15875 /* Given the components of a unary expression CODE, TYPE and OP0,
15876 attempt to fold the expression to a constant without modifying
15877 TYPE or OP0.
15879 If the expression could be simplified to a constant, then return
15880 the constant. If the expression would not be simplified to a
15881 constant, then return NULL_TREE. */
15883 tree
15884 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15886 tree tem = fold_unary (code, type, op0);
15887 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15890 /* If EXP represents referencing an element in a constant string
15891 (either via pointer arithmetic or array indexing), return the
15892 tree representing the value accessed, otherwise return NULL. */
15894 tree
15895 fold_read_from_constant_string (tree exp)
15897 if ((TREE_CODE (exp) == INDIRECT_REF
15898 || TREE_CODE (exp) == ARRAY_REF)
15899 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15901 tree exp1 = TREE_OPERAND (exp, 0);
15902 tree index;
15903 tree string;
15904 location_t loc = EXPR_LOCATION (exp);
15906 if (TREE_CODE (exp) == INDIRECT_REF)
15907 string = string_constant (exp1, &index);
15908 else
15910 tree low_bound = array_ref_low_bound (exp);
15911 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15913 /* Optimize the special-case of a zero lower bound.
15915 We convert the low_bound to sizetype to avoid some problems
15916 with constant folding. (E.g. suppose the lower bound is 1,
15917 and its mode is QI. Without the conversion,l (ARRAY
15918 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15919 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15920 if (! integer_zerop (low_bound))
15921 index = size_diffop_loc (loc, index,
15922 fold_convert_loc (loc, sizetype, low_bound));
15924 string = exp1;
15927 if (string
15928 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15929 && TREE_CODE (string) == STRING_CST
15930 && TREE_CODE (index) == INTEGER_CST
15931 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15932 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15933 == MODE_INT)
15934 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15935 return build_int_cst_type (TREE_TYPE (exp),
15936 (TREE_STRING_POINTER (string)
15937 [TREE_INT_CST_LOW (index)]));
15939 return NULL;
15942 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15943 an integer constant, real, or fixed-point constant.
15945 TYPE is the type of the result. */
15947 static tree
15948 fold_negate_const (tree arg0, tree type)
15950 tree t = NULL_TREE;
15952 switch (TREE_CODE (arg0))
15954 case INTEGER_CST:
15956 double_int val = tree_to_double_int (arg0);
15957 bool overflow;
15958 val = val.neg_with_overflow (&overflow);
15959 t = force_fit_type_double (type, val, 1,
15960 (overflow | TREE_OVERFLOW (arg0))
15961 && !TYPE_UNSIGNED (type));
15962 break;
15965 case REAL_CST:
15966 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15967 break;
15969 case FIXED_CST:
15971 FIXED_VALUE_TYPE f;
15972 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15973 &(TREE_FIXED_CST (arg0)), NULL,
15974 TYPE_SATURATING (type));
15975 t = build_fixed (type, f);
15976 /* Propagate overflow flags. */
15977 if (overflow_p | TREE_OVERFLOW (arg0))
15978 TREE_OVERFLOW (t) = 1;
15979 break;
15982 default:
15983 gcc_unreachable ();
15986 return t;
15989 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15990 an integer constant or real constant.
15992 TYPE is the type of the result. */
15994 tree
15995 fold_abs_const (tree arg0, tree type)
15997 tree t = NULL_TREE;
15999 switch (TREE_CODE (arg0))
16001 case INTEGER_CST:
16003 double_int val = tree_to_double_int (arg0);
16005 /* If the value is unsigned or non-negative, then the absolute value
16006 is the same as the ordinary value. */
16007 if (TYPE_UNSIGNED (type)
16008 || !val.is_negative ())
16009 t = arg0;
16011 /* If the value is negative, then the absolute value is
16012 its negation. */
16013 else
16015 bool overflow;
16016 val = val.neg_with_overflow (&overflow);
16017 t = force_fit_type_double (type, val, -1,
16018 overflow | TREE_OVERFLOW (arg0));
16021 break;
16023 case REAL_CST:
16024 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16025 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16026 else
16027 t = arg0;
16028 break;
16030 default:
16031 gcc_unreachable ();
16034 return t;
16037 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16038 constant. TYPE is the type of the result. */
16040 static tree
16041 fold_not_const (const_tree arg0, tree type)
16043 double_int val;
16045 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16047 val = ~tree_to_double_int (arg0);
16048 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16051 /* Given CODE, a relational operator, the target type, TYPE and two
16052 constant operands OP0 and OP1, return the result of the
16053 relational operation. If the result is not a compile time
16054 constant, then return NULL_TREE. */
16056 static tree
16057 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16059 int result, invert;
16061 /* From here on, the only cases we handle are when the result is
16062 known to be a constant. */
16064 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16066 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16067 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16069 /* Handle the cases where either operand is a NaN. */
16070 if (real_isnan (c0) || real_isnan (c1))
16072 switch (code)
16074 case EQ_EXPR:
16075 case ORDERED_EXPR:
16076 result = 0;
16077 break;
16079 case NE_EXPR:
16080 case UNORDERED_EXPR:
16081 case UNLT_EXPR:
16082 case UNLE_EXPR:
16083 case UNGT_EXPR:
16084 case UNGE_EXPR:
16085 case UNEQ_EXPR:
16086 result = 1;
16087 break;
16089 case LT_EXPR:
16090 case LE_EXPR:
16091 case GT_EXPR:
16092 case GE_EXPR:
16093 case LTGT_EXPR:
16094 if (flag_trapping_math)
16095 return NULL_TREE;
16096 result = 0;
16097 break;
16099 default:
16100 gcc_unreachable ();
16103 return constant_boolean_node (result, type);
16106 return constant_boolean_node (real_compare (code, c0, c1), type);
16109 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16111 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16112 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16113 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16116 /* Handle equality/inequality of complex constants. */
16117 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16119 tree rcond = fold_relational_const (code, type,
16120 TREE_REALPART (op0),
16121 TREE_REALPART (op1));
16122 tree icond = fold_relational_const (code, type,
16123 TREE_IMAGPART (op0),
16124 TREE_IMAGPART (op1));
16125 if (code == EQ_EXPR)
16126 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16127 else if (code == NE_EXPR)
16128 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16129 else
16130 return NULL_TREE;
16133 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16135 unsigned count = VECTOR_CST_NELTS (op0);
16136 tree *elts = XALLOCAVEC (tree, count);
16137 gcc_assert (VECTOR_CST_NELTS (op1) == count
16138 && TYPE_VECTOR_SUBPARTS (type) == count);
16140 for (unsigned i = 0; i < count; i++)
16142 tree elem_type = TREE_TYPE (type);
16143 tree elem0 = VECTOR_CST_ELT (op0, i);
16144 tree elem1 = VECTOR_CST_ELT (op1, i);
16146 tree tem = fold_relational_const (code, elem_type,
16147 elem0, elem1);
16149 if (tem == NULL_TREE)
16150 return NULL_TREE;
16152 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16155 return build_vector (type, elts);
16158 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16160 To compute GT, swap the arguments and do LT.
16161 To compute GE, do LT and invert the result.
16162 To compute LE, swap the arguments, do LT and invert the result.
16163 To compute NE, do EQ and invert the result.
16165 Therefore, the code below must handle only EQ and LT. */
16167 if (code == LE_EXPR || code == GT_EXPR)
16169 tree tem = op0;
16170 op0 = op1;
16171 op1 = tem;
16172 code = swap_tree_comparison (code);
16175 /* Note that it is safe to invert for real values here because we
16176 have already handled the one case that it matters. */
16178 invert = 0;
16179 if (code == NE_EXPR || code == GE_EXPR)
16181 invert = 1;
16182 code = invert_tree_comparison (code, false);
16185 /* Compute a result for LT or EQ if args permit;
16186 Otherwise return T. */
16187 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16189 if (code == EQ_EXPR)
16190 result = tree_int_cst_equal (op0, op1);
16191 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16192 result = INT_CST_LT_UNSIGNED (op0, op1);
16193 else
16194 result = INT_CST_LT (op0, op1);
16196 else
16197 return NULL_TREE;
16199 if (invert)
16200 result ^= 1;
16201 return constant_boolean_node (result, type);
16204 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16205 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16206 itself. */
16208 tree
16209 fold_build_cleanup_point_expr (tree type, tree expr)
16211 /* If the expression does not have side effects then we don't have to wrap
16212 it with a cleanup point expression. */
16213 if (!TREE_SIDE_EFFECTS (expr))
16214 return expr;
16216 /* If the expression is a return, check to see if the expression inside the
16217 return has no side effects or the right hand side of the modify expression
16218 inside the return. If either don't have side effects set we don't need to
16219 wrap the expression in a cleanup point expression. Note we don't check the
16220 left hand side of the modify because it should always be a return decl. */
16221 if (TREE_CODE (expr) == RETURN_EXPR)
16223 tree op = TREE_OPERAND (expr, 0);
16224 if (!op || !TREE_SIDE_EFFECTS (op))
16225 return expr;
16226 op = TREE_OPERAND (op, 1);
16227 if (!TREE_SIDE_EFFECTS (op))
16228 return expr;
16231 return build1 (CLEANUP_POINT_EXPR, type, expr);
16234 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16235 of an indirection through OP0, or NULL_TREE if no simplification is
16236 possible. */
16238 tree
16239 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16241 tree sub = op0;
16242 tree subtype;
16244 STRIP_NOPS (sub);
16245 subtype = TREE_TYPE (sub);
16246 if (!POINTER_TYPE_P (subtype))
16247 return NULL_TREE;
16249 if (TREE_CODE (sub) == ADDR_EXPR)
16251 tree op = TREE_OPERAND (sub, 0);
16252 tree optype = TREE_TYPE (op);
16253 /* *&CONST_DECL -> to the value of the const decl. */
16254 if (TREE_CODE (op) == CONST_DECL)
16255 return DECL_INITIAL (op);
16256 /* *&p => p; make sure to handle *&"str"[cst] here. */
16257 if (type == optype)
16259 tree fop = fold_read_from_constant_string (op);
16260 if (fop)
16261 return fop;
16262 else
16263 return op;
16265 /* *(foo *)&fooarray => fooarray[0] */
16266 else if (TREE_CODE (optype) == ARRAY_TYPE
16267 && type == TREE_TYPE (optype)
16268 && (!in_gimple_form
16269 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16271 tree type_domain = TYPE_DOMAIN (optype);
16272 tree min_val = size_zero_node;
16273 if (type_domain && TYPE_MIN_VALUE (type_domain))
16274 min_val = TYPE_MIN_VALUE (type_domain);
16275 if (in_gimple_form
16276 && TREE_CODE (min_val) != INTEGER_CST)
16277 return NULL_TREE;
16278 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16279 NULL_TREE, NULL_TREE);
16281 /* *(foo *)&complexfoo => __real__ complexfoo */
16282 else if (TREE_CODE (optype) == COMPLEX_TYPE
16283 && type == TREE_TYPE (optype))
16284 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16285 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16286 else if (TREE_CODE (optype) == VECTOR_TYPE
16287 && type == TREE_TYPE (optype))
16289 tree part_width = TYPE_SIZE (type);
16290 tree index = bitsize_int (0);
16291 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16295 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16296 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16298 tree op00 = TREE_OPERAND (sub, 0);
16299 tree op01 = TREE_OPERAND (sub, 1);
16301 STRIP_NOPS (op00);
16302 if (TREE_CODE (op00) == ADDR_EXPR)
16304 tree op00type;
16305 op00 = TREE_OPERAND (op00, 0);
16306 op00type = TREE_TYPE (op00);
16308 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16309 if (TREE_CODE (op00type) == VECTOR_TYPE
16310 && type == TREE_TYPE (op00type))
16312 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16313 tree part_width = TYPE_SIZE (type);
16314 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16315 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16316 tree index = bitsize_int (indexi);
16318 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16319 return fold_build3_loc (loc,
16320 BIT_FIELD_REF, type, op00,
16321 part_width, index);
16324 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16325 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16326 && type == TREE_TYPE (op00type))
16328 tree size = TYPE_SIZE_UNIT (type);
16329 if (tree_int_cst_equal (size, op01))
16330 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16332 /* ((foo *)&fooarray)[1] => fooarray[1] */
16333 else if (TREE_CODE (op00type) == ARRAY_TYPE
16334 && type == TREE_TYPE (op00type))
16336 tree type_domain = TYPE_DOMAIN (op00type);
16337 tree min_val = size_zero_node;
16338 if (type_domain && TYPE_MIN_VALUE (type_domain))
16339 min_val = TYPE_MIN_VALUE (type_domain);
16340 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16341 TYPE_SIZE_UNIT (type));
16342 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16343 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16344 NULL_TREE, NULL_TREE);
16349 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16350 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16351 && type == TREE_TYPE (TREE_TYPE (subtype))
16352 && (!in_gimple_form
16353 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16355 tree type_domain;
16356 tree min_val = size_zero_node;
16357 sub = build_fold_indirect_ref_loc (loc, sub);
16358 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16359 if (type_domain && TYPE_MIN_VALUE (type_domain))
16360 min_val = TYPE_MIN_VALUE (type_domain);
16361 if (in_gimple_form
16362 && TREE_CODE (min_val) != INTEGER_CST)
16363 return NULL_TREE;
16364 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16365 NULL_TREE);
16368 return NULL_TREE;
16371 /* Builds an expression for an indirection through T, simplifying some
16372 cases. */
16374 tree
16375 build_fold_indirect_ref_loc (location_t loc, tree t)
16377 tree type = TREE_TYPE (TREE_TYPE (t));
16378 tree sub = fold_indirect_ref_1 (loc, type, t);
16380 if (sub)
16381 return sub;
16383 return build1_loc (loc, INDIRECT_REF, type, t);
16386 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16388 tree
16389 fold_indirect_ref_loc (location_t loc, tree t)
16391 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16393 if (sub)
16394 return sub;
16395 else
16396 return t;
16399 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16400 whose result is ignored. The type of the returned tree need not be
16401 the same as the original expression. */
16403 tree
16404 fold_ignored_result (tree t)
16406 if (!TREE_SIDE_EFFECTS (t))
16407 return integer_zero_node;
16409 for (;;)
16410 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16412 case tcc_unary:
16413 t = TREE_OPERAND (t, 0);
16414 break;
16416 case tcc_binary:
16417 case tcc_comparison:
16418 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16419 t = TREE_OPERAND (t, 0);
16420 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16421 t = TREE_OPERAND (t, 1);
16422 else
16423 return t;
16424 break;
16426 case tcc_expression:
16427 switch (TREE_CODE (t))
16429 case COMPOUND_EXPR:
16430 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16431 return t;
16432 t = TREE_OPERAND (t, 0);
16433 break;
16435 case COND_EXPR:
16436 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16437 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16438 return t;
16439 t = TREE_OPERAND (t, 0);
16440 break;
16442 default:
16443 return t;
16445 break;
16447 default:
16448 return t;
16452 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16453 This can only be applied to objects of a sizetype. */
16455 tree
16456 round_up_loc (location_t loc, tree value, int divisor)
16458 tree div = NULL_TREE;
16460 gcc_assert (divisor > 0);
16461 if (divisor == 1)
16462 return value;
16464 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16465 have to do anything. Only do this when we are not given a const,
16466 because in that case, this check is more expensive than just
16467 doing it. */
16468 if (TREE_CODE (value) != INTEGER_CST)
16470 div = build_int_cst (TREE_TYPE (value), divisor);
16472 if (multiple_of_p (TREE_TYPE (value), value, div))
16473 return value;
16476 /* If divisor is a power of two, simplify this to bit manipulation. */
16477 if (divisor == (divisor & -divisor))
16479 if (TREE_CODE (value) == INTEGER_CST)
16481 double_int val = tree_to_double_int (value);
16482 bool overflow_p;
16484 if ((val.low & (divisor - 1)) == 0)
16485 return value;
16487 overflow_p = TREE_OVERFLOW (value);
16488 val.low &= ~(divisor - 1);
16489 val.low += divisor;
16490 if (val.low == 0)
16492 val.high++;
16493 if (val.high == 0)
16494 overflow_p = true;
16497 return force_fit_type_double (TREE_TYPE (value), val,
16498 -1, overflow_p);
16500 else
16502 tree t;
16504 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16505 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16506 t = build_int_cst (TREE_TYPE (value), -divisor);
16507 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16510 else
16512 if (!div)
16513 div = build_int_cst (TREE_TYPE (value), divisor);
16514 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16515 value = size_binop_loc (loc, MULT_EXPR, value, div);
16518 return value;
16521 /* Likewise, but round down. */
16523 tree
16524 round_down_loc (location_t loc, tree value, int divisor)
16526 tree div = NULL_TREE;
16528 gcc_assert (divisor > 0);
16529 if (divisor == 1)
16530 return value;
16532 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16533 have to do anything. Only do this when we are not given a const,
16534 because in that case, this check is more expensive than just
16535 doing it. */
16536 if (TREE_CODE (value) != INTEGER_CST)
16538 div = build_int_cst (TREE_TYPE (value), divisor);
16540 if (multiple_of_p (TREE_TYPE (value), value, div))
16541 return value;
16544 /* If divisor is a power of two, simplify this to bit manipulation. */
16545 if (divisor == (divisor & -divisor))
16547 tree t;
16549 t = build_int_cst (TREE_TYPE (value), -divisor);
16550 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16552 else
16554 if (!div)
16555 div = build_int_cst (TREE_TYPE (value), divisor);
16556 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16557 value = size_binop_loc (loc, MULT_EXPR, value, div);
16560 return value;
16563 /* Returns the pointer to the base of the object addressed by EXP and
16564 extracts the information about the offset of the access, storing it
16565 to PBITPOS and POFFSET. */
16567 static tree
16568 split_address_to_core_and_offset (tree exp,
16569 HOST_WIDE_INT *pbitpos, tree *poffset)
16571 tree core;
16572 enum machine_mode mode;
16573 int unsignedp, volatilep;
16574 HOST_WIDE_INT bitsize;
16575 location_t loc = EXPR_LOCATION (exp);
16577 if (TREE_CODE (exp) == ADDR_EXPR)
16579 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16580 poffset, &mode, &unsignedp, &volatilep,
16581 false);
16582 core = build_fold_addr_expr_loc (loc, core);
16584 else
16586 core = exp;
16587 *pbitpos = 0;
16588 *poffset = NULL_TREE;
16591 return core;
16594 /* Returns true if addresses of E1 and E2 differ by a constant, false
16595 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16597 bool
16598 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16600 tree core1, core2;
16601 HOST_WIDE_INT bitpos1, bitpos2;
16602 tree toffset1, toffset2, tdiff, type;
16604 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16605 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16607 if (bitpos1 % BITS_PER_UNIT != 0
16608 || bitpos2 % BITS_PER_UNIT != 0
16609 || !operand_equal_p (core1, core2, 0))
16610 return false;
16612 if (toffset1 && toffset2)
16614 type = TREE_TYPE (toffset1);
16615 if (type != TREE_TYPE (toffset2))
16616 toffset2 = fold_convert (type, toffset2);
16618 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16619 if (!cst_and_fits_in_hwi (tdiff))
16620 return false;
16622 *diff = int_cst_value (tdiff);
16624 else if (toffset1 || toffset2)
16626 /* If only one of the offsets is non-constant, the difference cannot
16627 be a constant. */
16628 return false;
16630 else
16631 *diff = 0;
16633 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16634 return true;
16637 /* Simplify the floating point expression EXP when the sign of the
16638 result is not significant. Return NULL_TREE if no simplification
16639 is possible. */
16641 tree
16642 fold_strip_sign_ops (tree exp)
16644 tree arg0, arg1;
16645 location_t loc = EXPR_LOCATION (exp);
16647 switch (TREE_CODE (exp))
16649 case ABS_EXPR:
16650 case NEGATE_EXPR:
16651 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16652 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16654 case MULT_EXPR:
16655 case RDIV_EXPR:
16656 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16657 return NULL_TREE;
16658 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16659 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16660 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16661 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16662 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16663 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16664 break;
16666 case COMPOUND_EXPR:
16667 arg0 = TREE_OPERAND (exp, 0);
16668 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16669 if (arg1)
16670 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16671 break;
16673 case COND_EXPR:
16674 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16675 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16676 if (arg0 || arg1)
16677 return fold_build3_loc (loc,
16678 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16679 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16680 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16681 break;
16683 case CALL_EXPR:
16685 const enum built_in_function fcode = builtin_mathfn_code (exp);
16686 switch (fcode)
16688 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16689 /* Strip copysign function call, return the 1st argument. */
16690 arg0 = CALL_EXPR_ARG (exp, 0);
16691 arg1 = CALL_EXPR_ARG (exp, 1);
16692 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16694 default:
16695 /* Strip sign ops from the argument of "odd" math functions. */
16696 if (negate_mathfn_p (fcode))
16698 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16699 if (arg0)
16700 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16702 break;
16705 break;
16707 default:
16708 break;
16710 return NULL_TREE;