PR tree-optimization/55832
[official-gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012, 2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
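/* Editorial note (not in the original source): the encoding gives the
   four primitive outcomes LT (1), EQ (2), GT (4) and UNORD (8) one bit
   each, so compound codes are plain bit-ors, e.g.
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and the logical inverse of
   a comparison is an XOR with COMPCODE_TRUE:

     COMPCODE_LE ^ COMPCODE_TRUE == COMPCODE_UNGT   (3 ^ 15 == 12)

   which is why AND/OR of comparisons reduce to bitwise AND/OR here.  */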
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carry out the division
   of type CODE and return the quotient.
   Otherwise return NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; this does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
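/* Editorial sketch (not part of the original file): a minimal use of
   div_if_zero_remainder, assuming A and B are INTEGER_CSTs of the same
   type.  The division folds only when it is exact.  */
#if 0
tree quotient = div_if_zero_remainder (EXACT_DIV_EXPR, a, b);
if (quotient == NULL_TREE)
  /* B does not divide A evenly; leave the expression alone.  */;
#endif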
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
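/* Editorial sketch (not part of the original file): the typical caller
   pattern for the deferral machinery above, e.g. in the loop-iteration
   estimators.  EXPR is a hypothetical tree.  */
#if 0
fold_defer_overflow_warnings ();
tree folded = fold (expr);
/* Use FOLDED only if it is interesting; discard any queued warning.  */
fold_undefer_and_ignore_overflow_warnings ();
#endif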
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
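/* Editorial note (not in the original source): "odd" is the usual
   mathematical sense, e.g. sin(-x) == -sin(x), so a negation can be
   pushed into the argument.  The rint family is odd only under
   round-to-nearest; with -frounding-math a run-time directional
   rounding mode breaks the identity, hence the !flag_rounding_math
   guard.  */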
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
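/* Editorial note (not in the original source): for a 32-bit signed
   type the check above rejects exactly one constant, 0x80000000
   (INT_MIN), whose negation is not representable in the type; every
   other value negates without overflow.  */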
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
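/* Editorial sketch (not part of the original file): with integer
   operands A and B (hypothetical, side-effect free), fold_negate_expr
   rewrites -(A - B) as B - A, since neither signed zeros nor
   sign-dependent rounding apply; it returns NULL_TREE when no cheap
   negation exists.  */
#if 0
tree neg = fold_negate_expr (loc, build2 (MINUS_EXPR, integer_type_node,
                                          a, b));
/* NEG is now the tree "b - a".  */
#endif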
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
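/* Editorial example (not in the original source): splitting the tree
   (x - 5) with CODE == PLUS_EXPR and NEGATE_P == 0 returns the variable
   part x, sets *MINUS_LITP to 5 (the literal was subtracted), and
   leaves *LITP and *CONP null.  associate_trees below can then rebuild
   an equivalent expression from the parts.  */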
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
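/* Editorial note (not in the original source): e.g. when T1 is a
   PLUS_EXPR chain a + b and T2 is -y, re-associating under PLUS_EXPR
   emits (a + b) - y directly via the NEGATE_EXPR check above, instead
   of handing a + b back to fold and recursing.  */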
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
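/* Editorial sketch (not part of the original file): folding the
   constant expression 2 + 3 at compile time.  */
#if 0
tree two = build_int_cst (integer_type_node, 2);
tree three = build_int_cst (integer_type_node, 3);
tree five = int_const_binop (PLUS_EXPR, two, three);
/* FIVE is an INTEGER_CST with value 5 and no overflow flag set.  */
#endif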
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
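/* Editorial sketch (not part of the original file): const_binop folds
   two REAL_CSTs into one, and returns NULL_TREE when folding is unsafe,
   e.g. an RDIV_EXPR by zero under -ftrapping-math.  */
#if 0
tree sum = const_binop (PLUS_EXPR,
                        build_real (double_type_node, dconst1),
                        build_real (double_type_node, dconst2));
/* SUM is the REAL_CST 3.0.  */
#endif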
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
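/* Editorial sketch (not part of the original file): computing
   OFFSET + 4 in sizetype; size_binop is the location-less wrapper
   around size_binop_loc, and OFFSET is a hypothetical sizetype tree.  */
#if 0
tree new_off = size_binop (PLUS_EXPR, offset, size_int (4));
#endif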
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
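/* Editorial note (not in the original source): e.g. converting the
   REAL_CST 1e30 to a 32-bit int yields INT_MAX (2147483647) with
   TREE_OVERFLOW set, and a NaN yields 0, matching the saturating
   semantics described above.  */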
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Return true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1813 /* Convert expression ARG to type TYPE. Used by the middle-end for
1814 simple conversions in preference to calling the front-end's convert. */
1816 tree
1817 fold_convert_loc (location_t loc, tree type, tree arg)
1819 tree orig = TREE_TYPE (arg);
1820 tree tem;
1822 if (type == orig)
1823 return arg;
1825 if (TREE_CODE (arg) == ERROR_MARK
1826 || TREE_CODE (type) == ERROR_MARK
1827 || TREE_CODE (orig) == ERROR_MARK)
1828 return error_mark_node;
1830 switch (TREE_CODE (type))
1832 case POINTER_TYPE:
1833 case REFERENCE_TYPE:
1834 /* Handle conversions between pointers to different address spaces. */
1835 if (POINTER_TYPE_P (orig)
1836 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1837 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1838 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1839 /* fall through */
1841 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1842 case OFFSET_TYPE:
1843 if (TREE_CODE (arg) == INTEGER_CST)
1845 tem = fold_convert_const (NOP_EXPR, type, arg);
1846 if (tem != NULL_TREE)
1847 return tem;
1849 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1850 || TREE_CODE (orig) == OFFSET_TYPE)
1851 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1852 if (TREE_CODE (orig) == COMPLEX_TYPE)
1853 return fold_convert_loc (loc, type,
1854 fold_build1_loc (loc, REALPART_EXPR,
1855 TREE_TYPE (orig), arg));
1856 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1857 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1858 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1860 case REAL_TYPE:
1861 if (TREE_CODE (arg) == INTEGER_CST)
1863 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1864 if (tem != NULL_TREE)
1865 return tem;
1867 else if (TREE_CODE (arg) == REAL_CST)
1869 tem = fold_convert_const (NOP_EXPR, type, arg);
1870 if (tem != NULL_TREE)
1871 return tem;
1873 else if (TREE_CODE (arg) == FIXED_CST)
1875 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1876 if (tem != NULL_TREE)
1877 return tem;
1880 switch (TREE_CODE (orig))
1882 case INTEGER_TYPE:
1883 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1884 case POINTER_TYPE: case REFERENCE_TYPE:
1885 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1887 case REAL_TYPE:
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1890 case FIXED_POINT_TYPE:
1891 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1893 case COMPLEX_TYPE:
1894 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1895 return fold_convert_loc (loc, type, tem);
1897 default:
1898 gcc_unreachable ();
1901 case FIXED_POINT_TYPE:
1902 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1903 || TREE_CODE (arg) == REAL_CST)
1905 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 goto fold_convert_exit;
1910 switch (TREE_CODE (orig))
1912 case FIXED_POINT_TYPE:
1913 case INTEGER_TYPE:
1914 case ENUMERAL_TYPE:
1915 case BOOLEAN_TYPE:
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1919 case COMPLEX_TYPE:
1920 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1921 return fold_convert_loc (loc, type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 case FIXED_POINT_TYPE:
1935 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1936 fold_convert_loc (loc, TREE_TYPE (type), arg),
1937 fold_convert_loc (loc, TREE_TYPE (type),
1938 integer_zero_node));
1939 case COMPLEX_TYPE:
1941 tree rpart, ipart;
1943 if (TREE_CODE (arg) == COMPLEX_EXPR)
1945 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1946 TREE_OPERAND (arg, 0));
1947 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1948 TREE_OPERAND (arg, 1));
1949 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1952 arg = save_expr (arg);
1953 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1954 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1955 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1956 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1957 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1960 default:
1961 gcc_unreachable ();
1964 case VECTOR_TYPE:
1965 if (integer_zerop (arg))
1966 return build_zero_vector (type);
1967 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1968 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1969 || TREE_CODE (orig) == VECTOR_TYPE);
1970 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1972 case VOID_TYPE:
1973 tem = fold_ignored_result (arg);
1974 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1976 default:
1977 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1978 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1979 gcc_unreachable ();
1981 fold_convert_exit:
1982 protected_set_expr_location_unshare (tem, loc);
1983 return tem;
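/* Illustrative summary (editorial note, not in the original source):
   a few of the conversions handled above, written as C-level sketches:

     (double) i_int        => FLOAT_EXPR <i_int>
     (double) z_complex    => (double) REALPART_EXPR <z_complex>
     (v4si) 0              => build_zero_vector, i.e. {0, 0, 0, 0}

   The complex case shows why the function recurses: the real part is
   extracted in the source element type first, and that scalar is then
   converted to the target type.  */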
1986 /* Return false if expr can be assumed not to be an lvalue, true
1987 otherwise. */
1989 static bool
1990 maybe_lvalue_p (const_tree x)
1992 /* We only need to wrap lvalue tree codes. */
1993 switch (TREE_CODE (x))
1995 case VAR_DECL:
1996 case PARM_DECL:
1997 case RESULT_DECL:
1998 case LABEL_DECL:
1999 case FUNCTION_DECL:
2000 case SSA_NAME:
2002 case COMPONENT_REF:
2003 case MEM_REF:
2004 case INDIRECT_REF:
2005 case ARRAY_REF:
2006 case ARRAY_RANGE_REF:
2007 case BIT_FIELD_REF:
2008 case OBJ_TYPE_REF:
2010 case REALPART_EXPR:
2011 case IMAGPART_EXPR:
2012 case PREINCREMENT_EXPR:
2013 case PREDECREMENT_EXPR:
2014 case SAVE_EXPR:
2015 case TRY_CATCH_EXPR:
2016 case WITH_CLEANUP_EXPR:
2017 case COMPOUND_EXPR:
2018 case MODIFY_EXPR:
2019 case TARGET_EXPR:
2020 case COND_EXPR:
2021 case BIND_EXPR:
2022 break;
2024 default:
2025 /* Assume the worst for front-end tree codes. */
2026 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2027 break;
2028 return false;
2031 return true;
2034 /* Return an expr equal to X but certainly not valid as an lvalue. */
2036 tree
2037 non_lvalue_loc (location_t loc, tree x)
2039 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2040 us. */
2041 if (in_gimple_form)
2042 return x;
2044 if (! maybe_lvalue_p (x))
2045 return x;
2046 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
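/* Example (editorial note): after folding an expression such as x + 0
   down to plain x, the result would suddenly be usable as an
   assignment target; wrapping it in NON_LVALUE_EXPR preserves the
   original expression's non-lvalue-ness.  In GIMPLE form lvalue-ness
   no longer matters, which is why the wrapper is skipped above.  */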
2049 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2050 Zero means allow extended lvalues. */
2052 int pedantic_lvalues;
2054 /* When pedantic, return an expr equal to X but certainly not valid as a
2055 pedantic lvalue. Otherwise, return X. */
2057 static tree
2058 pedantic_non_lvalue_loc (location_t loc, tree x)
2060 if (pedantic_lvalues)
2061 return non_lvalue_loc (loc, x);
2063 return protected_set_expr_location_unshare (x, loc);
2066 /* Given a tree comparison code, return the code that is the logical inverse.
2067 It is generally not safe to do this for floating-point comparisons, except
2068 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2069 ERROR_MARK in this case. */
2071 enum tree_code
2072 invert_tree_comparison (enum tree_code code, bool honor_nans)
2074 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2075 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2076 return ERROR_MARK;
2078 switch (code)
2080 case EQ_EXPR:
2081 return NE_EXPR;
2082 case NE_EXPR:
2083 return EQ_EXPR;
2084 case GT_EXPR:
2085 return honor_nans ? UNLE_EXPR : LE_EXPR;
2086 case GE_EXPR:
2087 return honor_nans ? UNLT_EXPR : LT_EXPR;
2088 case LT_EXPR:
2089 return honor_nans ? UNGE_EXPR : GE_EXPR;
2090 case LE_EXPR:
2091 return honor_nans ? UNGT_EXPR : GT_EXPR;
2092 case LTGT_EXPR:
2093 return UNEQ_EXPR;
2094 case UNEQ_EXPR:
2095 return LTGT_EXPR;
2096 case UNGT_EXPR:
2097 return LE_EXPR;
2098 case UNGE_EXPR:
2099 return LT_EXPR;
2100 case UNLT_EXPR:
2101 return GE_EXPR;
2102 case UNLE_EXPR:
2103 return GT_EXPR;
2104 case ORDERED_EXPR:
2105 return UNORDERED_EXPR;
2106 case UNORDERED_EXPR:
2107 return ORDERED_EXPR;
2108 default:
2109 gcc_unreachable ();
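/* Example (editorial note): with NaNs honored, the inverse of a < b is
   UNGE_EXPR rather than GE_EXPR, since both tests are false when either
   operand is NaN.  Under -ftrapping-math the inversion is refused
   (ERROR_MARK) for such codes, because a < b may trap on unordered
   operands while its unordered counterpart must not.  */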
2113 /* Similar, but return the comparison that results if the operands are
2114 swapped. This is safe for floating-point. */
2116 enum tree_code
2117 swap_tree_comparison (enum tree_code code)
2119 switch (code)
2121 case EQ_EXPR:
2122 case NE_EXPR:
2123 case ORDERED_EXPR:
2124 case UNORDERED_EXPR:
2125 case LTGT_EXPR:
2126 case UNEQ_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 case UNGT_EXPR:
2137 return UNLT_EXPR;
2138 case UNGE_EXPR:
2139 return UNLE_EXPR;
2140 case UNLT_EXPR:
2141 return UNGT_EXPR;
2142 case UNLE_EXPR:
2143 return UNGE_EXPR;
2144 default:
2145 gcc_unreachable ();
2150 /* Convert a comparison tree code from an enum tree_code representation
2151 into a compcode bit-based encoding. This function is the inverse of
2152 compcode_to_comparison. */
2154 static enum comparison_code
2155 comparison_to_compcode (enum tree_code code)
2157 switch (code)
2159 case LT_EXPR:
2160 return COMPCODE_LT;
2161 case EQ_EXPR:
2162 return COMPCODE_EQ;
2163 case LE_EXPR:
2164 return COMPCODE_LE;
2165 case GT_EXPR:
2166 return COMPCODE_GT;
2167 case NE_EXPR:
2168 return COMPCODE_NE;
2169 case GE_EXPR:
2170 return COMPCODE_GE;
2171 case ORDERED_EXPR:
2172 return COMPCODE_ORD;
2173 case UNORDERED_EXPR:
2174 return COMPCODE_UNORD;
2175 case UNLT_EXPR:
2176 return COMPCODE_UNLT;
2177 case UNEQ_EXPR:
2178 return COMPCODE_UNEQ;
2179 case UNLE_EXPR:
2180 return COMPCODE_UNLE;
2181 case UNGT_EXPR:
2182 return COMPCODE_UNGT;
2183 case LTGT_EXPR:
2184 return COMPCODE_LTGT;
2185 case UNGE_EXPR:
2186 return COMPCODE_UNGE;
2187 default:
2188 gcc_unreachable ();
2192 /* Convert a compcode bit-based encoding of a comparison operator back
2193 to GCC's enum tree_code representation. This function is the
2194 inverse of comparison_to_compcode. */
2196 static enum tree_code
2197 compcode_to_comparison (enum comparison_code code)
2199 switch (code)
2201 case COMPCODE_LT:
2202 return LT_EXPR;
2203 case COMPCODE_EQ:
2204 return EQ_EXPR;
2205 case COMPCODE_LE:
2206 return LE_EXPR;
2207 case COMPCODE_GT:
2208 return GT_EXPR;
2209 case COMPCODE_NE:
2210 return NE_EXPR;
2211 case COMPCODE_GE:
2212 return GE_EXPR;
2213 case COMPCODE_ORD:
2214 return ORDERED_EXPR;
2215 case COMPCODE_UNORD:
2216 return UNORDERED_EXPR;
2217 case COMPCODE_UNLT:
2218 return UNLT_EXPR;
2219 case COMPCODE_UNEQ:
2220 return UNEQ_EXPR;
2221 case COMPCODE_UNLE:
2222 return UNLE_EXPR;
2223 case COMPCODE_UNGT:
2224 return UNGT_EXPR;
2225 case COMPCODE_LTGT:
2226 return LTGT_EXPR;
2227 case COMPCODE_UNGE:
2228 return UNGE_EXPR;
2229 default:
2230 gcc_unreachable ();
2234 /* Return a tree for the comparison which is the combination of
2235 doing the AND or OR (depending on CODE) of the two operations LCODE
2236 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2237 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2238 if this makes the transformation invalid. */
2240 tree
2241 combine_comparisons (location_t loc,
2242 enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 int compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2316 enum tree_code tcode;
2318 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2319 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
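/* Worked example (editorial note): for (x < y) || (x == y) the OR of
   the bit encodings is COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the
   pair folds to the single test x <= y.  Likewise (x <= y) && (x >= y)
   ANDs to COMPCODE_EQ, i.e. x == y.  */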
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2349 int
2350 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2354 || TREE_TYPE (arg0) == error_mark_node
2355 || TREE_TYPE (arg1) == error_mark_node)
2356 return 0;
2358 /* Similarly, if either does not have a type (like a released SSA name),
2359 they aren't equal. */
2360 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2361 return 0;
2363 /* Check equality of integer constants before bailing out due to
2364 precision differences. */
2365 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2366 return tree_int_cst_equal (arg0, arg1);
2368 /* If both types don't have the same signedness, then we can't consider
2369 them equal. We must check this before the STRIP_NOPS calls
2370 because they may change the signedness of the arguments. As pointers
2371 strictly don't have a signedness, require either two pointers or
2372 two non-pointers as well. */
2373 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2374 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2375 return 0;
2377 /* We cannot consider pointers to different address spaces equal. */
2378 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2379 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2380 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2381 return 0;
2383 /* If both types don't have the same precision, then it is not safe
2384 to strip NOPs. */
2385 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2386 return 0;
2388 STRIP_NOPS (arg0);
2389 STRIP_NOPS (arg1);
2391 /* In case both args are comparisons but with different comparison
2392 code, try to swap the comparison operands of one arg to produce
2393 a match and compare that variant. */
2394 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2395 && COMPARISON_CLASS_P (arg0)
2396 && COMPARISON_CLASS_P (arg1))
2398 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2400 if (TREE_CODE (arg0) == swap_code)
2401 return operand_equal_p (TREE_OPERAND (arg0, 0),
2402 TREE_OPERAND (arg1, 1), flags)
2403 && operand_equal_p (TREE_OPERAND (arg0, 1),
2404 TREE_OPERAND (arg1, 0), flags);
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 /* This is needed for conversions and for COMPONENT_REF.
2409 Might as well play it safe and always test this. */
2410 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2411 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2412 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2413 return 0;
2415 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2416 We don't care about side effects in that case because the SAVE_EXPR
2417 takes care of that for us. In all other cases, two expressions are
2418 equal if they have no side effects. If we have two identical
2419 expressions with side effects that should be treated the same due
2420 to the only side effects being identical SAVE_EXPR's, that will
2421 be detected in the recursive calls below.
2422 If we are taking an invariant address of two identical objects
2423 they are necessarily equal as well. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (flags & OEP_CONSTANT_ADDRESS_OF)
2427 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2428 return 1;
2430 /* Next handle constant cases, those for which we can return 1 even
2431 if ONLY_CONST is set. */
2432 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2433 switch (TREE_CODE (arg0))
2435 case INTEGER_CST:
2436 return tree_int_cst_equal (arg0, arg1);
2438 case FIXED_CST:
2439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2440 TREE_FIXED_CST (arg1));
2442 case REAL_CST:
2443 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2444 TREE_REAL_CST (arg1)))
2445 return 1;
2448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2450 /* If we do not distinguish between signed and unsigned zero,
2451 consider them equal. */
2452 if (real_zerop (arg0) && real_zerop (arg1))
2453 return 1;
2455 return 0;
2457 case VECTOR_CST:
2459 unsigned i;
2461 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2462 return 0;
2464 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2466 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2467 VECTOR_CST_ELT (arg1, i), flags))
2468 return 0;
2470 return 1;
2473 case COMPLEX_CST:
2474 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2475 flags)
2476 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2477 flags));
2479 case STRING_CST:
2480 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2481 && ! memcmp (TREE_STRING_POINTER (arg0),
2482 TREE_STRING_POINTER (arg1),
2483 TREE_STRING_LENGTH (arg0)));
2485 case ADDR_EXPR:
2486 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2487 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2488 ? OEP_CONSTANT_ADDRESS_OF : 0);
2489 default:
2490 break;
2493 if (flags & OEP_ONLY_CONST)
2494 return 0;
2496 /* Define macros to test an operand from arg0 and arg1 for equality and a
2497 variant that allows null and views null as being different from any
2498 non-null value. In the latter case, if either is null, both
2499 must be; otherwise, do the normal comparison. */
2500 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2501 TREE_OPERAND (arg1, N), flags)
2503 #define OP_SAME_WITH_NULL(N) \
2504 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2505 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2507 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2509 case tcc_unary:
2510 /* Two conversions are equal only if signedness and modes match. */
2511 switch (TREE_CODE (arg0))
2513 CASE_CONVERT:
2514 case FIX_TRUNC_EXPR:
2515 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2516 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2517 return 0;
2518 break;
2519 default:
2520 break;
2523 return OP_SAME (0);
2526 case tcc_comparison:
2527 case tcc_binary:
2528 if (OP_SAME (0) && OP_SAME (1))
2529 return 1;
2531 /* For commutative ops, allow the other order. */
2532 return (commutative_tree_code (TREE_CODE (arg0))
2533 && operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags));
2538 case tcc_reference:
2539 /* If either of the pointer (or reference) expressions we are
2540 dereferencing contain a side effect, these cannot be equal. */
2541 if (TREE_SIDE_EFFECTS (arg0)
2542 || TREE_SIDE_EFFECTS (arg1))
2543 return 0;
2545 switch (TREE_CODE (arg0))
2547 case INDIRECT_REF:
2548 case REALPART_EXPR:
2549 case IMAGPART_EXPR:
2550 return OP_SAME (0);
2552 case TARGET_MEM_REF:
2553 /* Require equal extra operands and then fall through to MEM_REF
2554 handling of the two common operands. */
2555 if (!OP_SAME_WITH_NULL (2)
2556 || !OP_SAME_WITH_NULL (3)
2557 || !OP_SAME_WITH_NULL (4))
2558 return 0;
2559 /* Fallthru. */
2560 case MEM_REF:
2561 /* Require equal access sizes, and similar pointer types.
2562 We can have incomplete types for array references of
2563 variable-sized arrays from the Fortran frontend
2564 though. */
2565 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2566 || (TYPE_SIZE (TREE_TYPE (arg0))
2567 && TYPE_SIZE (TREE_TYPE (arg1))
2568 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2569 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2570 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2571 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2572 && OP_SAME (0) && OP_SAME (1));
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null.
2577 Compare the array index by value first if it is constant, as we
2578 may have different types but the same value here. */
2579 return (OP_SAME (0)
2580 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 1))
2582 || OP_SAME (1))
2583 && OP_SAME_WITH_NULL (2)
2584 && OP_SAME_WITH_NULL (3));
2586 case COMPONENT_REF:
2587 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2588 may be NULL when we're called to compare MEM_EXPRs. */
2589 return OP_SAME_WITH_NULL (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2);
2593 case BIT_FIELD_REF:
2594 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 default:
2597 return 0;
2600 case tcc_expression:
2601 switch (TREE_CODE (arg0))
2603 case ADDR_EXPR:
2604 case TRUTH_NOT_EXPR:
2605 return OP_SAME (0);
2607 case TRUTH_ANDIF_EXPR:
2608 case TRUTH_ORIF_EXPR:
2609 return OP_SAME (0) && OP_SAME (1);
2611 case FMA_EXPR:
2612 case WIDEN_MULT_PLUS_EXPR:
2613 case WIDEN_MULT_MINUS_EXPR:
2614 if (!OP_SAME (2))
2615 return 0;
2616 /* The multiplication operands are commutative. */
2617 /* FALLTHRU */
2619 case TRUTH_AND_EXPR:
2620 case TRUTH_OR_EXPR:
2621 case TRUTH_XOR_EXPR:
2622 if (OP_SAME (0) && OP_SAME (1))
2623 return 1;
2625 /* Otherwise take into account this is a commutative operation. */
2626 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2627 TREE_OPERAND (arg1, 1), flags)
2628 && operand_equal_p (TREE_OPERAND (arg0, 1),
2629 TREE_OPERAND (arg1, 0), flags));
2631 case COND_EXPR:
2632 case VEC_COND_EXPR:
2633 case DOT_PROD_EXPR:
2634 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 default:
2637 return 0;
2640 case tcc_vl_exp:
2641 switch (TREE_CODE (arg0))
2643 case CALL_EXPR:
2644 /* If the CALL_EXPRs call different functions, then they
2645 clearly cannot be equal. */
2646 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2647 flags))
2648 return 0;
2651 unsigned int cef = call_expr_flags (arg0);
2652 if (flags & OEP_PURE_SAME)
2653 cef &= ECF_CONST | ECF_PURE;
2654 else
2655 cef &= ECF_CONST;
2656 if (!cef)
2657 return 0;
2660 /* Now see if all the arguments are the same. */
2662 const_call_expr_arg_iterator iter0, iter1;
2663 const_tree a0, a1;
2664 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2665 a1 = first_const_call_expr_arg (arg1, &iter1);
2666 a0 && a1;
2667 a0 = next_const_call_expr_arg (&iter0),
2668 a1 = next_const_call_expr_arg (&iter1))
2669 if (! operand_equal_p (a0, a1, flags))
2670 return 0;
2672 /* If we get here and both argument lists are exhausted
2673 then the CALL_EXPRs are equal. */
2674 return ! (a0 || a1);
2676 default:
2677 return 0;
2680 case tcc_declaration:
2681 /* Consider __builtin_sqrt equal to sqrt. */
2682 return (TREE_CODE (arg0) == FUNCTION_DECL
2683 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2684 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2685 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2687 default:
2688 return 0;
2691 #undef OP_SAME
2692 #undef OP_SAME_WITH_NULL
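/* Usage notes (editorial): operand_equal_p (a + b, b + a, 0) returns 1,
   since commutative tcc_binary codes are tried in both operand orders
   above; REAL_CSTs 0.0 and -0.0, on the other hand, compare equal only
   when the mode does not honor signed zeros.  */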
2695 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2696 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2698 When in doubt, return 0. */
2700 static int
2701 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2703 int unsignedp1, unsignedpo;
2704 tree primarg0, primarg1, primother;
2705 unsigned int correct_width;
2707 if (operand_equal_p (arg0, arg1, 0))
2708 return 1;
2710 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2711 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2712 return 0;
2714 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2715 and see if the inner values are the same. This removes any
2716 signedness comparison, which doesn't matter here. */
2717 primarg0 = arg0, primarg1 = arg1;
2718 STRIP_NOPS (primarg0);
2719 STRIP_NOPS (primarg1);
2720 if (operand_equal_p (primarg0, primarg1, 0))
2721 return 1;
2723 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2724 actual comparison operand, ARG0.
2726 First throw away any conversions to wider types
2727 already present in the operands. */
2729 primarg1 = get_narrower (arg1, &unsignedp1);
2730 primother = get_narrower (other, &unsignedpo);
2732 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2733 if (unsignedp1 == unsignedpo
2734 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2735 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2737 tree type = TREE_TYPE (arg0);
2739 /* Make sure shorter operand is extended the right way
2740 to match the longer operand. */
2741 primarg1 = fold_convert (signed_or_unsigned_type_for
2742 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2744 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2745 return 1;
2748 return 0;
2751 /* See if ARG is an expression that is either a comparison or is performing
2752 arithmetic on comparisons. The comparisons must only be comparing
2753 two different values, which will be stored in *CVAL1 and *CVAL2; if
2754 they are nonzero it means that some operands have already been found.
2755 No variables may be used anywhere else in the expression except in the
2756 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2757 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2759 If this is true, return 1. Otherwise, return zero. */
2761 static int
2762 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2769 tclass = tcc_unary;
2770 else if (tclass == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2772 || code == COMPOUND_EXPR))
2773 tclass = tcc_binary;
2775 else if (tclass == tcc_expression && code == SAVE_EXPR
2776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2778 /* If we've already found a CVAL1 or CVAL2, this expression is
2779 too complex to handle. */
2780 if (*cval1 || *cval2)
2781 return 0;
2783 tclass = tcc_unary;
2784 *save_p = 1;
2787 switch (tclass)
2789 case tcc_unary:
2790 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2792 case tcc_binary:
2793 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2794 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2795 cval1, cval2, save_p));
2797 case tcc_constant:
2798 return 1;
2800 case tcc_expression:
2801 if (code == COND_EXPR)
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2803 cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p)
2806 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2807 cval1, cval2, save_p));
2808 return 0;
2810 case tcc_comparison:
2811 /* First see if we can handle the first operand, then the second. For
2812 the second operand, we know *CVAL1 can't be zero. It must be that
2813 one side of the comparison is each of the values; test for the
2814 case where this isn't true by failing if the two operands
2815 are the same. */
2817 if (operand_equal_p (TREE_OPERAND (arg, 0),
2818 TREE_OPERAND (arg, 1), 0))
2819 return 0;
2821 if (*cval1 == 0)
2822 *cval1 = TREE_OPERAND (arg, 0);
2823 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2824 ;
2825 else if (*cval2 == 0)
2826 *cval2 = TREE_OPERAND (arg, 0);
2827 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2828 ;
2829 else
2830 return 0;
2832 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2833 ;
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 1);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2837 ;
2838 else
2839 return 0;
2841 return 1;
2843 default:
2844 return 0;
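/* Example (editorial note): for ARG = (x < y) || (x == y) the walk
   above records *CVAL1 = x and *CVAL2 = y and returns 1, whereas
   (x < y) || (y < z) fails because it mentions a third value.  */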
2848 /* ARG is a tree that is known to contain just arithmetic operations and
2849 comparisons. Evaluate the operations in the tree substituting NEW0 for
2850 any occurrence of OLD0 as an operand of a comparison and likewise for
2851 NEW1 and OLD1. */
2853 static tree
2854 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2855 tree old1, tree new1)
2857 tree type = TREE_TYPE (arg);
2858 enum tree_code code = TREE_CODE (arg);
2859 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2861 /* We can handle some of the tcc_expression cases here. */
2862 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2863 tclass = tcc_unary;
2864 else if (tclass == tcc_expression
2865 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2866 tclass = tcc_binary;
2868 switch (tclass)
2870 case tcc_unary:
2871 return fold_build1_loc (loc, code, type,
2872 eval_subst (loc, TREE_OPERAND (arg, 0),
2873 old0, new0, old1, new1));
2875 case tcc_binary:
2876 return fold_build2_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1),
2879 eval_subst (loc, TREE_OPERAND (arg, 1),
2880 old0, new0, old1, new1));
2882 case tcc_expression:
2883 switch (code)
2885 case SAVE_EXPR:
2886 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2887 old1, new1);
2889 case COMPOUND_EXPR:
2890 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2891 old1, new1);
2893 case COND_EXPR:
2894 return fold_build3_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1),
2899 eval_subst (loc, TREE_OPERAND (arg, 2),
2900 old0, new0, old1, new1));
2901 default:
2902 break;
2904 /* Fall through - ??? */
2906 case tcc_comparison:
2908 tree arg0 = TREE_OPERAND (arg, 0);
2909 tree arg1 = TREE_OPERAND (arg, 1);
2911 /* We need to check both for exact equality and tree equality. The
2912 former will be true if the operand has a side-effect. In that
2913 case, we know the operand occurred exactly once. */
2915 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2916 arg0 = new0;
2917 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2918 arg0 = new1;
2920 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2921 arg1 = new0;
2922 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2923 arg1 = new1;
2925 return fold_build2_loc (loc, code, type, arg0, arg1);
2928 default:
2929 return arg;
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED was previously an operand of the expression
2935 but is now not needed (e.g., we folded OMITTED * 0).
2937 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2938 the conversion of RESULT to TYPE. */
2940 tree
2941 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2943 tree t = fold_convert_loc (loc, type, result);
2945 /* If the resulting operand is an empty statement, just return the omitted
2946 statement cast to void. */
2947 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2948 return build1_loc (loc, NOP_EXPR, void_type_node,
2949 fold_ignored_result (omitted));
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2_loc (loc, COMPOUND_EXPR, type,
2953 fold_ignored_result (omitted), t);
2955 return non_lvalue_loc (loc, t);
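/* Example (editorial note): when folding f () * 0, the call cannot
   simply be discarded, so this function produces COMPOUND_EXPR
   (f (), 0): f is still evaluated for its side effects while the
   whole expression yields the constant.  */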
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2962 tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement cast to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2969 return build1_loc (loc, NOP_EXPR, void_type_node,
2970 fold_ignored_result (omitted));
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2_loc (loc, COMPOUND_EXPR, type,
2974 fold_ignored_result (omitted), t);
2976 return pedantic_non_lvalue_loc (loc, t);
2979 /* Return a tree for the case when the result of an expression is RESULT
2980 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2981 of the expression but are now not needed.
2983 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2984 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2985 evaluated before OMITTED2. Otherwise, if neither has side effects,
2986 just do the conversion of RESULT to TYPE. */
2988 tree
2989 omit_two_operands_loc (location_t loc, tree type, tree result,
2990 tree omitted1, tree omitted2)
2992 tree t = fold_convert_loc (loc, type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 tree
3011 fold_truth_not_expr (location_t loc, tree arg)
3013 tree type = TREE_TYPE (arg);
3014 enum tree_code code = TREE_CODE (arg);
3015 location_t loc1, loc2;
3017 /* If this is a comparison, we can simply invert it, except for
3018 floating-point non-equality comparisons, in which case we just
3019 enclose a TRUTH_NOT_EXPR around what we have. */
3021 if (TREE_CODE_CLASS (code) == tcc_comparison)
3023 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3024 if (FLOAT_TYPE_P (op_type)
3025 && flag_trapping_math
3026 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3027 && code != NE_EXPR && code != EQ_EXPR)
3028 return NULL_TREE;
3030 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3031 if (code == ERROR_MARK)
3032 return NULL_TREE;
3034 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3035 TREE_OPERAND (arg, 1));
3038 switch (code)
3040 case INTEGER_CST:
3041 return constant_boolean_node (integer_zerop (arg), type);
3043 case TRUTH_AND_EXPR:
3044 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3045 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3046 return build2_loc (loc, TRUTH_OR_EXPR, type,
3047 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3048 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3050 case TRUTH_OR_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_AND_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3066 else
3067 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3068 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3073 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3074 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3075 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3076 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 case TRUTH_ORIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 case TRUTH_NOT_EXPR:
3086 return TREE_OPERAND (arg, 0);
3088 case COND_EXPR:
3090 tree arg1 = TREE_OPERAND (arg, 1);
3091 tree arg2 = TREE_OPERAND (arg, 2);
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3096 /* A COND_EXPR may have a throw as one operand, which
3097 then has void type. Just leave void operands
3098 as they are. */
3099 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3100 VOID_TYPE_P (TREE_TYPE (arg1))
3101 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3102 VOID_TYPE_P (TREE_TYPE (arg2))
3103 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3106 case COMPOUND_EXPR:
3107 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 TREE_OPERAND (arg, 0),
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3112 case NON_LVALUE_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3116 CASE_CONVERT:
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3118 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3120 /* ... fall through ... */
3122 case FLOAT_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 return build1_loc (loc, TREE_CODE (arg), type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3127 case BIT_AND_EXPR:
3128 if (!integer_onep (TREE_OPERAND (arg, 1)))
3129 return NULL_TREE;
3130 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3132 case SAVE_EXPR:
3133 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3135 case CLEANUP_POINT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3140 default:
3141 return NULL_TREE;
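/* Example (editorial note): negating x > y on doubles under
   -ftrapping-math returns NULL_TREE above, since the inverted code
   (UNLE_EXPR) would no longer trap on unordered operands; the caller
   below then falls back to wrapping the comparison in an explicit
   TRUTH_NOT_EXPR.  */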
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 tree
3153 invert_truthvalue_loc (location_t loc, tree arg)
3155 tree tem;
3157 if (TREE_CODE (arg) == ERROR_MARK)
3158 return arg;
3160 tem = fold_truth_not_expr (loc, arg);
3161 if (!tem)
3162 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3164 return tem;
3167 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3168 operands are another bit-wise operation with a common input. If so,
3169 distribute the bit operations to save an operation and possibly two if
3170 constants are involved. For example, convert
3171 (A | B) & (A | C) into A | (B & C)
3172 Further simplification will occur if B and C are constants.
3174 If this optimization cannot be done, 0 will be returned. */
3176 static tree
3177 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3178 tree arg0, tree arg1)
3180 tree common;
3181 tree left, right;
3183 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3184 || TREE_CODE (arg0) == code
3185 || (TREE_CODE (arg0) != BIT_AND_EXPR
3186 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3187 return 0;
3189 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3191 common = TREE_OPERAND (arg0, 0);
3192 left = TREE_OPERAND (arg0, 1);
3193 right = TREE_OPERAND (arg1, 1);
3195 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3197 common = TREE_OPERAND (arg0, 0);
3198 left = TREE_OPERAND (arg0, 1);
3199 right = TREE_OPERAND (arg1, 0);
3201 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3203 common = TREE_OPERAND (arg0, 1);
3204 left = TREE_OPERAND (arg0, 0);
3205 right = TREE_OPERAND (arg1, 1);
3207 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3209 common = TREE_OPERAND (arg0, 1);
3210 left = TREE_OPERAND (arg0, 0);
3211 right = TREE_OPERAND (arg1, 0);
3213 else
3214 return 0;
3216 common = fold_convert_loc (loc, type, common);
3217 left = fold_convert_loc (loc, type, left);
3218 right = fold_convert_loc (loc, type, right);
3219 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3220 fold_build2_loc (loc, code, type, left, right));
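/* Worked example (editorial note): for (x | 3) & (x | 5) the common
   operand is x, so the result is x | (3 & 5), which folds further to
   x | 1 -- one bitwise operation instead of three.  */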
3223 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3224 with code CODE. This optimization is unsafe. */
3225 static tree
3226 distribute_real_division (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3230 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3232 /* (A / C) +- (B / C) -> (A +- B) / C. */
3233 if (mul0 == mul1
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1), 0))
3236 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3237 fold_build2_loc (loc, code, type,
3238 TREE_OPERAND (arg0, 0),
3239 TREE_OPERAND (arg1, 0)),
3240 TREE_OPERAND (arg0, 1));
3242 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3243 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0), 0)
3245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3248 REAL_VALUE_TYPE r0, r1;
3249 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3250 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3251 if (!mul0)
3252 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3253 if (!mul1)
3254 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3255 real_arithmetic (&r0, code, &r0, &r1);
3256 return fold_build2_loc (loc, MULT_EXPR, type,
3257 TREE_OPERAND (arg0, 0),
3258 build_real (type, r0));
3261 return NULL_TREE;
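/* Example (editorial note): the second pattern rewrites
   a / 5.0 + a / 4.0 as a * (1/5.0 + 1/4.0) = a * 0.45.  It is unsafe
   because the reciprocals and their sum are rounded at compile time,
   so the result may differ in the last bits from the two-division
   form.  */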
3264 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3265 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3267 static tree
3268 make_bit_field_ref (location_t loc, tree inner, tree type,
3269 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3271 tree result, bftype;
3273 if (bitpos == 0)
3275 tree size = TYPE_SIZE (TREE_TYPE (inner));
3276 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3277 || POINTER_TYPE_P (TREE_TYPE (inner)))
3278 && host_integerp (size, 0)
3279 && tree_low_cst (size, 0) == bitsize)
3280 return fold_convert_loc (loc, type, inner);
3283 bftype = type;
3284 if (TYPE_PRECISION (bftype) != bitsize
3285 || TYPE_UNSIGNED (bftype) == !unsignedp)
3286 bftype = build_nonstandard_integer_type (bitsize, 0);
3288 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 if (bftype != type)
3292 result = fold_convert_loc (loc, type, result);
3294 return result;
3297 /* Optimize a bit-field compare.
3299 There are two cases: First is a compare against a constant and the
3300 second is a comparison of two items where the fields are at the same
3301 bit position relative to the start of a chunk (byte, halfword, word)
3302 large enough to contain it. In these cases we can avoid the shift
3303 implicit in bitfield extractions.
3305 For constants, we emit a compare of the shifted constant with the
3306 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3307 compared. For two fields at the same position, we do the ANDs with the
3308 similar mask and compare the result of the ANDs.
3310 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3311 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3312 are the left and right operands of the comparison, respectively.
3314 If the optimization described above can be done, we return the resulting
3315 tree. Otherwise we return zero. */
3317 static tree
3318 optimize_bit_field_compare (location_t loc, enum tree_code code,
3319 tree compare_type, tree lhs, tree rhs)
3321 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3322 tree type = TREE_TYPE (lhs);
3323 tree signed_type, unsigned_type;
3324 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3325 enum machine_mode lmode, rmode, nmode;
3326 int lunsignedp, runsignedp;
3327 int lvolatilep = 0, rvolatilep = 0;
3328 tree linner, rinner = NULL_TREE;
3329 tree mask;
3330 tree offset;
3332 /* In the strict volatile bitfields case, doing code changes here may prevent
3333 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3334 if (flag_strict_volatile_bitfields > 0)
3335 return 0;
3337 /* Get all the information about the extractions being done. If the bit size
3338 is the same as the size of the underlying object, we aren't doing an
3339 extraction at all and so can do nothing. We also don't want to
3340 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3341 then will no longer be able to replace it. */
3342 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3343 &lunsignedp, &lvolatilep, false);
3344 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3345 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3346 return 0;
3348 if (!const_p)
3350 /* If this is not a constant, we can only do something if bit positions,
3351 sizes, and signedness are the same. */
3352 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3353 &runsignedp, &rvolatilep, false);
3355 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3356 || lunsignedp != runsignedp || offset != 0
3357 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3358 return 0;
3361 /* See if we can find a mode to refer to this field. We should be able to,
3362 but fail if we can't. */
3363 if (lvolatilep
3364 && GET_MODE_BITSIZE (lmode) > 0
3365 && flag_strict_volatile_bitfields > 0)
3366 nmode = lmode;
3367 else
3368 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3369 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3370 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3371 TYPE_ALIGN (TREE_TYPE (rinner))),
3372 word_mode, lvolatilep || rvolatilep);
3373 if (nmode == VOIDmode)
3374 return 0;
3376 /* Set signed and unsigned types of the precision of this mode for the
3377 shifts below. */
3378 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3379 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3381 /* Compute the bit position and size for the new reference and our offset
3382 within it. If the new reference is the same size as the original, we
3383 won't optimize anything, so return zero. */
3384 nbitsize = GET_MODE_BITSIZE (nmode);
3385 nbitpos = lbitpos & ~ (nbitsize - 1);
3386 lbitpos -= nbitpos;
3387 if (nbitsize == lbitsize)
3388 return 0;
3390 if (BYTES_BIG_ENDIAN)
3391 lbitpos = nbitsize - lbitsize - lbitpos;
3393 /* Make the mask to be used against the extracted field. */
3394 mask = build_int_cst_type (unsigned_type, -1);
3395 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3396 mask = const_binop (RSHIFT_EXPR, mask,
3397 size_int (nbitsize - lbitsize - lbitpos));
3399 if (! const_p)
3400 /* If not comparing with constant, just rework the comparison
3401 and return. */
3402 return fold_build2_loc (loc, code, compare_type,
3403 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3404 make_bit_field_ref (loc, linner,
3405 unsigned_type,
3406 nbitsize, nbitpos,
3407 1),
3408 mask),
3409 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3410 make_bit_field_ref (loc, rinner,
3411 unsigned_type,
3412 nbitsize, nbitpos,
3413 1),
3414 mask));
3416 /* Otherwise, we are handling the constant case. See if the constant is too
3417 big for the field. Warn and return a tree for 0 (false) if so. We do
3418 this not only for its own sake, but to avoid having to test for this
3419 error case below. If we didn't, we might generate wrong code.
3421 For unsigned fields, the constant shifted right by the field length should
3422 be all zero. For signed fields, the high-order bits should agree with
3423 the sign bit. */
3425 if (lunsignedp)
3427 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3428 fold_convert_loc (loc,
3429 unsigned_type, rhs),
3430 size_int (lbitsize))))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR,
3440 fold_convert_loc (loc, signed_type, rhs),
3441 size_int (lbitsize - 1));
3442 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3444 warning (0, "comparison is always %d due to width of bit-field",
3445 code == NE_EXPR);
3446 return constant_boolean_node (code == NE_EXPR, compare_type);
3450 /* Single-bit compares should always be against zero. */
3451 if (lbitsize == 1 && ! integer_zerop (rhs))
3453 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3454 rhs = build_int_cst (type, 0);
3457 /* Make a new bitfield reference, shift the constant over the
3458 appropriate number of bits and mask it with the computed mask
3459 (in case this was a signed field). If we changed it, make a new one. */
3460 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3461 if (lvolatilep)
3463 TREE_SIDE_EFFECTS (lhs) = 1;
3464 TREE_THIS_VOLATILE (lhs) = 1;
3467 rhs = const_binop (BIT_AND_EXPR,
3468 const_binop (LSHIFT_EXPR,
3469 fold_convert_loc (loc, unsigned_type, rhs),
3470 size_int (lbitpos)),
3471 mask);
3473 lhs = build2_loc (loc, code, compare_type,
3474 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3475 return lhs;
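/* Example (editorial note): given struct S { unsigned f : 4; } s, a
   comparison like s.f == 3 can be rewritten along the lines of

     (WORD (s) & MASK) == ((3 << lbitpos) & MASK)

   where WORD (s) is an illustrative word-sized load covering the field
   and MASK selects its bits: the shift a plain bitfield read would
   need is folded into the constant instead.  */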
3478 /* Subroutine for fold_truth_andor_1: decode a field reference.
3480 If EXP is a comparison reference, we return the innermost reference.
3482 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3483 set to the starting bit number.
3485 If the innermost field can be completely contained in a mode-sized
3486 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3488 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3489 otherwise it is not changed.
3491 *PUNSIGNEDP is set to the signedness of the field.
3493 *PMASK is set to the mask used. This is either contained in a
3494 BIT_AND_EXPR or derived from the width of the field.
3496 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3498 Return 0 if this is not a component reference or is one that we can't
3499 do anything with. */
3501 static tree
3502 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3503 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3504 int *punsignedp, int *pvolatilep,
3505 tree *pmask, tree *pand_mask)
3507 tree outer_type = 0;
3508 tree and_mask = 0;
3509 tree mask, inner, offset;
3510 tree unsigned_type;
3511 unsigned int precision;
3513 /* All the optimizations using this function assume integer fields.
3514 There are problems with FP fields since the type_for_size call
3515 below can fail for, e.g., XFmode. */
3516 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3517 return 0;
3519 /* We are interested in the bare arrangement of bits, so strip everything
3520 that doesn't affect the machine mode. However, record the type of the
3521 outermost expression if it may matter below. */
3522 if (CONVERT_EXPR_P (exp)
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst_type (unsigned_type, -1);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3556 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3558 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3559 if (and_mask != 0)
3560 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3561 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3563 *pmask = mask;
3564 *pand_mask = and_mask;
3565 return inner;
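/* Example (editorial note): for EXP of the form (s.f & 0x0f), the
   BIT_AND_EXPR constant 0x0f is returned in *PAND_MASK, the
   get_inner_reference walk above locates the object and bit range of
   s.f, and *PMASK is 0x0f merged with the all-ones mask of the
   field's width.  */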
3568 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3569 bit positions. */
3571 static int
3572 all_ones_mask_p (const_tree mask, int size)
3574 tree type = TREE_TYPE (mask);
3575 unsigned int precision = TYPE_PRECISION (type);
3576 tree tmask;
3578 tmask = build_int_cst_type (signed_type_for (type), -1);
3580 return
3581 tree_int_cst_equal (mask,
3582 const_binop (RSHIFT_EXPR,
3583 const_binop (LSHIFT_EXPR, tmask,
3584 size_int (precision - size)),
3585 size_int (precision - size)));
3588 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3589 represents the sign bit of EXP's type. If EXP represents a sign
3590 or zero extension, also test VAL against the unextended type.
3591 The return value is the (sub)expression whose sign bit is VAL,
3592 or NULL_TREE otherwise. */
3594 static tree
3595 sign_bit_p (tree exp, const_tree val)
3597 unsigned HOST_WIDE_INT mask_lo, lo;
3598 HOST_WIDE_INT mask_hi, hi;
3599 int width;
3600 tree t;
3602 /* Tree EXP must have an integral type. */
3603 t = TREE_TYPE (exp);
3604 if (! INTEGRAL_TYPE_P (t))
3605 return NULL_TREE;
3607 /* Tree VAL must be an integer constant. */
3608 if (TREE_CODE (val) != INTEGER_CST
3609 || TREE_OVERFLOW (val))
3610 return NULL_TREE;
3612 width = TYPE_PRECISION (t);
3613 if (width > HOST_BITS_PER_WIDE_INT)
3615 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3616 lo = 0;
3618 mask_hi = ((unsigned HOST_WIDE_INT) -1
3619 >> (HOST_BITS_PER_DOUBLE_INT - width));
3620 mask_lo = -1;
3622 else
3624 hi = 0;
3625 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3627 mask_hi = 0;
3628 mask_lo = ((unsigned HOST_WIDE_INT) -1
3629 >> (HOST_BITS_PER_WIDE_INT - width));
3632 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3633 treat VAL as if it were unsigned. */
3634 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3635 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3636 return exp;
3638 /* Handle extension from a narrower type. */
3639 if (TREE_CODE (exp) == NOP_EXPR
3640 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3641 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3643 return NULL_TREE;
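/* Example (editorial note): for a 32-bit int EXP, sign_bit_p returns
   EXP when VAL is 0x80000000; for EXP of the form (int) some_short it
   also accepts 0x8000, by recursing on the narrower operand.  */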
3646 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3647 to be evaluated unconditionally. */
3649 static int
3650 simple_operand_p (const_tree exp)
3652 /* Strip any conversions that don't change the machine mode. */
3653 STRIP_NOPS (exp);
3655 return (CONSTANT_CLASS_P (exp)
3656 || TREE_CODE (exp) == SSA_NAME
3657 || (DECL_P (exp)
3658 && ! TREE_ADDRESSABLE (exp)
3659 && ! TREE_THIS_VOLATILE (exp)
3660 && ! DECL_NONLOCAL (exp)
3661 /* Don't regard global variables as simple. They may be
3662 allocated in ways unknown to the compiler (shared memory,
3663 #pragma weak, etc). */
3664 && ! TREE_PUBLIC (exp)
3665 && ! DECL_EXTERNAL (exp)
3666 /* Loading a static variable is unduly expensive, but global
3667 registers aren't expensive. */
3668 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3671 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3672 to be evaluated unconditionally.
3673 In addition to simple_operand_p, we assume that comparisons, conversions,
3674 and logic-not operations are simple, if their operands are simple, too. */
3676 static bool
3677 simple_operand_p_2 (tree exp)
3679 enum tree_code code;
3681 if (TREE_SIDE_EFFECTS (exp)
3682 || tree_could_trap_p (exp))
3683 return false;
3685 while (CONVERT_EXPR_P (exp))
3686 exp = TREE_OPERAND (exp, 0);
3688 code = TREE_CODE (exp);
3690 if (TREE_CODE_CLASS (code) == tcc_comparison)
3691 return (simple_operand_p (TREE_OPERAND (exp, 0))
3692 && simple_operand_p (TREE_OPERAND (exp, 1)));
3694 if (code == TRUTH_NOT_EXPR)
3695 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3697 return simple_operand_p (exp);
3701 /* The following functions are subroutines to fold_range_test and allow it to
3702 try to change a logical combination of comparisons into a range test.
3704 For example, both
3705 X == 2 || X == 3 || X == 4 || X == 5
3706 and
3707 X >= 2 && X <= 5
3708 are converted to
3709 (unsigned) (X - 2) <= 3
3711 We describe each set of comparisons as being either inside or outside
3712 a range, using a variable named like IN_P, and then describe the
3713 range with a lower and upper bound. If one of the bounds is omitted,
3714 it represents either the highest or lowest value of the type.
3716 In the comments below, we represent a range by two numbers in brackets
3717 preceded by a "+" to designate being inside that range, or a "-" to
3718 designate being outside that range, so the condition can be inverted by
3719 flipping the prefix. An omitted bound is represented by a "-". For
3720 example, "- [-, 10]" means being outside the range starting at the lowest
3721 possible value and ending at 10, in other words, being greater than 10.
3722 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 always false.
3725 We set up things so that the missing bounds are handled in a consistent
3726 manner so neither a missing bound nor "true" and "false" need to be
3727 handled using a special case. */
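/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   the two predicates below are equivalent for any int X and show the
   rewrite described above at the source level.  Subtracting the low
   bound in unsigned arithmetic makes values below 2 wrap around to
   huge numbers, so one unsigned comparison covers the whole range.  */

static int
range_test_naive (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_test_folded (int x)
{
  return (unsigned) x - 2u <= 3u;
}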
3729 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3730 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3731 and UPPER1_P are nonzero if the respective argument is an upper bound
3732 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3733 must be specified for a comparison. ARG1 will be converted to ARG0's
3734 type if both are specified. */
3736 static tree
3737 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3738 tree arg1, int upper1_p)
3740 tree tem;
3741 int result;
3742 int sgn0, sgn1;
3744 /* If neither arg represents infinity, do the normal operation.
3745 Else, if not a comparison, return infinity. Else handle the special
3746 comparison rules. Note that most of the cases below won't occur, but
3747 are handled for consistency. */
3749 if (arg0 != 0 && arg1 != 0)
3751 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3752 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 STRIP_NOPS (tem);
3754 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3757 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 return 0;
3760 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3761 for neither. In real maths, we cannot assume open ended ranges are
3762 the same. But this is computer arithmetic, where numbers are finite.
3763 We can therefore substitute for a missing bound a value Z (or -Z),
3764 Z being greater than any representable number. This permits
3765 us to treat unbounded ranges as equal. */
3766 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3767 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3768 switch (code)
3770 case EQ_EXPR:
3771 result = sgn0 == sgn1;
3772 break;
3773 case NE_EXPR:
3774 result = sgn0 != sgn1;
3775 break;
3776 case LT_EXPR:
3777 result = sgn0 < sgn1;
3778 break;
3779 case LE_EXPR:
3780 result = sgn0 <= sgn1;
3781 break;
3782 case GT_EXPR:
3783 result = sgn0 > sgn1;
3784 break;
3785 case GE_EXPR:
3786 result = sgn0 >= sgn1;
3787 break;
3788 default:
3789 gcc_unreachable ();
3792 return constant_boolean_node (result, type);
3795 /* Helper routine for make_range. Perform one step for it, return
3796 new expression if the loop should continue or NULL_TREE if it should
3797 stop. */
3799 tree
3800 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3801 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3802 bool *strict_overflow_p)
3804 tree arg0_type = TREE_TYPE (arg0);
3805 tree n_low, n_high, low = *p_low, high = *p_high;
3806 int in_p = *p_in_p, n_in_p;
3808 switch (code)
3810 case TRUTH_NOT_EXPR:
3811 *p_in_p = ! in_p;
3812 return arg0;
3814 case EQ_EXPR: case NE_EXPR:
3815 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3816 /* We can only do something if the range is testing for zero
3817 and if the second operand is an integer constant. Note that
3818 saying something is "in" the range we make is done by
3819 complementing IN_P, since IN_P starts out encoding the initial
3820 case of being not equal to zero; "out" means leaving it alone. */
3821 if (low == NULL_TREE || high == NULL_TREE
3822 || ! integer_zerop (low) || ! integer_zerop (high)
3823 || TREE_CODE (arg1) != INTEGER_CST)
3824 return NULL_TREE;
3826 switch (code)
3828 case NE_EXPR: /* - [c, c] */
3829 low = high = arg1;
3830 break;
3831 case EQ_EXPR: /* + [c, c] */
3832 in_p = ! in_p, low = high = arg1;
3833 break;
3834 case GT_EXPR: /* - [-, c] */
3835 low = 0, high = arg1;
3836 break;
3837 case GE_EXPR: /* + [c, -] */
3838 in_p = ! in_p, low = arg1, high = 0;
3839 break;
3840 case LT_EXPR: /* - [c, -] */
3841 low = arg1, high = 0;
3842 break;
3843 case LE_EXPR: /* + [-, c] */
3844 in_p = ! in_p, low = 0, high = arg1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 /* If this is an unsigned comparison, we also know that EXP is
3851 greater than or equal to zero. We base the range tests we make
3852 on that fact, so we record it here so we can parse existing
3853 range tests. We test arg0_type since often the return type
3854 of, e.g. EQ_EXPR, is boolean. */
3855 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3857 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3858 in_p, low, high, 1,
3859 build_int_cst (arg0_type, 0),
3860 NULL_TREE))
3861 return NULL_TREE;
3863 in_p = n_in_p, low = n_low, high = n_high;
3865 /* If the high bound is missing, but we have a nonzero low
3866 bound, reverse the range so it goes from zero to the low bound
3867 minus 1. */
3868 if (high == 0 && low && ! integer_zerop (low))
3870 in_p = ! in_p;
3871 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3872 integer_one_node, 0);
3873 low = build_int_cst (arg0_type, 0);
3877 *p_low = low;
3878 *p_high = high;
3879 *p_in_p = in_p;
3880 return arg0;
3882 case NEGATE_EXPR:
3883 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3884 low and high are non-NULL, then normalize will DTRT. */
3885 if (!TYPE_UNSIGNED (arg0_type)
3886 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3888 if (low == NULL_TREE)
3889 low = TYPE_MIN_VALUE (arg0_type);
3890 if (high == NULL_TREE)
3891 high = TYPE_MAX_VALUE (arg0_type);
3894 /* (-x) IN [a,b] -> x in [-b, -a] */
3895 n_low = range_binop (MINUS_EXPR, exp_type,
3896 build_int_cst (exp_type, 0),
3897 0, high, 1);
3898 n_high = range_binop (MINUS_EXPR, exp_type,
3899 build_int_cst (exp_type, 0),
3900 0, low, 0);
3901 if (n_high != 0 && TREE_OVERFLOW (n_high))
3902 return NULL_TREE;
3903 goto normalize;
3905 case BIT_NOT_EXPR:
3906 /* ~ X -> -X - 1 */
3907 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3908 build_int_cst (exp_type, 1));
3910 case PLUS_EXPR:
3911 case MINUS_EXPR:
3912 if (TREE_CODE (arg1) != INTEGER_CST)
3913 return NULL_TREE;
3915 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3916 move a constant to the other side. */
3917 if (!TYPE_UNSIGNED (arg0_type)
3918 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3919 return NULL_TREE;
3921 /* If EXP is signed, any overflow in the computation is undefined,
3922 so we don't worry about it so long as our computations on
3923 the bounds don't overflow. For unsigned, overflow is defined
3924 and this is exactly the right thing. */
3925 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3926 arg0_type, low, 0, arg1, 0);
3927 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3928 arg0_type, high, 1, arg1, 0);
3929 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3930 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3931 return NULL_TREE;
3933 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3934 *strict_overflow_p = true;
3936 normalize:
3937 /* Check for an unsigned range which has wrapped around the maximum
3938 value thus making n_high < n_low, and normalize it. */
3939 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3941 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3942 integer_one_node, 0);
3943 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3944 integer_one_node, 0);
3946 /* If the range is of the form +/- [ x+1, x ], we won't
3947 be able to normalize it. But then, it represents the
3948 whole range or the empty set, so make it
3949 +/- [ -, - ]. */
3950 if (tree_int_cst_equal (n_low, low)
3951 && tree_int_cst_equal (n_high, high))
3952 low = high = 0;
3953 else
3954 in_p = ! in_p;
3956 else
3957 low = n_low, high = n_high;
3959 *p_low = low;
3960 *p_high = high;
3961 *p_in_p = in_p;
3962 return arg0;
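/* Worked example for the normalization above (illustrative, not from
   the sources): for unsigned char X, the test
   (unsigned char) (X + 100) <= 200 describes the range + [0, 200] for
   X + 100, so N_LOW = 0 - 100 = 156 and N_HIGH = 200 - 100 = 100 after
   wrapping.  N_HIGH < N_LOW, so the range is normalized to the inverted
   range - [101, 155]: the test holds exactly when X lies outside
   [101, 155].  */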
3964 CASE_CONVERT:
3965 case NON_LVALUE_EXPR:
3966 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3967 return NULL_TREE;
3969 if (! INTEGRAL_TYPE_P (arg0_type)
3970 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3971 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3972 return NULL_TREE;
3974 n_low = low, n_high = high;
3976 if (n_low != 0)
3977 n_low = fold_convert_loc (loc, arg0_type, n_low);
3979 if (n_high != 0)
3980 n_high = fold_convert_loc (loc, arg0_type, n_high);
3982 /* If we're converting arg0 from an unsigned type to exp,
3983 a signed type, we will be doing the comparison as unsigned.
3984 The tests above have already verified that LOW and HIGH
3985 are both positive.
3987 So we have to ensure that we will handle large unsigned
3988 values the same way that the current signed bounds treat
3989 negative values. */
3991 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3993 tree high_positive;
3994 tree equiv_type;
3995 /* For fixed-point modes, we need to pass the saturating flag
3996 as the 2nd parameter. */
3997 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3998 equiv_type
3999 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4000 TYPE_SATURATING (arg0_type));
4001 else
4002 equiv_type
4003 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4005 /* A range without an upper bound is, naturally, unbounded.
4006 Since convert would have cropped a very large value, use
4007 the max value for the destination type. */
4008 high_positive
4009 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4010 : TYPE_MAX_VALUE (arg0_type);
4012 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4013 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4014 fold_convert_loc (loc, arg0_type,
4015 high_positive),
4016 build_int_cst (arg0_type, 1));
4018 /* If the low bound is specified, "and" the range with the
4019 range for which the original unsigned value will be
4020 positive. */
4021 if (low != 0)
4023 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4024 1, fold_convert_loc (loc, arg0_type,
4025 integer_zero_node),
4026 high_positive))
4027 return NULL_TREE;
4029 in_p = (n_in_p == in_p);
4031 else
4033 /* Otherwise, "or" the range with the range of the input
4034 that will be interpreted as negative. */
4035 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4036 1, fold_convert_loc (loc, arg0_type,
4037 integer_zero_node),
4038 high_positive))
4039 return NULL_TREE;
4041 in_p = (in_p != n_in_p);
4045 *p_low = n_low;
4046 *p_high = n_high;
4047 *p_in_p = in_p;
4048 return arg0;
4050 default:
4051 return NULL_TREE;
4055 /* Given EXP, a logical expression, set the range it is testing into
4056 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4057 actually being tested. *PLOW and *PHIGH will be made of the same
4058 type as the returned expression. If EXP is not a comparison, we
4059 will most likely not be returning a useful value and range. Set
4060 *STRICT_OVERFLOW_P to true if the return value is only valid
4061 because signed overflow is undefined; otherwise, do not change
4062 *STRICT_OVERFLOW_P. */
4064 tree
4065 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4066 bool *strict_overflow_p)
4068 enum tree_code code;
4069 tree arg0, arg1 = NULL_TREE;
4070 tree exp_type, nexp;
4071 int in_p;
4072 tree low, high;
4073 location_t loc = EXPR_LOCATION (exp);
4075 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4076 and see if we can refine the range. Some of the cases below may not
4077 happen, but it doesn't seem worth worrying about this. We "continue"
4078 the outer loop when we've changed something; otherwise we "break"
4079 the switch, which will "break" the while. */
4081 in_p = 0;
4082 low = high = build_int_cst (TREE_TYPE (exp), 0);
4084 while (1)
4086 code = TREE_CODE (exp);
4087 exp_type = TREE_TYPE (exp);
4088 arg0 = NULL_TREE;
4090 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4092 if (TREE_OPERAND_LENGTH (exp) > 0)
4093 arg0 = TREE_OPERAND (exp, 0);
4094 if (TREE_CODE_CLASS (code) == tcc_binary
4095 || TREE_CODE_CLASS (code) == tcc_comparison
4096 || (TREE_CODE_CLASS (code) == tcc_expression
4097 && TREE_OPERAND_LENGTH (exp) > 1))
4098 arg1 = TREE_OPERAND (exp, 1);
4100 if (arg0 == NULL_TREE)
4101 break;
4103 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4104 &high, &in_p, strict_overflow_p);
4105 if (nexp == NULL_TREE)
4106 break;
4107 exp = nexp;
4110 /* If EXP is a constant, we can evaluate whether this is true or false. */
4111 if (TREE_CODE (exp) == INTEGER_CST)
4113 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4114 exp, 0, low, 0))
4115 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4116 exp, 1, high, 1)));
4117 low = high = 0;
4118 exp = 0;
4121 *pin_p = in_p, *plow = low, *phigh = high;
4122 return exp;
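/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   conceptually, the negated digit test below denotes the range
   - ['0', '9'], i.e. in_p == 0 with those bounds (the merging of the
   two comparisons is done by fold_range_test / merge_ranges), and
   build_range_check then emits a single unsigned comparison.  The two
   forms are equivalent for any int C.  */

static int
not_digit_naive (int c)
{
  return !(c >= '0' && c <= '9');
}

static int
not_digit_as_range (int c)
{
  return (unsigned) c - '0' > 9u;
}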
4125 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4126 type, TYPE, return an expression to test if EXP is in (or out of, depending
4127 on IN_P) the range. Return 0 if the test couldn't be created. */
4129 tree
4130 build_range_check (location_t loc, tree type, tree exp, int in_p,
4131 tree low, tree high)
4133 tree etype = TREE_TYPE (exp), value;
4135 #ifdef HAVE_canonicalize_funcptr_for_compare
4136 /* Disable this optimization for function pointer expressions
4137 on targets that require function pointer canonicalization. */
4138 if (HAVE_canonicalize_funcptr_for_compare
4139 && TREE_CODE (etype) == POINTER_TYPE
4140 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4141 return NULL_TREE;
4142 #endif
4144 if (! in_p)
4146 value = build_range_check (loc, type, exp, 1, low, high);
4147 if (value != 0)
4148 return invert_truthvalue_loc (loc, value);
4150 return 0;
4153 if (low == 0 && high == 0)
4154 return build_int_cst (type, 1);
4156 if (low == 0)
4157 return fold_build2_loc (loc, LE_EXPR, type, exp,
4158 fold_convert_loc (loc, etype, high));
4160 if (high == 0)
4161 return fold_build2_loc (loc, GE_EXPR, type, exp,
4162 fold_convert_loc (loc, etype, low));
4164 if (operand_equal_p (low, high, 0))
4165 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4166 fold_convert_loc (loc, etype, low));
4168 if (integer_zerop (low))
4170 if (! TYPE_UNSIGNED (etype))
4172 etype = unsigned_type_for (etype);
4173 high = fold_convert_loc (loc, etype, high);
4174 exp = fold_convert_loc (loc, etype, exp);
4176 return build_range_check (loc, type, exp, 1, 0, high);
4179 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4180 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4182 unsigned HOST_WIDE_INT lo;
4183 HOST_WIDE_INT hi;
4184 int prec;
4186 prec = TYPE_PRECISION (etype);
4187 if (prec <= HOST_BITS_PER_WIDE_INT)
4189 hi = 0;
4190 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4192 else
4194 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4195 lo = (unsigned HOST_WIDE_INT) -1;
4198 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4200 if (TYPE_UNSIGNED (etype))
4202 tree signed_etype = signed_type_for (etype);
4203 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4204 etype
4205 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4206 else
4207 etype = signed_etype;
4208 exp = fold_convert_loc (loc, etype, exp);
4210 return fold_build2_loc (loc, GT_EXPR, type, exp,
4211 build_int_cst (etype, 0));
4215 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4216 This requires wrap-around arithmetic for the type of the expression.
4217 First make sure that arithmetic in this type is valid, then make sure
4218 that it wraps around. */
4219 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4220 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4221 TYPE_UNSIGNED (etype));
4223 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4225 tree utype, minv, maxv;
4227 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4228 for the type in question, as we rely on this here. */
4229 utype = unsigned_type_for (etype);
4230 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4231 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4232 integer_one_node, 1);
4233 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4235 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4236 minv, 1, maxv, 1)))
4237 etype = utype;
4238 else
4239 return 0;
4242 high = fold_convert_loc (loc, etype, high);
4243 low = fold_convert_loc (loc, etype, low);
4244 exp = fold_convert_loc (loc, etype, exp);
4246 value = const_binop (MINUS_EXPR, high, low);
4249 if (POINTER_TYPE_P (etype))
4251 if (value != 0 && !TREE_OVERFLOW (value))
4253 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4254 return build_range_check (loc, type,
4255 fold_build_pointer_plus_loc (loc, exp, low),
4256 1, build_int_cst (etype, 0), value);
4258 return 0;
4261 if (value != 0 && !TREE_OVERFLOW (value))
4262 return build_range_check (loc, type,
4263 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4264 1, build_int_cst (etype, 0), value);
4266 return 0;
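/* Illustrative sketch, not part of GCC (helper names are hypothetical;
   converting values above 127 to signed char is implementation-defined
   in ISO C, but GCC defines it as two's complement wrap-around, which
   is what the folder relies on): the (c >= 1) && (c <= 127) case above
   collapses two comparisons into one sign test.  */

static int
in_1_to_127_naive (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
in_1_to_127_folded (unsigned char c)
{
  return (signed char) c > 0;
}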
4269 /* Return the predecessor of VAL in its type, handling the infinite case. */
4271 static tree
4272 range_predecessor (tree val)
4274 tree type = TREE_TYPE (val);
4276 if (INTEGRAL_TYPE_P (type)
4277 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4278 return 0;
4279 else
4280 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4283 /* Return the successor of VAL in its type, handling the infinite case. */
4285 static tree
4286 range_successor (tree val)
4288 tree type = TREE_TYPE (val);
4290 if (INTEGRAL_TYPE_P (type)
4291 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4292 return 0;
4293 else
4294 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297 /* Given two ranges, see if we can merge them into one. Return 1 if we
4298 can, 0 if we can't. Set the output range into the specified parameters. */
4300 bool
4301 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4302 tree high0, int in1_p, tree low1, tree high1)
4304 int no_overlap;
4305 int subset;
4306 int temp;
4307 tree tem;
4308 int in_p;
4309 tree low, high;
4310 int lowequal = ((low0 == 0 && low1 == 0)
4311 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4312 low0, 0, low1, 0)));
4313 int highequal = ((high0 == 0 && high1 == 0)
4314 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4315 high0, 1, high1, 1)));
4317 /* Make range 0 be the range that starts first, or ends last if they
4318 start at the same value. Swap them if necessary. */
4319 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4320 low0, 0, low1, 0))
4321 || (lowequal
4322 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4323 high1, 1, high0, 1))))
4325 temp = in0_p, in0_p = in1_p, in1_p = temp;
4326 tem = low0, low0 = low1, low1 = tem;
4327 tem = high0, high0 = high1, high1 = tem;
4330 /* Now flag two cases, whether the ranges are disjoint or whether the
4331 second range is totally subsumed in the first. Note that the tests
4332 below are simplified by the ones above. */
4333 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4334 high0, 1, low1, 0));
4335 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4336 high1, 1, high0, 1));
4338 /* We now have four cases, depending on whether we are including or
4339 excluding the two ranges. */
4340 if (in0_p && in1_p)
4342 /* If they don't overlap, the result is false. If the second range
4343 is a subset it is the result. Otherwise, the range is from the start
4344 of the second to the end of the first. */
4345 if (no_overlap)
4346 in_p = 0, low = high = 0;
4347 else if (subset)
4348 in_p = 1, low = low1, high = high1;
4349 else
4350 in_p = 1, low = low1, high = high0;
4353 else if (in0_p && ! in1_p)
4355 /* If they don't overlap, the result is the first range. If they are
4356 equal, the result is false. If the second range is a subset of the
4357 first, and the ranges begin at the same place, we go from just after
4358 the end of the second range to the end of the first. If the second
4359 range is not a subset of the first, or if it is a subset and both
4360 ranges end at the same place, the range starts at the start of the
4361 first range and ends just before the second range.
4362 Otherwise, we can't describe this as a single range. */
4363 if (no_overlap)
4364 in_p = 1, low = low0, high = high0;
4365 else if (lowequal && highequal)
4366 in_p = 0, low = high = 0;
4367 else if (subset && lowequal)
4369 low = range_successor (high1);
4370 high = high0;
4371 in_p = 1;
4372 if (low == 0)
4374 /* We are in the weird situation where high0 > high1 but
4375 high1 has no successor. Punt. */
4376 return 0;
4379 else if (! subset || highequal)
4381 low = low0;
4382 high = range_predecessor (low1);
4383 in_p = 1;
4384 if (high == 0)
4386 /* low0 < low1 but low1 has no predecessor. Punt. */
4387 return 0;
4390 else
4391 return 0;
4394 else if (! in0_p && in1_p)
4396 /* If they don't overlap, the result is the second range. If the second
4397 is a subset of the first, the result is false. Otherwise,
4398 the range starts just after the first range and ends at the
4399 end of the second. */
4400 if (no_overlap)
4401 in_p = 1, low = low1, high = high1;
4402 else if (subset || highequal)
4403 in_p = 0, low = high = 0;
4404 else
4406 low = range_successor (high0);
4407 high = high1;
4408 in_p = 1;
4409 if (low == 0)
4411 /* high1 > high0 but high0 has no successor. Punt. */
4412 return 0;
4417 else
4419 /* The case where we are excluding both ranges. Here the complex case
4420 is if they don't overlap. In that case, the only time we have a
4421 range is if they are adjacent. If the second is a subset of the
4422 first, the result is the first. Otherwise, the range to exclude
4423 starts at the beginning of the first range and ends at the end of the
4424 second. */
4425 if (no_overlap)
4427 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4428 range_successor (high0),
4429 1, low1, 0)))
4430 in_p = 0, low = low0, high = high1;
4431 else
4433 /* Canonicalize - [min, x] into - [-, x]. */
4434 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4435 switch (TREE_CODE (TREE_TYPE (low0)))
4437 case ENUMERAL_TYPE:
4438 if (TYPE_PRECISION (TREE_TYPE (low0))
4439 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4440 break;
4441 /* FALLTHROUGH */
4442 case INTEGER_TYPE:
4443 if (tree_int_cst_equal (low0,
4444 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4445 low0 = 0;
4446 break;
4447 case POINTER_TYPE:
4448 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4449 && integer_zerop (low0))
4450 low0 = 0;
4451 break;
4452 default:
4453 break;
4456 /* Canonicalize - [x, max] into - [x, -]. */
4457 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4458 switch (TREE_CODE (TREE_TYPE (high1)))
4460 case ENUMERAL_TYPE:
4461 if (TYPE_PRECISION (TREE_TYPE (high1))
4462 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4463 break;
4464 /* FALLTHROUGH */
4465 case INTEGER_TYPE:
4466 if (tree_int_cst_equal (high1,
4467 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4468 high1 = 0;
4469 break;
4470 case POINTER_TYPE:
4471 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4472 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4473 high1, 1,
4474 integer_one_node, 1)))
4475 high1 = 0;
4476 break;
4477 default:
4478 break;
4481 /* The ranges might be also adjacent between the maximum and
4482 minimum values of the given type. For
4483 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4484 return + [x + 1, y - 1]. */
4485 if (low0 == 0 && high1 == 0)
4487 low = range_successor (high0);
4488 high = range_predecessor (low1);
4489 if (low == 0 || high == 0)
4490 return 0;
4492 in_p = 1;
4494 else
4495 return 0;
4498 else if (subset)
4499 in_p = 0, low = low0, high = high0;
4500 else
4501 in_p = 0, low = low0, high = high1;
4504 *pin_p = in_p, *plow = low, *phigh = high;
4505 return 1;
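/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   with in0_p == in1_p == 1, the ranges + [2, 10] and + [5, 20] overlap
   without either subsuming the other, so the in0_p && in1_p case above
   yields + [5, 10] -- from the start of the second range to the end of
   the first.  The two forms are equivalent for any int X.  */

static int
two_range_tests (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
}

static int
merged_range_test (int x)
{
  return x >= 5 && x <= 10;
}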
4509 /* Subroutine of fold, looking inside expressions of the form
4510 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4511 of the COND_EXPR. This function is being used also to optimize
4512 A op B ? C : A, by reversing the comparison first.
4514 Return a folded expression whose code is not a COND_EXPR
4515 anymore, or NULL_TREE if no folding opportunity is found. */
4517 static tree
4518 fold_cond_expr_with_comparison (location_t loc, tree type,
4519 tree arg0, tree arg1, tree arg2)
4521 enum tree_code comp_code = TREE_CODE (arg0);
4522 tree arg00 = TREE_OPERAND (arg0, 0);
4523 tree arg01 = TREE_OPERAND (arg0, 1);
4524 tree arg1_type = TREE_TYPE (arg1);
4525 tree tem;
4527 STRIP_NOPS (arg1);
4528 STRIP_NOPS (arg2);
4530 /* If we have A op 0 ? A : -A, consider applying the following
4531 transformations:
4533 A == 0? A : -A same as -A
4534 A != 0? A : -A same as A
4535 A >= 0? A : -A same as abs (A)
4536 A > 0? A : -A same as abs (A)
4537 A <= 0? A : -A same as -abs (A)
4538 A < 0? A : -A same as -abs (A)
4540 None of these transformations work for modes with signed
4541 zeros. If A is +/-0, the first two transformations will
4542 change the sign of the result (from +0 to -0, or vice
4543 versa). The last four will fix the sign of the result,
4544 even though the original expressions could be positive or
4545 negative, depending on the sign of A.
4547 Note that all these transformations are correct if A is
4548 NaN, since the two alternatives (A and -A) are also NaNs. */
4549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4550 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4551 ? real_zerop (arg01)
4552 : integer_zerop (arg01))
4553 && ((TREE_CODE (arg2) == NEGATE_EXPR
4554 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4555 /* In the case that A is of the form X-Y, '-A' (arg2) may
4556 have already been folded to Y-X; check for that. */
4557 || (TREE_CODE (arg1) == MINUS_EXPR
4558 && TREE_CODE (arg2) == MINUS_EXPR
4559 && operand_equal_p (TREE_OPERAND (arg1, 0),
4560 TREE_OPERAND (arg2, 1), 0)
4561 && operand_equal_p (TREE_OPERAND (arg1, 1),
4562 TREE_OPERAND (arg2, 0), 0))))
4563 switch (comp_code)
4565 case EQ_EXPR:
4566 case UNEQ_EXPR:
4567 tem = fold_convert_loc (loc, arg1_type, arg1);
4568 return pedantic_non_lvalue_loc (loc,
4569 fold_convert_loc (loc, type,
4570 negate_expr (tem)));
4571 case NE_EXPR:
4572 case LTGT_EXPR:
4573 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4574 case UNGE_EXPR:
4575 case UNGT_EXPR:
4576 if (flag_trapping_math)
4577 break;
4578 /* Fall through. */
4579 case GE_EXPR:
4580 case GT_EXPR:
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4586 case UNLE_EXPR:
4587 case UNLT_EXPR:
4588 if (flag_trapping_math)
4589 break;
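/* Fall through. */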
4590 case LE_EXPR:
4591 case LT_EXPR:
4592 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4593 arg1 = fold_convert_loc (loc, signed_type_for
4594 (TREE_TYPE (arg1)), arg1);
4595 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4596 return negate_expr (fold_convert_loc (loc, type, tem));
4597 default:
4598 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4599 break;
4602 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4603 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4604 both transformations are correct when A is NaN: A != 0
4605 is then true, and A == 0 is false. */
4607 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4608 && integer_zerop (arg01) && integer_zerop (arg2))
4610 if (comp_code == NE_EXPR)
4611 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4612 else if (comp_code == EQ_EXPR)
4613 return build_int_cst (type, 0);
4616 /* Try some transformations of A op B ? A : B.
4618 A == B? A : B same as B
4619 A != B? A : B same as A
4620 A >= B? A : B same as max (A, B)
4621 A > B? A : B same as max (B, A)
4622 A <= B? A : B same as min (A, B)
4623 A < B? A : B same as min (B, A)
4625 As above, these transformations don't work in the presence
4626 of signed zeros. For example, if A and B are zeros of
4627 opposite sign, the first two transformations will change
4628 the sign of the result. In the last four, the original
4629 expressions give different results for (A=+0, B=-0) and
4630 (A=-0, B=+0), but the transformed expressions do not.
4632 The first two transformations are correct if either A or B
4633 is a NaN. In the first transformation, the condition will
4634 be false, and B will indeed be chosen. In the case of the
4635 second transformation, the condition A != B will be true,
4636 and A will be chosen.
4638 The conversions to max() and min() are not correct if B is
4639 a number and A is not. The conditions in the original
4640 expressions will be false, so all four give B. The min()
4641 and max() versions would give a NaN instead. */
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4644 /* Avoid these transformations if the COND_EXPR may be used
4645 as an lvalue in the C++ front-end. PR c++/19199. */
4646 && (in_gimple_form
4647 || (strcmp (lang_hooks.name, "GNU C++") != 0
4648 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4649 || ! maybe_lvalue_p (arg1)
4650 || ! maybe_lvalue_p (arg2)))
4652 tree comp_op0 = arg00;
4653 tree comp_op1 = arg01;
4654 tree comp_type = TREE_TYPE (comp_op0);
4656 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4657 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4659 comp_type = type;
4660 comp_op0 = arg1;
4661 comp_op1 = arg2;
4664 switch (comp_code)
4666 case EQ_EXPR:
4667 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4668 case NE_EXPR:
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4670 case LE_EXPR:
4671 case LT_EXPR:
4672 case UNLE_EXPR:
4673 case UNLT_EXPR:
4674 /* In C++ a ?: expression can be an lvalue, so put the
4675 operand which will be used if they are equal first
4676 so that we can convert this back to the
4677 corresponding COND_EXPR. */
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4681 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4682 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4683 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4685 comp_op1, comp_op0);
4686 return pedantic_non_lvalue_loc (loc,
4687 fold_convert_loc (loc, type, tem));
4689 break;
4690 case GE_EXPR:
4691 case GT_EXPR:
4692 case UNGE_EXPR:
4693 case UNGT_EXPR:
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4697 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4698 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4699 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4700 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4701 comp_op1, comp_op0);
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, tem));
4705 break;
4706 case UNEQ_EXPR:
4707 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4708 return pedantic_non_lvalue_loc (loc,
4709 fold_convert_loc (loc, type, arg2));
4710 break;
4711 case LTGT_EXPR:
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4713 return pedantic_non_lvalue_loc (loc,
4714 fold_convert_loc (loc, type, arg1));
4715 break;
4716 default:
4717 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4718 break;
4722 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4723 we might still be able to simplify this. For example,
4724 if C1 is one less or one more than C2, this might have started
4725 out as a MIN or MAX and been transformed by this function.
4726 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4728 if (INTEGRAL_TYPE_P (type)
4729 && TREE_CODE (arg01) == INTEGER_CST
4730 && TREE_CODE (arg2) == INTEGER_CST)
4731 switch (comp_code)
4733 case EQ_EXPR:
4734 if (TREE_CODE (arg1) == INTEGER_CST)
4735 break;
4736 /* We can replace A with C1 in this case. */
4737 arg1 = fold_convert_loc (loc, type, arg01);
4738 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4740 case LT_EXPR:
4741 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4742 MIN_EXPR, to preserve the signedness of the comparison. */
4743 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4744 OEP_ONLY_CONST)
4745 && operand_equal_p (arg01,
4746 const_binop (PLUS_EXPR, arg2,
4747 build_int_cst (type, 1)),
4748 OEP_ONLY_CONST))
4750 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4751 fold_convert_loc (loc, TREE_TYPE (arg00),
4752 arg2));
4753 return pedantic_non_lvalue_loc (loc,
4754 fold_convert_loc (loc, type, tem));
4756 break;
4758 case LE_EXPR:
4759 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4760 as above. */
4761 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4762 OEP_ONLY_CONST)
4763 && operand_equal_p (arg01,
4764 const_binop (MINUS_EXPR, arg2,
4765 build_int_cst (type, 1)),
4766 OEP_ONLY_CONST))
4768 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4769 fold_convert_loc (loc, TREE_TYPE (arg00),
4770 arg2));
4771 return pedantic_non_lvalue_loc (loc,
4772 fold_convert_loc (loc, type, tem));
4774 break;
4776 case GT_EXPR:
4777 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4778 MAX_EXPR, to preserve the signedness of the comparison. */
4779 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4780 OEP_ONLY_CONST)
4781 && operand_equal_p (arg01,
4782 const_binop (MINUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4784 OEP_ONLY_CONST))
4786 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 arg2));
4789 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4791 break;
4793 case GE_EXPR:
4794 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4795 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4796 OEP_ONLY_CONST)
4797 && operand_equal_p (arg01,
4798 const_binop (PLUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4800 OEP_ONLY_CONST))
4802 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4804 arg2));
4805 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4807 break;
4808 case NE_EXPR:
4809 break;
4810 default:
4811 gcc_unreachable ();
4814 return NULL_TREE;
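/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   two of the rewrites above in plain C.  For ints there are no signed
   zeros or NaNs to worry about, so A >= B ? A : B folds to
   MAX_EXPR <A, B> and A > 0 ? A : -A folds to ABS_EXPR <A> (with the
   usual caveat that both forms overflow for A == INT_MIN).  */

static int
cond_as_max (int a, int b)
{
  return a >= b ? a : b;        /* becomes MAX_EXPR <a, b>  */
}

static int
cond_as_abs (int a)
{
  return a > 0 ? a : -a;        /* becomes ABS_EXPR <a>  */
}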
4819 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4820 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4821 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4822 false) >= 2)
4823 #endif
4825 /* EXP is some logical combination of boolean tests. See if we can
4826 merge it into some range test. Return the new tree if so. */
4828 static tree
4829 fold_range_test (location_t loc, enum tree_code code, tree type,
4830 tree op0, tree op1)
4832 int or_op = (code == TRUTH_ORIF_EXPR
4833 || code == TRUTH_OR_EXPR);
4834 int in0_p, in1_p, in_p;
4835 tree low0, low1, low, high0, high1, high;
4836 bool strict_overflow_p = false;
4837 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4838 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4839 tree tem;
4840 const char * const warnmsg = G_("assuming signed overflow does not occur "
4841 "when simplifying range test");
4843 /* If this is an OR operation, invert both sides; we will invert
4844 again at the end. */
4845 if (or_op)
4846 in0_p = ! in0_p, in1_p = ! in1_p;
4848 /* If both expressions are the same, if we can merge the ranges, and we
4849 can build the range test, return it or it inverted. If one of the
4850 ranges is always true or always false, consider it to be the same
4851 expression as the other. */
4852 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4853 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4854 in1_p, low1, high1)
4855 && 0 != (tem = (build_range_check (loc, type,
4856 lhs != 0 ? lhs
4857 : rhs != 0 ? rhs : integer_zero_node,
4858 in_p, low, high))))
4860 if (strict_overflow_p)
4861 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4862 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4865 /* On machines where the branch cost is expensive, if this is a
4866 short-circuited branch and the underlying object on both sides
4867 is the same, make a non-short-circuit operation. */
4868 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4869 && lhs != 0 && rhs != 0
4870 && (code == TRUTH_ANDIF_EXPR
4871 || code == TRUTH_ORIF_EXPR)
4872 && operand_equal_p (lhs, rhs, 0))
4874 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4875 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4876 which cases we can't do this. */
4877 if (simple_operand_p (lhs))
4878 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4879 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4880 type, op0, op1);
4882 else if (!lang_hooks.decls.global_bindings_p ()
4883 && !CONTAINS_PLACEHOLDER_P (lhs))
4885 tree common = save_expr (lhs);
4887 if (0 != (lhs = build_range_check (loc, type, common,
4888 or_op ? ! in0_p : in0_p,
4889 low0, high0))
4890 && (0 != (rhs = build_range_check (loc, type, common,
4891 or_op ? ! in1_p : in1_p,
4892 low1, high1))))
4894 if (strict_overflow_p)
4895 fold_overflow_warning (warnmsg,
4896 WARN_STRICT_OVERFLOW_COMPARISON);
4897 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4898 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4899 type, lhs, rhs);
4904 return 0;
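/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   when both operands test the same simple object, short-circuiting
   buys nothing, so where LOGICAL_OP_NON_SHORT_CIRCUIT holds the code
   above rewrites TRUTH_ANDIF_EXPR into TRUTH_AND_EXPR.  Since the
   comparisons yield 0 or 1, the two forms are equivalent.  */

static int
short_circuit_form (int x)
{
  return x >= 'a' && x <= 'z';     /* may need two branches */
}

static int
non_short_circuit_form (int x)
{
  return (x >= 'a') & (x <= 'z');  /* evaluates both, branches once */
}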
4907 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4908 bit value. Arrange things so the extra bits will be set to zero if and
4909 only if C is sign-extended to its full width. If MASK is nonzero,
4910 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4912 static tree
4913 unextend (tree c, int p, int unsignedp, tree mask)
4915 tree type = TREE_TYPE (c);
4916 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4917 tree temp;
4919 if (p == modesize || unsignedp)
4920 return c;
4922 /* We work by getting just the sign bit into the low-order bit, then
4923 into the high-order bit, then sign-extend. We then XOR that value
4924 with C. */
4925 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4926 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4928 /* We must use a signed type in order to get an arithmetic right shift.
4929 However, we must also avoid introducing accidental overflows, so that
4930 a subsequent call to integer_zerop will work. Hence we must
4931 do the type conversion here. At this point, the constant is either
4932 zero or one, and the conversion to a signed type can never overflow.
4933 We could get an overflow if this conversion is done anywhere else. */
4934 if (TYPE_UNSIGNED (type))
4935 temp = fold_convert (signed_type_for (type), temp);
4937 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4938 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4939 if (mask != 0)
4940 temp = const_binop (BIT_AND_EXPR, temp,
4941 fold_convert (TREE_TYPE (c), mask));
4942 /* If necessary, convert the type back to match the type of C. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (type, temp);
4946 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
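/* Worked example for the shift/XOR trick above (illustrative, not from
   the sources; assumes an arithmetic right shift on signed values, as
   GCC's constant folding provides).  Take P == 4, MODESIZE == 8 and
   C == 0x0a, the four-bit value 1010 zero-extended:
       temp = (0x0a >> 3) & 1  = 1       sign bit in the low-order bit
       temp <<= 8 - 1          = 0x80    sign bit in the high-order bit
       temp >>= 8 - 4 - 1      = 0xf0    sign bit copied over bits 4..7
       c ^ temp                = 0xfa    the extra bits are now set
   Starting instead from the sign-extended C == 0xfa, the same steps
   yield 0x0a, so the extra bits are zero exactly when C was already
   sign-extended, as the comment above promises.  */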
4949 /* For an expression that has the form
4950 (A && B) || ~B
4951 or
4952 (A || B) && ~B,
4953 we can drop one of the inner expressions and simplify to
4954 A || ~B
4955 or
4956 A && ~B
4957 LOC is the location of the resulting expression. OP is the inner
4958 logical operation; the left-hand side in the examples above, while CMPOP
4959 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4960 removing a condition that guards another, as in
4961 (A != NULL && A->...) || A == NULL
4962 which we must not transform. If RHS_ONLY is true, only eliminate the
4963 right-most operand of the inner logical operation. */
4965 static tree
4966 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4967 bool rhs_only)
4969 tree type = TREE_TYPE (cmpop);
4970 enum tree_code code = TREE_CODE (cmpop);
4971 enum tree_code truthop_code = TREE_CODE (op);
4972 tree lhs = TREE_OPERAND (op, 0);
4973 tree rhs = TREE_OPERAND (op, 1);
4974 tree orig_lhs = lhs, orig_rhs = rhs;
4975 enum tree_code rhs_code = TREE_CODE (rhs);
4976 enum tree_code lhs_code = TREE_CODE (lhs);
4977 enum tree_code inv_code;
4979 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4980 return NULL_TREE;
4982 if (TREE_CODE_CLASS (code) != tcc_comparison)
4983 return NULL_TREE;
4985 if (rhs_code == truthop_code)
4987 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4988 if (newrhs != NULL_TREE)
4990 rhs = newrhs;
4991 rhs_code = TREE_CODE (rhs);
4994 if (lhs_code == truthop_code && !rhs_only)
4996 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4997 if (newlhs != NULL_TREE)
4999 lhs = newlhs;
5000 lhs_code = TREE_CODE (lhs);
5004 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5005 if (inv_code == rhs_code
5006 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5007 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5008 return lhs;
5009 if (!rhs_only && inv_code == lhs_code
5010 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5011 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5012 return rhs;
5013 if (rhs != orig_rhs || lhs != orig_lhs)
5014 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5015 lhs, rhs);
5016 return NULL_TREE;
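/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   here CMPOP is b <= 0 and its inverse b > 0 appears inside the inner
   TRUTH_AND, so that operand is dropped.  For any ints the two forms
   agree: when b <= 0 both are true through the right-hand arm, and
   when b > 0 the inner b > 0 test is redundant.  */

static int
guarded_form (int a, int b)
{
  return (a > 0 && b > 0) || b <= 0;
}

static int
merged_form (int a, int b)
{
  return a > 0 || b <= 0;
}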
5019 /* Find ways of folding logical expressions of LHS and RHS:
5020 Try to merge two comparisons to the same innermost item.
5021 Look for range tests like "ch >= '0' && ch <= '9'".
5022 Look for combinations of simple terms on machines with expensive branches
5023 and evaluate the RHS unconditionally.
5025 For example, if we have p->a == 2 && p->b == 4 and we can make an
5026 object large enough to span both A and B, we can do this with a comparison
5027 against the object ANDed with the a mask.
5029 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5030 operations to do this with one comparison.
5032 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5033 function and the one above.
5035 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5036 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5038 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5039 two operands.
5041 We return the simplified tree or 0 if no optimization is possible. */
5043 static tree
5044 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5045 tree lhs, tree rhs)
5047 /* If this is the "or" of two comparisons, we can do something if
5048 the comparisons are NE_EXPR. If this is the "and", we can do something
5049 if the comparisons are EQ_EXPR. I.e.,
5050 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5052 WANTED_CODE is this operation code. For single bit fields, we can
5053 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5054 comparison for one-bit fields. */
5056 enum tree_code wanted_code;
5057 enum tree_code lcode, rcode;
5058 tree ll_arg, lr_arg, rl_arg, rr_arg;
5059 tree ll_inner, lr_inner, rl_inner, rr_inner;
5060 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5061 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5062 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5063 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5064 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5065 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5066 enum machine_mode lnmode, rnmode;
5067 tree ll_mask, lr_mask, rl_mask, rr_mask;
5068 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5069 tree l_const, r_const;
5070 tree lntype, rntype, result;
5071 HOST_WIDE_INT first_bit, end_bit;
5072 int volatilep;
5074 /* Start by getting the comparison codes. Fail if anything is volatile.
5075 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5076 it were surrounded with a NE_EXPR. */
5078 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5079 return 0;
5081 lcode = TREE_CODE (lhs);
5082 rcode = TREE_CODE (rhs);
5084 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5086 lhs = build2 (NE_EXPR, truth_type, lhs,
5087 build_int_cst (TREE_TYPE (lhs), 0));
5088 lcode = NE_EXPR;
5091 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5093 rhs = build2 (NE_EXPR, truth_type, rhs,
5094 build_int_cst (TREE_TYPE (rhs), 0));
5095 rcode = NE_EXPR;
5098 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5099 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5100 return 0;
5102 ll_arg = TREE_OPERAND (lhs, 0);
5103 lr_arg = TREE_OPERAND (lhs, 1);
5104 rl_arg = TREE_OPERAND (rhs, 0);
5105 rr_arg = TREE_OPERAND (rhs, 1);
5107 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5108 if (simple_operand_p (ll_arg)
5109 && simple_operand_p (lr_arg))
5111 if (operand_equal_p (ll_arg, rl_arg, 0)
5112 && operand_equal_p (lr_arg, rr_arg, 0))
5114 result = combine_comparisons (loc, code, lcode, rcode,
5115 truth_type, ll_arg, lr_arg);
5116 if (result)
5117 return result;
5119 else if (operand_equal_p (ll_arg, rr_arg, 0)
5120 && operand_equal_p (lr_arg, rl_arg, 0))
5122 result = combine_comparisons (loc, code, lcode,
5123 swap_tree_comparison (rcode),
5124 truth_type, ll_arg, lr_arg);
5125 if (result)
5126 return result;
5130 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5131 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5133 /* If the RHS can be evaluated unconditionally and its operands are
5134 simple, it wins to evaluate the RHS unconditionally on machines
5135 with expensive branches. In this case, this isn't a comparison
5136 that can be merged. */
5138 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5139 false) >= 2
5140 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5141 && simple_operand_p (rl_arg)
5142 && simple_operand_p (rr_arg))
5144 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5145 if (code == TRUTH_OR_EXPR
5146 && lcode == NE_EXPR && integer_zerop (lr_arg)
5147 && rcode == NE_EXPR && integer_zerop (rr_arg)
5148 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5149 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 return build2_loc (loc, NE_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5155 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5156 if (code == TRUTH_AND_EXPR
5157 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5158 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5159 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5160 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5161 return build2_loc (loc, EQ_EXPR, truth_type,
5162 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5163 ll_arg, rl_arg),
5164 build_int_cst (TREE_TYPE (ll_arg), 0));
5167 /* See if the comparisons can be merged. Then get all the parameters for
5168 each side. */
5170 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5171 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5172 return 0;
5174 volatilep = 0;
5175 ll_inner = decode_field_reference (loc, ll_arg,
5176 &ll_bitsize, &ll_bitpos, &ll_mode,
5177 &ll_unsignedp, &volatilep, &ll_mask,
5178 &ll_and_mask);
5179 lr_inner = decode_field_reference (loc, lr_arg,
5180 &lr_bitsize, &lr_bitpos, &lr_mode,
5181 &lr_unsignedp, &volatilep, &lr_mask,
5182 &lr_and_mask);
5183 rl_inner = decode_field_reference (loc, rl_arg,
5184 &rl_bitsize, &rl_bitpos, &rl_mode,
5185 &rl_unsignedp, &volatilep, &rl_mask,
5186 &rl_and_mask);
5187 rr_inner = decode_field_reference (loc, rr_arg,
5188 &rr_bitsize, &rr_bitpos, &rr_mode,
5189 &rr_unsignedp, &volatilep, &rr_mask,
5190 &rr_and_mask);
5192 /* The inner operation on the lhs of each comparison must be the same
5193 if we are to be able to do anything.
5194 Then see if we have constants. If not, the same must be true for
5195 the rhs's. */
5196 if (volatilep || ll_inner == 0 || rl_inner == 0
5197 || ! operand_equal_p (ll_inner, rl_inner, 0))
5198 return 0;
5200 if (TREE_CODE (lr_arg) == INTEGER_CST
5201 && TREE_CODE (rr_arg) == INTEGER_CST)
5202 l_const = lr_arg, r_const = rr_arg;
5203 else if (lr_inner == 0 || rr_inner == 0
5204 || ! operand_equal_p (lr_inner, rr_inner, 0))
5205 return 0;
5206 else
5207 l_const = r_const = 0;
5209 /* If either comparison code is not correct for our logical operation,
5210 fail. However, we can convert a one-bit comparison against zero into
5211 the opposite comparison against that bit being set in the field. */
5213 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5214 if (lcode != wanted_code)
5216 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5218 /* Make the left operand unsigned, since we are only interested
5219 in the value of one bit. Otherwise we are doing the wrong
5220 thing below. */
5221 ll_unsignedp = 1;
5222 l_const = ll_mask;
5224 else
5225 return 0;
5228 /* This is analogous to the code for l_const above. */
5229 if (rcode != wanted_code)
5231 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5233 rl_unsignedp = 1;
5234 r_const = rl_mask;
5236 else
5237 return 0;
5240 /* See if we can find a mode that contains both fields being compared on
5241 the left. If we can't, fail. Otherwise, update all constants and masks
5242 to be relative to a field of that size. */
5243 first_bit = MIN (ll_bitpos, rl_bitpos);
5244 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5245 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5246 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5247 volatilep);
5248 if (lnmode == VOIDmode)
5249 return 0;
5251 lnbitsize = GET_MODE_BITSIZE (lnmode);
5252 lnbitpos = first_bit & ~ (lnbitsize - 1);
5253 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5254 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5256 if (BYTES_BIG_ENDIAN)
5258 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5259 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5262 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5263 size_int (xll_bitpos));
5264 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5265 size_int (xrl_bitpos));
5267 if (l_const)
5269 l_const = fold_convert_loc (loc, lntype, l_const);
5270 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5271 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5272 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5273 fold_build1_loc (loc, BIT_NOT_EXPR,
5274 lntype, ll_mask))))
5276 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5278 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5281 if (r_const)
5283 r_const = fold_convert_loc (loc, lntype, r_const);
5284 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5285 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5286 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5287 fold_build1_loc (loc, BIT_NOT_EXPR,
5288 lntype, rl_mask))))
5290 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5292 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5296 /* If the right sides are not constant, do the same for them. Also,
5297 disallow this optimization if a size or signedness mismatch occurs
5298 between the left and right sides. */
5299 if (l_const == 0)
5301 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5302 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5303 /* Make sure the two fields on the right
5304 correspond to the left without being swapped. */
5305 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5306 return 0;
5308 first_bit = MIN (lr_bitpos, rr_bitpos);
5309 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5310 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5311 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5312 volatilep);
5313 if (rnmode == VOIDmode)
5314 return 0;
5316 rnbitsize = GET_MODE_BITSIZE (rnmode);
5317 rnbitpos = first_bit & ~ (rnbitsize - 1);
5318 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5319 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5321 if (BYTES_BIG_ENDIAN)
5323 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5324 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5327 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5328 rntype, lr_mask),
5329 size_int (xlr_bitpos));
5330 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5331 rntype, rr_mask),
5332 size_int (xrr_bitpos));
5334 /* Make a mask that corresponds to both fields being compared.
5335 Do this for both items being compared. If the operands are the
5336 same size and the bits being compared are in the same position
5337 then we can do this by masking both and comparing the masked
5338 results. */
5339 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5340 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5341 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5343 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5344 ll_unsignedp || rl_unsignedp);
5345 if (! all_ones_mask_p (ll_mask, lnbitsize))
5346 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5348 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5349 lr_unsignedp || rr_unsignedp);
5350 if (! all_ones_mask_p (lr_mask, rnbitsize))
5351 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5353 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5356 /* There is still another way we can do something: If both pairs of
5357 fields being compared are adjacent, we may be able to make a wider
5358 field containing them both.
5360 Note that we still must mask the lhs/rhs expressions. Furthermore,
5361 the mask must be shifted to account for the shift done by
5362 make_bit_field_ref. */
5363 if ((ll_bitsize + ll_bitpos == rl_bitpos
5364 && lr_bitsize + lr_bitpos == rr_bitpos)
5365 || (ll_bitpos == rl_bitpos + rl_bitsize
5366 && lr_bitpos == rr_bitpos + rr_bitsize))
5368 tree type;
5370 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5371 ll_bitsize + rl_bitsize,
5372 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5373 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5374 lr_bitsize + rr_bitsize,
5375 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5377 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5378 size_int (MIN (xll_bitpos, xrl_bitpos)));
5379 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5380 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5382 /* Convert to the smaller type before masking out unwanted bits. */
5383 type = lntype;
5384 if (lntype != rntype)
5386 if (lnbitsize > rnbitsize)
5388 lhs = fold_convert_loc (loc, rntype, lhs);
5389 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5390 type = rntype;
5392 else if (lnbitsize < rnbitsize)
5394 rhs = fold_convert_loc (loc, lntype, rhs);
5395 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5396 type = lntype;
5400 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5401 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5403 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5404 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5406 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5409 return 0;
5412 /* Handle the case of comparisons with constants. If there is something in
5413 common between the masks, those bits of the constants must be the same.
5414 If not, the condition is always false. Test for this to avoid generating
5415 incorrect code below. */
5416 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5417 if (! integer_zerop (result)
5418 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5419 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5421 if (wanted_code == NE_EXPR)
5423 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5424 return constant_boolean_node (true, truth_type);
5426 else
5428 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5429 return constant_boolean_node (false, truth_type);
5433 /* Construct the expression we will return. First get the component
5434 reference we will make. Unless the mask is all ones the width of
5435 that field, perform the mask operation. Then compare with the
5436 merged constant. */
5437 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5438 ll_unsignedp || rl_unsignedp);
5440 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5441 if (! all_ones_mask_p (ll_mask, lnbitsize))
5442 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5444 return build2_loc (loc, wanted_code, truth_type, result,
5445 const_binop (BIT_IOR_EXPR, l_const, r_const));
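/* Illustrative sketch, not part of GCC (helper names are hypothetical):
   the constant case above at the source level.  Two masked tests
   against the same byte merge into a single comparison; the IOR of the
   two masks is all ones here, so even the final BIT_AND_EXPR drops out
   and only the merged constant remains.  */

static int
two_field_tests (unsigned char x)
{
  return (x & 0x0f) == 0x02 && (x & 0xf0) == 0x40;
}

static int
one_field_test (unsigned char x)
{
  return x == 0x42;
}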
5448 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5449 constant. */
5451 static tree
5452 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5453 tree op0, tree op1)
5455 tree arg0 = op0;
5456 enum tree_code op_code;
5457 tree comp_const;
5458 tree minmax_const;
5459 int consts_equal, consts_lt;
5460 tree inner;
5462 STRIP_SIGN_NOPS (arg0);
5464 op_code = TREE_CODE (arg0);
5465 minmax_const = TREE_OPERAND (arg0, 1);
5466 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5467 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5468 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5469 inner = TREE_OPERAND (arg0, 0);
5471 /* If something does not permit us to optimize, return NULL_TREE. */
5472 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5473 || TREE_CODE (comp_const) != INTEGER_CST
5474 || TREE_OVERFLOW (comp_const)
5475 || TREE_CODE (minmax_const) != INTEGER_CST
5476 || TREE_OVERFLOW (minmax_const))
5477 return NULL_TREE;
5479 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5480 and GT_EXPR, doing the rest with recursive calls using logical
5481 simplifications. */
5482 switch (code)
5484 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5486 tree tem
5487 = optimize_minmax_comparison (loc,
5488 invert_tree_comparison (code, false),
5489 type, op0, op1);
5490 if (tem)
5491 return invert_truthvalue_loc (loc, tem);
5492 return NULL_TREE;
5495 case GE_EXPR:
5496 return
5497 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5498 optimize_minmax_comparison
5499 (loc, EQ_EXPR, type, arg0, comp_const),
5500 optimize_minmax_comparison
5501 (loc, GT_EXPR, type, arg0, comp_const));
5503 case EQ_EXPR:
5504 if (op_code == MAX_EXPR && consts_equal)
5505 /* MAX (X, 0) == 0 -> X <= 0 */
5506 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5508 else if (op_code == MAX_EXPR && consts_lt)
5509 /* MAX (X, 0) == 5 -> X == 5 */
5510 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5512 else if (op_code == MAX_EXPR)
5513 /* MAX (X, 0) == -1 -> false */
5514 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5516 else if (consts_equal)
5517 /* MIN (X, 0) == 0 -> X >= 0 */
5518 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5520 else if (consts_lt)
5521 /* MIN (X, 0) == 5 -> false */
5522 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5524 else
5525 /* MIN (X, 0) == -1 -> X == -1 */
5526 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5528 case GT_EXPR:
5529 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5530 /* MAX (X, 0) > 0 -> X > 0
5531 MAX (X, 0) > 5 -> X > 5 */
5532 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5534 else if (op_code == MAX_EXPR)
5535 /* MAX (X, 0) > -1 -> true */
5536 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5538 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5539 /* MIN (X, 0) > 0 -> false
5540 MIN (X, 0) > 5 -> false */
5541 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5543 else
5544 /* MIN (X, 0) > -1 -> X > -1 */
5545 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5547 default:
5548 return NULL_TREE;
5552 /* T is an integer expression that is being multiplied, divided, or taken a
5553 modulus (CODE says which and what kind of divide or modulus) by a
5554 constant C. See if we can eliminate that operation by folding it with
5555 other operations already in T. WIDE_TYPE, if non-null, is a type that
5556 should be used for the computation if wider than our type.
5558 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5559 (X * 2) + (Y * 4). We must, however, be assured that either the original
5560 expression would not overflow or that overflow is undefined for the type
5561 in the language in question.
5563 If we return a non-null expression, it is an equivalent form of the
5564 original computation, but need not be in the original type.
5566 We set *STRICT_OVERFLOW_P to true if the return value depends on
5567 signed overflow being undefined. Otherwise we do not change
5568 *STRICT_OVERFLOW_P. */
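/* A hedged example (added for exposition): for signed X, the quotient
   (X * 8) / 4 is rewritten as X * 2; *STRICT_OVERFLOW_P is set because
   the rewrite is only valid when X * 8 cannot wrap, i.e. when signed
   overflow is undefined.  */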
5570 static tree
5571 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5572 bool *strict_overflow_p)
5574 /* To avoid exponential search depth, refuse to allow recursion past
5575 three levels. Beyond that (1) it's highly unlikely that we'll find
5576 something interesting and (2) we've probably processed it before
5577 when we built the inner expression. */
5579 static int depth;
5580 tree ret;
5582 if (depth > 3)
5583 return NULL;
5585 depth++;
5586 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5587 depth--;
5589 return ret;
5592 static tree
5593 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5594 bool *strict_overflow_p)
5596 tree type = TREE_TYPE (t);
5597 enum tree_code tcode = TREE_CODE (t);
5598 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5599 > GET_MODE_SIZE (TYPE_MODE (type)))
5600 ? wide_type : type);
5601 tree t1, t2;
5602 int same_p = tcode == code;
5603 tree op0 = NULL_TREE, op1 = NULL_TREE;
5604 bool sub_strict_overflow_p;
5606 /* Don't deal with constants of zero here; they confuse the code below. */
5607 if (integer_zerop (c))
5608 return NULL_TREE;
5610 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5611 op0 = TREE_OPERAND (t, 0);
5613 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5614 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5616 /* Note that we need not handle conditional operations here since fold
5617 already handles those cases. So just do arithmetic here. */
5618 switch (tcode)
5620 case INTEGER_CST:
5621 /* For a constant, we can always simplify if we are a multiply
5622 or (for divide and modulus) if it is a multiple of our constant. */
5623 if (code == MULT_EXPR
5624 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5625 return const_binop (code, fold_convert (ctype, t),
5626 fold_convert (ctype, c));
5627 break;
5629 CASE_CONVERT: case NON_LVALUE_EXPR:
5630 /* If op0 is an expression ... */
5631 if ((COMPARISON_CLASS_P (op0)
5632 || UNARY_CLASS_P (op0)
5633 || BINARY_CLASS_P (op0)
5634 || VL_EXP_CLASS_P (op0)
5635 || EXPRESSION_CLASS_P (op0))
5636 /* ... and has wrapping overflow, and its type is smaller
5637 than ctype, then we cannot pass through as widening. */
5638 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5639 && (TYPE_PRECISION (ctype)
5640 > TYPE_PRECISION (TREE_TYPE (op0))))
5641 /* ... or this is a truncation (t is narrower than op0),
5642 then we cannot pass through this narrowing. */
5643 || (TYPE_PRECISION (type)
5644 < TYPE_PRECISION (TREE_TYPE (op0)))
5645 /* ... or signedness changes for division or modulus,
5646 then we cannot pass through this conversion. */
5647 || (code != MULT_EXPR
5648 && (TYPE_UNSIGNED (ctype)
5649 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5650 /* ... or op0 has undefined overflow while the converted-to
5651 type has not, then we cannot do the operation in the inner
5652 type, as that would introduce undefined overflow. */
5653 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5654 && !TYPE_OVERFLOW_UNDEFINED (type))))
5655 break;
5657 /* Pass the constant down and see if we can make a simplification. If
5658 we can, replace this expression with the inner simplification for
5659 possible later conversion to our or some other type. */
5660 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5661 && TREE_CODE (t2) == INTEGER_CST
5662 && !TREE_OVERFLOW (t2)
5663 && (0 != (t1 = extract_muldiv (op0, t2, code,
5664 code == MULT_EXPR
5665 ? ctype : NULL_TREE,
5666 strict_overflow_p))))
5667 return t1;
5668 break;
5670 case ABS_EXPR:
5671 /* If widening the type changes it from signed to unsigned, then we
5672 must avoid building ABS_EXPR itself as unsigned. */
5673 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5675 tree cstype = (*signed_type_for) (ctype);
5676 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5677 != 0)
5679 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5680 return fold_convert (ctype, t1);
5682 break;
5684 /* If the constant is negative, we cannot simplify this. */
5685 if (tree_int_cst_sgn (c) == -1)
5686 break;
5687 /* FALLTHROUGH */
5688 case NEGATE_EXPR:
5689 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5690 != 0)
5691 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5692 break;
5694 case MIN_EXPR: case MAX_EXPR:
5695 /* If widening the type changes the signedness, then we can't perform
5696 this optimization as that changes the result. */
5697 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5698 break;
5700 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5701 sub_strict_overflow_p = false;
5702 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5703 &sub_strict_overflow_p)) != 0
5704 && (t2 = extract_muldiv (op1, c, code, wide_type,
5705 &sub_strict_overflow_p)) != 0)
5707 if (tree_int_cst_sgn (c) < 0)
5708 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5709 if (sub_strict_overflow_p)
5710 *strict_overflow_p = true;
5711 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5712 fold_convert (ctype, t2));
5714 break;
5716 case LSHIFT_EXPR: case RSHIFT_EXPR:
5717 /* If the second operand is constant, this is a multiplication
5718 or floor division by a power of two, so we can treat it that
5719 way unless the multiplier or divisor overflows. Signed
5720 left-shift overflow is implementation-defined rather than
5721 undefined in C90, so do not convert signed left shift into
5722 multiplication. */
5723 if (TREE_CODE (op1) == INTEGER_CST
5724 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5725 /* const_binop may not detect overflow correctly,
5726 so check for it explicitly here. */
5727 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5728 && TREE_INT_CST_HIGH (op1) == 0
5729 && 0 != (t1 = fold_convert (ctype,
5730 const_binop (LSHIFT_EXPR,
5731 size_one_node,
5732 op1)))
5733 && !TREE_OVERFLOW (t1))
5734 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5735 ? MULT_EXPR : FLOOR_DIV_EXPR,
5736 ctype,
5737 fold_convert (ctype, op0),
5738 t1),
5739 c, code, wide_type, strict_overflow_p);
5740 break;
5742 case PLUS_EXPR: case MINUS_EXPR:
5743 /* See if we can eliminate the operation on both sides. If we can, we
5744 can return a new PLUS or MINUS. If we can't, the only remaining
5745 cases where we can do anything are if the second operand is a
5746 constant. */
5747 sub_strict_overflow_p = false;
5748 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5749 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5750 if (t1 != 0 && t2 != 0
5751 && (code == MULT_EXPR
5752 /* If not multiplication, we can only do this if both operands
5753 are divisible by c. */
5754 || (multiple_of_p (ctype, op0, c)
5755 && multiple_of_p (ctype, op1, c))))
5757 if (sub_strict_overflow_p)
5758 *strict_overflow_p = true;
5759 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5760 fold_convert (ctype, t2));
5763 /* If this was a subtraction, negate OP1 and set it to be an addition.
5764 This simplifies the logic below. */
5765 if (tcode == MINUS_EXPR)
5767 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5768 /* If OP1 was not easily negatable, the constant may be OP0. */
5769 if (TREE_CODE (op0) == INTEGER_CST)
5771 tree tem = op0;
5772 op0 = op1;
5773 op1 = tem;
5774 tem = t1;
5775 t1 = t2;
5776 t2 = tem;
5780 if (TREE_CODE (op1) != INTEGER_CST)
5781 break;
5783 /* If either OP1 or C are negative, this optimization is not safe for
5784 some of the division and remainder types while for others we need
5785 to change the code. */
5786 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5788 if (code == CEIL_DIV_EXPR)
5789 code = FLOOR_DIV_EXPR;
5790 else if (code == FLOOR_DIV_EXPR)
5791 code = CEIL_DIV_EXPR;
5792 else if (code != MULT_EXPR
5793 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5794 break;
5797 /* If it's a multiply or a division/modulus operation of a multiple
5798 of our constant, do the operation and verify it doesn't overflow. */
5799 if (code == MULT_EXPR
5800 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5802 op1 = const_binop (code, fold_convert (ctype, op1),
5803 fold_convert (ctype, c));
5804 /* We allow the constant to overflow with wrapping semantics. */
5805 if (op1 == 0
5806 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5807 break;
5809 else
5810 break;
5812 /* If we have an unsigned type, we cannot widen the operation since it
5813 will change the result if the original computation overflowed. */
5814 if (TYPE_UNSIGNED (ctype) && ctype != type)
5815 break;
5817 /* If we were able to eliminate our operation from the first side,
5818 apply our operation to the second side and reform the PLUS. */
5819 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5820 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5822 /* The last case is if we are a multiply. In that case, we can
5823 apply the distributive law to commute the multiply and addition
5824 if the multiplication of the constants doesn't overflow. */
5825 if (code == MULT_EXPR)
5826 return fold_build2 (tcode, ctype,
5827 fold_build2 (code, ctype,
5828 fold_convert (ctype, op0),
5829 fold_convert (ctype, c)),
5830 op1);
5832 break;
5834 case MULT_EXPR:
5835 /* We have a special case here if we are doing something like
5836 (C * 8) % 4 since we know that's zero. */
5837 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5838 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5839 /* If the multiplication can overflow we cannot optimize this. */
5840 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5841 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5842 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5844 *strict_overflow_p = true;
5845 return omit_one_operand (type, integer_zero_node, op0);
5848 /* ... fall through ... */
5850 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5851 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5852 /* If we can extract our operation from the LHS, do so and return a
5853 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5854 do something only if the second operand is a constant. */
5855 if (same_p
5856 && (t1 = extract_muldiv (op0, c, code, wide_type,
5857 strict_overflow_p)) != 0)
5858 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5859 fold_convert (ctype, op1));
5860 else if (tcode == MULT_EXPR && code == MULT_EXPR
5861 && (t1 = extract_muldiv (op1, c, code, wide_type,
5862 strict_overflow_p)) != 0)
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5864 fold_convert (ctype, t1));
5865 else if (TREE_CODE (op1) != INTEGER_CST)
5866 return 0;
5868 /* If these are the same operation types, we can associate them
5869 assuming no overflow. */
5870 if (tcode == code)
5872 double_int mul;
5873 bool overflow_p;
5874 unsigned prec = TYPE_PRECISION (ctype);
5875 bool uns = TYPE_UNSIGNED (ctype);
5876 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5877 double_int dic = tree_to_double_int (c).ext (prec, uns);
5878 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5879 overflow_p = ((!uns && overflow_p)
5880 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5881 if (!double_int_fits_to_tree_p (ctype, mul)
5882 && ((uns && tcode != MULT_EXPR) || !uns))
5883 overflow_p = 1;
5884 if (!overflow_p)
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5886 double_int_to_tree (ctype, mul));
5889 /* If these operations "cancel" each other, we have the main
5890 optimizations of this pass, which occur when either constant is a
5891 multiple of the other, in which case we replace this with either an
5892 operation of CODE or TCODE.
5894 If we have an unsigned type, we cannot do this since it will change
5895 the result if the original computation overflowed. */
5896 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5897 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5898 || (tcode == MULT_EXPR
5899 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5900 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5901 && code != MULT_EXPR)))
5903 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5910 op1, c)));
5912 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5914 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5915 *strict_overflow_p = true;
5916 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5917 fold_convert (ctype,
5918 const_binop (TRUNC_DIV_EXPR,
5919 c, op1)));
5922 break;
5924 default:
5925 break;
5928 return 0;
5931 /* Return a node which has the indicated constant VALUE (either 0 or
5932 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5933 and is of the indicated TYPE. */
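/* Example (illustrative only): for a four-element integer vector type,
   constant_boolean_node (true, type) yields {-1, -1, -1, -1}, matching
   the all-ones mask convention used for vector comparison results.  */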
5935 tree
5936 constant_boolean_node (bool value, tree type)
5938 if (type == integer_type_node)
5939 return value ? integer_one_node : integer_zero_node;
5940 else if (type == boolean_type_node)
5941 return value ? boolean_true_node : boolean_false_node;
5942 else if (TREE_CODE (type) == VECTOR_TYPE)
5943 return build_vector_from_val (type,
5944 build_int_cst (TREE_TYPE (type),
5945 value ? -1 : 0));
5946 else
5947 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5951 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5952 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5953 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5954 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5955 COND is the first argument to CODE; otherwise (as in the example
5956 given here), it is the second argument. TYPE is the type of the
5957 original expression. Return NULL_TREE if no simplification is
5958 possible. */
5960 static tree
5961 fold_binary_op_with_conditional_arg (location_t loc,
5962 enum tree_code code,
5963 tree type, tree op0, tree op1,
5964 tree cond, tree arg, int cond_first_p)
5966 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5967 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5968 tree test, true_value, false_value;
5969 tree lhs = NULL_TREE;
5970 tree rhs = NULL_TREE;
5971 enum tree_code cond_code = COND_EXPR;
5973 if (TREE_CODE (cond) == COND_EXPR
5974 || TREE_CODE (cond) == VEC_COND_EXPR)
5976 test = TREE_OPERAND (cond, 0);
5977 true_value = TREE_OPERAND (cond, 1);
5978 false_value = TREE_OPERAND (cond, 2);
5979 /* If this operand is an expression that throws (it has void type),
5980 then it does not make sense to try to perform a logical or
5981 arithmetic operation involving it. */
5982 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5983 lhs = true_value;
5984 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5985 rhs = false_value;
5987 else
5989 tree testtype = TREE_TYPE (cond);
5990 test = cond;
5991 true_value = constant_boolean_node (true, testtype);
5992 false_value = constant_boolean_node (false, testtype);
5995 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
5996 cond_code = VEC_COND_EXPR;
5998 /* This transformation is only worthwhile if we don't have to wrap ARG
5999 in a SAVE_EXPR and the operation can be simplified without recursing
6000 on at least one of the branches once it's pushed inside the COND_EXPR. */
6001 if (!TREE_CONSTANT (arg)
6002 && (TREE_SIDE_EFFECTS (arg)
6003 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6004 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6005 return NULL_TREE;
6007 arg = fold_convert_loc (loc, arg_type, arg);
6008 if (lhs == 0)
6010 true_value = fold_convert_loc (loc, cond_type, true_value);
6011 if (cond_first_p)
6012 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6013 else
6014 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6016 if (rhs == 0)
6018 false_value = fold_convert_loc (loc, cond_type, false_value);
6019 if (cond_first_p)
6020 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6021 else
6022 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6025 /* Check that we have simplified at least one of the branches. */
6026 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6027 return NULL_TREE;
6029 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6033 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6035 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6036 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6037 ADDEND is the same as X.
6039 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6040 and finite. The problematic cases are when X is zero, and its mode
6041 has signed zeros. In the case of rounding towards -infinity,
6042 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6043 modes, X + 0 is not the same as X because -0 + 0 is 0. */
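/* Illustrative consequence (added for exposition): by default only
   X - 0.0 is foldable to X; folding X + 0.0 additionally requires that
   the sign of zero be ignorable (e.g. -fno-signed-zeros), because
   -0.0 + 0.0 yields +0.0, not -0.0.  */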
6045 bool
6046 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6048 if (!real_zerop (addend))
6049 return false;
6051 /* Don't allow the fold with -fsignaling-nans. */
6052 if (HONOR_SNANS (TYPE_MODE (type)))
6053 return false;
6055 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6056 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6057 return true;
6059 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6060 if (TREE_CODE (addend) == REAL_CST
6061 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6062 negate = !negate;
6064 /* The mode has signed zeros, and we have to honor their sign.
6065 In this situation, there is only one case we can return true for.
6066 X - 0 is the same as X unless rounding towards -infinity is
6067 supported. */
6068 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6071 /* Subroutine of fold() that checks comparisons of built-in math
6072 functions against real constants.
6074 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6075 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6076 is the type of the result and ARG0 and ARG1 are the operands of the
6077 comparison. ARG1 must be a TREE_REAL_CST.
6079 The function returns the constant folded tree if a simplification
6080 can be made, and NULL_TREE otherwise. */
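/* Examples of the folds below (added for exposition):

     sqrt (x) > 2.0    becomes   x > 4.0
     sqrt (x) < -1.0   becomes   constant false

   both hold even for NaN inputs, since sqrt of a negative argument is
   NaN and both forms of each comparison then evaluate to false.  */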
6082 static tree
6083 fold_mathfn_compare (location_t loc,
6084 enum built_in_function fcode, enum tree_code code,
6085 tree type, tree arg0, tree arg1)
6087 REAL_VALUE_TYPE c;
6089 if (BUILTIN_SQRT_P (fcode))
6091 tree arg = CALL_EXPR_ARG (arg0, 0);
6092 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6094 c = TREE_REAL_CST (arg1);
6095 if (REAL_VALUE_NEGATIVE (c))
6097 /* sqrt(x) < y is always false, if y is negative. */
6098 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6099 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6101 /* sqrt(x) > y is always true, if y is negative and we
6102 don't care about NaNs, i.e. negative values of x. */
6103 if (code == NE_EXPR || !HONOR_NANS (mode))
6104 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6106 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6107 return fold_build2_loc (loc, GE_EXPR, type, arg,
6108 build_real (TREE_TYPE (arg), dconst0));
6110 else if (code == GT_EXPR || code == GE_EXPR)
6112 REAL_VALUE_TYPE c2;
6114 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6115 real_convert (&c2, mode, &c2);
6117 if (REAL_VALUE_ISINF (c2))
6119 /* sqrt(x) > y is x == +Inf, when y is very large. */
6120 if (HONOR_INFINITIES (mode))
6121 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6122 build_real (TREE_TYPE (arg), c2));
6124 /* sqrt(x) > y is always false, when y is very large
6125 and we don't care about infinities. */
6126 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6129 /* sqrt(x) > c is the same as x > c*c. */
6130 return fold_build2_loc (loc, code, type, arg,
6131 build_real (TREE_TYPE (arg), c2));
6133 else if (code == LT_EXPR || code == LE_EXPR)
6135 REAL_VALUE_TYPE c2;
6137 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6138 real_convert (&c2, mode, &c2);
6140 if (REAL_VALUE_ISINF (c2))
6142 /* sqrt(x) < y is always true, when y is a very large
6143 value and we don't care about NaNs or Infinities. */
6144 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6145 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6147 /* sqrt(x) < y is x != +Inf when y is very large and we
6148 don't care about NaNs. */
6149 if (! HONOR_NANS (mode))
6150 return fold_build2_loc (loc, NE_EXPR, type, arg,
6151 build_real (TREE_TYPE (arg), c2));
6153 /* sqrt(x) < y is x >= 0 when y is very large and we
6154 don't care about Infinities. */
6155 if (! HONOR_INFINITIES (mode))
6156 return fold_build2_loc (loc, GE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg), dconst0));
6159 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6160 arg = save_expr (arg);
6161 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6162 fold_build2_loc (loc, GE_EXPR, type, arg,
6163 build_real (TREE_TYPE (arg),
6164 dconst0)),
6165 fold_build2_loc (loc, NE_EXPR, type, arg,
6166 build_real (TREE_TYPE (arg),
6167 c2)));
6170 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6171 if (! HONOR_NANS (mode))
6172 return fold_build2_loc (loc, code, type, arg,
6173 build_real (TREE_TYPE (arg), c2));
6175 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6176 arg = save_expr (arg);
6177 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6178 fold_build2_loc (loc, GE_EXPR, type, arg,
6179 build_real (TREE_TYPE (arg),
6180 dconst0)),
6181 fold_build2_loc (loc, code, type, arg,
6182 build_real (TREE_TYPE (arg),
6183 c2)));
6187 return NULL_TREE;
6190 /* Subroutine of fold() that optimizes comparisons against Infinities,
6191 either +Inf or -Inf.
6193 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6194 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6195 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6197 The function returns the constant folded tree if a simplification
6198 can be made, and NULL_TREE otherwise. */
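/* For example (illustrative, assuming double operands): x < +Inf folds
   to x <= DBL_MAX and x >= +Inf folds to x > DBL_MAX; comparisons with
   -Inf are handled by first swapping the sense of the comparison.  */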
6200 static tree
6201 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6202 tree arg0, tree arg1)
6204 enum machine_mode mode;
6205 REAL_VALUE_TYPE max;
6206 tree temp;
6207 bool neg;
6209 mode = TYPE_MODE (TREE_TYPE (arg0));
6211 /* For negative infinity swap the sense of the comparison. */
6212 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6213 if (neg)
6214 code = swap_tree_comparison (code);
6216 switch (code)
6218 case GT_EXPR:
6219 /* x > +Inf is always false, if we ignore sNaNs. */
6220 if (HONOR_SNANS (mode))
6221 return NULL_TREE;
6222 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6224 case LE_EXPR:
6225 /* x <= +Inf is always true, if we don't care about NaNs. */
6226 if (! HONOR_NANS (mode))
6227 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6229 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6230 arg0 = save_expr (arg0);
6231 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6233 case EQ_EXPR:
6234 case GE_EXPR:
6235 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6236 real_maxval (&max, neg, mode);
6237 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6238 arg0, build_real (TREE_TYPE (arg0), max));
6240 case LT_EXPR:
6241 /* x < +Inf is always equal to x <= DBL_MAX. */
6242 real_maxval (&max, neg, mode);
6243 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6244 arg0, build_real (TREE_TYPE (arg0), max));
6246 case NE_EXPR:
6247 /* x != +Inf is always equal to !(x > DBL_MAX). */
6248 real_maxval (&max, neg, mode);
6249 if (! HONOR_NANS (mode))
6250 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6251 arg0, build_real (TREE_TYPE (arg0), max));
6253 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6257 default:
6258 break;
6261 return NULL_TREE;
6264 /* Subroutine of fold() that optimizes comparisons of a division by
6265 a nonzero integer constant against an integer constant, i.e.
6266 X/C1 op C2.
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
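/* Worked example (added for exposition): for signed X, X / 4 == 2
   holds exactly for X in [8, 11], so the comparison is folded into the
   range check 8 <= X && X <= 11 via build_range_check.  */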
6275 static tree
6276 fold_div_compare (location_t loc,
6277 enum tree_code code, tree type, tree arg0, tree arg1)
6279 tree prod, tmp, hi, lo;
6280 tree arg00 = TREE_OPERAND (arg0, 0);
6281 tree arg01 = TREE_OPERAND (arg0, 1);
6282 double_int val;
6283 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6284 bool neg_overflow;
6285 bool overflow;
6287 /* We have to do this the hard way to detect unsigned overflow.
6288 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6289 val = TREE_INT_CST (arg01)
6290 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6291 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6292 neg_overflow = false;
6294 if (unsigned_p)
6296 tmp = int_const_binop (MINUS_EXPR, arg01,
6297 build_int_cst (TREE_TYPE (arg01), 1));
6298 lo = prod;
6300 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6301 val = TREE_INT_CST (prod)
6302 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6303 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6304 -1, overflow | TREE_OVERFLOW (prod));
6306 else if (tree_int_cst_sgn (arg01) >= 0)
6308 tmp = int_const_binop (MINUS_EXPR, arg01,
6309 build_int_cst (TREE_TYPE (arg01), 1));
6310 switch (tree_int_cst_sgn (arg1))
6312 case -1:
6313 neg_overflow = true;
6314 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6315 hi = prod;
6316 break;
6318 case 0:
6319 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6320 hi = tmp;
6321 break;
6323 case 1:
6324 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6325 lo = prod;
6326 break;
6328 default:
6329 gcc_unreachable ();
6332 else
6334 /* A negative divisor reverses the relational operators. */
6335 code = swap_tree_comparison (code);
6337 tmp = int_const_binop (PLUS_EXPR, arg01,
6338 build_int_cst (TREE_TYPE (arg01), 1));
6339 switch (tree_int_cst_sgn (arg1))
6341 case -1:
6342 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6343 lo = prod;
6344 break;
6346 case 0:
6347 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6348 lo = tmp;
6349 break;
6351 case 1:
6352 neg_overflow = true;
6353 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6354 hi = prod;
6355 break;
6357 default:
6358 gcc_unreachable ();
6362 switch (code)
6364 case EQ_EXPR:
6365 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6366 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6367 if (TREE_OVERFLOW (hi))
6368 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6369 if (TREE_OVERFLOW (lo))
6370 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6371 return build_range_check (loc, type, arg00, 1, lo, hi);
6373 case NE_EXPR:
6374 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6375 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6376 if (TREE_OVERFLOW (hi))
6377 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6378 if (TREE_OVERFLOW (lo))
6379 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6380 return build_range_check (loc, type, arg00, 0, lo, hi);
6382 case LT_EXPR:
6383 if (TREE_OVERFLOW (lo))
6385 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6386 return omit_one_operand_loc (loc, type, tmp, arg00);
6388 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6390 case LE_EXPR:
6391 if (TREE_OVERFLOW (hi))
6393 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6394 return omit_one_operand_loc (loc, type, tmp, arg00);
6396 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6398 case GT_EXPR:
6399 if (TREE_OVERFLOW (hi))
6401 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6402 return omit_one_operand_loc (loc, type, tmp, arg00);
6404 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6406 case GE_EXPR:
6407 if (TREE_OVERFLOW (lo))
6409 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6410 return omit_one_operand_loc (loc, type, tmp, arg00);
6412 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6414 default:
6415 break;
6418 return NULL_TREE;
6422 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6423 equality/inequality test, then return a simplified form of the test
6424 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6425 desired result type. */
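/* Example (illustrative only): if A has a 32-bit int type, the test
   (A & 0x80000000) != 0 masks exactly the sign bit and is rewritten as
   (int) A < 0; likewise (A & 0x80000000) == 0 becomes (int) A >= 0.  */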
6427 static tree
6428 fold_single_bit_test_into_sign_test (location_t loc,
6429 enum tree_code code, tree arg0, tree arg1,
6430 tree result_type)
6432 /* If this is testing a single bit, we can optimize the test. */
6433 if ((code == NE_EXPR || code == EQ_EXPR)
6434 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6435 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6437 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6438 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6439 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6441 if (arg00 != NULL_TREE
6442 /* This is only a win if casting to a signed type is cheap,
6443 i.e. when arg00's type is not a partial mode. */
6444 && TYPE_PRECISION (TREE_TYPE (arg00))
6445 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6447 tree stype = signed_type_for (TREE_TYPE (arg00));
6448 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6449 result_type,
6450 fold_convert_loc (loc, stype, arg00),
6451 build_int_cst (stype, 0));
6455 return NULL_TREE;
6458 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6459 equality/inequality test, then return a simplified form of
6460 the test using shifts and logical operations. Otherwise return
6461 NULL. RESULT_TYPE is the desired result type. */
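/* Example (added for exposition): when the sign-bit rewrite above does
   not apply, (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0 gets
   an extra XOR with 1 before the final AND.  */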
6463 tree
6464 fold_single_bit_test (location_t loc, enum tree_code code,
6465 tree arg0, tree arg1, tree result_type)
6467 /* If this is testing a single bit, we can optimize the test. */
6468 if ((code == NE_EXPR || code == EQ_EXPR)
6469 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6470 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6472 tree inner = TREE_OPERAND (arg0, 0);
6473 tree type = TREE_TYPE (arg0);
6474 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6475 enum machine_mode operand_mode = TYPE_MODE (type);
6476 int ops_unsigned;
6477 tree signed_type, unsigned_type, intermediate_type;
6478 tree tem, one;
6480 /* First, see if we can fold the single bit test into a sign-bit
6481 test. */
6482 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6483 result_type);
6484 if (tem)
6485 return tem;
6487 /* Otherwise we have (A & C) != 0 where C is a single bit,
6488 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6489 Similarly for (A & C) == 0. */
6491 /* If INNER is a right shift of a constant and it plus BITNUM does
6492 not overflow, adjust BITNUM and INNER. */
6493 if (TREE_CODE (inner) == RSHIFT_EXPR
6494 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6495 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6496 && bitnum < TYPE_PRECISION (type)
6497 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6498 bitnum - TYPE_PRECISION (type)))
6500 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6501 inner = TREE_OPERAND (inner, 0);
6504 /* If we are going to be able to omit the AND below, we must do our
6505 operations as unsigned. If we must use the AND, we have a choice.
6506 Normally unsigned is faster, but for some machines signed is. */
6507 #ifdef LOAD_EXTEND_OP
6508 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6509 && !flag_syntax_only) ? 0 : 1;
6510 #else
6511 ops_unsigned = 1;
6512 #endif
6514 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6515 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6516 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6517 inner = fold_convert_loc (loc, intermediate_type, inner);
6519 if (bitnum != 0)
6520 inner = build2 (RSHIFT_EXPR, intermediate_type,
6521 inner, size_int (bitnum));
6523 one = build_int_cst (intermediate_type, 1);
6525 if (code == EQ_EXPR)
6526 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6528 /* Put the AND last so it can combine with more things. */
6529 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6531 /* Make sure to return the proper type. */
6532 inner = fold_convert_loc (loc, result_type, inner);
6534 return inner;
6536 return NULL_TREE;
6539 /* Check whether we are allowed to reorder operands arg0 and arg1,
6540 such that the evaluation of arg1 occurs before arg0. */
6542 static bool
6543 reorder_operands_p (const_tree arg0, const_tree arg1)
6545 if (! flag_evaluation_order)
6546 return true;
6547 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6548 return true;
6549 return ! TREE_SIDE_EFFECTS (arg0)
6550 && ! TREE_SIDE_EFFECTS (arg1);
6553 /* Test whether it is preferable to swap two operands, ARG0 and
6554 ARG1, for example because ARG0 is an integer constant and ARG1
6555 isn't. If REORDER is true, only recommend swapping if we can
6556 evaluate the operands in reverse order. */
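/* Usage sketch (not part of the original source): fold_binary_loc uses
   this predicate to canonicalize operand order, so e.g. "5 < x" is
   rewritten as "x > 5" because an INTEGER_CST is preferred as the
   second operand.  */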
6558 bool
6559 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6561 STRIP_SIGN_NOPS (arg0);
6562 STRIP_SIGN_NOPS (arg1);
6564 if (TREE_CODE (arg1) == INTEGER_CST)
6565 return 0;
6566 if (TREE_CODE (arg0) == INTEGER_CST)
6567 return 1;
6569 if (TREE_CODE (arg1) == REAL_CST)
6570 return 0;
6571 if (TREE_CODE (arg0) == REAL_CST)
6572 return 1;
6574 if (TREE_CODE (arg1) == FIXED_CST)
6575 return 0;
6576 if (TREE_CODE (arg0) == FIXED_CST)
6577 return 1;
6579 if (TREE_CODE (arg1) == COMPLEX_CST)
6580 return 0;
6581 if (TREE_CODE (arg0) == COMPLEX_CST)
6582 return 1;
6584 if (TREE_CONSTANT (arg1))
6585 return 0;
6586 if (TREE_CONSTANT (arg0))
6587 return 1;
6589 if (optimize_function_for_size_p (cfun))
6590 return 0;
6592 if (reorder && flag_evaluation_order
6593 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6594 return 0;
6596 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6597 for commutative and comparison operators. Ensuring a canonical
6598 form allows the optimizers to find additional redundancies without
6599 having to explicitly check for both orderings. */
6600 if (TREE_CODE (arg0) == SSA_NAME
6601 && TREE_CODE (arg1) == SSA_NAME
6602 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6603 return 1;
6605 /* Put SSA_NAMEs last. */
6606 if (TREE_CODE (arg1) == SSA_NAME)
6607 return 0;
6608 if (TREE_CODE (arg0) == SSA_NAME)
6609 return 1;
6611 /* Put variables last. */
6612 if (DECL_P (arg1))
6613 return 0;
6614 if (DECL_P (arg0))
6615 return 1;
6617 return 0;
6620 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6621 ARG0 is extended to a wider type. */
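/* Example (added for exposition): with unsigned char C, the comparison
   (int) C == 300 can never hold because 300 is outside the shorter
   type's range, so it folds to constant false; (int) C < 300 likewise
   folds to constant true.  */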
6623 static tree
6624 fold_widened_comparison (location_t loc, enum tree_code code,
6625 tree type, tree arg0, tree arg1)
6627 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6628 tree arg1_unw;
6629 tree shorter_type, outer_type;
6630 tree min, max;
6631 bool above, below;
6633 if (arg0_unw == arg0)
6634 return NULL_TREE;
6635 shorter_type = TREE_TYPE (arg0_unw);
6637 #ifdef HAVE_canonicalize_funcptr_for_compare
6638 /* Disable this optimization if we're casting a function pointer
6639 type on targets that require function pointer canonicalization. */
6640 if (HAVE_canonicalize_funcptr_for_compare
6641 && TREE_CODE (shorter_type) == POINTER_TYPE
6642 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6643 return NULL_TREE;
6644 #endif
6646 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6647 return NULL_TREE;
6649 arg1_unw = get_unwidened (arg1, NULL_TREE);
6651 /* If possible, express the comparison in the shorter mode. */
6652 if ((code == EQ_EXPR || code == NE_EXPR
6653 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6654 && (TREE_TYPE (arg1_unw) == shorter_type
6655 || ((TYPE_PRECISION (shorter_type)
6656 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6657 && (TYPE_UNSIGNED (shorter_type)
6658 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6659 || (TREE_CODE (arg1_unw) == INTEGER_CST
6660 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6661 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6662 && int_fits_type_p (arg1_unw, shorter_type))))
6663 return fold_build2_loc (loc, code, type, arg0_unw,
6664 fold_convert_loc (loc, shorter_type, arg1_unw));
6666 if (TREE_CODE (arg1_unw) != INTEGER_CST
6667 || TREE_CODE (shorter_type) != INTEGER_TYPE
6668 || !int_fits_type_p (arg1_unw, shorter_type))
6669 return NULL_TREE;
6671 /* If we are comparing with an integer that does not fit into the range
6672 of the shorter type, the result is known. */
6673 outer_type = TREE_TYPE (arg1_unw);
6674 min = lower_bound_in_type (outer_type, shorter_type);
6675 max = upper_bound_in_type (outer_type, shorter_type);
6677 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6678 max, arg1_unw));
6679 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6680 arg1_unw, min));
6682 switch (code)
6684 case EQ_EXPR:
6685 if (above || below)
6686 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6687 break;
6689 case NE_EXPR:
6690 if (above || below)
6691 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6692 break;
6694 case LT_EXPR:
6695 case LE_EXPR:
6696 if (above)
6697 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6698 else if (below)
6699 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 case GT_EXPR:
6702 case GE_EXPR:
6703 if (above)
6704 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6705 else if (below)
6706 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6708 default:
6709 break;
6712 return NULL_TREE;
6715 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6716 ARG0 just the signedness is changed. */
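/* Example (illustrative only): for unsigned int U, the test
   (int) U == 5 is folded to U == 5U; ordered comparisons such as
   (int) U < 5 are left alone, since they are not invariant under the
   signedness change.  */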
6718 static tree
6719 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6720 tree arg0, tree arg1)
6722 tree arg0_inner;
6723 tree inner_type, outer_type;
6725 if (!CONVERT_EXPR_P (arg0))
6726 return NULL_TREE;
6728 outer_type = TREE_TYPE (arg0);
6729 arg0_inner = TREE_OPERAND (arg0, 0);
6730 inner_type = TREE_TYPE (arg0_inner);
6732 #ifdef HAVE_canonicalize_funcptr_for_compare
6733 /* Disable this optimization if we're casting a function pointer
6734 type on targets that require function pointer canonicalization. */
6735 if (HAVE_canonicalize_funcptr_for_compare
6736 && TREE_CODE (inner_type) == POINTER_TYPE
6737 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6738 return NULL_TREE;
6739 #endif
6741 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6742 return NULL_TREE;
6744 if (TREE_CODE (arg1) != INTEGER_CST
6745 && !(CONVERT_EXPR_P (arg1)
6746 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6747 return NULL_TREE;
6749 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6750 && code != NE_EXPR
6751 && code != EQ_EXPR)
6752 return NULL_TREE;
6754 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6755 return NULL_TREE;
6757 if (TREE_CODE (arg1) == INTEGER_CST)
6758 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6759 0, TREE_OVERFLOW (arg1));
6760 else
6761 arg1 = fold_convert_loc (loc, inner_type, arg1);
6763 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6766 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6767 the step of the array. Reconstructs s and delta in the case of s *
6768 delta being an integer constant (and thus already folded). ADDR is
6769 the address. OP1 is the multiplicative expression. If the
6770 function succeeds, the new address expression is returned.
6771 Otherwise NULL_TREE is returned. LOC is the location of the
6772 resulting expression. */
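/* Worked example (hypothetical, assuming 4-byte int): given int a[10],
   &a[2] p+ 4 * i is rewritten as &a[2 + i]; for the already folded
   constant form &a[2] p+ 8, s and delta are reconstructed as 4 and 2,
   yielding &a[4].  */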
6774 static tree
6775 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6777 tree s, delta, step;
6778 tree ref = TREE_OPERAND (addr, 0), pref;
6779 tree ret, pos;
6780 tree itype;
6781 bool mdim = false;
6783 /* Strip the nops that might be added when converting op1 to sizetype. */
6784 STRIP_NOPS (op1);
6786 /* Canonicalize op1 into a possibly non-constant delta
6787 and an INTEGER_CST s. */
6788 if (TREE_CODE (op1) == MULT_EXPR)
6790 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6792 STRIP_NOPS (arg0);
6793 STRIP_NOPS (arg1);
6795 if (TREE_CODE (arg0) == INTEGER_CST)
6797 s = arg0;
6798 delta = arg1;
6800 else if (TREE_CODE (arg1) == INTEGER_CST)
6802 s = arg1;
6803 delta = arg0;
6805 else
6806 return NULL_TREE;
6808 else if (TREE_CODE (op1) == INTEGER_CST)
6810 delta = op1;
6811 s = NULL_TREE;
6813 else
6815 /* Treat op1 as delta * 1. */
6816 delta = op1;
6817 s = integer_one_node;
6820 /* Handle &x.array the same as we would handle &x.array[0]. */
6821 if (TREE_CODE (ref) == COMPONENT_REF
6822 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6824 tree domain;
6826 /* Remember if this was a multi-dimensional array. */
6827 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6828 mdim = true;
6830 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6831 if (! domain)
6832 goto cont;
6833 itype = TREE_TYPE (domain);
6835 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6836 if (TREE_CODE (step) != INTEGER_CST)
6837 goto cont;
6839 if (s)
6841 if (! tree_int_cst_equal (step, s))
6842 goto cont;
6844 else
6846 /* Check whether delta is a multiple of step. */
6847 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6848 if (! tmp)
6849 goto cont;
6850 delta = tmp;
6853 /* Only fold here if we can verify we do not overflow one
6854 dimension of a multi-dimensional array. */
6855 if (mdim)
6857 tree tmp;
6859 if (!TYPE_MIN_VALUE (domain)
6860 || !TYPE_MAX_VALUE (domain)
6861 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6862 goto cont;
6864 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6865 fold_convert_loc (loc, itype,
6866 TYPE_MIN_VALUE (domain)),
6867 fold_convert_loc (loc, itype, delta));
6868 if (TREE_CODE (tmp) != INTEGER_CST
6869 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6870 goto cont;
6873 /* We found a suitable component reference. */
6875 pref = TREE_OPERAND (addr, 0);
6876 ret = copy_node (pref);
6877 SET_EXPR_LOCATION (ret, loc);
6879 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6880 fold_build2_loc
6881 (loc, PLUS_EXPR, itype,
6882 fold_convert_loc (loc, itype,
6883 TYPE_MIN_VALUE
6884 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6885 fold_convert_loc (loc, itype, delta)),
6886 NULL_TREE, NULL_TREE);
6887 return build_fold_addr_expr_loc (loc, ret);
6890 cont:
6892 for (;; ref = TREE_OPERAND (ref, 0))
6894 if (TREE_CODE (ref) == ARRAY_REF)
6896 tree domain;
6898 /* Remember if this was a multi-dimensional array. */
6899 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6900 mdim = true;
6902 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6903 if (! domain)
6904 continue;
6905 itype = TREE_TYPE (domain);
6907 step = array_ref_element_size (ref);
6908 if (TREE_CODE (step) != INTEGER_CST)
6909 continue;
6911 if (s)
6913 if (! tree_int_cst_equal (step, s))
6914 continue;
6916 else
6918 /* Check whether delta is a multiple of step. */
6919 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6920 if (! tmp)
6921 continue;
6922 delta = tmp;
6925 /* Only fold here if we can verify we do not overflow one
6926 dimension of a multi-dimensional array. */
6927 if (mdim)
6929 tree tmp;
6931 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6932 || !TYPE_MAX_VALUE (domain)
6933 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6934 continue;
6936 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6937 fold_convert_loc (loc, itype,
6938 TREE_OPERAND (ref, 1)),
6939 fold_convert_loc (loc, itype, delta));
6940 if (!tmp
6941 || TREE_CODE (tmp) != INTEGER_CST
6942 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6943 continue;
6946 break;
6948 else
6949 mdim = false;
6951 if (!handled_component_p (ref))
6952 return NULL_TREE;
6955 /* We found a suitable array reference. So copy everything up to it,
6956 and replace the index. */
6958 pref = TREE_OPERAND (addr, 0);
6959 ret = copy_node (pref);
6960 SET_EXPR_LOCATION (ret, loc);
6961 pos = ret;
6963 while (pref != ref)
6965 pref = TREE_OPERAND (pref, 0);
6966 TREE_OPERAND (pos, 0) = copy_node (pref);
6967 pos = TREE_OPERAND (pos, 0);
6970 TREE_OPERAND (pos, 1)
6971 = fold_build2_loc (loc, PLUS_EXPR, itype,
6972 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6973 fold_convert_loc (loc, itype, delta));
6974 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6978 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6979 means A >= Y && A != MAX, but in this case we know that
6980 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
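/* Concrete instance (added for exposition): for signed ints, the
   conjunction a < n && a + 1 > i is rewritten as a < n && a >= i; the
   bound a < n guarantees that a + 1 does not wrap, which the rewrite
   relies on.  */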
6982 static tree
6983 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6985 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6987 if (TREE_CODE (bound) == LT_EXPR)
6988 a = TREE_OPERAND (bound, 0);
6989 else if (TREE_CODE (bound) == GT_EXPR)
6990 a = TREE_OPERAND (bound, 1);
6991 else
6992 return NULL_TREE;
6994 typea = TREE_TYPE (a);
6995 if (!INTEGRAL_TYPE_P (typea)
6996 && !POINTER_TYPE_P (typea))
6997 return NULL_TREE;
6999 if (TREE_CODE (ineq) == LT_EXPR)
7001 a1 = TREE_OPERAND (ineq, 1);
7002 y = TREE_OPERAND (ineq, 0);
7004 else if (TREE_CODE (ineq) == GT_EXPR)
7006 a1 = TREE_OPERAND (ineq, 0);
7007 y = TREE_OPERAND (ineq, 1);
7009 else
7010 return NULL_TREE;
7012 if (TREE_TYPE (a1) != typea)
7013 return NULL_TREE;
7015 if (POINTER_TYPE_P (typea))
7017 /* Convert the pointers to signed integers before taking the difference. */
7018 tree ta = fold_convert_loc (loc, ssizetype, a);
7019 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7020 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7022 else
7023 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7025 if (!diff || !integer_onep (diff))
7026 return NULL_TREE;
7028 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7031 /* Fold a sum or difference of at least one multiplication.
7032 Returns the folded tree or NULL if no simplification could be made. */
7034 static tree
7035 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7036 tree arg0, tree arg1)
7038 tree arg00, arg01, arg10, arg11;
7039 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7041 /* (A * C) +- (B * C) -> (A+-B) * C.
7042 (A * C) +- A -> A * (C+-1).
7043 We are most concerned about the case where C is a constant,
7044 but other combinations show up during loop reduction. Since
7045 it is not difficult, try all four possibilities. */
7047 if (TREE_CODE (arg0) == MULT_EXPR)
7049 arg00 = TREE_OPERAND (arg0, 0);
7050 arg01 = TREE_OPERAND (arg0, 1);
7052 else if (TREE_CODE (arg0) == INTEGER_CST)
7054 arg00 = build_one_cst (type);
7055 arg01 = arg0;
7057 else
7059 /* We cannot generate constant 1 for fract. */
7060 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7061 return NULL_TREE;
7062 arg00 = arg0;
7063 arg01 = build_one_cst (type);
7065 if (TREE_CODE (arg1) == MULT_EXPR)
7067 arg10 = TREE_OPERAND (arg1, 0);
7068 arg11 = TREE_OPERAND (arg1, 1);
7070 else if (TREE_CODE (arg1) == INTEGER_CST)
7072 arg10 = build_one_cst (type);
7073 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7074 the purpose of this canonicalization. */
7075 if (TREE_INT_CST_HIGH (arg1) == -1
7076 && negate_expr_p (arg1)
7077 && code == PLUS_EXPR)
7079 arg11 = negate_expr (arg1);
7080 code = MINUS_EXPR;
7082 else
7083 arg11 = arg1;
7085 else
7087 /* We cannot generate constant 1 for fract. */
7088 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7089 return NULL_TREE;
7090 arg10 = arg1;
7091 arg11 = build_one_cst (type);
7093 same = NULL_TREE;
7095 if (operand_equal_p (arg01, arg11, 0))
7096 same = arg01, alt0 = arg00, alt1 = arg10;
7097 else if (operand_equal_p (arg00, arg10, 0))
7098 same = arg00, alt0 = arg01, alt1 = arg11;
7099 else if (operand_equal_p (arg00, arg11, 0))
7100 same = arg00, alt0 = arg01, alt1 = arg10;
7101 else if (operand_equal_p (arg01, arg10, 0))
7102 same = arg01, alt0 = arg00, alt1 = arg11;
7104 /* No identical multiplicands; see if we can find a common
7105 power-of-two factor in non-power-of-two multiplies. This
7106 can help in multi-dimensional array access. */
7107 else if (host_integerp (arg01, 0)
7108 && host_integerp (arg11, 0))
7110 HOST_WIDE_INT int01, int11, tmp;
7111 bool swap = false;
7112 tree maybe_same;
7113 int01 = TREE_INT_CST_LOW (arg01);
7114 int11 = TREE_INT_CST_LOW (arg11);
7116 /* Move min of absolute values to int11. */
7117 if (absu_hwi (int01) < absu_hwi (int11))
7119 tmp = int01, int01 = int11, int11 = tmp;
7120 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7121 maybe_same = arg01;
7122 swap = true;
7124 else
7125 maybe_same = arg11;
7127 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7128 /* The remainder should not be a constant, otherwise we
7129 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7130 increased the number of multiplications necessary. */
7131 && TREE_CODE (arg10) != INTEGER_CST)
7133 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7134 build_int_cst (TREE_TYPE (arg00),
7135 int01 / int11));
7136 alt1 = arg10;
7137 same = maybe_same;
7138 if (swap)
7139 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7143 if (same)
7144 return fold_build2_loc (loc, MULT_EXPR, type,
7145 fold_build2_loc (loc, code, type,
7146 fold_convert_loc (loc, type, alt0),
7147 fold_convert_loc (loc, type, alt1)),
7148 fold_convert_loc (loc, type, same));
7150 return NULL_TREE;
7153 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7154 specified by EXPR into the buffer PTR of length LEN bytes.
7155 Return the number of bytes placed in the buffer, or zero
7156 upon failure. */
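/* Byte-layout example (illustrative, assuming a 32-bit int on a
   little-endian target): encoding the INTEGER_CST 0x01020304 stores
   the bytes 04 03 02 01; on BYTES_BIG_ENDIAN targets the order is
   reversed.  */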
7158 static int
7159 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7161 tree type = TREE_TYPE (expr);
7162 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7163 int byte, offset, word, words;
7164 unsigned char value;
7166 if (total_bytes > len)
7167 return 0;
7168 words = total_bytes / UNITS_PER_WORD;
7170 for (byte = 0; byte < total_bytes; byte++)
7172 int bitpos = byte * BITS_PER_UNIT;
7173 if (bitpos < HOST_BITS_PER_WIDE_INT)
7174 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7175 else
7176 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7177 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7179 if (total_bytes > UNITS_PER_WORD)
7181 word = byte / UNITS_PER_WORD;
7182 if (WORDS_BIG_ENDIAN)
7183 word = (words - 1) - word;
7184 offset = word * UNITS_PER_WORD;
7185 if (BYTES_BIG_ENDIAN)
7186 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7187 else
7188 offset += byte % UNITS_PER_WORD;
7190 else
7191 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7192 ptr[offset] = value;
7194 return total_bytes;
7198 /* Subroutine of native_encode_expr. Encode the REAL_CST
7199 specified by EXPR into the buffer PTR of length LEN bytes.
7200 Return the number of bytes placed in the buffer, or zero
7201 upon failure. */
7203 static int
7204 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7206 tree type = TREE_TYPE (expr);
7207 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7208 int byte, offset, word, words, bitpos;
7209 unsigned char value;
7211 /* There are always 32 bits in each long, no matter the size of
7212 the host's long. We handle floating point representations with
7213 up to 192 bits. */
7214 long tmp[6];
7216 if (total_bytes > len)
7217 return 0;
7218 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7220 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7222 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7223 bitpos += BITS_PER_UNIT)
7225 byte = (bitpos / BITS_PER_UNIT) & 3;
7226 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7228 if (UNITS_PER_WORD < 4)
7230 word = byte / UNITS_PER_WORD;
7231 if (WORDS_BIG_ENDIAN)
7232 word = (words - 1) - word;
7233 offset = word * UNITS_PER_WORD;
7234 if (BYTES_BIG_ENDIAN)
7235 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7236 else
7237 offset += byte % UNITS_PER_WORD;
7239 else
7240 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7241 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7243 return total_bytes;
7246 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7247 specified by EXPR into the buffer PTR of length LEN bytes.
7248 Return the number of bytes placed in the buffer, or zero
7249 upon failure. */
7251 static int
7252 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7254 int rsize, isize;
7255 tree part;
7257 part = TREE_REALPART (expr);
7258 rsize = native_encode_expr (part, ptr, len);
7259 if (rsize == 0)
7260 return 0;
7261 part = TREE_IMAGPART (expr);
7262 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7263 if (isize != rsize)
7264 return 0;
7265 return rsize + isize;
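/* Editor's illustrative sketch (not part of GCC): the complex case
   above simply lays out the real part followed by the imaginary part.
   A host-side analogue for a pair of doubles, assuming both parts
   encode to the same size:  */

static int
encode_complex_pair (double re, double im, unsigned char *ptr, int len)
{
  if (len < 2 * (int) sizeof (double))
    return 0;
  memcpy (ptr, &re, sizeof (double));                   /* real part */
  memcpy (ptr + sizeof (double), &im, sizeof (double)); /* imag part */
  return 2 * (int) sizeof (double);
}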
7269 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7270 specified by EXPR into the buffer PTR of length LEN bytes.
7271 Return the number of bytes placed in the buffer, or zero
7272 upon failure. */
7274 static int
7275 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7277 unsigned i, count;
7278 int size, offset;
7279 tree itype, elem;
7281 offset = 0;
7282 count = VECTOR_CST_NELTS (expr);
7283 itype = TREE_TYPE (TREE_TYPE (expr));
7284 size = GET_MODE_SIZE (TYPE_MODE (itype));
7285 for (i = 0; i < count; i++)
7287 elem = VECTOR_CST_ELT (expr, i);
7288 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7289 return 0;
7290 offset += size;
7292 return offset;
7296 /* Subroutine of native_encode_expr. Encode the STRING_CST
7297 specified by EXPR into the buffer PTR of length LEN bytes.
7298 Return the number of bytes placed in the buffer, or zero
7299 upon failure. */
7301 static int
7302 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7304 tree type = TREE_TYPE (expr);
7305 HOST_WIDE_INT total_bytes;
7307 if (TREE_CODE (type) != ARRAY_TYPE
7308 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7309 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7310 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7311 return 0;
7312 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7313 if (total_bytes > len)
7314 return 0;
7315 if (TREE_STRING_LENGTH (expr) < total_bytes)
7317 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7318 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7319 total_bytes - TREE_STRING_LENGTH (expr));
7321 else
7322 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7323 return total_bytes;
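/* Editor's illustrative sketch (not part of GCC): the string case is a
   bounded copy that zero-fills the tail when the STRING_CST is shorter
   than its array type, so e.g. char a[8] = "abc" encodes as the bytes
   'a' 'b' 'c' followed by five zero bytes:  */

static int
encode_padded_string (const char *str, int str_len, int total_bytes,
                      unsigned char *ptr, int len)
{
  if (total_bytes > len)
    return 0;
  if (str_len < total_bytes)
    {
      memcpy (ptr, str, str_len);
      memset (ptr + str_len, 0, total_bytes - str_len);
    }
  else
    memcpy (ptr, str, total_bytes);
  return total_bytes;
}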
7327 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7328 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7329 buffer PTR of length LEN bytes. Return the number of bytes
7330 placed in the buffer, or zero upon failure. */
7332 int
7333 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7335 switch (TREE_CODE (expr))
7337 case INTEGER_CST:
7338 return native_encode_int (expr, ptr, len);
7340 case REAL_CST:
7341 return native_encode_real (expr, ptr, len);
7343 case COMPLEX_CST:
7344 return native_encode_complex (expr, ptr, len);
7346 case VECTOR_CST:
7347 return native_encode_vector (expr, ptr, len);
7349 case STRING_CST:
7350 return native_encode_string (expr, ptr, len);
7352 default:
7353 return 0;
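/* Editor's note: callers normally pair this encoder with
   native_interpret_expr to reinterpret a constant in another type,
   as fold_view_convert_expr does below, e.g. (sketch):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     if (len != 0)
       folded = native_interpret_expr (new_type, buf, len);

   where `folded' and `new_type' are hypothetical locals.  */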
7358 /* Subroutine of native_interpret_expr. Interpret the contents of
7359 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7360 If the buffer cannot be interpreted, return NULL_TREE. */
7362 static tree
7363 native_interpret_int (tree type, const unsigned char *ptr, int len)
7365 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7366 int byte, offset, word, words;
7367 unsigned char value;
7368 double_int result;
7370 if (total_bytes > len)
7371 return NULL_TREE;
7372 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7373 return NULL_TREE;
7375 result = double_int_zero;
7376 words = total_bytes / UNITS_PER_WORD;
7378 for (byte = 0; byte < total_bytes; byte++)
7380 int bitpos = byte * BITS_PER_UNIT;
7381 if (total_bytes > UNITS_PER_WORD)
7383 word = byte / UNITS_PER_WORD;
7384 if (WORDS_BIG_ENDIAN)
7385 word = (words - 1) - word;
7386 offset = word * UNITS_PER_WORD;
7387 if (BYTES_BIG_ENDIAN)
7388 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7389 else
7390 offset += byte % UNITS_PER_WORD;
7392 else
7393 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7394 value = ptr[offset];
7396 if (bitpos < HOST_BITS_PER_WIDE_INT)
7397 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7398 else
7399 result.high |= (unsigned HOST_WIDE_INT) value
7400 << (bitpos - HOST_BITS_PER_WIDE_INT);
7403 return double_int_to_tree (type, result);
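/* Editor's illustrative sketch (not part of GCC): the loop above is
   the exact inverse of native_encode_int; once the target byte order
   has been undone, the accumulation is simply

     result |= (unsigned HOST_WIDE_INT) ptr[offset] << bitpos;

   split across result.low and result.high at HOST_BITS_PER_WIDE_INT.  */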
7407 /* Subroutine of native_interpret_expr. Interpret the contents of
7408 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7409 If the buffer cannot be interpreted, return NULL_TREE. */
7411 static tree
7412 native_interpret_real (tree type, const unsigned char *ptr, int len)
7414 enum machine_mode mode = TYPE_MODE (type);
7415 int total_bytes = GET_MODE_SIZE (mode);
7416 int byte, offset, word, words, bitpos;
7417 unsigned char value;
7418 /* There are always 32 bits in each long, no matter the size of
7419 the host's long. We handle floating point representations with
7420 up to 192 bits. */
7421 REAL_VALUE_TYPE r;
7422 long tmp[6];
7424 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7425 if (total_bytes > len || total_bytes > 24)
7426 return NULL_TREE;
7427 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7429 memset (tmp, 0, sizeof (tmp));
7430 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7431 bitpos += BITS_PER_UNIT)
7433 byte = (bitpos / BITS_PER_UNIT) & 3;
7434 if (UNITS_PER_WORD < 4)
7436 word = byte / UNITS_PER_WORD;
7437 if (WORDS_BIG_ENDIAN)
7438 word = (words - 1) - word;
7439 offset = word * UNITS_PER_WORD;
7440 if (BYTES_BIG_ENDIAN)
7441 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7442 else
7443 offset += byte % UNITS_PER_WORD;
7445 else
7446 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7447 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7449 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7452 real_from_target (&r, tmp, mode);
7453 return build_real (type, r);
7457 /* Subroutine of native_interpret_expr. Interpret the contents of
7458 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7459 If the buffer cannot be interpreted, return NULL_TREE. */
7461 static tree
7462 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7464 tree etype, rpart, ipart;
7465 int size;
7467 etype = TREE_TYPE (type);
7468 size = GET_MODE_SIZE (TYPE_MODE (etype));
7469 if (size * 2 > len)
7470 return NULL_TREE;
7471 rpart = native_interpret_expr (etype, ptr, size);
7472 if (!rpart)
7473 return NULL_TREE;
7474 ipart = native_interpret_expr (etype, ptr+size, size);
7475 if (!ipart)
7476 return NULL_TREE;
7477 return build_complex (type, rpart, ipart);
7481 /* Subroutine of native_interpret_expr. Interpret the contents of
7482 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7483 If the buffer cannot be interpreted, return NULL_TREE. */
7485 static tree
7486 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7488 tree etype, elem;
7489 int i, size, count;
7490 tree *elements;
7492 etype = TREE_TYPE (type);
7493 size = GET_MODE_SIZE (TYPE_MODE (etype));
7494 count = TYPE_VECTOR_SUBPARTS (type);
7495 if (size * count > len)
7496 return NULL_TREE;
7498 elements = XALLOCAVEC (tree, count);
7499 for (i = count - 1; i >= 0; i--)
7501 elem = native_interpret_expr (etype, ptr+(i*size), size);
7502 if (!elem)
7503 return NULL_TREE;
7504 elements[i] = elem;
7506 return build_vector (type, elements);
7510 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7511 the buffer PTR of length LEN as a constant of type TYPE. For
7512 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7513 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7514 return NULL_TREE. */
7516 tree
7517 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7519 switch (TREE_CODE (type))
7521 case INTEGER_TYPE:
7522 case ENUMERAL_TYPE:
7523 case BOOLEAN_TYPE:
7524 case POINTER_TYPE:
7525 case REFERENCE_TYPE:
7526 return native_interpret_int (type, ptr, len);
7528 case REAL_TYPE:
7529 return native_interpret_real (type, ptr, len);
7531 case COMPLEX_TYPE:
7532 return native_interpret_complex (type, ptr, len);
7534 case VECTOR_TYPE:
7535 return native_interpret_vector (type, ptr, len);
7537 default:
7538 return NULL_TREE;
7542 /* Returns true if we can interpret the contents of a native encoding
7543 as TYPE. */
7545 static bool
7546 can_native_interpret_type_p (tree type)
7548 switch (TREE_CODE (type))
7550 case INTEGER_TYPE:
7551 case ENUMERAL_TYPE:
7552 case BOOLEAN_TYPE:
7553 case POINTER_TYPE:
7554 case REFERENCE_TYPE:
7555 case REAL_TYPE:
7556 case COMPLEX_TYPE:
7557 case VECTOR_TYPE:
7558 return true;
7559 default:
7560 return false;
7564 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7565 TYPE at compile-time. If we're unable to perform the conversion
7566 return NULL_TREE. */
7568 static tree
7569 fold_view_convert_expr (tree type, tree expr)
7571 /* We support up to 512-bit values (for V8DFmode). */
7572 unsigned char buffer[64];
7573 int len;
7575 /* Check that the host and target are sane. */
7576 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7577 return NULL_TREE;
7579 len = native_encode_expr (expr, buffer, sizeof (buffer));
7580 if (len == 0)
7581 return NULL_TREE;
7583 return native_interpret_expr (type, buffer, len);
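/* Editor's illustrative sketch (not part of GCC): fold_view_convert_expr
   is the compile-time analogue of type-punning through memcpy on the
   host, e.g. viewing a float's bit pattern as a 32-bit integer; the
   size check mirrors the folder's reliance on native_encode_expr to
   validate the operands.  */

static unsigned int
view_convert_float_bits (float f)
{
  unsigned int u;
  /* Assumes sizeof (float) == sizeof (unsigned int), as on common
     ILP32/LP64 hosts; a real caller must check this.  */
  memcpy (&u, &f, sizeof (u));
  return u;
}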
7586 /* Build an expression for the address of T. Folds away INDIRECT_REF
7587 to avoid confusing the gimplify process. */
7589 tree
7590 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7592 /* The size of the object is not relevant when talking about its address. */
7593 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7594 t = TREE_OPERAND (t, 0);
7596 if (TREE_CODE (t) == INDIRECT_REF)
7598 t = TREE_OPERAND (t, 0);
7600 if (TREE_TYPE (t) != ptrtype)
7601 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7603 else if (TREE_CODE (t) == MEM_REF
7604 && integer_zerop (TREE_OPERAND (t, 1)))
7605 return TREE_OPERAND (t, 0);
7606 else if (TREE_CODE (t) == MEM_REF
7607 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7608 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7609 TREE_OPERAND (t, 0),
7610 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7611 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7613 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7615 if (TREE_TYPE (t) != ptrtype)
7616 t = fold_convert_loc (loc, ptrtype, t);
7618 else
7619 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7621 return t;
7624 /* Build an expression for the address of T. */
7626 tree
7627 build_fold_addr_expr_loc (location_t loc, tree t)
7629 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7631 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7634 static bool vec_cst_ctor_to_array (tree, tree *);
7636 /* Fold a unary expression of code CODE and type TYPE with operand
7637 OP0. Return the folded expression if folding is successful.
7638 Otherwise, return NULL_TREE. */
7640 tree
7641 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7643 tree tem;
7644 tree arg0;
7645 enum tree_code_class kind = TREE_CODE_CLASS (code);
7647 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7648 && TREE_CODE_LENGTH (code) == 1);
7650 arg0 = op0;
7651 if (arg0)
7653 if (CONVERT_EXPR_CODE_P (code)
7654 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7656 /* Don't use STRIP_NOPS, because signedness of argument type
7657 matters. */
7658 STRIP_SIGN_NOPS (arg0);
7660 else
7662 /* Strip any conversions that don't change the mode. This
7663 is safe for every expression, except for a comparison
7664 expression because its signedness is derived from its
7665 operands.
7667 Note that this is done as an internal manipulation within
7668 the constant folder, in order to find the simplest
7669 representation of the arguments so that their form can be
7670 studied. In any case, the appropriate type conversions
7671 should be put back in the tree that will get out of the
7672 constant folder. */
7673 STRIP_NOPS (arg0);
7677 if (TREE_CODE_CLASS (code) == tcc_unary)
7679 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7680 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7681 fold_build1_loc (loc, code, type,
7682 fold_convert_loc (loc, TREE_TYPE (op0),
7683 TREE_OPERAND (arg0, 1))));
7684 else if (TREE_CODE (arg0) == COND_EXPR)
7686 tree arg01 = TREE_OPERAND (arg0, 1);
7687 tree arg02 = TREE_OPERAND (arg0, 2);
7688 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7689 arg01 = fold_build1_loc (loc, code, type,
7690 fold_convert_loc (loc,
7691 TREE_TYPE (op0), arg01));
7692 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7693 arg02 = fold_build1_loc (loc, code, type,
7694 fold_convert_loc (loc,
7695 TREE_TYPE (op0), arg02));
7696 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7697 arg01, arg02);
7699 /* If this was a conversion, and all we did was move it inside
7700 the COND_EXPR, bring it back out. But leave it if
7701 it is a conversion from integer to integer and the
7702 result precision is no wider than a word since such a
7703 conversion is cheap and may be optimized away by combine,
7704 while it couldn't if it were outside the COND_EXPR. Then return
7705 so we don't get into an infinite recursion loop taking the
7706 conversion out and then back in. */
7708 if ((CONVERT_EXPR_CODE_P (code)
7709 || code == NON_LVALUE_EXPR)
7710 && TREE_CODE (tem) == COND_EXPR
7711 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7712 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7714 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7715 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7716 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7717 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7718 && (INTEGRAL_TYPE_P
7719 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7720 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7721 || flag_syntax_only))
7722 tem = build1_loc (loc, code, type,
7723 build3 (COND_EXPR,
7724 TREE_TYPE (TREE_OPERAND
7725 (TREE_OPERAND (tem, 1), 0)),
7726 TREE_OPERAND (tem, 0),
7727 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7728 TREE_OPERAND (TREE_OPERAND (tem, 2),
7729 0)));
7730 return tem;
7734 switch (code)
7736 case PAREN_EXPR:
7737 /* Re-association barriers around constants and other re-association
7738 barriers can be removed. */
7739 if (CONSTANT_CLASS_P (op0)
7740 || TREE_CODE (op0) == PAREN_EXPR)
7741 return fold_convert_loc (loc, type, op0);
7742 return NULL_TREE;
7744 CASE_CONVERT:
7745 case FLOAT_EXPR:
7746 case FIX_TRUNC_EXPR:
7747 if (TREE_TYPE (op0) == type)
7748 return op0;
7750 if (COMPARISON_CLASS_P (op0))
7752 /* If we have (type) (a CMP b) and type is an integral type, return a
7753 new expression involving the new type. Canonicalize
7754 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7755 non-integral type.
7756 Do not fold the result, as that would not simplify further and
7757 folding again would result in infinite recursion. */
7758 if (TREE_CODE (type) == BOOLEAN_TYPE)
7759 return build2_loc (loc, TREE_CODE (op0), type,
7760 TREE_OPERAND (op0, 0),
7761 TREE_OPERAND (op0, 1));
7762 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7763 && TREE_CODE (type) != VECTOR_TYPE)
7764 return build3_loc (loc, COND_EXPR, type, op0,
7765 constant_boolean_node (true, type),
7766 constant_boolean_node (false, type));
7769 /* Handle cases of two conversions in a row. */
7770 if (CONVERT_EXPR_P (op0))
7772 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7773 tree inter_type = TREE_TYPE (op0);
7774 int inside_int = INTEGRAL_TYPE_P (inside_type);
7775 int inside_ptr = POINTER_TYPE_P (inside_type);
7776 int inside_float = FLOAT_TYPE_P (inside_type);
7777 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7778 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7779 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7780 int inter_int = INTEGRAL_TYPE_P (inter_type);
7781 int inter_ptr = POINTER_TYPE_P (inter_type);
7782 int inter_float = FLOAT_TYPE_P (inter_type);
7783 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7784 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7785 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7786 int final_int = INTEGRAL_TYPE_P (type);
7787 int final_ptr = POINTER_TYPE_P (type);
7788 int final_float = FLOAT_TYPE_P (type);
7789 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7790 unsigned int final_prec = TYPE_PRECISION (type);
7791 int final_unsignedp = TYPE_UNSIGNED (type);
7793 /* In addition to the cases of two conversions in a row
7794 handled below, if we are converting something to its own
7795 type via an object of identical or wider precision, neither
7796 conversion is needed. */
7797 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7798 && (((inter_int || inter_ptr) && final_int)
7799 || (inter_float && final_float))
7800 && inter_prec >= final_prec)
7801 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7803 /* Likewise, if the intermediate and initial types are either both
7804 float or both integer, we don't need the middle conversion if the
7805 former is wider than the latter and doesn't change the signedness
7806 (for integers). Avoid this if the final type is a pointer since
7807 then we sometimes need the middle conversion. Likewise if the
7808 final type has a precision not equal to the size of its mode. */
7809 if (((inter_int && inside_int)
7810 || (inter_float && inside_float)
7811 || (inter_vec && inside_vec))
7812 && inter_prec >= inside_prec
7813 && (inter_float || inter_vec
7814 || inter_unsignedp == inside_unsignedp)
7815 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7816 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7817 && ! final_ptr
7818 && (! final_vec || inter_prec == inside_prec))
7819 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7821 /* If we have a sign-extension of a zero-extended value, we can
7822 replace that by a single zero-extension. Likewise if the
7823 final conversion does not change precision we can drop the
7824 intermediate conversion. */
7825 if (inside_int && inter_int && final_int
7826 && ((inside_prec < inter_prec && inter_prec < final_prec
7827 && inside_unsignedp && !inter_unsignedp)
7828 || final_prec == inter_prec))
7829 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7831 /* Two conversions in a row are not needed unless:
7832 - some conversion is floating-point (overstrict for now), or
7833 - some conversion is a vector (overstrict for now), or
7834 - the intermediate type is narrower than both initial and
7835 final, or
7836 - the intermediate type and innermost type differ in signedness,
7837 and the outermost type is wider than the intermediate, or
7838 - the initial type is a pointer type and the precisions of the
7839 intermediate and final types differ, or
7840 - the final type is a pointer type and the precisions of the
7841 initial and intermediate types differ. */
7842 if (! inside_float && ! inter_float && ! final_float
7843 && ! inside_vec && ! inter_vec && ! final_vec
7844 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7845 && ! (inside_int && inter_int
7846 && inter_unsignedp != inside_unsignedp
7847 && inter_prec < final_prec)
7848 && ((inter_unsignedp && inter_prec > inside_prec)
7849 == (final_unsignedp && final_prec > inter_prec))
7850 && ! (inside_ptr && inter_prec != final_prec)
7851 && ! (final_ptr && inside_prec != inter_prec)
7852 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7853 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7854 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
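/* Editor's note: concretely, with 32-bit int and 16-bit short,
   (short) (int) s for a short s drops the intermediate widening by
   the rules above, while (int) (short) i for an int i keeps the
   narrowing (short) step, because there the intermediate type is
   narrower than both the initial and the final types.  */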
7857 /* Handle (T *)&A.B.C for A being of type T and B and C
7858 living at offset zero. This occurs frequently in
7859 C++ upcasting and then accessing the base. */
7860 if (TREE_CODE (op0) == ADDR_EXPR
7861 && POINTER_TYPE_P (type)
7862 && handled_component_p (TREE_OPERAND (op0, 0)))
7864 HOST_WIDE_INT bitsize, bitpos;
7865 tree offset;
7866 enum machine_mode mode;
7867 int unsignedp, volatilep;
7868 tree base = TREE_OPERAND (op0, 0);
7869 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7870 &mode, &unsignedp, &volatilep, false);
7871 /* If the reference was to a (constant) zero offset, we can use
7872 the address of the base if it has the same base type
7873 as the result type and the pointer type is unqualified. */
7874 if (! offset && bitpos == 0
7875 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7876 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7877 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7878 return fold_convert_loc (loc, type,
7879 build_fold_addr_expr_loc (loc, base));
7882 if (TREE_CODE (op0) == MODIFY_EXPR
7883 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7884 /* Detect assigning a bitfield. */
7885 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7886 && DECL_BIT_FIELD
7887 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7889 /* Don't leave an assignment inside a conversion
7890 unless assigning a bitfield. */
7891 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7892 /* First do the assignment, then return converted constant. */
7893 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7894 TREE_NO_WARNING (tem) = 1;
7895 TREE_USED (tem) = 1;
7896 return tem;
7899 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7900 constant (if x has signed type, the sign bit cannot be set
7901 in c). This folds extension into the BIT_AND_EXPR.
7902 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7903 very likely don't have maximal range for their precision and this
7904 transformation effectively doesn't preserve non-maximal ranges. */
7905 if (TREE_CODE (type) == INTEGER_TYPE
7906 && TREE_CODE (op0) == BIT_AND_EXPR
7907 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7909 tree and_expr = op0;
7910 tree and0 = TREE_OPERAND (and_expr, 0);
7911 tree and1 = TREE_OPERAND (and_expr, 1);
7912 int change = 0;
7914 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7915 || (TYPE_PRECISION (type)
7916 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7917 change = 1;
7918 else if (TYPE_PRECISION (TREE_TYPE (and1))
7919 <= HOST_BITS_PER_WIDE_INT
7920 && host_integerp (and1, 1))
7922 unsigned HOST_WIDE_INT cst;
7924 cst = tree_low_cst (and1, 1);
7925 cst &= (HOST_WIDE_INT) -1
7926 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7927 change = (cst == 0);
7928 #ifdef LOAD_EXTEND_OP
7929 if (change
7930 && !flag_syntax_only
7931 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7932 == ZERO_EXTEND))
7934 tree uns = unsigned_type_for (TREE_TYPE (and0));
7935 and0 = fold_convert_loc (loc, uns, and0);
7936 and1 = fold_convert_loc (loc, uns, and1);
7938 #endif
7940 if (change)
7942 tem = force_fit_type_double (type, tree_to_double_int (and1),
7943 0, TREE_OVERFLOW (and1));
7944 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7945 fold_convert_loc (loc, type, and0), tem);
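/* Editor's note: for example, (unsigned long) (x & 0xff) with a
   32-bit unsigned int x becomes (unsigned long) x & 0xff, letting
   the widening cast combine directly with the mask.  */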
7949 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7950 when one of the new casts will fold away. Conservatively we assume
7951 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7952 if (POINTER_TYPE_P (type)
7953 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7954 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7955 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7956 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7957 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7959 tree arg00 = TREE_OPERAND (arg0, 0);
7960 tree arg01 = TREE_OPERAND (arg0, 1);
7962 return fold_build_pointer_plus_loc
7963 (loc, fold_convert_loc (loc, type, arg00), arg01);
7966 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7967 of the same precision, and X is of an integer type not narrower than
7968 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7969 if (INTEGRAL_TYPE_P (type)
7970 && TREE_CODE (op0) == BIT_NOT_EXPR
7971 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7972 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7973 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7975 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7976 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7977 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7978 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7979 fold_convert_loc (loc, type, tem));
7982 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7983 type of X and Y (integer types only). */
7984 if (INTEGRAL_TYPE_P (type)
7985 && TREE_CODE (op0) == MULT_EXPR
7986 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7987 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7989 /* Be careful not to introduce new overflows. */
7990 tree mult_type;
7991 if (TYPE_OVERFLOW_WRAPS (type))
7992 mult_type = type;
7993 else
7994 mult_type = unsigned_type_for (type);
7996 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7998 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7999 fold_convert_loc (loc, mult_type,
8000 TREE_OPERAND (op0, 0)),
8001 fold_convert_loc (loc, mult_type,
8002 TREE_OPERAND (op0, 1)));
8003 return fold_convert_loc (loc, type, tem);
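/* Editor's note: for example, (short) (i * j) with 32-bit ints can be
   rewritten by the rule above as
   (short) ((unsigned short) i * (unsigned short) j): the narrower
   multiplication is performed in the unsigned type, so no new signed
   overflow is introduced.  */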
8007 tem = fold_convert_const (code, type, op0);
8008 return tem ? tem : NULL_TREE;
8010 case ADDR_SPACE_CONVERT_EXPR:
8011 if (integer_zerop (arg0))
8012 return fold_convert_const (code, type, arg0);
8013 return NULL_TREE;
8015 case FIXED_CONVERT_EXPR:
8016 tem = fold_convert_const (code, type, arg0);
8017 return tem ? tem : NULL_TREE;
8019 case VIEW_CONVERT_EXPR:
8020 if (TREE_TYPE (op0) == type)
8021 return op0;
8022 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8023 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8024 type, TREE_OPERAND (op0, 0));
8025 if (TREE_CODE (op0) == MEM_REF)
8026 return fold_build2_loc (loc, MEM_REF, type,
8027 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8029 /* For integral conversions with the same precision, or pointer
8030 conversions, use a NOP_EXPR instead. */
8031 if ((INTEGRAL_TYPE_P (type)
8032 || POINTER_TYPE_P (type))
8033 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8034 || POINTER_TYPE_P (TREE_TYPE (op0)))
8035 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8036 return fold_convert_loc (loc, type, op0);
8038 /* Strip inner integral conversions that do not change the precision. */
8039 if (CONVERT_EXPR_P (op0)
8040 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 || POINTER_TYPE_P (TREE_TYPE (op0)))
8042 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8043 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8044 && (TYPE_PRECISION (TREE_TYPE (op0))
8045 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8046 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8047 type, TREE_OPERAND (op0, 0));
8049 return fold_view_convert_expr (type, op0);
8051 case NEGATE_EXPR:
8052 tem = fold_negate_expr (loc, arg0);
8053 if (tem)
8054 return fold_convert_loc (loc, type, tem);
8055 return NULL_TREE;
8057 case ABS_EXPR:
8058 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8059 return fold_abs_const (arg0, type);
8060 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8061 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8062 /* Convert fabs((double)float) into (double)fabsf(float). */
8063 else if (TREE_CODE (arg0) == NOP_EXPR
8064 && TREE_CODE (type) == REAL_TYPE)
8066 tree targ0 = strip_float_extensions (arg0);
8067 if (targ0 != arg0)
8068 return fold_convert_loc (loc, type,
8069 fold_build1_loc (loc, ABS_EXPR,
8070 TREE_TYPE (targ0),
8071 targ0));
8073 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8074 else if (TREE_CODE (arg0) == ABS_EXPR)
8075 return arg0;
8076 else if (tree_expr_nonnegative_p (arg0))
8077 return arg0;
8079 /* Strip sign ops from argument. */
8080 if (TREE_CODE (type) == REAL_TYPE)
8082 tem = fold_strip_sign_ops (arg0);
8083 if (tem)
8084 return fold_build1_loc (loc, ABS_EXPR, type,
8085 fold_convert_loc (loc, type, tem));
8087 return NULL_TREE;
8089 case CONJ_EXPR:
8090 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8091 return fold_convert_loc (loc, type, arg0);
8092 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8094 tree itype = TREE_TYPE (type);
8095 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8096 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8097 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8098 negate_expr (ipart));
8100 if (TREE_CODE (arg0) == COMPLEX_CST)
8102 tree itype = TREE_TYPE (type);
8103 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8104 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8105 return build_complex (type, rpart, negate_expr (ipart));
8107 if (TREE_CODE (arg0) == CONJ_EXPR)
8108 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8109 return NULL_TREE;
8111 case BIT_NOT_EXPR:
8112 if (TREE_CODE (arg0) == INTEGER_CST)
8113 return fold_not_const (arg0, type);
8114 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8115 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8116 /* Convert ~ (-A) to A - 1. */
8117 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8118 return fold_build2_loc (loc, MINUS_EXPR, type,
8119 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8120 build_int_cst (type, 1));
8121 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8122 else if (INTEGRAL_TYPE_P (type)
8123 && ((TREE_CODE (arg0) == MINUS_EXPR
8124 && integer_onep (TREE_OPERAND (arg0, 1)))
8125 || (TREE_CODE (arg0) == PLUS_EXPR
8126 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8127 return fold_build1_loc (loc, NEGATE_EXPR, type,
8128 fold_convert_loc (loc, type,
8129 TREE_OPERAND (arg0, 0)));
8130 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8131 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8132 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8133 fold_convert_loc (loc, type,
8134 TREE_OPERAND (arg0, 0)))))
8135 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8136 fold_convert_loc (loc, type,
8137 TREE_OPERAND (arg0, 1)));
8138 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8139 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8140 fold_convert_loc (loc, type,
8141 TREE_OPERAND (arg0, 1)))))
8142 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8143 fold_convert_loc (loc, type,
8144 TREE_OPERAND (arg0, 0)), tem);
8145 /* Perform BIT_NOT_EXPR on each element individually. */
8146 else if (TREE_CODE (arg0) == VECTOR_CST)
8148 tree *elements;
8149 tree elem;
8150 unsigned count = VECTOR_CST_NELTS (arg0), i;
8152 elements = XALLOCAVEC (tree, count);
8153 for (i = 0; i < count; i++)
8155 elem = VECTOR_CST_ELT (arg0, i);
8156 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8157 if (elem == NULL_TREE)
8158 break;
8159 elements[i] = elem;
8161 if (i == count)
8162 return build_vector (type, elements);
8165 return NULL_TREE;
8167 case TRUTH_NOT_EXPR:
8168 /* The argument to invert_truthvalue must have Boolean type. */
8169 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8170 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8172 /* Note that the operand of this must be an int
8173 and its values must be 0 or 1.
8174 ("true" is a fixed value perhaps depending on the language,
8175 but we don't handle values other than 1 correctly yet.) */
8176 tem = fold_truth_not_expr (loc, arg0);
8177 if (!tem)
8178 return NULL_TREE;
8179 return fold_convert_loc (loc, type, tem);
8181 case REALPART_EXPR:
8182 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8183 return fold_convert_loc (loc, type, arg0);
8184 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8185 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8186 TREE_OPERAND (arg0, 1));
8187 if (TREE_CODE (arg0) == COMPLEX_CST)
8188 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8189 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8191 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8192 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8193 fold_build1_loc (loc, REALPART_EXPR, itype,
8194 TREE_OPERAND (arg0, 0)),
8195 fold_build1_loc (loc, REALPART_EXPR, itype,
8196 TREE_OPERAND (arg0, 1)));
8197 return fold_convert_loc (loc, type, tem);
8199 if (TREE_CODE (arg0) == CONJ_EXPR)
8201 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8202 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8203 TREE_OPERAND (arg0, 0));
8204 return fold_convert_loc (loc, type, tem);
8206 if (TREE_CODE (arg0) == CALL_EXPR)
8208 tree fn = get_callee_fndecl (arg0);
8209 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8210 switch (DECL_FUNCTION_CODE (fn))
8212 CASE_FLT_FN (BUILT_IN_CEXPI):
8213 fn = mathfn_built_in (type, BUILT_IN_COS);
8214 if (fn)
8215 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8216 break;
8218 default:
8219 break;
8222 return NULL_TREE;
8224 case IMAGPART_EXPR:
8225 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8226 return build_zero_cst (type);
8227 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8228 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8229 TREE_OPERAND (arg0, 0));
8230 if (TREE_CODE (arg0) == COMPLEX_CST)
8231 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8232 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8234 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8235 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8236 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8237 TREE_OPERAND (arg0, 0)),
8238 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8239 TREE_OPERAND (arg0, 1)));
8240 return fold_convert_loc (loc, type, tem);
8242 if (TREE_CODE (arg0) == CONJ_EXPR)
8244 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8245 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8246 return fold_convert_loc (loc, type, negate_expr (tem));
8248 if (TREE_CODE (arg0) == CALL_EXPR)
8250 tree fn = get_callee_fndecl (arg0);
8251 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8252 switch (DECL_FUNCTION_CODE (fn))
8254 CASE_FLT_FN (BUILT_IN_CEXPI):
8255 fn = mathfn_built_in (type, BUILT_IN_SIN);
8256 if (fn)
8257 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8258 break;
8260 default:
8261 break;
8264 return NULL_TREE;
8266 case INDIRECT_REF:
8267 /* Fold *&X to X if X is an lvalue. */
8268 if (TREE_CODE (op0) == ADDR_EXPR)
8270 tree op00 = TREE_OPERAND (op0, 0);
8271 if ((TREE_CODE (op00) == VAR_DECL
8272 || TREE_CODE (op00) == PARM_DECL
8273 || TREE_CODE (op00) == RESULT_DECL)
8274 && !TREE_READONLY (op00))
8275 return op00;
8277 return NULL_TREE;
8279 case VEC_UNPACK_LO_EXPR:
8280 case VEC_UNPACK_HI_EXPR:
8281 case VEC_UNPACK_FLOAT_LO_EXPR:
8282 case VEC_UNPACK_FLOAT_HI_EXPR:
8284 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8285 tree *elts;
8286 enum tree_code subcode;
8288 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8289 if (TREE_CODE (arg0) != VECTOR_CST)
8290 return NULL_TREE;
8292 elts = XALLOCAVEC (tree, nelts * 2);
8293 if (!vec_cst_ctor_to_array (arg0, elts))
8294 return NULL_TREE;
8296 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8297 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8298 elts += nelts;
8300 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8301 subcode = NOP_EXPR;
8302 else
8303 subcode = FLOAT_EXPR;
8305 for (i = 0; i < nelts; i++)
8307 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8308 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8309 return NULL_TREE;
8312 return build_vector (type, elts);
8315 case REDUC_MIN_EXPR:
8316 case REDUC_MAX_EXPR:
8317 case REDUC_PLUS_EXPR:
8319 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8320 tree *elts;
8321 enum tree_code subcode;
8323 if (TREE_CODE (op0) != VECTOR_CST)
8324 return NULL_TREE;
8326 elts = XALLOCAVEC (tree, nelts);
8327 if (!vec_cst_ctor_to_array (op0, elts))
8328 return NULL_TREE;
8330 switch (code)
8332 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8333 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8334 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8335 default: gcc_unreachable ();
8338 for (i = 1; i < nelts; i++)
8340 elts[0] = const_binop (subcode, elts[0], elts[i]);
8341 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8342 return NULL_TREE;
8343 elts[i] = build_zero_cst (TREE_TYPE (type));
8346 return build_vector (type, elts);
8349 default:
8350 return NULL_TREE;
8351 } /* switch (code) */
8355 /* If the operation was a conversion, do _not_ mark a resulting constant
8356 with TREE_OVERFLOW if the original constant was not. These conversions
8357 have implementation-defined behavior and retaining the TREE_OVERFLOW
8358 flag here would confuse later passes such as VRP. */
8359 tree
8360 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8361 tree type, tree op0)
8363 tree res = fold_unary_loc (loc, code, type, op0);
8364 if (res
8365 && TREE_CODE (res) == INTEGER_CST
8366 && TREE_CODE (op0) == INTEGER_CST
8367 && CONVERT_EXPR_CODE_P (code))
8368 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8370 return res;
8373 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8374 operands OP0 and OP1. LOC is the location of the resulting expression.
8375 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8376 Return the folded expression if folding is successful. Otherwise,
8377 return NULL_TREE. */
8378 static tree
8379 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8380 tree arg0, tree arg1, tree op0, tree op1)
8382 tree tem;
8384 /* We only do these simplifications if we are optimizing. */
8385 if (!optimize)
8386 return NULL_TREE;
8388 /* Check for things like (A || B) && (A || C). We can convert this
8389 to A || (B && C). Note that either operator can be any of the four
8390 truth and/or operations and the transformation will still be
8391 valid. Also note that we only care about order for the
8392 ANDIF and ORIF operators. If B contains side effects, this
8393 might change the truth-value of A. */
8394 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8395 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8396 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8397 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8398 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8399 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8401 tree a00 = TREE_OPERAND (arg0, 0);
8402 tree a01 = TREE_OPERAND (arg0, 1);
8403 tree a10 = TREE_OPERAND (arg1, 0);
8404 tree a11 = TREE_OPERAND (arg1, 1);
8405 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8406 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8407 && (code == TRUTH_AND_EXPR
8408 || code == TRUTH_OR_EXPR));
8410 if (operand_equal_p (a00, a10, 0))
8411 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8412 fold_build2_loc (loc, code, type, a01, a11));
8413 else if (commutative && operand_equal_p (a00, a11, 0))
8414 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8415 fold_build2_loc (loc, code, type, a01, a10));
8416 else if (commutative && operand_equal_p (a01, a10, 0))
8417 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8418 fold_build2_loc (loc, code, type, a00, a11));
8420 /* This case is tricky because we must either have commutative
8421 operators or else A10 must not have side-effects. */
8423 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8424 && operand_equal_p (a01, a11, 0))
8425 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8426 fold_build2_loc (loc, code, type, a00, a10),
8427 a01);
8430 /* See if we can build a range comparison. */
8431 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8432 return tem;
8434 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8435 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8437 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8438 if (tem)
8439 return fold_build2_loc (loc, code, type, tem, arg1);
8442 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8443 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8445 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8446 if (tem)
8447 return fold_build2_loc (loc, code, type, arg0, tem);
8450 /* Check for the possibility of merging component references. If our
8451 lhs is another similar operation, try to merge its rhs with our
8452 rhs. Then try to merge our lhs and rhs. */
8453 if (TREE_CODE (arg0) == code
8454 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8455 TREE_OPERAND (arg0, 1), arg1)))
8456 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8458 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8459 return tem;
8461 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8462 && (code == TRUTH_AND_EXPR
8463 || code == TRUTH_ANDIF_EXPR
8464 || code == TRUTH_OR_EXPR
8465 || code == TRUTH_ORIF_EXPR))
8467 enum tree_code ncode, icode;
8469 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8470 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8471 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8473 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8474 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8475 We don't want to pack more than two leaves into a non-IF AND/OR
8476 expression.
8477 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8478 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8479 If the inner right-hand side of the left-hand operand has
8480 side-effects, or isn't simple, then we can't add to it,
8481 as otherwise we might destroy the if-sequence. */
8482 if (TREE_CODE (arg0) == icode
8483 && simple_operand_p_2 (arg1)
8484 /* Needed for sequence points, to handle trapping
8485 operations and side effects. */
8486 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8488 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8489 arg1);
8490 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8491 tem);
8493 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8494 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8495 else if (TREE_CODE (arg1) == icode
8496 && simple_operand_p_2 (arg0)
8497 /* Needed for sequence points, to handle trapping
8498 operations and side effects. */
8499 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8501 tem = fold_build2_loc (loc, ncode, type,
8502 arg0, TREE_OPERAND (arg1, 0));
8503 return fold_build2_loc (loc, icode, type, tem,
8504 TREE_OPERAND (arg1, 1));
8506 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8507 into (A OR B).
8508 For sequence point consistency, we need to check for trapping
8509 operations and side effects. */
8510 else if (code == icode && simple_operand_p_2 (arg0)
8511 && simple_operand_p_2 (arg1))
8512 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8515 return NULL_TREE;
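/* Editor's illustrative sketch (not part of GCC): the factoring near
   the top of fold_truth_andor relies on the boolean identity
   (a || b) && (a || c) == a || (b && c); a host-side exhaustive check
   over all eight input combinations:  */

static int
truth_factoring_holds (void)
{
  int a, b, c;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
        if (((a || b) && (a || c)) != (a || (b && c)))
          return 0;  /* Identity violated (never happens).  */
  return 1;          /* Identity holds for all inputs.  */
}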
8518 /* Fold a binary expression of code CODE and type TYPE with operands
8519 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8520 Return the folded expression if folding is successful. Otherwise,
8521 return NULL_TREE. */
8523 static tree
8524 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8526 enum tree_code compl_code;
8528 if (code == MIN_EXPR)
8529 compl_code = MAX_EXPR;
8530 else if (code == MAX_EXPR)
8531 compl_code = MIN_EXPR;
8532 else
8533 gcc_unreachable ();
8535 /* MIN (MAX (a, b), b) == b. */
8536 if (TREE_CODE (op0) == compl_code
8537 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8538 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8540 /* MIN (MAX (b, a), b) == b. */
8541 if (TREE_CODE (op0) == compl_code
8542 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8543 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8544 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8546 /* MIN (a, MAX (a, b)) == a. */
8547 if (TREE_CODE (op1) == compl_code
8548 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8549 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8550 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8552 /* MIN (a, MAX (b, a)) == a. */
8553 if (TREE_CODE (op1) == compl_code
8554 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8555 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8556 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8558 return NULL_TREE;
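/* Editor's note: the identities above follow from MAX (a, b) >= b and
   MIN (a, b) <= a; e.g. MIN (MAX (3, 5), 5) == MIN (5, 5) == 5 and
   MIN (MAX (7, 5), 5) == MIN (7, 5) == 5, so MIN (MAX (a, b), b) is
   always b.  */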
8561 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8562 by changing CODE to reduce the magnitude of constants involved in
8563 ARG0 of the comparison.
8564 Returns a canonicalized comparison tree if a simplification was
8565 possible, otherwise returns NULL_TREE.
8566 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8567 valid if signed overflow is undefined. */
8569 static tree
8570 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8571 tree arg0, tree arg1,
8572 bool *strict_overflow_p)
8574 enum tree_code code0 = TREE_CODE (arg0);
8575 tree t, cst0 = NULL_TREE;
8576 int sgn0;
8577 bool swap = false;
8579 /* Match A +- CST code arg1 and CST code arg1. We can change the
8580 first form only if overflow is undefined. */
8581 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8582 /* In principle pointers also have undefined overflow behavior,
8583 but that causes problems elsewhere. */
8584 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8585 && (code0 == MINUS_EXPR
8586 || code0 == PLUS_EXPR)
8587 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8588 || code0 == INTEGER_CST))
8589 return NULL_TREE;
8591 /* Identify the constant in arg0 and its sign. */
8592 if (code0 == INTEGER_CST)
8593 cst0 = arg0;
8594 else
8595 cst0 = TREE_OPERAND (arg0, 1);
8596 sgn0 = tree_int_cst_sgn (cst0);
8598 /* Overflowed constants and zero will cause problems. */
8599 if (integer_zerop (cst0)
8600 || TREE_OVERFLOW (cst0))
8601 return NULL_TREE;
8603 /* See if we can reduce the magnitude of the constant in
8604 arg0 by changing the comparison code. */
8605 if (code0 == INTEGER_CST)
8607 /* CST <= arg1 -> CST-1 < arg1. */
8608 if (code == LE_EXPR && sgn0 == 1)
8609 code = LT_EXPR;
8610 /* -CST < arg1 -> -CST-1 <= arg1. */
8611 else if (code == LT_EXPR && sgn0 == -1)
8612 code = LE_EXPR;
8613 /* CST > arg1 -> CST-1 >= arg1. */
8614 else if (code == GT_EXPR && sgn0 == 1)
8615 code = GE_EXPR;
8616 /* -CST >= arg1 -> -CST-1 > arg1. */
8617 else if (code == GE_EXPR && sgn0 == -1)
8618 code = GT_EXPR;
8619 else
8620 return NULL_TREE;
8621 /* arg1 code' CST' might be more canonical. */
8622 swap = true;
8624 else
8626 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8627 if (code == LT_EXPR
8628 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8629 code = LE_EXPR;
8630 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8631 else if (code == GT_EXPR
8632 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8633 code = GE_EXPR;
8634 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8635 else if (code == LE_EXPR
8636 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8637 code = LT_EXPR;
8638 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8639 else if (code == GE_EXPR
8640 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8641 code = GT_EXPR;
8642 else
8643 return NULL_TREE;
8644 *strict_overflow_p = true;
8647 /* Now build the constant reduced in magnitude. But not if that
8648 would produce one outside of its type's range. */
8649 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8650 && ((sgn0 == 1
8651 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8652 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8653 || (sgn0 == -1
8654 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8655 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8656 /* We cannot swap the comparison here as that would cause us to
8657 endlessly recurse. */
8658 return NULL_TREE;
8660 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8661 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8662 if (code0 != INTEGER_CST)
8663 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8664 t = fold_convert (TREE_TYPE (arg1), t);
8666 /* If swapping might yield a more canonical form, do so. */
8667 if (swap)
8668 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8669 else
8670 return fold_build2_loc (loc, code, type, t, arg1);
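/* Editor's note: two concrete instances of the canonicalization above:
   3 <= y becomes y > 2 (constant case, with the operands swapped), and
   x - 2 < y becomes x - 1 <= y when signed overflow is undefined, each
   step reducing the constant's magnitude by one.  */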
8673 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8674 overflow further. Try to decrease the magnitude of constants involved
8675 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8676 and put sole constants at the second argument position.
8677 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8679 static tree
8680 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8681 tree arg0, tree arg1)
8683 tree t;
8684 bool strict_overflow_p;
8685 const char * const warnmsg = G_("assuming signed overflow does not occur "
8686 "when reducing constant in comparison");
8688 /* Try canonicalization by simplifying arg0. */
8689 strict_overflow_p = false;
8690 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8691 &strict_overflow_p);
8692 if (t)
8694 if (strict_overflow_p)
8695 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8696 return t;
8699 /* Try canonicalization by simplifying arg1 using the swapped
8700 comparison. */
8701 code = swap_tree_comparison (code);
8702 strict_overflow_p = false;
8703 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8704 &strict_overflow_p);
8705 if (t && strict_overflow_p)
8706 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8707 return t;
8710 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8711 space. This is used to avoid issuing overflow warnings for
8712 expressions like &p->x, which cannot wrap. */
8714 static bool
8715 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8717 double_int di_offset, total;
8719 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8720 return true;
8722 if (bitpos < 0)
8723 return true;
8725 if (offset == NULL_TREE)
8726 di_offset = double_int_zero;
8727 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8728 return true;
8729 else
8730 di_offset = TREE_INT_CST (offset);
8732 bool overflow;
8733 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8734 total = di_offset.add_with_sign (units, true, &overflow);
8735 if (overflow)
8736 return true;
8738 if (total.high != 0)
8739 return true;
8741 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8742 if (size <= 0)
8743 return true;
8745 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8746 array. */
8747 if (TREE_CODE (base) == ADDR_EXPR)
8749 HOST_WIDE_INT base_size;
8751 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8752 if (base_size > 0 && size < base_size)
8753 size = base_size;
8756 return total.low > (unsigned HOST_WIDE_INT) size;
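/* Editor's note: in effect the function above answers whether

     offset + bitpos / BITS_PER_UNIT > size-of-object

   can hold, computing the sum in unsigned double-width arithmetic so
   that an overflowing addition is itself reported as wrapping.  */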
8759 /* Subroutine of fold_binary. This routine performs all of the
8760 transformations that are common to the equality/inequality
8761 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8762 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8763 fold_binary itself should call fold_binary instead. Fold a comparison with
8764 tree code CODE and type TYPE with operands OP0 and OP1. Return
8765 the folded comparison or NULL_TREE. */
8767 static tree
8768 fold_comparison (location_t loc, enum tree_code code, tree type,
8769 tree op0, tree op1)
8771 tree arg0, arg1, tem;
8773 arg0 = op0;
8774 arg1 = op1;
8776 STRIP_SIGN_NOPS (arg0);
8777 STRIP_SIGN_NOPS (arg1);
8779 tem = fold_relational_const (code, type, arg0, arg1);
8780 if (tem != NULL_TREE)
8781 return tem;
8783 /* If one arg is a real or integer constant, put it last. */
8784 if (tree_swap_operands_p (arg0, arg1, true))
8785 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8787 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8788 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8789 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8790 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8791 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8792 && (TREE_CODE (arg1) == INTEGER_CST
8793 && !TREE_OVERFLOW (arg1)))
8795 tree const1 = TREE_OPERAND (arg0, 1);
8796 tree const2 = arg1;
8797 tree variable = TREE_OPERAND (arg0, 0);
8798 tree lhs;
8799 int lhs_add;
8800 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8802 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8803 TREE_TYPE (arg1), const2, const1);
8805 /* If the constant operation overflowed, this can be
8806 simplified as a comparison against INT_MAX/INT_MIN. */
8807 if (TREE_CODE (lhs) == INTEGER_CST
8808 && TREE_OVERFLOW (lhs))
8810 int const1_sgn = tree_int_cst_sgn (const1);
8811 enum tree_code code2 = code;
8813 /* Get the sign of the constant on the lhs if the
8814 operation were VARIABLE + CONST1. */
8815 if (TREE_CODE (arg0) == MINUS_EXPR)
8816 const1_sgn = -const1_sgn;
8818 /* The sign of the constant determines if we overflowed
8819 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8820 Canonicalize to the INT_MIN overflow by swapping the comparison
8821 if necessary. */
8822 if (const1_sgn == -1)
8823 code2 = swap_tree_comparison (code);
8825 /* We now can look at the canonicalized case
8826 VARIABLE + 1 CODE2 INT_MIN
8827 and decide on the result. */
8828 if (code2 == LT_EXPR
8829 || code2 == LE_EXPR
8830 || code2 == EQ_EXPR)
8831 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8832 else if (code2 == NE_EXPR
8833 || code2 == GE_EXPR
8834 || code2 == GT_EXPR)
8835 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8838 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8839 && (TREE_CODE (lhs) != INTEGER_CST
8840 || !TREE_OVERFLOW (lhs)))
8842 if (code != EQ_EXPR && code != NE_EXPR)
8843 fold_overflow_warning ("assuming signed overflow does not occur "
8844 "when changing X +- C1 cmp C2 to "
8845 "X cmp C1 +- C2",
8846 WARN_STRICT_OVERFLOW_COMPARISON);
8847 return fold_build2_loc (loc, code, type, variable, lhs);
8851 /* For comparisons of pointers we can decompose them into a compile-time
8852 comparison of the base objects and the offsets into the object.
8853 This requires at least one operand being an ADDR_EXPR or a
8854 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8855 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8856 && (TREE_CODE (arg0) == ADDR_EXPR
8857 || TREE_CODE (arg1) == ADDR_EXPR
8858 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8859 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8861 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8862 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8863 enum machine_mode mode;
8864 int volatilep, unsignedp;
8865 bool indirect_base0 = false, indirect_base1 = false;
8867 /* Get base and offset for the access. Strip ADDR_EXPR for
8868 get_inner_reference, but put it back by stripping INDIRECT_REF
8869 off the base object if possible. indirect_baseN will be true
8870 if baseN is not an address but refers to the object itself. */
8871 base0 = arg0;
8872 if (TREE_CODE (arg0) == ADDR_EXPR)
8874 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8875 &bitsize, &bitpos0, &offset0, &mode,
8876 &unsignedp, &volatilep, false);
8877 if (TREE_CODE (base0) == INDIRECT_REF)
8878 base0 = TREE_OPERAND (base0, 0);
8879 else
8880 indirect_base0 = true;
8882 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8884 base0 = TREE_OPERAND (arg0, 0);
8885 STRIP_SIGN_NOPS (base0);
8886 if (TREE_CODE (base0) == ADDR_EXPR)
8888 base0 = TREE_OPERAND (base0, 0);
8889 indirect_base0 = true;
8891 offset0 = TREE_OPERAND (arg0, 1);
8892 if (host_integerp (offset0, 0))
8894 HOST_WIDE_INT off = size_low_cst (offset0);
8895 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8896 * BITS_PER_UNIT)
8897 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8899 bitpos0 = off * BITS_PER_UNIT;
8900 offset0 = NULL_TREE;
8905 base1 = arg1;
8906 if (TREE_CODE (arg1) == ADDR_EXPR)
8908 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8909 &bitsize, &bitpos1, &offset1, &mode,
8910 &unsignedp, &volatilep, false);
8911 if (TREE_CODE (base1) == INDIRECT_REF)
8912 base1 = TREE_OPERAND (base1, 0);
8913 else
8914 indirect_base1 = true;
8916 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8918 base1 = TREE_OPERAND (arg1, 0);
8919 STRIP_SIGN_NOPS (base1);
8920 if (TREE_CODE (base1) == ADDR_EXPR)
8922 base1 = TREE_OPERAND (base1, 0);
8923 indirect_base1 = true;
8925 offset1 = TREE_OPERAND (arg1, 1);
8926 if (host_integerp (offset1, 0))
8928 HOST_WIDE_INT off = size_low_cst (offset1);
8929 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8930 * BITS_PER_UNIT)
8931 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8933 bitpos1 = off * BITS_PER_UNIT;
8934 offset1 = NULL_TREE;
8939 /* A local variable can never be pointed to by
8940 the default SSA name of an incoming parameter. */
8941 if ((TREE_CODE (arg0) == ADDR_EXPR
8942 && indirect_base0
8943 && TREE_CODE (base0) == VAR_DECL
8944 && auto_var_in_fn_p (base0, current_function_decl)
8945 && !indirect_base1
8946 && TREE_CODE (base1) == SSA_NAME
8947 && SSA_NAME_IS_DEFAULT_DEF (base1)
8948 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8949 || (TREE_CODE (arg1) == ADDR_EXPR
8950 && indirect_base1
8951 && TREE_CODE (base1) == VAR_DECL
8952 && auto_var_in_fn_p (base1, current_function_decl)
8953 && !indirect_base0
8954 && TREE_CODE (base0) == SSA_NAME
8955 && SSA_NAME_IS_DEFAULT_DEF (base0)
8956 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8958 if (code == NE_EXPR)
8959 return constant_boolean_node (1, type);
8960 else if (code == EQ_EXPR)
8961 return constant_boolean_node (0, type);
8963 /* If we have equivalent bases we might be able to simplify. */
8964 else if (indirect_base0 == indirect_base1
8965 && operand_equal_p (base0, base1, 0))
8967 /* We can fold this expression to a constant if the non-constant
8968 offset parts are equal. */
8969 if ((offset0 == offset1
8970 || (offset0 && offset1
8971 && operand_equal_p (offset0, offset1, 0)))
8972 && (code == EQ_EXPR
8973 || code == NE_EXPR
8974 || (indirect_base0 && DECL_P (base0))
8975 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8978 if (code != EQ_EXPR
8979 && code != NE_EXPR
8980 && bitpos0 != bitpos1
8981 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8982 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8983 fold_overflow_warning (("assuming pointer wraparound does not "
8984 "occur when comparing P +- C1 with "
8985 "P +- C2"),
8986 WARN_STRICT_OVERFLOW_CONDITIONAL);
8988 switch (code)
8990 case EQ_EXPR:
8991 return constant_boolean_node (bitpos0 == bitpos1, type);
8992 case NE_EXPR:
8993 return constant_boolean_node (bitpos0 != bitpos1, type);
8994 case LT_EXPR:
8995 return constant_boolean_node (bitpos0 < bitpos1, type);
8996 case LE_EXPR:
8997 return constant_boolean_node (bitpos0 <= bitpos1, type);
8998 case GE_EXPR:
8999 return constant_boolean_node (bitpos0 >= bitpos1, type);
9000 case GT_EXPR:
9001 return constant_boolean_node (bitpos0 > bitpos1, type);
9002 default:;
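/* A hypothetical example: with "struct S { int a; int b; } s;" and
   32-bit int, "&s.a != &s.b" reaches this switch with equal bases,
   bitpos0 == 0 and bitpos1 == 32, so the NE_EXPR case folds the
   whole comparison to constant true. */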
9005 /* We can simplify the comparison to a comparison of the variable
9006 offset parts if the constant offset parts are equal.
9007 Be careful to use signed sizetype here because otherwise we
9008 mess with array offsets in the wrong way. This is possible
9009 because pointer arithmetic is restricted to remain within an
9010 object and overflow on pointer differences is undefined as of
9011 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9012 else if (bitpos0 == bitpos1
9013 && ((code == EQ_EXPR || code == NE_EXPR)
9014 || (indirect_base0 && DECL_P (base0))
9015 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9017 /* By converting to signed sizetype we cover middle-end pointer
9018 arithmetic which operates on unsigned pointer types of size
9019 type size and ARRAY_REF offsets which are properly sign or
9020 zero extended from their type in case it is narrower than
9021 sizetype. */
9022 if (offset0 == NULL_TREE)
9023 offset0 = build_int_cst (ssizetype, 0);
9024 else
9025 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9026 if (offset1 == NULL_TREE)
9027 offset1 = build_int_cst (ssizetype, 0);
9028 else
9029 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9031 if (code != EQ_EXPR
9032 && code != NE_EXPR
9033 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9034 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9035 fold_overflow_warning (("assuming pointer wraparound does not "
9036 "occur when comparing P +- C1 with "
9037 "P +- C2"),
9038 WARN_STRICT_OVERFLOW_COMPARISON);
9040 return fold_build2_loc (loc, code, type, offset0, offset1);
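/* For instance (a sketch): comparing "p + i" against "p + j" for the
   same pointer p arrives here with equal bases and bit positions, so
   "p + i < p + j" folds to the signed offset comparison
   "(ssizetype) i < (ssizetype) j", assuming an EQ/NE code or that
   pointer overflow is undefined. */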
9043 /* For non-equal bases we can simplify if they are addresses
9044 of local binding decls or constants. */
9045 else if (indirect_base0 && indirect_base1
9046 /* We know that !operand_equal_p (base0, base1, 0)
9047 because the if condition was false. But make
9048 sure the two decls are not the same. */
9049 && base0 != base1
9050 && TREE_CODE (arg0) == ADDR_EXPR
9051 && TREE_CODE (arg1) == ADDR_EXPR
9052 && (((TREE_CODE (base0) == VAR_DECL
9053 || TREE_CODE (base0) == PARM_DECL)
9054 && (targetm.binds_local_p (base0)
9055 || CONSTANT_CLASS_P (base1)))
9056 || CONSTANT_CLASS_P (base0))
9057 && (((TREE_CODE (base1) == VAR_DECL
9058 || TREE_CODE (base1) == PARM_DECL)
9059 && (targetm.binds_local_p (base1)
9060 || CONSTANT_CLASS_P (base0)))
9061 || CONSTANT_CLASS_P (base1)))
9063 if (code == EQ_EXPR)
9064 return omit_two_operands_loc (loc, type, boolean_false_node,
9065 arg0, arg1);
9066 else if (code == NE_EXPR)
9067 return omit_two_operands_loc (loc, type, boolean_true_node,
9068 arg0, arg1);
9070 /* For equal offsets we can simplify to a comparison of the
9071 base addresses. */
9072 else if (bitpos0 == bitpos1
9073 && (indirect_base0
9074 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9075 && (indirect_base1
9076 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9077 && ((offset0 == offset1)
9078 || (offset0 && offset1
9079 && operand_equal_p (offset0, offset1, 0))))
9081 if (indirect_base0)
9082 base0 = build_fold_addr_expr_loc (loc, base0);
9083 if (indirect_base1)
9084 base1 = build_fold_addr_expr_loc (loc, base1);
9085 return fold_build2_loc (loc, code, type, base0, base1);
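/* For example (editor's sketch): "&p->f == &q->f" for two pointers
   to the same struct type has equal bit positions and no variable
   offsets, so it folds to the base comparison "p == q". */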
9089 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9090 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9091 the resulting offset is smaller in absolute value than the
9092 original one. */
9093 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9094 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9095 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9096 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9097 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9098 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9099 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9101 tree const1 = TREE_OPERAND (arg0, 1);
9102 tree const2 = TREE_OPERAND (arg1, 1);
9103 tree variable1 = TREE_OPERAND (arg0, 0);
9104 tree variable2 = TREE_OPERAND (arg1, 0);
9105 tree cst;
9106 const char * const warnmsg = G_("assuming signed overflow does not "
9107 "occur when combining constants around "
9108 "a comparison");
9110 /* Put the constant on the side where it doesn't overflow and is
9111 of lower absolute value than before. */
9112 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9113 ? MINUS_EXPR : PLUS_EXPR,
9114 const2, const1);
9115 if (!TREE_OVERFLOW (cst)
9116 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9118 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9119 return fold_build2_loc (loc, code, type,
9120 variable1,
9121 fold_build2_loc (loc,
9122 TREE_CODE (arg1), TREE_TYPE (arg1),
9123 variable2, cst));
9126 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9127 ? MINUS_EXPR : PLUS_EXPR,
9128 const1, const2);
9129 if (!TREE_OVERFLOW (cst)
9130 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9132 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9133 return fold_build2_loc (loc, code, type,
9134 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9135 variable1, cst),
9136 variable2);
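/* A worked case (editor's sketch): for signed x and y,
   "x + 7 < y + 9" computes cst = 9 - 7 = 2, which is smaller in
   absolute value than 9, so the first branch above rewrites the
   comparison to "x < y + 2". */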
9140 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9141 signed arithmetic case. That form is created by the compiler
9142 often enough for folding it to be of value. One example is in
9143 computing loop trip counts after Operator Strength Reduction. */
9144 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9145 && TREE_CODE (arg0) == MULT_EXPR
9146 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9147 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9148 && integer_zerop (arg1))
9150 tree const1 = TREE_OPERAND (arg0, 1);
9151 tree const2 = arg1; /* zero */
9152 tree variable1 = TREE_OPERAND (arg0, 0);
9153 enum tree_code cmp_code = code;
9155 /* Handle unfolded multiplication by zero. */
9156 if (integer_zerop (const1))
9157 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9159 fold_overflow_warning (("assuming signed overflow does not occur when "
9160 "eliminating multiplication in comparison "
9161 "with zero"),
9162 WARN_STRICT_OVERFLOW_COMPARISON);
9164 /* If const1 is negative we swap the sense of the comparison. */
9165 if (tree_int_cst_sgn (const1) < 0)
9166 cmp_code = swap_tree_comparison (cmp_code);
9168 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
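/* E.g. (an illustration): with undefined signed overflow,
   "x * 4 < 0" folds to "x < 0", while "x * -4 < 0" becomes "x > 0"
   because the negative multiplier swaps the comparison. */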
9171 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9172 if (tem)
9173 return tem;
9175 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9177 tree targ0 = strip_float_extensions (arg0);
9178 tree targ1 = strip_float_extensions (arg1);
9179 tree newtype = TREE_TYPE (targ0);
9181 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9182 newtype = TREE_TYPE (targ1);
9184 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9185 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9186 return fold_build2_loc (loc, code, type,
9187 fold_convert_loc (loc, newtype, targ0),
9188 fold_convert_loc (loc, newtype, targ1));
9190 /* (-a) CMP (-b) -> b CMP a */
9191 if (TREE_CODE (arg0) == NEGATE_EXPR
9192 && TREE_CODE (arg1) == NEGATE_EXPR)
9193 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9194 TREE_OPERAND (arg0, 0));
9196 if (TREE_CODE (arg1) == REAL_CST)
9198 REAL_VALUE_TYPE cst;
9199 cst = TREE_REAL_CST (arg1);
9201 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9202 if (TREE_CODE (arg0) == NEGATE_EXPR)
9203 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9204 TREE_OPERAND (arg0, 0),
9205 build_real (TREE_TYPE (arg1),
9206 real_value_negate (&cst)));
9208 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9209 /* a CMP (-0) -> a CMP 0 */
9210 if (REAL_VALUE_MINUS_ZERO (cst))
9211 return fold_build2_loc (loc, code, type, arg0,
9212 build_real (TREE_TYPE (arg1), dconst0));
9214 /* x != NaN is always true, other ops are always false. */
9215 if (REAL_VALUE_ISNAN (cst)
9216 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9218 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9219 return omit_one_operand_loc (loc, type, tem, arg0);
9222 /* Fold comparisons against infinity. */
9223 if (REAL_VALUE_ISINF (cst)
9224 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9226 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9227 if (tem != NULL_TREE)
9228 return tem;
9232 /* If this is a comparison of a real constant with a PLUS_EXPR
9233 or a MINUS_EXPR of a real constant, we can convert it into a
9234 comparison with a revised real constant as long as no overflow
9235 occurs when unsafe_math_optimizations are enabled. */
9236 if (flag_unsafe_math_optimizations
9237 && TREE_CODE (arg1) == REAL_CST
9238 && (TREE_CODE (arg0) == PLUS_EXPR
9239 || TREE_CODE (arg0) == MINUS_EXPR)
9240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9241 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9242 ? MINUS_EXPR : PLUS_EXPR,
9243 arg1, TREE_OPERAND (arg0, 1)))
9244 && !TREE_OVERFLOW (tem))
9245 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9247 /* Likewise, we can simplify a comparison of a real constant with
9248 a MINUS_EXPR whose first operand is also a real constant, i.e.
9249 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9250 floating-point types only if -fassociative-math is set. */
9251 if (flag_associative_math
9252 && TREE_CODE (arg1) == REAL_CST
9253 && TREE_CODE (arg0) == MINUS_EXPR
9254 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9255 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9256 arg1))
9257 && !TREE_OVERFLOW (tem))
9258 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9259 TREE_OPERAND (arg0, 1), tem);
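/* For example (a sketch assuming -fassociative-math):
   "10.0 - x < 4.0" computes tem = 10.0 - 4.0 = 6.0 and folds to the
   swapped comparison "x > 6.0". */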
9261 /* Fold comparisons against built-in math functions. */
9262 if (TREE_CODE (arg1) == REAL_CST
9263 && flag_unsafe_math_optimizations
9264 && ! flag_errno_math)
9266 enum built_in_function fcode = builtin_mathfn_code (arg0);
9268 if (fcode != END_BUILTINS)
9270 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9271 if (tem != NULL_TREE)
9272 return tem;
9277 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9278 && CONVERT_EXPR_P (arg0))
9280 /* If we are widening one operand of an integer comparison,
9281 see if the other operand is similarly being widened. Perhaps we
9282 can do the comparison in the narrower type. */
9283 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9284 if (tem)
9285 return tem;
9287 /* Or if we are changing signedness. */
9288 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9289 if (tem)
9290 return tem;
9293 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9294 constant, we can simplify it. */
9295 if (TREE_CODE (arg1) == INTEGER_CST
9296 && (TREE_CODE (arg0) == MIN_EXPR
9297 || TREE_CODE (arg0) == MAX_EXPR)
9298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9300 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9301 if (tem)
9302 return tem;
9305 /* Simplify comparison of something with itself. (For IEEE
9306 floating-point, we can only do some of these simplifications.) */
9307 if (operand_equal_p (arg0, arg1, 0))
9309 switch (code)
9311 case EQ_EXPR:
9312 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9313 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9314 return constant_boolean_node (1, type);
9315 break;
9317 case GE_EXPR:
9318 case LE_EXPR:
9319 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9320 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9321 return constant_boolean_node (1, type);
9322 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9324 case NE_EXPR:
9325 /* For NE, we can only do this simplification if the operands are
9326 integer or we don't honor IEEE floating-point NaNs. */
9327 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9328 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9329 break;
9330 /* ... fall through ... */
9331 case GT_EXPR:
9332 case LT_EXPR:
9333 return constant_boolean_node (0, type);
9334 default:
9335 gcc_unreachable ();
9339 /* If we are comparing an expression that just has comparisons
9340 of two integer values, arithmetic expressions of those comparisons,
9341 and constants, we can simplify it. There are only three cases
9342 to check: the two values can either be equal, the first can be
9343 greater, or the second can be greater. Fold the expression for
9344 those three values. Since each value must be 0 or 1, we have
9345 eight possibilities, each of which corresponds to the constant 0
9346 or 1 or one of the six possible comparisons.
9348 This handles common cases like (a > b) == 0 but also handles
9349 expressions like ((x > y) - (y > x)) > 0, which supposedly
9350 occur in macroized code. */
9352 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9354 tree cval1 = 0, cval2 = 0;
9355 int save_p = 0;
9357 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9358 /* Don't handle degenerate cases here; they should already
9359 have been handled anyway. */
9360 && cval1 != 0 && cval2 != 0
9361 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9362 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9363 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9364 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9365 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9366 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9367 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9369 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9370 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9372 /* We can't just pass T to eval_subst in case cval1 or cval2
9373 was the same as ARG1. */
9375 tree high_result
9376 = fold_build2_loc (loc, code, type,
9377 eval_subst (loc, arg0, cval1, maxval,
9378 cval2, minval),
9379 arg1);
9380 tree equal_result
9381 = fold_build2_loc (loc, code, type,
9382 eval_subst (loc, arg0, cval1, maxval,
9383 cval2, maxval),
9384 arg1);
9385 tree low_result
9386 = fold_build2_loc (loc, code, type,
9387 eval_subst (loc, arg0, cval1, minval,
9388 cval2, maxval),
9389 arg1);
9391 /* All three of these results should be 0 or 1. Confirm they are.
9392 Then use those values to select the proper code to use. */
9394 if (TREE_CODE (high_result) == INTEGER_CST
9395 && TREE_CODE (equal_result) == INTEGER_CST
9396 && TREE_CODE (low_result) == INTEGER_CST)
9398 /* Make a 3-bit mask with the high-order bit being the
9399 value for `>', the next for `=', and the low for `<'. */
9400 switch ((integer_onep (high_result) * 4)
9401 + (integer_onep (equal_result) * 2)
9402 + integer_onep (low_result))
9404 case 0:
9405 /* Always false. */
9406 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9407 case 1:
9408 code = LT_EXPR;
9409 break;
9410 case 2:
9411 code = EQ_EXPR;
9412 break;
9413 case 3:
9414 code = LE_EXPR;
9415 break;
9416 case 4:
9417 code = GT_EXPR;
9418 break;
9419 case 5:
9420 code = NE_EXPR;
9421 break;
9422 case 6:
9423 code = GE_EXPR;
9424 break;
9425 case 7:
9426 /* Always true. */
9427 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9430 if (save_p)
9432 tem = save_expr (build2 (code, type, cval1, cval2));
9433 SET_EXPR_LOCATION (tem, loc);
9434 return tem;
9436 return fold_build2_loc (loc, code, type, cval1, cval2);
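/* An illustrative walk-through (editor's example): for
   "((x > y) - (y > x)) > 0" the three substitutions give
   high_result = 1, equal_result = 0 and low_result = 0, i.e. mask
   value 4, so the whole expression folds to "x > y". */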
9441 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9442 into a single range test. */
9443 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9444 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9445 && TREE_CODE (arg1) == INTEGER_CST
9446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9447 && !integer_zerop (TREE_OPERAND (arg0, 1))
9448 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9449 && !TREE_OVERFLOW (arg1))
9451 tem = fold_div_compare (loc, code, type, arg0, arg1);
9452 if (tem != NULL_TREE)
9453 return tem;
9456 /* Fold ~X op ~Y as Y op X. */
9457 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9458 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9460 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9461 return fold_build2_loc (loc, code, type,
9462 fold_convert_loc (loc, cmp_type,
9463 TREE_OPERAND (arg1, 0)),
9464 TREE_OPERAND (arg0, 0));
9467 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9468 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9469 && TREE_CODE (arg1) == INTEGER_CST)
9471 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9472 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9473 TREE_OPERAND (arg0, 0),
9474 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9475 fold_convert_loc (loc, cmp_type, arg1)));
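/* For instance (editor's illustration): "~x < 5" becomes "x > ~5",
   i.e. "x > -6" in a two's-complement cmp_type. */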
9478 return NULL_TREE;
9482 /* Subroutine of fold_binary. Optimize complex multiplications of the
9483 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9484 argument EXPR represents the expression "z" of type TYPE. */
9486 static tree
9487 fold_mult_zconjz (location_t loc, tree type, tree expr)
9489 tree itype = TREE_TYPE (type);
9490 tree rpart, ipart, tem;
9492 if (TREE_CODE (expr) == COMPLEX_EXPR)
9494 rpart = TREE_OPERAND (expr, 0);
9495 ipart = TREE_OPERAND (expr, 1);
9497 else if (TREE_CODE (expr) == COMPLEX_CST)
9499 rpart = TREE_REALPART (expr);
9500 ipart = TREE_IMAGPART (expr);
9502 else
9504 expr = save_expr (expr);
9505 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9506 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9509 rpart = save_expr (rpart);
9510 ipart = save_expr (ipart);
9511 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9512 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9513 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9514 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9515 build_zero_cst (itype));
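/* A worked instance of the identity used above (editor's note): for
   z = a + bi, z * conj(z) = (a + bi) * (a - bi) = a*a + b*b with a
   zero imaginary part, which is exactly the COMPLEX_EXPR built
   here. */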
9519 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9520 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9521 guarantees that P and N have the same least significant log2(M) bits.
9522 N is not otherwise constrained. In particular, N is not normalized to
9523 0 <= N < M as is common. In general, the precise value of P is unknown.
9524 M is chosen as large as possible such that constant N can be determined.
9526 Returns M and sets *RESIDUE to N.
9528 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9529 account. This is not always possible due to PR 35705.
9532 static unsigned HOST_WIDE_INT
9533 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9534 bool allow_func_align)
9536 enum tree_code code;
9538 *residue = 0;
9540 code = TREE_CODE (expr);
9541 if (code == ADDR_EXPR)
9543 unsigned int bitalign;
9544 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9545 *residue /= BITS_PER_UNIT;
9546 return bitalign / BITS_PER_UNIT;
9548 else if (code == POINTER_PLUS_EXPR)
9550 tree op0, op1;
9551 unsigned HOST_WIDE_INT modulus;
9552 enum tree_code inner_code;
9554 op0 = TREE_OPERAND (expr, 0);
9555 STRIP_NOPS (op0);
9556 modulus = get_pointer_modulus_and_residue (op0, residue,
9557 allow_func_align);
9559 op1 = TREE_OPERAND (expr, 1);
9560 STRIP_NOPS (op1);
9561 inner_code = TREE_CODE (op1);
9562 if (inner_code == INTEGER_CST)
9564 *residue += TREE_INT_CST_LOW (op1);
9565 return modulus;
9567 else if (inner_code == MULT_EXPR)
9569 op1 = TREE_OPERAND (op1, 1);
9570 if (TREE_CODE (op1) == INTEGER_CST)
9572 unsigned HOST_WIDE_INT align;
9574 /* Compute the greatest power-of-2 divisor of op1. */
9575 align = TREE_INT_CST_LOW (op1);
9576 align &= -align;
9578 /* If align is non-zero and less than *modulus, replace
9579 *modulus with align. If align is 0, then either op1 is 0
9580 or the greatest power-of-2 divisor of op1 doesn't fit in an
9581 unsigned HOST_WIDE_INT. In either case, no additional
9582 constraint is imposed. */
9583 if (align)
9584 modulus = MIN (modulus, align);
9586 return modulus;
9591 /* If we get here, we were unable to determine anything useful about the
9592 expression. */
9593 return 1;
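/* A hypothetical example: for "&buf + i * 12" where buf is known to
   be 16-byte aligned, the ADDR_EXPR yields modulus 16 and residue 0,
   and the MULT_EXPR narrows the modulus to 12 & -12 == 4; the
   pointer value is therefore known to be 0 mod 4. */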
9596 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9597 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9599 static bool
9600 vec_cst_ctor_to_array (tree arg, tree *elts)
9602 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9604 if (TREE_CODE (arg) == VECTOR_CST)
9606 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9607 elts[i] = VECTOR_CST_ELT (arg, i);
9609 else if (TREE_CODE (arg) == CONSTRUCTOR)
9611 constructor_elt *elt;
9613 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9614 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9615 return false;
9616 else
9617 elts[i] = elt->value;
9619 else
9620 return false;
9621 for (; i < nelts; i++)
9622 elts[i]
9623 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9624 return true;
9627 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9628 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9629 NULL_TREE otherwise. */
9631 static tree
9632 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9634 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9635 tree *elts;
9636 bool need_ctor = false;
9638 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9639 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9640 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9641 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9642 return NULL_TREE;
9644 elts = XALLOCAVEC (tree, nelts * 3);
9645 if (!vec_cst_ctor_to_array (arg0, elts)
9646 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9647 return NULL_TREE;
9649 for (i = 0; i < nelts; i++)
9651 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9652 need_ctor = true;
9653 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9656 if (need_ctor)
9658 vec<constructor_elt, va_gc> *v;
9659 vec_alloc (v, nelts);
9660 for (i = 0; i < nelts; i++)
9661 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9662 return build_constructor (type, v);
9664 else
9665 return build_vector (type, &elts[2 * nelts]);
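/* For example (an editor's sketch): with nelts == 4,
   ARG0 == {a,b,c,d}, ARG1 == {e,f,g,h} and SEL == {0,4,1,5}, the
   selector indexes the concatenation of the two vectors and the
   result interleaves their low halves: {a,e,b,f}. */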
9668 /* Try to fold a pointer difference of type TYPE between two address expressions of
9669 array references AREF0 and AREF1 using location LOC. Return a
9670 simplified expression for the difference or NULL_TREE. */
9672 static tree
9673 fold_addr_of_array_ref_difference (location_t loc, tree type,
9674 tree aref0, tree aref1)
9676 tree base0 = TREE_OPERAND (aref0, 0);
9677 tree base1 = TREE_OPERAND (aref1, 0);
9678 tree base_offset = build_int_cst (type, 0);
9680 /* If the bases are array references as well, recurse. If the bases
9681 are pointer indirections compute the difference of the pointers.
9682 If the bases are equal, we are set. */
9683 if ((TREE_CODE (base0) == ARRAY_REF
9684 && TREE_CODE (base1) == ARRAY_REF
9685 && (base_offset
9686 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9687 || (INDIRECT_REF_P (base0)
9688 && INDIRECT_REF_P (base1)
9689 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9690 TREE_OPERAND (base0, 0),
9691 TREE_OPERAND (base1, 0))))
9692 || operand_equal_p (base0, base1, 0))
9694 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9695 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9696 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9697 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9698 return fold_build2_loc (loc, PLUS_EXPR, type,
9699 base_offset,
9700 fold_build2_loc (loc, MULT_EXPR, type,
9701 diff, esz));
9703 return NULL_TREE;
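/* E.g. (illustrative): for "int a[N]", "&a[i] - &a[j]" recurses to a
   zero base offset and folds to "0 + (i - j) * 4" with 32-bit int,
   i.e. the index difference scaled by the element size. */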
9706 /* If the real or vector real constant CST of type TYPE has an exact
9707 inverse, return it, else return NULL. */
9709 static tree
9710 exact_inverse (tree type, tree cst)
9712 REAL_VALUE_TYPE r;
9713 tree unit_type, *elts;
9714 enum machine_mode mode;
9715 unsigned vec_nelts, i;
9717 switch (TREE_CODE (cst))
9719 case REAL_CST:
9720 r = TREE_REAL_CST (cst);
9722 if (exact_real_inverse (TYPE_MODE (type), &r))
9723 return build_real (type, r);
9725 return NULL_TREE;
9727 case VECTOR_CST:
9728 vec_nelts = VECTOR_CST_NELTS (cst);
9729 elts = XALLOCAVEC (tree, vec_nelts);
9730 unit_type = TREE_TYPE (type);
9731 mode = TYPE_MODE (unit_type);
9733 for (i = 0; i < vec_nelts; i++)
9735 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9736 if (!exact_real_inverse (mode, &r))
9737 return NULL_TREE;
9738 elts[i] = build_real (unit_type, r);
9741 return build_vector (type, elts);
9743 default:
9744 return NULL_TREE;
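/* For instance, 4.0 has the exact inverse 0.25, whereas 3.0 has no
   exact binary reciprocal and NULL_TREE is returned (editor's
   illustration). */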
9748 /* Fold a binary expression of code CODE and type TYPE with operands
9749 OP0 and OP1. LOC is the location of the resulting expression.
9750 Return the folded expression if folding is successful. Otherwise,
9751 return NULL_TREE. */
9753 tree
9754 fold_binary_loc (location_t loc,
9755 enum tree_code code, tree type, tree op0, tree op1)
9757 enum tree_code_class kind = TREE_CODE_CLASS (code);
9758 tree arg0, arg1, tem;
9759 tree t1 = NULL_TREE;
9760 bool strict_overflow_p;
9762 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9763 && TREE_CODE_LENGTH (code) == 2
9764 && op0 != NULL_TREE
9765 && op1 != NULL_TREE);
9767 arg0 = op0;
9768 arg1 = op1;
9770 /* Strip any conversions that don't change the mode. This is
9771 safe for every expression, except for a comparison expression
9772 because its signedness is derived from its operands. So, in
9773 the latter case, only strip conversions that don't change the
9774 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9775 preserved.
9777 Note that this is done as an internal manipulation within the
9778 constant folder, in order to find the simplest representation
9779 of the arguments so that their form can be studied. In any
9780 case, the appropriate type conversions should be put back in
9781 the tree that will get out of the constant folder. */
9783 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9785 STRIP_SIGN_NOPS (arg0);
9786 STRIP_SIGN_NOPS (arg1);
9788 else
9790 STRIP_NOPS (arg0);
9791 STRIP_NOPS (arg1);
9794 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9795 constant but we can't do arithmetic on them. */
9796 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9797 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9798 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9799 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9800 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9801 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9803 if (kind == tcc_binary)
9805 /* Make sure type and arg0 have the same saturating flag. */
9806 gcc_assert (TYPE_SATURATING (type)
9807 == TYPE_SATURATING (TREE_TYPE (arg0)));
9808 tem = const_binop (code, arg0, arg1);
9810 else if (kind == tcc_comparison)
9811 tem = fold_relational_const (code, type, arg0, arg1);
9812 else
9813 tem = NULL_TREE;
9815 if (tem != NULL_TREE)
9817 if (TREE_TYPE (tem) != type)
9818 tem = fold_convert_loc (loc, type, tem);
9819 return tem;
9823 /* If this is a commutative operation, and ARG0 is a constant, move it
9824 to ARG1 to reduce the number of tests below. */
9825 if (commutative_tree_code (code)
9826 && tree_swap_operands_p (arg0, arg1, true))
9827 return fold_build2_loc (loc, code, type, op1, op0);
9829 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9831 First check for cases where an arithmetic operation is applied to a
9832 compound, conditional, or comparison operation. Push the arithmetic
9833 operation inside the compound or conditional to see if any folding
9834 can then be done. Convert comparison to conditional for this purpose.
9835 This also optimizes non-constant cases that used to be done in
9836 expand_expr.
9838 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9839 one of the operands is a comparison and the other is a comparison, a
9840 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9841 code below would make the expression more complex. Change it to a
9842 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9843 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9845 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9846 || code == EQ_EXPR || code == NE_EXPR)
9847 && TREE_CODE (type) != VECTOR_TYPE
9848 && ((truth_value_p (TREE_CODE (arg0))
9849 && (truth_value_p (TREE_CODE (arg1))
9850 || (TREE_CODE (arg1) == BIT_AND_EXPR
9851 && integer_onep (TREE_OPERAND (arg1, 1)))))
9852 || (truth_value_p (TREE_CODE (arg1))
9853 && (truth_value_p (TREE_CODE (arg0))
9854 || (TREE_CODE (arg0) == BIT_AND_EXPR
9855 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9857 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9858 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9859 : TRUTH_XOR_EXPR,
9860 boolean_type_node,
9861 fold_convert_loc (loc, boolean_type_node, arg0),
9862 fold_convert_loc (loc, boolean_type_node, arg1));
9864 if (code == EQ_EXPR)
9865 tem = invert_truthvalue_loc (loc, tem);
9867 return fold_convert_loc (loc, type, tem);
9870 if (TREE_CODE_CLASS (code) == tcc_binary
9871 || TREE_CODE_CLASS (code) == tcc_comparison)
9873 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9875 tem = fold_build2_loc (loc, code, type,
9876 fold_convert_loc (loc, TREE_TYPE (op0),
9877 TREE_OPERAND (arg0, 1)), op1);
9878 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9879 tem);
9881 if (TREE_CODE (arg1) == COMPOUND_EXPR
9882 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9884 tem = fold_build2_loc (loc, code, type, op0,
9885 fold_convert_loc (loc, TREE_TYPE (op1),
9886 TREE_OPERAND (arg1, 1)));
9887 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9888 tem);
9891 if (TREE_CODE (arg0) == COND_EXPR
9892 || TREE_CODE (arg0) == VEC_COND_EXPR
9893 || COMPARISON_CLASS_P (arg0))
9895 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9896 arg0, arg1,
9897 /*cond_first_p=*/1);
9898 if (tem != NULL_TREE)
9899 return tem;
9902 if (TREE_CODE (arg1) == COND_EXPR
9903 || TREE_CODE (arg1) == VEC_COND_EXPR
9904 || COMPARISON_CLASS_P (arg1))
9906 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9907 arg1, arg0,
9908 /*cond_first_p=*/0);
9909 if (tem != NULL_TREE)
9910 return tem;
9914 switch (code)
9916 case MEM_REF:
9917 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9918 if (TREE_CODE (arg0) == ADDR_EXPR
9919 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9921 tree iref = TREE_OPERAND (arg0, 0);
9922 return fold_build2 (MEM_REF, type,
9923 TREE_OPERAND (iref, 0),
9924 int_const_binop (PLUS_EXPR, arg1,
9925 TREE_OPERAND (iref, 1)));
9928 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9929 if (TREE_CODE (arg0) == ADDR_EXPR
9930 && handled_component_p (TREE_OPERAND (arg0, 0)))
9932 tree base;
9933 HOST_WIDE_INT coffset;
9934 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9935 &coffset);
9936 if (!base)
9937 return NULL_TREE;
9938 return fold_build2 (MEM_REF, type,
9939 build_fold_addr_expr (base),
9940 int_const_binop (PLUS_EXPR, arg1,
9941 size_int (coffset)));
9944 return NULL_TREE;
9946 case POINTER_PLUS_EXPR:
9947 /* 0 +p index -> (type)index */
9948 if (integer_zerop (arg0))
9949 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9951 /* PTR +p 0 -> PTR */
9952 if (integer_zerop (arg1))
9953 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9955 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9956 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9957 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9958 return fold_convert_loc (loc, type,
9959 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9960 fold_convert_loc (loc, sizetype,
9961 arg1),
9962 fold_convert_loc (loc, sizetype,
9963 arg0)));
9965 /* (PTR +p B) +p A -> PTR +p (B + A) */
9966 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9968 tree inner;
9969 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9970 tree arg00 = TREE_OPERAND (arg0, 0);
9971 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9972 arg01, fold_convert_loc (loc, sizetype, arg1));
9973 return fold_convert_loc (loc, type,
9974 fold_build_pointer_plus_loc (loc,
9975 arg00, inner));
9978 /* PTR_CST +p CST -> CST1 */
9979 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9980 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9981 fold_convert_loc (loc, type, arg1));
9983 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9984 of the array. The loop optimizer sometimes produces this type of
9985 expression. */
9986 if (TREE_CODE (arg0) == ADDR_EXPR)
9988 tem = try_move_mult_to_index (loc, arg0,
9989 fold_convert_loc (loc,
9990 ssizetype, arg1));
9991 if (tem)
9992 return fold_convert_loc (loc, type, tem);
9995 return NULL_TREE;
9997 case PLUS_EXPR:
9998 /* A + (-B) -> A - B */
9999 if (TREE_CODE (arg1) == NEGATE_EXPR)
10000 return fold_build2_loc (loc, MINUS_EXPR, type,
10001 fold_convert_loc (loc, type, arg0),
10002 fold_convert_loc (loc, type,
10003 TREE_OPERAND (arg1, 0)));
10004 /* (-A) + B -> B - A */
10005 if (TREE_CODE (arg0) == NEGATE_EXPR
10006 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10007 return fold_build2_loc (loc, MINUS_EXPR, type,
10008 fold_convert_loc (loc, type, arg1),
10009 fold_convert_loc (loc, type,
10010 TREE_OPERAND (arg0, 0)));
10012 if (INTEGRAL_TYPE_P (type))
10014 /* Convert ~A + 1 to -A. */
10015 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10016 && integer_onep (arg1))
10017 return fold_build1_loc (loc, NEGATE_EXPR, type,
10018 fold_convert_loc (loc, type,
10019 TREE_OPERAND (arg0, 0)));
10021 /* ~X + X is -1. */
10022 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10023 && !TYPE_OVERFLOW_TRAPS (type))
10025 tree tem = TREE_OPERAND (arg0, 0);
10027 STRIP_NOPS (tem);
10028 if (operand_equal_p (tem, arg1, 0))
10030 t1 = build_int_cst_type (type, -1);
10031 return omit_one_operand_loc (loc, type, t1, arg1);
10035 /* X + ~X is -1. */
10036 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10037 && !TYPE_OVERFLOW_TRAPS (type))
10039 tree tem = TREE_OPERAND (arg1, 0);
10041 STRIP_NOPS (tem);
10042 if (operand_equal_p (arg0, tem, 0))
10044 t1 = build_int_cst_type (type, -1);
10045 return omit_one_operand_loc (loc, type, t1, arg0);
10049 /* X + (X / CST) * -CST is X % CST. */
10050 if (TREE_CODE (arg1) == MULT_EXPR
10051 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10052 && operand_equal_p (arg0,
10053 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10055 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10056 tree cst1 = TREE_OPERAND (arg1, 1);
10057 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10058 cst1, cst0);
10059 if (sum && integer_zerop (sum))
10060 return fold_convert_loc (loc, type,
10061 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10062 TREE_TYPE (arg0), arg0,
10063 cst0));
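/* A numeric check (editor's sketch): for x = 37 and CST = 16,
   "x + (x / 16) * -16" is 37 - 32 = 5, which equals 37 % 16. */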
10067 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10068 one. Make sure the type is not saturating and has the signedness of
10069 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10070 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10071 if ((TREE_CODE (arg0) == MULT_EXPR
10072 || TREE_CODE (arg1) == MULT_EXPR)
10073 && !TYPE_SATURATING (type)
10074 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10075 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10076 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10078 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10079 if (tem)
10080 return tem;
10083 if (! FLOAT_TYPE_P (type))
10085 if (integer_zerop (arg1))
10086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10088 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10089 with a constant, and the two constants have no bits in common,
10090 we should treat this as a BIT_IOR_EXPR since this may produce more
10091 simplifications. */
10092 if (TREE_CODE (arg0) == BIT_AND_EXPR
10093 && TREE_CODE (arg1) == BIT_AND_EXPR
10094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10095 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10096 && integer_zerop (const_binop (BIT_AND_EXPR,
10097 TREE_OPERAND (arg0, 1),
10098 TREE_OPERAND (arg1, 1))))
10100 code = BIT_IOR_EXPR;
10101 goto bit_ior;
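/* Why this is safe (editor's sketch): in "(x & 0xF0) + (y & 0x0F)"
   the two masked values share no set bits, so the addition can never
   carry and is equivalent to "(x & 0xF0) | (y & 0x0F)". */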
10104 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10105 (plus (plus (mult) (mult)) (foo)) so that we can
10106 take advantage of the factoring cases below. */
10107 if (TYPE_OVERFLOW_WRAPS (type)
10108 && (((TREE_CODE (arg0) == PLUS_EXPR
10109 || TREE_CODE (arg0) == MINUS_EXPR)
10110 && TREE_CODE (arg1) == MULT_EXPR)
10111 || ((TREE_CODE (arg1) == PLUS_EXPR
10112 || TREE_CODE (arg1) == MINUS_EXPR)
10113 && TREE_CODE (arg0) == MULT_EXPR)))
10115 tree parg0, parg1, parg, marg;
10116 enum tree_code pcode;
10118 if (TREE_CODE (arg1) == MULT_EXPR)
10119 parg = arg0, marg = arg1;
10120 else
10121 parg = arg1, marg = arg0;
10122 pcode = TREE_CODE (parg);
10123 parg0 = TREE_OPERAND (parg, 0);
10124 parg1 = TREE_OPERAND (parg, 1);
10125 STRIP_NOPS (parg0);
10126 STRIP_NOPS (parg1);
10128 if (TREE_CODE (parg0) == MULT_EXPR
10129 && TREE_CODE (parg1) != MULT_EXPR)
10130 return fold_build2_loc (loc, pcode, type,
10131 fold_build2_loc (loc, PLUS_EXPR, type,
10132 fold_convert_loc (loc, type,
10133 parg0),
10134 fold_convert_loc (loc, type,
10135 marg)),
10136 fold_convert_loc (loc, type, parg1));
10137 if (TREE_CODE (parg0) != MULT_EXPR
10138 && TREE_CODE (parg1) == MULT_EXPR)
10139 return
10140 fold_build2_loc (loc, PLUS_EXPR, type,
10141 fold_convert_loc (loc, type, parg0),
10142 fold_build2_loc (loc, pcode, type,
10143 fold_convert_loc (loc, type, marg),
10144 fold_convert_loc (loc, type,
10145 parg1)));
10148 else
10150 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10151 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10152 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10154 /* Likewise if the operands are reversed. */
10155 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10156 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10158 /* Convert X + -C into X - C. */
10159 if (TREE_CODE (arg1) == REAL_CST
10160 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10162 tem = fold_negate_const (arg1, type);
10163 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10164 return fold_build2_loc (loc, MINUS_EXPR, type,
10165 fold_convert_loc (loc, type, arg0),
10166 fold_convert_loc (loc, type, tem));
10169 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10170 to __complex__ ( x, y ). This is not the same for SNaNs or
10171 if signed zeros are involved. */
10172 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10173 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10174 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10176 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10177 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10178 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10179 bool arg0rz = false, arg0iz = false;
10180 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10181 || (arg0i && (arg0iz = real_zerop (arg0i))))
10183 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10184 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10185 if (arg0rz && arg1i && real_zerop (arg1i))
10187 tree rp = arg1r ? arg1r
10188 : build1 (REALPART_EXPR, rtype, arg1);
10189 tree ip = arg0i ? arg0i
10190 : build1 (IMAGPART_EXPR, rtype, arg0);
10191 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10193 else if (arg0iz && arg1r && real_zerop (arg1r))
10195 tree rp = arg0r ? arg0r
10196 : build1 (REALPART_EXPR, rtype, arg0);
10197 tree ip = arg1i ? arg1i
10198 : build1 (IMAGPART_EXPR, rtype, arg1);
10199 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10204 if (flag_unsafe_math_optimizations
10205 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10206 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10207 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10208 return tem;
10210 /* Convert x+x into x*2.0. */
10211 if (operand_equal_p (arg0, arg1, 0)
10212 && SCALAR_FLOAT_TYPE_P (type))
10213 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10214 build_real (type, dconst2));
10216 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10217 We associate floats only if the user has specified
10218 -fassociative-math. */
10219 if (flag_associative_math
10220 && TREE_CODE (arg1) == PLUS_EXPR
10221 && TREE_CODE (arg0) != MULT_EXPR)
10223 tree tree10 = TREE_OPERAND (arg1, 0);
10224 tree tree11 = TREE_OPERAND (arg1, 1);
10225 if (TREE_CODE (tree11) == MULT_EXPR
10226 && TREE_CODE (tree10) == MULT_EXPR)
10228 tree tree0;
10229 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10230 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10233 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10234 We associate floats only if the user has specified
10235 -fassociative-math. */
10236 if (flag_associative_math
10237 && TREE_CODE (arg0) == PLUS_EXPR
10238 && TREE_CODE (arg1) != MULT_EXPR)
10240 tree tree00 = TREE_OPERAND (arg0, 0);
10241 tree tree01 = TREE_OPERAND (arg0, 1);
10242 if (TREE_CODE (tree01) == MULT_EXPR
10243 && TREE_CODE (tree00) == MULT_EXPR)
10245 tree tree0;
10246 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10247 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10252 bit_rotate:
10253 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10254 is a rotate of A by C1 bits. */
10255 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10256 is a rotate of A by B bits. */
10258 enum tree_code code0, code1;
10259 tree rtype;
10260 code0 = TREE_CODE (arg0);
10261 code1 = TREE_CODE (arg1);
10262 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10263 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10264 && operand_equal_p (TREE_OPERAND (arg0, 0),
10265 TREE_OPERAND (arg1, 0), 0)
10266 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10267 TYPE_UNSIGNED (rtype))
10268 /* Only create rotates in complete modes. Other cases are not
10269 expanded properly. */
10270 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10272 tree tree01, tree11;
10273 enum tree_code code01, code11;
10275 tree01 = TREE_OPERAND (arg0, 1);
10276 tree11 = TREE_OPERAND (arg1, 1);
10277 STRIP_NOPS (tree01);
10278 STRIP_NOPS (tree11);
10279 code01 = TREE_CODE (tree01);
10280 code11 = TREE_CODE (tree11);
10281 if (code01 == INTEGER_CST
10282 && code11 == INTEGER_CST
10283 && TREE_INT_CST_HIGH (tree01) == 0
10284 && TREE_INT_CST_HIGH (tree11) == 0
10285 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10286 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10288 tem = build2_loc (loc, LROTATE_EXPR,
10289 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10290 TREE_OPERAND (arg0, 0),
10291 code0 == LSHIFT_EXPR ? tree01 : tree11);
10292 return fold_convert_loc (loc, type, tem);
10294 else if (code11 == MINUS_EXPR)
10296 tree tree110, tree111;
10297 tree110 = TREE_OPERAND (tree11, 0);
10298 tree111 = TREE_OPERAND (tree11, 1);
10299 STRIP_NOPS (tree110);
10300 STRIP_NOPS (tree111);
10301 if (TREE_CODE (tree110) == INTEGER_CST
10302 && 0 == compare_tree_int (tree110,
10303 TYPE_PRECISION
10304 (TREE_TYPE (TREE_OPERAND
10305 (arg0, 0))))
10306 && operand_equal_p (tree01, tree111, 0))
10307 return
10308 fold_convert_loc (loc, type,
10309 build2 ((code0 == LSHIFT_EXPR
10310 ? LROTATE_EXPR
10311 : RROTATE_EXPR),
10312 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10313 TREE_OPERAND (arg0, 0), tree01));
10315 else if (code01 == MINUS_EXPR)
10317 tree tree010, tree011;
10318 tree010 = TREE_OPERAND (tree01, 0);
10319 tree011 = TREE_OPERAND (tree01, 1);
10320 STRIP_NOPS (tree010);
10321 STRIP_NOPS (tree011);
10322 if (TREE_CODE (tree010) == INTEGER_CST
10323 && 0 == compare_tree_int (tree010,
10324 TYPE_PRECISION
10325 (TREE_TYPE (TREE_OPERAND
10326 (arg0, 0))))
10327 && operand_equal_p (tree11, tree011, 0))
10328 return fold_convert_loc
10329 (loc, type,
10330 build2 ((code0 != LSHIFT_EXPR
10331 ? LROTATE_EXPR
10332 : RROTATE_EXPR),
10333 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10334 TREE_OPERAND (arg0, 0), tree11));
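/* Illustrative cases (editor's sketch): for unsigned 32-bit x,
   "(x << 3) + (x >> 29)" matches the constant form and becomes a
   left-rotate of x by 3, while "(x << b) + (x >> (32 - b))" matches
   the MINUS_EXPR form and becomes a rotate by b. */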
10339 associate:
10340 /* In most languages, we can't associate operations on floats through
10341 parentheses. Rather than remember where the parentheses were, we
10342 don't associate floats at all, unless the user has specified
10343 -fassociative-math.
10344 And, we need to make sure type is not saturating. */
10346 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10347 && !TYPE_SATURATING (type))
10349 tree var0, con0, lit0, minus_lit0;
10350 tree var1, con1, lit1, minus_lit1;
10351 tree atype = type;
10352 bool ok = true;
10354 /* Split both trees into variables, constants, and literals. Then
10355 associate each group together, the constants with literals,
10356 then the result with variables. This increases the chances of
10357 literals being recombined later and of generating relocatable
10358 expressions for the sum of a constant and literal. */
10359 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10360 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10361 code == MINUS_EXPR);
10363 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10364 if (code == MINUS_EXPR)
10365 code = PLUS_EXPR;
10367 /* With undefined overflow prefer doing association in a type
10368 which wraps on overflow, if that is one of the operand types. */
10369 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10370 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10372 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10373 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10374 atype = TREE_TYPE (arg0);
10375 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10376 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10377 atype = TREE_TYPE (arg1);
10378 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10381 /* With undefined overflow we can only associate constants with one
10382 variable, and constants whose association doesn't overflow. */
10383 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10384 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10386 if (var0 && var1)
10388 tree tmp0 = var0;
10389 tree tmp1 = var1;
10391 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10392 tmp0 = TREE_OPERAND (tmp0, 0);
10393 if (CONVERT_EXPR_P (tmp0)
10394 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10395 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10396 <= TYPE_PRECISION (atype)))
10397 tmp0 = TREE_OPERAND (tmp0, 0);
10398 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10399 tmp1 = TREE_OPERAND (tmp1, 0);
10400 if (CONVERT_EXPR_P (tmp1)
10401 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10402 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10403 <= TYPE_PRECISION (atype)))
10404 tmp1 = TREE_OPERAND (tmp1, 0);
10405 /* The only case we can still associate with two variables
10406 is if they are the same, modulo negation and bit-pattern
10407 preserving conversions. */
10408 if (!operand_equal_p (tmp0, tmp1, 0))
10409 ok = false;
10413 /* Only do something if we found more than two objects. Otherwise,
10414 nothing has changed and we risk infinite recursion. */
10415 if (ok
10416 && (2 < ((var0 != 0) + (var1 != 0)
10417 + (con0 != 0) + (con1 != 0)
10418 + (lit0 != 0) + (lit1 != 0)
10419 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10421 bool any_overflows = false;
10422 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10423 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10424 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10425 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10426 var0 = associate_trees (loc, var0, var1, code, atype);
10427 con0 = associate_trees (loc, con0, con1, code, atype);
10428 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10429 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10430 code, atype);
10432 /* Preserve the MINUS_EXPR if the negative part of the literal is
10433 greater than the positive part. Otherwise, the multiplicative
10434 folding code (i.e. extract_muldiv) may be fooled in case
10435 unsigned constants are subtracted, like in the following
10436 example: ((X*2 + 4) - 8U)/2. */
10437 if (minus_lit0 && lit0)
10439 if (TREE_CODE (lit0) == INTEGER_CST
10440 && TREE_CODE (minus_lit0) == INTEGER_CST
10441 && tree_int_cst_lt (lit0, minus_lit0))
10443 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10444 MINUS_EXPR, atype);
10445 lit0 = 0;
10447 else
10449 lit0 = associate_trees (loc, lit0, minus_lit0,
10450 MINUS_EXPR, atype);
10451 minus_lit0 = 0;
10455 /* Don't introduce overflows through reassociation. */
10456 if (!any_overflows
10457 && ((lit0 && TREE_OVERFLOW (lit0))
10458 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10459 return NULL_TREE;
10461 if (minus_lit0)
10463 if (con0 == 0)
10464 return
10465 fold_convert_loc (loc, type,
10466 associate_trees (loc, var0, minus_lit0,
10467 MINUS_EXPR, atype));
10468 else
10470 con0 = associate_trees (loc, con0, minus_lit0,
10471 MINUS_EXPR, atype);
10472 return
10473 fold_convert_loc (loc, type,
10474 associate_trees (loc, var0, con0,
10475 PLUS_EXPR, atype));
10479 con0 = associate_trees (loc, con0, lit0, code, atype);
10480 return
10481 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10482 code, atype));
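/* A worked case (editor's sketch): for unsigned x and y,
   "(x + 1) + (y + 2)" splits into variables x, y and literals 1, 2
   -- four objects -- so the pieces are regrouped as "(x + y) + 3".
   With undefined signed overflow the two-variable case would be
   rejected above unless the variables match. */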
10486 return NULL_TREE;
10488 case MINUS_EXPR:
10489 /* Pointer simplifications for subtraction, simple reassociations. */
10490 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10492 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10493 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10494 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10496 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10497 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10498 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10499 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10500 return fold_build2_loc (loc, PLUS_EXPR, type,
10501 fold_build2_loc (loc, MINUS_EXPR, type,
10502 arg00, arg10),
10503 fold_build2_loc (loc, MINUS_EXPR, type,
10504 arg01, arg11));
10506 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10507 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10509 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10510 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10511 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10512 fold_convert_loc (loc, type, arg1));
10513 if (tmp)
10514 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10517 /* A - (-B) -> A + B */
10518 if (TREE_CODE (arg1) == NEGATE_EXPR)
10519 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10520 fold_convert_loc (loc, type,
10521 TREE_OPERAND (arg1, 0)));
10522 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10523 if (TREE_CODE (arg0) == NEGATE_EXPR
10524 && (FLOAT_TYPE_P (type)
10525 || INTEGRAL_TYPE_P (type))
10526 && negate_expr_p (arg1)
10527 && reorder_operands_p (arg0, arg1))
10528 return fold_build2_loc (loc, MINUS_EXPR, type,
10529 fold_convert_loc (loc, type,
10530 negate_expr (arg1)),
10531 fold_convert_loc (loc, type,
10532 TREE_OPERAND (arg0, 0)));
10533 /* Convert -A - 1 to ~A. */
10534 if (INTEGRAL_TYPE_P (type)
10535 && TREE_CODE (arg0) == NEGATE_EXPR
10536 && integer_onep (arg1)
10537 && !TYPE_OVERFLOW_TRAPS (type))
10538 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10539 fold_convert_loc (loc, type,
10540 TREE_OPERAND (arg0, 0)));
10542 /* Convert -1 - A to ~A. */
10543 if (INTEGRAL_TYPE_P (type)
10544 && integer_all_onesp (arg0))
10545 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10548 /* X - (X / CST) * CST is X % CST. */
10549 if (INTEGRAL_TYPE_P (type)
10550 && TREE_CODE (arg1) == MULT_EXPR
10551 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10552 && operand_equal_p (arg0,
10553 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10554 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10555 TREE_OPERAND (arg1, 1), 0))
10556 return
10557 fold_convert_loc (loc, type,
10558 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10559 arg0, TREE_OPERAND (arg1, 1)));
10561 if (! FLOAT_TYPE_P (type))
10563 if (integer_zerop (arg0))
10564 return negate_expr (fold_convert_loc (loc, type, arg1));
10565 if (integer_zerop (arg1))
10566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10568 /* Fold A - (A & B) into ~B & A. */
10569 if (!TREE_SIDE_EFFECTS (arg0)
10570 && TREE_CODE (arg1) == BIT_AND_EXPR)
10572 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10574 tree arg10 = fold_convert_loc (loc, type,
10575 TREE_OPERAND (arg1, 0));
10576 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10577 fold_build1_loc (loc, BIT_NOT_EXPR,
10578 type, arg10),
10579 fold_convert_loc (loc, type, arg0));
10581 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10583 tree arg11 = fold_convert_loc (loc,
10584 type, TREE_OPERAND (arg1, 1));
10585 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10586 fold_build1_loc (loc, BIT_NOT_EXPR,
10587 type, arg11),
10588 fold_convert_loc (loc, type, arg0));
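/* E.g. (editor's illustration): "x - (x & 7)" becomes "~7 & x",
   i.e. x rounded down to a multiple of 8. */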
10592 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10593 any power of 2 minus 1. */
10594 if (TREE_CODE (arg0) == BIT_AND_EXPR
10595 && TREE_CODE (arg1) == BIT_AND_EXPR
10596 && operand_equal_p (TREE_OPERAND (arg0, 0),
10597 TREE_OPERAND (arg1, 0), 0))
10599 tree mask0 = TREE_OPERAND (arg0, 1);
10600 tree mask1 = TREE_OPERAND (arg1, 1);
10601 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10603 if (operand_equal_p (tem, mask1, 0))
10605 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10606 TREE_OPERAND (arg0, 0), mask1);
10607 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10612 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10613 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10614 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10616 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10617 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10618 (-ARG1 + ARG0) reduces to -ARG1. */
10619 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10620 return negate_expr (fold_convert_loc (loc, type, arg1));
10622 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10623 __complex__ ( x, -y ). This is not the same for SNaNs or if
10624 signed zeros are involved. */
10625 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10627 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10629 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10630 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10631 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10632 bool arg0rz = false, arg0iz = false;
10633 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10634 || (arg0i && (arg0iz = real_zerop (arg0i))))
10636 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10637 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10638 if (arg0rz && arg1i && real_zerop (arg1i))
10640 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10641 arg1r ? arg1r
10642 : build1 (REALPART_EXPR, rtype, arg1));
10643 tree ip = arg0i ? arg0i
10644 : build1 (IMAGPART_EXPR, rtype, arg0);
10645 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10647 else if (arg0iz && arg1r && real_zerop (arg1r))
10649 tree rp = arg0r ? arg0r
10650 : build1 (REALPART_EXPR, rtype, arg0);
10651 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10652 arg1i ? arg1i
10653 : build1 (IMAGPART_EXPR, rtype, arg1));
10654 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10659 /* Fold &x - &x. This can happen from &x.foo - &x.
10660 This is unsafe for certain floats even in non-IEEE formats.
 10661         In IEEE, it is unsafe because it gives the wrong result for NaNs.
10662 Also note that operand_equal_p is always false if an operand
10663 is volatile. */
10665 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10666 && operand_equal_p (arg0, arg1, 0))
10667 return build_zero_cst (type);
10669 /* A - B -> A + (-B) if B is easily negatable. */
10670 if (negate_expr_p (arg1)
10671 && ((FLOAT_TYPE_P (type)
10672 /* Avoid this transformation if B is a positive REAL_CST. */
10673 && (TREE_CODE (arg1) != REAL_CST
10674 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10675 || INTEGRAL_TYPE_P (type)))
10676 return fold_build2_loc (loc, PLUS_EXPR, type,
10677 fold_convert_loc (loc, type, arg0),
10678 fold_convert_loc (loc, type,
10679 negate_expr (arg1)));
10681 /* Try folding difference of addresses. */
10683 HOST_WIDE_INT diff;
10685 if ((TREE_CODE (arg0) == ADDR_EXPR
10686 || TREE_CODE (arg1) == ADDR_EXPR)
10687 && ptr_difference_const (arg0, arg1, &diff))
10688 return build_int_cst_type (type, diff);
10691 /* Fold &a[i] - &a[j] to i-j. */
10692 if (TREE_CODE (arg0) == ADDR_EXPR
10693 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10694 && TREE_CODE (arg1) == ADDR_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10697 tree tem = fold_addr_of_array_ref_difference (loc, type,
10698 TREE_OPERAND (arg0, 0),
10699 TREE_OPERAND (arg1, 0));
10700 if (tem)
10701 return tem;
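      /* Example (added commentary, hypothetical input): given

             int a[16];
             ... &a[i] - &a[j] ...

         the address difference folds to (i - j) scaled by the element
         size, assuming both ADDR_EXPRs refer to the same base array.  */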
10704 if (FLOAT_TYPE_P (type)
10705 && flag_unsafe_math_optimizations
10706 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10707 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10708 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10709 return tem;
 10711       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
 10712          or one of them 1.  Make sure the type is not saturating and has the signedness of
10713 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10714 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10715 if ((TREE_CODE (arg0) == MULT_EXPR
10716 || TREE_CODE (arg1) == MULT_EXPR)
10717 && !TYPE_SATURATING (type)
10718 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10719 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10720 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10722 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10723 if (tem)
10724 return tem;
10727 goto associate;
10729 case MULT_EXPR:
10730 /* (-A) * (-B) -> A * B */
10731 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10732 return fold_build2_loc (loc, MULT_EXPR, type,
10733 fold_convert_loc (loc, type,
10734 TREE_OPERAND (arg0, 0)),
10735 fold_convert_loc (loc, type,
10736 negate_expr (arg1)));
10737 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10738 return fold_build2_loc (loc, MULT_EXPR, type,
10739 fold_convert_loc (loc, type,
10740 negate_expr (arg0)),
10741 fold_convert_loc (loc, type,
10742 TREE_OPERAND (arg1, 0)));
10744 if (! FLOAT_TYPE_P (type))
10746 if (integer_zerop (arg1))
10747 return omit_one_operand_loc (loc, type, arg1, arg0);
10748 if (integer_onep (arg1))
10749 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10750 /* Transform x * -1 into -x. Make sure to do the negation
10751 on the original operand with conversions not stripped
10752 because we can only strip non-sign-changing conversions. */
10753 if (integer_all_onesp (arg1))
10754 return fold_convert_loc (loc, type, negate_expr (op0));
10755 /* Transform x * -C into -x * C if x is easily negatable. */
10756 if (TREE_CODE (arg1) == INTEGER_CST
10757 && tree_int_cst_sgn (arg1) == -1
10758 && negate_expr_p (arg0)
10759 && (tem = negate_expr (arg1)) != arg1
10760 && !TREE_OVERFLOW (tem))
10761 return fold_build2_loc (loc, MULT_EXPR, type,
10762 fold_convert_loc (loc, type,
10763 negate_expr (arg0)),
10764 tem);
10766 /* (a * (1 << b)) is (a << b) */
10767 if (TREE_CODE (arg1) == LSHIFT_EXPR
10768 && integer_onep (TREE_OPERAND (arg1, 0)))
10769 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10770 TREE_OPERAND (arg1, 1));
10771 if (TREE_CODE (arg0) == LSHIFT_EXPR
10772 && integer_onep (TREE_OPERAND (arg0, 0)))
10773 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10774 TREE_OPERAND (arg0, 1));
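      /* Sketch (added annotation): multiplying by a power of two is a
         shift, so

             unsigned f (unsigned a, unsigned b) { return a * (1u << b); }

         folds to a << b; the second test handles (1u << b) * a.  */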
10776 /* (A + A) * C -> A * 2 * C */
10777 if (TREE_CODE (arg0) == PLUS_EXPR
10778 && TREE_CODE (arg1) == INTEGER_CST
10779 && operand_equal_p (TREE_OPERAND (arg0, 0),
10780 TREE_OPERAND (arg0, 1), 0))
10781 return fold_build2_loc (loc, MULT_EXPR, type,
10782 omit_one_operand_loc (loc, type,
10783 TREE_OPERAND (arg0, 0),
10784 TREE_OPERAND (arg0, 1)),
10785 fold_build2_loc (loc, MULT_EXPR, type,
10786 build_int_cst (type, 2) , arg1));
10788 strict_overflow_p = false;
10789 if (TREE_CODE (arg1) == INTEGER_CST
10790 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10791 &strict_overflow_p)))
10793 if (strict_overflow_p)
10794 fold_overflow_warning (("assuming signed overflow does not "
10795 "occur when simplifying "
10796 "multiplication"),
10797 WARN_STRICT_OVERFLOW_MISC);
10798 return fold_convert_loc (loc, type, tem);
10801 /* Optimize z * conj(z) for integer complex numbers. */
10802 if (TREE_CODE (arg0) == CONJ_EXPR
10803 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10804 return fold_mult_zconjz (loc, type, arg1);
10805 if (TREE_CODE (arg1) == CONJ_EXPR
10806 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10807 return fold_mult_zconjz (loc, type, arg0);
10809 else
10811 /* Maybe fold x * 0 to 0. The expressions aren't the same
10812 when x is NaN, since x * 0 is also NaN. Nor are they the
10813 same in modes with signed zeros, since multiplying a
10814 negative value by 0 gives -0, not +0. */
10815 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10816 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10817 && real_zerop (arg1))
10818 return omit_one_operand_loc (loc, type, arg1, arg0);
10819 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10820 Likewise for complex arithmetic with signed zeros. */
10821 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10822 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10823 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10824 && real_onep (arg1))
10825 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10827 /* Transform x * -1.0 into -x. */
10828 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10829 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10830 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10831 && real_minus_onep (arg1))
10832 return fold_convert_loc (loc, type, negate_expr (arg0));
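      /* Illustration (added, not in the original file):

             double f (double x) { return x * -1.0; }

         folds to -x under the checks above; the HONOR_SNANS guard
         matters because a signaling NaN would raise an exception in the
         multiplication but not in the negation.  */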
10834 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
 10835          the result for floating point types due to rounding, so it is applied
 10836          only if -fassociative-math is specified.  */
10837 if (flag_associative_math
10838 && TREE_CODE (arg0) == RDIV_EXPR
10839 && TREE_CODE (arg1) == REAL_CST
10840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10842 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10843 arg1);
10844 if (tem)
10845 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10846 TREE_OPERAND (arg0, 1));
10849 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10850 if (operand_equal_p (arg0, arg1, 0))
10852 tree tem = fold_strip_sign_ops (arg0);
10853 if (tem != NULL_TREE)
10855 tem = fold_convert_loc (loc, type, tem);
10856 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10860 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10861 This is not the same for NaNs or if signed zeros are
10862 involved. */
10863 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10864 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10865 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10866 && TREE_CODE (arg1) == COMPLEX_CST
10867 && real_zerop (TREE_REALPART (arg1)))
10869 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10870 if (real_onep (TREE_IMAGPART (arg1)))
10871 return
10872 fold_build2_loc (loc, COMPLEX_EXPR, type,
10873 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10874 rtype, arg0)),
10875 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10876 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10877 return
10878 fold_build2_loc (loc, COMPLEX_EXPR, type,
10879 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10880 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10881 rtype, arg0)));
10884 /* Optimize z * conj(z) for floating point complex numbers.
10885 Guarded by flag_unsafe_math_optimizations as non-finite
10886 imaginary components don't produce scalar results. */
10887 if (flag_unsafe_math_optimizations
10888 && TREE_CODE (arg0) == CONJ_EXPR
10889 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10890 return fold_mult_zconjz (loc, type, arg1);
10891 if (flag_unsafe_math_optimizations
10892 && TREE_CODE (arg1) == CONJ_EXPR
10893 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10894 return fold_mult_zconjz (loc, type, arg0);
10896 if (flag_unsafe_math_optimizations)
10898 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10899 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10901 /* Optimizations of root(...)*root(...). */
10902 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10904 tree rootfn, arg;
10905 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10906 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10908 /* Optimize sqrt(x)*sqrt(x) as x. */
10909 if (BUILTIN_SQRT_P (fcode0)
10910 && operand_equal_p (arg00, arg10, 0)
10911 && ! HONOR_SNANS (TYPE_MODE (type)))
10912 return arg00;
10914 /* Optimize root(x)*root(y) as root(x*y). */
10915 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10916 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10917 return build_call_expr_loc (loc, rootfn, 1, arg);
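      /* Sketch (added annotation; needs -funsafe-math-optimizations):

             #include <math.h>
             double f (double x, double y) { return sqrt (x) * sqrt (y); }

         folds to sqrt (x * y), and sqrt (x) * sqrt (x) folds to plain x
         when SNaNs need not be honored.  */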
10920 /* Optimize expN(x)*expN(y) as expN(x+y). */
10921 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10923 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10924 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10925 CALL_EXPR_ARG (arg0, 0),
10926 CALL_EXPR_ARG (arg1, 0));
10927 return build_call_expr_loc (loc, expfn, 1, arg);
10930 /* Optimizations of pow(...)*pow(...). */
10931 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10932 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10933 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10935 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10936 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10937 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10938 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10940 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10941 if (operand_equal_p (arg01, arg11, 0))
10943 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10944 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10945 arg00, arg10);
10946 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10949 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10950 if (operand_equal_p (arg00, arg10, 0))
10952 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10953 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10954 arg01, arg11);
10955 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
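      /* Examples (added commentary; both rewrites assume
         -funsafe-math-optimizations):

             pow (x, y) * pow (z, y)  becomes  pow (x * z, y)
             pow (x, y) * pow (x, z)  becomes  pow (x, y + z)

         mirroring the exponent laws, which hold exactly only in real
         arithmetic.  */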
10959 /* Optimize tan(x)*cos(x) as sin(x). */
10960 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10961 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10962 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10963 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10964 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10965 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10966 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10967 CALL_EXPR_ARG (arg1, 0), 0))
10969 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10971 if (sinfn != NULL_TREE)
10972 return build_call_expr_loc (loc, sinfn, 1,
10973 CALL_EXPR_ARG (arg0, 0));
10976 /* Optimize x*pow(x,c) as pow(x,c+1). */
10977 if (fcode1 == BUILT_IN_POW
10978 || fcode1 == BUILT_IN_POWF
10979 || fcode1 == BUILT_IN_POWL)
10981 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10982 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10983 if (TREE_CODE (arg11) == REAL_CST
10984 && !TREE_OVERFLOW (arg11)
10985 && operand_equal_p (arg0, arg10, 0))
10987 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10988 REAL_VALUE_TYPE c;
10989 tree arg;
10991 c = TREE_REAL_CST (arg11);
10992 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10993 arg = build_real (type, c);
10994 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10998 /* Optimize pow(x,c)*x as pow(x,c+1). */
10999 if (fcode0 == BUILT_IN_POW
11000 || fcode0 == BUILT_IN_POWF
11001 || fcode0 == BUILT_IN_POWL)
11003 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11004 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11005 if (TREE_CODE (arg01) == REAL_CST
11006 && !TREE_OVERFLOW (arg01)
11007 && operand_equal_p (arg1, arg00, 0))
11009 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11010 REAL_VALUE_TYPE c;
11011 tree arg;
11013 c = TREE_REAL_CST (arg01);
11014 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11015 arg = build_real (type, c);
11016 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11020 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11021 if (!in_gimple_form
11022 && optimize
11023 && operand_equal_p (arg0, arg1, 0))
11025 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11027 if (powfn)
11029 tree arg = build_real (type, dconst2);
11030 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11035 goto associate;
11037 case BIT_IOR_EXPR:
11038 bit_ior:
11039 if (integer_all_onesp (arg1))
11040 return omit_one_operand_loc (loc, type, arg1, arg0);
11041 if (integer_zerop (arg1))
11042 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11043 if (operand_equal_p (arg0, arg1, 0))
11044 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11046 /* ~X | X is -1. */
11047 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11048 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11050 t1 = build_zero_cst (type);
11051 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11052 return omit_one_operand_loc (loc, type, t1, arg1);
11055 /* X | ~X is -1. */
11056 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11057 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11059 t1 = build_zero_cst (type);
11060 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11061 return omit_one_operand_loc (loc, type, t1, arg0);
11064 /* Canonicalize (X & C1) | C2. */
11065 if (TREE_CODE (arg0) == BIT_AND_EXPR
11066 && TREE_CODE (arg1) == INTEGER_CST
11067 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11069 double_int c1, c2, c3, msk;
11070 int width = TYPE_PRECISION (type), w;
11071 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11072 c2 = tree_to_double_int (arg1);
11074 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11075 if ((c1 & c2) == c1)
11076 return omit_one_operand_loc (loc, type, arg1,
11077 TREE_OPERAND (arg0, 0));
11079 msk = double_int::mask (width);
11081 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11082 if (msk.and_not (c1 | c2).is_zero ())
11083 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11084 TREE_OPERAND (arg0, 0), arg1);
11086 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11087 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11088 mode which allows further optimizations. */
11089 c1 &= msk;
11090 c2 &= msk;
11091 c3 = c1.and_not (c2);
11092 for (w = BITS_PER_UNIT;
11093 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11094 w <<= 1)
11096 unsigned HOST_WIDE_INT mask
11097 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11098 if (((c1.low | c2.low) & mask) == mask
11099 && (c1.low & ~mask) == 0 && c1.high == 0)
11101 c3 = double_int::from_uhwi (mask);
11102 break;
11105 if (c3 != c1)
11106 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11107 fold_build2_loc (loc, BIT_AND_EXPR, type,
11108 TREE_OPERAND (arg0, 0),
11109 double_int_to_tree (type,
11110 c3)),
11111 arg1);
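      /* Worked example (added annotation): for (x & 0xf0) | 0x3c,
         C1 & C2 == 0x30 != C1 and C1 | C2 is not all ones, so the
         minimization step computes C1 & ~C2 == 0xc0 and canonicalizes
         the expression to (x & 0xc0) | 0x3c: bits 2-5 are forced to 1
         by C2, so keeping them in the AND mask is redundant.  */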
11114 /* (X & Y) | Y is (X, Y). */
11115 if (TREE_CODE (arg0) == BIT_AND_EXPR
11116 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11117 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11118 /* (X & Y) | X is (Y, X). */
11119 if (TREE_CODE (arg0) == BIT_AND_EXPR
11120 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11121 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11122 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11123 /* X | (X & Y) is (Y, X). */
11124 if (TREE_CODE (arg1) == BIT_AND_EXPR
11125 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11126 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11127 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11128 /* X | (Y & X) is (Y, X). */
11129 if (TREE_CODE (arg1) == BIT_AND_EXPR
11130 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11131 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11132 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11134 /* (X & ~Y) | (~X & Y) is X ^ Y */
11135 if (TREE_CODE (arg0) == BIT_AND_EXPR
11136 && TREE_CODE (arg1) == BIT_AND_EXPR)
11138 tree a0, a1, l0, l1, n0, n1;
11140 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11141 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11143 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11144 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11146 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11147 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11149 if ((operand_equal_p (n0, a0, 0)
11150 && operand_equal_p (n1, a1, 0))
11151 || (operand_equal_p (n0, a1, 0)
11152 && operand_equal_p (n1, a0, 0)))
11153 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
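      /* Illustration (added, not original):

             unsigned f (unsigned x, unsigned y)
             { return (x & ~y) | (~x & y); }

         folds to x ^ y, the sum-of-products expansion of exclusive or;
         the n0/n1 comparisons above also catch the commuted forms.  */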
11156 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11157 if (t1 != NULL_TREE)
11158 return t1;
11160 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11162 This results in more efficient code for machines without a NAND
11163 instruction. Combine will canonicalize to the first form
11164 which will allow use of NAND instructions provided by the
11165 backend if they exist. */
11166 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11167 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11169 return
11170 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11171 build2 (BIT_AND_EXPR, type,
11172 fold_convert_loc (loc, type,
11173 TREE_OPERAND (arg0, 0)),
11174 fold_convert_loc (loc, type,
11175 TREE_OPERAND (arg1, 0))));
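      /* Sketch (added annotation): De Morgan in tree form,

             unsigned f (unsigned a, unsigned b) { return ~a | ~b; }

         folds to ~(a & b), trading two BIT_NOT_EXPRs for one.  */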
11178 /* See if this can be simplified into a rotate first. If that
11179 is unsuccessful continue in the association code. */
11180 goto bit_rotate;
11182 case BIT_XOR_EXPR:
11183 if (integer_zerop (arg1))
11184 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11185 if (integer_all_onesp (arg1))
11186 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11187 if (operand_equal_p (arg0, arg1, 0))
11188 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11190 /* ~X ^ X is -1. */
11191 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11192 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11194 t1 = build_zero_cst (type);
11195 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11196 return omit_one_operand_loc (loc, type, t1, arg1);
11199 /* X ^ ~X is -1. */
11200 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11201 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11203 t1 = build_zero_cst (type);
11204 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11205 return omit_one_operand_loc (loc, type, t1, arg0);
11208 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11209 with a constant, and the two constants have no bits in common,
11210 we should treat this as a BIT_IOR_EXPR since this may produce more
11211 simplifications. */
11212 if (TREE_CODE (arg0) == BIT_AND_EXPR
11213 && TREE_CODE (arg1) == BIT_AND_EXPR
11214 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11215 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11216 && integer_zerop (const_binop (BIT_AND_EXPR,
11217 TREE_OPERAND (arg0, 1),
11218 TREE_OPERAND (arg1, 1))))
11220 code = BIT_IOR_EXPR;
11221 goto bit_ior;
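      /* Example (added commentary): 0xf0 and 0x0f share no bits, so

             unsigned f (unsigned x, unsigned y)
             { return (x & 0xf0) ^ (y & 0x0f); }

         is treated as (x & 0xf0) | (y & 0x0f), exposing the
         BIT_IOR_EXPR simplifications above.  */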
 11224       /* (X | Y) ^ X -> Y & ~X.  */
11225 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11226 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11228 tree t2 = TREE_OPERAND (arg0, 1);
11229 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11230 arg1);
11231 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11232 fold_convert_loc (loc, type, t2),
11233 fold_convert_loc (loc, type, t1));
11234 return t1;
 11237       /* (Y | X) ^ X -> Y & ~X.  */
11238 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11239 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11241 tree t2 = TREE_OPERAND (arg0, 0);
11242 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11243 arg1);
11244 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11245 fold_convert_loc (loc, type, t2),
11246 fold_convert_loc (loc, type, t1));
11247 return t1;
 11250       /* X ^ (X | Y) -> Y & ~X.  */
11251 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11252 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11254 tree t2 = TREE_OPERAND (arg1, 1);
11255 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11256 arg0);
11257 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11258 fold_convert_loc (loc, type, t2),
11259 fold_convert_loc (loc, type, t1));
11260 return t1;
 11263       /* X ^ (Y | X) -> Y & ~X.  */
11264 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11265 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11267 tree t2 = TREE_OPERAND (arg1, 0);
11268 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11269 arg0);
11270 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11271 fold_convert_loc (loc, type, t2),
11272 fold_convert_loc (loc, type, t1));
11273 return t1;
11276 /* Convert ~X ^ ~Y to X ^ Y. */
11277 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11278 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11279 return fold_build2_loc (loc, code, type,
11280 fold_convert_loc (loc, type,
11281 TREE_OPERAND (arg0, 0)),
11282 fold_convert_loc (loc, type,
11283 TREE_OPERAND (arg1, 0)));
11285 /* Convert ~X ^ C to X ^ ~C. */
11286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11287 && TREE_CODE (arg1) == INTEGER_CST)
11288 return fold_build2_loc (loc, code, type,
11289 fold_convert_loc (loc, type,
11290 TREE_OPERAND (arg0, 0)),
11291 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11293 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11294 if (TREE_CODE (arg0) == BIT_AND_EXPR
11295 && integer_onep (TREE_OPERAND (arg0, 1))
11296 && integer_onep (arg1))
11297 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11298 build_zero_cst (TREE_TYPE (arg0)));
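      /* Illustration (added, not in the original file): flipping the
         low bit and testing it is an inverted test, so

             int f (int x) { return (x & 1) ^ 1; }

         folds to (x & 1) == 0.  */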
11300 /* Fold (X & Y) ^ Y as ~X & Y. */
11301 if (TREE_CODE (arg0) == BIT_AND_EXPR
11302 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11304 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11305 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11307 fold_convert_loc (loc, type, arg1));
11309 /* Fold (X & Y) ^ X as ~Y & X. */
11310 if (TREE_CODE (arg0) == BIT_AND_EXPR
11311 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11312 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11314 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11315 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11316 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11317 fold_convert_loc (loc, type, arg1));
11319 /* Fold X ^ (X & Y) as X & ~Y. */
11320 if (TREE_CODE (arg1) == BIT_AND_EXPR
11321 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11323 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11324 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11325 fold_convert_loc (loc, type, arg0),
11326 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11328 /* Fold X ^ (Y & X) as ~Y & X. */
11329 if (TREE_CODE (arg1) == BIT_AND_EXPR
11330 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11331 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11333 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11334 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11335 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11336 fold_convert_loc (loc, type, arg0));
11339 /* See if this can be simplified into a rotate first. If that
11340 is unsuccessful continue in the association code. */
11341 goto bit_rotate;
11343 case BIT_AND_EXPR:
11344 if (integer_all_onesp (arg1))
11345 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11346 if (integer_zerop (arg1))
11347 return omit_one_operand_loc (loc, type, arg1, arg0);
11348 if (operand_equal_p (arg0, arg1, 0))
11349 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11351 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11352 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11353 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11354 || (TREE_CODE (arg0) == EQ_EXPR
11355 && integer_zerop (TREE_OPERAND (arg0, 1))))
11356 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11357 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11359 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11360 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11361 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11362 || (TREE_CODE (arg1) == EQ_EXPR
11363 && integer_zerop (TREE_OPERAND (arg1, 1))))
11364 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11365 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11367 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11368 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11369 && TREE_CODE (arg1) == INTEGER_CST
11370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11372 tree tmp1 = fold_convert_loc (loc, type, arg1);
11373 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11374 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11375 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11376 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11377 return
11378 fold_convert_loc (loc, type,
11379 fold_build2_loc (loc, BIT_IOR_EXPR,
11380 type, tmp2, tmp3));
11383 /* (X | Y) & Y is (X, Y). */
11384 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11385 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11386 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11387 /* (X | Y) & X is (Y, X). */
11388 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11389 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11390 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11391 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11392 /* X & (X | Y) is (Y, X). */
11393 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11394 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11395 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11396 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11397 /* X & (Y | X) is (Y, X). */
11398 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11400 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11401 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11403 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11404 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11405 && integer_onep (TREE_OPERAND (arg0, 1))
11406 && integer_onep (arg1))
11408 tree tem2;
11409 tem = TREE_OPERAND (arg0, 0);
11410 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11411 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11412 tem, tem2);
11413 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11414 build_zero_cst (TREE_TYPE (tem)));
11416 /* Fold ~X & 1 as (X & 1) == 0. */
11417 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11418 && integer_onep (arg1))
11420 tree tem2;
11421 tem = TREE_OPERAND (arg0, 0);
11422 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11423 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11424 tem, tem2);
11425 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11426 build_zero_cst (TREE_TYPE (tem)));
11428 /* Fold !X & 1 as X == 0. */
11429 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11430 && integer_onep (arg1))
11432 tem = TREE_OPERAND (arg0, 0);
11433 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11434 build_zero_cst (TREE_TYPE (tem)));
11437 /* Fold (X ^ Y) & Y as ~X & Y. */
11438 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11439 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11441 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11442 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11443 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11444 fold_convert_loc (loc, type, arg1));
11446 /* Fold (X ^ Y) & X as ~Y & X. */
11447 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11449 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11451 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11452 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11453 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11454 fold_convert_loc (loc, type, arg1));
11456 /* Fold X & (X ^ Y) as X & ~Y. */
11457 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11458 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11460 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11461 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11462 fold_convert_loc (loc, type, arg0),
11463 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11465 /* Fold X & (Y ^ X) as ~Y & X. */
11466 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11468 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11470 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11471 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11472 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11473 fold_convert_loc (loc, type, arg0));
11476 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11477 multiple of 1 << CST. */
11478 if (TREE_CODE (arg1) == INTEGER_CST)
11480 double_int cst1 = tree_to_double_int (arg1);
11481 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11482 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11483 if ((cst1 & ncst1) == ncst1
11484 && multiple_of_p (type, arg0,
11485 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11486 return fold_convert_loc (loc, type, arg0);
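      /* Worked example (added annotation): with CST == 3,
         -(1 << 3) == -8 == ~7, and

             int f (int x) { return (x * 8) & -8; }

         folds to x * 8, because the product already has its low three
         bits clear.  */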
11489 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11490 bits from CST2. */
11491 if (TREE_CODE (arg1) == INTEGER_CST
11492 && TREE_CODE (arg0) == MULT_EXPR
11493 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11495 int arg1tz
11496 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11497 if (arg1tz > 0)
11499 double_int arg1mask, masked;
11500 arg1mask = ~double_int::mask (arg1tz);
11501 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11502 TYPE_UNSIGNED (type));
11503 masked = arg1mask & tree_to_double_int (arg1);
11504 if (masked.is_zero ())
11505 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11506 arg0, arg1);
11507 else if (masked != tree_to_double_int (arg1))
11508 return fold_build2_loc (loc, code, type, op0,
11509 double_int_to_tree (type, masked));
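      /* Worked example (added commentary): CST1 == 4 has two trailing
         zero bits, so (x * 4) & 3 folds to 0 (masked == 0), while
         (x * 4) & 7 drops the known-zero bits and folds to
         (x * 4) & 4 (masked == 4 != 7).  */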
11513 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11514 ((A & N) + B) & M -> (A + B) & M
11515 Similarly if (N & M) == 0,
11516 ((A | N) + B) & M -> (A + B) & M
11517 and for - instead of + (or unary - instead of +)
11518 and/or ^ instead of |.
11519 If B is constant and (B & M) == 0, fold into A & M. */
11520 if (host_integerp (arg1, 1))
11522 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11523 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11524 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11525 && (TREE_CODE (arg0) == PLUS_EXPR
11526 || TREE_CODE (arg0) == MINUS_EXPR
11527 || TREE_CODE (arg0) == NEGATE_EXPR)
11528 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11529 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11531 tree pmop[2];
11532 int which = 0;
11533 unsigned HOST_WIDE_INT cst0;
11535 /* Now we know that arg0 is (C + D) or (C - D) or
 11536            -C, and arg1 (M) == (1LL << cst) - 1.
11537 Store C into PMOP[0] and D into PMOP[1]. */
11538 pmop[0] = TREE_OPERAND (arg0, 0);
11539 pmop[1] = NULL;
11540 if (TREE_CODE (arg0) != NEGATE_EXPR)
11542 pmop[1] = TREE_OPERAND (arg0, 1);
11543 which = 1;
11546 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11547 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11548 & cst1) != cst1)
11549 which = -1;
11551 for (; which >= 0; which--)
11552 switch (TREE_CODE (pmop[which]))
11554 case BIT_AND_EXPR:
11555 case BIT_IOR_EXPR:
11556 case BIT_XOR_EXPR:
11557 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11558 != INTEGER_CST)
11559 break;
11560 /* tree_low_cst not used, because we don't care about
11561 the upper bits. */
11562 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11563 cst0 &= cst1;
11564 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11566 if (cst0 != cst1)
11567 break;
11569 else if (cst0 != 0)
11570 break;
11571 /* If C or D is of the form (A & N) where
11572 (N & M) == M, or of the form (A | N) or
11573 (A ^ N) where (N & M) == 0, replace it with A. */
11574 pmop[which] = TREE_OPERAND (pmop[which], 0);
11575 break;
11576 case INTEGER_CST:
11577 /* If C or D is a N where (N & M) == 0, it can be
11578 omitted (assumed 0). */
11579 if ((TREE_CODE (arg0) == PLUS_EXPR
11580 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11581 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11582 pmop[which] = NULL;
11583 break;
11584 default:
11585 break;
11588 /* Only build anything new if we optimized one or both arguments
11589 above. */
11590 if (pmop[0] != TREE_OPERAND (arg0, 0)
11591 || (TREE_CODE (arg0) != NEGATE_EXPR
11592 && pmop[1] != TREE_OPERAND (arg0, 1)))
11594 tree utype = TREE_TYPE (arg0);
11595 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11597 /* Perform the operations in a type that has defined
11598 overflow behavior. */
11599 utype = unsigned_type_for (TREE_TYPE (arg0));
11600 if (pmop[0] != NULL)
11601 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11602 if (pmop[1] != NULL)
11603 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11606 if (TREE_CODE (arg0) == NEGATE_EXPR)
11607 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11608 else if (TREE_CODE (arg0) == PLUS_EXPR)
11610 if (pmop[0] != NULL && pmop[1] != NULL)
11611 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11612 pmop[0], pmop[1]);
11613 else if (pmop[0] != NULL)
11614 tem = pmop[0];
11615 else if (pmop[1] != NULL)
11616 tem = pmop[1];
11617 else
11618 return build_int_cst (type, 0);
11620 else if (pmop[0] == NULL)
11621 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11622 else
11623 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11624 pmop[0], pmop[1]);
11625 /* TEM is now the new binary +, - or unary - replacement. */
11626 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11627 fold_convert_loc (loc, utype, arg1));
11628 return fold_convert_loc (loc, type, tem);
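      /* Worked example (added annotation): with M == 3 and N == 7,
         (N & M) == M, hence

             int f (int a, int b) { return ((a & 7) + b) & 3; }

         folds to (a + b) & 3; likewise ((a | 8) + b) & 7 becomes
         (a + b) & 7 since 8 & 7 == 0.  When overflow is not known to
         wrap, the rewritten arithmetic is done in an unsigned type.  */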
11633 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11634 if (t1 != NULL_TREE)
11635 return t1;
11636 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11637 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11638 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11640 unsigned int prec
11641 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11643 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11644 && (~TREE_INT_CST_LOW (arg1)
11645 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11646 return
11647 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11650 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11652 This results in more efficient code for machines without a NOR
11653 instruction. Combine will canonicalize to the first form
11654 which will allow use of NOR instructions provided by the
11655 backend if they exist. */
11656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11657 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11659 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11660 build2 (BIT_IOR_EXPR, type,
11661 fold_convert_loc (loc, type,
11662 TREE_OPERAND (arg0, 0)),
11663 fold_convert_loc (loc, type,
11664 TREE_OPERAND (arg1, 0))));
11667 /* If arg0 is derived from the address of an object or function, we may
11668 be able to fold this expression using the object or function's
11669 alignment. */
11670 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11672 unsigned HOST_WIDE_INT modulus, residue;
11673 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11675 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11676 integer_onep (arg1));
11678 /* This works because modulus is a power of 2. If this weren't the
11679 case, we'd have to replace it by its greatest power-of-2
11680 divisor: modulus & -modulus. */
11681 if (low < modulus)
11682 return build_int_cst (type, residue & low);
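      /* Sketch (added, target-dependent): for an object with known
         alignment, e.g.

             static int v[4];
             long f (void) { return (long) &v & 3; }

         modulus is at least 4 and residue is 0 on a typical target, so
         the AND folds to the constant 0.  */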
11685 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11686 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11687 if the new mask might be further optimized. */
11688 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11689 || TREE_CODE (arg0) == RSHIFT_EXPR)
11690 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11691 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11692 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11693 < TYPE_PRECISION (TREE_TYPE (arg0))
11694 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11695 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11697 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11698 unsigned HOST_WIDE_INT mask
11699 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11700 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11701 tree shift_type = TREE_TYPE (arg0);
11703 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11704 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11705 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11706 && TYPE_PRECISION (TREE_TYPE (arg0))
11707 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11709 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11710 tree arg00 = TREE_OPERAND (arg0, 0);
11711 /* See if more bits can be proven as zero because of
11712 zero extension. */
11713 if (TREE_CODE (arg00) == NOP_EXPR
11714 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11716 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11717 if (TYPE_PRECISION (inner_type)
11718 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11719 && TYPE_PRECISION (inner_type) < prec)
11721 prec = TYPE_PRECISION (inner_type);
11722 /* See if we can shorten the right shift. */
11723 if (shiftc < prec)
11724 shift_type = inner_type;
11727 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11728 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11729 zerobits <<= prec - shiftc;
 11730           /* For an arithmetic shift, if the sign bit could be set, zerobits
 11731              can actually contain sign bits, so no transformation is
 11732              possible, unless MASK masks them all away.  In that
 11733              case the shift needs to be converted into a logical shift.  */
11734 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11735 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11737 if ((mask & zerobits) == 0)
11738 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11739 else
11740 zerobits = 0;
11744 /* ((X << 16) & 0xff00) is (X, 0). */
11745 if ((mask & zerobits) == mask)
11746 return omit_one_operand_loc (loc, type,
11747 build_int_cst (type, 0), arg0);
11749 newmask = mask | zerobits;
11750 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11752 unsigned int prec;
11754 /* Only do the transformation if NEWMASK is some integer
11755 mode's mask. */
11756 for (prec = BITS_PER_UNIT;
11757 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11758 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11759 break;
11760 if (prec < HOST_BITS_PER_WIDE_INT
11761 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11763 tree newmaskt;
11765 if (shift_type != TREE_TYPE (arg0))
11767 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11768 fold_convert_loc (loc, shift_type,
11769 TREE_OPERAND (arg0, 0)),
11770 TREE_OPERAND (arg0, 1));
11771 tem = fold_convert_loc (loc, type, tem);
11773 else
11774 tem = op0;
11775 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11776 if (!tree_int_cst_equal (newmaskt, arg1))
11777 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
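      /* Worked example (added annotation): for a 32-bit unsigned x,
         (x >> 4) & 0x0fffffff has zerobits == 0xf0000000, so
         newmask == 0xffffffff and the redundant mask is dropped,
         leaving just x >> 4; similarly (x << 16) & 0xff00 is known to
         be 0 by the check above.  */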
11782 goto associate;
11784 case RDIV_EXPR:
11785 /* Don't touch a floating-point divide by zero unless the mode
11786 of the constant can represent infinity. */
11787 if (TREE_CODE (arg1) == REAL_CST
11788 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11789 && real_zerop (arg1))
11790 return NULL_TREE;
11792 /* Optimize A / A to 1.0 if we don't care about
11793 NaNs or Infinities. Skip the transformation
11794 for non-real operands. */
11795 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11796 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11797 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11798 && operand_equal_p (arg0, arg1, 0))
11800 tree r = build_real (TREE_TYPE (arg0), dconst1);
11802 return omit_two_operands_loc (loc, type, r, arg0, arg1);
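      /* Illustration (added commentary; assumes e.g. -ffinite-math-only
         so that neither NaNs nor infinities are honored):

             double f (double x) { return x / x; }

         folds to 1.0, with omit_two_operands_loc preserving any side
         effects of the operand.  */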
11805 /* The complex version of the above A / A optimization. */
11806 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11807 && operand_equal_p (arg0, arg1, 0))
11809 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11810 if (! HONOR_NANS (TYPE_MODE (elem_type))
11811 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11813 tree r = build_real (elem_type, dconst1);
11814 /* omit_two_operands will call fold_convert for us. */
11815 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11819 /* (-A) / (-B) -> A / B */
11820 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11821 return fold_build2_loc (loc, RDIV_EXPR, type,
11822 TREE_OPERAND (arg0, 0),
11823 negate_expr (arg1));
11824 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11825 return fold_build2_loc (loc, RDIV_EXPR, type,
11826 negate_expr (arg0),
11827 TREE_OPERAND (arg1, 0));
11829 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11830 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11831 && real_onep (arg1))
11832 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11834 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11835 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11836 && real_minus_onep (arg1))
11837 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11838 negate_expr (arg0)));
11840 /* If ARG1 is a constant, we can convert this to a multiply by the
 11841        so only do this if -freciprocal-math is enabled.  We can actually
 11842        reciprocal.  This does not have the same rounding properties,
11843 always safely do it if ARG1 is a power of two, but it's hard to
11844 tell if it is or not in a portable manner. */
11845 if (optimize
11846 && (TREE_CODE (arg1) == REAL_CST
11847 || (TREE_CODE (arg1) == COMPLEX_CST
11848 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11849 || (TREE_CODE (arg1) == VECTOR_CST
11850 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11852 if (flag_reciprocal_math
11853 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11854 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11855 /* Find the reciprocal if optimizing and the result is exact.
11856 TODO: Complex reciprocal not implemented. */
11857 if (TREE_CODE (arg1) != COMPLEX_CST)
11859 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11861 if (inverse)
11862 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
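      /* Examples (added annotation): with -freciprocal-math, x / 5.0
         becomes x * 0.2; independently of that flag, x / 4.0 becomes
         x * 0.25 when optimizing, because exact_inverse proves the
         reciprocal of a power of two is exact.  */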
11865 /* Convert A/B/C to A/(B*C). */
11866 if (flag_reciprocal_math
11867 && TREE_CODE (arg0) == RDIV_EXPR)
11868 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11869 fold_build2_loc (loc, MULT_EXPR, type,
11870 TREE_OPERAND (arg0, 1), arg1));
11872 /* Convert A/(B/C) to (A/B)*C. */
11873 if (flag_reciprocal_math
11874 && TREE_CODE (arg1) == RDIV_EXPR)
11875 return fold_build2_loc (loc, MULT_EXPR, type,
11876 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11877 TREE_OPERAND (arg1, 0)),
11878 TREE_OPERAND (arg1, 1));
11880 /* Convert C1/(X*C2) into (C1/C2)/X. */
11881 if (flag_reciprocal_math
11882 && TREE_CODE (arg1) == MULT_EXPR
11883 && TREE_CODE (arg0) == REAL_CST
11884 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11886 tree tem = const_binop (RDIV_EXPR, arg0,
11887 TREE_OPERAND (arg1, 1));
11888 if (tem)
11889 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11890 TREE_OPERAND (arg1, 0));
11893 if (flag_unsafe_math_optimizations)
11895 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11896 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11898 /* Optimize sin(x)/cos(x) as tan(x). */
11899 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11900 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11901 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11902 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11903 CALL_EXPR_ARG (arg1, 0), 0))
11905 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11907 if (tanfn != NULL_TREE)
11908 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
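      /* Sketch (added; requires -funsafe-math-optimizations):

             #include <math.h>
             double f (double x) { return sin (x) / cos (x); }

         folds to tan (x); the transformations below handle cos/sin,
         sin/tan and tan/sin analogously.  */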
11911 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11912 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11913 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11914 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11915 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11916 CALL_EXPR_ARG (arg1, 0), 0))
11918 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11920 if (tanfn != NULL_TREE)
11922 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11923 CALL_EXPR_ARG (arg0, 0));
11924 return fold_build2_loc (loc, RDIV_EXPR, type,
11925 build_real (type, dconst1), tmp);
11929 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11930 NaNs or Infinities. */
11931 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11932 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11933 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11935 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11936 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11938 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11939 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11940 && operand_equal_p (arg00, arg01, 0))
11942 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11944 if (cosfn != NULL_TREE)
11945 return build_call_expr_loc (loc, cosfn, 1, arg00);
11949 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11950 NaNs or Infinities. */
11951 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11952 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11953 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11955 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11956 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11958 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11959 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11960 && operand_equal_p (arg00, arg01, 0))
11962 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11964 if (cosfn != NULL_TREE)
11966 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11967 return fold_build2_loc (loc, RDIV_EXPR, type,
11968 build_real (type, dconst1),
11969 tmp);
11974 /* Optimize pow(x,c)/x as pow(x,c-1). */
11975 if (fcode0 == BUILT_IN_POW
11976 || fcode0 == BUILT_IN_POWF
11977 || fcode0 == BUILT_IN_POWL)
11979 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11980 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11981 if (TREE_CODE (arg01) == REAL_CST
11982 && !TREE_OVERFLOW (arg01)
11983 && operand_equal_p (arg1, arg00, 0))
11985 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11986 REAL_VALUE_TYPE c;
11987 tree arg;
11989 c = TREE_REAL_CST (arg01);
11990 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11991 arg = build_real (type, c);
11992 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11996 /* Optimize a/root(b/c) into a*root(c/b). */
11997 if (BUILTIN_ROOT_P (fcode1))
11999 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12001 if (TREE_CODE (rootarg) == RDIV_EXPR)
12003 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12004 tree b = TREE_OPERAND (rootarg, 0);
12005 tree c = TREE_OPERAND (rootarg, 1);
12007 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12009 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12010 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12014 /* Optimize x/expN(y) into x*expN(-y). */
12015 if (BUILTIN_EXPONENT_P (fcode1))
12017 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12018 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12019 arg1 = build_call_expr_loc (loc,
12020 expfn, 1,
12021 fold_convert_loc (loc, type, arg));
12022 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12025 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12026 if (fcode1 == BUILT_IN_POW
12027 || fcode1 == BUILT_IN_POWF
12028 || fcode1 == BUILT_IN_POWL)
12030 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12031 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12032 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12033 tree neg11 = fold_convert_loc (loc, type,
12034 negate_expr (arg11));
12035 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12036 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
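      /* Examples (added commentary; -funsafe-math-optimizations):
         x / exp (y) becomes x * exp (-y), and x / pow (y, z) becomes
         x * pow (y, -z), replacing the division by a cheaper
         multiplication.  */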
12039 return NULL_TREE;
12041 case TRUNC_DIV_EXPR:
12042 /* Optimize (X & (-A)) / A where A is a power of 2,
12043 to X >> log2(A) */
12044 if (TREE_CODE (arg0) == BIT_AND_EXPR
12045 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12046 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12048 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12049 arg1, TREE_OPERAND (arg0, 1));
12050 if (sum && integer_zerop (sum)) {
12051 unsigned long pow2;
12053 if (TREE_INT_CST_LOW (arg1))
12054 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12055 else
12056 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12057 + HOST_BITS_PER_WIDE_INT;
12059 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12060 TREE_OPERAND (arg0, 0),
12061 build_int_cst (integer_type_node, pow2));
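      /* Worked example (added annotation): with A == 16,
         16 + (-16) folds to zero, so

             int f (int x) { return (x & -16) / 16; }

         folds to x >> 4; the masked value is already a multiple of 16,
         so the arithmetic shift computes the exact quotient.  */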
12065 /* Fall through */
12067 case FLOOR_DIV_EXPR:
12068 /* Simplify A / (B << N) where A and B are positive and B is
12069 a power of 2, to A >> (N + log2(B)). */
12070 strict_overflow_p = false;
12071 if (TREE_CODE (arg1) == LSHIFT_EXPR
12072 && (TYPE_UNSIGNED (type)
12073 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12075 tree sval = TREE_OPERAND (arg1, 0);
12076 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12078 tree sh_cnt = TREE_OPERAND (arg1, 1);
12079 unsigned long pow2;
12081 if (TREE_INT_CST_LOW (sval))
12082 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12083 else
12084 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12085 + HOST_BITS_PER_WIDE_INT;
12087 if (strict_overflow_p)
12088 fold_overflow_warning (("assuming signed overflow does not "
12089 "occur when simplifying A / (B << N)"),
12090 WARN_STRICT_OVERFLOW_MISC);
12092 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12093 sh_cnt,
12094 build_int_cst (TREE_TYPE (sh_cnt),
12095 pow2));
12096 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12097 fold_convert_loc (loc, type, arg0), sh_cnt);
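      /* Sketch (added, not original):

             unsigned f (unsigned a, unsigned n) { return a / (4u << n); }

         folds to a >> (n + 2), since 4u << n is 1 shifted left by
         n + log2 (4).  */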
12101 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12102 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12103 if (INTEGRAL_TYPE_P (type)
12104 && TYPE_UNSIGNED (type)
12105 && code == FLOOR_DIV_EXPR)
12106 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12108 /* Fall through */
12110 case ROUND_DIV_EXPR:
12111 case CEIL_DIV_EXPR:
12112 case EXACT_DIV_EXPR:
12113 if (integer_onep (arg1))
12114 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12115 if (integer_zerop (arg1))
12116 return NULL_TREE;
12117 /* X / -1 is -X. */
12118 if (!TYPE_UNSIGNED (type)
12119 && TREE_CODE (arg1) == INTEGER_CST
12120 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12121 && TREE_INT_CST_HIGH (arg1) == -1)
12122 return fold_convert_loc (loc, type, negate_expr (arg0));
12124 /* Convert -A / -B to A / B when the type is signed and overflow is
12125 undefined. */
12126 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12127 && TREE_CODE (arg0) == NEGATE_EXPR
12128 && negate_expr_p (arg1))
12130 if (INTEGRAL_TYPE_P (type))
12131 fold_overflow_warning (("assuming signed overflow does not occur "
12132 "when distributing negation across "
12133 "division"),
12134 WARN_STRICT_OVERFLOW_MISC);
12135 return fold_build2_loc (loc, code, type,
12136 fold_convert_loc (loc, type,
12137 TREE_OPERAND (arg0, 0)),
12138 fold_convert_loc (loc, type,
12139 negate_expr (arg1)));
12141 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12142 && TREE_CODE (arg1) == NEGATE_EXPR
12143 && negate_expr_p (arg0))
12145 if (INTEGRAL_TYPE_P (type))
12146 fold_overflow_warning (("assuming signed overflow does not occur "
12147 "when distributing negation across "
12148 "division"),
12149 WARN_STRICT_OVERFLOW_MISC);
12150 return fold_build2_loc (loc, code, type,
12151 fold_convert_loc (loc, type,
12152 negate_expr (arg0)),
12153 fold_convert_loc (loc, type,
12154 TREE_OPERAND (arg1, 0)));
12157 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12158 operation, EXACT_DIV_EXPR.
12160 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
 12161        At one time others generated faster code; it's not clear whether they
 12162        still do after the last round of changes to the DIV code in expmed.c.  */
12163 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12164 && multiple_of_p (type, arg0, arg1))
12165 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12167 strict_overflow_p = false;
12168 if (TREE_CODE (arg1) == INTEGER_CST
12169 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12170 &strict_overflow_p)))
12172 if (strict_overflow_p)
12173 fold_overflow_warning (("assuming signed overflow does not occur "
12174 "when simplifying division"),
12175 WARN_STRICT_OVERFLOW_MISC);
12176 return fold_convert_loc (loc, type, tem);
12179 return NULL_TREE;
12181 case CEIL_MOD_EXPR:
12182 case FLOOR_MOD_EXPR:
12183 case ROUND_MOD_EXPR:
12184 case TRUNC_MOD_EXPR:
12185 /* X % 1 is always zero, but be sure to preserve any side
12186 effects in X. */
12187 if (integer_onep (arg1))
12188 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
 12190       /* For X % 0, return X % 0 unchanged so that we get the
12191 proper warnings and errors. */
12192 if (integer_zerop (arg1))
12193 return NULL_TREE;
12195 /* 0 % X is always zero, but be sure to preserve any side
12196 effects in X. Place this after checking for X == 0. */
12197 if (integer_zerop (arg0))
12198 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12200 /* X % -1 is zero. */
12201 if (!TYPE_UNSIGNED (type)
12202 && TREE_CODE (arg1) == INTEGER_CST
12203 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12204 && TREE_INT_CST_HIGH (arg1) == -1)
12205 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12207 /* X % -C is the same as X % C. */
12208 if (code == TRUNC_MOD_EXPR
12209 && !TYPE_UNSIGNED (type)
12210 && TREE_CODE (arg1) == INTEGER_CST
12211 && !TREE_OVERFLOW (arg1)
12212 && TREE_INT_CST_HIGH (arg1) < 0
12213 && !TYPE_OVERFLOW_TRAPS (type)
12214 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12215 && !sign_bit_p (arg1, arg1))
12216 return fold_build2_loc (loc, code, type,
12217 fold_convert_loc (loc, type, arg0),
12218 fold_convert_loc (loc, type,
12219 negate_expr (arg1)));
12221 /* X % -Y is the same as X % Y. */
12222 if (code == TRUNC_MOD_EXPR
12223 && !TYPE_UNSIGNED (type)
12224 && TREE_CODE (arg1) == NEGATE_EXPR
12225 && !TYPE_OVERFLOW_TRAPS (type))
12226 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12227 fold_convert_loc (loc, type,
12228 TREE_OPERAND (arg1, 0)));
12230 strict_overflow_p = false;
12231 if (TREE_CODE (arg1) == INTEGER_CST
12232 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12233 &strict_overflow_p)))
12235 if (strict_overflow_p)
12236 fold_overflow_warning (("assuming signed overflow does not occur "
12237 "when simplifying modulus"),
12238 WARN_STRICT_OVERFLOW_MISC);
12239 return fold_convert_loc (loc, type, tem);
12242 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12243 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12244 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12245 && (TYPE_UNSIGNED (type)
12246 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12248 tree c = arg1;
12249 /* Also optimize A % (C << N) where C is a power of 2,
12250 to A & ((C << N) - 1). */
12251 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12252 c = TREE_OPERAND (arg1, 0);
12254 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12256 tree mask
12257 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12258 build_int_cst (TREE_TYPE (arg1), 1));
12259 if (strict_overflow_p)
12260 fold_overflow_warning (("assuming signed overflow does not "
12261 "occur when simplifying "
12262 "X % (power of two)"),
12263 WARN_STRICT_OVERFLOW_MISC);
12264 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12265 fold_convert_loc (loc, type, arg0),
12266 fold_convert_loc (loc, type, mask));
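/* Concrete sketch (assuming X is nonnegative): X % 16 folds to
   X & 15, e.g. 37 % 16 == 5 == 37 & 15.  The LSHIFT_EXPR form
   accepted above handles e.g. A % (4 << N), which becomes
   A & ((4 << N) - 1).  */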
12270 return NULL_TREE;
12272 case LROTATE_EXPR:
12273 case RROTATE_EXPR:
12274 if (integer_all_onesp (arg0))
12275 return omit_one_operand_loc (loc, type, arg0, arg1);
12276 goto shift;
12278 case RSHIFT_EXPR:
12279 /* Optimize -1 >> x for arithmetic right shifts. */
12280 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12281 && tree_expr_nonnegative_p (arg1))
12282 return omit_one_operand_loc (loc, type, arg0, arg1);
12283 /* ... fall through ... */
12285 case LSHIFT_EXPR:
12286 shift:
12287 if (integer_zerop (arg1))
12288 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12289 if (integer_zerop (arg0))
12290 return omit_one_operand_loc (loc, type, arg0, arg1);
12292 /* Since a negative shift count is not well-defined,
12293 don't try to compute it in the compiler. */
12294 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12295 return NULL_TREE;
12297 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12298 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12299 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12300 && host_integerp (TREE_OPERAND (arg0, 1), false)
12301 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12303 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12304 + TREE_INT_CST_LOW (arg1));
12306 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12307 being well defined. */
12308 if (low >= TYPE_PRECISION (type))
12310 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12311 low = low % TYPE_PRECISION (type);
12312 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12313 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12314 TREE_OPERAND (arg0, 0));
12315 else
12316 low = TYPE_PRECISION (type) - 1;
12319 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12320 build_int_cst (type, low));
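/* Concrete sketch (assuming 32-bit int): (x >> 3) >> 5 folds to
   x >> 8.  When the counts sum to the precision or beyond, e.g.
   (x >> 20) >> 20, an unsigned x folds to 0 while a signed x folds
   to x >> 31 (all copies of the sign bit), matching the clamping
   of LOW above; rotate counts are instead reduced modulo 32.  */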
12323 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12324 into x & ((unsigned)-1 >> c) for unsigned types. */
12325 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12326 || (TYPE_UNSIGNED (type)
12327 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12328 && host_integerp (arg1, false)
12329 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12330 && host_integerp (TREE_OPERAND (arg0, 1), false)
12331 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12333 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12334 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12335 tree lshift;
12336 tree arg00;
12338 if (low0 == low1)
12340 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12342 lshift = build_int_cst (type, -1);
12343 lshift = int_const_binop (code, lshift, arg1);
12345 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
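/* Concrete sketch (assuming 32-bit values): (x >> 4) << 4 folds to
   x & (-1 << 4), i.e. x & 0xfffffff0, and for unsigned x the mirror
   case (x << 4) >> 4 folds to x & ((unsigned) -1 >> 4), i.e.
   x & 0x0fffffff; a single mask replaces the two shifts.  */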
12349 /* Rewrite an LROTATE_EXPR by a constant into an
12350 RROTATE_EXPR by a new constant. */
12351 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12353 tree tem = build_int_cst (TREE_TYPE (arg1),
12354 TYPE_PRECISION (type));
12355 tem = const_binop (MINUS_EXPR, tem, arg1);
12356 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
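/* Concrete sketch (assuming a 32-bit type): a rotate left by 5 is
   the same bit permutation as a rotate right by 32 - 5 == 27, so
   later code only ever has to handle RROTATE_EXPR by a constant.  */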
12359 /* If we have a rotate of a bit operation with the rotate count and
12360 the second operand of the bit operation both constant,
12361 permute the two operations. */
12362 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12363 && (TREE_CODE (arg0) == BIT_AND_EXPR
12364 || TREE_CODE (arg0) == BIT_IOR_EXPR
12365 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12367 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12368 fold_build2_loc (loc, code, type,
12369 TREE_OPERAND (arg0, 0), arg1),
12370 fold_build2_loc (loc, code, type,
12371 TREE_OPERAND (arg0, 1), arg1));
12373 /* Two consecutive rotates adding up to the precision of the
12374 type can be ignored. */
12375 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12376 && TREE_CODE (arg0) == RROTATE_EXPR
12377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12378 && TREE_INT_CST_HIGH (arg1) == 0
12379 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12380 && ((TREE_INT_CST_LOW (arg1)
12381 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12382 == (unsigned int) TYPE_PRECISION (type)))
12383 return TREE_OPERAND (arg0, 0);
12385 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12386 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12387 if the latter can be further optimized. */
12388 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12389 && TREE_CODE (arg0) == BIT_AND_EXPR
12390 && TREE_CODE (arg1) == INTEGER_CST
12391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12393 tree mask = fold_build2_loc (loc, code, type,
12394 fold_convert_loc (loc, type,
12395 TREE_OPERAND (arg0, 1)),
12396 arg1);
12397 tree shift = fold_build2_loc (loc, code, type,
12398 fold_convert_loc (loc, type,
12399 TREE_OPERAND (arg0, 0)),
12400 arg1);
12401 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12402 if (tem)
12403 return tem;
12406 return NULL_TREE;
12408 case MIN_EXPR:
12409 if (operand_equal_p (arg0, arg1, 0))
12410 return omit_one_operand_loc (loc, type, arg0, arg1);
12411 if (INTEGRAL_TYPE_P (type)
12412 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12413 return omit_one_operand_loc (loc, type, arg1, arg0);
12414 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12415 if (tem)
12416 return tem;
12417 goto associate;
12419 case MAX_EXPR:
12420 if (operand_equal_p (arg0, arg1, 0))
12421 return omit_one_operand_loc (loc, type, arg0, arg1);
12422 if (INTEGRAL_TYPE_P (type)
12423 && TYPE_MAX_VALUE (type)
12424 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12425 return omit_one_operand_loc (loc, type, arg1, arg0);
12426 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12427 if (tem)
12428 return tem;
12429 goto associate;
12431 case TRUTH_ANDIF_EXPR:
12432 /* Note that the operands of this must be ints
12433 and their values must be 0 or 1.
12434 ("true" is a fixed value perhaps depending on the language.) */
12435 /* If first arg is constant zero, return it. */
12436 if (integer_zerop (arg0))
12437 return fold_convert_loc (loc, type, arg0);
12438 case TRUTH_AND_EXPR:
12439 /* If either arg is constant true, drop it. */
12440 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12441 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12442 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12443 /* Preserve sequence points. */
12444 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12445 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12446 /* If second arg is constant zero, result is zero, but first arg
12447 must be evaluated. */
12448 if (integer_zerop (arg1))
12449 return omit_one_operand_loc (loc, type, arg1, arg0);
12450 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12451 case will be handled here. */
12452 if (integer_zerop (arg0))
12453 return omit_one_operand_loc (loc, type, arg0, arg1);
12455 /* !X && X is always false. */
12456 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12458 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12459 /* X && !X is always false. */
12460 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12461 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12462 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12464 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12465 means A >= Y && A != MAX, but in this case we know that
12466 A < X <= MAX. */
12468 if (!TREE_SIDE_EFFECTS (arg0)
12469 && !TREE_SIDE_EFFECTS (arg1))
12471 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12472 if (tem && !operand_equal_p (tem, arg0, 0))
12473 return fold_build2_loc (loc, code, type, tem, arg1);
12475 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12476 if (tem && !operand_equal_p (tem, arg1, 0))
12477 return fold_build2_loc (loc, code, type, arg0, tem);
12480 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12481 != NULL_TREE)
12482 return tem;
12484 return NULL_TREE;
12486 case TRUTH_ORIF_EXPR:
12487 /* Note that the operands of this must be ints
12488 and their values must be 0 or true.
12489 ("true" is a fixed value perhaps depending on the language.) */
12490 /* If first arg is constant true, return it. */
12491 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12492 return fold_convert_loc (loc, type, arg0);
12493 case TRUTH_OR_EXPR:
12494 /* If either arg is constant zero, drop it. */
12495 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12496 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12497 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12498 /* Preserve sequence points. */
12499 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12501 /* If second arg is constant true, result is true, but we must
12502 evaluate first arg. */
12503 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12504 return omit_one_operand_loc (loc, type, arg1, arg0);
12505 /* Likewise for first arg, but note this only occurs here for
12506 TRUTH_OR_EXPR. */
12507 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12508 return omit_one_operand_loc (loc, type, arg0, arg1);
12510 /* !X || X is always true. */
12511 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12512 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12513 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12514 /* X || !X is always true. */
12515 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12516 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12517 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12519 /* (X && !Y) || (!X && Y) is X ^ Y */
12520 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12521 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12523 tree a0, a1, l0, l1, n0, n1;
12525 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12526 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12528 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12529 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12531 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12532 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12534 if ((operand_equal_p (n0, a0, 0)
12535 && operand_equal_p (n1, a1, 0))
12536 || (operand_equal_p (n0, a1, 0)
12537 && operand_equal_p (n1, a0, 0)))
12538 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
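/* Concrete sketch: with X and Y restricted to 0/1, the expression
   (X && !Y) || (!X && Y) is 1 exactly on the rows X=1,Y=0 and
   X=0,Y=1 of the truth table, i.e. exactly when X != Y, which is
   X ^ Y.  */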
12541 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12542 != NULL_TREE)
12543 return tem;
12545 return NULL_TREE;
12547 case TRUTH_XOR_EXPR:
12548 /* If the second arg is constant zero, drop it. */
12549 if (integer_zerop (arg1))
12550 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12551 /* If the second arg is constant true, this is a logical inversion. */
12552 if (integer_onep (arg1))
12554 /* Only call invert_truthvalue if operand is a truth value. */
12555 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12556 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12557 else
12558 tem = invert_truthvalue_loc (loc, arg0);
12559 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12561 /* Identical arguments cancel to zero. */
12562 if (operand_equal_p (arg0, arg1, 0))
12563 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12565 /* !X ^ X is always true. */
12566 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12567 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12568 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12570 /* X ^ !X is always true. */
12571 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12572 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12573 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12575 return NULL_TREE;
12577 case EQ_EXPR:
12578 case NE_EXPR:
12579 STRIP_NOPS (arg0);
12580 STRIP_NOPS (arg1);
12582 tem = fold_comparison (loc, code, type, op0, op1);
12583 if (tem != NULL_TREE)
12584 return tem;
12586 /* bool_var != 0 becomes bool_var. */
12587 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12588 && code == NE_EXPR)
12589 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12591 /* bool_var == 1 becomes bool_var. */
12592 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12593 && code == EQ_EXPR)
12594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12596 /* bool_var != 1 becomes !bool_var. */
12597 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12598 && code == NE_EXPR)
12599 return fold_convert_loc (loc, type,
12600 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12601 TREE_TYPE (arg0), arg0));
12603 /* bool_var == 0 becomes !bool_var. */
12604 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12605 && code == EQ_EXPR)
12606 return fold_convert_loc (loc, type,
12607 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12608 TREE_TYPE (arg0), arg0));
12610 /* !exp != 0 becomes !exp */
12611 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12612 && code == NE_EXPR)
12613 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12615 /* If this is an equality comparison of the address of two non-weak,
12616 unaliased symbols neither of which is extern (since we do not
12617 have access to attributes for externs), then we know the result. */
12618 if (TREE_CODE (arg0) == ADDR_EXPR
12619 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12620 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12621 && ! lookup_attribute ("alias",
12622 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12623 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12624 && TREE_CODE (arg1) == ADDR_EXPR
12625 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12626 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12627 && ! lookup_attribute ("alias",
12628 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12629 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12631 /* We know that we're looking at the address of two
12632 non-weak, unaliased, static _DECL nodes.
12634 It is both wasteful and incorrect to call operand_equal_p
12635 to compare the two ADDR_EXPR nodes. It is wasteful in that
12636 all we need to do is test pointer equality for the arguments
12637 to the two ADDR_EXPR nodes. It is incorrect to use
12638 operand_equal_p as that function is NOT equivalent to a
12639 C equality test. It can in fact return false for two
12640 objects which would test as equal using the C equality
12641 operator. */
12642 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12643 return constant_boolean_node (equal
12644 ? code == EQ_EXPR : code != EQ_EXPR,
12645 type);
12648 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12649 a MINUS_EXPR of a constant, we can convert it into a comparison with
12650 a revised constant as long as no overflow occurs. */
12651 if (TREE_CODE (arg1) == INTEGER_CST
12652 && (TREE_CODE (arg0) == PLUS_EXPR
12653 || TREE_CODE (arg0) == MINUS_EXPR)
12654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12655 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12656 ? MINUS_EXPR : PLUS_EXPR,
12657 fold_convert_loc (loc, TREE_TYPE (arg0),
12658 arg1),
12659 TREE_OPERAND (arg0, 1)))
12660 && !TREE_OVERFLOW (tem))
12661 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12663 /* Similarly for a NEGATE_EXPR. */
12664 if (TREE_CODE (arg0) == NEGATE_EXPR
12665 && TREE_CODE (arg1) == INTEGER_CST
12666 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12667 arg1)))
12668 && TREE_CODE (tem) == INTEGER_CST
12669 && !TREE_OVERFLOW (tem))
12670 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12672 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12673 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12674 && TREE_CODE (arg1) == INTEGER_CST
12675 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12676 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12677 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12678 fold_convert_loc (loc,
12679 TREE_TYPE (arg0),
12680 arg1),
12681 TREE_OPERAND (arg0, 1)));
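/* Concrete sketch: (x ^ 5) == 3 folds to x == (5 ^ 3), i.e. x == 6,
   because XOR-ing both sides with 5 cancels the constant on the
   left: (x ^ 5) ^ 5 == x.  Check: 6 ^ 5 == 3.  */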
12683 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12684 if ((TREE_CODE (arg0) == PLUS_EXPR
12685 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12686 || TREE_CODE (arg0) == MINUS_EXPR)
12687 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12688 0)),
12689 arg1, 0)
12690 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12691 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12693 tree val = TREE_OPERAND (arg0, 1);
12694 return omit_two_operands_loc (loc, type,
12695 fold_build2_loc (loc, code, type,
12696 val,
12697 build_int_cst (TREE_TYPE (val),
12698 0)),
12699 TREE_OPERAND (arg0, 0), arg1);
12702 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12703 if (TREE_CODE (arg0) == MINUS_EXPR
12704 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12705 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12706 1)),
12707 arg1, 0)
12708 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12710 return omit_two_operands_loc (loc, type,
12711 code == NE_EXPR
12712 ? boolean_true_node : boolean_false_node,
12713 TREE_OPERAND (arg0, 1), arg1);
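/* Concrete sketch: 7 - x == x would require 2 * x == 7, and 2 * x
   is even in every two's-complement width while 7 is odd, so with
   an odd constant EQ folds to false and NE folds to true no matter
   what x is.  */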
12716 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12717 for !=. Don't do this for ordered comparisons due to overflow. */
12718 if (TREE_CODE (arg0) == MINUS_EXPR
12719 && integer_zerop (arg1))
12720 return fold_build2_loc (loc, code, type,
12721 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12723 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12724 if (TREE_CODE (arg0) == ABS_EXPR
12725 && (integer_zerop (arg1) || real_zerop (arg1)))
12726 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12728 /* If this is an EQ or NE comparison with zero and ARG0 is
12729 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12730 two operations, but the latter can be done in one less insn
12731 on machines that have only two-operand insns or on which a
12732 constant cannot be the first operand. */
12733 if (TREE_CODE (arg0) == BIT_AND_EXPR
12734 && integer_zerop (arg1))
12736 tree arg00 = TREE_OPERAND (arg0, 0);
12737 tree arg01 = TREE_OPERAND (arg0, 1);
12738 if (TREE_CODE (arg00) == LSHIFT_EXPR
12739 && integer_onep (TREE_OPERAND (arg00, 0)))
12741 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12742 arg01, TREE_OPERAND (arg00, 1));
12743 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12744 build_int_cst (TREE_TYPE (arg0), 1));
12745 return fold_build2_loc (loc, code, type,
12746 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12747 arg1);
12749 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12750 && integer_onep (TREE_OPERAND (arg01, 0)))
12752 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12753 arg00, TREE_OPERAND (arg01, 1));
12754 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12755 build_int_cst (TREE_TYPE (arg0), 1));
12756 return fold_build2_loc (loc, code, type,
12757 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12758 arg1);
12762 /* If this is an NE or EQ comparison of zero against the result of a
12763 signed MOD operation whose second operand is a power of 2, make
12764 the MOD operation unsigned since it is simpler and equivalent. */
12765 if (integer_zerop (arg1)
12766 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12767 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12768 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12769 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12770 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12771 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12773 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12774 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12775 fold_convert_loc (loc, newtype,
12776 TREE_OPERAND (arg0, 0)),
12777 fold_convert_loc (loc, newtype,
12778 TREE_OPERAND (arg0, 1)));
12780 return fold_build2_loc (loc, code, type, newmod,
12781 fold_convert_loc (loc, newtype, arg1));
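/* Concrete sketch (assuming 32-bit int): x % 4 == 0 holds exactly
   when the low two bits of x are zero, and that test is unchanged
   by reinterpreting x as unsigned, e.g. -4 % 4 == 0 and
   0xfffffffcU % 4 == 0.  The unsigned form then hits the
   mod-power-of-two fold above and lowers to a mask test.  */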
12784 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12785 C1 is a valid shift constant, and C2 is a power of two, i.e.
12786 a single bit. */
12787 if (TREE_CODE (arg0) == BIT_AND_EXPR
12788 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12789 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12790 == INTEGER_CST
12791 && integer_pow2p (TREE_OPERAND (arg0, 1))
12792 && integer_zerop (arg1))
12794 tree itype = TREE_TYPE (arg0);
12795 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12796 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12798 /* Check for a valid shift count. */
12799 if (TREE_INT_CST_HIGH (arg001) == 0
12800 && TREE_INT_CST_LOW (arg001) < prec)
12802 tree arg01 = TREE_OPERAND (arg0, 1);
12803 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12804 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12805 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12806 can be rewritten as (X & (C2 << C1)) != 0. */
12807 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12809 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12810 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12811 return fold_build2_loc (loc, code, type, tem,
12812 fold_convert_loc (loc, itype, arg1));
12814 /* Otherwise, for signed (arithmetic) shifts,
12815 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12816 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12817 else if (!TYPE_UNSIGNED (itype))
12818 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12819 arg000, build_int_cst (itype, 0));
12820 /* Otherwise, for unsigned (logical) shifts,
12821 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12822 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12823 else
12824 return omit_one_operand_loc (loc, type,
12825 code == EQ_EXPR ? integer_one_node
12826 : integer_zero_node,
12827 arg000);
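/* Concrete sketch (assuming 32-bit int): ((x >> 3) & 4) != 0 tests
   bit 5 of x, so it folds to (x & 32) != 0 using the shifted mask
   built above.  If the mask would shift out, e.g. ((x >> 30) & 4),
   the tested bit is a copy of the sign bit, so the signed case
   folds to x < 0 (for !=) or x >= 0 (for ==) instead.  */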
12831 /* If we have (A & C) == C where C is a power of 2, convert this into
12832 (A & C) != 0. Similarly for NE_EXPR. */
12833 if (TREE_CODE (arg0) == BIT_AND_EXPR
12834 && integer_pow2p (TREE_OPERAND (arg0, 1))
12835 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12836 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12837 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12838 integer_zero_node));
12840 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12841 bit, then fold the expression into A < 0 or A >= 0. */
12842 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12843 if (tem)
12844 return tem;
12846 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12847 Similarly for NE_EXPR. */
12848 if (TREE_CODE (arg0) == BIT_AND_EXPR
12849 && TREE_CODE (arg1) == INTEGER_CST
12850 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12852 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12853 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12854 TREE_OPERAND (arg0, 1));
12855 tree dandnotc
12856 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12857 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12858 notc);
12859 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12860 if (integer_nonzerop (dandnotc))
12861 return omit_one_operand_loc (loc, type, rslt, arg0);
12864 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12865 Similarly for NE_EXPR. */
12866 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12867 && TREE_CODE (arg1) == INTEGER_CST
12868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12870 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12871 tree candnotd
12872 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12873 TREE_OPERAND (arg0, 1),
12874 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12875 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12876 if (integer_nonzerop (candnotd))
12877 return omit_one_operand_loc (loc, type, rslt, arg0);
12880 /* If this is a comparison of a field, we may be able to simplify it. */
12881 if ((TREE_CODE (arg0) == COMPONENT_REF
12882 || TREE_CODE (arg0) == BIT_FIELD_REF)
12883 /* Handle the constant case even without -O
12884 to make sure the warnings are given. */
12885 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12887 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12888 if (t1)
12889 return t1;
12892 /* Optimize comparisons of strlen vs zero to a compare of the
12893 first character of the string vs zero. To wit,
12894 strlen(ptr) == 0 => *ptr == 0
12895 strlen(ptr) != 0 => *ptr != 0
12896 Other cases should reduce to one of these two (or a constant)
12897 due to the return value of strlen being unsigned. */
12898 if (TREE_CODE (arg0) == CALL_EXPR
12899 && integer_zerop (arg1))
12901 tree fndecl = get_callee_fndecl (arg0);
12903 if (fndecl
12904 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12905 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12906 && call_expr_nargs (arg0) == 1
12907 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12909 tree iref = build_fold_indirect_ref_loc (loc,
12910 CALL_EXPR_ARG (arg0, 0));
12911 return fold_build2_loc (loc, code, type, iref,
12912 build_int_cst (TREE_TYPE (iref), 0));
12916 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12917 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12918 if (TREE_CODE (arg0) == RSHIFT_EXPR
12919 && integer_zerop (arg1)
12920 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12922 tree arg00 = TREE_OPERAND (arg0, 0);
12923 tree arg01 = TREE_OPERAND (arg0, 1);
12924 tree itype = TREE_TYPE (arg00);
12925 if (TREE_INT_CST_HIGH (arg01) == 0
12926 && TREE_INT_CST_LOW (arg01)
12927 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12929 if (TYPE_UNSIGNED (itype))
12931 itype = signed_type_for (itype);
12932 arg00 = fold_convert_loc (loc, itype, arg00);
12934 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12935 type, arg00, build_zero_cst (itype));
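/* Concrete sketch (assuming 32-bit int): (x >> 31) != 0 folds to
   x < 0, since an arithmetic shift by precision - 1 leaves only
   copies of the sign bit; an unsigned x is first converted to the
   signed type so the same sign test applies.  */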
12939 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12940 if (integer_zerop (arg1)
12941 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12942 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12943 TREE_OPERAND (arg0, 1));
12945 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12946 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12947 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12948 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12949 build_zero_cst (TREE_TYPE (arg0)));
12950 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12951 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12952 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12953 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12954 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12955 build_zero_cst (TREE_TYPE (arg0)));
12957 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12958 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12959 && TREE_CODE (arg1) == INTEGER_CST
12960 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12961 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12962 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12963 TREE_OPERAND (arg0, 1), arg1));
12965 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12966 (X & C) == 0 when C is a single bit. */
12967 if (TREE_CODE (arg0) == BIT_AND_EXPR
12968 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12969 && integer_zerop (arg1)
12970 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12972 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12973 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12974 TREE_OPERAND (arg0, 1));
12975 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12976 type, tem,
12977 fold_convert_loc (loc, TREE_TYPE (arg0),
12978 arg1));
12981 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12982 constant C is a power of two, i.e. a single bit. */
12983 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12984 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12985 && integer_zerop (arg1)
12986 && integer_pow2p (TREE_OPERAND (arg0, 1))
12987 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12988 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12990 tree arg00 = TREE_OPERAND (arg0, 0);
12991 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12992 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12995 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12996 when C is a power of two, i.e. a single bit. */
12997 if (TREE_CODE (arg0) == BIT_AND_EXPR
12998 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12999 && integer_zerop (arg1)
13000 && integer_pow2p (TREE_OPERAND (arg0, 1))
13001 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13002 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13004 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13005 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13006 arg000, TREE_OPERAND (arg0, 1));
13007 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13008 tem, build_int_cst (TREE_TYPE (tem), 0));
13011 if (integer_zerop (arg1)
13012 && tree_expr_nonzero_p (arg0))
13014 tree res = constant_boolean_node (code==NE_EXPR, type);
13015 return omit_one_operand_loc (loc, type, res, arg0);
13018 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13019 if (TREE_CODE (arg0) == NEGATE_EXPR
13020 && TREE_CODE (arg1) == NEGATE_EXPR)
13021 return fold_build2_loc (loc, code, type,
13022 TREE_OPERAND (arg0, 0),
13023 fold_convert_loc (loc, TREE_TYPE (arg0),
13024 TREE_OPERAND (arg1, 0)));
13026 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13027 if (TREE_CODE (arg0) == BIT_AND_EXPR
13028 && TREE_CODE (arg1) == BIT_AND_EXPR)
13030 tree arg00 = TREE_OPERAND (arg0, 0);
13031 tree arg01 = TREE_OPERAND (arg0, 1);
13032 tree arg10 = TREE_OPERAND (arg1, 0);
13033 tree arg11 = TREE_OPERAND (arg1, 1);
13034 tree itype = TREE_TYPE (arg0);
13036 if (operand_equal_p (arg01, arg11, 0))
13037 return fold_build2_loc (loc, code, type,
13038 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13039 fold_build2_loc (loc,
13040 BIT_XOR_EXPR, itype,
13041 arg00, arg10),
13042 arg01),
13043 build_zero_cst (itype));
13045 if (operand_equal_p (arg01, arg10, 0))
13046 return fold_build2_loc (loc, code, type,
13047 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13048 fold_build2_loc (loc,
13049 BIT_XOR_EXPR, itype,
13050 arg00, arg11),
13051 arg01),
13052 build_zero_cst (itype));
13054 if (operand_equal_p (arg00, arg11, 0))
13055 return fold_build2_loc (loc, code, type,
13056 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13057 fold_build2_loc (loc,
13058 BIT_XOR_EXPR, itype,
13059 arg01, arg10),
13060 arg00),
13061 build_zero_cst (itype));
13063 if (operand_equal_p (arg00, arg10, 0))
13064 return fold_build2_loc (loc, code, type,
13065 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13066 fold_build2_loc (loc,
13067 BIT_XOR_EXPR, itype,
13068 arg01, arg11),
13069 arg00),
13070 build_zero_cst (itype));
13073 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13074 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13076 tree arg00 = TREE_OPERAND (arg0, 0);
13077 tree arg01 = TREE_OPERAND (arg0, 1);
13078 tree arg10 = TREE_OPERAND (arg1, 0);
13079 tree arg11 = TREE_OPERAND (arg1, 1);
13080 tree itype = TREE_TYPE (arg0);
13082 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13083 operand_equal_p guarantees no side-effects so we don't need
13084 to use omit_one_operand on Z. */
13085 if (operand_equal_p (arg01, arg11, 0))
13086 return fold_build2_loc (loc, code, type, arg00,
13087 fold_convert_loc (loc, TREE_TYPE (arg00),
13088 arg10));
13089 if (operand_equal_p (arg01, arg10, 0))
13090 return fold_build2_loc (loc, code, type, arg00,
13091 fold_convert_loc (loc, TREE_TYPE (arg00),
13092 arg11));
13093 if (operand_equal_p (arg00, arg11, 0))
13094 return fold_build2_loc (loc, code, type, arg01,
13095 fold_convert_loc (loc, TREE_TYPE (arg01),
13096 arg10));
13097 if (operand_equal_p (arg00, arg10, 0))
13098 return fold_build2_loc (loc, code, type, arg01,
13099 fold_convert_loc (loc, TREE_TYPE (arg01),
13100 arg11));
13102 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13103 if (TREE_CODE (arg01) == INTEGER_CST
13104 && TREE_CODE (arg11) == INTEGER_CST)
13106 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13107 fold_convert_loc (loc, itype, arg11));
13108 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13109 return fold_build2_loc (loc, code, type, tem,
13110 fold_convert_loc (loc, itype, arg10));
13114 /* Attempt to simplify equality/inequality comparisons of complex
13115 values. Only lower the comparison if the result is known or
13116 can be simplified to a single scalar comparison. */
13117 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13118 || TREE_CODE (arg0) == COMPLEX_CST)
13119 && (TREE_CODE (arg1) == COMPLEX_EXPR
13120 || TREE_CODE (arg1) == COMPLEX_CST))
13122 tree real0, imag0, real1, imag1;
13123 tree rcond, icond;
13125 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13127 real0 = TREE_OPERAND (arg0, 0);
13128 imag0 = TREE_OPERAND (arg0, 1);
13130 else
13132 real0 = TREE_REALPART (arg0);
13133 imag0 = TREE_IMAGPART (arg0);
13136 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13138 real1 = TREE_OPERAND (arg1, 0);
13139 imag1 = TREE_OPERAND (arg1, 1);
13141 else
13143 real1 = TREE_REALPART (arg1);
13144 imag1 = TREE_IMAGPART (arg1);
13147 rcond = fold_binary_loc (loc, code, type, real0, real1);
13148 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13150 if (integer_zerop (rcond))
13152 if (code == EQ_EXPR)
13153 return omit_two_operands_loc (loc, type, boolean_false_node,
13154 imag0, imag1);
13155 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13157 else
13159 if (code == NE_EXPR)
13160 return omit_two_operands_loc (loc, type, boolean_true_node,
13161 imag0, imag1);
13162 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13166 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13167 if (icond && TREE_CODE (icond) == INTEGER_CST)
13169 if (integer_zerop (icond))
13171 if (code == EQ_EXPR)
13172 return omit_two_operands_loc (loc, type, boolean_false_node,
13173 real0, real1);
13174 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13176 else
13178 if (code == NE_EXPR)
13179 return omit_two_operands_loc (loc, type, boolean_true_node,
13180 real0, real1);
13181 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13186 return NULL_TREE;
13188 case LT_EXPR:
13189 case GT_EXPR:
13190 case LE_EXPR:
13191 case GE_EXPR:
13192 tem = fold_comparison (loc, code, type, op0, op1);
13193 if (tem != NULL_TREE)
13194 return tem;
13196 /* Transform comparisons of the form X +- C CMP X. */
13197 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13198 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13199 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13200 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13201 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13202 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13204 tree arg01 = TREE_OPERAND (arg0, 1);
13205 enum tree_code code0 = TREE_CODE (arg0);
13206 int is_positive;
13208 if (TREE_CODE (arg01) == REAL_CST)
13209 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13210 else
13211 is_positive = tree_int_cst_sgn (arg01);
13213 /* (X - c) > X becomes false. */
13214 if (code == GT_EXPR
13215 && ((code0 == MINUS_EXPR && is_positive >= 0)
13216 || (code0 == PLUS_EXPR && is_positive <= 0)))
13218 if (TREE_CODE (arg01) == INTEGER_CST
13219 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13220 fold_overflow_warning (("assuming signed overflow does not "
13221 "occur when assuming that (X - c) > X "
13222 "is always false"),
13223 WARN_STRICT_OVERFLOW_ALL);
13224 return constant_boolean_node (0, type);
13227 /* Likewise (X + c) < X becomes false. */
13228 if (code == LT_EXPR
13229 && ((code0 == PLUS_EXPR && is_positive >= 0)
13230 || (code0 == MINUS_EXPR && is_positive <= 0)))
13232 if (TREE_CODE (arg01) == INTEGER_CST
13233 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13234 fold_overflow_warning (("assuming signed overflow does not "
13235 "occur when assuming that "
13236 "(X + c) < X is always false"),
13237 WARN_STRICT_OVERFLOW_ALL);
13238 return constant_boolean_node (0, type);
13241 /* Convert (X - c) <= X to true. */
13242 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13243 && code == LE_EXPR
13244 && ((code0 == MINUS_EXPR && is_positive >= 0)
13245 || (code0 == PLUS_EXPR && is_positive <= 0)))
13247 if (TREE_CODE (arg01) == INTEGER_CST
13248 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13249 fold_overflow_warning (("assuming signed overflow does not "
13250 "occur when assuming that "
13251 "(X - c) <= X is always true"),
13252 WARN_STRICT_OVERFLOW_ALL);
13253 return constant_boolean_node (1, type);
13256 /* Convert (X + c) >= X to true. */
13257 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13258 && code == GE_EXPR
13259 && ((code0 == PLUS_EXPR && is_positive >= 0)
13260 || (code0 == MINUS_EXPR && is_positive <= 0)))
13262 if (TREE_CODE (arg01) == INTEGER_CST
13263 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13264 fold_overflow_warning (("assuming signed overflow does not "
13265 "occur when assuming that "
13266 "(X + c) >= X is always true"),
13267 WARN_STRICT_OVERFLOW_ALL);
13268 return constant_boolean_node (1, type);
13271 if (TREE_CODE (arg01) == INTEGER_CST)
13273 /* Convert X + c > X and X - c < X to true for integers. */
13274 if (code == GT_EXPR
13275 && ((code0 == PLUS_EXPR && is_positive > 0)
13276 || (code0 == MINUS_EXPR && is_positive < 0)))
13278 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13279 fold_overflow_warning (("assuming signed overflow does "
13280 "not occur when assuming that "
13281 "(X + c) > X is always true"),
13282 WARN_STRICT_OVERFLOW_ALL);
13283 return constant_boolean_node (1, type);
13286 if (code == LT_EXPR
13287 && ((code0 == MINUS_EXPR && is_positive > 0)
13288 || (code0 == PLUS_EXPR && is_positive < 0)))
13290 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13291 fold_overflow_warning (("assuming signed overflow does "
13292 "not occur when assuming that "
13293 "(X - c) < X is always true"),
13294 WARN_STRICT_OVERFLOW_ALL);
13295 return constant_boolean_node (1, type);
13298 /* Convert X + c <= X and X - c >= X to false for integers. */
13299 if (code == LE_EXPR
13300 && ((code0 == PLUS_EXPR && is_positive > 0)
13301 || (code0 == MINUS_EXPR && is_positive < 0)))
13303 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13304 fold_overflow_warning (("assuming signed overflow does "
13305 "not occur when assuming that "
13306 "(X + c) <= X is always false"),
13307 WARN_STRICT_OVERFLOW_ALL);
13308 return constant_boolean_node (0, type);
13311 if (code == GE_EXPR
13312 && ((code0 == MINUS_EXPR && is_positive > 0)
13313 || (code0 == PLUS_EXPR && is_positive < 0)))
13315 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13316 fold_overflow_warning (("assuming signed overflow does "
13317 "not occur when assuming that "
13318 "(X - c) >= X is always false"),
13319 WARN_STRICT_OVERFLOW_ALL);
13320 return constant_boolean_node (0, type);
13325 /* Comparisons with the highest or lowest possible integer of
13326 the specified precision will have known values. */
13328 tree arg1_type = TREE_TYPE (arg1);
13329 unsigned int width = TYPE_PRECISION (arg1_type);
13331 if (TREE_CODE (arg1) == INTEGER_CST
13332 && width <= HOST_BITS_PER_DOUBLE_INT
13333 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13335 HOST_WIDE_INT signed_max_hi;
13336 unsigned HOST_WIDE_INT signed_max_lo;
13337 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13339 if (width <= HOST_BITS_PER_WIDE_INT)
13341 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13342 - 1;
13343 signed_max_hi = 0;
13344 max_hi = 0;
13346 if (TYPE_UNSIGNED (arg1_type))
13348 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13349 min_lo = 0;
13350 min_hi = 0;
13352 else
13354 max_lo = signed_max_lo;
13355 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13356 min_hi = -1;
13359 else
13361 width -= HOST_BITS_PER_WIDE_INT;
13362 signed_max_lo = -1;
13363 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13364 - 1;
13365 max_lo = -1;
13366 min_lo = 0;
13368 if (TYPE_UNSIGNED (arg1_type))
13370 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13371 min_hi = 0;
13373 else
13375 max_hi = signed_max_hi;
13376 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13380 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13381 && TREE_INT_CST_LOW (arg1) == max_lo)
13382 switch (code)
13384 case GT_EXPR:
13385 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13387 case GE_EXPR:
13388 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13390 case LE_EXPR:
13391 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13393 case LT_EXPR:
13394 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13396 /* The GE_EXPR and LT_EXPR cases above are not normally
13397 reached because of previous transformations. */
13399 default:
13400 break;
13402 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13403 == max_hi
13404 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13405 switch (code)
13407 case GT_EXPR:
13408 arg1 = const_binop (PLUS_EXPR, arg1,
13409 build_int_cst (TREE_TYPE (arg1), 1));
13410 return fold_build2_loc (loc, EQ_EXPR, type,
13411 fold_convert_loc (loc,
13412 TREE_TYPE (arg1), arg0),
13413 arg1);
13414 case LE_EXPR:
13415 arg1 = const_binop (PLUS_EXPR, arg1,
13416 build_int_cst (TREE_TYPE (arg1), 1));
13417 return fold_build2_loc (loc, NE_EXPR, type,
13418 fold_convert_loc (loc, TREE_TYPE (arg1),
13419 arg0),
13420 arg1);
13421 default:
13422 break;
13424 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13425 == min_hi
13426 && TREE_INT_CST_LOW (arg1) == min_lo)
13427 switch (code)
13429 case LT_EXPR:
13430 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13432 case LE_EXPR:
13433 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13435 case GE_EXPR:
13436 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13438 case GT_EXPR:
13439 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13441 default:
13442 break;
13444 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13445 == min_hi
13446 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13447 switch (code)
13449 case GE_EXPR:
13450 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13451 return fold_build2_loc (loc, NE_EXPR, type,
13452 fold_convert_loc (loc,
13453 TREE_TYPE (arg1), arg0),
13454 arg1);
13455 case LT_EXPR:
13456 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13457 return fold_build2_loc (loc, EQ_EXPR, type,
13458 fold_convert_loc (loc, TREE_TYPE (arg1),
13459 arg0),
13460 arg1);
13461 default:
13462 break;
13465 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13466 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13467 && TYPE_UNSIGNED (arg1_type)
13468 /* We will flip the signedness of the comparison operator
13469 associated with the mode of arg1, so the sign bit is
13470 specified by this mode. Check that arg1 is the signed
13471 max associated with this sign bit. */
13472 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13473 /* signed_type does not work on pointer types. */
13474 && INTEGRAL_TYPE_P (arg1_type))
13476 /* The following case also applies to X < signed_max+1
13477 and X >= signed_max+1 because of previous transformations. */
13478 if (code == LE_EXPR || code == GT_EXPR)
13480 tree st;
13481 st = signed_type_for (TREE_TYPE (arg1));
13482 return fold_build2_loc (loc,
13483 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13484 type, fold_convert_loc (loc, st, arg0),
13485 build_int_cst (st, 0));
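/* Concrete sketch (assuming 32-bit types): for unsigned x,
   x <= 0x7fffffff holds exactly when the sign bit of x is clear,
   so it folds to (int) x >= 0, and x > 0x7fffffff folds to
   (int) x < 0.  */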
13491 /* If we are comparing an ABS_EXPR with a constant, we can
13492 convert all the cases into explicit comparisons, but they may
13493 well not be faster than doing the ABS and one comparison.
13494 But ABS (X) <= C is a range comparison, which becomes a subtraction
13495 and a comparison, and is probably faster. */
13496 if (code == LE_EXPR
13497 && TREE_CODE (arg1) == INTEGER_CST
13498 && TREE_CODE (arg0) == ABS_EXPR
13499 && ! TREE_SIDE_EFFECTS (arg0)
13500 && (0 != (tem = negate_expr (arg1)))
13501 && TREE_CODE (tem) == INTEGER_CST
13502 && !TREE_OVERFLOW (tem))
13503 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13504 build2 (GE_EXPR, type,
13505 TREE_OPERAND (arg0, 0), tem),
13506 build2 (LE_EXPR, type,
13507 TREE_OPERAND (arg0, 0), arg1));
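/* Concrete sketch: abs (x) <= 5 folds to x >= -5 && x <= 5, a
   range check that later folding can presumably lower to the
   single unsigned test (unsigned) (x + 5) <= 10; the other
   orderings against a constant would still need a compare after
   computing the ABS.  */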
13509 /* Convert ABS_EXPR<x> >= 0 to true. */
13510 strict_overflow_p = false;
13511 if (code == GE_EXPR
13512 && (integer_zerop (arg1)
13513 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13514 && real_zerop (arg1)))
13515 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13517 if (strict_overflow_p)
13518 fold_overflow_warning (("assuming signed overflow does not occur "
13519 "when simplifying comparison of "
13520 "absolute value and zero"),
13521 WARN_STRICT_OVERFLOW_CONDITIONAL);
13522 return omit_one_operand_loc (loc, type,
13523 constant_boolean_node (true, type),
13524 arg0);
13527 /* Convert ABS_EXPR<x> < 0 to false. */
13528 strict_overflow_p = false;
13529 if (code == LT_EXPR
13530 && (integer_zerop (arg1) || real_zerop (arg1))
13531 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13533 if (strict_overflow_p)
13534 fold_overflow_warning (("assuming signed overflow does not occur "
13535 "when simplifying comparison of "
13536 "absolute value and zero"),
13537 WARN_STRICT_OVERFLOW_CONDITIONAL);
13538 return omit_one_operand_loc (loc, type,
13539 constant_boolean_node (false, type),
13540 arg0);
13543 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13544 and similarly for >= into !=. */
13545 if ((code == LT_EXPR || code == GE_EXPR)
13546 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13547 && TREE_CODE (arg1) == LSHIFT_EXPR
13548 && integer_onep (TREE_OPERAND (arg1, 0)))
13549 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13550 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13551 TREE_OPERAND (arg1, 1)),
13552 build_zero_cst (TREE_TYPE (arg0)));
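/* Concrete sketch (assuming unsigned 32-bit x and 0 <= y < 32):
   x < (1 << y) says x has no bits set at position y or above,
   which is exactly (x >> y) == 0; e.g. y == 4 turns x < 16 into
   (x >> 4) == 0, and x >= 16 into (x >> 4) != 0.  */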
13554 if ((code == LT_EXPR || code == GE_EXPR)
13555 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13556 && CONVERT_EXPR_P (arg1)
13557 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13558 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13560 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13561 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13562 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13563 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13564 build_zero_cst (TREE_TYPE (arg0)));
13567 return NULL_TREE;
13569 case UNORDERED_EXPR:
13570 case ORDERED_EXPR:
13571 case UNLT_EXPR:
13572 case UNLE_EXPR:
13573 case UNGT_EXPR:
13574 case UNGE_EXPR:
13575 case UNEQ_EXPR:
13576 case LTGT_EXPR:
13577 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13579 t1 = fold_relational_const (code, type, arg0, arg1);
13580 if (t1 != NULL_TREE)
13581 return t1;
13584 /* If the first operand is NaN, the result is constant. */
13585 if (TREE_CODE (arg0) == REAL_CST
13586 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13587 && (code != LTGT_EXPR || ! flag_trapping_math))
13589 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13590 ? integer_zero_node
13591 : integer_one_node;
13592 return omit_one_operand_loc (loc, type, t1, arg1);
13595 /* If the second operand is NaN, the result is constant. */
13596 if (TREE_CODE (arg1) == REAL_CST
13597 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13598 && (code != LTGT_EXPR || ! flag_trapping_math))
13600 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13601 ? integer_zero_node
13602 : integer_one_node;
13603 return omit_one_operand_loc (loc, type, t1, arg0);
13606 /* Simplify unordered comparison of something with itself. */
13607 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13608 && operand_equal_p (arg0, arg1, 0))
13609 return constant_boolean_node (1, type);
13611 if (code == LTGT_EXPR
13612 && !flag_trapping_math
13613 && operand_equal_p (arg0, arg1, 0))
13614 return constant_boolean_node (0, type);
13616 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13618 tree targ0 = strip_float_extensions (arg0);
13619 tree targ1 = strip_float_extensions (arg1);
13620 tree newtype = TREE_TYPE (targ0);
13622 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13623 newtype = TREE_TYPE (targ1);
13625 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13626 return fold_build2_loc (loc, code, type,
13627 fold_convert_loc (loc, newtype, targ0),
13628 fold_convert_loc (loc, newtype, targ1));
13631 return NULL_TREE;
13633 case COMPOUND_EXPR:
13634 /* When pedantic, a compound expression can be neither an lvalue
13635 nor an integer constant expression. */
13636 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13637 return NULL_TREE;
13638 /* Don't let (0, 0) be a null pointer constant. */
13639 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13640 : fold_convert_loc (loc, type, arg1);
13641 return pedantic_non_lvalue_loc (loc, tem);
13643 case COMPLEX_EXPR:
13644 if ((TREE_CODE (arg0) == REAL_CST
13645 && TREE_CODE (arg1) == REAL_CST)
13646 || (TREE_CODE (arg0) == INTEGER_CST
13647 && TREE_CODE (arg1) == INTEGER_CST))
13648 return build_complex (type, arg0, arg1);
13649 if (TREE_CODE (arg0) == REALPART_EXPR
13650 && TREE_CODE (arg1) == IMAGPART_EXPR
13651 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13652 && operand_equal_p (TREE_OPERAND (arg0, 0),
13653 TREE_OPERAND (arg1, 0), 0))
13654 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13655 TREE_OPERAND (arg1, 0));
13656 return NULL_TREE;
13658 case ASSERT_EXPR:
13659 /* An ASSERT_EXPR should never be passed to fold_binary. */
13660 gcc_unreachable ();
13662 case VEC_PACK_TRUNC_EXPR:
13663 case VEC_PACK_FIX_TRUNC_EXPR:
13665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13666 tree *elts;
13668 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13669 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13670 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13671 return NULL_TREE;
13673 elts = XALLOCAVEC (tree, nelts);
13674 if (!vec_cst_ctor_to_array (arg0, elts)
13675 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13676 return NULL_TREE;
13678 for (i = 0; i < nelts; i++)
13680 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13681 ? NOP_EXPR : FIX_TRUNC_EXPR,
13682 TREE_TYPE (type), elts[i]);
13683 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13684 return NULL_TREE;
13687 return build_vector (type, elts);
13690 case VEC_WIDEN_MULT_LO_EXPR:
13691 case VEC_WIDEN_MULT_HI_EXPR:
13692 case VEC_WIDEN_MULT_EVEN_EXPR:
13693 case VEC_WIDEN_MULT_ODD_EXPR:
13695 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13696 unsigned int out, ofs, scale;
13697 tree *elts;
13699 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13700 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13701 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13702 return NULL_TREE;
13704 elts = XALLOCAVEC (tree, nelts * 4);
13705 if (!vec_cst_ctor_to_array (arg0, elts)
13706 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13707 return NULL_TREE;
13709 if (code == VEC_WIDEN_MULT_LO_EXPR)
13710 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13711 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13712 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13713 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13714 scale = 1, ofs = 0;
13715 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13716 scale = 1, ofs = 1;
13718 for (out = 0; out < nelts; out++)
13720 unsigned int in1 = (out << scale) + ofs;
13721 unsigned int in2 = in1 + nelts * 2;
13722 tree t1, t2;
13724 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13725 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13727 if (t1 == NULL_TREE || t2 == NULL_TREE)
13728 return NULL_TREE;
13729 elts[out] = const_binop (MULT_EXPR, t1, t2);
13730 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13731 return NULL_TREE;
13734 return build_vector (type, elts);
13737 default:
13738 return NULL_TREE;
13739 } /* switch (code) */
13742 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13743 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13744 of GOTO_EXPR. */
13746 static tree
13747 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13749 switch (TREE_CODE (*tp))
13751 case LABEL_EXPR:
13752 return *tp;
13754 case GOTO_EXPR:
13755 *walk_subtrees = 0;
13757 /* ... fall through ... */
13759 default:
13760 return NULL_TREE;
13764 /* Return whether the sub-tree ST contains a label which is accessible from
13765 outside the sub-tree. */
13767 static bool
13768 contains_label_p (tree st)
13770 return
13771 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13774 /* Fold a ternary expression of code CODE and type TYPE with operands
13775 OP0, OP1, and OP2. Return the folded expression if folding is
13776 successful. Otherwise, return NULL_TREE. */
13778 tree
13779 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13780 tree op0, tree op1, tree op2)
13782 tree tem;
13783 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13784 enum tree_code_class kind = TREE_CODE_CLASS (code);
13786 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13787 && TREE_CODE_LENGTH (code) == 3);
13789 /* Strip any conversions that don't change the mode. This is safe
13790 for every expression, except for a comparison expression because
13791 its signedness is derived from its operands. So, in the latter
13792 case, only strip conversions that don't change the signedness.
13794 Note that this is done as an internal manipulation within the
13795 constant folder, in order to find the simplest representation of
13796 the arguments so that their form can be studied. In any case,
13797 the appropriate type conversions should be put back in the tree
13798 that will get out of the constant folder. */
13799 if (op0)
13801 arg0 = op0;
13802 STRIP_NOPS (arg0);
13805 if (op1)
13807 arg1 = op1;
13808 STRIP_NOPS (arg1);
13811 if (op2)
13813 arg2 = op2;
13814 STRIP_NOPS (arg2);
13817 switch (code)
13819 case COMPONENT_REF:
13820 if (TREE_CODE (arg0) == CONSTRUCTOR
13821 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13823 unsigned HOST_WIDE_INT idx;
13824 tree field, value;
13825 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13826 if (field == arg1)
13827 return value;
13829 return NULL_TREE;
13831 case COND_EXPR:
13832 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13833 so all simple results must be passed through pedantic_non_lvalue. */
13834 if (TREE_CODE (arg0) == INTEGER_CST)
13836 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13837 tem = integer_zerop (arg0) ? op2 : op1;
13838 /* Only optimize constant conditions when the selected branch
13839 has the same type as the COND_EXPR. This avoids optimizing
13840 away "c ? x : throw", where the throw has a void type.
13841 Avoid throwing away an operand which contains a label. */
13842 if ((!TREE_SIDE_EFFECTS (unused_op)
13843 || !contains_label_p (unused_op))
13844 && (! VOID_TYPE_P (TREE_TYPE (tem))
13845 || VOID_TYPE_P (type)))
13846 return pedantic_non_lvalue_loc (loc, tem);
13847 return NULL_TREE;
13849 if (operand_equal_p (arg1, op2, 0))
13850 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13852 /* If we have A op B ? A : C, we may be able to convert this to a
13853 simpler expression, depending on the operation and the values
13854 of B and C. Signed zeros prevent all of these transformations,
13855 for reasons given above each one.
13857 Also try swapping the arguments and inverting the conditional. */
13858 if (COMPARISON_CLASS_P (arg0)
13859 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13860 arg1, TREE_OPERAND (arg0, 1))
13861 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13863 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13864 if (tem)
13865 return tem;
13868 if (COMPARISON_CLASS_P (arg0)
13869 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13870 op2,
13871 TREE_OPERAND (arg0, 1))
13872 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13874 location_t loc0 = expr_location_or (arg0, loc);
13875 tem = fold_truth_not_expr (loc0, arg0);
13876 if (tem && COMPARISON_CLASS_P (tem))
13878 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13879 if (tem)
13880 return tem;
13884 /* If the second operand is simpler than the third, swap them
13885 since that produces better jump optimization results. */
13886 if (truth_value_p (TREE_CODE (arg0))
13887 && tree_swap_operands_p (op1, op2, false))
13889 location_t loc0 = expr_location_or (arg0, loc);
13890 /* See if this can be inverted. If it can't, possibly because
13891 it was a floating-point inequality comparison, don't do
13892 anything. */
13893 tem = fold_truth_not_expr (loc0, arg0);
13894 if (tem)
13895 return fold_build3_loc (loc, code, type, tem, op2, op1);
13898 /* Convert A ? 1 : 0 to simply A. */
13899 if (integer_onep (op1)
13900 && integer_zerop (op2)
13901 /* If we try to convert OP0 to our type, the
13902 call to fold will try to move the conversion inside
13903 a COND, which will recurse. In that case, the COND_EXPR
13904 is probably the best choice, so leave it alone. */
13905 && type == TREE_TYPE (arg0))
13906 return pedantic_non_lvalue_loc (loc, arg0);
13908 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13909 over COND_EXPR in cases such as floating point comparisons. */
13910 if (integer_zerop (op1)
13911 && integer_onep (op2)
13912 && truth_value_p (TREE_CODE (arg0)))
13913 return pedantic_non_lvalue_loc (loc,
13914 fold_convert_loc (loc, type,
13915 invert_truthvalue_loc (loc,
13916 arg0)));
13918 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13919 if (TREE_CODE (arg0) == LT_EXPR
13920 && integer_zerop (TREE_OPERAND (arg0, 1))
13921 && integer_zerop (op2)
13922 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13924 /* sign_bit_p only checks ARG1 bits within A's precision.
13925 If <sign bit of A> has wider type than A, bits outside
13926 of A's precision in <sign bit of A> need to be checked.
13927 If they are all 0, this optimization needs to be done
13928 in unsigned A's type; if they are all 1, in signed A's type;
13929 otherwise this can't be done. */
13930 if (TYPE_PRECISION (TREE_TYPE (tem))
13931 < TYPE_PRECISION (TREE_TYPE (arg1))
13932 && TYPE_PRECISION (TREE_TYPE (tem))
13933 < TYPE_PRECISION (type))
13935 unsigned HOST_WIDE_INT mask_lo;
13936 HOST_WIDE_INT mask_hi;
13937 int inner_width, outer_width;
13938 tree tem_type;
13940 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13941 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13942 if (outer_width > TYPE_PRECISION (type))
13943 outer_width = TYPE_PRECISION (type);
13945 if (outer_width > HOST_BITS_PER_WIDE_INT)
13947 mask_hi = ((unsigned HOST_WIDE_INT) -1
13948 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13949 mask_lo = -1;
13951 else
13953 mask_hi = 0;
13954 mask_lo = ((unsigned HOST_WIDE_INT) -1
13955 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13957 if (inner_width > HOST_BITS_PER_WIDE_INT)
13959 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13960 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13961 mask_lo = 0;
13963 else
13964 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13965 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13967 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13968 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13970 tem_type = signed_type_for (TREE_TYPE (tem));
13971 tem = fold_convert_loc (loc, tem_type, tem);
13973 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13974 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13976 tem_type = unsigned_type_for (TREE_TYPE (tem));
13977 tem = fold_convert_loc (loc, tem_type, tem);
13979 else
13980 tem = NULL;
13983 if (tem)
13984 return
13985 fold_convert_loc (loc, type,
13986 fold_build2_loc (loc, BIT_AND_EXPR,
13987 TREE_TYPE (tem), tem,
13988 fold_convert_loc (loc,
13989 TREE_TYPE (tem),
13990 arg1)));
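/* Worked example (illustrative, 32-bit int A): sign_bit_p recognizes

       a < 0 ? 0x80000000 : 0   ->   a & 0x80000000

   The mask_hi/mask_lo computation above only matters when the constant is
   wider than A, say a 64-bit 0x0000000080000000 tested against a 32-bit A:
   the bits beyond A's precision are all zero, so the AND is performed in
   the unsigned variant of A's type.  */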
13993 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13994 already handled above. */
13995 if (TREE_CODE (arg0) == BIT_AND_EXPR
13996 && integer_onep (TREE_OPERAND (arg0, 1))
13997 && integer_zerop (op2)
13998 && integer_pow2p (arg1))
14000 tree tem = TREE_OPERAND (arg0, 0);
14001 STRIP_NOPS (tem);
14002 if (TREE_CODE (tem) == RSHIFT_EXPR
14003 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14004 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14005 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14006 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14007 TREE_OPERAND (tem, 0), arg1);
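/* Illustrative example: ((a >> 3) & 1) ? 8 : 0 folds to a & 8, since
   testing bit 3 and then materializing 1 << 3 is the same as masking
   bit 3 directly; the transform requires tree_log2 (arg1) to equal the
   shift count.  */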
14010 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14011 is probably obsolete because the first operand should be a
14012 truth value (that's why we have the two cases above), but let's
14013 leave it in until we can confirm this for all front-ends. */
14014 if (integer_zerop (op2)
14015 && TREE_CODE (arg0) == NE_EXPR
14016 && integer_zerop (TREE_OPERAND (arg0, 1))
14017 && integer_pow2p (arg1)
14018 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14020 arg1, OEP_ONLY_CONST))
14021 return pedantic_non_lvalue_loc (loc,
14022 fold_convert_loc (loc, type,
14023 TREE_OPERAND (arg0, 0)));
14025 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14026 if (integer_zerop (op2)
14027 && truth_value_p (TREE_CODE (arg0))
14028 && truth_value_p (TREE_CODE (arg1)))
14029 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14030 fold_convert_loc (loc, type, arg0),
14031 arg1);
14033 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14034 if (integer_onep (op2)
14035 && truth_value_p (TREE_CODE (arg0))
14036 && truth_value_p (TREE_CODE (arg1)))
14038 location_t loc0 = expr_location_or (arg0, loc);
14039 /* Only perform transformation if ARG0 is easily inverted. */
14040 tem = fold_truth_not_expr (loc0, arg0);
14041 if (tem)
14042 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14043 fold_convert_loc (loc, type, tem),
14044 arg1);
14047 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14048 if (integer_zerop (arg1)
14049 && truth_value_p (TREE_CODE (arg0))
14050 && truth_value_p (TREE_CODE (op2)))
14052 location_t loc0 = expr_location_or (arg0, loc);
14053 /* Only perform transformation if ARG0 is easily inverted. */
14054 tem = fold_truth_not_expr (loc0, arg0);
14055 if (tem)
14056 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14057 fold_convert_loc (loc, type, tem),
14058 op2);
14061 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14062 if (integer_onep (arg1)
14063 && truth_value_p (TREE_CODE (arg0))
14064 && truth_value_p (TREE_CODE (op2)))
14065 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14066 fold_convert_loc (loc, type, arg0),
14067 op2);
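/* Taken together, the four transforms above turn boolean COND_EXPRs into
   logical operators (illustrative summary):

       a ? b : 0   ->   a && b
       a ? b : 1   ->  !a || b
       a ? 0 : b   ->  !a && b
       a ? 1 : b   ->   a || b

   The two forms that need !a fire only when fold_truth_not_expr can invert
   A cheaply, which it cannot for e.g. unordered floating-point compares.  */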
14069 return NULL_TREE;
14071 case VEC_COND_EXPR:
14072 if (TREE_CODE (arg0) == VECTOR_CST)
14074 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14075 return pedantic_non_lvalue_loc (loc, op1);
14076 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14077 return pedantic_non_lvalue_loc (loc, op2);
14079 return NULL_TREE;
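/* Illustrative example: with a constant mask,

       VEC_COND_EXPR <{-1,-1,-1,-1}, v1, v2>  ->  v1
       VEC_COND_EXPR <{ 0, 0, 0, 0}, v1, v2>  ->  v2

   provided the dropped operand has no side effects.  */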
14081 case CALL_EXPR:
14082 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14083 of fold_ternary on them. */
14084 gcc_unreachable ();
14086 case BIT_FIELD_REF:
14087 if ((TREE_CODE (arg0) == VECTOR_CST
14088 || (TREE_CODE (arg0) == CONSTRUCTOR
14089 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14090 && (type == TREE_TYPE (TREE_TYPE (arg0))
14091 || (TREE_CODE (type) == VECTOR_TYPE
14092 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14094 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14095 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14096 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14097 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14099 if (n != 0
14100 && (idx % width) == 0
14101 && (n % width) == 0
14102 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14104 idx = idx / width;
14105 n = n / width;
14107 if (TREE_CODE (arg0) == VECTOR_CST)
14109 if (n == 1)
14110 return VECTOR_CST_ELT (arg0, idx);
14112 tree *vals = XALLOCAVEC (tree, n);
14113 for (unsigned i = 0; i < n; ++i)
14114 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14115 return build_vector (type, vals);
14118 /* Constructor elements can be subvectors. */
14119 unsigned HOST_WIDE_INT k = 1;
14120 if (CONSTRUCTOR_NELTS (arg0) != 0)
14122 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14123 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14124 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14127 /* We keep an exact subset of the constructor elements. */
14128 if ((idx % k) == 0 && (n % k) == 0)
14130 if (CONSTRUCTOR_NELTS (arg0) == 0)
14131 return build_constructor (type, NULL);
14132 idx /= k;
14133 n /= k;
14134 if (n == 1)
14136 if (idx < CONSTRUCTOR_NELTS (arg0))
14137 return CONSTRUCTOR_ELT (arg0, idx)->value;
14138 return build_zero_cst (type);
14141 vec<constructor_elt, va_gc> *vals;
14142 vec_alloc (vals, n);
14143 for (unsigned i = 0;
14144 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14145 ++i)
14146 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14147 CONSTRUCTOR_ELT
14148 (arg0, idx + i)->value);
14149 return build_constructor (type, vals);
14151 /* The bitfield references a single constructor element. */
14152 else if (idx + n <= (idx / k + 1) * k)
14154 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14155 return build_zero_cst (type);
14156 else if (n == k)
14157 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14158 else
14159 return fold_build3_loc (loc, code, type,
14160 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14161 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14166 /* A bit-field-ref that references the full argument can be stripped. */
14167 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14168 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14169 && integer_zerop (op2))
14170 return fold_convert_loc (loc, type, arg0);
14172 /* On constants we can use native encode/interpret to constant
14173 fold (nearly) all BIT_FIELD_REFs. */
14174 if (CONSTANT_CLASS_P (arg0)
14175 && can_native_interpret_type_p (type)
14176 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14177 /* This limitation should not be necessary; we just need to
14178 round this up to the mode size. */
14179 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14180 /* Need bit-shifting of the buffer to relax the following. */
14181 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14183 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14184 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14185 unsigned HOST_WIDE_INT clen;
14186 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14187 /* ??? We cannot tell native_encode_expr to start at
14188 some random byte only. So limit ourselves to a reasonable amount
14189 of work. */
14190 if (clen <= 4096)
14192 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14193 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14194 if (len > 0
14195 && len * BITS_PER_UNIT >= bitpos + bitsize)
14197 tree v = native_interpret_expr (type,
14198 b + bitpos / BITS_PER_UNIT,
14199 bitsize / BITS_PER_UNIT);
14200 if (v)
14201 return v;
14206 return NULL_TREE;
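/* Illustrative example of the native encode/interpret path: for a constant
   vector c = { 1.0f, 2.0f, 3.0f, 4.0f }, BIT_FIELD_REF <c, 32, 32> encodes
   c into a byte buffer and re-reads bytes 4..7 as a float, yielding 2.0f.
   Both the size and the position must currently be byte-aligned, and the
   whole constant must fit in 4096 bytes.  */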
14208 case FMA_EXPR:
14209 /* For integers we can decompose the FMA if possible. */
14210 if (TREE_CODE (arg0) == INTEGER_CST
14211 && TREE_CODE (arg1) == INTEGER_CST)
14212 return fold_build2_loc (loc, PLUS_EXPR, type,
14213 const_binop (MULT_EXPR, arg0, arg1), arg2);
14214 if (integer_zerop (arg2))
14215 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14217 return fold_fma (loc, type, arg0, arg1, arg2);
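/* Illustrative examples: FMA_EXPR <3, 4, x> folds to x + 12 via the
   decomposition above, and FMA_EXPR <a, b, 0> degenerates to a * b.  A
   floating-point FMA cannot be decomposed this way in general because it
   rounds only once; those cases are left to fold_fma.  */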
14219 case VEC_PERM_EXPR:
14220 if (TREE_CODE (arg2) == VECTOR_CST)
14222 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14223 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14224 tree t;
14225 bool need_mask_canon = false;
14226 bool all_in_vec0 = true;
14227 bool all_in_vec1 = true;
14228 bool maybe_identity = true;
14229 bool single_arg = (op0 == op1);
14230 bool changed = false;
14232 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14233 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14234 for (i = 0; i < nelts; i++)
14236 tree val = VECTOR_CST_ELT (arg2, i);
14237 if (TREE_CODE (val) != INTEGER_CST)
14238 return NULL_TREE;
14240 sel[i] = TREE_INT_CST_LOW (val) & mask;
14241 if (TREE_INT_CST_HIGH (val)
14242 || ((unsigned HOST_WIDE_INT)
14243 TREE_INT_CST_LOW (val) != sel[i]))
14244 need_mask_canon = true;
14246 if (sel[i] < nelts)
14247 all_in_vec1 = false;
14248 else
14249 all_in_vec0 = false;
14251 if ((sel[i] & (nelts-1)) != i)
14252 maybe_identity = false;
14255 if (maybe_identity)
14257 if (all_in_vec0)
14258 return op0;
14259 if (all_in_vec1)
14260 return op1;
14263 if (all_in_vec0)
14264 op1 = op0;
14265 else if (all_in_vec1)
14267 op0 = op1;
14268 for (i = 0; i < nelts; i++)
14269 sel[i] -= nelts;
14270 need_mask_canon = true;
14273 if ((TREE_CODE (op0) == VECTOR_CST
14274 || TREE_CODE (op0) == CONSTRUCTOR)
14275 && (TREE_CODE (op1) == VECTOR_CST
14276 || TREE_CODE (op1) == CONSTRUCTOR))
14278 t = fold_vec_perm (type, op0, op1, sel);
14279 if (t != NULL_TREE)
14280 return t;
14283 if (op0 == op1 && !single_arg)
14284 changed = true;
14286 if (need_mask_canon && arg2 == op2)
14288 tree *tsel = XALLOCAVEC (tree, nelts);
14289 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14290 for (i = 0; i < nelts; i++)
14291 tsel[i] = build_int_cst (eltype, sel[i]);
14292 op2 = build_vector (TREE_TYPE (arg2), tsel);
14293 changed = true;
14296 if (changed)
14297 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14299 return NULL_TREE;
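/* Illustrative examples (V4SI): a constant selector {0,1,2,3} is the
   identity on op0 and folds to op0; {4,5,6,7} likewise folds to op1.  A
   selector {5,4,7,6} uses only the second vector, so op0 is replaced by
   op1 and the mask is canonicalized to {1,0,3,2}.  */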
14301 default:
14302 return NULL_TREE;
14303 } /* switch (code) */
14306 /* Perform constant folding and related simplification of EXPR.
14307 The related simplifications include x*1 => x, x*0 => 0, etc.,
14308 and application of the associative law.
14309 NOP_EXPR conversions may be removed freely (as long as we
14310 are careful not to change the type of the overall expression).
14311 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14312 but we can constant-fold them if they have constant operands. */
14314 #ifdef ENABLE_FOLD_CHECKING
14315 # define fold(x) fold_1 (x)
14316 static tree fold_1 (tree);
14317 static
14318 #endif
14319 tree
14320 fold (tree expr)
14322 const tree t = expr;
14323 enum tree_code code = TREE_CODE (t);
14324 enum tree_code_class kind = TREE_CODE_CLASS (code);
14325 tree tem;
14326 location_t loc = EXPR_LOCATION (expr);
14328 /* Return right away if a constant. */
14329 if (kind == tcc_constant)
14330 return t;
14332 /* CALL_EXPR-like objects with variable numbers of operands are
14333 treated specially. */
14334 if (kind == tcc_vl_exp)
14336 if (code == CALL_EXPR)
14338 tem = fold_call_expr (loc, expr, false);
14339 return tem ? tem : expr;
14341 return expr;
14344 if (IS_EXPR_CODE_CLASS (kind))
14346 tree type = TREE_TYPE (t);
14347 tree op0, op1, op2;
14349 switch (TREE_CODE_LENGTH (code))
14351 case 1:
14352 op0 = TREE_OPERAND (t, 0);
14353 tem = fold_unary_loc (loc, code, type, op0);
14354 return tem ? tem : expr;
14355 case 2:
14356 op0 = TREE_OPERAND (t, 0);
14357 op1 = TREE_OPERAND (t, 1);
14358 tem = fold_binary_loc (loc, code, type, op0, op1);
14359 return tem ? tem : expr;
14360 case 3:
14361 op0 = TREE_OPERAND (t, 0);
14362 op1 = TREE_OPERAND (t, 1);
14363 op2 = TREE_OPERAND (t, 2);
14364 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14365 return tem ? tem : expr;
14366 default:
14367 break;
14371 switch (code)
14373 case ARRAY_REF:
14375 tree op0 = TREE_OPERAND (t, 0);
14376 tree op1 = TREE_OPERAND (t, 1);
14378 if (TREE_CODE (op1) == INTEGER_CST
14379 && TREE_CODE (op0) == CONSTRUCTOR
14380 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14382 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14383 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14384 unsigned HOST_WIDE_INT begin = 0;
14386 /* Find a matching index by means of a binary search. */
14387 while (begin != end)
14389 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14390 tree index = (*elts)[middle].index;
14392 if (TREE_CODE (index) == INTEGER_CST
14393 && tree_int_cst_lt (index, op1))
14394 begin = middle + 1;
14395 else if (TREE_CODE (index) == INTEGER_CST
14396 && tree_int_cst_lt (op1, index))
14397 end = middle;
14398 else if (TREE_CODE (index) == RANGE_EXPR
14399 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14400 begin = middle + 1;
14401 else if (TREE_CODE (index) == RANGE_EXPR
14402 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14403 end = middle;
14404 else
14405 return (*elts)[middle].value;
14409 return t;
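/* Illustrative example: for

       static const int t[4] = { 10, 20, 30, 40 };

   folding the ARRAY_REF t[2] binary-searches the CONSTRUCTOR's (sorted)
   indices and yields the constant 30.  RANGE_EXPR indices, as created by
   GNU designators like [0 ... 3] = v, are matched against both bounds.  */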
14412 /* Return a VECTOR_CST if possible. */
14413 case CONSTRUCTOR:
14415 tree type = TREE_TYPE (t);
14416 if (TREE_CODE (type) != VECTOR_TYPE)
14417 return t;
14419 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14420 unsigned HOST_WIDE_INT idx, pos = 0;
14421 tree value;
14423 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14425 if (!CONSTANT_CLASS_P (value))
14426 return t;
14427 if (TREE_CODE (value) == VECTOR_CST)
14429 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14430 vec[pos++] = VECTOR_CST_ELT (value, i);
14432 else
14433 vec[pos++] = value;
14435 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14436 vec[pos] = build_zero_cst (TREE_TYPE (type));
14438 return build_vector (type, vec);
14441 case CONST_DECL:
14442 return fold (DECL_INITIAL (t));
14444 default:
14445 return t;
14446 } /* switch (code) */
14449 #ifdef ENABLE_FOLD_CHECKING
14450 #undef fold
14452 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14453 hash_table <pointer_hash <tree_node> >);
14454 static void fold_check_failed (const_tree, const_tree);
14455 void print_fold_checksum (const_tree);
14457 /* When configured with --enable-checking=fold, compute a digest of
14458 EXPR before and after the actual fold call to verify that fold did
14459 not accidentally change the original expr. */
14461 tree
14462 fold (tree expr)
14464 tree ret;
14465 struct md5_ctx ctx;
14466 unsigned char checksum_before[16], checksum_after[16];
14467 hash_table <pointer_hash <tree_node> > ht;
14469 ht.create (32);
14470 md5_init_ctx (&ctx);
14471 fold_checksum_tree (expr, &ctx, ht);
14472 md5_finish_ctx (&ctx, checksum_before);
14473 ht.empty ();
14475 ret = fold_1 (expr);
14477 md5_init_ctx (&ctx);
14478 fold_checksum_tree (expr, &ctx, ht);
14479 md5_finish_ctx (&ctx, checksum_after);
14480 ht.dispose ();
14482 if (memcmp (checksum_before, checksum_after, 16))
14483 fold_check_failed (expr, ret);
14485 return ret;
14488 void
14489 print_fold_checksum (const_tree expr)
14491 struct md5_ctx ctx;
14492 unsigned char checksum[16], cnt;
14493 hash_table <pointer_hash <tree_node> > ht;
14495 ht.create (32);
14496 md5_init_ctx (&ctx);
14497 fold_checksum_tree (expr, &ctx, ht);
14498 md5_finish_ctx (&ctx, checksum);
14499 ht.dispose ();
14500 for (cnt = 0; cnt < 16; ++cnt)
14501 fprintf (stderr, "%02x", checksum[cnt]);
14502 putc ('\n', stderr);
14505 static void
14506 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14508 internal_error ("fold check: original tree changed by fold");
14511 static void
14512 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14513 hash_table <pointer_hash <tree_node> > ht)
14515 tree_node **slot;
14516 enum tree_code code;
14517 union tree_node buf;
14518 int i, len;
14520 recursive_label:
14521 if (expr == NULL)
14522 return;
14523 slot = ht.find_slot (expr, INSERT);
14524 if (*slot != NULL)
14525 return;
14526 *slot = CONST_CAST_TREE (expr);
14527 code = TREE_CODE (expr);
14528 if (TREE_CODE_CLASS (code) == tcc_declaration
14529 && DECL_ASSEMBLER_NAME_SET_P (expr))
14531 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14532 memcpy ((char *) &buf, expr, tree_size (expr));
14533 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14534 expr = (tree) &buf;
14536 else if (TREE_CODE_CLASS (code) == tcc_type
14537 && (TYPE_POINTER_TO (expr)
14538 || TYPE_REFERENCE_TO (expr)
14539 || TYPE_CACHED_VALUES_P (expr)
14540 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14541 || TYPE_NEXT_VARIANT (expr)))
14543 /* Allow these fields to be modified. */
14544 tree tmp;
14545 memcpy ((char *) &buf, expr, tree_size (expr));
14546 expr = tmp = (tree) &buf;
14547 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14548 TYPE_POINTER_TO (tmp) = NULL;
14549 TYPE_REFERENCE_TO (tmp) = NULL;
14550 TYPE_NEXT_VARIANT (tmp) = NULL;
14551 if (TYPE_CACHED_VALUES_P (tmp))
14553 TYPE_CACHED_VALUES_P (tmp) = 0;
14554 TYPE_CACHED_VALUES (tmp) = NULL;
14557 md5_process_bytes (expr, tree_size (expr), ctx);
14558 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14559 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14560 if (TREE_CODE_CLASS (code) != tcc_type
14561 && TREE_CODE_CLASS (code) != tcc_declaration
14562 && code != TREE_LIST
14563 && code != SSA_NAME
14564 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14565 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14566 switch (TREE_CODE_CLASS (code))
14568 case tcc_constant:
14569 switch (code)
14571 case STRING_CST:
14572 md5_process_bytes (TREE_STRING_POINTER (expr),
14573 TREE_STRING_LENGTH (expr), ctx);
14574 break;
14575 case COMPLEX_CST:
14576 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14577 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14578 break;
14579 case VECTOR_CST:
14580 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14581 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14582 break;
14583 default:
14584 break;
14586 break;
14587 case tcc_exceptional:
14588 switch (code)
14590 case TREE_LIST:
14591 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14592 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14593 expr = TREE_CHAIN (expr);
14594 goto recursive_label;
14595 break;
14596 case TREE_VEC:
14597 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14598 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14599 break;
14600 default:
14601 break;
14603 break;
14604 case tcc_expression:
14605 case tcc_reference:
14606 case tcc_comparison:
14607 case tcc_unary:
14608 case tcc_binary:
14609 case tcc_statement:
14610 case tcc_vl_exp:
14611 len = TREE_OPERAND_LENGTH (expr);
14612 for (i = 0; i < len; ++i)
14613 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14614 break;
14615 case tcc_declaration:
14616 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14617 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14618 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14620 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14621 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14622 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14623 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14624 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14626 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14627 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14629 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14631 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14632 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14633 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14635 break;
14636 case tcc_type:
14637 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14638 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14639 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14640 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14641 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14642 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14643 if (INTEGRAL_TYPE_P (expr)
14644 || SCALAR_FLOAT_TYPE_P (expr))
14646 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14647 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14649 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14650 if (TREE_CODE (expr) == RECORD_TYPE
14651 || TREE_CODE (expr) == UNION_TYPE
14652 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14653 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14654 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14655 break;
14656 default:
14657 break;
14661 /* Helper function for outputting the checksum of a tree T. When
14662 debugging with gdb, you can "define mynext" to be "next" followed
14663 by "call debug_fold_checksum (op0)", then just trace down till the
14664 outputs differ. */
14666 DEBUG_FUNCTION void
14667 debug_fold_checksum (const_tree t)
14669 int i;
14670 unsigned char checksum[16];
14671 struct md5_ctx ctx;
14672 hash_table <pointer_hash <tree_node> > ht;
14673 ht.create (32);
14675 md5_init_ctx (&ctx);
14676 fold_checksum_tree (t, &ctx, ht);
14677 md5_finish_ctx (&ctx, checksum);
14678 ht.empty ();
14680 for (i = 0; i < 16; i++)
14681 fprintf (stderr, "%d ", checksum[i]);
14683 fprintf (stderr, "\n");
14686 #endif
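/* Note (not in the original source): everything in the ENABLE_FOLD_CHECKING
   blocks above is compiled only when GCC itself is configured with
   --enable-checking=fold.  The before/after MD5 comparison catches any fold
   entry point that mutates its input tree in place instead of building a
   new one.  */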
14688 /* Fold a unary tree expression with code CODE of type TYPE with an
14689 operand OP0. LOC is the location of the resulting expression.
14690 Return a folded expression if successful. Otherwise, return a tree
14691 expression with code CODE of type TYPE with an operand OP0. */
14693 tree
14694 fold_build1_stat_loc (location_t loc,
14695 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14697 tree tem;
14698 #ifdef ENABLE_FOLD_CHECKING
14699 unsigned char checksum_before[16], checksum_after[16];
14700 struct md5_ctx ctx;
14701 hash_table <pointer_hash <tree_node> > ht;
14703 ht.create (32);
14704 md5_init_ctx (&ctx);
14705 fold_checksum_tree (op0, &ctx, ht);
14706 md5_finish_ctx (&ctx, checksum_before);
14707 ht.empty ();
14708 #endif
14710 tem = fold_unary_loc (loc, code, type, op0);
14711 if (!tem)
14712 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14714 #ifdef ENABLE_FOLD_CHECKING
14715 md5_init_ctx (&ctx);
14716 fold_checksum_tree (op0, &ctx, ht);
14717 md5_finish_ctx (&ctx, checksum_after);
14718 ht.dispose ();
14720 if (memcmp (checksum_before, checksum_after, 16))
14721 fold_check_failed (op0, tem);
14722 #endif
14723 return tem;
14726 /* Fold a binary tree expression with code CODE of type TYPE with
14727 operands OP0 and OP1. LOC is the location of the resulting
14728 expression. Return a folded expression if successful. Otherwise,
14729 return a tree expression with code CODE of type TYPE with operands
14730 OP0 and OP1. */
14732 tree
14733 fold_build2_stat_loc (location_t loc,
14734 enum tree_code code, tree type, tree op0, tree op1
14735 MEM_STAT_DECL)
14737 tree tem;
14738 #ifdef ENABLE_FOLD_CHECKING
14739 unsigned char checksum_before_op0[16],
14740 checksum_before_op1[16],
14741 checksum_after_op0[16],
14742 checksum_after_op1[16];
14743 struct md5_ctx ctx;
14744 hash_table <pointer_hash <tree_node> > ht;
14746 ht.create (32);
14747 md5_init_ctx (&ctx);
14748 fold_checksum_tree (op0, &ctx, ht);
14749 md5_finish_ctx (&ctx, checksum_before_op0);
14750 ht.empty ();
14752 md5_init_ctx (&ctx);
14753 fold_checksum_tree (op1, &ctx, ht);
14754 md5_finish_ctx (&ctx, checksum_before_op1);
14755 ht.empty ();
14756 #endif
14758 tem = fold_binary_loc (loc, code, type, op0, op1);
14759 if (!tem)
14760 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14762 #ifdef ENABLE_FOLD_CHECKING
14763 md5_init_ctx (&ctx);
14764 fold_checksum_tree (op0, &ctx, ht);
14765 md5_finish_ctx (&ctx, checksum_after_op0);
14766 ht.empty ();
14768 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14769 fold_check_failed (op0, tem);
14771 md5_init_ctx (&ctx);
14772 fold_checksum_tree (op1, &ctx, ht);
14773 md5_finish_ctx (&ctx, checksum_after_op1);
14774 ht.dispose ();
14776 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14777 fold_check_failed (op1, tem);
14778 #endif
14779 return tem;
14782 /* Fold a ternary tree expression with code CODE of type TYPE with
14783 operands OP0, OP1, and OP2. Return a folded expression if
14784 successful. Otherwise, return a tree expression with code CODE of
14785 type TYPE with operands OP0, OP1, and OP2. */
14787 tree
14788 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14789 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14791 tree tem;
14792 #ifdef ENABLE_FOLD_CHECKING
14793 unsigned char checksum_before_op0[16],
14794 checksum_before_op1[16],
14795 checksum_before_op2[16],
14796 checksum_after_op0[16],
14797 checksum_after_op1[16],
14798 checksum_after_op2[16];
14799 struct md5_ctx ctx;
14800 hash_table <pointer_hash <tree_node> > ht;
14802 ht.create (32);
14803 md5_init_ctx (&ctx);
14804 fold_checksum_tree (op0, &ctx, ht);
14805 md5_finish_ctx (&ctx, checksum_before_op0);
14806 ht.empty ();
14808 md5_init_ctx (&ctx);
14809 fold_checksum_tree (op1, &ctx, ht);
14810 md5_finish_ctx (&ctx, checksum_before_op1);
14811 ht.empty ();
14813 md5_init_ctx (&ctx);
14814 fold_checksum_tree (op2, &ctx, ht);
14815 md5_finish_ctx (&ctx, checksum_before_op2);
14816 ht.empty ();
14817 #endif
14819 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14820 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14821 if (!tem)
14822 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14824 #ifdef ENABLE_FOLD_CHECKING
14825 md5_init_ctx (&ctx);
14826 fold_checksum_tree (op0, &ctx, ht);
14827 md5_finish_ctx (&ctx, checksum_after_op0);
14828 ht.empty ();
14830 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14831 fold_check_failed (op0, tem);
14833 md5_init_ctx (&ctx);
14834 fold_checksum_tree (op1, &ctx, ht);
14835 md5_finish_ctx (&ctx, checksum_after_op1);
14836 ht.empty ();
14838 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14839 fold_check_failed (op1, tem);
14841 md5_init_ctx (&ctx);
14842 fold_checksum_tree (op2, &ctx, ht);
14843 md5_finish_ctx (&ctx, checksum_after_op2);
14844 ht.dispose ();
14846 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14847 fold_check_failed (op2, tem);
14848 #endif
14849 return tem;
14852 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14853 arguments in ARGARRAY, and a null static chain.
14854 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14855 of type TYPE from the given operands as constructed by build_call_array. */
14857 tree
14858 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14859 int nargs, tree *argarray)
14861 tree tem;
14862 #ifdef ENABLE_FOLD_CHECKING
14863 unsigned char checksum_before_fn[16],
14864 checksum_before_arglist[16],
14865 checksum_after_fn[16],
14866 checksum_after_arglist[16];
14867 struct md5_ctx ctx;
14868 hash_table <pointer_hash <tree_node> > ht;
14869 int i;
14871 ht.create (32);
14872 md5_init_ctx (&ctx);
14873 fold_checksum_tree (fn, &ctx, ht);
14874 md5_finish_ctx (&ctx, checksum_before_fn);
14875 ht.empty ();
14877 md5_init_ctx (&ctx);
14878 for (i = 0; i < nargs; i++)
14879 fold_checksum_tree (argarray[i], &ctx, ht);
14880 md5_finish_ctx (&ctx, checksum_before_arglist);
14881 ht.empty ();
14882 #endif
14884 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14886 #ifdef ENABLE_FOLD_CHECKING
14887 md5_init_ctx (&ctx);
14888 fold_checksum_tree (fn, &ctx, ht);
14889 md5_finish_ctx (&ctx, checksum_after_fn);
14890 ht.empty ();
14892 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14893 fold_check_failed (fn, tem);
14895 md5_init_ctx (&ctx);
14896 for (i = 0; i < nargs; i++)
14897 fold_checksum_tree (argarray[i], &ctx, ht);
14898 md5_finish_ctx (&ctx, checksum_after_arglist);
14899 ht.dispose ();
14901 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14902 fold_check_failed (NULL_TREE, tem);
14903 #endif
14904 return tem;
14907 /* Perform constant folding and related simplification of initializer
14908 expression EXPR. These behave identically to "fold_buildN" but ignore
14909 potential run-time traps and exceptions that fold must preserve. */
14911 #define START_FOLD_INIT \
14912 int saved_signaling_nans = flag_signaling_nans;\
14913 int saved_trapping_math = flag_trapping_math;\
14914 int saved_rounding_math = flag_rounding_math;\
14915 int saved_trapv = flag_trapv;\
14916 int saved_folding_initializer = folding_initializer;\
14917 flag_signaling_nans = 0;\
14918 flag_trapping_math = 0;\
14919 flag_rounding_math = 0;\
14920 flag_trapv = 0;\
14921 folding_initializer = 1;
14923 #define END_FOLD_INIT \
14924 flag_signaling_nans = saved_signaling_nans;\
14925 flag_trapping_math = saved_trapping_math;\
14926 flag_rounding_math = saved_rounding_math;\
14927 flag_trapv = saved_trapv;\
14928 folding_initializer = saved_folding_initializer;
14930 tree
14931 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14932 tree type, tree op)
14934 tree result;
14935 START_FOLD_INIT;
14937 result = fold_build1_loc (loc, code, type, op);
14939 END_FOLD_INIT;
14940 return result;
14943 tree
14944 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14945 tree type, tree op0, tree op1)
14947 tree result;
14948 START_FOLD_INIT;
14950 result = fold_build2_loc (loc, code, type, op0, op1);
14952 END_FOLD_INIT;
14953 return result;
14956 tree
14957 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14958 tree type, tree op0, tree op1, tree op2)
14960 tree result;
14961 START_FOLD_INIT;
14963 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14965 END_FOLD_INIT;
14966 return result;
14969 tree
14970 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14971 int nargs, tree *argarray)
14973 tree result;
14974 START_FOLD_INIT;
14976 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14978 END_FOLD_INIT;
14979 return result;
14982 #undef START_FOLD_INIT
14983 #undef END_FOLD_INIT
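/* Illustrative use: for a static initializer such as

       static double d = 1.0 / 3.0;

   a front end can fold the division with fold_build2_initializer_loc so
   that it is evaluated at compile time even under -ftrapping-math or
   -frounding-math; END_FOLD_INIT restores the saved flags afterwards.  */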
14985 /* Determine if the first argument is a multiple of the second argument.
14986 Return 0 if it is not, or if we cannot easily determine that it is.
14988 An example of the sort of thing we care about (at this point; this routine
14989 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14990 fold cases do now) is discovering that
14992 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14994 is a multiple of
14996 SAVE_EXPR (J * 8)
14998 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15000 This code also handles discovering that
15002 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15004 is a multiple of 8 so we don't have to worry about dealing with a
15005 possible remainder.
15007 Note that we *look* inside a SAVE_EXPR only to determine how it was
15008 calculated; it is not safe for fold to do much of anything else with the
15009 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15010 at run time. For example, the latter example above *cannot* be implemented
15011 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15012 evaluation time of the original SAVE_EXPR is not necessarily the same at
15013 the time the new expression is evaluated. The only optimization of this
15014 sort that would be valid is changing
15016 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15018 divided by 8 to
15020 SAVE_EXPR (I) * SAVE_EXPR (J)
15022 (where the same SAVE_EXPR (J) is used in the original and the
15023 transformed version). */
15025 int
15026 multiple_of_p (tree type, const_tree top, const_tree bottom)
15028 if (operand_equal_p (top, bottom, 0))
15029 return 1;
15031 if (TREE_CODE (type) != INTEGER_TYPE)
15032 return 0;
15034 switch (TREE_CODE (top))
15036 case BIT_AND_EXPR:
15037 /* Bitwise and provides a power of two multiple. If the mask is
15038 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15039 if (!integer_pow2p (bottom))
15040 return 0;
15041 /* FALLTHRU */
15043 case MULT_EXPR:
15044 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15045 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15047 case PLUS_EXPR:
15048 case MINUS_EXPR:
15049 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15050 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15052 case LSHIFT_EXPR:
15053 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15055 tree op1, t1;
15057 op1 = TREE_OPERAND (top, 1);
15058 /* const_binop may not detect overflow correctly,
15059 so check for it explicitly here. */
15060 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15061 > TREE_INT_CST_LOW (op1)
15062 && TREE_INT_CST_HIGH (op1) == 0
15063 && 0 != (t1 = fold_convert (type,
15064 const_binop (LSHIFT_EXPR,
15065 size_one_node,
15066 op1)))
15067 && !TREE_OVERFLOW (t1))
15068 return multiple_of_p (type, t1, bottom);
15070 return 0;
15072 case NOP_EXPR:
15073 /* Can't handle conversions from non-integral or wider integral type. */
15074 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15075 || (TYPE_PRECISION (type)
15076 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15077 return 0;
15079 /* ... fall through ... */
15081 case SAVE_EXPR:
15082 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15084 case COND_EXPR:
15085 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15086 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15088 case INTEGER_CST:
15089 if (TREE_CODE (bottom) != INTEGER_CST
15090 || integer_zerop (bottom)
15091 || (TYPE_UNSIGNED (type)
15092 && (tree_int_cst_sgn (top) < 0
15093 || tree_int_cst_sgn (bottom) < 0)))
15094 return 0;
15095 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15096 top, bottom));
15098 default:
15099 return 0;
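/* Conceptual examples (TOP and BOTTOM stand for the corresponding trees):

       multiple_of_p (type, J * 8,         8)  ->  1   (MULT_EXPR case)
       multiple_of_p (type, I * 4 + J * 8, 4)  ->  1   (PLUS_EXPR case)
       multiple_of_p (type, I & 12,        4)  ->  1   (mask 12 is a
                                                        multiple of 4)
       multiple_of_p (type, I + 2,         4)  ->  0

   A zero result means "not provably a multiple", not "provably not a
   multiple".  */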
15103 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15105 static bool
15106 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15108 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15109 && truth_value_p (code))
15110 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15111 have a signed:1 type (where the values are -1 and 0). */
15112 return true;
15113 return false;
15116 /* Return true if (CODE OP0) is known to be non-negative. If the return
15117 value is based on the assumption that signed overflow is undefined,
15118 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15119 *STRICT_OVERFLOW_P. */
15121 bool
15122 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15123 bool *strict_overflow_p)
15125 if (TYPE_UNSIGNED (type))
15126 return true;
15128 switch (code)
15130 case ABS_EXPR:
15131 /* We can't return 1 if flag_wrapv is set because
15132 ABS_EXPR<INT_MIN> = INT_MIN. */
15133 if (!INTEGRAL_TYPE_P (type))
15134 return true;
15135 if (TYPE_OVERFLOW_UNDEFINED (type))
15137 *strict_overflow_p = true;
15138 return true;
15140 break;
15142 case NON_LVALUE_EXPR:
15143 case FLOAT_EXPR:
15144 case FIX_TRUNC_EXPR:
15145 return tree_expr_nonnegative_warnv_p (op0,
15146 strict_overflow_p);
15148 case NOP_EXPR:
15150 tree inner_type = TREE_TYPE (op0);
15151 tree outer_type = type;
15153 if (TREE_CODE (outer_type) == REAL_TYPE)
15155 if (TREE_CODE (inner_type) == REAL_TYPE)
15156 return tree_expr_nonnegative_warnv_p (op0,
15157 strict_overflow_p);
15158 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15160 if (TYPE_UNSIGNED (inner_type))
15161 return true;
15162 return tree_expr_nonnegative_warnv_p (op0,
15163 strict_overflow_p);
15166 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15168 if (TREE_CODE (inner_type) == REAL_TYPE)
15169 return tree_expr_nonnegative_warnv_p (op0,
15170 strict_overflow_p);
15171 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15172 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15173 && TYPE_UNSIGNED (inner_type);
15176 break;
15178 default:
15179 return tree_simple_nonnegative_warnv_p (code, type);
15182 /* We don't know sign of `t', so be conservative and return false. */
15183 return false;
15186 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15187 value is based on the assumption that signed overflow is undefined,
15188 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15189 *STRICT_OVERFLOW_P. */
15191 bool
15192 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15193 tree op1, bool *strict_overflow_p)
15195 if (TYPE_UNSIGNED (type))
15196 return true;
15198 switch (code)
15200 case POINTER_PLUS_EXPR:
15201 case PLUS_EXPR:
15202 if (FLOAT_TYPE_P (type))
15203 return (tree_expr_nonnegative_warnv_p (op0,
15204 strict_overflow_p)
15205 && tree_expr_nonnegative_warnv_p (op1,
15206 strict_overflow_p));
15208 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15209 both unsigned and at least 2 bits shorter than the result. */
15210 if (TREE_CODE (type) == INTEGER_TYPE
15211 && TREE_CODE (op0) == NOP_EXPR
15212 && TREE_CODE (op1) == NOP_EXPR)
15214 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15215 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15216 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15217 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15219 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15220 TYPE_PRECISION (inner2)) + 1;
15221 return prec < TYPE_PRECISION (type);
15224 break;
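/* Worked example (32-bit int): for (int) uc1 + (int) uc2 with both
   operands zero-extended from unsigned char, the sum is at most
   255 + 255 = 510, which needs 9 bits; MAX (8, 8) + 1 = 9 < 32, so the
   result is provably non-negative.  */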
15226 case MULT_EXPR:
15227 if (FLOAT_TYPE_P (type))
15229 /* x * x for floating point x is always non-negative. */
15230 if (operand_equal_p (op0, op1, 0))
15231 return true;
15232 return (tree_expr_nonnegative_warnv_p (op0,
15233 strict_overflow_p)
15234 && tree_expr_nonnegative_warnv_p (op1,
15235 strict_overflow_p));
15238 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15239 both unsigned and their total width is smaller than that of the result. */
15240 if (TREE_CODE (type) == INTEGER_TYPE
15241 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15242 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15244 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15245 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15246 : TREE_TYPE (op0);
15247 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15248 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15249 : TREE_TYPE (op1);
15251 bool unsigned0 = TYPE_UNSIGNED (inner0);
15252 bool unsigned1 = TYPE_UNSIGNED (inner1);
15254 if (TREE_CODE (op0) == INTEGER_CST)
15255 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15257 if (TREE_CODE (op1) == INTEGER_CST)
15258 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15260 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15261 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15263 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15264 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15265 : TYPE_PRECISION (inner0);
15267 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15268 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15269 : TYPE_PRECISION (inner1);
15271 return precision0 + precision1 < TYPE_PRECISION (type);
15274 return false;
15276 case BIT_AND_EXPR:
15277 case MAX_EXPR:
15278 return (tree_expr_nonnegative_warnv_p (op0,
15279 strict_overflow_p)
15280 || tree_expr_nonnegative_warnv_p (op1,
15281 strict_overflow_p));
15283 case BIT_IOR_EXPR:
15284 case BIT_XOR_EXPR:
15285 case MIN_EXPR:
15286 case RDIV_EXPR:
15287 case TRUNC_DIV_EXPR:
15288 case CEIL_DIV_EXPR:
15289 case FLOOR_DIV_EXPR:
15290 case ROUND_DIV_EXPR:
15291 return (tree_expr_nonnegative_warnv_p (op0,
15292 strict_overflow_p)
15293 && tree_expr_nonnegative_warnv_p (op1,
15294 strict_overflow_p));
15296 case TRUNC_MOD_EXPR:
15297 case CEIL_MOD_EXPR:
15298 case FLOOR_MOD_EXPR:
15299 case ROUND_MOD_EXPR:
15300 return tree_expr_nonnegative_warnv_p (op0,
15301 strict_overflow_p);
15302 default:
15303 return tree_simple_nonnegative_warnv_p (code, type);
15306 /* We don't know sign of `t', so be conservative and return false. */
15307 return false;
15310 /* Return true if T is known to be non-negative. If the return
15311 value is based on the assumption that signed overflow is undefined,
15312 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15313 *STRICT_OVERFLOW_P. */
15315 bool
15316 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15318 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15319 return true;
15321 switch (TREE_CODE (t))
15323 case INTEGER_CST:
15324 return tree_int_cst_sgn (t) >= 0;
15326 case REAL_CST:
15327 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15329 case FIXED_CST:
15330 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15332 case COND_EXPR:
15333 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15334 strict_overflow_p)
15335 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15336 strict_overflow_p));
15337 default:
15338 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15339 TREE_TYPE (t));
15341 /* We don't know sign of `t', so be conservative and return false. */
15342 return false;
15345 /* Return true if T is known to be non-negative. If the return
15346 value is based on the assumption that signed overflow is undefined,
15347 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15348 *STRICT_OVERFLOW_P. */
15350 bool
15351 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15352 tree arg0, tree arg1, bool *strict_overflow_p)
15354 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15355 switch (DECL_FUNCTION_CODE (fndecl))
15357 CASE_FLT_FN (BUILT_IN_ACOS):
15358 CASE_FLT_FN (BUILT_IN_ACOSH):
15359 CASE_FLT_FN (BUILT_IN_CABS):
15360 CASE_FLT_FN (BUILT_IN_COSH):
15361 CASE_FLT_FN (BUILT_IN_ERFC):
15362 CASE_FLT_FN (BUILT_IN_EXP):
15363 CASE_FLT_FN (BUILT_IN_EXP10):
15364 CASE_FLT_FN (BUILT_IN_EXP2):
15365 CASE_FLT_FN (BUILT_IN_FABS):
15366 CASE_FLT_FN (BUILT_IN_FDIM):
15367 CASE_FLT_FN (BUILT_IN_HYPOT):
15368 CASE_FLT_FN (BUILT_IN_POW10):
15369 CASE_INT_FN (BUILT_IN_FFS):
15370 CASE_INT_FN (BUILT_IN_PARITY):
15371 CASE_INT_FN (BUILT_IN_POPCOUNT):
15372 case BUILT_IN_BSWAP32:
15373 case BUILT_IN_BSWAP64:
15374 /* Always true. */
15375 return true;
15377 CASE_FLT_FN (BUILT_IN_SQRT):
15378 /* sqrt(-0.0) is -0.0. */
15379 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15380 return true;
15381 return tree_expr_nonnegative_warnv_p (arg0,
15382 strict_overflow_p);
15384 CASE_FLT_FN (BUILT_IN_ASINH):
15385 CASE_FLT_FN (BUILT_IN_ATAN):
15386 CASE_FLT_FN (BUILT_IN_ATANH):
15387 CASE_FLT_FN (BUILT_IN_CBRT):
15388 CASE_FLT_FN (BUILT_IN_CEIL):
15389 CASE_FLT_FN (BUILT_IN_ERF):
15390 CASE_FLT_FN (BUILT_IN_EXPM1):
15391 CASE_FLT_FN (BUILT_IN_FLOOR):
15392 CASE_FLT_FN (BUILT_IN_FMOD):
15393 CASE_FLT_FN (BUILT_IN_FREXP):
15394 CASE_FLT_FN (BUILT_IN_ICEIL):
15395 CASE_FLT_FN (BUILT_IN_IFLOOR):
15396 CASE_FLT_FN (BUILT_IN_IRINT):
15397 CASE_FLT_FN (BUILT_IN_IROUND):
15398 CASE_FLT_FN (BUILT_IN_LCEIL):
15399 CASE_FLT_FN (BUILT_IN_LDEXP):
15400 CASE_FLT_FN (BUILT_IN_LFLOOR):
15401 CASE_FLT_FN (BUILT_IN_LLCEIL):
15402 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15403 CASE_FLT_FN (BUILT_IN_LLRINT):
15404 CASE_FLT_FN (BUILT_IN_LLROUND):
15405 CASE_FLT_FN (BUILT_IN_LRINT):
15406 CASE_FLT_FN (BUILT_IN_LROUND):
15407 CASE_FLT_FN (BUILT_IN_MODF):
15408 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15409 CASE_FLT_FN (BUILT_IN_RINT):
15410 CASE_FLT_FN (BUILT_IN_ROUND):
15411 CASE_FLT_FN (BUILT_IN_SCALB):
15412 CASE_FLT_FN (BUILT_IN_SCALBLN):
15413 CASE_FLT_FN (BUILT_IN_SCALBN):
15414 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15415 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15416 CASE_FLT_FN (BUILT_IN_SINH):
15417 CASE_FLT_FN (BUILT_IN_TANH):
15418 CASE_FLT_FN (BUILT_IN_TRUNC):
15419 /* True if the 1st argument is nonnegative. */
15420 return tree_expr_nonnegative_warnv_p (arg0,
15421 strict_overflow_p);
15423 CASE_FLT_FN (BUILT_IN_FMAX):
15424 /* True if either the 1st or the 2nd argument is nonnegative. */
15425 return (tree_expr_nonnegative_warnv_p (arg0,
15426 strict_overflow_p)
15427 || (tree_expr_nonnegative_warnv_p (arg1,
15428 strict_overflow_p)));
15430 CASE_FLT_FN (BUILT_IN_FMIN):
15431 /* True if both the 1st and the 2nd arguments are nonnegative. */
15432 return (tree_expr_nonnegative_warnv_p (arg0,
15433 strict_overflow_p)
15434 && (tree_expr_nonnegative_warnv_p (arg1,
15435 strict_overflow_p)));
15437 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15438 /* True if the 2nd argument is nonnegative. */
15439 return tree_expr_nonnegative_warnv_p (arg1,
15440 strict_overflow_p);
15442 CASE_FLT_FN (BUILT_IN_POWI):
15443 /* True if the 1st argument is nonnegative or the second
15444 argument is an even integer. */
15445 if (TREE_CODE (arg1) == INTEGER_CST
15446 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15447 return true;
15448 return tree_expr_nonnegative_warnv_p (arg0,
15449 strict_overflow_p);
15451 CASE_FLT_FN (BUILT_IN_POW):
15452 /* True if the 1st argument is nonnegative or the second
15453 argument is an even integer valued real. */
15454 if (TREE_CODE (arg1) == REAL_CST)
15456 REAL_VALUE_TYPE c;
15457 HOST_WIDE_INT n;
15459 c = TREE_REAL_CST (arg1);
15460 n = real_to_integer (&c);
15461 if ((n & 1) == 0)
15463 REAL_VALUE_TYPE cint;
15464 real_from_integer (&cint, VOIDmode, n,
15465 n < 0 ? -1 : 0, 0);
15466 if (real_identical (&c, &cint))
15467 return true;
15470 return tree_expr_nonnegative_warnv_p (arg0,
15471 strict_overflow_p);
15473 default:
15474 break;
15476 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15477 type);
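/* Illustrative examples: fabs (x), hypot (x, y) and exp (x) are always
   non-negative; sqrt (x) is too unless signed zeros are honored, since
   sqrt (-0.0) is -0.0; pow (x, 2.0) qualifies because 2.0 is an even
   integer-valued real, while pow (x, 3.0) falls back to asking whether
   x itself is non-negative.  */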
15480 /* Return true if T is known to be non-negative. If the return
15481 value is based on the assumption that signed overflow is undefined,
15482 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15483 *STRICT_OVERFLOW_P. */
15485 bool
15486 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15488 enum tree_code code = TREE_CODE (t);
15489 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15490 return true;
15492 switch (code)
15494 case TARGET_EXPR:
15496 tree temp = TARGET_EXPR_SLOT (t);
15497 t = TARGET_EXPR_INITIAL (t);
15499 /* If the initializer is non-void, then it's a normal expression
15500 that will be assigned to the slot. */
15501 if (!VOID_TYPE_P (t))
15502 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15504 /* Otherwise, the initializer sets the slot in some way. One common
15505 way is an assignment statement at the end of the initializer. */
15506 while (1)
15508 if (TREE_CODE (t) == BIND_EXPR)
15509 t = expr_last (BIND_EXPR_BODY (t));
15510 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15511 || TREE_CODE (t) == TRY_CATCH_EXPR)
15512 t = expr_last (TREE_OPERAND (t, 0));
15513 else if (TREE_CODE (t) == STATEMENT_LIST)
15514 t = expr_last (t);
15515 else
15516 break;
15518 if (TREE_CODE (t) == MODIFY_EXPR
15519 && TREE_OPERAND (t, 0) == temp)
15520 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15521 strict_overflow_p);
15523 return false;
15526 case CALL_EXPR:
15528 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15529 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15531 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15532 get_callee_fndecl (t),
15533 arg0,
15534 arg1,
15535 strict_overflow_p);
15537 case COMPOUND_EXPR:
15538 case MODIFY_EXPR:
15539 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15540 strict_overflow_p);
15541 case BIND_EXPR:
15542 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15543 strict_overflow_p);
15544 case SAVE_EXPR:
15545 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15546 strict_overflow_p);
15548 default:
15549 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15550 TREE_TYPE (t));
15553 /* We don't know sign of `t', so be conservative and return false. */
15554 return false;
15557 /* Return true if T is known to be non-negative. If the return
15558 value is based on the assumption that signed overflow is undefined,
15559 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15560 *STRICT_OVERFLOW_P. */
15562 bool
15563 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15565 enum tree_code code;
15566 if (t == error_mark_node)
15567 return false;
15569 code = TREE_CODE (t);
15570 switch (TREE_CODE_CLASS (code))
15572 case tcc_binary:
15573 case tcc_comparison:
15574 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15575 TREE_TYPE (t),
15576 TREE_OPERAND (t, 0),
15577 TREE_OPERAND (t, 1),
15578 strict_overflow_p);
15580 case tcc_unary:
15581 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15582 TREE_TYPE (t),
15583 TREE_OPERAND (t, 0),
15584 strict_overflow_p);
15586 case tcc_constant:
15587 case tcc_declaration:
15588 case tcc_reference:
15589 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15591 default:
15592 break;
15595 switch (code)
15597 case TRUTH_AND_EXPR:
15598 case TRUTH_OR_EXPR:
15599 case TRUTH_XOR_EXPR:
15600 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15601 TREE_TYPE (t),
15602 TREE_OPERAND (t, 0),
15603 TREE_OPERAND (t, 1),
15604 strict_overflow_p);
15605 case TRUTH_NOT_EXPR:
15606 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15607 TREE_TYPE (t),
15608 TREE_OPERAND (t, 0),
15609 strict_overflow_p);
15611 case COND_EXPR:
15612 case CONSTRUCTOR:
15613 case OBJ_TYPE_REF:
15614 case ASSERT_EXPR:
15615 case ADDR_EXPR:
15616 case WITH_SIZE_EXPR:
15617 case SSA_NAME:
15618 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15620 default:
15621 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15625 /* Return true if `t' is known to be non-negative. Handle warnings
15626 about undefined signed overflow. */
15628 bool
15629 tree_expr_nonnegative_p (tree t)
15631 bool ret, strict_overflow_p;
15633 strict_overflow_p = false;
15634 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15635 if (strict_overflow_p)
15636 fold_overflow_warning (("assuming signed overflow does not occur when "
15637 "determining that expression is always "
15638 "non-negative"),
15639 WARN_STRICT_OVERFLOW_MISC);
15640 return ret;
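/* Typical use (illustrative): a pass simplifying ABS_EXPR <e> can drop
   the ABS_EXPR when tree_expr_nonnegative_p (e) returns true; if the
   proof assumed undefined signed overflow, the call above has already
   routed a -Wstrict-overflow note through fold_overflow_warning.  */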
15644 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15645 For floating point we further ensure that T is not denormal.
15646 Similar logic is present in nonzero_address_p in rtlanal.c.
15648 If the return value is based on the assumption that signed overflow
15649 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15650 change *STRICT_OVERFLOW_P. */
15652 bool
15653 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15654 bool *strict_overflow_p)
15656 switch (code)
15658 case ABS_EXPR:
15659 return tree_expr_nonzero_warnv_p (op0,
15660 strict_overflow_p);
15662 case NOP_EXPR:
15664 tree inner_type = TREE_TYPE (op0);
15665 tree outer_type = type;
15667 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15668 && tree_expr_nonzero_warnv_p (op0,
15669 strict_overflow_p));
15671 break;
15673 case NON_LVALUE_EXPR:
15674 return tree_expr_nonzero_warnv_p (op0,
15675 strict_overflow_p);
15677 default:
15678 break;
15681 return false;
15684 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15685 For floating point we further ensure that T is not denormal.
15686 Similar logic is present in nonzero_address_p in rtlanal.c.
15688 If the return value is based on the assumption that signed overflow
15689 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15690 change *STRICT_OVERFLOW_P. */
15692 bool
15693 tree_binary_nonzero_warnv_p (enum tree_code code,
15694 tree type,
15695 tree op0,
15696 tree op1, bool *strict_overflow_p)
15698 bool sub_strict_overflow_p;
15699 switch (code)
15701 case POINTER_PLUS_EXPR:
15702 case PLUS_EXPR:
15703 if (TYPE_OVERFLOW_UNDEFINED (type))
15705 /* In the presence of negative values it is hard
15706 to say anything definite. */
15707 sub_strict_overflow_p = false;
15708 if (!tree_expr_nonnegative_warnv_p (op0,
15709 &sub_strict_overflow_p)
15710 || !tree_expr_nonnegative_warnv_p (op1,
15711 &sub_strict_overflow_p))
15712 return false;
15713 /* One of the operands must be positive and the other non-negative. */
15714 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15715 overflows, on a twos-complement machine the sum of two
15716 nonnegative numbers can never be zero. */
15717 return (tree_expr_nonzero_warnv_p (op0,
15718 strict_overflow_p)
15719 || tree_expr_nonzero_warnv_p (op1,
15720 strict_overflow_p));
15722 break;
15724 case MULT_EXPR:
15725 if (TYPE_OVERFLOW_UNDEFINED (type))
15727 if (tree_expr_nonzero_warnv_p (op0,
15728 strict_overflow_p)
15729 && tree_expr_nonzero_warnv_p (op1,
15730 strict_overflow_p))
15732 *strict_overflow_p = true;
15733 return true;
15736 break;
15738 case MIN_EXPR:
15739 sub_strict_overflow_p = false;
15740 if (tree_expr_nonzero_warnv_p (op0,
15741 &sub_strict_overflow_p)
15742 && tree_expr_nonzero_warnv_p (op1,
15743 &sub_strict_overflow_p))
15745 if (sub_strict_overflow_p)
15746 *strict_overflow_p = true;
15748 break;
15750 case MAX_EXPR:
15751 sub_strict_overflow_p = false;
15752 if (tree_expr_nonzero_warnv_p (op0,
15753 &sub_strict_overflow_p))
15755 if (sub_strict_overflow_p)
15756 *strict_overflow_p = true;
15758 /* When both operands are nonzero, MAX is nonzero too. */
15759 if (tree_expr_nonzero_warnv_p (op1,
15760 strict_overflow_p))
15761 return true;
15763 /* MAX where operand 0 is positive is positive. */
15764 return tree_expr_nonnegative_warnv_p (op0,
15765 strict_overflow_p);
15767 /* MAX where operand 1 is positive is positive. */
15768 else if (tree_expr_nonzero_warnv_p (op1,
15769 &sub_strict_overflow_p)
15770 && tree_expr_nonnegative_warnv_p (op1,
15771 &sub_strict_overflow_p))
15773 if (sub_strict_overflow_p)
15774 *strict_overflow_p = true;
15775 return true;
15777 break;
15779 case BIT_IOR_EXPR:
15780 return (tree_expr_nonzero_warnv_p (op1,
15781 strict_overflow_p)
15782 || tree_expr_nonzero_warnv_p (op0,
15783 strict_overflow_p));
15785 default:
15786 break;
15789 return false;
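/* Editor's illustrative sketch, not part of the original source: for
   MULT_EXPR the answer above relies on signed overflow being
   undefined, so the routine also sets *STRICT_OVERFLOW_P.  The helper
   name is hypothetical.  */

static bool
example_mult_nonzero_p (tree a, tree b)
{
  bool strict_overflow_p = false;
  bool nz = tree_binary_nonzero_warnv_p (MULT_EXPR, TREE_TYPE (a), a, b,
					 &strict_overflow_p);
  /* When A and B are known nonzero and the type has undefined
     overflow, NZ is true and STRICT_OVERFLOW_P has been set.  */
  return nz && strict_overflow_p;
}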
15792 /* Return true when the single-operand expression T (a constant,
15793    address, or COND_EXPR) is known to be nonzero.  Similar logic is
15794    present in nonzero_address in rtlanal.c.
15796 If the return value is based on the assumption that signed overflow
15797 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15798 change *STRICT_OVERFLOW_P. */
15800 bool
15801 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15803 bool sub_strict_overflow_p;
15804 switch (TREE_CODE (t))
15806 case INTEGER_CST:
15807 return !integer_zerop (t);
15809 case ADDR_EXPR:
15811 tree base = TREE_OPERAND (t, 0);
15812 if (!DECL_P (base))
15813 base = get_base_address (base);
15815 if (!base)
15816 return false;
15818 /* Weak declarations may link to NULL.  Other declarations may also
15819 resolve to NULL, so only assume non-NULL under
15820 -fdelete-null-pointer-checks; stack variables are always non-NULL. */
15821 if (DECL_P (base)
15822 && (flag_delete_null_pointer_checks
15823 || (DECL_CONTEXT (base)
15824 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15825 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15826 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15828 /* Constants are never weak. */
15829 if (CONSTANT_CLASS_P (base))
15830 return true;
15832 return false;
15835 case COND_EXPR:
15836 sub_strict_overflow_p = false;
15837 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15838 &sub_strict_overflow_p)
15839 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15840 &sub_strict_overflow_p))
15842 if (sub_strict_overflow_p)
15843 *strict_overflow_p = true;
15844 return true;
15846 break;
15848 default:
15849 break;
15851 return false;
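/* Editor's illustrative sketch, not part of the original source: the
   ADDR_EXPR case above lets the address of a non-weak declaration be
   treated as nonzero.  The helper name is hypothetical.  */

static bool
example_addr_nonzero_p (tree decl)
{
  bool strict_overflow_p = false;
  tree addr = build_fold_addr_expr (decl);
  /* True e.g. for a non-weak VAR_DECL when
     -fdelete-null-pointer-checks is in effect, or for a local
     variable on the stack.  */
  return tree_single_nonzero_warnv_p (addr, &strict_overflow_p);
}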
15854 /* Return true when the expression T is known to be nonzero.  Only
15855    integral and pointer types are handled.  Similar logic is present
15856    in nonzero_address in rtlanal.c.
15858 If the return value is based on the assumption that signed overflow
15859 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15860 change *STRICT_OVERFLOW_P. */
15862 bool
15863 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15865 tree type = TREE_TYPE (t);
15866 enum tree_code code;
15868 /* Doing something useful for floating point would need more work. */
15869 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15870 return false;
15872 code = TREE_CODE (t);
15873 switch (TREE_CODE_CLASS (code))
15875 case tcc_unary:
15876 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15877 strict_overflow_p);
15878 case tcc_binary:
15879 case tcc_comparison:
15880 return tree_binary_nonzero_warnv_p (code, type,
15881 TREE_OPERAND (t, 0),
15882 TREE_OPERAND (t, 1),
15883 strict_overflow_p);
15884 case tcc_constant:
15885 case tcc_declaration:
15886 case tcc_reference:
15887 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15889 default:
15890 break;
15893 switch (code)
15895 case TRUTH_NOT_EXPR:
15896 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15897 strict_overflow_p);
15899 case TRUTH_AND_EXPR:
15900 case TRUTH_OR_EXPR:
15901 case TRUTH_XOR_EXPR:
15902 return tree_binary_nonzero_warnv_p (code, type,
15903 TREE_OPERAND (t, 0),
15904 TREE_OPERAND (t, 1),
15905 strict_overflow_p);
15907 case COND_EXPR:
15908 case CONSTRUCTOR:
15909 case OBJ_TYPE_REF:
15910 case ASSERT_EXPR:
15911 case ADDR_EXPR:
15912 case WITH_SIZE_EXPR:
15913 case SSA_NAME:
15914 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15916 case COMPOUND_EXPR:
15917 case MODIFY_EXPR:
15918 case BIND_EXPR:
15919 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15920 strict_overflow_p);
15922 case SAVE_EXPR:
15923 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15924 strict_overflow_p);
15926 case CALL_EXPR:
15927 return alloca_call_p (t);
15929 default:
15930 break;
15932 return false;
15935 /* Return true when the expression T is known to be nonzero.
15936 Handle warnings about undefined signed overflow. */
15938 bool
15939 tree_expr_nonzero_p (tree t)
15941 bool ret, strict_overflow_p;
15943 strict_overflow_p = false;
15944 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15945 if (strict_overflow_p)
15946 fold_overflow_warning (("assuming signed overflow does not occur when "
15947 "determining that expression is always "
15948 "non-zero"),
15949 WARN_STRICT_OVERFLOW_MISC);
15950 return ret;
15953 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15954 attempt to fold the expression to a constant without modifying TYPE,
15955 OP0 or OP1.
15957 If the expression can be simplified to a constant, return the
15958 constant.  If the expression cannot be simplified to a constant,
15959 return NULL_TREE. */
15961 tree
15962 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15964 tree tem = fold_binary (code, type, op0, op1);
15965 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
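/* Editor's illustrative sketch, not part of the original source:
   folding the constant sum 2 + 3.  The helper name is hypothetical.  */

static tree
example_fold_add_constants (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Returns the INTEGER_CST 5; with a non-constant operand it would
     return NULL_TREE instead.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				  two, three);
}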
15968 /* Given the components of a unary expression CODE, TYPE and OP0,
15969 attempt to fold the expression to a constant without modifying
15970 TYPE or OP0.
15972 If the expression can be simplified to a constant, return the
15973 constant.  If the expression cannot be simplified to a constant,
15974 return NULL_TREE. */
15976 tree
15977 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15979 tree tem = fold_unary (code, type, op0);
15980 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
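/* Likewise for the unary entry point (editor's illustrative sketch,
   not part of the original source): folding the negation of the
   constant 7.  The helper name is hypothetical.  */

static tree
example_fold_negate_constant (void)
{
  tree seven = build_int_cst (integer_type_node, 7);
  /* Returns the INTEGER_CST -7.  */
  return fold_unary_to_constant (NEGATE_EXPR, integer_type_node, seven);
}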
15983 /* If EXP represents referencing an element in a constant string
15984 (either via pointer arithmetic or array indexing), return the
15985 tree representing the value accessed, otherwise return NULL. */
15987 tree
15988 fold_read_from_constant_string (tree exp)
15990 if ((TREE_CODE (exp) == INDIRECT_REF
15991 || TREE_CODE (exp) == ARRAY_REF)
15992 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15994 tree exp1 = TREE_OPERAND (exp, 0);
15995 tree index;
15996 tree string;
15997 location_t loc = EXPR_LOCATION (exp);
15999 if (TREE_CODE (exp) == INDIRECT_REF)
16000 string = string_constant (exp1, &index);
16001 else
16003 tree low_bound = array_ref_low_bound (exp);
16004 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16006 /* Optimize the special case of a zero lower bound.
16008 We convert the lower bound to sizetype to avoid some problems
16009 with constant folding.  (E.g. suppose the lower bound is 1,
16010 and its mode is QI.  Without the conversion, (ARRAY
16011 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16012 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!) */
16013 if (! integer_zerop (low_bound))
16014 index = size_diffop_loc (loc, index,
16015 fold_convert_loc (loc, sizetype, low_bound));
16017 string = exp1;
16020 if (string
16021 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16022 && TREE_CODE (string) == STRING_CST
16023 && TREE_CODE (index) == INTEGER_CST
16024 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16025 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16026 == MODE_INT)
16027 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16028 return build_int_cst_type (TREE_TYPE (exp),
16029 (TREE_STRING_POINTER (string)
16030 [TREE_INT_CST_LOW (index)]));
16032 return NULL;
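/* Editor's illustrative sketch, not part of the original source: for
   an ARRAY_REF denoting "abc"[1], the routine above yields the
   character constant 'b'.  The helper name is hypothetical; EXP must
   already be a reference into a STRING_CST.  */

static tree
example_read_string_element (tree exp)
{
  /* An INTEGER_CST holding the character value, or NULL_TREE when
     EXP is not an access into a constant string.  */
  return fold_read_from_constant_string (exp);
}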
16035 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16036 an integer constant, real, or fixed-point constant.
16038 TYPE is the type of the result. */
16040 static tree
16041 fold_negate_const (tree arg0, tree type)
16043 tree t = NULL_TREE;
16045 switch (TREE_CODE (arg0))
16047 case INTEGER_CST:
16049 double_int val = tree_to_double_int (arg0);
16050 bool overflow;
16051 val = val.neg_with_overflow (&overflow);
16052 t = force_fit_type_double (type, val, 1,
16053 (overflow | TREE_OVERFLOW (arg0))
16054 && !TYPE_UNSIGNED (type));
16055 break;
16058 case REAL_CST:
16059 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16060 break;
16062 case FIXED_CST:
16064 FIXED_VALUE_TYPE f;
16065 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16066 &(TREE_FIXED_CST (arg0)), NULL,
16067 TYPE_SATURATING (type));
16068 t = build_fixed (type, f);
16069 /* Propagate overflow flags. */
16070 if (overflow_p | TREE_OVERFLOW (arg0))
16071 TREE_OVERFLOW (t) = 1;
16072 break;
16075 default:
16076 gcc_unreachable ();
16079 return t;
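/* Editor's illustrative sketch, not part of the original source:
   negating the most negative integer is not representable, so the
   returned constant carries TREE_OVERFLOW.  The helper name is
   hypothetical.  */

static bool
example_negate_overflows_p (void)
{
  tree min = TYPE_MIN_VALUE (integer_type_node);
  tree neg = fold_negate_const (min, integer_type_node);
  /* -INT_MIN wraps back to INT_MIN; the overflow flag records this.  */
  return TREE_OVERFLOW (neg) != 0;
}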
16082 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16083 an integer constant or real constant.
16085 TYPE is the type of the result. */
16087 tree
16088 fold_abs_const (tree arg0, tree type)
16090 tree t = NULL_TREE;
16092 switch (TREE_CODE (arg0))
16094 case INTEGER_CST:
16096 double_int val = tree_to_double_int (arg0);
16098 /* If the value is unsigned or non-negative, then the absolute value
16099 is the same as the ordinary value. */
16100 if (TYPE_UNSIGNED (type)
16101 || !val.is_negative ())
16102 t = arg0;
16104 /* If the value is negative, then the absolute value is
16105 its negation. */
16106 else
16108 bool overflow;
16109 val = val.neg_with_overflow (&overflow);
16110 t = force_fit_type_double (type, val, -1,
16111 overflow | TREE_OVERFLOW (arg0));
16114 break;
16116 case REAL_CST:
16117 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16118 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16119 else
16120 t = arg0;
16121 break;
16123 default:
16124 gcc_unreachable ();
16127 return t;
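/* Editor's illustrative sketch, not part of the original source: the
   absolute value of -5 folds to the constant 5.  The helper name is
   hypothetical.  */

static tree
example_abs_constant (void)
{
  tree minus_five = build_int_cst (integer_type_node, -5);
  /* Returns the INTEGER_CST 5.  */
  return fold_abs_const (minus_five, integer_type_node);
}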
16130 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16131 constant. TYPE is the type of the result. */
16133 static tree
16134 fold_not_const (const_tree arg0, tree type)
16136 double_int val;
16138 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16140 val = ~tree_to_double_int (arg0);
16141 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
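/* Editor's illustrative sketch, not part of the original source: in a
   signed type ~5 folds to -6, since ~X == -X - 1 in two's complement.
   The helper name is hypothetical.  */

static tree
example_not_constant (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  /* Returns the INTEGER_CST -6.  */
  return fold_not_const (five, integer_type_node);
}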
16144 /* Given CODE, a relational operator, the target type, TYPE and two
16145 constant operands OP0 and OP1, return the result of the
16146 relational operation. If the result is not a compile time
16147 constant, then return NULL_TREE. */
16149 static tree
16150 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16152 int result, invert;
16154 /* From here on, the only cases we handle are when the result is
16155 known to be a constant. */
16157 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16159 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16160 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16162 /* Handle the cases where either operand is a NaN. */
16163 if (real_isnan (c0) || real_isnan (c1))
16165 switch (code)
16167 case EQ_EXPR:
16168 case ORDERED_EXPR:
16169 result = 0;
16170 break;
16172 case NE_EXPR:
16173 case UNORDERED_EXPR:
16174 case UNLT_EXPR:
16175 case UNLE_EXPR:
16176 case UNGT_EXPR:
16177 case UNGE_EXPR:
16178 case UNEQ_EXPR:
16179 result = 1;
16180 break;
16182 case LT_EXPR:
16183 case LE_EXPR:
16184 case GT_EXPR:
16185 case GE_EXPR:
16186 case LTGT_EXPR:
16187 if (flag_trapping_math)
16188 return NULL_TREE;
16189 result = 0;
16190 break;
16192 default:
16193 gcc_unreachable ();
16196 return constant_boolean_node (result, type);
16199 return constant_boolean_node (real_compare (code, c0, c1), type);
16202 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16204 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16205 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16206 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16209 /* Handle equality/inequality of complex constants. */
16210 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16212 tree rcond = fold_relational_const (code, type,
16213 TREE_REALPART (op0),
16214 TREE_REALPART (op1));
16215 tree icond = fold_relational_const (code, type,
16216 TREE_IMAGPART (op0),
16217 TREE_IMAGPART (op1));
16218 if (code == EQ_EXPR)
16219 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16220 else if (code == NE_EXPR)
16221 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16222 else
16223 return NULL_TREE;
16226 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16228 unsigned count = VECTOR_CST_NELTS (op0);
16229 tree *elts = XALLOCAVEC (tree, count);
16230 gcc_assert (VECTOR_CST_NELTS (op1) == count
16231 && TYPE_VECTOR_SUBPARTS (type) == count);
16233 for (unsigned i = 0; i < count; i++)
16235 tree elem_type = TREE_TYPE (type);
16236 tree elem0 = VECTOR_CST_ELT (op0, i);
16237 tree elem1 = VECTOR_CST_ELT (op1, i);
16239 tree tem = fold_relational_const (code, elem_type,
16240 elem0, elem1);
16242 if (tem == NULL_TREE)
16243 return NULL_TREE;
16245 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16248 return build_vector (type, elts);
16251 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16253 To compute GT, swap the arguments and do LT.
16254 To compute GE, do LT and invert the result.
16255 To compute LE, swap the arguments, do LT and invert the result.
16256 To compute NE, do EQ and invert the result.
16258 Therefore, the code below must handle only EQ and LT. */
16260 if (code == LE_EXPR || code == GT_EXPR)
16262 tree tem = op0;
16263 op0 = op1;
16264 op1 = tem;
16265 code = swap_tree_comparison (code);
16268 /* Note that it is safe to invert for real values here because we
16269 have already handled the one case in which it matters. */
16271 invert = 0;
16272 if (code == NE_EXPR || code == GE_EXPR)
16274 invert = 1;
16275 code = invert_tree_comparison (code, false);
16278 /* Compute a result for LT or EQ if the arguments permit;
16279 otherwise return NULL_TREE. */
16280 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16282 if (code == EQ_EXPR)
16283 result = tree_int_cst_equal (op0, op1);
16284 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16285 result = INT_CST_LT_UNSIGNED (op0, op1);
16286 else
16287 result = INT_CST_LT (op0, op1);
16289 else
16290 return NULL_TREE;
16292 if (invert)
16293 result ^= 1;
16294 return constant_boolean_node (result, type);
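/* Editor's illustrative sketch, not part of the original source: the
   integer path above reduces every comparison to EQ or LT, so 2 < 3
   folds directly to a true boolean node.  The helper name is
   hypothetical.  */

static tree
example_fold_less_than (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Returns constant_boolean_node (1, boolean_type_node).  */
  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}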
16297 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16298 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16299 itself. */
16301 tree
16302 fold_build_cleanup_point_expr (tree type, tree expr)
16304 /* If the expression does not have side effects then we don't have to wrap
16305 it with a cleanup point expression. */
16306 if (!TREE_SIDE_EFFECTS (expr))
16307 return expr;
16309 /* If the expression is a return, check whether its operand (or, when
16310 that operand is a MODIFY_EXPR, the right-hand side of the modify)
16311 has side effects.  If it does not, we don't need to wrap the
16312 expression in a cleanup point expression.  Note we don't check the
16313 left-hand side of the modify because it should always be the return decl. */
16314 if (TREE_CODE (expr) == RETURN_EXPR)
16316 tree op = TREE_OPERAND (expr, 0);
16317 if (!op || !TREE_SIDE_EFFECTS (op))
16318 return expr;
16319 op = TREE_OPERAND (op, 1);
16320 if (!TREE_SIDE_EFFECTS (op))
16321 return expr;
16324 return build1 (CLEANUP_POINT_EXPR, type, expr);
16327 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16328 of an indirection through OP0, or NULL_TREE if no simplification is
16329 possible. */
16331 tree
16332 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16334 tree sub = op0;
16335 tree subtype;
16337 STRIP_NOPS (sub);
16338 subtype = TREE_TYPE (sub);
16339 if (!POINTER_TYPE_P (subtype))
16340 return NULL_TREE;
16342 if (TREE_CODE (sub) == ADDR_EXPR)
16344 tree op = TREE_OPERAND (sub, 0);
16345 tree optype = TREE_TYPE (op);
16346 /* *&CONST_DECL -> to the value of the const decl. */
16347 if (TREE_CODE (op) == CONST_DECL)
16348 return DECL_INITIAL (op);
16349 /* *&p => p; make sure to handle *&"str"[cst] here. */
16350 if (type == optype)
16352 tree fop = fold_read_from_constant_string (op);
16353 if (fop)
16354 return fop;
16355 else
16356 return op;
16358 /* *(foo *)&fooarray => fooarray[0] */
16359 else if (TREE_CODE (optype) == ARRAY_TYPE
16360 && type == TREE_TYPE (optype)
16361 && (!in_gimple_form
16362 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16364 tree type_domain = TYPE_DOMAIN (optype);
16365 tree min_val = size_zero_node;
16366 if (type_domain && TYPE_MIN_VALUE (type_domain))
16367 min_val = TYPE_MIN_VALUE (type_domain);
16368 if (in_gimple_form
16369 && TREE_CODE (min_val) != INTEGER_CST)
16370 return NULL_TREE;
16371 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16372 NULL_TREE, NULL_TREE);
16374 /* *(foo *)&complexfoo => __real__ complexfoo */
16375 else if (TREE_CODE (optype) == COMPLEX_TYPE
16376 && type == TREE_TYPE (optype))
16377 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16378 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16379 else if (TREE_CODE (optype) == VECTOR_TYPE
16380 && type == TREE_TYPE (optype))
16382 tree part_width = TYPE_SIZE (type);
16383 tree index = bitsize_int (0);
16384 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16388 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16389 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16391 tree op00 = TREE_OPERAND (sub, 0);
16392 tree op01 = TREE_OPERAND (sub, 1);
16394 STRIP_NOPS (op00);
16395 if (TREE_CODE (op00) == ADDR_EXPR)
16397 tree op00type;
16398 op00 = TREE_OPERAND (op00, 0);
16399 op00type = TREE_TYPE (op00);
16401 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16402 if (TREE_CODE (op00type) == VECTOR_TYPE
16403 && type == TREE_TYPE (op00type))
16405 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16406 tree part_width = TYPE_SIZE (type);
16407 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16408 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16409 tree index = bitsize_int (indexi);
16411 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16412 return fold_build3_loc (loc,
16413 BIT_FIELD_REF, type, op00,
16414 part_width, index);
16417 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16418 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16419 && type == TREE_TYPE (op00type))
16421 tree size = TYPE_SIZE_UNIT (type);
16422 if (tree_int_cst_equal (size, op01))
16423 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16425 /* ((foo *)&fooarray)[1] => fooarray[1] */
16426 else if (TREE_CODE (op00type) == ARRAY_TYPE
16427 && type == TREE_TYPE (op00type))
16429 tree type_domain = TYPE_DOMAIN (op00type);
16430 tree min_val = size_zero_node;
16431 if (type_domain && TYPE_MIN_VALUE (type_domain))
16432 min_val = TYPE_MIN_VALUE (type_domain);
16433 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16434 TYPE_SIZE_UNIT (type));
16435 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16436 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16437 NULL_TREE, NULL_TREE);
16442 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16443 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16444 && type == TREE_TYPE (TREE_TYPE (subtype))
16445 && (!in_gimple_form
16446 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16448 tree type_domain;
16449 tree min_val = size_zero_node;
16450 sub = build_fold_indirect_ref_loc (loc, sub);
16451 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16452 if (type_domain && TYPE_MIN_VALUE (type_domain))
16453 min_val = TYPE_MIN_VALUE (type_domain);
16454 if (in_gimple_form
16455 && TREE_CODE (min_val) != INTEGER_CST)
16456 return NULL_TREE;
16457 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16458 NULL_TREE);
16461 return NULL_TREE;
16464 /* Builds an expression for an indirection through T, simplifying some
16465 cases. */
16467 tree
16468 build_fold_indirect_ref_loc (location_t loc, tree t)
16470 tree type = TREE_TYPE (TREE_TYPE (t));
16471 tree sub = fold_indirect_ref_1 (loc, type, t);
16473 if (sub)
16474 return sub;
16476 return build1_loc (loc, INDIRECT_REF, type, t);
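/* Editor's illustrative sketch, not part of the original source:
   dereferencing the address of a declaration of matching type folds
   back to the declaration itself instead of building an INDIRECT_REF.
   The helper name is hypothetical.  */

static tree
example_fold_deref_addr (location_t loc, tree var)
{
  tree addr = build_fold_addr_expr (var);
  /* *&VAR simplifies to VAR via fold_indirect_ref_1.  */
  return build_fold_indirect_ref_loc (loc, addr);
}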
16479 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16481 tree
16482 fold_indirect_ref_loc (location_t loc, tree t)
16484 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16486 if (sub)
16487 return sub;
16488 else
16489 return t;
16492 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16493 whose result is ignored. The type of the returned tree need not be
16494 the same as the original expression. */
16496 tree
16497 fold_ignored_result (tree t)
16499 if (!TREE_SIDE_EFFECTS (t))
16500 return integer_zero_node;
16502 for (;;)
16503 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16505 case tcc_unary:
16506 t = TREE_OPERAND (t, 0);
16507 break;
16509 case tcc_binary:
16510 case tcc_comparison:
16511 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16512 t = TREE_OPERAND (t, 0);
16513 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16514 t = TREE_OPERAND (t, 1);
16515 else
16516 return t;
16517 break;
16519 case tcc_expression:
16520 switch (TREE_CODE (t))
16522 case COMPOUND_EXPR:
16523 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16524 return t;
16525 t = TREE_OPERAND (t, 0);
16526 break;
16528 case COND_EXPR:
16529 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16530 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16531 return t;
16532 t = TREE_OPERAND (t, 0);
16533 break;
16535 default:
16536 return t;
16538 break;
16540 default:
16541 return t;
16545 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16546 This can only be applied to objects of a sizetype. */
16548 tree
16549 round_up_loc (location_t loc, tree value, int divisor)
16551 tree div = NULL_TREE;
16553 gcc_assert (divisor > 0);
16554 if (divisor == 1)
16555 return value;
16557 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16558 have to do anything.  Only perform this check when VALUE is not a
16559 constant, because for a constant the check is more expensive than
16560 simply doing the rounding. */
16561 if (TREE_CODE (value) != INTEGER_CST)
16563 div = build_int_cst (TREE_TYPE (value), divisor);
16565 if (multiple_of_p (TREE_TYPE (value), value, div))
16566 return value;
16569 /* If divisor is a power of two, simplify this to bit manipulation. */
16570 if (divisor == (divisor & -divisor))
16572 if (TREE_CODE (value) == INTEGER_CST)
16574 double_int val = tree_to_double_int (value);
16575 bool overflow_p;
16577 if ((val.low & (divisor - 1)) == 0)
16578 return value;
16580 overflow_p = TREE_OVERFLOW (value);
16581 val.low &= ~(divisor - 1);
16582 val.low += divisor;
16583 if (val.low == 0)
16585 val.high++;
16586 if (val.high == 0)
16587 overflow_p = true;
16590 return force_fit_type_double (TREE_TYPE (value), val,
16591 -1, overflow_p);
16593 else
16595 tree t;
16597 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16598 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16599 t = build_int_cst (TREE_TYPE (value), -divisor);
16600 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16603 else
16605 if (!div)
16606 div = build_int_cst (TREE_TYPE (value), divisor);
16607 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16608 value = size_binop_loc (loc, MULT_EXPR, value, div);
16611 return value;
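/* Editor's illustrative sketch, not part of the original source: for
   a power-of-two divisor the code above reduces to bit twiddling,
   (VALUE + DIVISOR - 1) & -DIVISOR, so 13 rounded up to a multiple of
   8 is 16.  The helper name is hypothetical.  */

static tree
example_round_up_13_to_8 (location_t loc)
{
  /* Returns the sizetype constant 16.  */
  return round_up_loc (loc, size_int (13), 8);
}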
16614 /* Likewise, but round down. */
16616 tree
16617 round_down_loc (location_t loc, tree value, int divisor)
16619 tree div = NULL_TREE;
16621 gcc_assert (divisor > 0);
16622 if (divisor == 1)
16623 return value;
16625 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16626 have to do anything.  Only perform this check when VALUE is not a
16627 constant, because for a constant the check is more expensive than
16628 simply doing the rounding. */
16629 if (TREE_CODE (value) != INTEGER_CST)
16631 div = build_int_cst (TREE_TYPE (value), divisor);
16633 if (multiple_of_p (TREE_TYPE (value), value, div))
16634 return value;
16637 /* If divisor is a power of two, simplify this to bit manipulation. */
16638 if (divisor == (divisor & -divisor))
16640 tree t;
16642 t = build_int_cst (TREE_TYPE (value), -divisor);
16643 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16645 else
16647 if (!div)
16648 div = build_int_cst (TREE_TYPE (value), divisor);
16649 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16650 value = size_binop_loc (loc, MULT_EXPR, value, div);
16653 return value;
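/* Likewise for rounding down (editor's illustrative sketch, not part
   of the original source): 13 & -8 == 8.  The helper name is
   hypothetical.  */

static tree
example_round_down_13_to_8 (location_t loc)
{
  /* Returns the sizetype constant 8.  */
  return round_down_loc (loc, size_int (13), 8);
}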
16656 /* Returns a pointer to the base of the object addressed by EXP and
16657 extracts information about the offset of the access, storing it
16658 in *PBITPOS and *POFFSET. */
16660 static tree
16661 split_address_to_core_and_offset (tree exp,
16662 HOST_WIDE_INT *pbitpos, tree *poffset)
16664 tree core;
16665 enum machine_mode mode;
16666 int unsignedp, volatilep;
16667 HOST_WIDE_INT bitsize;
16668 location_t loc = EXPR_LOCATION (exp);
16670 if (TREE_CODE (exp) == ADDR_EXPR)
16672 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16673 poffset, &mode, &unsignedp, &volatilep,
16674 false);
16675 core = build_fold_addr_expr_loc (loc, core);
16677 else
16679 core = exp;
16680 *pbitpos = 0;
16681 *poffset = NULL_TREE;
16684 return core;
16687 /* Returns true if addresses of E1 and E2 differ by a constant, false
16688 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16690 bool
16691 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16693 tree core1, core2;
16694 HOST_WIDE_INT bitpos1, bitpos2;
16695 tree toffset1, toffset2, tdiff, type;
16697 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16698 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16700 if (bitpos1 % BITS_PER_UNIT != 0
16701 || bitpos2 % BITS_PER_UNIT != 0
16702 || !operand_equal_p (core1, core2, 0))
16703 return false;
16705 if (toffset1 && toffset2)
16707 type = TREE_TYPE (toffset1);
16708 if (type != TREE_TYPE (toffset2))
16709 toffset2 = fold_convert (type, toffset2);
16711 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16712 if (!cst_and_fits_in_hwi (tdiff))
16713 return false;
16715 *diff = int_cst_value (tdiff);
16717 else if (toffset1 || toffset2)
16719 /* If only one of the offsets is non-constant, the difference cannot
16720 be a constant. */
16721 return false;
16723 else
16724 *diff = 0;
16726 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16727 return true;
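/* Editor's illustrative sketch, not part of the original source: for
   E1 = &a[3] and E2 = &a[1] over a 4-byte element type, the cores
   compare equal and *DIFF is set to 8.  The helper name is
   hypothetical.  */

static bool
example_ptr_diff_nonneg_p (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  if (!ptr_difference_const (e1, e2, &diff))
    return false;
  /* DIFF holds the constant byte difference E1 - E2.  */
  return diff >= 0;
}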
16730 /* Simplify the floating point expression EXP when the sign of the
16731 result is not significant. Return NULL_TREE if no simplification
16732 is possible. */
16734 tree
16735 fold_strip_sign_ops (tree exp)
16737 tree arg0, arg1;
16738 location_t loc = EXPR_LOCATION (exp);
16740 switch (TREE_CODE (exp))
16742 case ABS_EXPR:
16743 case NEGATE_EXPR:
16744 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16745 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16747 case MULT_EXPR:
16748 case RDIV_EXPR:
16749 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16750 return NULL_TREE;
16751 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16752 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16753 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16754 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16755 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16756 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16757 break;
16759 case COMPOUND_EXPR:
16760 arg0 = TREE_OPERAND (exp, 0);
16761 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16762 if (arg1)
16763 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16764 break;
16766 case COND_EXPR:
16767 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16768 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16769 if (arg0 || arg1)
16770 return fold_build3_loc (loc,
16771 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16772 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16773 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16774 break;
16776 case CALL_EXPR:
16778 const enum built_in_function fcode = builtin_mathfn_code (exp);
16779 switch (fcode)
16781 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16782 /* Strip copysign function call, return the 1st argument. */
16783 arg0 = CALL_EXPR_ARG (exp, 0);
16784 arg1 = CALL_EXPR_ARG (exp, 1);
16785 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16787 default:
16788 /* Strip sign ops from the argument of "odd" math functions. */
16789 if (negate_mathfn_p (fcode))
16791 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16792 if (arg0)
16793 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16795 break;
16798 break;
16800 default:
16801 break;
16803 return NULL_TREE;
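/* Editor's illustrative sketch, not part of the original source: when
   only the magnitude of the result matters (e.g. underneath fabs),
   the routine above lets a caller replace -X * Y with X * Y.  The
   helper name is hypothetical.  */

static tree
example_strip_sign_ops (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  /* NULL_TREE means no sign-only operation could be removed.  */
  return stripped ? stripped : exp;
}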