/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
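
/* The encoding above uses one bit each for "less", "equal", "greater"
   and "unordered": e.g. COMPCODE_LE (3) == COMPCODE_LT (1)
   | COMPCODE_EQ (2), and COMPCODE_NE (13) is LT | GT | UNORD.  This is
   what lets the AND and OR of two comparisons be computed bitwise on
   their codes.  */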
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
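
/* E.g. with CODE == TRUNC_DIV_EXPR, (ARG1, ARG2) == (12, 4) folds to
   the constant 3, while (13, 4) yields NULL_TREE because the remainder
   is nonzero.  */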
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
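
/* A typical caller folds speculatively and only keeps the result on
   success, along the lines of (sketch):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so that a -Wstrict-overflow diagnostic is emitted only when the
   folded result is actually used.  */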
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
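
/* For instance, sin is odd (-sin(x) == sin(-x)), so BUILT_IN_SIN is
   listed above and fold_negate_expr below can rewrite -sin(x) as
   sin(-x); cos is even rather than odd and is deliberately absent
   from the list.  */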
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
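
/* Worked example: for a 16-bit signed type the only rejected value is
   -32768 (0x8000), whose negation 32768 is not representable; PREC is
   16 here, so the final test compares VAL against 1 << 15.  */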
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
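
/* As a concrete case of the PLUS_EXPR rule above: for signed X with
   -fwrapv (so TYPE_OVERFLOW_WRAPS holds), negate_expr_p (X + 1) is
   true because the literal 1 is negatable, allowing fold_negate_expr
   below to rewrite -(X + 1) as (-1) - X.  */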
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
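
/* For instance, splitting X + 7 with CODE == PLUS_EXPR stores 7 in
   *LITP, leaves *CONP and *MINUS_LITP null, and returns X; splitting
   X - 7 stores the 7 in *MINUS_LITP instead.  */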
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
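
/* E.g. int_const_binop (PLUS_EXPR, <2>, <3>) yields the INTEGER_CST 5.
   Passing 1 for OVERFLOWABLE means a result that does not fit the type
   is still returned, with TREE_OVERFLOW recording signed overflow (see
   force_fit_type_double), rather than folding being refused.  */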
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
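
/* E.g. folding PLUS_EXPR on the V4SI constants {1,2,3,4} and
   {10,20,30,40} proceeds element-wise to the VECTOR_CST {11,22,33,44};
   if any element fails to fold, the whole fold is abandoned.  */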
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
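
/* E.g. for the sizetype constants 4 and 12, ARG0 - ARG1 would wrap, so
   this computes 12 - 4 = 8 in the unsigned type, converts to ssizetype,
   and negates by subtracting from zero, yielding ssizetype -8 with no
   intermediate overflow.  */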
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
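
/* Worked examples for TYPE == 32-bit signed int: 3.9 truncates to 3,
   4.0e9 saturates to INT_MAX with TREE_OVERFLOW set on the result, and
   a NaN folds to 0, also with TREE_OVERFLOW set.  */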
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits were shifted out, add 1 back to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }
1901 case FIXED_POINT_TYPE:
1902 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1903 || TREE_CODE (arg) == REAL_CST)
1905 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 goto fold_convert_exit;
1910 switch (TREE_CODE (orig))
1912 case FIXED_POINT_TYPE:
1913 case INTEGER_TYPE:
1914 case ENUMERAL_TYPE:
1915 case BOOLEAN_TYPE:
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1919 case COMPLEX_TYPE:
1920 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1921 return fold_convert_loc (loc, type, tem);
1923 default:
1924 gcc_unreachable ();
1927 case COMPLEX_TYPE:
1928 switch (TREE_CODE (orig))
1930 case INTEGER_TYPE:
1931 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case REAL_TYPE:
1934 case FIXED_POINT_TYPE:
1935 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1936 fold_convert_loc (loc, TREE_TYPE (type), arg),
1937 fold_convert_loc (loc, TREE_TYPE (type),
1938 integer_zero_node));
1939 case COMPLEX_TYPE:
1941 tree rpart, ipart;
1943 if (TREE_CODE (arg) == COMPLEX_EXPR)
1945 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1946 TREE_OPERAND (arg, 0));
1947 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1948 TREE_OPERAND (arg, 1));
1949 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1952 arg = save_expr (arg);
1953 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1954 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1955 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1956 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1957 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1960 default:
1961 gcc_unreachable ();
1964 case VECTOR_TYPE:
1965 if (integer_zerop (arg))
1966 return build_zero_vector (type);
1967 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1968 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1969 || TREE_CODE (orig) == VECTOR_TYPE);
1970 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1972 case VOID_TYPE:
1973 tem = fold_ignored_result (arg);
1974 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1976 default:
1977 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1978 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1979 gcc_unreachable ();
1981 fold_convert_exit:
1982 protected_set_expr_location_unshare (tem, loc);
1983 return tem;
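/* Illustrative usage sketch (not part of the original sources): with a
   location LOC in scope, a hypothetical caller might write

     tree three = build_int_cst (integer_type_node, 3);
     tree as_dbl = fold_convert_loc (loc, double_type_node, three);

   The REAL_TYPE arm above routes this through fold_convert_const with
   FLOAT_EXPR, so AS_DBL comes back as the REAL_CST 3.0 rather than as a
   wrapped conversion; the names THREE and AS_DBL are hypothetical.  */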
1986 /* Return false if expr can be assumed not to be an lvalue, true
1987 otherwise. */
1989 static bool
1990 maybe_lvalue_p (const_tree x)
1992 /* We only need to wrap lvalue tree codes. */
1993 switch (TREE_CODE (x))
1995 case VAR_DECL:
1996 case PARM_DECL:
1997 case RESULT_DECL:
1998 case LABEL_DECL:
1999 case FUNCTION_DECL:
2000 case SSA_NAME:
2002 case COMPONENT_REF:
2003 case MEM_REF:
2004 case INDIRECT_REF:
2005 case ARRAY_REF:
2006 case ARRAY_RANGE_REF:
2007 case BIT_FIELD_REF:
2008 case OBJ_TYPE_REF:
2010 case REALPART_EXPR:
2011 case IMAGPART_EXPR:
2012 case PREINCREMENT_EXPR:
2013 case PREDECREMENT_EXPR:
2014 case SAVE_EXPR:
2015 case TRY_CATCH_EXPR:
2016 case WITH_CLEANUP_EXPR:
2017 case COMPOUND_EXPR:
2018 case MODIFY_EXPR:
2019 case TARGET_EXPR:
2020 case COND_EXPR:
2021 case BIND_EXPR:
2022 break;
2024 default:
2025 /* Assume the worst for front-end tree codes. */
2026 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2027 break;
2028 return false;
2031 return true;
2034 /* Return an expr equal to X but certainly not valid as an lvalue. */
2036 tree
2037 non_lvalue_loc (location_t loc, tree x)
2039 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2040 us. */
2041 if (in_gimple_form)
2042 return x;
2044 if (! maybe_lvalue_p (x))
2045 return x;
2046 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2049 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2050 Zero means allow extended lvalues. */
2052 int pedantic_lvalues;
2054 /* When pedantic, return an expr equal to X but certainly not valid as a
2055 pedantic lvalue. Otherwise, return X. */
2057 static tree
2058 pedantic_non_lvalue_loc (location_t loc, tree x)
2060 if (pedantic_lvalues)
2061 return non_lvalue_loc (loc, x);
2063 return protected_set_expr_location_unshare (x, loc);
2066 /* Given a tree comparison code, return the code that is the logical inverse.
2067 It is generally not safe to do this for floating-point comparisons, except
2068 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2069 ERROR_MARK in this case. */
2071 enum tree_code
2072 invert_tree_comparison (enum tree_code code, bool honor_nans)
2074 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2075 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2076 return ERROR_MARK;
2078 switch (code)
2080 case EQ_EXPR:
2081 return NE_EXPR;
2082 case NE_EXPR:
2083 return EQ_EXPR;
2084 case GT_EXPR:
2085 return honor_nans ? UNLE_EXPR : LE_EXPR;
2086 case GE_EXPR:
2087 return honor_nans ? UNLT_EXPR : LT_EXPR;
2088 case LT_EXPR:
2089 return honor_nans ? UNGE_EXPR : GE_EXPR;
2090 case LE_EXPR:
2091 return honor_nans ? UNGT_EXPR : GT_EXPR;
2092 case LTGT_EXPR:
2093 return UNEQ_EXPR;
2094 case UNEQ_EXPR:
2095 return LTGT_EXPR;
2096 case UNGT_EXPR:
2097 return LE_EXPR;
2098 case UNGE_EXPR:
2099 return LT_EXPR;
2100 case UNLT_EXPR:
2101 return GE_EXPR;
2102 case UNLE_EXPR:
2103 return GT_EXPR;
2104 case ORDERED_EXPR:
2105 return UNORDERED_EXPR;
2106 case UNORDERED_EXPR:
2107 return ORDERED_EXPR;
2108 default:
2109 gcc_unreachable ();
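/* Worked example (illustrative): when NaNs are honored, the inverse of
   a < b must be true for unordered operands, so

     invert_tree_comparison (LT_EXPR, true)  yields UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false) yields GE_EXPR

   and with flag_trapping_math set the first call instead yields
   ERROR_MARK, because replacing a trapping LT by a non-trapping UNGE
   would change which inputs raise an exception.  */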
2113 /* Similar, but return the comparison that results if the operands are
2114 swapped. This is safe for floating-point. */
2116 enum tree_code
2117 swap_tree_comparison (enum tree_code code)
2119 switch (code)
2121 case EQ_EXPR:
2122 case NE_EXPR:
2123 case ORDERED_EXPR:
2124 case UNORDERED_EXPR:
2125 case LTGT_EXPR:
2126 case UNEQ_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 case UNGT_EXPR:
2137 return UNLT_EXPR;
2138 case UNGE_EXPR:
2139 return UNLE_EXPR;
2140 case UNLT_EXPR:
2141 return UNGT_EXPR;
2142 case UNLE_EXPR:
2143 return UNGE_EXPR;
2144 default:
2145 gcc_unreachable ();
2150 /* Convert a comparison tree code from an enum tree_code representation
2151 into a compcode bit-based encoding. This function is the inverse of
2152 compcode_to_comparison. */
2154 static enum comparison_code
2155 comparison_to_compcode (enum tree_code code)
2157 switch (code)
2159 case LT_EXPR:
2160 return COMPCODE_LT;
2161 case EQ_EXPR:
2162 return COMPCODE_EQ;
2163 case LE_EXPR:
2164 return COMPCODE_LE;
2165 case GT_EXPR:
2166 return COMPCODE_GT;
2167 case NE_EXPR:
2168 return COMPCODE_NE;
2169 case GE_EXPR:
2170 return COMPCODE_GE;
2171 case ORDERED_EXPR:
2172 return COMPCODE_ORD;
2173 case UNORDERED_EXPR:
2174 return COMPCODE_UNORD;
2175 case UNLT_EXPR:
2176 return COMPCODE_UNLT;
2177 case UNEQ_EXPR:
2178 return COMPCODE_UNEQ;
2179 case UNLE_EXPR:
2180 return COMPCODE_UNLE;
2181 case UNGT_EXPR:
2182 return COMPCODE_UNGT;
2183 case LTGT_EXPR:
2184 return COMPCODE_LTGT;
2185 case UNGE_EXPR:
2186 return COMPCODE_UNGE;
2187 default:
2188 gcc_unreachable ();
2192 /* Convert a compcode bit-based encoding of a comparison operator back
2193 to GCC's enum tree_code representation. This function is the
2194 inverse of comparison_to_compcode. */
2196 static enum tree_code
2197 compcode_to_comparison (enum comparison_code code)
2199 switch (code)
2201 case COMPCODE_LT:
2202 return LT_EXPR;
2203 case COMPCODE_EQ:
2204 return EQ_EXPR;
2205 case COMPCODE_LE:
2206 return LE_EXPR;
2207 case COMPCODE_GT:
2208 return GT_EXPR;
2209 case COMPCODE_NE:
2210 return NE_EXPR;
2211 case COMPCODE_GE:
2212 return GE_EXPR;
2213 case COMPCODE_ORD:
2214 return ORDERED_EXPR;
2215 case COMPCODE_UNORD:
2216 return UNORDERED_EXPR;
2217 case COMPCODE_UNLT:
2218 return UNLT_EXPR;
2219 case COMPCODE_UNEQ:
2220 return UNEQ_EXPR;
2221 case COMPCODE_UNLE:
2222 return UNLE_EXPR;
2223 case COMPCODE_UNGT:
2224 return UNGT_EXPR;
2225 case COMPCODE_LTGT:
2226 return LTGT_EXPR;
2227 case COMPCODE_UNGE:
2228 return UNGE_EXPR;
2229 default:
2230 gcc_unreachable ();
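/* The point of the bit encoding (illustration): each compcode bit stands
   for one of LT, EQ, GT and UNORD, so conjunction and disjunction of two
   predicates over the same operands become plain bit operations, e.g.

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LE
     comparison_to_compcode (LT_EXPR) & comparison_to_compcode (GT_EXPR)
       == COMPCODE_FALSE

   which is exactly what combine_comparisons below exploits.  */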
2234 /* Return a tree for the comparison which is the combination of
2235 doing the AND or OR (depending on CODE) of the two operations LCODE
2236 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2237 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2238 if this makes the transformation invalid. */
2240 tree
2241 combine_comparisons (location_t loc,
2242 enum tree_code code, enum tree_code lcode,
2243 enum tree_code rcode, tree truth_type,
2244 tree ll_arg, tree lr_arg)
2246 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2247 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2248 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2249 int compcode;
2251 switch (code)
2253 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2254 compcode = lcompcode & rcompcode;
2255 break;
2257 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2258 compcode = lcompcode | rcompcode;
2259 break;
2261 default:
2262 return NULL_TREE;
2265 if (!honor_nans)
2267 /* Eliminate unordered comparisons, as well as LTGT and ORD
2268 which are not used unless the mode has NaNs. */
2269 compcode &= ~COMPCODE_UNORD;
2270 if (compcode == COMPCODE_LTGT)
2271 compcode = COMPCODE_NE;
2272 else if (compcode == COMPCODE_ORD)
2273 compcode = COMPCODE_TRUE;
2275 else if (flag_trapping_math)
2277 /* Check that the original operation and the optimized ones will trap
2278 under the same condition. */
2279 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2280 && (lcompcode != COMPCODE_EQ)
2281 && (lcompcode != COMPCODE_ORD);
2282 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2283 && (rcompcode != COMPCODE_EQ)
2284 && (rcompcode != COMPCODE_ORD);
2285 bool trap = (compcode & COMPCODE_UNORD) == 0
2286 && (compcode != COMPCODE_EQ)
2287 && (compcode != COMPCODE_ORD);
2289 /* In a short-circuited boolean expression the LHS might be
2290 such that the RHS, if evaluated, will never trap. For
2291 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2292 if neither x nor y is NaN. (This is a mixed blessing: for
2293 example, the expression above will never trap, hence
2294 optimizing it to x < y would be invalid). */
2295 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2296 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 rtrap = false;
2299 /* If the comparison was short-circuited, and only the RHS
2300 trapped, we may now generate a spurious trap. */
2301 if (rtrap && !ltrap
2302 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 return NULL_TREE;
2305 /* If we changed the conditions that cause a trap, we lose. */
2306 if ((ltrap || rtrap) != trap)
2307 return NULL_TREE;
2310 if (compcode == COMPCODE_TRUE)
2311 return constant_boolean_node (true, truth_type);
2312 else if (compcode == COMPCODE_FALSE)
2313 return constant_boolean_node (false, truth_type);
2314 else
2316 enum tree_code tcode;
2318 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2319 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
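/* Usage sketch (hypothetical caller): for integer trees X and Y,

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   returns the tree "x <= y", since COMPCODE_LT | COMPCODE_EQ is
   COMPCODE_LE.  For floating point under -ftrapping-math the trap
   analysis above may return NULL_TREE instead.  */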
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2349 int
2350 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2354 || TREE_TYPE (arg0) == error_mark_node
2355 || TREE_TYPE (arg1) == error_mark_node)
2356 return 0;
2358 /* Similar, if either does not have a type (like a released SSA name),
2359 they aren't equal. */
2360 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2361 return 0;
2363 /* Check equality of integer constants before bailing out due to
2364 precision differences. */
2365 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2366 return tree_int_cst_equal (arg0, arg1);
2368 /* If both types don't have the same signedness, then we can't consider
2369 them equal. We must check this before the STRIP_NOPS calls
2370 because they may change the signedness of the arguments. As pointers
2371 strictly don't have a signedness, require either two pointers or
2372 two non-pointers as well. */
2373 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2374 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2375 return 0;
2377 /* We cannot consider pointers to different address spaces equal. */
2378 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2379 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2380 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2381 return 0;
2383 /* If both types don't have the same precision, then it is not safe
2384 to strip NOPs. */
2385 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2386 return 0;
2388 STRIP_NOPS (arg0);
2389 STRIP_NOPS (arg1);
2391 /* In case both args are comparisons but with different comparison
2392 code, try to swap the comparison operands of one arg to produce
2393 a match and compare that variant. */
2394 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2395 && COMPARISON_CLASS_P (arg0)
2396 && COMPARISON_CLASS_P (arg1))
2398 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2400 if (TREE_CODE (arg0) == swap_code)
2401 return operand_equal_p (TREE_OPERAND (arg0, 0),
2402 TREE_OPERAND (arg1, 1), flags)
2403 && operand_equal_p (TREE_OPERAND (arg0, 1),
2404 TREE_OPERAND (arg1, 0), flags);
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 /* This is needed for conversions and for COMPONENT_REF.
2409 Might as well play it safe and always test this. */
2410 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2411 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2412 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2413 return 0;
2415 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2416 We don't care about side effects in that case because the SAVE_EXPR
2417 takes care of that for us. In all other cases, two expressions are
2418 equal if they have no side effects. If we have two identical
2419 expressions with side effects that should be treated the same due
2420 to the only side effects being identical SAVE_EXPR's, that will
2421 be detected in the recursive calls below.
2422 If we are taking an invariant address of two identical objects
2423 they are necessarily equal as well. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (flags & OEP_CONSTANT_ADDRESS_OF)
2427 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2428 return 1;
2430 /* Next handle constant cases, those for which we can return 1 even
2431 if ONLY_CONST is set. */
2432 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2433 switch (TREE_CODE (arg0))
2435 case INTEGER_CST:
2436 return tree_int_cst_equal (arg0, arg1);
2438 case FIXED_CST:
2439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2440 TREE_FIXED_CST (arg1));
2442 case REAL_CST:
2443 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2444 TREE_REAL_CST (arg1)))
2445 return 1;
2448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2450 /* If we do not distinguish between signed and unsigned zero,
2451 consider them equal. */
2452 if (real_zerop (arg0) && real_zerop (arg1))
2453 return 1;
2455 return 0;
2457 case VECTOR_CST:
2459 unsigned i;
2461 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2462 return 0;
2464 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2466 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2467 VECTOR_CST_ELT (arg1, i), flags))
2468 return 0;
2470 return 1;
2473 case COMPLEX_CST:
2474 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2475 flags)
2476 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2477 flags));
2479 case STRING_CST:
2480 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2481 && ! memcmp (TREE_STRING_POINTER (arg0),
2482 TREE_STRING_POINTER (arg1),
2483 TREE_STRING_LENGTH (arg0)));
2485 case ADDR_EXPR:
2486 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2487 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2488 ? OEP_CONSTANT_ADDRESS_OF : 0);
2489 default:
2490 break;
2493 if (flags & OEP_ONLY_CONST)
2494 return 0;
2496 /* Define macros to test an operand from arg0 and arg1 for equality and a
2497 variant that allows null and views null as being different from any
2498 non-null value. In the latter case, if either is null, then both
2499 must be; otherwise, do the normal comparison. */
2500 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2501 TREE_OPERAND (arg1, N), flags)
2503 #define OP_SAME_WITH_NULL(N) \
2504 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2505 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2507 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2509 case tcc_unary:
2510 /* Two conversions are equal only if signedness and modes match. */
2511 switch (TREE_CODE (arg0))
2513 CASE_CONVERT:
2514 case FIX_TRUNC_EXPR:
2515 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2516 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2517 return 0;
2518 break;
2519 default:
2520 break;
2523 return OP_SAME (0);
2526 case tcc_comparison:
2527 case tcc_binary:
2528 if (OP_SAME (0) && OP_SAME (1))
2529 return 1;
2531 /* For commutative ops, allow the other order. */
2532 return (commutative_tree_code (TREE_CODE (arg0))
2533 && operand_equal_p (TREE_OPERAND (arg0, 0),
2534 TREE_OPERAND (arg1, 1), flags)
2535 && operand_equal_p (TREE_OPERAND (arg0, 1),
2536 TREE_OPERAND (arg1, 0), flags));
2538 case tcc_reference:
2539 /* If either of the pointer (or reference) expressions we are
2540 dereferencing contain a side effect, these cannot be equal. */
2541 if (TREE_SIDE_EFFECTS (arg0)
2542 || TREE_SIDE_EFFECTS (arg1))
2543 return 0;
2545 switch (TREE_CODE (arg0))
2547 case INDIRECT_REF:
2548 case REALPART_EXPR:
2549 case IMAGPART_EXPR:
2550 return OP_SAME (0);
2552 case TARGET_MEM_REF:
2553 /* Require equal extra operands and then fall through to MEM_REF
2554 handling of the two common operands. */
2555 if (!OP_SAME_WITH_NULL (2)
2556 || !OP_SAME_WITH_NULL (3)
2557 || !OP_SAME_WITH_NULL (4))
2558 return 0;
2559 /* Fallthru. */
2560 case MEM_REF:
2561 /* Require equal access sizes, and similar pointer types.
2562 We can have incomplete types for array references of
2563 variable-sized arrays from the Fortran frontend
2564 though. */
2565 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2566 || (TYPE_SIZE (TREE_TYPE (arg0))
2567 && TYPE_SIZE (TREE_TYPE (arg1))
2568 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2569 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2570 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2571 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2572 && OP_SAME (0) && OP_SAME (1));
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null.
2577 Compare the array index by value if it is constant first as we
2578 may have different types but same value here. */
2579 return (OP_SAME (0)
2580 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 1))
2582 || OP_SAME (1))
2583 && OP_SAME_WITH_NULL (2)
2584 && OP_SAME_WITH_NULL (3));
2586 case COMPONENT_REF:
2587 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2588 may be NULL when we're called to compare MEM_EXPRs. */
2589 return OP_SAME_WITH_NULL (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2);
2593 case BIT_FIELD_REF:
2594 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 default:
2597 return 0;
2600 case tcc_expression:
2601 switch (TREE_CODE (arg0))
2603 case ADDR_EXPR:
2604 case TRUTH_NOT_EXPR:
2605 return OP_SAME (0);
2607 case TRUTH_ANDIF_EXPR:
2608 case TRUTH_ORIF_EXPR:
2609 return OP_SAME (0) && OP_SAME (1);
2611 case FMA_EXPR:
2612 case WIDEN_MULT_PLUS_EXPR:
2613 case WIDEN_MULT_MINUS_EXPR:
2614 if (!OP_SAME (2))
2615 return 0;
2616 /* The multiplication operands are commutative. */
2617 /* FALLTHRU */
2619 case TRUTH_AND_EXPR:
2620 case TRUTH_OR_EXPR:
2621 case TRUTH_XOR_EXPR:
2622 if (OP_SAME (0) && OP_SAME (1))
2623 return 1;
2625 /* Otherwise take into account this is a commutative operation. */
2626 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2627 TREE_OPERAND (arg1, 1), flags)
2628 && operand_equal_p (TREE_OPERAND (arg0, 1),
2629 TREE_OPERAND (arg1, 0), flags));
2631 case COND_EXPR:
2632 case VEC_COND_EXPR:
2633 case DOT_PROD_EXPR:
2634 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 default:
2637 return 0;
2640 case tcc_vl_exp:
2641 switch (TREE_CODE (arg0))
2643 case CALL_EXPR:
2644 /* If the CALL_EXPRs call different functions, then they
2645 clearly cannot be equal. */
2646 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2647 flags))
2648 return 0;
2651 unsigned int cef = call_expr_flags (arg0);
2652 if (flags & OEP_PURE_SAME)
2653 cef &= ECF_CONST | ECF_PURE;
2654 else
2655 cef &= ECF_CONST;
2656 if (!cef)
2657 return 0;
2660 /* Now see if all the arguments are the same. */
2662 const_call_expr_arg_iterator iter0, iter1;
2663 const_tree a0, a1;
2664 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2665 a1 = first_const_call_expr_arg (arg1, &iter1);
2666 a0 && a1;
2667 a0 = next_const_call_expr_arg (&iter0),
2668 a1 = next_const_call_expr_arg (&iter1))
2669 if (! operand_equal_p (a0, a1, flags))
2670 return 0;
2672 /* If we get here and both argument lists are exhausted
2673 then the CALL_EXPRs are equal. */
2674 return ! (a0 || a1);
2676 default:
2677 return 0;
2680 case tcc_declaration:
2681 /* Consider __builtin_sqrt equal to sqrt. */
2682 return (TREE_CODE (arg0) == FUNCTION_DECL
2683 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2684 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2685 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2687 default:
2688 return 0;
2691 #undef OP_SAME
2692 #undef OP_SAME_WITH_NULL
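/* Usage sketch (hypothetical): given two separately built trees A and B
   both representing "p->x + 1",

     operand_equal_p (a, b, 0)               returns nonzero
     operand_equal_p (a, b, OEP_ONLY_CONST)  returns 0

   because only constants qualify under OEP_ONLY_CONST.  Any side effect
   in either tree forces a 0 answer in both modes.  */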
2695 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2696 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2698 When in doubt, return 0. */
2700 static int
2701 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2703 int unsignedp1, unsignedpo;
2704 tree primarg0, primarg1, primother;
2705 unsigned int correct_width;
2707 if (operand_equal_p (arg0, arg1, 0))
2708 return 1;
2710 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2711 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2712 return 0;
2714 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2715 and see if the inner values are the same. This removes any
2716 signedness comparison, which doesn't matter here. */
2717 primarg0 = arg0, primarg1 = arg1;
2718 STRIP_NOPS (primarg0);
2719 STRIP_NOPS (primarg1);
2720 if (operand_equal_p (primarg0, primarg1, 0))
2721 return 1;
2723 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2724 actual comparison operand, ARG0.
2726 First throw away any conversions to wider types
2727 already present in the operands. */
2729 primarg1 = get_narrower (arg1, &unsignedp1);
2730 primother = get_narrower (other, &unsignedpo);
2732 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2733 if (unsignedp1 == unsignedpo
2734 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2735 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2737 tree type = TREE_TYPE (arg0);
2739 /* Make sure shorter operand is extended the right way
2740 to match the longer operand. */
2741 primarg1 = fold_convert (signed_or_unsigned_type_for
2742 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2744 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2745 return 1;
2748 return 0;
2751 /* See if ARG is an expression that is either a comparison or is performing
2752 arithmetic on comparisons. The comparisons must only be comparing
2753 two different values, which will be stored in *CVAL1 and *CVAL2; if
2754 they are nonzero it means that some operands have already been found.
2755 No variables may be used anywhere else in the expression except in the
2756 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2757 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2759 If this is true, return 1. Otherwise, return zero. */
2761 static int
2762 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2764 enum tree_code code = TREE_CODE (arg);
2765 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2767 /* We can handle some of the tcc_expression cases here. */
2768 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2769 tclass = tcc_unary;
2770 else if (tclass == tcc_expression
2771 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2772 || code == COMPOUND_EXPR))
2773 tclass = tcc_binary;
2775 else if (tclass == tcc_expression && code == SAVE_EXPR
2776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2778 /* If we've already found a CVAL1 or CVAL2, this expression is
2779 too complex to handle. */
2780 if (*cval1 || *cval2)
2781 return 0;
2783 tclass = tcc_unary;
2784 *save_p = 1;
2787 switch (tclass)
2789 case tcc_unary:
2790 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2792 case tcc_binary:
2793 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2794 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2795 cval1, cval2, save_p));
2797 case tcc_constant:
2798 return 1;
2800 case tcc_expression:
2801 if (code == COND_EXPR)
2802 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2803 cval1, cval2, save_p)
2804 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2805 cval1, cval2, save_p)
2806 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2807 cval1, cval2, save_p));
2808 return 0;
2810 case tcc_comparison:
2811 /* First see if we can handle the first operand, then the second. For
2812 the second operand, we know *CVAL1 can't be zero. It must be that
2813 one side of the comparison is each of the values; test for the
2814 case where this isn't true by failing if the two operands
2815 are the same. */
2817 if (operand_equal_p (TREE_OPERAND (arg, 0),
2818 TREE_OPERAND (arg, 1), 0))
2819 return 0;
2821 if (*cval1 == 0)
2822 *cval1 = TREE_OPERAND (arg, 0);
2823 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2824 ;
2825 else if (*cval2 == 0)
2826 *cval2 = TREE_OPERAND (arg, 0);
2827 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2828 ;
2829 else
2830 return 0;
2832 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2833 ;
2834 else if (*cval2 == 0)
2835 *cval2 = TREE_OPERAND (arg, 1);
2836 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2837 ;
2838 else
2839 return 0;
2841 return 1;
2843 default:
2844 return 0;
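/* Usage sketch (hypothetical): for ARG representing "a == b || a < b"
   with *CVAL1 and *CVAL2 initially null, the walk records CVAL1 = a and
   CVAL2 = b and returns 1: every comparison in ARG mentions only those
   two values, so a caller may evaluate ARG for trial orderings of A and
   B by substituting constants via eval_subst below.  */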
2848 /* ARG is a tree that is known to contain just arithmetic operations and
2849 comparisons. Evaluate the operations in the tree substituting NEW0 for
2850 any occurrence of OLD0 as an operand of a comparison and likewise for
2851 NEW1 and OLD1. */
2853 static tree
2854 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2855 tree old1, tree new1)
2857 tree type = TREE_TYPE (arg);
2858 enum tree_code code = TREE_CODE (arg);
2859 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2861 /* We can handle some of the tcc_expression cases here. */
2862 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2863 tclass = tcc_unary;
2864 else if (tclass == tcc_expression
2865 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2866 tclass = tcc_binary;
2868 switch (tclass)
2870 case tcc_unary:
2871 return fold_build1_loc (loc, code, type,
2872 eval_subst (loc, TREE_OPERAND (arg, 0),
2873 old0, new0, old1, new1));
2875 case tcc_binary:
2876 return fold_build2_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1),
2879 eval_subst (loc, TREE_OPERAND (arg, 1),
2880 old0, new0, old1, new1));
2882 case tcc_expression:
2883 switch (code)
2885 case SAVE_EXPR:
2886 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2887 old1, new1);
2889 case COMPOUND_EXPR:
2890 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2891 old1, new1);
2893 case COND_EXPR:
2894 return fold_build3_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1),
2899 eval_subst (loc, TREE_OPERAND (arg, 2),
2900 old0, new0, old1, new1));
2901 default:
2902 break;
2904 /* Fall through - ??? */
2906 case tcc_comparison:
2908 tree arg0 = TREE_OPERAND (arg, 0);
2909 tree arg1 = TREE_OPERAND (arg, 1);
2911 /* We need to check both for exact equality and tree equality. The
2912 former will be true if the operand has a side-effect. In that
2913 case, we know the operand occurred exactly once. */
2915 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2916 arg0 = new0;
2917 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2918 arg0 = new1;
2920 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2921 arg1 = new0;
2922 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2923 arg1 = new1;
2925 return fold_build2_loc (loc, code, type, arg0, arg1);
2928 default:
2929 return arg;
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED was previously an operand of the expression
2935 but is now not needed (e.g., we folded OMITTED * 0).
2937 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2938 the conversion of RESULT to TYPE. */
2940 tree
2941 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2943 tree t = fold_convert_loc (loc, type, result);
2945 /* If the resulting operand is an empty statement, just return the omitted
2946 statement cast to void. */
2947 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2948 return build1_loc (loc, NOP_EXPR, void_type_node,
2949 fold_ignored_result (omitted));
2951 if (TREE_SIDE_EFFECTS (omitted))
2952 return build2_loc (loc, COMPOUND_EXPR, type,
2953 fold_ignored_result (omitted), t);
2955 return non_lvalue_loc (loc, t);
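/* Example (sketch): when fold rewrites "f () * 0" to 0 the call must
   still execute, so a caller passing RESULT = integer_zero_node and
   OMITTED = the CALL_EXPR gets back the COMPOUND_EXPR "(f (), 0)",
   whereas a side-effect-free OMITTED is simply dropped in favor of
   the converted RESULT.  */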
2958 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2960 static tree
2961 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2962 tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement cast to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2969 return build1_loc (loc, NOP_EXPR, void_type_node,
2970 fold_ignored_result (omitted));
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2_loc (loc, COMPOUND_EXPR, type,
2974 fold_ignored_result (omitted), t);
2976 return pedantic_non_lvalue_loc (loc, t);
2979 /* Return a tree for the case when the result of an expression is RESULT
2980 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2981 of the expression but are now not needed.
2983 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2984 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2985 evaluated before OMITTED2. Otherwise, if neither has side effects,
2986 just do the conversion of RESULT to TYPE. */
2988 tree
2989 omit_two_operands_loc (location_t loc, tree type, tree result,
2990 tree omitted1, tree omitted2)
2992 tree t = fold_convert_loc (loc, type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 tree
3011 fold_truth_not_expr (location_t loc, tree arg)
3013 tree type = TREE_TYPE (arg);
3014 enum tree_code code = TREE_CODE (arg);
3015 location_t loc1, loc2;
3017 /* If this is a comparison, we can simply invert it, except for
3018 floating-point non-equality comparisons, in which case we just
3019 enclose a TRUTH_NOT_EXPR around what we have. */
3021 if (TREE_CODE_CLASS (code) == tcc_comparison)
3023 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3024 if (FLOAT_TYPE_P (op_type)
3025 && flag_trapping_math
3026 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3027 && code != NE_EXPR && code != EQ_EXPR)
3028 return NULL_TREE;
3030 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3031 if (code == ERROR_MARK)
3032 return NULL_TREE;
3034 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3035 TREE_OPERAND (arg, 1));
3038 switch (code)
3040 case INTEGER_CST:
3041 return constant_boolean_node (integer_zerop (arg), type);
3043 case TRUTH_AND_EXPR:
3044 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3045 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3046 return build2_loc (loc, TRUTH_OR_EXPR, type,
3047 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3048 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3050 case TRUTH_OR_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_AND_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3066 else
3067 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3068 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3073 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3074 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3075 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3076 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 case TRUTH_ORIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 case TRUTH_NOT_EXPR:
3086 return TREE_OPERAND (arg, 0);
3088 case COND_EXPR:
3090 tree arg1 = TREE_OPERAND (arg, 1);
3091 tree arg2 = TREE_OPERAND (arg, 2);
3093 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3096 /* A COND_EXPR may have a throw as one operand, which
3097 then has void type. Just leave void operands
3098 as they are. */
3099 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3100 VOID_TYPE_P (TREE_TYPE (arg1))
3101 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3102 VOID_TYPE_P (TREE_TYPE (arg2))
3103 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3106 case COMPOUND_EXPR:
3107 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 TREE_OPERAND (arg, 0),
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3112 case NON_LVALUE_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3116 CASE_CONVERT:
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3118 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3120 /* ... fall through ... */
3122 case FLOAT_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 return build1_loc (loc, TREE_CODE (arg), type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3127 case BIT_AND_EXPR:
3128 if (!integer_onep (TREE_OPERAND (arg, 1)))
3129 return NULL_TREE;
3130 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3132 case SAVE_EXPR:
3133 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3135 case CLEANUP_POINT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3140 default:
3141 return NULL_TREE;
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 tree
3153 invert_truthvalue_loc (location_t loc, tree arg)
3155 tree tem;
3157 if (TREE_CODE (arg) == ERROR_MARK)
3158 return arg;
3160 tem = fold_truth_not_expr (loc, arg);
3161 if (!tem)
3162 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3164 return tem;
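/* Worked example (illustrative): inverting "a < b && c != 0" takes the
   TRUTH_ANDIF_EXPR case of fold_truth_not_expr, and De Morgan gives

     a >= b || c == 0

   For floating-point A and B the inverted comparison becomes UNGE_EXPR,
   or the fold is abandoned under -ftrapping-math, exactly as decided by
   invert_tree_comparison above.  */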
3167 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3168 operands are another bit-wise operation with a common input. If so,
3169 distribute the bit operations to save an operation and possibly two if
3170 constants are involved. For example, convert
3171 (A | B) & (A | C) into A | (B & C)
3172 Further simplification will occur if B and C are constants.
3174 If this optimization cannot be done, 0 will be returned. */
3176 static tree
3177 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3178 tree arg0, tree arg1)
3180 tree common;
3181 tree left, right;
3183 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3184 || TREE_CODE (arg0) == code
3185 || (TREE_CODE (arg0) != BIT_AND_EXPR
3186 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3187 return 0;
3189 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3191 common = TREE_OPERAND (arg0, 0);
3192 left = TREE_OPERAND (arg0, 1);
3193 right = TREE_OPERAND (arg1, 1);
3195 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3197 common = TREE_OPERAND (arg0, 0);
3198 left = TREE_OPERAND (arg0, 1);
3199 right = TREE_OPERAND (arg1, 0);
3201 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3203 common = TREE_OPERAND (arg0, 1);
3204 left = TREE_OPERAND (arg0, 0);
3205 right = TREE_OPERAND (arg1, 1);
3207 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3209 common = TREE_OPERAND (arg0, 1);
3210 left = TREE_OPERAND (arg0, 0);
3211 right = TREE_OPERAND (arg1, 0);
3213 else
3214 return 0;
3216 common = fold_convert_loc (loc, type, common);
3217 left = fold_convert_loc (loc, type, left);
3218 right = fold_convert_loc (loc, type, right);
3219 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3220 fold_build2_loc (loc, code, type, left, right));
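/* Worked example (illustrative): for "(x | 0xf0) & (x | 0x0f)" both
   operands are BIT_IOR_EXPRs with the common input X, so this builds

     x | (0xf0 & 0x0f)

   and the inner fold collapses the constant operand to 0, saving one
   bitwise operation and exposing further simplification.  */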
3223 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3224 with code CODE. This optimization is unsafe. */
3225 static tree
3226 distribute_real_division (location_t loc, enum tree_code code, tree type,
3227 tree arg0, tree arg1)
3229 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3230 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3232 /* (A / C) +- (B / C) -> (A +- B) / C. */
3233 if (mul0 == mul1
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1), 0))
3236 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3237 fold_build2_loc (loc, code, type,
3238 TREE_OPERAND (arg0, 0),
3239 TREE_OPERAND (arg1, 0)),
3240 TREE_OPERAND (arg0, 1));
3242 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3243 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0), 0)
3245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3248 REAL_VALUE_TYPE r0, r1;
3249 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3250 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3251 if (!mul0)
3252 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3253 if (!mul1)
3254 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3255 real_arithmetic (&r0, code, &r0, &r1);
3256 return fold_build2_loc (loc, MULT_EXPR, type,
3257 TREE_OPERAND (arg0, 0),
3258 build_real (type, r0));
3261 return NULL_TREE;
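/* Worked example (illustrative, only valid under unsafe math): for
   "x / 3.0 + x / 6.0" both operands share the dividend X, so the second
   pattern computes 1/3.0 + 1/6.0 == 0.5 with real_arithmetic and
   returns "x * 0.5", trading two divisions for one multiplication at
   the price of a differently rounded result.  */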
3264 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3265 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3267 static tree
3268 make_bit_field_ref (location_t loc, tree inner, tree type,
3269 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3271 tree result, bftype;
3273 if (bitpos == 0)
3275 tree size = TYPE_SIZE (TREE_TYPE (inner));
3276 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3277 || POINTER_TYPE_P (TREE_TYPE (inner)))
3278 && host_integerp (size, 0)
3279 && tree_low_cst (size, 0) == bitsize)
3280 return fold_convert_loc (loc, type, inner);
3283 bftype = type;
3284 if (TYPE_PRECISION (bftype) != bitsize
3285 || TYPE_UNSIGNED (bftype) == !unsignedp)
3286 bftype = build_nonstandard_integer_type (bitsize, 0);
3288 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 if (bftype != type)
3292 result = fold_convert_loc (loc, type, result);
3294 return result;
3297 /* Optimize a bit-field compare.
3299 There are two cases: First is a compare against a constant and the
3300 second is a comparison of two items where the fields are at the same
3301 bit position relative to the start of a chunk (byte, halfword, word)
3302 large enough to contain it. In these cases we can avoid the shift
3303 implicit in bitfield extractions.
3305 For constants, we emit a compare of the shifted constant with the
3306 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3307 compared. For two fields at the same position, we do the ANDs with the
3308 similar mask and compare the result of the ANDs.
3310 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3311 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3312 are the left and right operands of the comparison, respectively.
3314 If the optimization described above can be done, we return the resulting
3315 tree. Otherwise we return zero. */
3317 static tree
3318 optimize_bit_field_compare (location_t loc, enum tree_code code,
3319 tree compare_type, tree lhs, tree rhs)
3321 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3322 tree type = TREE_TYPE (lhs);
3323 tree signed_type, unsigned_type;
3324 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3325 enum machine_mode lmode, rmode, nmode;
3326 int lunsignedp, runsignedp;
3327 int lvolatilep = 0, rvolatilep = 0;
3328 tree linner, rinner = NULL_TREE;
3329 tree mask;
3330 tree offset;
3332 /* In the strict volatile bitfields case, doing code changes here may prevent
3333 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3334 if (flag_strict_volatile_bitfields > 0)
3335 return 0;
3337 /* Get all the information about the extractions being done. If the bit size
2338 is the same as the size of the underlying object, we aren't doing an
3339 extraction at all and so can do nothing. We also don't want to
3340 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3341 then will no longer be able to replace it. */
3342 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3343 &lunsignedp, &lvolatilep, false);
3344 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3345 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3346 return 0;
3348 if (!const_p)
3350 /* If this is not a constant, we can only do something if bit positions,
3351 sizes, and signedness are the same. */
3352 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3353 &runsignedp, &rvolatilep, false);
3355 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3356 || lunsignedp != runsignedp || offset != 0
3357 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3358 return 0;
3361 /* See if we can find a mode to refer to this field. We should be able to,
3362 but fail if we can't. */
3363 if (lvolatilep
3364 && GET_MODE_BITSIZE (lmode) > 0
3365 && flag_strict_volatile_bitfields > 0)
3366 nmode = lmode;
3367 else
3368 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3369 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3370 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3371 TYPE_ALIGN (TREE_TYPE (rinner))),
3372 word_mode, lvolatilep || rvolatilep);
3373 if (nmode == VOIDmode)
3374 return 0;
3376 /* Set signed and unsigned types of the precision of this mode for the
3377 shifts below. */
3378 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3379 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3381 /* Compute the bit position and size for the new reference and our offset
3382 within it. If the new reference is the same size as the original, we
3383 won't optimize anything, so return zero. */
3384 nbitsize = GET_MODE_BITSIZE (nmode);
3385 nbitpos = lbitpos & ~ (nbitsize - 1);
3386 lbitpos -= nbitpos;
3387 if (nbitsize == lbitsize)
3388 return 0;
3390 if (BYTES_BIG_ENDIAN)
3391 lbitpos = nbitsize - lbitsize - lbitpos;
3393 /* Make the mask to be used against the extracted field. */
3394 mask = build_int_cst_type (unsigned_type, -1);
3395 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3396 mask = const_binop (RSHIFT_EXPR, mask,
3397 size_int (nbitsize - lbitsize - lbitpos));
3399 if (! const_p)
3400 /* If not comparing with constant, just rework the comparison
3401 and return. */
3402 return fold_build2_loc (loc, code, compare_type,
3403 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3404 make_bit_field_ref (loc, linner,
3405 unsigned_type,
3406 nbitsize, nbitpos,
3407 1),
3408 mask),
3409 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3410 make_bit_field_ref (loc, rinner,
3411 unsigned_type,
3412 nbitsize, nbitpos,
3413 1),
3414 mask));
3416 /* Otherwise, we are handling the constant case. See if the constant is too
3417 big for the field. Warn and return a tree for 0 (false) if so. We do
3418 this not only for its own sake, but to avoid having to test for this
3419 error case below. If we didn't, we might generate wrong code.
3421 For unsigned fields, the constant shifted right by the field length should
3422 be all zero. For signed fields, the high-order bits should agree with
3423 the sign bit. */
3425 if (lunsignedp)
3427 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3428 fold_convert_loc (loc,
3429 unsigned_type, rhs),
3430 size_int (lbitsize))))
3432 warning (0, "comparison is always %d due to width of bit-field",
3433 code == NE_EXPR);
3434 return constant_boolean_node (code == NE_EXPR, compare_type);
3437 else
3439 tree tem = const_binop (RSHIFT_EXPR,
3440 fold_convert_loc (loc, signed_type, rhs),
3441 size_int (lbitsize - 1));
3442 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3444 warning (0, "comparison is always %d due to width of bit-field",
3445 code == NE_EXPR);
3446 return constant_boolean_node (code == NE_EXPR, compare_type);
3450 /* Single-bit compares should always be against zero. */
3451 if (lbitsize == 1 && ! integer_zerop (rhs))
3453 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3454 rhs = build_int_cst (type, 0);
3457 /* Make a new bitfield reference, shift the constant over the
3458 appropriate number of bits and mask it with the computed mask
3459 (in case this was a signed field). If we changed it, make a new one. */
3460 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3461 if (lvolatilep)
3463 TREE_SIDE_EFFECTS (lhs) = 1;
3464 TREE_THIS_VOLATILE (lhs) = 1;
3467 rhs = const_binop (BIT_AND_EXPR,
3468 const_binop (LSHIFT_EXPR,
3469 fold_convert_loc (loc, unsigned_type, rhs),
3470 size_int (lbitpos)),
3471 mask);
3473 lhs = build2_loc (loc, code, compare_type,
3474 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3475 return lhs;
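/* Worked example (field layout illustrative only): given
   "struct { unsigned a : 3, b : 5; } s" on a little-endian target,
   "s.b == 7" need not extract B with shifts.  Loading the containing
   8-bit unit as WORD, the mask covers bits 3..7 and the test becomes

     (word & 0xf8) == (7 << 3)

   i.e. one load, one AND and one compare.  */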
3478 /* Subroutine for fold_truth_andor_1: decode a field reference.
3480 If EXP is a comparison reference, we return the innermost reference.
3482 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3483 set to the starting bit number.
3485 If the innermost field can be completely contained in a mode-sized
3486 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3488 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3489 otherwise it is not changed.
3491 *PUNSIGNEDP is set to the signedness of the field.
3493 *PMASK is set to the mask used. This is either contained in a
3494 BIT_AND_EXPR or derived from the width of the field.
3496 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3498 Return 0 if this is not a component reference or is one that we can't
3499 do anything with. */
3501 static tree
3502 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3503 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3504 int *punsignedp, int *pvolatilep,
3505 tree *pmask, tree *pand_mask)
3507 tree outer_type = 0;
3508 tree and_mask = 0;
3509 tree mask, inner, offset;
3510 tree unsigned_type;
3511 unsigned int precision;
3513 /* All the optimizations using this function assume integer fields.
3514 There are problems with FP fields since the type_for_size call
3515 below can fail for, e.g., XFmode. */
3516 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3517 return 0;
3519 /* We are interested in the bare arrangement of bits, so strip everything
3520 that doesn't affect the machine mode. However, record the type of the
3521 outermost expression if it may matter below. */
3522 if (CONVERT_EXPR_P (exp)
3523 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3524 outer_type = TREE_TYPE (exp);
3525 STRIP_NOPS (exp);
3527 if (TREE_CODE (exp) == BIT_AND_EXPR)
3529 and_mask = TREE_OPERAND (exp, 1);
3530 exp = TREE_OPERAND (exp, 0);
3531 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3532 if (TREE_CODE (and_mask) != INTEGER_CST)
3533 return 0;
3536 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3537 punsignedp, pvolatilep, false);
3538 if ((inner == exp && and_mask == 0)
3539 || *pbitsize < 0 || offset != 0
3540 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3541 return 0;
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3547 *punsignedp = TYPE_UNSIGNED (outer_type);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3551 precision = TYPE_PRECISION (unsigned_type);
3553 mask = build_int_cst_type (unsigned_type, -1);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3556 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3558 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3559 if (and_mask != 0)
3560 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3561 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3563 *pmask = mask;
3564 *pand_mask = and_mask;
3565 return inner;
3568 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3569 bit positions. */
3571 static int
3572 all_ones_mask_p (const_tree mask, int size)
3574 tree type = TREE_TYPE (mask);
3575 unsigned int precision = TYPE_PRECISION (type);
3576 tree tmask;
3578 tmask = build_int_cst_type (signed_type_for (type), -1);
3580 return
3581 tree_int_cst_equal (mask,
3582 const_binop (RSHIFT_EXPR,
3583 const_binop (LSHIFT_EXPR, tmask,
3584 size_int (precision - size)),
3585 size_int (precision - size)));
3588 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3589 represents the sign bit of EXP's type. If EXP represents a sign
3590 or zero extension, also test VAL against the unextended type.
3591 The return value is the (sub)expression whose sign bit is VAL,
3592 or NULL_TREE otherwise. */
3594 static tree
3595 sign_bit_p (tree exp, const_tree val)
3597 unsigned HOST_WIDE_INT mask_lo, lo;
3598 HOST_WIDE_INT mask_hi, hi;
3599 int width;
3600 tree t;
3602 /* Tree EXP must have an integral type. */
3603 t = TREE_TYPE (exp);
3604 if (! INTEGRAL_TYPE_P (t))
3605 return NULL_TREE;
3607 /* Tree VAL must be an integer constant. */
3608 if (TREE_CODE (val) != INTEGER_CST
3609 || TREE_OVERFLOW (val))
3610 return NULL_TREE;
3612 width = TYPE_PRECISION (t);
3613 if (width > HOST_BITS_PER_WIDE_INT)
3615 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3616 lo = 0;
3618 mask_hi = ((unsigned HOST_WIDE_INT) -1
3619 >> (HOST_BITS_PER_DOUBLE_INT - width));
3620 mask_lo = -1;
3622 else
3624 hi = 0;
3625 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3627 mask_hi = 0;
3628 mask_lo = ((unsigned HOST_WIDE_INT) -1
3629 >> (HOST_BITS_PER_WIDE_INT - width));
3632 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3633 treat VAL as if it were unsigned. */
3634 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3635 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3636 return exp;
3638 /* Handle extension from a narrower type. */
3639 if (TREE_CODE (exp) == NOP_EXPR
3640 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3641 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3643 return NULL_TREE;
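/* Usage sketch (hypothetical): for a 32-bit signed int expression X,

     sign_bit_p (x, build_int_cst_type (unsigned_type_node, 0x80000000))

   returns X itself, which lets fold turn mask tests of the top bit,
   such as "(x & 0x80000000) != 0", into the plain sign test "x < 0".
   Only the bit pattern of VAL is inspected, not its type.  */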
3646 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3647 to be evaluated unconditionally. */
3649 static int
3650 simple_operand_p (const_tree exp)
3652 /* Strip any conversions that don't change the machine mode. */
3653 STRIP_NOPS (exp);
3655 return (CONSTANT_CLASS_P (exp)
3656 || TREE_CODE (exp) == SSA_NAME
3657 || (DECL_P (exp)
3658 && ! TREE_ADDRESSABLE (exp)
3659 && ! TREE_THIS_VOLATILE (exp)
3660 && ! DECL_NONLOCAL (exp)
3661 /* Don't regard global variables as simple. They may be
3662 allocated in ways unknown to the compiler (shared memory,
3663 #pragma weak, etc). */
3664 && ! TREE_PUBLIC (exp)
3665 && ! DECL_EXTERNAL (exp)
3666 /* Loading a static variable is unduly expensive, but global
3667 registers aren't expensive. */
3668 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3671 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3672 to be evaluated unconditionally.
3673 In addition to simple_operand_p, we assume that comparisons, conversions,
3674 and logic-not operations are simple, if their operands are simple, too. */
3676 static bool
3677 simple_operand_p_2 (tree exp)
3679 enum tree_code code;
3681 if (TREE_SIDE_EFFECTS (exp)
3682 || tree_could_trap_p (exp))
3683 return false;
3685 while (CONVERT_EXPR_P (exp))
3686 exp = TREE_OPERAND (exp, 0);
3688 code = TREE_CODE (exp);
3690 if (TREE_CODE_CLASS (code) == tcc_comparison)
3691 return (simple_operand_p (TREE_OPERAND (exp, 0))
3692 && simple_operand_p (TREE_OPERAND (exp, 1)));
3694 if (code == TRUTH_NOT_EXPR)
3695 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3697 return simple_operand_p (exp);
3701 /* The following functions are subroutines to fold_range_test and allow it to
3702 try to change a logical combination of comparisons into a range test.
3704 For example, both
3705 X == 2 || X == 3 || X == 4 || X == 5
3706 and
3707 X >= 2 && X <= 5
3708 are converted to
3709 (unsigned) (X - 2) <= 3
3711 We describe each set of comparisons as being either inside or outside
3712 a range, using a variable named like IN_P, and then describe the
3713 range with a lower and upper bound. If one of the bounds is omitted,
3714 it represents either the highest or lowest value of the type.
3716 In the comments below, we represent a range by two numbers in brackets
3717 preceded by a "+" to designate being inside that range, or a "-" to
3718 designate being outside that range, so the condition can be inverted by
3719 flipping the prefix. An omitted bound is represented by a "-". For
3720 example, "- [-, 10]" means being outside the range starting at the lowest
3721 possible value and ending at 10, in other words, being greater than 10.
3722 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 always false.
3725 We set up things so that the missing bounds are handled in a consistent
3726 manner so neither a missing bound nor "true" and "false" need to be
3727 handled using a special case. */
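/* Illustrative sketch, not part of GCC: the transformation described
   above, written out in plain C.  Subtracting the low bound in unsigned
   arithmetic turns the bounded test into a single comparison, because
   values below the low bound wrap around to very large unsigned numbers.
   The function name is ours, for illustration only.  */

static int
range_test_sketch (unsigned int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5, i.e. X in + [2, 5].  */
  int longhand = (x == 2u || x == 3u || x == 4u || x == 5u);
  /* The folded form: (unsigned) (X - 2) <= 3.  */
  int folded = (x - 2u <= 3u);
  return longhand == folded;  /* Holds for every X.  */
}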
3729 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3730 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3731 and UPPER1_P are nonzero if the respective argument is an upper bound
3732 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3733 must be specified for a comparison. ARG1 will be converted to ARG0's
3734 type if both are specified. */
3736 static tree
3737 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3738 tree arg1, int upper1_p)
3740 tree tem;
3741 int result;
3742 int sgn0, sgn1;
3744 /* If neither arg represents infinity, do the normal operation.
3745 Else, if not a comparison, return infinity. Else handle the special
3746 comparison rules. Note that most of the cases below won't occur, but
3747 are handled for consistency. */
3749 if (arg0 != 0 && arg1 != 0)
3751 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3752 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 STRIP_NOPS (tem);
3754 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3757 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 return 0;
3760 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3761 for neither. In real maths, we cannot assume open ended ranges are
3762 the same. But, this is computer arithmetic, where numbers are finite.
3763 	 We can therefore represent any missing bound by a value Z, Z being
3764 	 greater than any representable number.  This permits
3765 us to treat unbounded ranges as equal. */
3766 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3767 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3768 switch (code)
3770 case EQ_EXPR:
3771 result = sgn0 == sgn1;
3772 break;
3773 case NE_EXPR:
3774 result = sgn0 != sgn1;
3775 break;
3776 case LT_EXPR:
3777 result = sgn0 < sgn1;
3778 break;
3779 case LE_EXPR:
3780 result = sgn0 <= sgn1;
3781 break;
3782 case GT_EXPR:
3783 result = sgn0 > sgn1;
3784 break;
3785 case GE_EXPR:
3786 result = sgn0 >= sgn1;
3787 break;
3788 default:
3789 gcc_unreachable ();
3792 return constant_boolean_node (result, type);
3795 /* Helper routine for make_range. Perform one step for it, return
3796 new expression if the loop should continue or NULL_TREE if it should
3797 stop. */
3799 tree
3800 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3801 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3802 bool *strict_overflow_p)
3804 tree arg0_type = TREE_TYPE (arg0);
3805 tree n_low, n_high, low = *p_low, high = *p_high;
3806 int in_p = *p_in_p, n_in_p;
3808 switch (code)
3810 case TRUTH_NOT_EXPR:
3811 *p_in_p = ! in_p;
3812 return arg0;
3814 case EQ_EXPR: case NE_EXPR:
3815 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3816 /* We can only do something if the range is testing for zero
3817 and if the second operand is an integer constant. Note that
3818 saying something is "in" the range we make is done by
3819 	 complementing IN_P, since IN_P is set for the initial case of
3820 	 being not equal to zero; "out" leaves it alone.  */
3821 if (low == NULL_TREE || high == NULL_TREE
3822 || ! integer_zerop (low) || ! integer_zerop (high)
3823 || TREE_CODE (arg1) != INTEGER_CST)
3824 return NULL_TREE;
3826 switch (code)
3828 case NE_EXPR: /* - [c, c] */
3829 low = high = arg1;
3830 break;
3831 case EQ_EXPR: /* + [c, c] */
3832 in_p = ! in_p, low = high = arg1;
3833 break;
3834 case GT_EXPR: /* - [-, c] */
3835 low = 0, high = arg1;
3836 break;
3837 case GE_EXPR: /* + [c, -] */
3838 in_p = ! in_p, low = arg1, high = 0;
3839 break;
3840 case LT_EXPR: /* - [c, -] */
3841 low = arg1, high = 0;
3842 break;
3843 case LE_EXPR: /* + [-, c] */
3844 in_p = ! in_p, low = 0, high = arg1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 /* If this is an unsigned comparison, we also know that EXP is
3851 greater than or equal to zero. We base the range tests we make
3852 on that fact, so we record it here so we can parse existing
3853 range tests. We test arg0_type since often the return type
3854 of, e.g. EQ_EXPR, is boolean. */
3855 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3857 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3858 in_p, low, high, 1,
3859 build_int_cst (arg0_type, 0),
3860 NULL_TREE))
3861 return NULL_TREE;
3863 in_p = n_in_p, low = n_low, high = n_high;
3865 /* If the high bound is missing, but we have a nonzero low
3866 bound, reverse the range so it goes from zero to the low bound
3867 minus 1. */
3868 if (high == 0 && low && ! integer_zerop (low))
3870 in_p = ! in_p;
3871 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3872 integer_one_node, 0);
3873 low = build_int_cst (arg0_type, 0);
3877 *p_low = low;
3878 *p_high = high;
3879 *p_in_p = in_p;
3880 return arg0;
3882 case NEGATE_EXPR:
3883 /* (-x) IN [a,b] -> x in [-b, -a] */
3884 n_low = range_binop (MINUS_EXPR, exp_type,
3885 build_int_cst (exp_type, 0),
3886 0, high, 1);
3887 n_high = range_binop (MINUS_EXPR, exp_type,
3888 build_int_cst (exp_type, 0),
3889 0, low, 0);
3890 if (n_high != 0 && TREE_OVERFLOW (n_high))
3891 return NULL_TREE;
3892 goto normalize;
3894 case BIT_NOT_EXPR:
3895 /* ~ X -> -X - 1 */
3896 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3897 build_int_cst (exp_type, 1));
3899 case PLUS_EXPR:
3900 case MINUS_EXPR:
3901 if (TREE_CODE (arg1) != INTEGER_CST)
3902 return NULL_TREE;
3904 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3905 move a constant to the other side. */
3906 if (!TYPE_UNSIGNED (arg0_type)
3907 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3908 return NULL_TREE;
3910 /* If EXP is signed, any overflow in the computation is undefined,
3911 so we don't worry about it so long as our computations on
3912 the bounds don't overflow. For unsigned, overflow is defined
3913 and this is exactly the right thing. */
3914 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3915 arg0_type, low, 0, arg1, 0);
3916 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3917 arg0_type, high, 1, arg1, 0);
3918 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3919 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3920 return NULL_TREE;
3922 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3923 *strict_overflow_p = true;
3925 normalize:
3926 /* Check for an unsigned range which has wrapped around the maximum
3927 value thus making n_high < n_low, and normalize it. */
3928 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3930 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3931 integer_one_node, 0);
3932 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3933 integer_one_node, 0);
3935 /* If the range is of the form +/- [ x+1, x ], we won't
3936 be able to normalize it. But then, it represents the
3937 whole range or the empty set, so make it
3938 +/- [ -, - ]. */
3939 if (tree_int_cst_equal (n_low, low)
3940 && tree_int_cst_equal (n_high, high))
3941 low = high = 0;
3942 else
3943 in_p = ! in_p;
3945 else
3946 low = n_low, high = n_high;
3948 *p_low = low;
3949 *p_high = high;
3950 *p_in_p = in_p;
3951 return arg0;
3953 CASE_CONVERT:
3954 case NON_LVALUE_EXPR:
3955 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3956 return NULL_TREE;
3958 if (! INTEGRAL_TYPE_P (arg0_type)
3959 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3960 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3961 return NULL_TREE;
3963 n_low = low, n_high = high;
3965 if (n_low != 0)
3966 n_low = fold_convert_loc (loc, arg0_type, n_low);
3968 if (n_high != 0)
3969 n_high = fold_convert_loc (loc, arg0_type, n_high);
3971 	 /* If we're converting arg0 from an unsigned type to exp's
3972 	    signed type, we will be doing the comparison as unsigned.
3973 The tests above have already verified that LOW and HIGH
3974 are both positive.
3976 So we have to ensure that we will handle large unsigned
3977 values the same way that the current signed bounds treat
3978 negative values. */
3980 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3982 tree high_positive;
3983 tree equiv_type;
3984 /* For fixed-point modes, we need to pass the saturating flag
3985 as the 2nd parameter. */
3986 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3987 equiv_type
3988 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3989 TYPE_SATURATING (arg0_type));
3990 else
3991 equiv_type
3992 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
3994 /* A range without an upper bound is, naturally, unbounded.
3995 Since convert would have cropped a very large value, use
3996 the max value for the destination type. */
3997 high_positive
3998 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3999 : TYPE_MAX_VALUE (arg0_type);
4001 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4002 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4003 fold_convert_loc (loc, arg0_type,
4004 high_positive),
4005 build_int_cst (arg0_type, 1));
4007 /* If the low bound is specified, "and" the range with the
4008 range for which the original unsigned value will be
4009 positive. */
4010 if (low != 0)
4012 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4013 1, fold_convert_loc (loc, arg0_type,
4014 integer_zero_node),
4015 high_positive))
4016 return NULL_TREE;
4018 in_p = (n_in_p == in_p);
4020 else
4022 /* Otherwise, "or" the range with the range of the input
4023 that will be interpreted as negative. */
4024 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4025 1, fold_convert_loc (loc, arg0_type,
4026 integer_zero_node),
4027 high_positive))
4028 return NULL_TREE;
4030 in_p = (in_p != n_in_p);
4034 *p_low = n_low;
4035 *p_high = n_high;
4036 *p_in_p = in_p;
4037 return arg0;
4039 default:
4040 return NULL_TREE;
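/* Illustrative sketch, not part of GCC: the PLUS_EXPR case and the
   "normalize" step above, on concrete 32-bit unsigned values.  For
   unsigned X, X + 10 < 20 means X + 10 in + [0, 19]; moving the
   constant to the bounds gives X in + [-10, 9], whose bounds wrap
   around (n_high < n_low), so the range is normalized to its
   complement, X in - [10, 0xFFFFFFF5].  Names are ours.  */

static int
make_range_step_sketch (unsigned int x)
{
  int original = (x + 10u < 20u);
  int normalized = ! (x >= 10u && x <= 0xFFFFFFF5u);
  return original == normalized;  /* Holds for every X.  */
}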
4044 /* Given EXP, a logical expression, set the range it is testing into
4045 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4046 actually being tested. *PLOW and *PHIGH will be made of the same
4047 type as the returned expression. If EXP is not a comparison, we
4048 will most likely not be returning a useful value and range. Set
4049 *STRICT_OVERFLOW_P to true if the return value is only valid
4050 because signed overflow is undefined; otherwise, do not change
4051 *STRICT_OVERFLOW_P. */
4053 tree
4054 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4055 bool *strict_overflow_p)
4057 enum tree_code code;
4058 tree arg0, arg1 = NULL_TREE;
4059 tree exp_type, nexp;
4060 int in_p;
4061 tree low, high;
4062 location_t loc = EXPR_LOCATION (exp);
4064 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4065 and see if we can refine the range. Some of the cases below may not
4066 happen, but it doesn't seem worth worrying about this. We "continue"
4067 the outer loop when we've changed something; otherwise we "break"
4068 the switch, which will "break" the while. */
4070 in_p = 0;
4071 low = high = build_int_cst (TREE_TYPE (exp), 0);
4073 while (1)
4075 code = TREE_CODE (exp);
4076 exp_type = TREE_TYPE (exp);
4077 arg0 = NULL_TREE;
4079 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4081 if (TREE_OPERAND_LENGTH (exp) > 0)
4082 arg0 = TREE_OPERAND (exp, 0);
4083 if (TREE_CODE_CLASS (code) == tcc_binary
4084 || TREE_CODE_CLASS (code) == tcc_comparison
4085 || (TREE_CODE_CLASS (code) == tcc_expression
4086 && TREE_OPERAND_LENGTH (exp) > 1))
4087 arg1 = TREE_OPERAND (exp, 1);
4089 if (arg0 == NULL_TREE)
4090 break;
4092 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4093 &high, &in_p, strict_overflow_p);
4094 if (nexp == NULL_TREE)
4095 break;
4096 exp = nexp;
4099 /* If EXP is a constant, we can evaluate whether this is true or false. */
4100 if (TREE_CODE (exp) == INTEGER_CST)
4102 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4103 exp, 0, low, 0))
4104 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4105 exp, 1, high, 1)));
4106 low = high = 0;
4107 exp = 0;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4111 return exp;
4114 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4115 type, TYPE, return an expression to test if EXP is in (or out of, depending
4116 on IN_P) the range. Return 0 if the test couldn't be created. */
4118 tree
4119 build_range_check (location_t loc, tree type, tree exp, int in_p,
4120 tree low, tree high)
4122 tree etype = TREE_TYPE (exp), value;
4124 #ifdef HAVE_canonicalize_funcptr_for_compare
4125 /* Disable this optimization for function pointer expressions
4126 on targets that require function pointer canonicalization. */
4127 if (HAVE_canonicalize_funcptr_for_compare
4128 && TREE_CODE (etype) == POINTER_TYPE
4129 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4130 return NULL_TREE;
4131 #endif
4133 if (! in_p)
4135 value = build_range_check (loc, type, exp, 1, low, high);
4136 if (value != 0)
4137 return invert_truthvalue_loc (loc, value);
4139 return 0;
4142 if (low == 0 && high == 0)
4143 return build_int_cst (type, 1);
4145 if (low == 0)
4146 return fold_build2_loc (loc, LE_EXPR, type, exp,
4147 fold_convert_loc (loc, etype, high));
4149 if (high == 0)
4150 return fold_build2_loc (loc, GE_EXPR, type, exp,
4151 fold_convert_loc (loc, etype, low));
4153 if (operand_equal_p (low, high, 0))
4154 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4155 fold_convert_loc (loc, etype, low));
4157 if (integer_zerop (low))
4159 if (! TYPE_UNSIGNED (etype))
4161 etype = unsigned_type_for (etype);
4162 high = fold_convert_loc (loc, etype, high);
4163 exp = fold_convert_loc (loc, etype, exp);
4165 return build_range_check (loc, type, exp, 1, 0, high);
4168 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4169 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4171 unsigned HOST_WIDE_INT lo;
4172 HOST_WIDE_INT hi;
4173 int prec;
4175 prec = TYPE_PRECISION (etype);
4176 if (prec <= HOST_BITS_PER_WIDE_INT)
4178 hi = 0;
4179 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4181 else
4183 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4184 lo = (unsigned HOST_WIDE_INT) -1;
4187 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4189 if (TYPE_UNSIGNED (etype))
4191 tree signed_etype = signed_type_for (etype);
4192 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4193 etype
4194 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4195 else
4196 etype = signed_etype;
4197 exp = fold_convert_loc (loc, etype, exp);
4199 return fold_build2_loc (loc, GT_EXPR, type, exp,
4200 build_int_cst (etype, 0));
4204 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4205 	 This requires wrap-around arithmetic for the type of the expression.
4206 	 First make sure that arithmetic in this type is valid, then make sure
4207 that it wraps around. */
4208 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4209 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4210 TYPE_UNSIGNED (etype));
4212 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4214 tree utype, minv, maxv;
4216 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4217 for the type in question, as we rely on this here. */
4218 utype = unsigned_type_for (etype);
4219 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4220 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4221 integer_one_node, 1);
4222 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4224 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4225 minv, 1, maxv, 1)))
4226 etype = utype;
4227 else
4228 return 0;
4231 high = fold_convert_loc (loc, etype, high);
4232 low = fold_convert_loc (loc, etype, low);
4233 exp = fold_convert_loc (loc, etype, exp);
4235 value = const_binop (MINUS_EXPR, high, low);
4238 if (POINTER_TYPE_P (etype))
4240 if (value != 0 && !TREE_OVERFLOW (value))
4242 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4243 return build_range_check (loc, type,
4244 fold_build_pointer_plus_loc (loc, exp, low),
4245 1, build_int_cst (etype, 0), value);
4247 return 0;
4250 if (value != 0 && !TREE_OVERFLOW (value))
4251 return build_range_check (loc, type,
4252 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4253 1, build_int_cst (etype, 0), value);
4255 return 0;
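/* Illustrative sketch, not part of GCC: the rewrites above on concrete
   values, assuming an 8-bit char and GCC's modulo conversion to signed
   char.  The subtraction trick relies on wrap-around unsigned
   arithmetic; the second test is the (c>=1) && (c<=127) optimization
   noted above.  Names are ours, for illustration only.  */

static int
build_range_check_sketch (unsigned char c)
{
  /* c >= 'a' && c <= 'z'  becomes  (unsigned) (c - 'a') <= 'z' - 'a'.  */
  int in_range = (c >= 'a' && c <= 'z');
  int folded = ((unsigned int) c - 'a' <= 'z' - 'a');
  /* c >= 1 && c <= 127  becomes  (signed char) c > 0, since [1, 127]
     is exactly the positive range of the signed 8-bit type.  */
  int pos_range = (c >= 1 && c <= 127);
  int pos_folded = ((signed char) c > 0);
  return in_range == folded && pos_range == pos_folded;
}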
4258 /* Return the predecessor of VAL in its type, handling the infinite case. */
4260 static tree
4261 range_predecessor (tree val)
4263 tree type = TREE_TYPE (val);
4265 if (INTEGRAL_TYPE_P (type)
4266 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4267 return 0;
4268 else
4269 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4272 /* Return the successor of VAL in its type, handling the infinite case. */
4274 static tree
4275 range_successor (tree val)
4277 tree type = TREE_TYPE (val);
4279 if (INTEGRAL_TYPE_P (type)
4280 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4281 return 0;
4282 else
4283 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4286 /* Given two ranges, see if we can merge them into one. Return 1 if we
4287 can, 0 if we can't. Set the output range into the specified parameters. */
4289 bool
4290 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4291 tree high0, int in1_p, tree low1, tree high1)
4293 int no_overlap;
4294 int subset;
4295 int temp;
4296 tree tem;
4297 int in_p;
4298 tree low, high;
4299 int lowequal = ((low0 == 0 && low1 == 0)
4300 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4301 low0, 0, low1, 0)));
4302 int highequal = ((high0 == 0 && high1 == 0)
4303 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4304 high0, 1, high1, 1)));
4306 /* Make range 0 be the range that starts first, or ends last if they
4307 	 start at the same value.  Swap them if necessary.  */
4308 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4309 low0, 0, low1, 0))
4310 || (lowequal
4311 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4312 high1, 1, high0, 1))))
4314 temp = in0_p, in0_p = in1_p, in1_p = temp;
4315 tem = low0, low0 = low1, low1 = tem;
4316 tem = high0, high0 = high1, high1 = tem;
4319 /* Now flag two cases, whether the ranges are disjoint or whether the
4320 second range is totally subsumed in the first. Note that the tests
4321 below are simplified by the ones above. */
4322 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4323 high0, 1, low1, 0));
4324 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4325 high1, 1, high0, 1));
4327 /* We now have four cases, depending on whether we are including or
4328 excluding the two ranges. */
4329 if (in0_p && in1_p)
4331 /* If they don't overlap, the result is false. If the second range
4332 is a subset it is the result. Otherwise, the range is from the start
4333 of the second to the end of the first. */
4334 if (no_overlap)
4335 in_p = 0, low = high = 0;
4336 else if (subset)
4337 in_p = 1, low = low1, high = high1;
4338 else
4339 in_p = 1, low = low1, high = high0;
4342 else if (in0_p && ! in1_p)
4344 /* If they don't overlap, the result is the first range. If they are
4345 equal, the result is false. If the second range is a subset of the
4346 first, and the ranges begin at the same place, we go from just after
4347 the end of the second range to the end of the first. If the second
4348 range is not a subset of the first, or if it is a subset and both
4349 ranges end at the same place, the range starts at the start of the
4350 first range and ends just before the second range.
4351 Otherwise, we can't describe this as a single range. */
4352 if (no_overlap)
4353 in_p = 1, low = low0, high = high0;
4354 else if (lowequal && highequal)
4355 in_p = 0, low = high = 0;
4356 else if (subset && lowequal)
4358 low = range_successor (high1);
4359 high = high0;
4360 in_p = 1;
4361 if (low == 0)
4363 /* We are in the weird situation where high0 > high1 but
4364 high1 has no successor. Punt. */
4365 return 0;
4368 else if (! subset || highequal)
4370 low = low0;
4371 high = range_predecessor (low1);
4372 in_p = 1;
4373 if (high == 0)
4375 /* low0 < low1 but low1 has no predecessor. Punt. */
4376 return 0;
4379 else
4380 return 0;
4383 else if (! in0_p && in1_p)
4385 /* If they don't overlap, the result is the second range. If the second
4386 is a subset of the first, the result is false. Otherwise,
4387 the range starts just after the first range and ends at the
4388 end of the second. */
4389 if (no_overlap)
4390 in_p = 1, low = low1, high = high1;
4391 else if (subset || highequal)
4392 in_p = 0, low = high = 0;
4393 else
4395 low = range_successor (high0);
4396 high = high1;
4397 in_p = 1;
4398 if (low == 0)
4400 /* high1 > high0 but high0 has no successor. Punt. */
4401 return 0;
4406 else
4408 /* The case where we are excluding both ranges. Here the complex case
4409 is if they don't overlap. In that case, the only time we have a
4410 range is if they are adjacent. If the second is a subset of the
4411 first, the result is the first. Otherwise, the range to exclude
4412 starts at the beginning of the first range and ends at the end of the
4413 second. */
4414 if (no_overlap)
4416 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4417 range_successor (high0),
4418 1, low1, 0)))
4419 in_p = 0, low = low0, high = high1;
4420 else
4422 /* Canonicalize - [min, x] into - [-, x]. */
4423 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4424 switch (TREE_CODE (TREE_TYPE (low0)))
4426 case ENUMERAL_TYPE:
4427 if (TYPE_PRECISION (TREE_TYPE (low0))
4428 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4429 break;
4430 /* FALLTHROUGH */
4431 case INTEGER_TYPE:
4432 if (tree_int_cst_equal (low0,
4433 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4434 low0 = 0;
4435 break;
4436 case POINTER_TYPE:
4437 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4438 && integer_zerop (low0))
4439 low0 = 0;
4440 break;
4441 default:
4442 break;
4445 /* Canonicalize - [x, max] into - [x, -]. */
4446 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4447 switch (TREE_CODE (TREE_TYPE (high1)))
4449 case ENUMERAL_TYPE:
4450 if (TYPE_PRECISION (TREE_TYPE (high1))
4451 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4452 break;
4453 /* FALLTHROUGH */
4454 case INTEGER_TYPE:
4455 if (tree_int_cst_equal (high1,
4456 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4457 high1 = 0;
4458 break;
4459 case POINTER_TYPE:
4460 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4461 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4462 high1, 1,
4463 integer_one_node, 1)))
4464 high1 = 0;
4465 break;
4466 default:
4467 break;
4470 	     /* The ranges might also be adjacent between the maximum and
4471 minimum values of the given type. For
4472 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4473 return + [x + 1, y - 1]. */
4474 if (low0 == 0 && high1 == 0)
4476 low = range_successor (high0);
4477 high = range_predecessor (low1);
4478 if (low == 0 || high == 0)
4479 return 0;
4481 in_p = 1;
4483 else
4484 return 0;
4487 else if (subset)
4488 in_p = 0, low = low0, high = high0;
4489 else
4490 in_p = 0, low = low0, high = high1;
4493 *pin_p = in_p, *plow = low, *phigh = high;
4494 return 1;
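/* Illustrative sketch, not part of GCC: the in0_p && in1_p case above
   on concrete intervals.  Intersecting + [2, 9] with + [5, 20]: the
   ranges overlap and neither is a subset of the other, so the result
   runs from the start of the second to the end of the first, + [5, 9].
   Names are ours, for illustration only.  */

static int
merge_ranges_sketch (int x)
{
  int separate = (x >= 2 && x <= 9) && (x >= 5 && x <= 20);
  int merged = (x >= 5 && x <= 9);
  return separate == merged;  /* Holds for every X.  */
}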
4498 /* Subroutine of fold, looking inside expressions of the form
4499 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4500 of the COND_EXPR. This function is being used also to optimize
4501 A op B ? C : A, by reversing the comparison first.
4503 Return a folded expression whose code is not a COND_EXPR
4504 anymore, or NULL_TREE if no folding opportunity is found. */
4506 static tree
4507 fold_cond_expr_with_comparison (location_t loc, tree type,
4508 tree arg0, tree arg1, tree arg2)
4510 enum tree_code comp_code = TREE_CODE (arg0);
4511 tree arg00 = TREE_OPERAND (arg0, 0);
4512 tree arg01 = TREE_OPERAND (arg0, 1);
4513 tree arg1_type = TREE_TYPE (arg1);
4514 tree tem;
4516 STRIP_NOPS (arg1);
4517 STRIP_NOPS (arg2);
4519 /* If we have A op 0 ? A : -A, consider applying the following
4520 transformations:
4522 A == 0? A : -A same as -A
4523 A != 0? A : -A same as A
4524 A >= 0? A : -A same as abs (A)
4525 A > 0? A : -A same as abs (A)
4526 A <= 0? A : -A same as -abs (A)
4527 A < 0? A : -A same as -abs (A)
4529 None of these transformations work for modes with signed
4530 zeros. If A is +/-0, the first two transformations will
4531 change the sign of the result (from +0 to -0, or vice
4532 versa). The last four will fix the sign of the result,
4533 even though the original expressions could be positive or
4534 negative, depending on the sign of A.
4536 Note that all these transformations are correct if A is
4537 NaN, since the two alternatives (A and -A) are also NaNs. */
4538 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4539 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4540 ? real_zerop (arg01)
4541 : integer_zerop (arg01))
4542 && ((TREE_CODE (arg2) == NEGATE_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4544 /* In the case that A is of the form X-Y, '-A' (arg2) may
4545 have already been folded to Y-X, check for that. */
4546 || (TREE_CODE (arg1) == MINUS_EXPR
4547 && TREE_CODE (arg2) == MINUS_EXPR
4548 && operand_equal_p (TREE_OPERAND (arg1, 0),
4549 TREE_OPERAND (arg2, 1), 0)
4550 && operand_equal_p (TREE_OPERAND (arg1, 1),
4551 TREE_OPERAND (arg2, 0), 0))))
4552 switch (comp_code)
4554 case EQ_EXPR:
4555 case UNEQ_EXPR:
4556 tem = fold_convert_loc (loc, arg1_type, arg1);
4557 return pedantic_non_lvalue_loc (loc,
4558 fold_convert_loc (loc, type,
4559 negate_expr (tem)));
4560 case NE_EXPR:
4561 case LTGT_EXPR:
4562 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4563 case UNGE_EXPR:
4564 case UNGT_EXPR:
4565 if (flag_trapping_math)
4566 break;
4567 /* Fall through. */
4568 case GE_EXPR:
4569 case GT_EXPR:
4570 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4571 arg1 = fold_convert_loc (loc, signed_type_for
4572 (TREE_TYPE (arg1)), arg1);
4573 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4574 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4575 case UNLE_EXPR:
4576 case UNLT_EXPR:
4577 if (flag_trapping_math)
4578 break;
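	  /* Fall through.  */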
4579 case LE_EXPR:
4580 case LT_EXPR:
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return negate_expr (fold_convert_loc (loc, type, tem));
4586 default:
4587 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4588 break;
4591 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4592 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4593 both transformations are correct when A is NaN: A != 0
4594 is then true, and A == 0 is false. */
4596 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4597 && integer_zerop (arg01) && integer_zerop (arg2))
4599 if (comp_code == NE_EXPR)
4600 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4601 else if (comp_code == EQ_EXPR)
4602 return build_int_cst (type, 0);
4605 /* Try some transformations of A op B ? A : B.
4607 A == B? A : B same as B
4608 A != B? A : B same as A
4609 A >= B? A : B same as max (A, B)
4610 A > B? A : B same as max (B, A)
4611 A <= B? A : B same as min (A, B)
4612 A < B? A : B same as min (B, A)
4614 As above, these transformations don't work in the presence
4615 of signed zeros. For example, if A and B are zeros of
4616 opposite sign, the first two transformations will change
4617 the sign of the result. In the last four, the original
4618 expressions give different results for (A=+0, B=-0) and
4619 (A=-0, B=+0), but the transformed expressions do not.
4621 The first two transformations are correct if either A or B
4622 is a NaN. In the first transformation, the condition will
4623 be false, and B will indeed be chosen. In the case of the
4624 second transformation, the condition A != B will be true,
4625 and A will be chosen.
4627 The conversions to max() and min() are not correct if B is
4628 a number and A is not. The conditions in the original
4629 expressions will be false, so all four give B. The min()
4630 and max() versions would give a NaN instead. */
4631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4633 /* Avoid these transformations if the COND_EXPR may be used
4634 as an lvalue in the C++ front-end. PR c++/19199. */
4635 && (in_gimple_form
4636 || (strcmp (lang_hooks.name, "GNU C++") != 0
4637 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4638 || ! maybe_lvalue_p (arg1)
4639 || ! maybe_lvalue_p (arg2)))
4641 tree comp_op0 = arg00;
4642 tree comp_op1 = arg01;
4643 tree comp_type = TREE_TYPE (comp_op0);
4645 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4646 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4648 comp_type = type;
4649 comp_op0 = arg1;
4650 comp_op1 = arg2;
4653 switch (comp_code)
4655 case EQ_EXPR:
4656 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4657 case NE_EXPR:
4658 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4659 case LE_EXPR:
4660 case LT_EXPR:
4661 case UNLE_EXPR:
4662 case UNLT_EXPR:
4663 /* In C++ a ?: expression can be an lvalue, so put the
4664 operand which will be used if they are equal first
4665 so that we can convert this back to the
4666 corresponding COND_EXPR. */
4667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4669 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4670 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4671 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4672 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4673 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4674 comp_op1, comp_op0);
4675 return pedantic_non_lvalue_loc (loc,
4676 fold_convert_loc (loc, type, tem));
4678 break;
4679 case GE_EXPR:
4680 case GT_EXPR:
4681 case UNGE_EXPR:
4682 case UNGT_EXPR:
4683 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4685 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4686 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4687 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4688 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4689 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4690 comp_op1, comp_op0);
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, tem));
4694 break;
4695 case UNEQ_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4697 return pedantic_non_lvalue_loc (loc,
4698 fold_convert_loc (loc, type, arg2));
4699 break;
4700 case LTGT_EXPR:
4701 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, arg1));
4704 break;
4705 default:
4706 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4707 break;
4711 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4712 we might still be able to simplify this. For example,
4713 if C1 is one less or one more than C2, this might have started
4714 out as a MIN or MAX and been transformed by this function.
4715 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4717 if (INTEGRAL_TYPE_P (type)
4718 && TREE_CODE (arg01) == INTEGER_CST
4719 && TREE_CODE (arg2) == INTEGER_CST)
4720 switch (comp_code)
4722 case EQ_EXPR:
4723 if (TREE_CODE (arg1) == INTEGER_CST)
4724 break;
4725 /* We can replace A with C1 in this case. */
4726 arg1 = fold_convert_loc (loc, type, arg01);
4727 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4729 case LT_EXPR:
4730 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4731 MIN_EXPR, to preserve the signedness of the comparison. */
4732 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4733 OEP_ONLY_CONST)
4734 && operand_equal_p (arg01,
4735 const_binop (PLUS_EXPR, arg2,
4736 build_int_cst (type, 1)),
4737 OEP_ONLY_CONST))
4739 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4740 fold_convert_loc (loc, TREE_TYPE (arg00),
4741 arg2));
4742 return pedantic_non_lvalue_loc (loc,
4743 fold_convert_loc (loc, type, tem));
4745 break;
4747 case LE_EXPR:
4748 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4749 as above. */
4750 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4751 OEP_ONLY_CONST)
4752 && operand_equal_p (arg01,
4753 const_binop (MINUS_EXPR, arg2,
4754 build_int_cst (type, 1)),
4755 OEP_ONLY_CONST))
4757 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4758 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 arg2));
4760 return pedantic_non_lvalue_loc (loc,
4761 fold_convert_loc (loc, type, tem));
4763 break;
4765 case GT_EXPR:
4766 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4767 MAX_EXPR, to preserve the signedness of the comparison. */
4768 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4769 OEP_ONLY_CONST)
4770 && operand_equal_p (arg01,
4771 const_binop (MINUS_EXPR, arg2,
4772 build_int_cst (type, 1)),
4773 OEP_ONLY_CONST))
4775 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4776 fold_convert_loc (loc, TREE_TYPE (arg00),
4777 arg2));
4778 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4780 break;
4782 case GE_EXPR:
4783 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4784 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (PLUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4796 break;
4797 case NE_EXPR:
4798 break;
4799 default:
4800 gcc_unreachable ();
4803 return NULL_TREE;
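/* Illustrative sketch, not part of GCC: the A op 0 ? A : -A and
   A op B ? A : B rewrites above on plain ints, where signed zeros and
   NaNs do not arise (and ignoring the INT_MIN negation corner case).
   The macro and function names are ours, for illustration only.  */

#define SKETCH_MIN(x, y) ((x) < (y) ? (x) : (y))
#define SKETCH_MAX(x, y) ((x) > (y) ? (x) : (y))

static int
cond_expr_sketch (int a, int b)
{
  int abs_ok = ((a > 0 ? a : -a) == (a < 0 ? -a : a));  /* abs (A) */
  int min_ok = ((a <= b ? a : b) == SKETCH_MIN (a, b)); /* min (A, B) */
  int max_ok = ((a > b ? a : b) == SKETCH_MAX (b, a));  /* max (B, A) */
  return abs_ok && min_ok && max_ok;
}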
4808 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4809 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4810 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4811 false) >= 2)
4812 #endif
4814 /* EXP is some logical combination of boolean tests. See if we can
4815 merge it into some range test. Return the new tree if so. */
4817 static tree
4818 fold_range_test (location_t loc, enum tree_code code, tree type,
4819 tree op0, tree op1)
4821 int or_op = (code == TRUTH_ORIF_EXPR
4822 || code == TRUTH_OR_EXPR);
4823 int in0_p, in1_p, in_p;
4824 tree low0, low1, low, high0, high1, high;
4825 bool strict_overflow_p = false;
4826 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4827 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4828 tree tem;
4829 const char * const warnmsg = G_("assuming signed overflow does not occur "
4830 "when simplifying range test");
4832 /* If this is an OR operation, invert both sides; we will invert
4833 again at the end. */
4834 if (or_op)
4835 in0_p = ! in0_p, in1_p = ! in1_p;
4837 /* If both expressions are the same, if we can merge the ranges, and we
4838 can build the range test, return it or it inverted. If one of the
4839 ranges is always true or always false, consider it to be the same
4840 expression as the other. */
4841 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4842 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4843 in1_p, low1, high1)
4844 && 0 != (tem = (build_range_check (loc, type,
4845 lhs != 0 ? lhs
4846 : rhs != 0 ? rhs : integer_zero_node,
4847 in_p, low, high))))
4849 if (strict_overflow_p)
4850 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4851 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4854 	 /* On machines where branches are expensive, if this is a
4855 short-circuited branch and the underlying object on both sides
4856 is the same, make a non-short-circuit operation. */
4857 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4858 && lhs != 0 && rhs != 0
4859 && (code == TRUTH_ANDIF_EXPR
4860 || code == TRUTH_ORIF_EXPR)
4861 && operand_equal_p (lhs, rhs, 0))
4863 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4864 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4865 which cases we can't do this. */
4866 if (simple_operand_p (lhs))
4867 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4868 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4869 type, op0, op1);
4871 else if (!lang_hooks.decls.global_bindings_p ()
4872 && !CONTAINS_PLACEHOLDER_P (lhs))
4874 tree common = save_expr (lhs);
4876 if (0 != (lhs = build_range_check (loc, type, common,
4877 or_op ? ! in0_p : in0_p,
4878 low0, high0))
4879 && (0 != (rhs = build_range_check (loc, type, common,
4880 or_op ? ! in1_p : in1_p,
4881 low1, high1))))
4883 if (strict_overflow_p)
4884 fold_overflow_warning (warnmsg,
4885 WARN_STRICT_OVERFLOW_COMPARISON);
4886 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4887 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4888 type, lhs, rhs);
4893 return 0;
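/* Illustrative sketch, not part of GCC: the fold above on a concrete OR
   of comparisons.  X < 3 || X > 10 is the inverse of X in + [3, 10];
   both sides are inverted, merged into one range, and the result
   inverted back, leaving a single unsigned comparison and no
   short-circuit branch.  The function name is ours.  */

static int
fold_range_test_sketch (unsigned int x)
{
  int short_circuit = (x < 3u || x > 10u);
  int folded = (x - 3u > 7u);  /* (unsigned) (X - 3) > 10 - 3.  */
  return short_circuit == folded;  /* Holds for every X.  */
}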
4896 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4897 bit value. Arrange things so the extra bits will be set to zero if and
4898 	 only if C is sign-extended to its full width.  If MASK is nonzero,
4899 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4901 static tree
4902 unextend (tree c, int p, int unsignedp, tree mask)
4904 tree type = TREE_TYPE (c);
4905 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4906 tree temp;
4908 if (p == modesize || unsignedp)
4909 return c;
4911 /* We work by getting just the sign bit into the low-order bit, then
4912 into the high-order bit, then sign-extend. We then XOR that value
4913 with C. */
4914 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4915 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4917 /* We must use a signed type in order to get an arithmetic right shift.
4918 However, we must also avoid introducing accidental overflows, so that
4919 a subsequent call to integer_zerop will work. Hence we must
4920 do the type conversion here. At this point, the constant is either
4921 zero or one, and the conversion to a signed type can never overflow.
4922 We could get an overflow if this conversion is done anywhere else. */
4923 if (TYPE_UNSIGNED (type))
4924 temp = fold_convert (signed_type_for (type), temp);
4926 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4927 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4928 if (mask != 0)
4929 temp = const_binop (BIT_AND_EXPR, temp,
4930 fold_convert (TREE_TYPE (c), mask));
4931 /* If necessary, convert the type back to match the type of C. */
4932 if (TYPE_UNSIGNED (type))
4933 temp = fold_convert (type, temp);
4935 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
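/* Illustrative sketch, not part of GCC: the shift-and-XOR trick above
   for a 32-bit constant holding an 8-bit field (MODESIZE = 32, P = 8).
   It assumes two's complement and arithmetic right shifts of signed
   ints, which is what the tree-level constant arithmetic above
   provides.  Names are ours, for illustration only.  */

static unsigned int
unextend_sketch (unsigned int c)
{
  const int modesize = 32, p = 8;
  /* Get just the sign bit of the field into the low-order bit.  */
  unsigned int sign = (c >> (p - 1)) & 1u;
  /* Move it to the high-order bit and smear it over the extra bits
     with an arithmetic right shift.  */
  int smear = (int) (sign << (modesize - 1)) >> (modesize - p - 1);
  /* XOR with C: the extra bits become zero iff C was already
     sign-extended to the full 32 bits.  */
  return c ^ (unsigned int) smear;
}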
4938 /* For an expression that has the form
4939 	 (A && B) || ~B
4940 	 or
4941 	 (A || B) && ~B,
4942 	 we can drop one of the inner expressions and simplify to
4943 	 A || ~B
4944 	 or
4945 	 A && ~B
4946 	 LOC is the location of the resulting expression.  OP is the inner
4947 	 logical operation (the left-hand side in the examples above), while
4948 	 CMPOP is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
4949 removing a condition that guards another, as in
4950 (A != NULL && A->...) || A == NULL
4951 which we must not transform. If RHS_ONLY is true, only eliminate the
4952 right-most operand of the inner logical operation. */
4954 static tree
4955 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4956 bool rhs_only)
4958 tree type = TREE_TYPE (cmpop);
4959 enum tree_code code = TREE_CODE (cmpop);
4960 enum tree_code truthop_code = TREE_CODE (op);
4961 tree lhs = TREE_OPERAND (op, 0);
4962 tree rhs = TREE_OPERAND (op, 1);
4963 tree orig_lhs = lhs, orig_rhs = rhs;
4964 enum tree_code rhs_code = TREE_CODE (rhs);
4965 enum tree_code lhs_code = TREE_CODE (lhs);
4966 enum tree_code inv_code;
4968 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4969 return NULL_TREE;
4971 if (TREE_CODE_CLASS (code) != tcc_comparison)
4972 return NULL_TREE;
4974 if (rhs_code == truthop_code)
4976 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4977 if (newrhs != NULL_TREE)
4979 rhs = newrhs;
4980 rhs_code = TREE_CODE (rhs);
4983 if (lhs_code == truthop_code && !rhs_only)
4985 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4986 if (newlhs != NULL_TREE)
4988 lhs = newlhs;
4989 lhs_code = TREE_CODE (lhs);
4993 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4994 if (inv_code == rhs_code
4995 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4996 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4997 return lhs;
4998 if (!rhs_only && inv_code == lhs_code
4999 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5000 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5001 return rhs;
5002 if (rhs != orig_rhs || lhs != orig_lhs)
5003 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5004 lhs, rhs);
5005 return NULL_TREE;
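/* Illustrative sketch, not part of GCC: the simplification above as a
   boolean identity, checked exhaustively over all input combinations.
   The function name is ours, for illustration only.  */

static int
merge_truthop_sketch (void)
{
  int a, b, ok = 1;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	ok &= (((a && b) || !b) == (a || !b));  /* (A && B) || ~B */
	ok &= (((a || b) && !b) == (a && !b));  /* (A || B) && ~B */
      }
  return ok;  /* Always 1.  */
}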
5008 /* Find ways of folding logical expressions of LHS and RHS:
5009 Try to merge two comparisons to the same innermost item.
5010 Look for range tests like "ch >= '0' && ch <= '9'".
5011 Look for combinations of simple terms on machines with expensive branches
5012 and evaluate the RHS unconditionally.
5014 For example, if we have p->a == 2 && p->b == 4 and we can make an
5015 object large enough to span both A and B, we can do this with a comparison
5016 	 against the object ANDed with a mask.
5018 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5019 operations to do this with one comparison.
5021 	 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5022 	 function and the one above.
5024 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5025 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5027 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5028 two operands.
5030 We return the simplified tree or 0 if no optimization is possible. */
5032 static tree
5033 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5034 tree lhs, tree rhs)
5036 /* If this is the "or" of two comparisons, we can do something if
5037 the comparisons are NE_EXPR. If this is the "and", we can do something
5038 if the comparisons are EQ_EXPR. I.e.,
5039 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5041 WANTED_CODE is this operation code. For single bit fields, we can
5042 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5043 comparison for one-bit fields. */
5045 enum tree_code wanted_code;
5046 enum tree_code lcode, rcode;
5047 tree ll_arg, lr_arg, rl_arg, rr_arg;
5048 tree ll_inner, lr_inner, rl_inner, rr_inner;
5049 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5050 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5051 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5052 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5053 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5054 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5055 enum machine_mode lnmode, rnmode;
5056 tree ll_mask, lr_mask, rl_mask, rr_mask;
5057 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5058 tree l_const, r_const;
5059 tree lntype, rntype, result;
5060 HOST_WIDE_INT first_bit, end_bit;
5061 int volatilep;
5063 /* Start by getting the comparison codes. Fail if anything is volatile.
5064 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5065 it were surrounded with a NE_EXPR. */
5067 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5068 return 0;
5070 lcode = TREE_CODE (lhs);
5071 rcode = TREE_CODE (rhs);
5073 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5075 lhs = build2 (NE_EXPR, truth_type, lhs,
5076 build_int_cst (TREE_TYPE (lhs), 0));
5077 lcode = NE_EXPR;
5080 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5082 rhs = build2 (NE_EXPR, truth_type, rhs,
5083 build_int_cst (TREE_TYPE (rhs), 0));
5084 rcode = NE_EXPR;
5087 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5088 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5089 return 0;
5091 ll_arg = TREE_OPERAND (lhs, 0);
5092 lr_arg = TREE_OPERAND (lhs, 1);
5093 rl_arg = TREE_OPERAND (rhs, 0);
5094 rr_arg = TREE_OPERAND (rhs, 1);
5096 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5097 if (simple_operand_p (ll_arg)
5098 && simple_operand_p (lr_arg))
5100 if (operand_equal_p (ll_arg, rl_arg, 0)
5101 && operand_equal_p (lr_arg, rr_arg, 0))
5103 result = combine_comparisons (loc, code, lcode, rcode,
5104 truth_type, ll_arg, lr_arg);
5105 if (result)
5106 return result;
5108 else if (operand_equal_p (ll_arg, rr_arg, 0)
5109 && operand_equal_p (lr_arg, rl_arg, 0))
5111 result = combine_comparisons (loc, code, lcode,
5112 swap_tree_comparison (rcode),
5113 truth_type, ll_arg, lr_arg);
5114 if (result)
5115 return result;
5119 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5120 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5122 /* If the RHS can be evaluated unconditionally and its operands are
5123 simple, it wins to evaluate the RHS unconditionally on machines
5124 with expensive branches. In this case, this isn't a comparison
5125 that can be merged. */
5127 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5128 false) >= 2
5129 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5130 && simple_operand_p (rl_arg)
5131 && simple_operand_p (rr_arg))
5133 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5134 if (code == TRUTH_OR_EXPR
5135 && lcode == NE_EXPR && integer_zerop (lr_arg)
5136 && rcode == NE_EXPR && integer_zerop (rr_arg)
5137 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5138 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5139 return build2_loc (loc, NE_EXPR, truth_type,
5140 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5141 ll_arg, rl_arg),
5142 build_int_cst (TREE_TYPE (ll_arg), 0));
5144 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5145 if (code == TRUTH_AND_EXPR
5146 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5147 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5148 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5149 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 return build2_loc (loc, EQ_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5156 /* See if the comparisons can be merged. Then get all the parameters for
5157 each side. */
5159 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5160 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5161 return 0;
5163 volatilep = 0;
5164 ll_inner = decode_field_reference (loc, ll_arg,
5165 &ll_bitsize, &ll_bitpos, &ll_mode,
5166 &ll_unsignedp, &volatilep, &ll_mask,
5167 &ll_and_mask);
5168 lr_inner = decode_field_reference (loc, lr_arg,
5169 &lr_bitsize, &lr_bitpos, &lr_mode,
5170 &lr_unsignedp, &volatilep, &lr_mask,
5171 &lr_and_mask);
5172 rl_inner = decode_field_reference (loc, rl_arg,
5173 &rl_bitsize, &rl_bitpos, &rl_mode,
5174 &rl_unsignedp, &volatilep, &rl_mask,
5175 &rl_and_mask);
5176 rr_inner = decode_field_reference (loc, rr_arg,
5177 &rr_bitsize, &rr_bitpos, &rr_mode,
5178 &rr_unsignedp, &volatilep, &rr_mask,
5179 &rr_and_mask);
5181 	 /* The inner operation on the lhs of each comparison must be the
5182 	    same if we are to be able to do anything.
5183 Then see if we have constants. If not, the same must be true for
5184 the rhs's. */
5185 if (volatilep || ll_inner == 0 || rl_inner == 0
5186 || ! operand_equal_p (ll_inner, rl_inner, 0))
5187 return 0;
5189 if (TREE_CODE (lr_arg) == INTEGER_CST
5190 && TREE_CODE (rr_arg) == INTEGER_CST)
5191 l_const = lr_arg, r_const = rr_arg;
5192 else if (lr_inner == 0 || rr_inner == 0
5193 || ! operand_equal_p (lr_inner, rr_inner, 0))
5194 return 0;
5195 else
5196 l_const = r_const = 0;
5198 /* If either comparison code is not correct for our logical operation,
5199 fail. However, we can convert a one-bit comparison against zero into
5200 the opposite comparison against that bit being set in the field. */
5202 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5203 if (lcode != wanted_code)
5205 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5207 /* Make the left operand unsigned, since we are only interested
5208 in the value of one bit. Otherwise we are doing the wrong
5209 thing below. */
5210 ll_unsignedp = 1;
5211 l_const = ll_mask;
5213 else
5214 return 0;
5217 /* This is analogous to the code for l_const above. */
5218 if (rcode != wanted_code)
5220 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5222 rl_unsignedp = 1;
5223 r_const = rl_mask;
5225 else
5226 return 0;
5229 /* See if we can find a mode that contains both fields being compared on
5230 the left. If we can't, fail. Otherwise, update all constants and masks
5231 to be relative to a field of that size. */
5232 first_bit = MIN (ll_bitpos, rl_bitpos);
5233 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5234 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5235 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5236 volatilep);
5237 if (lnmode == VOIDmode)
5238 return 0;
5240 lnbitsize = GET_MODE_BITSIZE (lnmode);
5241 lnbitpos = first_bit & ~ (lnbitsize - 1);
5242 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5243 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5245 if (BYTES_BIG_ENDIAN)
5247 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5248 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5251 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5252 size_int (xll_bitpos));
5253 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5254 size_int (xrl_bitpos));
5256 if (l_const)
5258 l_const = fold_convert_loc (loc, lntype, l_const);
5259 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5260 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5261 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5262 fold_build1_loc (loc, BIT_NOT_EXPR,
5263 lntype, ll_mask))))
5265 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5267 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5270 if (r_const)
5272 r_const = fold_convert_loc (loc, lntype, r_const);
5273 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5274 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5275 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5276 fold_build1_loc (loc, BIT_NOT_EXPR,
5277 lntype, rl_mask))))
5279 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5281 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5285 	 /* If the right sides are not constant, do the same for them.  Also,
5286 disallow this optimization if a size or signedness mismatch occurs
5287 between the left and right sides. */
5288 if (l_const == 0)
5290 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5291 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5292 /* Make sure the two fields on the right
5293 correspond to the left without being swapped. */
5294 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5295 return 0;
5297 first_bit = MIN (lr_bitpos, rr_bitpos);
5298 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5299 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5300 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5301 volatilep);
5302 if (rnmode == VOIDmode)
5303 return 0;
5305 rnbitsize = GET_MODE_BITSIZE (rnmode);
5306 rnbitpos = first_bit & ~ (rnbitsize - 1);
5307 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5308 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5310 if (BYTES_BIG_ENDIAN)
5312 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5313 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5316 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5317 rntype, lr_mask),
5318 size_int (xlr_bitpos));
5319 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5320 rntype, rr_mask),
5321 size_int (xrr_bitpos));
5323 /* Make a mask that corresponds to both fields being compared.
5324 Do this for both items being compared. If the operands are the
5325 same size and the bits being compared are in the same position
5326 then we can do this by masking both and comparing the masked
5327 results. */
5328 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5329 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5330 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5332 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5333 ll_unsignedp || rl_unsignedp);
5334 if (! all_ones_mask_p (ll_mask, lnbitsize))
5335 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5337 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5338 lr_unsignedp || rr_unsignedp);
5339 if (! all_ones_mask_p (lr_mask, rnbitsize))
5340 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5342 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5345 /* There is still another way we can do something: If both pairs of
5346 fields being compared are adjacent, we may be able to make a wider
5347 field containing them both.
5349 Note that we still must mask the lhs/rhs expressions. Furthermore,
5350 the mask must be shifted to account for the shift done by
5351 make_bit_field_ref. */
5352 if ((ll_bitsize + ll_bitpos == rl_bitpos
5353 && lr_bitsize + lr_bitpos == rr_bitpos)
5354 || (ll_bitpos == rl_bitpos + rl_bitsize
5355 && lr_bitpos == rr_bitpos + rr_bitsize))
5357 tree type;
5359 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5360 ll_bitsize + rl_bitsize,
5361 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5362 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5363 lr_bitsize + rr_bitsize,
5364 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5366 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5367 size_int (MIN (xll_bitpos, xrl_bitpos)));
5368 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5369 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5371 /* Convert to the smaller type before masking out unwanted bits. */
5372 type = lntype;
5373 if (lntype != rntype)
5375 if (lnbitsize > rnbitsize)
5377 lhs = fold_convert_loc (loc, rntype, lhs);
5378 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5379 type = rntype;
5381 else if (lnbitsize < rnbitsize)
5383 rhs = fold_convert_loc (loc, lntype, rhs);
5384 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5385 type = lntype;
5389 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5390 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5392 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5393 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5395 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5398 return 0;
5401 /* Handle the case of comparisons with constants. If there is something in
5402 common between the masks, those bits of the constants must be the same.
5403 If not, the condition is always false. Test for this to avoid generating
5404 incorrect code below. */
5405 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5406 if (! integer_zerop (result)
5407 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5408 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5410 if (wanted_code == NE_EXPR)
5412 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5413 return constant_boolean_node (true, truth_type);
5415 else
5417 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5418 return constant_boolean_node (false, truth_type);
5422 /* Construct the expression we will return. First get the component
5423 	 reference we will make.  Unless the mask is all ones for the width of
5424 that field, perform the mask operation. Then compare with the
5425 merged constant. */
5426 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5427 ll_unsignedp || rl_unsignedp);
5429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5430 if (! all_ones_mask_p (ll_mask, lnbitsize))
5431 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5433 return build2_loc (loc, wanted_code, truth_type, result,
5434 const_binop (BIT_IOR_EXPR, l_const, r_const));
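/* Illustrative sketch, not part of GCC: two of the rewrites above in
   plain C.  The first trades a conditional branch for a bitwise OR by
   evaluating both operands unconditionally; the second shows two
   adjacent bit-field tests collapsing into one masked comparison on
   the containing word.  Names and the mask layout are ours.  */

static int
truth_andor_sketch (unsigned int a, unsigned int b, unsigned int word)
{
  /* (a != 0) || (b != 0)  becomes  (a | b) != 0.  */
  int or_ok = ((a != 0u || b != 0u) == ((a | b) != 0u));
  /* Bits 0-3 == 2 && bits 4-7 == 4 becomes one compare of bits 0-7.  */
  int separate = ((word & 0x0fu) == 0x02u) && ((word & 0xf0u) == 0x40u);
  int merged = ((word & 0xffu) == 0x42u);
  return or_ok && (separate == merged);
}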
5437 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5438 constant. */
5440 static tree
5441 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5442 tree op0, tree op1)
5444 tree arg0 = op0;
5445 enum tree_code op_code;
5446 tree comp_const;
5447 tree minmax_const;
5448 int consts_equal, consts_lt;
5449 tree inner;
5451 STRIP_SIGN_NOPS (arg0);
5453 op_code = TREE_CODE (arg0);
5454 minmax_const = TREE_OPERAND (arg0, 1);
5455 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5456 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5457 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5458 inner = TREE_OPERAND (arg0, 0);
5460 /* If something does not permit us to optimize, return NULL_TREE. */
5461 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5462 || TREE_CODE (comp_const) != INTEGER_CST
5463 || TREE_OVERFLOW (comp_const)
5464 || TREE_CODE (minmax_const) != INTEGER_CST
5465 || TREE_OVERFLOW (minmax_const))
5466 return NULL_TREE;
5468 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5469 and GT_EXPR, doing the rest with recursive calls using logical
5470 simplifications. */
5471 switch (code)
5473 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5475 tree tem
5476 = optimize_minmax_comparison (loc,
5477 invert_tree_comparison (code, false),
5478 type, op0, op1);
5479 if (tem)
5480 return invert_truthvalue_loc (loc, tem);
5481 return NULL_TREE;
5484 case GE_EXPR:
5485 return
5486 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5487 optimize_minmax_comparison
5488 (loc, EQ_EXPR, type, arg0, comp_const),
5489 optimize_minmax_comparison
5490 (loc, GT_EXPR, type, arg0, comp_const));
5492 case EQ_EXPR:
5493 if (op_code == MAX_EXPR && consts_equal)
5494 /* MAX (X, 0) == 0 -> X <= 0 */
5495 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5497 else if (op_code == MAX_EXPR && consts_lt)
5498 /* MAX (X, 0) == 5 -> X == 5 */
5499 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5501 else if (op_code == MAX_EXPR)
5502 /* MAX (X, 0) == -1 -> false */
5503 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5505 else if (consts_equal)
5506 /* MIN (X, 0) == 0 -> X >= 0 */
5507 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5509 else if (consts_lt)
5510 /* MIN (X, 0) == 5 -> false */
5511 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5513 else
5514 /* MIN (X, 0) == -1 -> X == -1 */
5515 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5517 case GT_EXPR:
5518 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5519 /* MAX (X, 0) > 0 -> X > 0
5520 MAX (X, 0) > 5 -> X > 5 */
5521 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5523 else if (op_code == MAX_EXPR)
5524 /* MAX (X, 0) > -1 -> true */
5525 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5527 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5528 /* MIN (X, 0) > 0 -> false
5529 MIN (X, 0) > 5 -> false */
5530 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5532 else
5533 /* MIN (X, 0) > -1 -> X > -1 */
5534 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5536 default:
5537 return NULL_TREE;
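/* Illustrative sketch (editor's addition): concrete instances of the
   case analysis above, written with a plain MAX macro for exposition.  */

#define EXAMPLE_MAX(a, b) ((a) > (b) ? (a) : (b))

static int
example_minmax_compare (int x)
{
  /* MAX (x, 0) > 5 folds to x > 5, and MAX (x, 0) > -1 folds to
     constant true, exactly as the GT_EXPR case describes.  */
  int original = (EXAMPLE_MAX (x, 0) > 5);
  int folded = (x > 5);
  return original == folded;	/* always 1 */
}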
5541 /* T is an integer expression that is being multiplied, divided, or taken
5542 modulo a constant C (CODE says which operation, and what kind of divide
5543 or modulus). See if we can eliminate that operation by folding it with
5544 other operations already in T. WIDE_TYPE, if non-null, is a type that
5545 should be used for the computation if wider than our type.
5547 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5548 (X * 2) + (Y * 4). We must, however, be assured that either the original
5549 expression would not overflow or that overflow is undefined for the type
5550 in the language in question.
5552 If we return a non-null expression, it is an equivalent form of the
5553 original computation, but need not be in the original type.
5555 We set *STRICT_OVERFLOW_P to true if the return value depends on
5556 signed overflow being undefined. Otherwise we do not change
5557 *STRICT_OVERFLOW_P. */
5559 static tree
5560 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5561 bool *strict_overflow_p)
5563 /* To avoid exponential search depth, refuse to allow recursion past
5564 three levels. Beyond that (1) it's highly unlikely that we'll find
5565 something interesting and (2) we've probably processed it before
5566 when we built the inner expression. */
5568 static int depth;
5569 tree ret;
5571 if (depth > 3)
5572 return NULL;
5574 depth++;
5575 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5576 depth--;
5578 return ret;
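/* Illustrative sketch (editor's addition): the distribution described
   in the comment before extract_muldiv, on concrete operands.  The
   fold relies on signed overflow being undefined, so the intermediate
   products are assumed not to wrap.  */

static int
example_extract_muldiv (int x, int y)
{
  int original = (x * 8 + y * 16) / 4;
  int folded = x * 2 + y * 4;	/* the form extract_muldiv produces */
  return original == folded;	/* 1 whenever the original is well defined */
}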
5581 static tree
5582 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5583 bool *strict_overflow_p)
5585 tree type = TREE_TYPE (t);
5586 enum tree_code tcode = TREE_CODE (t);
5587 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5588 > GET_MODE_SIZE (TYPE_MODE (type)))
5589 ? wide_type : type);
5590 tree t1, t2;
5591 int same_p = tcode == code;
5592 tree op0 = NULL_TREE, op1 = NULL_TREE;
5593 bool sub_strict_overflow_p;
5595 /* Don't deal with constants of zero here; they confuse the code below. */
5596 if (integer_zerop (c))
5597 return NULL_TREE;
5599 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5600 op0 = TREE_OPERAND (t, 0);
5602 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5603 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5605 /* Note that we need not handle conditional operations here since fold
5606 already handles those cases. So just do arithmetic here. */
5607 switch (tcode)
5609 case INTEGER_CST:
5610 /* For a constant, we can always simplify if we are a multiply
5611 or (for divide and modulus) if it is a multiple of our constant. */
5612 if (code == MULT_EXPR
5613 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5614 return const_binop (code, fold_convert (ctype, t),
5615 fold_convert (ctype, c));
5616 break;
5618 CASE_CONVERT: case NON_LVALUE_EXPR:
5619 /* If op0 is an expression ... */
5620 if ((COMPARISON_CLASS_P (op0)
5621 || UNARY_CLASS_P (op0)
5622 || BINARY_CLASS_P (op0)
5623 || VL_EXP_CLASS_P (op0)
5624 || EXPRESSION_CLASS_P (op0))
5625 /* ... and has wrapping overflow, and its type is smaller
5626 than ctype, then we cannot pass through as widening. */
5627 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5628 && (TYPE_PRECISION (ctype)
5629 > TYPE_PRECISION (TREE_TYPE (op0))))
5630 /* ... or this is a truncation (t is narrower than op0),
5631 then we cannot pass through this narrowing. */
5632 || (TYPE_PRECISION (type)
5633 < TYPE_PRECISION (TREE_TYPE (op0)))
5634 /* ... or signedness changes for division or modulus,
5635 then we cannot pass through this conversion. */
5636 || (code != MULT_EXPR
5637 && (TYPE_UNSIGNED (ctype)
5638 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5639 /* ... or op0 has undefined overflow while the type it is
5640 converted to does not, in which case doing the operation in the
5641 inner type would introduce undefined overflow. */
5642 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5643 && !TYPE_OVERFLOW_UNDEFINED (type))))
5644 break;
5646 /* Pass the constant down and see if we can make a simplification. If
5647 we can, replace this expression with the inner simplification for
5648 possible later conversion to our type or some other type. */
5649 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5650 && TREE_CODE (t2) == INTEGER_CST
5651 && !TREE_OVERFLOW (t2)
5652 && (0 != (t1 = extract_muldiv (op0, t2, code,
5653 code == MULT_EXPR
5654 ? ctype : NULL_TREE,
5655 strict_overflow_p))))
5656 return t1;
5657 break;
5659 case ABS_EXPR:
5660 /* If widening the type changes it from signed to unsigned, then we
5661 must avoid building ABS_EXPR itself as unsigned. */
5662 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5664 tree cstype = (*signed_type_for) (ctype);
5665 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5666 != 0)
5668 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5669 return fold_convert (ctype, t1);
5671 break;
5673 /* If the constant is negative, we cannot simplify this. */
5674 if (tree_int_cst_sgn (c) == -1)
5675 break;
5676 /* FALLTHROUGH */
5677 case NEGATE_EXPR:
5678 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5679 != 0)
5680 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5681 break;
5683 case MIN_EXPR: case MAX_EXPR:
5684 /* If widening the type changes the signedness, then we can't perform
5685 this optimization as that changes the result. */
5686 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5687 break;
5689 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5690 sub_strict_overflow_p = false;
5691 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5692 &sub_strict_overflow_p)) != 0
5693 && (t2 = extract_muldiv (op1, c, code, wide_type,
5694 &sub_strict_overflow_p)) != 0)
5696 if (tree_int_cst_sgn (c) < 0)
5697 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5698 if (sub_strict_overflow_p)
5699 *strict_overflow_p = true;
5700 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5701 fold_convert (ctype, t2));
5703 break;
5705 case LSHIFT_EXPR: case RSHIFT_EXPR:
5706 /* If the second operand is constant, this is a multiplication
5707 or floor division, by a power of two, so we can treat it that
5708 way unless the multiplier or divisor overflows. Signed
5709 left-shift overflow is implementation-defined rather than
5710 undefined in C90, so do not convert signed left shift into
5711 multiplication. */
5712 if (TREE_CODE (op1) == INTEGER_CST
5713 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5714 /* const_binop may not detect overflow correctly,
5715 so check for it explicitly here. */
5716 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5717 && TREE_INT_CST_HIGH (op1) == 0
5718 && 0 != (t1 = fold_convert (ctype,
5719 const_binop (LSHIFT_EXPR,
5720 size_one_node,
5721 op1)))
5722 && !TREE_OVERFLOW (t1))
5723 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5724 ? MULT_EXPR : FLOOR_DIV_EXPR,
5725 ctype,
5726 fold_convert (ctype, op0),
5727 t1),
5728 c, code, wide_type, strict_overflow_p);
5729 break;
5731 case PLUS_EXPR: case MINUS_EXPR:
5732 /* See if we can eliminate the operation on both sides. If we can, we
5733 can return a new PLUS or MINUS. If we can't, the only remaining
5734 cases where we can do anything are those where the second operand
5735 is a constant. */
5736 sub_strict_overflow_p = false;
5737 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5738 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5739 if (t1 != 0 && t2 != 0
5740 && (code == MULT_EXPR
5741 /* If not multiplication, we can only do this if both operands
5742 are divisible by c. */
5743 || (multiple_of_p (ctype, op0, c)
5744 && multiple_of_p (ctype, op1, c))))
5746 if (sub_strict_overflow_p)
5747 *strict_overflow_p = true;
5748 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5749 fold_convert (ctype, t2));
5752 /* If this was a subtraction, negate OP1 and set it to be an addition.
5753 This simplifies the logic below. */
5754 if (tcode == MINUS_EXPR)
5756 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5757 /* If OP1 was not easily negatable, the constant may be OP0. */
5758 if (TREE_CODE (op0) == INTEGER_CST)
5760 tree tem = op0;
5761 op0 = op1;
5762 op1 = tem;
5763 tem = t1;
5764 t1 = t2;
5765 t2 = tem;
5769 if (TREE_CODE (op1) != INTEGER_CST)
5770 break;
5772 /* If either OP1 or C is negative, this optimization is not safe for
5773 some of the division and remainder types while for others we need
5774 to change the code. */
5775 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5777 if (code == CEIL_DIV_EXPR)
5778 code = FLOOR_DIV_EXPR;
5779 else if (code == FLOOR_DIV_EXPR)
5780 code = CEIL_DIV_EXPR;
5781 else if (code != MULT_EXPR
5782 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5783 break;
5786 /* If it's a multiply or a division/modulus operation of a multiple
5787 of our constant, do the operation and verify it doesn't overflow. */
5788 if (code == MULT_EXPR
5789 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5791 op1 = const_binop (code, fold_convert (ctype, op1),
5792 fold_convert (ctype, c));
5793 /* We allow the constant to overflow with wrapping semantics. */
5794 if (op1 == 0
5795 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5796 break;
5798 else
5799 break;
5801 /* If we have an unsigned type that is not a sizetype, we cannot widen
5802 the operation, since doing so would change the result if the original
5803 computation overflowed. */
5804 if (TYPE_UNSIGNED (ctype)
5805 && ctype != type)
5806 break;
5808 /* If we were able to eliminate our operation from the first side,
5809 apply our operation to the second side and reform the PLUS. */
5810 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5811 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5813 /* The last case is when the operation is a multiply. In that case, we can
5814 apply the distributive law to commute the multiply and addition
5815 if the multiplication of the constants doesn't overflow. */
5816 if (code == MULT_EXPR)
5817 return fold_build2 (tcode, ctype,
5818 fold_build2 (code, ctype,
5819 fold_convert (ctype, op0),
5820 fold_convert (ctype, c)),
5821 op1);
5823 break;
5825 case MULT_EXPR:
5826 /* We have a special case here if we are doing something like
5827 (C * 8) % 4 since we know that's zero. */
5828 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5829 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5830 /* If the multiplication can overflow we cannot optimize this. */
5831 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5832 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5833 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5835 *strict_overflow_p = true;
5836 return omit_one_operand (type, integer_zero_node, op0);
5839 /* ... fall through ... */
5841 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5842 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5843 /* If we can extract our operation from the LHS, do so and return a
5844 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5845 do something only if the second operand is a constant. */
5846 if (same_p
5847 && (t1 = extract_muldiv (op0, c, code, wide_type,
5848 strict_overflow_p)) != 0)
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, op1));
5851 else if (tcode == MULT_EXPR && code == MULT_EXPR
5852 && (t1 = extract_muldiv (op1, c, code, wide_type,
5853 strict_overflow_p)) != 0)
5854 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5855 fold_convert (ctype, t1));
5856 else if (TREE_CODE (op1) != INTEGER_CST)
5857 return 0;
5859 /* If these are the same operation types, we can associate them
5860 assuming no overflow. */
5861 if (tcode == code)
5863 double_int mul;
5864 bool overflow_p;
5865 unsigned prec = TYPE_PRECISION (ctype);
5866 bool uns = TYPE_UNSIGNED (ctype);
5867 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5868 double_int dic = tree_to_double_int (c).ext (prec, uns);
5869 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5870 overflow_p = ((!uns && overflow_p)
5871 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5872 if (!double_int_fits_to_tree_p (ctype, mul)
5873 && ((uns && tcode != MULT_EXPR) || !uns))
5874 overflow_p = 1;
5875 if (!overflow_p)
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5877 double_int_to_tree (ctype, mul));
5880 /* If these operations "cancel" each other, we have the main
5881 optimizations of this pass, which occur when either constant is a
5882 multiple of the other, in which case we replace this with either
5883 an operation of CODE or TCODE.
5885 If we have an unsigned type, we cannot do this since it will change
5886 the result if the original computation overflowed. */
5887 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5888 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5889 || (tcode == MULT_EXPR
5890 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5891 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5892 && code != MULT_EXPR)))
5894 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5896 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5897 *strict_overflow_p = true;
5898 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5899 fold_convert (ctype,
5900 const_binop (TRUNC_DIV_EXPR,
5901 op1, c)));
5903 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5910 c, op1)));
5913 break;
5915 default:
5916 break;
5919 return 0;
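/* Illustrative sketch (editor's addition): the "cancelling" case above
   on concrete operands.  It requires undefined-overflow semantics: the
   inner multiplication is assumed not to wrap.  */

static int
example_muldiv_cancel (int x)
{
  int original = (x * 6) / 2;	/* MULT_EXPR under a TRUNC_DIV_EXPR */
  int folded = x * 3;		/* op1 is a multiple of c: 6 / 2 = 3 */
  return original == folded;	/* 1 whenever x * 6 does not overflow */
}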
5922 /* Return a node which has the indicated constant VALUE (either 0 or
5923 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5924 and is of the indicated TYPE. */
5926 tree
5927 constant_boolean_node (bool value, tree type)
5929 if (type == integer_type_node)
5930 return value ? integer_one_node : integer_zero_node;
5931 else if (type == boolean_type_node)
5932 return value ? boolean_true_node : boolean_false_node;
5933 else if (TREE_CODE (type) == VECTOR_TYPE)
5934 return build_vector_from_val (type,
5935 build_int_cst (TREE_TYPE (type),
5936 value ? -1 : 0));
5937 else
5938 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5942 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5943 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5944 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5945 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5946 COND is the first argument to CODE; otherwise (as in the example
5947 given here), it is the second argument. TYPE is the type of the
5948 original expression. Return NULL_TREE if no simplification is
5949 possible. */
5951 static tree
5952 fold_binary_op_with_conditional_arg (location_t loc,
5953 enum tree_code code,
5954 tree type, tree op0, tree op1,
5955 tree cond, tree arg, int cond_first_p)
5957 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5958 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5959 tree test, true_value, false_value;
5960 tree lhs = NULL_TREE;
5961 tree rhs = NULL_TREE;
5962 enum tree_code cond_code = COND_EXPR;
5964 if (TREE_CODE (cond) == COND_EXPR
5965 || TREE_CODE (cond) == VEC_COND_EXPR)
5967 test = TREE_OPERAND (cond, 0);
5968 true_value = TREE_OPERAND (cond, 1);
5969 false_value = TREE_OPERAND (cond, 2);
5970 /* If this operand throws an exception (and so has void type),
5971 it does not make sense to try to perform a logical or
5972 arithmetic operation involving it. */
5973 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5974 lhs = true_value;
5975 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5976 rhs = false_value;
5978 else
5980 tree testtype = TREE_TYPE (cond);
5981 test = cond;
5982 true_value = constant_boolean_node (true, testtype);
5983 false_value = constant_boolean_node (false, testtype);
5986 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
5987 cond_code = VEC_COND_EXPR;
5989 /* This transformation is only worthwhile if we don't have to wrap ARG
5990 in a SAVE_EXPR and the operation can be simplified without recursing
5991 on at least one of the branches once it's pushed inside the COND_EXPR. */
5992 if (!TREE_CONSTANT (arg)
5993 && (TREE_SIDE_EFFECTS (arg)
5994 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
5995 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5996 return NULL_TREE;
5998 arg = fold_convert_loc (loc, arg_type, arg);
5999 if (lhs == 0)
6001 true_value = fold_convert_loc (loc, cond_type, true_value);
6002 if (cond_first_p)
6003 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6004 else
6005 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6007 if (rhs == 0)
6009 false_value = fold_convert_loc (loc, cond_type, false_value);
6010 if (cond_first_p)
6011 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6012 else
6013 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6016 /* Check that we have simplified at least one of the branches. */
6017 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6018 return NULL_TREE;
6020 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
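/* Illustrative sketch (editor's addition): the rewrite performed by
   fold_binary_op_with_conditional_arg on scalars; both forms always
   compute the same value.  */

static int
example_cond_distribute (int a, int b, int x, int y)
{
  int original = a + (b ? x : y);
  int folded = b ? (a + x) : (a + y);
  return original == folded;	/* always 1 */
}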
6024 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6026 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6027 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6028 ADDEND is the same as X.
6030 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6031 and finite. The problematic cases are when X is zero, and its mode
6032 has signed zeros. In the case of rounding towards -infinity,
6033 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6034 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6036 bool
6037 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6039 if (!real_zerop (addend))
6040 return false;
6042 /* Don't allow the fold with -fsignaling-nans. */
6043 if (HONOR_SNANS (TYPE_MODE (type)))
6044 return false;
6046 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6047 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6048 return true;
6050 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6051 if (TREE_CODE (addend) == REAL_CST
6052 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6053 negate = !negate;
6055 /* The mode has signed zeros, and we have to honor their sign.
6056 In this situation, there is only one case we can return true for.
6057 X - 0 is the same as X unless rounding towards -infinity is
6058 supported. */
6059 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
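/* Illustrative sketch (editor's addition): why X + 0.0 cannot fold to
   X once signed zeros are honored.  Under the default round-to-nearest
   mode, -0.0 + 0.0 yields +0.0, so the sign of a zero operand is lost.  */

static int
example_signed_zero_addition (void)
{
  double x = -0.0;
  double sum = x + 0.0;		/* +0.0, not -0.0 */
  return __builtin_signbit (x) != __builtin_signbit (sum);	/* 1 */
}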
6062 /* Subroutine of fold() that checks comparisons of built-in math
6063 functions against real constants.
6065 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6066 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6067 is the type of the result and ARG0 and ARG1 are the operands of the
6068 comparison. ARG1 must be a TREE_REAL_CST.
6070 The function returns the constant folded tree if a simplification
6071 can be made, and NULL_TREE otherwise. */
6073 static tree
6074 fold_mathfn_compare (location_t loc,
6075 enum built_in_function fcode, enum tree_code code,
6076 tree type, tree arg0, tree arg1)
6078 REAL_VALUE_TYPE c;
6080 if (BUILTIN_SQRT_P (fcode))
6082 tree arg = CALL_EXPR_ARG (arg0, 0);
6083 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6085 c = TREE_REAL_CST (arg1);
6086 if (REAL_VALUE_NEGATIVE (c))
6088 /* sqrt(x) < y is always false, if y is negative. */
6089 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6090 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6092 /* sqrt(x) > y is always true, if y is negative and we
6093 don't care about NaNs, i.e. negative values of x. */
6094 if (code == NE_EXPR || !HONOR_NANS (mode))
6095 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6097 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6098 return fold_build2_loc (loc, GE_EXPR, type, arg,
6099 build_real (TREE_TYPE (arg), dconst0));
6101 else if (code == GT_EXPR || code == GE_EXPR)
6103 REAL_VALUE_TYPE c2;
6105 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6106 real_convert (&c2, mode, &c2);
6108 if (REAL_VALUE_ISINF (c2))
6110 /* sqrt(x) > y is x == +Inf, when y is very large. */
6111 if (HONOR_INFINITIES (mode))
6112 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6113 build_real (TREE_TYPE (arg), c2));
6115 /* sqrt(x) > y is always false, when y is very large
6116 and we don't care about infinities. */
6117 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6120 /* sqrt(x) > c is the same as x > c*c. */
6121 return fold_build2_loc (loc, code, type, arg,
6122 build_real (TREE_TYPE (arg), c2));
6124 else if (code == LT_EXPR || code == LE_EXPR)
6126 REAL_VALUE_TYPE c2;
6128 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6129 real_convert (&c2, mode, &c2);
6131 if (REAL_VALUE_ISINF (c2))
6133 /* sqrt(x) < y is always true, when y is a very large
6134 value and we don't care about NaNs or Infinities. */
6135 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6136 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6138 /* sqrt(x) < y is x != +Inf when y is very large and we
6139 don't care about NaNs. */
6140 if (! HONOR_NANS (mode))
6141 return fold_build2_loc (loc, NE_EXPR, type, arg,
6142 build_real (TREE_TYPE (arg), c2));
6144 /* sqrt(x) < y is x >= 0 when y is very large and we
6145 don't care about Infinities. */
6146 if (! HONOR_INFINITIES (mode))
6147 return fold_build2_loc (loc, GE_EXPR, type, arg,
6148 build_real (TREE_TYPE (arg), dconst0));
6150 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6151 arg = save_expr (arg);
6152 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6153 fold_build2_loc (loc, GE_EXPR, type, arg,
6154 build_real (TREE_TYPE (arg),
6155 dconst0)),
6156 fold_build2_loc (loc, NE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg),
6158 c2)));
6161 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6162 if (! HONOR_NANS (mode))
6163 return fold_build2_loc (loc, code, type, arg,
6164 build_real (TREE_TYPE (arg), c2));
6166 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6167 arg = save_expr (arg);
6168 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6169 fold_build2_loc (loc, GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg),
6171 dconst0)),
6172 fold_build2_loc (loc, code, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 c2)));
6178 return NULL_TREE;
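/* Illustrative sketch (editor's addition): the c*c rewrite above on a
   concrete constant.  c2 = 3.0 * 3.0 is computed at fold time, and no
   NaN caveat is needed for GT_EXPR: a NaN argument makes both the sqrt
   form and the squared form false.  */

static int
example_sqrt_compare (double x)
{
  /* sqrt (x) > 3.0 is rewritten to the call-free form below.  */
  return x > 9.0;
}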
6181 /* Subroutine of fold() that optimizes comparisons against Infinities,
6182 either +Inf or -Inf.
6184 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6185 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6186 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6188 The function returns the constant folded tree if a simplification
6189 can be made, and NULL_TREE otherwise. */
6191 static tree
6192 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6193 tree arg0, tree arg1)
6195 enum machine_mode mode;
6196 REAL_VALUE_TYPE max;
6197 tree temp;
6198 bool neg;
6200 mode = TYPE_MODE (TREE_TYPE (arg0));
6202 /* For negative infinity swap the sense of the comparison. */
6203 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6204 if (neg)
6205 code = swap_tree_comparison (code);
6207 switch (code)
6209 case GT_EXPR:
6210 /* x > +Inf is always false, if we ignore sNaNs. */
6211 if (HONOR_SNANS (mode))
6212 return NULL_TREE;
6213 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6215 case LE_EXPR:
6216 /* x <= +Inf is always true, if we don't care about NaNs. */
6217 if (! HONOR_NANS (mode))
6218 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6220 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6221 arg0 = save_expr (arg0);
6222 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6224 case EQ_EXPR:
6225 case GE_EXPR:
6226 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6227 real_maxval (&max, neg, mode);
6228 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6229 arg0, build_real (TREE_TYPE (arg0), max));
6231 case LT_EXPR:
6232 /* x < +Inf is always equal to x <= DBL_MAX. */
6233 real_maxval (&max, neg, mode);
6234 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6235 arg0, build_real (TREE_TYPE (arg0), max));
6237 case NE_EXPR:
6238 /* x != +Inf is always equal to !(x > DBL_MAX). */
6239 real_maxval (&max, neg, mode);
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6242 arg0, build_real (TREE_TYPE (arg0), max));
6244 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6245 arg0, build_real (TREE_TYPE (arg0), max));
6246 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6248 default:
6249 break;
6252 return NULL_TREE;
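/* Illustrative sketch (editor's addition): the DBL_MAX rewrite above,
   using GCC's predefined __DBL_MAX__ macro.  */

static int
example_inf_compare (double x)
{
  /* x < +Inf folds to x <= DBL_MAX: a NaN fails both forms, +Inf
     fails both, and every other value satisfies both.  */
  return x <= __DBL_MAX__;
}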
6255 /* Subroutine of fold() that optimizes comparisons of a division by
6256 a nonzero integer constant against an integer constant, i.e.
6257 X/C1 op C2.
6259 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6260 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6261 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6263 The function returns the constant folded tree if a simplification
6264 can be made, and NULL_TREE otherwise. */
6266 static tree
6267 fold_div_compare (location_t loc,
6268 enum tree_code code, tree type, tree arg0, tree arg1)
6270 tree prod, tmp, hi, lo;
6271 tree arg00 = TREE_OPERAND (arg0, 0);
6272 tree arg01 = TREE_OPERAND (arg0, 1);
6273 double_int val;
6274 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6275 bool neg_overflow;
6276 bool overflow;
6278 /* We have to do this the hard way to detect unsigned overflow.
6279 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6280 val = TREE_INT_CST (arg01)
6281 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6282 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6283 neg_overflow = false;
6285 if (unsigned_p)
6287 tmp = int_const_binop (MINUS_EXPR, arg01,
6288 build_int_cst (TREE_TYPE (arg01), 1));
6289 lo = prod;
6291 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6292 val = TREE_INT_CST (prod)
6293 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6294 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6295 -1, overflow | TREE_OVERFLOW (prod));
6297 else if (tree_int_cst_sgn (arg01) >= 0)
6299 tmp = int_const_binop (MINUS_EXPR, arg01,
6300 build_int_cst (TREE_TYPE (arg01), 1));
6301 switch (tree_int_cst_sgn (arg1))
6303 case -1:
6304 neg_overflow = true;
6305 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6306 hi = prod;
6307 break;
6309 case 0:
6310 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6311 hi = tmp;
6312 break;
6314 case 1:
6315 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6316 lo = prod;
6317 break;
6319 default:
6320 gcc_unreachable ();
6323 else
6325 /* A negative divisor reverses the relational operators. */
6326 code = swap_tree_comparison (code);
6328 tmp = int_const_binop (PLUS_EXPR, arg01,
6329 build_int_cst (TREE_TYPE (arg01), 1));
6330 switch (tree_int_cst_sgn (arg1))
6332 case -1:
6333 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6334 lo = prod;
6335 break;
6337 case 0:
6338 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6339 lo = tmp;
6340 break;
6342 case 1:
6343 neg_overflow = true;
6344 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6345 hi = prod;
6346 break;
6348 default:
6349 gcc_unreachable ();
6353 switch (code)
6355 case EQ_EXPR:
6356 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6357 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6358 if (TREE_OVERFLOW (hi))
6359 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6360 if (TREE_OVERFLOW (lo))
6361 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6362 return build_range_check (loc, type, arg00, 1, lo, hi);
6364 case NE_EXPR:
6365 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6366 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6367 if (TREE_OVERFLOW (hi))
6368 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6369 if (TREE_OVERFLOW (lo))
6370 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6371 return build_range_check (loc, type, arg00, 0, lo, hi);
6373 case LT_EXPR:
6374 if (TREE_OVERFLOW (lo))
6376 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6377 return omit_one_operand_loc (loc, type, tmp, arg00);
6379 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6381 case LE_EXPR:
6382 if (TREE_OVERFLOW (hi))
6384 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6385 return omit_one_operand_loc (loc, type, tmp, arg00);
6387 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6389 case GT_EXPR:
6390 if (TREE_OVERFLOW (hi))
6392 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6393 return omit_one_operand_loc (loc, type, tmp, arg00);
6395 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6397 case GE_EXPR:
6398 if (TREE_OVERFLOW (lo))
6400 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6401 return omit_one_operand_loc (loc, type, tmp, arg00);
6403 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6405 default:
6406 break;
6409 return NULL_TREE;
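/* Illustrative sketch (editor's addition): the EQ_EXPR range check
   above on concrete constants.  */

static int
example_div_compare (int x)
{
  int original = (x / 3 == 2);
  int folded = (x >= 6 && x <= 8);	/* lo = 6, hi = 8 */
  return original == folded;		/* always 1 for truncating division */
}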
6413 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6414 equality/inequality test, then return a simplified form of the test
6415 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6416 result type. */
6418 static tree
6419 fold_single_bit_test_into_sign_test (location_t loc,
6420 enum tree_code code, tree arg0, tree arg1,
6421 tree result_type)
6423 /* If this is testing a single bit, we can optimize the test. */
6424 if ((code == NE_EXPR || code == EQ_EXPR)
6425 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6426 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6428 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6429 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6430 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6432 if (arg00 != NULL_TREE
6433 /* This is only a win if casting to a signed type is cheap,
6434 i.e. when arg00's type is not a partial mode. */
6435 && TYPE_PRECISION (TREE_TYPE (arg00))
6436 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6438 tree stype = signed_type_for (TREE_TYPE (arg00));
6439 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6440 result_type,
6441 fold_convert_loc (loc, stype, arg00),
6442 build_int_cst (stype, 0));
6446 return NULL_TREE;
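/* Illustrative sketch (editor's addition): the sign-bit rewrite above,
   assuming 32-bit int on a two's-complement target.  */

static int
example_single_bit_sign_test (unsigned x)
{
  int original = ((x & 0x80000000u) != 0);
  int folded = ((int) x < 0);	/* the form the rewrite produces */
  return original == folded;	/* always 1 */
}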
6449 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6450 equality/inequality test, then return a simplified form of
6451 the test using shifts and logical operations. Otherwise return
6452 NULL. RESULT_TYPE is the desired result type. */
6454 tree
6455 fold_single_bit_test (location_t loc, enum tree_code code,
6456 tree arg0, tree arg1, tree result_type)
6458 /* If this is testing a single bit, we can optimize the test. */
6459 if ((code == NE_EXPR || code == EQ_EXPR)
6460 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6461 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6463 tree inner = TREE_OPERAND (arg0, 0);
6464 tree type = TREE_TYPE (arg0);
6465 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6466 enum machine_mode operand_mode = TYPE_MODE (type);
6467 int ops_unsigned;
6468 tree signed_type, unsigned_type, intermediate_type;
6469 tree tem, one;
6471 /* First, see if we can fold the single bit test into a sign-bit
6472 test. */
6473 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6474 result_type);
6475 if (tem)
6476 return tem;
6478 /* Otherwise we have (A & C) != 0 where C is a single bit,
6479 convert that into ((A >> C2) & 1), where C2 = log2(C).
6480 Similarly for (A & C) == 0. */
6482 /* If INNER is a right shift by a constant and it plus BITNUM does
6483 not overflow, adjust BITNUM and INNER. */
6484 if (TREE_CODE (inner) == RSHIFT_EXPR
6485 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6486 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6487 && bitnum < TYPE_PRECISION (type)
6488 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6489 bitnum - TYPE_PRECISION (type)))
6491 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6492 inner = TREE_OPERAND (inner, 0);
6495 /* If we are going to be able to omit the AND below, we must do our
6496 operations as unsigned. If we must use the AND, we have a choice.
6497 Normally unsigned is faster, but for some machines signed is. */
6498 #ifdef LOAD_EXTEND_OP
6499 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6500 && !flag_syntax_only) ? 0 : 1;
6501 #else
6502 ops_unsigned = 1;
6503 #endif
6505 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6506 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6507 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6508 inner = fold_convert_loc (loc, intermediate_type, inner);
6510 if (bitnum != 0)
6511 inner = build2 (RSHIFT_EXPR, intermediate_type,
6512 inner, size_int (bitnum));
6514 one = build_int_cst (intermediate_type, 1);
6516 if (code == EQ_EXPR)
6517 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6519 /* Put the AND last so it can combine with more things. */
6520 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6522 /* Make sure to return the proper type. */
6523 inner = fold_convert_loc (loc, result_type, inner);
6525 return inner;
6527 return NULL_TREE;
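/* Illustrative sketch (editor's addition): the shift-and-mask form
   produced above for a bit other than the sign bit.  */

static int
example_single_bit_test (unsigned x)
{
  int original = ((x & 8) != 0);	/* C = 8, so C2 = log2 (C) = 3 */
  int folded = ((x >> 3) & 1);
  return original == folded;		/* always 1 */
}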
6530 /* Check whether we are allowed to reorder operands arg0 and arg1,
6531 such that the evaluation of arg1 occurs before arg0. */
6533 static bool
6534 reorder_operands_p (const_tree arg0, const_tree arg1)
6536 if (! flag_evaluation_order)
6537 return true;
6538 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6539 return true;
6540 return ! TREE_SIDE_EFFECTS (arg0)
6541 && ! TREE_SIDE_EFFECTS (arg1);
6544 /* Test whether it is preferable to swap two operands, ARG0 and
6545 ARG1, for example because ARG0 is an integer constant and ARG1
6546 isn't. If REORDER is true, only recommend swapping if we can
6547 evaluate the operands in reverse order. */
6549 bool
6550 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6552 STRIP_SIGN_NOPS (arg0);
6553 STRIP_SIGN_NOPS (arg1);
6555 if (TREE_CODE (arg1) == INTEGER_CST)
6556 return 0;
6557 if (TREE_CODE (arg0) == INTEGER_CST)
6558 return 1;
6560 if (TREE_CODE (arg1) == REAL_CST)
6561 return 0;
6562 if (TREE_CODE (arg0) == REAL_CST)
6563 return 1;
6565 if (TREE_CODE (arg1) == FIXED_CST)
6566 return 0;
6567 if (TREE_CODE (arg0) == FIXED_CST)
6568 return 1;
6570 if (TREE_CODE (arg1) == COMPLEX_CST)
6571 return 0;
6572 if (TREE_CODE (arg0) == COMPLEX_CST)
6573 return 1;
6575 if (TREE_CONSTANT (arg1))
6576 return 0;
6577 if (TREE_CONSTANT (arg0))
6578 return 1;
6580 if (optimize_function_for_size_p (cfun))
6581 return 0;
6583 if (reorder && flag_evaluation_order
6584 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6585 return 0;
6587 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6588 for commutative and comparison operators. Ensuring a canonical
6589 form allows the optimizers to find additional redundancies without
6590 having to explicitly check for both orderings. */
6591 if (TREE_CODE (arg0) == SSA_NAME
6592 && TREE_CODE (arg1) == SSA_NAME
6593 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6594 return 1;
6596 /* Put SSA_NAMEs last. */
6597 if (TREE_CODE (arg1) == SSA_NAME)
6598 return 0;
6599 if (TREE_CODE (arg0) == SSA_NAME)
6600 return 1;
6602 /* Put variables last. */
6603 if (DECL_P (arg1))
6604 return 0;
6605 if (DECL_P (arg0))
6606 return 1;
6608 return 0;
6611 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6612 ARG0 is extended to a wider type. */
6614 static tree
6615 fold_widened_comparison (location_t loc, enum tree_code code,
6616 tree type, tree arg0, tree arg1)
6618 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6619 tree arg1_unw;
6620 tree shorter_type, outer_type;
6621 tree min, max;
6622 bool above, below;
6624 if (arg0_unw == arg0)
6625 return NULL_TREE;
6626 shorter_type = TREE_TYPE (arg0_unw);
6628 #ifdef HAVE_canonicalize_funcptr_for_compare
6629 /* Disable this optimization if we're casting a function pointer
6630 type on targets that require function pointer canonicalization. */
6631 if (HAVE_canonicalize_funcptr_for_compare
6632 && TREE_CODE (shorter_type) == POINTER_TYPE
6633 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6634 return NULL_TREE;
6635 #endif
6637 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6638 return NULL_TREE;
6640 arg1_unw = get_unwidened (arg1, NULL_TREE);
6642 /* If possible, express the comparison in the shorter mode. */
6643 if ((code == EQ_EXPR || code == NE_EXPR
6644 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6645 && (TREE_TYPE (arg1_unw) == shorter_type
6646 || ((TYPE_PRECISION (shorter_type)
6647 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6648 && (TYPE_UNSIGNED (shorter_type)
6649 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6650 || (TREE_CODE (arg1_unw) == INTEGER_CST
6651 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6652 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6653 && int_fits_type_p (arg1_unw, shorter_type))))
6654 return fold_build2_loc (loc, code, type, arg0_unw,
6655 fold_convert_loc (loc, shorter_type, arg1_unw));
6657 if (TREE_CODE (arg1_unw) != INTEGER_CST
6658 || TREE_CODE (shorter_type) != INTEGER_TYPE
6659 || !int_fits_type_p (arg1_unw, shorter_type))
6660 return NULL_TREE;
6662 /* If we are comparing with an integer that does not fit into the range
6663 of the shorter type, the result is known. */
6664 outer_type = TREE_TYPE (arg1_unw);
6665 min = lower_bound_in_type (outer_type, shorter_type);
6666 max = upper_bound_in_type (outer_type, shorter_type);
6668 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6669 max, arg1_unw));
6670 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6671 arg1_unw, min));
6673 switch (code)
6675 case EQ_EXPR:
6676 if (above || below)
6677 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6678 break;
6680 case NE_EXPR:
6681 if (above || below)
6682 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6683 break;
6685 case LT_EXPR:
6686 case LE_EXPR:
6687 if (above)
6688 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6689 else if (below)
6690 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6692 case GT_EXPR:
6693 case GE_EXPR:
6694 if (above)
6695 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6696 else if (below)
6697 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6699 default:
6700 break;
6703 return NULL_TREE;
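/* Illustrative sketch (editor's addition): a comparison that the
   bounds test above decides statically, assuming a 16-bit short.  */

static int
example_widened_compare (short s)
{
  /* 70000 is above the maximum value of the shorter type, so the
     whole comparison folds to constant 0.  */
  return (int) s == 70000;
}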
6706 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6707 ARG0 just the signedness is changed. */
6709 static tree
6710 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6711 tree arg0, tree arg1)
6713 tree arg0_inner;
6714 tree inner_type, outer_type;
6716 if (!CONVERT_EXPR_P (arg0))
6717 return NULL_TREE;
6719 outer_type = TREE_TYPE (arg0);
6720 arg0_inner = TREE_OPERAND (arg0, 0);
6721 inner_type = TREE_TYPE (arg0_inner);
6723 #ifdef HAVE_canonicalize_funcptr_for_compare
6724 /* Disable this optimization if we're casting a function pointer
6725 type on targets that require function pointer canonicalization. */
6726 if (HAVE_canonicalize_funcptr_for_compare
6727 && TREE_CODE (inner_type) == POINTER_TYPE
6728 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6729 return NULL_TREE;
6730 #endif
6732 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6733 return NULL_TREE;
6735 if (TREE_CODE (arg1) != INTEGER_CST
6736 && !(CONVERT_EXPR_P (arg1)
6737 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6738 return NULL_TREE;
6740 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6741 && code != NE_EXPR
6742 && code != EQ_EXPR)
6743 return NULL_TREE;
6745 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6746 return NULL_TREE;
6748 if (TREE_CODE (arg1) == INTEGER_CST)
6749 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6750 0, TREE_OVERFLOW (arg1));
6751 else
6752 arg1 = fold_convert_loc (loc, inner_type, arg1);
6754 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
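/* Illustrative sketch (editor's addition): equality is unaffected by a
   pure change of signedness, so the cast can be stripped.  */

static int
example_sign_changed_compare (int x)
{
  int original = ((unsigned) x == 5u);
  int folded = (x == 5);	/* comparison done in the inner type */
  return original == folded;	/* always 1 */
}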
6757 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6758 the step of the array. Reconstructs s and delta in the case of s *
6759 delta being an integer constant (and thus already folded). ADDR is
6760 the address. OP1 is the multiplicative expression. If the
6761 function succeeds, the new address expression is returned.
6762 Otherwise NULL_TREE is returned. LOC is the location of the
6763 resulting expression. */
6765 static tree
6766 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6768 tree s, delta, step;
6769 tree ref = TREE_OPERAND (addr, 0), pref;
6770 tree ret, pos;
6771 tree itype;
6772 bool mdim = false;
6774 /* Strip the nops that might be added when converting op1 to sizetype. */
6775 STRIP_NOPS (op1);
6777 /* Canonicalize op1 into a possibly non-constant delta
6778 and an INTEGER_CST s. */
6779 if (TREE_CODE (op1) == MULT_EXPR)
6781 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6783 STRIP_NOPS (arg0);
6784 STRIP_NOPS (arg1);
6786 if (TREE_CODE (arg0) == INTEGER_CST)
6788 s = arg0;
6789 delta = arg1;
6791 else if (TREE_CODE (arg1) == INTEGER_CST)
6793 s = arg1;
6794 delta = arg0;
6796 else
6797 return NULL_TREE;
6799 else if (TREE_CODE (op1) == INTEGER_CST)
6801 delta = op1;
6802 s = NULL_TREE;
6804 else
6806 /* Treat op1 as delta * 1. */
6807 delta = op1;
6808 s = integer_one_node;
6811 /* Handle &x.array the same as we would handle &x.array[0]. */
6812 if (TREE_CODE (ref) == COMPONENT_REF
6813 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6815 tree domain;
6817 /* Remember if this was a multi-dimensional array. */
6818 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6819 mdim = true;
6821 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6822 if (! domain)
6823 goto cont;
6824 itype = TREE_TYPE (domain);
6826 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6827 if (TREE_CODE (step) != INTEGER_CST)
6828 goto cont;
6830 if (s)
6832 if (! tree_int_cst_equal (step, s))
6833 goto cont;
6835 else
6837 /* Check whether delta is a multiple of step. */
6838 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6839 if (! tmp)
6840 goto cont;
6841 delta = tmp;
6844 /* Only fold here if we can verify we do not overflow one
6845 dimension of a multi-dimensional array. */
6846 if (mdim)
6848 tree tmp;
6850 if (!TYPE_MIN_VALUE (domain)
6851 || !TYPE_MAX_VALUE (domain)
6852 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6853 goto cont;
6855 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6856 fold_convert_loc (loc, itype,
6857 TYPE_MIN_VALUE (domain)),
6858 fold_convert_loc (loc, itype, delta));
6859 if (TREE_CODE (tmp) != INTEGER_CST
6860 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6861 goto cont;
6864 /* We found a suitable component reference. */
6866 pref = TREE_OPERAND (addr, 0);
6867 ret = copy_node (pref);
6868 SET_EXPR_LOCATION (ret, loc);
6870 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6871 fold_build2_loc
6872 (loc, PLUS_EXPR, itype,
6873 fold_convert_loc (loc, itype,
6874 TYPE_MIN_VALUE
6875 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6876 fold_convert_loc (loc, itype, delta)),
6877 NULL_TREE, NULL_TREE);
6878 return build_fold_addr_expr_loc (loc, ret);
6881 cont:
6883 for (;; ref = TREE_OPERAND (ref, 0))
6885 if (TREE_CODE (ref) == ARRAY_REF)
6887 tree domain;
6889 /* Remember if this was a multi-dimensional array. */
6890 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6891 mdim = true;
6893 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6894 if (! domain)
6895 continue;
6896 itype = TREE_TYPE (domain);
6898 step = array_ref_element_size (ref);
6899 if (TREE_CODE (step) != INTEGER_CST)
6900 continue;
6902 if (s)
6904 if (! tree_int_cst_equal (step, s))
6905 continue;
6907 else
6909 /* Check whether delta is a multiple of step. */
6910 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6911 if (! tmp)
6912 continue;
6913 delta = tmp;
6916 /* Only fold here if we can verify we do not overflow one
6917 dimension of a multi-dimensional array. */
6918 if (mdim)
6920 tree tmp;
6922 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6923 || !TYPE_MAX_VALUE (domain)
6924 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6925 continue;
6927 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6928 fold_convert_loc (loc, itype,
6929 TREE_OPERAND (ref, 1)),
6930 fold_convert_loc (loc, itype, delta));
6931 if (!tmp
6932 || TREE_CODE (tmp) != INTEGER_CST
6933 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6934 continue;
6937 break;
6939 else
6940 mdim = false;
6942 if (!handled_component_p (ref))
6943 return NULL_TREE;
6946 /* We found a suitable array reference; copy everything up to it,
6947 and replace the index. */
6949 pref = TREE_OPERAND (addr, 0);
6950 ret = copy_node (pref);
6951 SET_EXPR_LOCATION (ret, loc);
6952 pos = ret;
6954 while (pref != ref)
6956 pref = TREE_OPERAND (pref, 0);
6957 TREE_OPERAND (pos, 0) = copy_node (pref);
6958 pos = TREE_OPERAND (pos, 0);
6961 TREE_OPERAND (pos, 1)
6962 = fold_build2_loc (loc, PLUS_EXPR, itype,
6963 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6964 fold_convert_loc (loc, itype, delta));
6965 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
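/* Illustrative sketch (editor's addition): the pointer arithmetic the
   function rewrites, shown on a concrete array.  Both expressions
   denote the same element provided i + j stays in bounds.  */

static int *
example_move_mult_to_index (int a[16], int i, int j)
{
  /* &a[i] + j scales j by sizeof (int); the rewritten form is the
     plain index &a[i + j].  */
  return &a[i] + j;
}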
6969 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6970 means A >= Y && A != MAX, but in this case we know that
6971 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6973 static tree
6974 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6976 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6978 if (TREE_CODE (bound) == LT_EXPR)
6979 a = TREE_OPERAND (bound, 0);
6980 else if (TREE_CODE (bound) == GT_EXPR)
6981 a = TREE_OPERAND (bound, 1);
6982 else
6983 return NULL_TREE;
6985 typea = TREE_TYPE (a);
6986 if (!INTEGRAL_TYPE_P (typea)
6987 && !POINTER_TYPE_P (typea))
6988 return NULL_TREE;
6990 if (TREE_CODE (ineq) == LT_EXPR)
6992 a1 = TREE_OPERAND (ineq, 1);
6993 y = TREE_OPERAND (ineq, 0);
6995 else if (TREE_CODE (ineq) == GT_EXPR)
6997 a1 = TREE_OPERAND (ineq, 0);
6998 y = TREE_OPERAND (ineq, 1);
7000 else
7001 return NULL_TREE;
7003 if (TREE_TYPE (a1) != typea)
7004 return NULL_TREE;
7006 if (POINTER_TYPE_P (typea))
7008 /* Convert the pointers to a signed integer type before taking the difference. */
7009 tree ta = fold_convert_loc (loc, ssizetype, a);
7010 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7011 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7013 else
7014 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7016 if (!diff || !integer_onep (diff))
7017 return NULL_TREE;
7019 return fold_build2_loc (loc, GE_EXPR, type, a, y);
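/* Illustrative sketch (editor's addition): the rewrite above on ints.
   Given a < x, a + 1 cannot exceed the maximum, so a + 1 > y collapses
   to the non-sharp a >= y.  */

static int
example_nonsharp_ineq (int a, int x, int y)
{
  int original = (a < x) && (a + 1 > y);
  int folded = (a < x) && (a >= y);
  return original == folded;	/* 1 whenever a + 1 does not overflow */
}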
7022 /* Fold a sum or difference in which at least one operand is a multiplication.
7023 Returns the folded tree or NULL if no simplification could be made. */
7025 static tree
7026 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7027 tree arg0, tree arg1)
7029 tree arg00, arg01, arg10, arg11;
7030 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7032 /* (A * C) +- (B * C) -> (A+-B) * C.
7033 (A * C) +- A -> A * (C+-1).
7034 We are most concerned about the case where C is a constant,
7035 but other combinations show up during loop reduction. Since
7036 it is not difficult, try all four possibilities. */
7038 if (TREE_CODE (arg0) == MULT_EXPR)
7040 arg00 = TREE_OPERAND (arg0, 0);
7041 arg01 = TREE_OPERAND (arg0, 1);
7043 else if (TREE_CODE (arg0) == INTEGER_CST)
7045 arg00 = build_one_cst (type);
7046 arg01 = arg0;
7048 else
7050 /* We cannot generate constant 1 for fract. */
7051 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7052 return NULL_TREE;
7053 arg00 = arg0;
7054 arg01 = build_one_cst (type);
7056 if (TREE_CODE (arg1) == MULT_EXPR)
7058 arg10 = TREE_OPERAND (arg1, 0);
7059 arg11 = TREE_OPERAND (arg1, 1);
7061 else if (TREE_CODE (arg1) == INTEGER_CST)
7063 arg10 = build_one_cst (type);
7064 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7065 the purpose of this canonicalization. */
7066 if (TREE_INT_CST_HIGH (arg1) == -1
7067 && negate_expr_p (arg1)
7068 && code == PLUS_EXPR)
7070 arg11 = negate_expr (arg1);
7071 code = MINUS_EXPR;
7073 else
7074 arg11 = arg1;
7076 else
7078 /* We cannot generate constant 1 for fract. */
7079 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7080 return NULL_TREE;
7081 arg10 = arg1;
7082 arg11 = build_one_cst (type);
7084 same = NULL_TREE;
7086 if (operand_equal_p (arg01, arg11, 0))
7087 same = arg01, alt0 = arg00, alt1 = arg10;
7088 else if (operand_equal_p (arg00, arg10, 0))
7089 same = arg00, alt0 = arg01, alt1 = arg11;
7090 else if (operand_equal_p (arg00, arg11, 0))
7091 same = arg00, alt0 = arg01, alt1 = arg10;
7092 else if (operand_equal_p (arg01, arg10, 0))
7093 same = arg01, alt0 = arg00, alt1 = arg11;
7095 /* No identical multiplicands; see if we can find a common
7096 power-of-two factor in non-power-of-two multiplies. This
7097 can help in multi-dimensional array access. */
7098 else if (host_integerp (arg01, 0)
7099 && host_integerp (arg11, 0))
7101 HOST_WIDE_INT int01, int11, tmp;
7102 bool swap = false;
7103 tree maybe_same;
7104 int01 = TREE_INT_CST_LOW (arg01);
7105 int11 = TREE_INT_CST_LOW (arg11);
7107 /* Move min of absolute values to int11. */
7108 if (absu_hwi (int01) < absu_hwi (int11))
7110 tmp = int01, int01 = int11, int11 = tmp;
7111 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7112 maybe_same = arg01;
7113 swap = true;
7115 else
7116 maybe_same = arg11;
7118 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7119 /* The remainder should not be a constant, otherwise we
7120 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which increases
7121 the number of multiplications necessary. */
7122 && TREE_CODE (arg10) != INTEGER_CST)
7124 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7125 build_int_cst (TREE_TYPE (arg00),
7126 int01 / int11));
7127 alt1 = arg10;
7128 same = maybe_same;
7129 if (swap)
7130 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7134 if (same)
7135 return fold_build2_loc (loc, MULT_EXPR, type,
7136 fold_build2_loc (loc, code, type,
7137 fold_convert_loc (loc, type, alt0),
7138 fold_convert_loc (loc, type, alt1)),
7139 fold_convert_loc (loc, type, same));
7141 return NULL_TREE;
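/* Illustrative sketch (editor's addition): the two factorings above on
   concrete operands.  */

static int
example_plusminus_mult (int x, int i, int j)
{
  int a = x * 3 + x * 5;	/* (A * C) + (B * C) -> (A + B) * C: x * 8 */
  int b = i * 4 + j * 2;	/* common power-of-two factor: (i * 2 + j) * 2 */
  return a + b;
}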
7144 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7145 specified by EXPR into the buffer PTR of length LEN bytes.
7146 Return the number of bytes placed in the buffer, or zero
7147 upon failure. */
7149 static int
7150 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7152 tree type = TREE_TYPE (expr);
7153 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7154 int byte, offset, word, words;
7155 unsigned char value;
7157 if (total_bytes > len)
7158 return 0;
7159 words = total_bytes / UNITS_PER_WORD;
7161 for (byte = 0; byte < total_bytes; byte++)
7163 int bitpos = byte * BITS_PER_UNIT;
7164 if (bitpos < HOST_BITS_PER_WIDE_INT)
7165 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7166 else
7167 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7168 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7170 if (total_bytes > UNITS_PER_WORD)
7172 word = byte / UNITS_PER_WORD;
7173 if (WORDS_BIG_ENDIAN)
7174 word = (words - 1) - word;
7175 offset = word * UNITS_PER_WORD;
7176 if (BYTES_BIG_ENDIAN)
7177 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7178 else
7179 offset += byte % UNITS_PER_WORD;
7181 else
7182 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7183 ptr[offset] = value;
7185 return total_bytes;
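/* Illustrative sketch (editor's addition): the per-byte extraction
   loop above, specialized to a 4-byte little-endian value so the word
   and byte shuffling drops out.  */

static int
example_encode_int_le (unsigned value, unsigned char *ptr, int len)
{
  int byte;
  if (len < 4)
    return 0;			/* mirrors the total_bytes > len check */
  for (byte = 0; byte < 4; byte++)
    ptr[byte] = (unsigned char) (value >> (byte * 8));
  return 4;
}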
7189 /* Subroutine of native_encode_expr. Encode the REAL_CST
7190 specified by EXPR into the buffer PTR of length LEN bytes.
7191 Return the number of bytes placed in the buffer, or zero
7192 upon failure. */
7194 static int
7195 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7197 tree type = TREE_TYPE (expr);
7198 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7199 int byte, offset, word, words, bitpos;
7200 unsigned char value;
7202 /* There are always 32 bits in each long, no matter the size of
7203 the host's long. We handle floating point representations with
7204 up to 192 bits. */
7205 long tmp[6];
7207 if (total_bytes > len)
7208 return 0;
7209 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7211 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7213 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7214 bitpos += BITS_PER_UNIT)
7216 byte = (bitpos / BITS_PER_UNIT) & 3;
7217 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7219 if (UNITS_PER_WORD < 4)
7221 word = byte / UNITS_PER_WORD;
7222 if (WORDS_BIG_ENDIAN)
7223 word = (words - 1) - word;
7224 offset = word * UNITS_PER_WORD;
7225 if (BYTES_BIG_ENDIAN)
7226 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7227 else
7228 offset += byte % UNITS_PER_WORD;
7230 else
7231 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7232 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7234 return total_bytes;
7237 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7238 specified by EXPR into the buffer PTR of length LEN bytes.
7239 Return the number of bytes placed in the buffer, or zero
7240 upon failure. */
7242 static int
7243 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7245 int rsize, isize;
7246 tree part;
7248 part = TREE_REALPART (expr);
7249 rsize = native_encode_expr (part, ptr, len);
7250 if (rsize == 0)
7251 return 0;
7252 part = TREE_IMAGPART (expr);
7253 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7254 if (isize != rsize)
7255 return 0;
7256 return rsize + isize;
7260 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7265 static int
7266 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7268 unsigned i, count;
7269 int size, offset;
7270 tree itype, elem;
7272 offset = 0;
7273 count = VECTOR_CST_NELTS (expr);
7274 itype = TREE_TYPE (TREE_TYPE (expr));
7275 size = GET_MODE_SIZE (TYPE_MODE (itype));
7276 for (i = 0; i < count; i++)
7278 elem = VECTOR_CST_ELT (expr, i);
7279 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7280 return 0;
7281 offset += size;
7283 return offset;
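/* Illustrative sketch, not part of the original sources: for a
   V4SImode VECTOR_CST the loop above encodes the four 4-byte elements
   back to back, so on success

     native_encode_vector (v4si_cst, ptr, len)   /* returns 16 */

   with element I occupying bytes 4*I .. 4*I+3; a single element that
   fails to encode makes the whole vector fail.  */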
7287 /* Subroutine of native_encode_expr. Encode the STRING_CST
7288 specified by EXPR into the buffer PTR of length LEN bytes.
7289 Return the number of bytes placed in the buffer, or zero
7290 upon failure. */
7292 static int
7293 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7295 tree type = TREE_TYPE (expr);
7296 HOST_WIDE_INT total_bytes;
7298 if (TREE_CODE (type) != ARRAY_TYPE
7299 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7300 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7301 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7302 return 0;
7303 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7304 if (total_bytes > len)
7305 return 0;
7306 if (TREE_STRING_LENGTH (expr) < total_bytes)
7308 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7309 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7310 total_bytes - TREE_STRING_LENGTH (expr));
7312 else
7313 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7314 return total_bytes;
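/* Illustrative sketch, not part of the original sources: for the
   initializer char a[8] = "hi", the STRING_CST holds 3 bytes ("hi"
   plus the terminating NUL) while TYPE_SIZE_UNIT is 8, so the code
   above copies bytes 0..2 from the string, zero-fills bytes 3..7,
   and returns 8.  */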
7318 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7319 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7320 buffer PTR of length LEN bytes. Return the number of bytes
7321 placed in the buffer, or zero upon failure. */
7323 int
7324 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7326 switch (TREE_CODE (expr))
7328 case INTEGER_CST:
7329 return native_encode_int (expr, ptr, len);
7331 case REAL_CST:
7332 return native_encode_real (expr, ptr, len);
7334 case COMPLEX_CST:
7335 return native_encode_complex (expr, ptr, len);
7337 case VECTOR_CST:
7338 return native_encode_vector (expr, ptr, len);
7340 case STRING_CST:
7341 return native_encode_string (expr, ptr, len);
7343 default:
7344 return 0;
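/* Illustrative usage sketch, not from the original sources (it assumes
   EXPR is one of the constant kinds handled above):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     if (len != 0)
       tem = native_interpret_expr (new_type, buf, len);

   This encode/interpret round trip is how fold_view_convert_expr
   below folds VIEW_CONVERT_EXPRs of constants.  */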
7349 /* Subroutine of native_interpret_expr. Interpret the contents of
7350 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7351 If the buffer cannot be interpreted, return NULL_TREE. */
7353 static tree
7354 native_interpret_int (tree type, const unsigned char *ptr, int len)
7356 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7357 int byte, offset, word, words;
7358 unsigned char value;
7359 double_int result;
7361 if (total_bytes > len)
7362 return NULL_TREE;
7363 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7364 return NULL_TREE;
7366 result = double_int_zero;
7367 words = total_bytes / UNITS_PER_WORD;
7369 for (byte = 0; byte < total_bytes; byte++)
7371 int bitpos = byte * BITS_PER_UNIT;
7372 if (total_bytes > UNITS_PER_WORD)
7374 word = byte / UNITS_PER_WORD;
7375 if (WORDS_BIG_ENDIAN)
7376 word = (words - 1) - word;
7377 offset = word * UNITS_PER_WORD;
7378 if (BYTES_BIG_ENDIAN)
7379 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7380 else
7381 offset += byte % UNITS_PER_WORD;
7383 else
7384 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7385 value = ptr[offset];
7387 if (bitpos < HOST_BITS_PER_WIDE_INT)
7388 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7389 else
7390 result.high |= (unsigned HOST_WIDE_INT) value
7391 << (bitpos - HOST_BITS_PER_WIDE_INT);
7394 return double_int_to_tree (type, result);
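/* Illustrative sketch, not part of the original sources: with
   little-endian bytes { 0x01, 0x00, 0x00, 0x00 } and a 32-bit TYPE,
   the loop above ORs 0x01 into bit position 0 of RESULT and the
   function returns the INTEGER_CST 1 of that type.  */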
7398 /* Subroutine of native_interpret_expr. Interpret the contents of
7399 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7400 If the buffer cannot be interpreted, return NULL_TREE. */
7402 static tree
7403 native_interpret_real (tree type, const unsigned char *ptr, int len)
7405 enum machine_mode mode = TYPE_MODE (type);
7406 int total_bytes = GET_MODE_SIZE (mode);
7407 int byte, offset, word, words, bitpos;
7408 unsigned char value;
7409 /* There are always 32 bits in each long, no matter the size of
7410 the host's long. We handle floating point representations with
7411 up to 192 bits. */
7412 REAL_VALUE_TYPE r;
7413 long tmp[6];
7415 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7416 if (total_bytes > len || total_bytes > 24)
7417 return NULL_TREE;
7418 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7420 memset (tmp, 0, sizeof (tmp));
7421 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7422 bitpos += BITS_PER_UNIT)
7424 byte = (bitpos / BITS_PER_UNIT) & 3;
7425 if (UNITS_PER_WORD < 4)
7427 word = byte / UNITS_PER_WORD;
7428 if (WORDS_BIG_ENDIAN)
7429 word = (words - 1) - word;
7430 offset = word * UNITS_PER_WORD;
7431 if (BYTES_BIG_ENDIAN)
7432 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7433 else
7434 offset += byte % UNITS_PER_WORD;
7436 else
7437 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7438 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7440 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7443 real_from_target (&r, tmp, mode);
7444 return build_real (type, r);
7448 /* Subroutine of native_interpret_expr. Interpret the contents of
7449 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7450 If the buffer cannot be interpreted, return NULL_TREE. */
7452 static tree
7453 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7455 tree etype, rpart, ipart;
7456 int size;
7458 etype = TREE_TYPE (type);
7459 size = GET_MODE_SIZE (TYPE_MODE (etype));
7460 if (size * 2 > len)
7461 return NULL_TREE;
7462 rpart = native_interpret_expr (etype, ptr, size);
7463 if (!rpart)
7464 return NULL_TREE;
7465 ipart = native_interpret_expr (etype, ptr+size, size);
7466 if (!ipart)
7467 return NULL_TREE;
7468 return build_complex (type, rpart, ipart);
7472 /* Subroutine of native_interpret_expr. Interpret the contents of
7473 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7474 If the buffer cannot be interpreted, return NULL_TREE. */
7476 static tree
7477 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7479 tree etype, elem;
7480 int i, size, count;
7481 tree *elements;
7483 etype = TREE_TYPE (type);
7484 size = GET_MODE_SIZE (TYPE_MODE (etype));
7485 count = TYPE_VECTOR_SUBPARTS (type);
7486 if (size * count > len)
7487 return NULL_TREE;
7489 elements = XALLOCAVEC (tree, count);
7490 for (i = count - 1; i >= 0; i--)
7492 elem = native_interpret_expr (etype, ptr+(i*size), size);
7493 if (!elem)
7494 return NULL_TREE;
7495 elements[i] = elem;
7497 return build_vector (type, elements);
7501 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7502 the buffer PTR of length LEN as a constant of type TYPE. For
7503 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7504 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7505 return NULL_TREE. */
7507 tree
7508 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7510 switch (TREE_CODE (type))
7512 case INTEGER_TYPE:
7513 case ENUMERAL_TYPE:
7514 case BOOLEAN_TYPE:
7515 case POINTER_TYPE:
7516 case REFERENCE_TYPE:
7517 return native_interpret_int (type, ptr, len);
7519 case REAL_TYPE:
7520 return native_interpret_real (type, ptr, len);
7522 case COMPLEX_TYPE:
7523 return native_interpret_complex (type, ptr, len);
7525 case VECTOR_TYPE:
7526 return native_interpret_vector (type, ptr, len);
7528 default:
7529 return NULL_TREE;
7533 /* Returns true if we can interpret the contents of a native encoding
7534 as TYPE. */
7536 static bool
7537 can_native_interpret_type_p (tree type)
7539 switch (TREE_CODE (type))
7541 case INTEGER_TYPE:
7542 case ENUMERAL_TYPE:
7543 case BOOLEAN_TYPE:
7544 case POINTER_TYPE:
7545 case REFERENCE_TYPE:
7546 case REAL_TYPE:
7547 case COMPLEX_TYPE:
7548 case VECTOR_TYPE:
7549 return true;
7550 default:
7551 return false;
7555 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7556 TYPE at compile-time. If we're unable to perform the conversion
7557 return NULL_TREE. */
7559 static tree
7560 fold_view_convert_expr (tree type, tree expr)
7562 /* We support up to 512-bit values (for V8DFmode). */
7563 unsigned char buffer[64];
7564 int len;
7566 /* Check that the host and target are sane. */
7567 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7568 return NULL_TREE;
7570 len = native_encode_expr (expr, buffer, sizeof (buffer));
7571 if (len == 0)
7572 return NULL_TREE;
7574 return native_interpret_expr (type, buffer, len);
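/* Illustrative example, not part of the original sources: on a target
   using IEEE single precision, folding

     VIEW_CONVERT_EXPR<int>(1.0f)

   encodes the REAL_CST as the bytes of 0x3f800000 and reinterprets
   them, yielding the INTEGER_CST 1065353216.  */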
7577 /* Build an expression for the address of T. Folds away INDIRECT_REF
7578 to avoid confusing the gimplify process. */
7580 tree
7581 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7583 /* The size of the object is not relevant when talking about its address. */
7584 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7585 t = TREE_OPERAND (t, 0);
7587 if (TREE_CODE (t) == INDIRECT_REF)
7589 t = TREE_OPERAND (t, 0);
7591 if (TREE_TYPE (t) != ptrtype)
7592 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7594 else if (TREE_CODE (t) == MEM_REF
7595 && integer_zerop (TREE_OPERAND (t, 1)))
7596 return TREE_OPERAND (t, 0);
7597 else if (TREE_CODE (t) == MEM_REF
7598 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7599 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7600 TREE_OPERAND (t, 0),
7601 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7602 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7604 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7606 if (TREE_TYPE (t) != ptrtype)
7607 t = fold_convert_loc (loc, ptrtype, t);
7609 else
7610 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7612 return t;
7615 /* Build an expression for the address of T. */
7617 tree
7618 build_fold_addr_expr_loc (location_t loc, tree t)
7620 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7622 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7625 static bool vec_cst_ctor_to_array (tree, tree *);
7627 /* Fold a unary expression of code CODE and type TYPE with operand
7628 OP0. Return the folded expression if folding is successful.
7629 Otherwise, return NULL_TREE. */
7631 tree
7632 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7634 tree tem;
7635 tree arg0;
7636 enum tree_code_class kind = TREE_CODE_CLASS (code);
7638 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7639 && TREE_CODE_LENGTH (code) == 1);
7641 arg0 = op0;
7642 if (arg0)
7644 if (CONVERT_EXPR_CODE_P (code)
7645 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7647 /* Don't use STRIP_NOPS, because signedness of argument type
7648 matters. */
7649 STRIP_SIGN_NOPS (arg0);
7651 else
7653 /* Strip any conversions that don't change the mode. This
7654 is safe for every expression, except for a comparison
7655 expression because its signedness is derived from its
7656 operands.
7658 Note that this is done as an internal manipulation within
7659 the constant folder, in order to find the simplest
7660 representation of the arguments so that their form can be
7661 studied. In any case, the appropriate type conversions
7662 should be put back in the tree that will get out of the
7663 constant folder. */
7664 STRIP_NOPS (arg0);
7668 if (TREE_CODE_CLASS (code) == tcc_unary)
7670 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7671 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7672 fold_build1_loc (loc, code, type,
7673 fold_convert_loc (loc, TREE_TYPE (op0),
7674 TREE_OPERAND (arg0, 1))));
7675 else if (TREE_CODE (arg0) == COND_EXPR)
7677 tree arg01 = TREE_OPERAND (arg0, 1);
7678 tree arg02 = TREE_OPERAND (arg0, 2);
7679 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7680 arg01 = fold_build1_loc (loc, code, type,
7681 fold_convert_loc (loc,
7682 TREE_TYPE (op0), arg01));
7683 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7684 arg02 = fold_build1_loc (loc, code, type,
7685 fold_convert_loc (loc,
7686 TREE_TYPE (op0), arg02));
7687 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7688 arg01, arg02);
7690 /* If this was a conversion, and all we did was to move it
7691 inside the COND_EXPR, bring it back out. But leave it if
7692 it is a conversion from integer to integer and the
7693 result precision is no wider than a word since such a
7694 conversion is cheap and may be optimized away by combine,
7695 while it couldn't if it were outside the COND_EXPR. Then return
7696 so we don't get into an infinite recursion loop taking the
7697 conversion out and then back in. */
7699 if ((CONVERT_EXPR_CODE_P (code)
7700 || code == NON_LVALUE_EXPR)
7701 && TREE_CODE (tem) == COND_EXPR
7702 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7703 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7704 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7705 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7706 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7707 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7708 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7709 && (INTEGRAL_TYPE_P
7710 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7711 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7712 || flag_syntax_only))
7713 tem = build1_loc (loc, code, type,
7714 build3 (COND_EXPR,
7715 TREE_TYPE (TREE_OPERAND
7716 (TREE_OPERAND (tem, 1), 0)),
7717 TREE_OPERAND (tem, 0),
7718 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7719 TREE_OPERAND (TREE_OPERAND (tem, 2),
7720 0)));
7721 return tem;
7725 switch (code)
7727 case PAREN_EXPR:
7728 /* Re-association barriers around constants and other re-association
7729 barriers can be removed. */
7730 if (CONSTANT_CLASS_P (op0)
7731 || TREE_CODE (op0) == PAREN_EXPR)
7732 return fold_convert_loc (loc, type, op0);
7733 return NULL_TREE;
7735 CASE_CONVERT:
7736 case FLOAT_EXPR:
7737 case FIX_TRUNC_EXPR:
7738 if (TREE_TYPE (op0) == type)
7739 return op0;
7741 if (COMPARISON_CLASS_P (op0))
7743 /* If we have (type) (a CMP b) and type is an integral type, return
7744 new expression involving the new type. Canonicalize
7745 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7746 non-integral type.
7747 Do not fold the result, as that would not simplify further;
7748 folding again would also lead to recursion. */
7749 if (TREE_CODE (type) == BOOLEAN_TYPE)
7750 return build2_loc (loc, TREE_CODE (op0), type,
7751 TREE_OPERAND (op0, 0),
7752 TREE_OPERAND (op0, 1));
7753 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7754 && TREE_CODE (type) != VECTOR_TYPE)
7755 return build3_loc (loc, COND_EXPR, type, op0,
7756 constant_boolean_node (true, type),
7757 constant_boolean_node (false, type));
7760 /* Handle cases of two conversions in a row. */
7761 if (CONVERT_EXPR_P (op0))
7763 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7764 tree inter_type = TREE_TYPE (op0);
7765 int inside_int = INTEGRAL_TYPE_P (inside_type);
7766 int inside_ptr = POINTER_TYPE_P (inside_type);
7767 int inside_float = FLOAT_TYPE_P (inside_type);
7768 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7769 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7770 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7771 int inter_int = INTEGRAL_TYPE_P (inter_type);
7772 int inter_ptr = POINTER_TYPE_P (inter_type);
7773 int inter_float = FLOAT_TYPE_P (inter_type);
7774 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7775 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7776 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7777 int final_int = INTEGRAL_TYPE_P (type);
7778 int final_ptr = POINTER_TYPE_P (type);
7779 int final_float = FLOAT_TYPE_P (type);
7780 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7781 unsigned int final_prec = TYPE_PRECISION (type);
7782 int final_unsignedp = TYPE_UNSIGNED (type);
7784 /* In addition to the cases of two conversions in a row
7785 handled below, if we are converting something to its own
7786 type via an object of identical or wider precision, neither
7787 conversion is needed. */
7788 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7789 && (((inter_int || inter_ptr) && final_int)
7790 || (inter_float && final_float))
7791 && inter_prec >= final_prec)
7792 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7794 /* Likewise, if the intermediate and initial types are either both
7795 float or both integer, we don't need the middle conversion if the
7796 former is wider than the latter and doesn't change the signedness
7797 (for integers). Avoid this if the final type is a pointer since
7798 then we sometimes need the middle conversion. Likewise if the
7799 final type has a precision not equal to the size of its mode. */
7800 if (((inter_int && inside_int)
7801 || (inter_float && inside_float)
7802 || (inter_vec && inside_vec))
7803 && inter_prec >= inside_prec
7804 && (inter_float || inter_vec
7805 || inter_unsignedp == inside_unsignedp)
7806 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7807 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7808 && ! final_ptr
7809 && (! final_vec || inter_prec == inside_prec))
7810 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7812 /* If we have a sign-extension of a zero-extended value, we can
7813 replace that by a single zero-extension. Likewise if the
7814 final conversion does not change precision we can drop the
7815 intermediate conversion. */
7816 if (inside_int && inter_int && final_int
7817 && ((inside_prec < inter_prec && inter_prec < final_prec
7818 && inside_unsignedp && !inter_unsignedp)
7819 || final_prec == inter_prec))
7820 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7822 /* Two conversions in a row are not needed unless:
7823 - some conversion is floating-point (overstrict for now), or
7824 - some conversion is a vector (overstrict for now), or
7825 - the intermediate type is narrower than both initial and
7826 final, or
7827 - the intermediate type and innermost type differ in signedness,
7828 and the outermost type is wider than the intermediate, or
7829 - the initial type is a pointer type and the precisions of the
7830 intermediate and final types differ, or
7831 - the final type is a pointer type and the precisions of the
7832 initial and intermediate types differ. */
7833 if (! inside_float && ! inter_float && ! final_float
7834 && ! inside_vec && ! inter_vec && ! final_vec
7835 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7836 && ! (inside_int && inter_int
7837 && inter_unsignedp != inside_unsignedp
7838 && inter_prec < final_prec)
7839 && ((inter_unsignedp && inter_prec > inside_prec)
7840 == (final_unsignedp && final_prec > inter_prec))
7841 && ! (inside_ptr && inter_prec != final_prec)
7842 && ! (final_ptr && inside_prec != inter_prec)
7843 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7844 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7845 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
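/* Illustrative examples of the rules above, not part of the original
   sources: for an unsigned char X, (unsigned char)(unsigned int) X
   converts X to its own type via a wider type and folds to plain X;
   for an unsigned short X, (long)(int) X is a sign-extension of a
   zero-extended value and folds to the single zero-extension
   (long) X.  */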
7848 /* Handle (T *)&A.B.C for A being of type T and B and C
7849 living at offset zero. This occurs frequently in
7850 C++ upcasting and then accessing the base. */
7851 if (TREE_CODE (op0) == ADDR_EXPR
7852 && POINTER_TYPE_P (type)
7853 && handled_component_p (TREE_OPERAND (op0, 0)))
7855 HOST_WIDE_INT bitsize, bitpos;
7856 tree offset;
7857 enum machine_mode mode;
7858 int unsignedp, volatilep;
7859 tree base = TREE_OPERAND (op0, 0);
7860 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7861 &mode, &unsignedp, &volatilep, false);
7862 /* If the reference was to a (constant) zero offset, we can use
7863 the address of the base if it has the same base type
7864 as the result type and the pointer type is unqualified. */
7865 if (! offset && bitpos == 0
7866 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7867 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7868 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7869 return fold_convert_loc (loc, type,
7870 build_fold_addr_expr_loc (loc, base));
7873 if (TREE_CODE (op0) == MODIFY_EXPR
7874 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7875 /* Detect assigning a bitfield. */
7876 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7877 && DECL_BIT_FIELD
7878 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7880 /* Don't leave an assignment inside a conversion
7881 unless assigning a bitfield. */
7882 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7883 /* First do the assignment, then return converted constant. */
7884 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7885 TREE_NO_WARNING (tem) = 1;
7886 TREE_USED (tem) = 1;
7887 return tem;
7890 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7891 constant (if x has signed type, the sign bit cannot be set
7892 in c). This folds extension into the BIT_AND_EXPR.
7893 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7894 very likely don't have maximal range for their precision and this
7895 transformation effectively doesn't preserve non-maximal ranges. */
7896 if (TREE_CODE (type) == INTEGER_TYPE
7897 && TREE_CODE (op0) == BIT_AND_EXPR
7898 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7900 tree and_expr = op0;
7901 tree and0 = TREE_OPERAND (and_expr, 0);
7902 tree and1 = TREE_OPERAND (and_expr, 1);
7903 int change = 0;
7905 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7906 || (TYPE_PRECISION (type)
7907 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7908 change = 1;
7909 else if (TYPE_PRECISION (TREE_TYPE (and1))
7910 <= HOST_BITS_PER_WIDE_INT
7911 && host_integerp (and1, 1))
7913 unsigned HOST_WIDE_INT cst;
7915 cst = tree_low_cst (and1, 1);
7916 cst &= (HOST_WIDE_INT) -1
7917 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7918 change = (cst == 0);
7919 #ifdef LOAD_EXTEND_OP
7920 if (change
7921 && !flag_syntax_only
7922 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7923 == ZERO_EXTEND))
7925 tree uns = unsigned_type_for (TREE_TYPE (and0));
7926 and0 = fold_convert_loc (loc, uns, and0);
7927 and1 = fold_convert_loc (loc, uns, and1);
7929 #endif
7931 if (change)
7933 tem = force_fit_type_double (type, tree_to_double_int (and1),
7934 0, TREE_OVERFLOW (and1));
7935 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7936 fold_convert_loc (loc, type, and0), tem);
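/* Illustrative example, not part of the original sources: for an
   unsigned short X, (unsigned int)(X & 0xff) becomes
   (unsigned int) X & 0xff here, folding the widening conversion into
   the BIT_AND_EXPR; TREE_TYPE (and_expr) is unsigned, so CHANGE is
   set by the first test above.  */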
7940 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7941 when one of the new casts will fold away. Conservatively we assume
7942 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7943 if (POINTER_TYPE_P (type)
7944 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7945 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7946 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7947 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7948 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7950 tree arg00 = TREE_OPERAND (arg0, 0);
7951 tree arg01 = TREE_OPERAND (arg0, 1);
7953 return fold_build_pointer_plus_loc
7954 (loc, fold_convert_loc (loc, type, arg00), arg01);
7957 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7958 of the same precision, and X is an integer type not narrower than
7959 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7960 if (INTEGRAL_TYPE_P (type)
7961 && TREE_CODE (op0) == BIT_NOT_EXPR
7962 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7963 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7964 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7966 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7967 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7968 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7969 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7970 fold_convert_loc (loc, type, tem));
7973 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7974 type of X and Y (integer types only). */
7975 if (INTEGRAL_TYPE_P (type)
7976 && TREE_CODE (op0) == MULT_EXPR
7977 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7978 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7980 /* Be careful not to introduce new overflows. */
7981 tree mult_type;
7982 if (TYPE_OVERFLOW_WRAPS (type))
7983 mult_type = type;
7984 else
7985 mult_type = unsigned_type_for (type);
7987 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7989 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7990 fold_convert_loc (loc, mult_type,
7991 TREE_OPERAND (op0, 0)),
7992 fold_convert_loc (loc, mult_type,
7993 TREE_OPERAND (op0, 1)));
7994 return fold_convert_loc (loc, type, tem);
7998 tem = fold_convert_const (code, type, op0);
7999 return tem ? tem : NULL_TREE;
8001 case ADDR_SPACE_CONVERT_EXPR:
8002 if (integer_zerop (arg0))
8003 return fold_convert_const (code, type, arg0);
8004 return NULL_TREE;
8006 case FIXED_CONVERT_EXPR:
8007 tem = fold_convert_const (code, type, arg0);
8008 return tem ? tem : NULL_TREE;
8010 case VIEW_CONVERT_EXPR:
8011 if (TREE_TYPE (op0) == type)
8012 return op0;
8013 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8014 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8015 type, TREE_OPERAND (op0, 0));
8016 if (TREE_CODE (op0) == MEM_REF)
8017 return fold_build2_loc (loc, MEM_REF, type,
8018 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8020 /* For integral conversions with the same precision or pointer
8021 conversions use a NOP_EXPR instead. */
8022 if ((INTEGRAL_TYPE_P (type)
8023 || POINTER_TYPE_P (type))
8024 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8025 || POINTER_TYPE_P (TREE_TYPE (op0)))
8026 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8027 return fold_convert_loc (loc, type, op0);
8029 /* Strip inner integral conversions that do not change the precision. */
8030 if (CONVERT_EXPR_P (op0)
8031 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8032 || POINTER_TYPE_P (TREE_TYPE (op0)))
8033 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8034 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8035 && (TYPE_PRECISION (TREE_TYPE (op0))
8036 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8037 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8038 type, TREE_OPERAND (op0, 0));
8040 return fold_view_convert_expr (type, op0);
8042 case NEGATE_EXPR:
8043 tem = fold_negate_expr (loc, arg0);
8044 if (tem)
8045 return fold_convert_loc (loc, type, tem);
8046 return NULL_TREE;
8048 case ABS_EXPR:
8049 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8050 return fold_abs_const (arg0, type);
8051 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8052 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8053 /* Convert fabs((double)float) into (double)fabsf(float). */
8054 else if (TREE_CODE (arg0) == NOP_EXPR
8055 && TREE_CODE (type) == REAL_TYPE)
8057 tree targ0 = strip_float_extensions (arg0);
8058 if (targ0 != arg0)
8059 return fold_convert_loc (loc, type,
8060 fold_build1_loc (loc, ABS_EXPR,
8061 TREE_TYPE (targ0),
8062 targ0));
8064 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8065 else if (TREE_CODE (arg0) == ABS_EXPR)
8066 return arg0;
8067 else if (tree_expr_nonnegative_p (arg0))
8068 return arg0;
8070 /* Strip sign ops from argument. */
8071 if (TREE_CODE (type) == REAL_TYPE)
8073 tem = fold_strip_sign_ops (arg0);
8074 if (tem)
8075 return fold_build1_loc (loc, ABS_EXPR, type,
8076 fold_convert_loc (loc, type, tem));
8078 return NULL_TREE;
8080 case CONJ_EXPR:
8081 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8082 return fold_convert_loc (loc, type, arg0);
8083 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8085 tree itype = TREE_TYPE (type);
8086 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8087 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8088 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8089 negate_expr (ipart));
8091 if (TREE_CODE (arg0) == COMPLEX_CST)
8093 tree itype = TREE_TYPE (type);
8094 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8095 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8096 return build_complex (type, rpart, negate_expr (ipart));
8098 if (TREE_CODE (arg0) == CONJ_EXPR)
8099 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8100 return NULL_TREE;
8102 case BIT_NOT_EXPR:
8103 if (TREE_CODE (arg0) == INTEGER_CST)
8104 return fold_not_const (arg0, type);
8105 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8106 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8107 /* Convert ~ (-A) to A - 1. */
8108 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8109 return fold_build2_loc (loc, MINUS_EXPR, type,
8110 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8111 build_int_cst (type, 1));
8112 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8113 else if (INTEGRAL_TYPE_P (type)
8114 && ((TREE_CODE (arg0) == MINUS_EXPR
8115 && integer_onep (TREE_OPERAND (arg0, 1)))
8116 || (TREE_CODE (arg0) == PLUS_EXPR
8117 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8118 return fold_build1_loc (loc, NEGATE_EXPR, type,
8119 fold_convert_loc (loc, type,
8120 TREE_OPERAND (arg0, 0)));
8121 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8122 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8123 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8124 fold_convert_loc (loc, type,
8125 TREE_OPERAND (arg0, 0)))))
8126 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8127 fold_convert_loc (loc, type,
8128 TREE_OPERAND (arg0, 1)));
8129 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8130 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8131 fold_convert_loc (loc, type,
8132 TREE_OPERAND (arg0, 1)))))
8133 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8134 fold_convert_loc (loc, type,
8135 TREE_OPERAND (arg0, 0)), tem);
8136 /* Perform BIT_NOT_EXPR on each element individually. */
8137 else if (TREE_CODE (arg0) == VECTOR_CST)
8139 tree *elements;
8140 tree elem;
8141 unsigned count = VECTOR_CST_NELTS (arg0), i;
8143 elements = XALLOCAVEC (tree, count);
8144 for (i = 0; i < count; i++)
8146 elem = VECTOR_CST_ELT (arg0, i);
8147 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8148 if (elem == NULL_TREE)
8149 break;
8150 elements[i] = elem;
8152 if (i == count)
8153 return build_vector (type, elements);
8156 return NULL_TREE;
8158 case TRUTH_NOT_EXPR:
8159 /* The argument to invert_truthvalue must have Boolean type. */
8160 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8161 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8163 /* Note that the operand of this must be an int
8164 and its values must be 0 or 1.
8165 ("true" is a fixed value perhaps depending on the language,
8166 but we don't handle values other than 1 correctly yet.) */
8167 tem = fold_truth_not_expr (loc, arg0);
8168 if (!tem)
8169 return NULL_TREE;
8170 return fold_convert_loc (loc, type, tem);
8172 case REALPART_EXPR:
8173 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8174 return fold_convert_loc (loc, type, arg0);
8175 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8176 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8177 TREE_OPERAND (arg0, 1));
8178 if (TREE_CODE (arg0) == COMPLEX_CST)
8179 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8180 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8182 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8183 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8184 fold_build1_loc (loc, REALPART_EXPR, itype,
8185 TREE_OPERAND (arg0, 0)),
8186 fold_build1_loc (loc, REALPART_EXPR, itype,
8187 TREE_OPERAND (arg0, 1)));
8188 return fold_convert_loc (loc, type, tem);
8190 if (TREE_CODE (arg0) == CONJ_EXPR)
8192 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8193 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8194 TREE_OPERAND (arg0, 0));
8195 return fold_convert_loc (loc, type, tem);
8197 if (TREE_CODE (arg0) == CALL_EXPR)
8199 tree fn = get_callee_fndecl (arg0);
8200 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8201 switch (DECL_FUNCTION_CODE (fn))
8203 CASE_FLT_FN (BUILT_IN_CEXPI):
8204 fn = mathfn_built_in (type, BUILT_IN_COS);
8205 if (fn)
8206 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8207 break;
8209 default:
8210 break;
8213 return NULL_TREE;
8215 case IMAGPART_EXPR:
8216 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8217 return build_zero_cst (type);
8218 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8219 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8220 TREE_OPERAND (arg0, 0));
8221 if (TREE_CODE (arg0) == COMPLEX_CST)
8222 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8223 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8225 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8226 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8227 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8228 TREE_OPERAND (arg0, 0)),
8229 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8230 TREE_OPERAND (arg0, 1)));
8231 return fold_convert_loc (loc, type, tem);
8233 if (TREE_CODE (arg0) == CONJ_EXPR)
8235 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8236 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8237 return fold_convert_loc (loc, type, negate_expr (tem));
8239 if (TREE_CODE (arg0) == CALL_EXPR)
8241 tree fn = get_callee_fndecl (arg0);
8242 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8243 switch (DECL_FUNCTION_CODE (fn))
8245 CASE_FLT_FN (BUILT_IN_CEXPI):
8246 fn = mathfn_built_in (type, BUILT_IN_SIN);
8247 if (fn)
8248 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8249 break;
8251 default:
8252 break;
8255 return NULL_TREE;
8257 case INDIRECT_REF:
8258 /* Fold *&X to X if X is an lvalue. */
8259 if (TREE_CODE (op0) == ADDR_EXPR)
8261 tree op00 = TREE_OPERAND (op0, 0);
8262 if ((TREE_CODE (op00) == VAR_DECL
8263 || TREE_CODE (op00) == PARM_DECL
8264 || TREE_CODE (op00) == RESULT_DECL)
8265 && !TREE_READONLY (op00))
8266 return op00;
8268 return NULL_TREE;
8270 case VEC_UNPACK_LO_EXPR:
8271 case VEC_UNPACK_HI_EXPR:
8272 case VEC_UNPACK_FLOAT_LO_EXPR:
8273 case VEC_UNPACK_FLOAT_HI_EXPR:
8275 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8276 tree *elts;
8277 enum tree_code subcode;
8279 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8280 if (TREE_CODE (arg0) != VECTOR_CST)
8281 return NULL_TREE;
8283 elts = XALLOCAVEC (tree, nelts * 2);
8284 if (!vec_cst_ctor_to_array (arg0, elts))
8285 return NULL_TREE;
8287 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8288 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8289 elts += nelts;
8291 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8292 subcode = NOP_EXPR;
8293 else
8294 subcode = FLOAT_EXPR;
8296 for (i = 0; i < nelts; i++)
8298 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8299 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8300 return NULL_TREE;
8303 return build_vector (type, elts);
8306 case REDUC_MIN_EXPR:
8307 case REDUC_MAX_EXPR:
8308 case REDUC_PLUS_EXPR:
8310 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8311 tree *elts;
8312 enum tree_code subcode;
8314 if (TREE_CODE (op0) != VECTOR_CST)
8315 return NULL_TREE;
8317 elts = XALLOCAVEC (tree, nelts);
8318 if (!vec_cst_ctor_to_array (op0, elts))
8319 return NULL_TREE;
8321 switch (code)
8323 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8324 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8325 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8326 default: gcc_unreachable ();
8329 for (i = 1; i < nelts; i++)
8331 elts[0] = const_binop (subcode, elts[0], elts[i]);
8332 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8333 return NULL_TREE;
8334 elts[i] = build_zero_cst (TREE_TYPE (type));
8337 return build_vector (type, elts);
8340 default:
8341 return NULL_TREE;
8342 } /* switch (code) */
8346 /* If the operation was a conversion do _not_ mark a resulting constant
8347 with TREE_OVERFLOW if the original constant was not so marked. These conversions
8348 have implementation defined behavior and retaining the TREE_OVERFLOW
8349 flag here would confuse later passes such as VRP. */
8350 tree
8351 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8352 tree type, tree op0)
8354 tree res = fold_unary_loc (loc, code, type, op0);
8355 if (res
8356 && TREE_CODE (res) == INTEGER_CST
8357 && TREE_CODE (op0) == INTEGER_CST
8358 && CONVERT_EXPR_CODE_P (code))
8359 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8361 return res;
8364 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8365 operands OP0 and OP1. LOC is the location of the resulting expression.
8366 ARG0 and ARG1 are OP0 and OP1 with their NOPs stripped.
8367 Return the folded expression if folding is successful. Otherwise,
8368 return NULL_TREE. */
8369 static tree
8370 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8371 tree arg0, tree arg1, tree op0, tree op1)
8373 tree tem;
8375 /* We only do these simplifications if we are optimizing. */
8376 if (!optimize)
8377 return NULL_TREE;
8379 /* Check for things like (A || B) && (A || C). We can convert this
8380 to A || (B && C). Note that either operator can be any of the four
8381 truth and/or operations and the transformation will still be
8382 valid. Also note that we only care about order for the
8383 ANDIF and ORIF operators. If B contains side effects, this
8384 might change the truth-value of A. */
8385 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8386 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8387 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8388 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8389 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8390 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8392 tree a00 = TREE_OPERAND (arg0, 0);
8393 tree a01 = TREE_OPERAND (arg0, 1);
8394 tree a10 = TREE_OPERAND (arg1, 0);
8395 tree a11 = TREE_OPERAND (arg1, 1);
8396 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8397 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8398 && (code == TRUTH_AND_EXPR
8399 || code == TRUTH_OR_EXPR));
8401 if (operand_equal_p (a00, a10, 0))
8402 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8403 fold_build2_loc (loc, code, type, a01, a11));
8404 else if (commutative && operand_equal_p (a00, a11, 0))
8405 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8406 fold_build2_loc (loc, code, type, a01, a10));
8407 else if (commutative && operand_equal_p (a01, a10, 0))
8408 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8409 fold_build2_loc (loc, code, type, a00, a11));
8411 /* This case is tricky because we must either have commutative
8412 operators or else A10 must not have side-effects. */
8414 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8415 && operand_equal_p (a01, a11, 0))
8416 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8417 fold_build2_loc (loc, code, type, a00, a10),
8418 a01);
8421 /* See if we can build a range comparison. */
8422 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8423 return tem;
8425 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8426 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8428 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8429 if (tem)
8430 return fold_build2_loc (loc, code, type, tem, arg1);
8433 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8434 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8436 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8437 if (tem)
8438 return fold_build2_loc (loc, code, type, arg0, tem);
8441 /* Check for the possibility of merging component references. If our
8442 lhs is another similar operation, try to merge its rhs with our
8443 rhs. Then try to merge our lhs and rhs. */
8444 if (TREE_CODE (arg0) == code
8445 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8446 TREE_OPERAND (arg0, 1), arg1)))
8447 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8449 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8450 return tem;
8452 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8453 && (code == TRUTH_AND_EXPR
8454 || code == TRUTH_ANDIF_EXPR
8455 || code == TRUTH_OR_EXPR
8456 || code == TRUTH_ORIF_EXPR))
8458 enum tree_code ncode, icode;
8460 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8461 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8462 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8464 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8465 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8466 We don't want to pack more than two leaves into a non-IF AND/OR
8467 expression.
8468 If the tree code of the left-hand operand isn't an AND/OR-IF code and is
8469 not equal to IF-CODE, then we don't want to add the right-hand operand.
8470 If the inner right-hand side of the left-hand operand has
8471 side effects, or isn't simple, then we can't add to it,
8472 as otherwise we might destroy the if-sequence. */
8473 if (TREE_CODE (arg0) == icode
8474 && simple_operand_p_2 (arg1)
8475 /* Needed for sequence points to handle trapping, and
8476 side effects. */
8477 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8479 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8480 arg1);
8481 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8482 tem);
8484 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8485 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8486 else if (TREE_CODE (arg1) == icode
8487 && simple_operand_p_2 (arg0)
8488 /* Needed for sequence points to handle trapping, and
8489 side effects. */
8490 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8492 tem = fold_build2_loc (loc, ncode, type,
8493 arg0, TREE_OPERAND (arg1, 0));
8494 return fold_build2_loc (loc, icode, type, tem,
8495 TREE_OPERAND (arg1, 1));
8497 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8498 into (A OR B).
8499 For sequence point consistency, we need to check for trapping,
8500 and side-effects. */
8501 else if (code == icode && simple_operand_p_2 (arg0)
8502 && simple_operand_p_2 (arg1))
8503 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8506 return NULL_TREE;
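/* Illustrative example, not part of the original sources: given
   (a || b) && (a || c) with side-effect-free B, the common-operand
   rules at the top of fold_truth_andor rewrite it as a || (b && c),
   evaluating A only once.  */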
8509 /* Fold a binary expression of code CODE and type TYPE with operands
8510 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8511 Return the folded expression if folding is successful. Otherwise,
8512 return NULL_TREE. */
8514 static tree
8515 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8517 enum tree_code compl_code;
8519 if (code == MIN_EXPR)
8520 compl_code = MAX_EXPR;
8521 else if (code == MAX_EXPR)
8522 compl_code = MIN_EXPR;
8523 else
8524 gcc_unreachable ();
8526 /* MIN (MAX (a, b), b) == b. */
8527 if (TREE_CODE (op0) == compl_code
8528 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8529 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8531 /* MIN (MAX (b, a), b) == b. */
8532 if (TREE_CODE (op0) == compl_code
8533 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8534 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8535 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8537 /* MIN (a, MAX (a, b)) == a. */
8538 if (TREE_CODE (op1) == compl_code
8539 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8540 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8541 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8543 /* MIN (a, MAX (b, a)) == a. */
8544 if (TREE_CODE (op1) == compl_code
8545 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8546 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8547 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8549 return NULL_TREE;
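/* Illustrative example, not part of the original sources: for
   MIN (MAX (a, b), b) the first rule above returns B, since
   MAX (a, b) >= b holds for every A; omit_one_operand_loc keeps A
   around only if it has side effects.  */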
8552 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8553 by changing CODE to reduce the magnitude of constants involved in
8554 ARG0 of the comparison.
8555 Returns a canonicalized comparison tree if a simplification was
8556 possible, otherwise returns NULL_TREE.
8557 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8558 valid if signed overflow is undefined. */
8560 static tree
8561 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8562 tree arg0, tree arg1,
8563 bool *strict_overflow_p)
8565 enum tree_code code0 = TREE_CODE (arg0);
8566 tree t, cst0 = NULL_TREE;
8567 int sgn0;
8568 bool swap = false;
8570 /* Match A +- CST code arg1 and CST code arg1. We can change the
8571 first form only if overflow is undefined. */
8572 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8573 /* In principle pointers also have undefined overflow behavior,
8574 but that causes problems elsewhere. */
8575 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8576 && (code0 == MINUS_EXPR
8577 || code0 == PLUS_EXPR)
8578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8579 || code0 == INTEGER_CST))
8580 return NULL_TREE;
8582 /* Identify the constant in arg0 and its sign. */
8583 if (code0 == INTEGER_CST)
8584 cst0 = arg0;
8585 else
8586 cst0 = TREE_OPERAND (arg0, 1);
8587 sgn0 = tree_int_cst_sgn (cst0);
8589 /* Overflowed constants and zero will cause problems. */
8590 if (integer_zerop (cst0)
8591 || TREE_OVERFLOW (cst0))
8592 return NULL_TREE;
8594 /* See if we can reduce the magnitude of the constant in
8595 arg0 by changing the comparison code. */
8596 if (code0 == INTEGER_CST)
8598 /* CST <= arg1 -> CST-1 < arg1. */
8599 if (code == LE_EXPR && sgn0 == 1)
8600 code = LT_EXPR;
8601 /* -CST < arg1 -> -CST-1 <= arg1. */
8602 else if (code == LT_EXPR && sgn0 == -1)
8603 code = LE_EXPR;
8604 /* CST > arg1 -> CST-1 >= arg1. */
8605 else if (code == GT_EXPR && sgn0 == 1)
8606 code = GE_EXPR;
8607 /* -CST >= arg1 -> -CST-1 > arg1. */
8608 else if (code == GE_EXPR && sgn0 == -1)
8609 code = GT_EXPR;
8610 else
8611 return NULL_TREE;
8612 /* arg1 code' CST' might be more canonical. */
8613 swap = true;
8615 else
8617 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8618 if (code == LT_EXPR
8619 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8620 code = LE_EXPR;
8621 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8622 else if (code == GT_EXPR
8623 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8624 code = GE_EXPR;
8625 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8626 else if (code == LE_EXPR
8627 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8628 code = LT_EXPR;
8629 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8630 else if (code == GE_EXPR
8631 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8632 code = GT_EXPR;
8633 else
8634 return NULL_TREE;
8635 *strict_overflow_p = true;
8638 /* Now build the constant reduced in magnitude. But not if that
8639 would produce one outside of its type's range. */
8640 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8641 && ((sgn0 == 1
8642 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8643 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8644 || (sgn0 == -1
8645 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8646 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8647 /* We cannot swap the comparison here as that would cause us to
8648 endlessly recurse. */
8649 return NULL_TREE;
8651 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8652 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8653 if (code0 != INTEGER_CST)
8654 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8655 t = fold_convert (TREE_TYPE (arg1), t);
8657 /* If swapping might yield a more canonical form, do so. */
8658 if (swap)
8659 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8660 else
8661 return fold_build2_loc (loc, code, type, t, arg1);
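/* Illustrative examples, not part of the original sources: with
   signed overflow undefined, X + 1 > Y is canonicalized via
   A + CST > arg1 -> A + CST-1 >= arg1 into X >= Y (the +0 folds
   away); the constant-only form 5 <= Y becomes 4 < Y and is then
   swapped to the more canonical Y > 4.  */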
8664 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8665 overflow further. Try to decrease the magnitude of constants involved
8666 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8667 and put sole constants at the second argument position.
8668 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8670 static tree
8671 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8672 tree arg0, tree arg1)
8674 tree t;
8675 bool strict_overflow_p;
8676 const char * const warnmsg = G_("assuming signed overflow does not occur "
8677 "when reducing constant in comparison");
8679 /* Try canonicalization by simplifying arg0. */
8680 strict_overflow_p = false;
8681 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8682 &strict_overflow_p);
8683 if (t)
8685 if (strict_overflow_p)
8686 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8687 return t;
8690 /* Try canonicalization by simplifying arg1 using the swapped
8691 comparison. */
8692 code = swap_tree_comparison (code);
8693 strict_overflow_p = false;
8694 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8695 &strict_overflow_p);
8696 if (t && strict_overflow_p)
8697 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8698 return t;
8701 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8702 space. This is used to avoid issuing overflow warnings for
8703 expressions like &p->x which cannot wrap. */
8705 static bool
8706 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8708 double_int di_offset, total;
8710 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8711 return true;
8713 if (bitpos < 0)
8714 return true;
8716 if (offset == NULL_TREE)
8717 di_offset = double_int_zero;
8718 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8719 return true;
8720 else
8721 di_offset = TREE_INT_CST (offset);
8723 bool overflow;
8724 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8725 total = di_offset.add_with_sign (units, true, &overflow);
8726 if (overflow)
8727 return true;
8729 if (total.high != 0)
8730 return true;
8732 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8733 if (size <= 0)
8734 return true;
8736 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8737 array. */
8738 if (TREE_CODE (base) == ADDR_EXPR)
8740 HOST_WIDE_INT base_size;
8742 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8743 if (base_size > 0 && size < base_size)
8744 size = base_size;
8747 return total.low > (unsigned HOST_WIDE_INT) size;
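/* Illustrative example, not part of the original sources: for &a.f
   where A is a 16-byte object and F sits at bit position 96, the
   caller passes BITPOS = 96, i.e. 12 bytes, and no variable OFFSET,
   so TOTAL == 12 <= SIZE == 16 and the function returns false: the
   address cannot wrap.  */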
8750 /* Subroutine of fold_binary. This routine performs all of the
8751 transformations that are common to the equality/inequality
8752 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8753 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8754 fold_binary itself should call fold_binary instead. Fold a comparison with
8755 tree code CODE and type TYPE with operands OP0 and OP1. Return
8756 the folded comparison or NULL_TREE. */
8758 static tree
8759 fold_comparison (location_t loc, enum tree_code code, tree type,
8760 tree op0, tree op1)
8762 tree arg0, arg1, tem;
8764 arg0 = op0;
8765 arg1 = op1;
8767 STRIP_SIGN_NOPS (arg0);
8768 STRIP_SIGN_NOPS (arg1);
8770 tem = fold_relational_const (code, type, arg0, arg1);
8771 if (tem != NULL_TREE)
8772 return tem;
8774 /* If one arg is a real or integer constant, put it last. */
8775 if (tree_swap_operands_p (arg0, arg1, true))
8776 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8778 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8779 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8780 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8781 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8782 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8783 && (TREE_CODE (arg1) == INTEGER_CST
8784 && !TREE_OVERFLOW (arg1)))
8786 tree const1 = TREE_OPERAND (arg0, 1);
8787 tree const2 = arg1;
8788 tree variable = TREE_OPERAND (arg0, 0);
8789 tree lhs;
8790 int lhs_add;
8791 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8793 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8794 TREE_TYPE (arg1), const2, const1);
8796 /* If the constant operation overflowed this can be
8797 simplified as a comparison against INT_MAX/INT_MIN. */
8798 if (TREE_CODE (lhs) == INTEGER_CST
8799 && TREE_OVERFLOW (lhs))
8801 int const1_sgn = tree_int_cst_sgn (const1);
8802 enum tree_code code2 = code;
8804 /* Get the sign of the constant on the lhs if the
8805 operation were VARIABLE + CONST1. */
8806 if (TREE_CODE (arg0) == MINUS_EXPR)
8807 const1_sgn = -const1_sgn;
8809 /* The sign of the constant determines if we overflowed
8810 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8811 Canonicalize to the INT_MIN overflow by swapping the comparison
8812 if necessary. */
8813 if (const1_sgn == -1)
8814 code2 = swap_tree_comparison (code);
8816 /* We now can look at the canonicalized case
8817 VARIABLE + 1 CODE2 INT_MIN
8818 and decide on the result. */
8819 if (code2 == LT_EXPR
8820 || code2 == LE_EXPR
8821 || code2 == EQ_EXPR)
8822 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8823 else if (code2 == NE_EXPR
8824 || code2 == GE_EXPR
8825 || code2 == GT_EXPR)
8826 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8829 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8830 && (TREE_CODE (lhs) != INTEGER_CST
8831 || !TREE_OVERFLOW (lhs)))
8833 if (code != EQ_EXPR && code != NE_EXPR)
8834 fold_overflow_warning ("assuming signed overflow does not occur "
8835 "when changing X +- C1 cmp C2 to "
8836 "X cmp C1 +- C2",
8837 WARN_STRICT_OVERFLOW_COMPARISON);
8838 return fold_build2_loc (loc, code, type, variable, lhs);
8842 /* For comparisons of pointers we can decompose it to a compile time
8843 comparison of the base objects and the offsets into the object.
8844 This requires at least one operand being an ADDR_EXPR or a
8845 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8846 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8847 && (TREE_CODE (arg0) == ADDR_EXPR
8848 || TREE_CODE (arg1) == ADDR_EXPR
8849 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8850 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8852 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8853 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8854 enum machine_mode mode;
8855 int volatilep, unsignedp;
8856 bool indirect_base0 = false, indirect_base1 = false;
8858 /* Get base and offset for the access. Strip ADDR_EXPR for
8859 get_inner_reference, but put it back by stripping INDIRECT_REF
8860 off the base object if possible. indirect_baseN will be true
8861 if baseN is not an address but refers to the object itself. */
8862 base0 = arg0;
8863 if (TREE_CODE (arg0) == ADDR_EXPR)
8865 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8866 &bitsize, &bitpos0, &offset0, &mode,
8867 &unsignedp, &volatilep, false);
8868 if (TREE_CODE (base0) == INDIRECT_REF)
8869 base0 = TREE_OPERAND (base0, 0);
8870 else
8871 indirect_base0 = true;
8873 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8875 base0 = TREE_OPERAND (arg0, 0);
8876 STRIP_SIGN_NOPS (base0);
8877 if (TREE_CODE (base0) == ADDR_EXPR)
8879 base0 = TREE_OPERAND (base0, 0);
8880 indirect_base0 = true;
8882 offset0 = TREE_OPERAND (arg0, 1);
8883 if (host_integerp (offset0, 0))
8885 HOST_WIDE_INT off = size_low_cst (offset0);
8886 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8887 * BITS_PER_UNIT)
8888 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8890 bitpos0 = off * BITS_PER_UNIT;
8891 offset0 = NULL_TREE;
8896 base1 = arg1;
8897 if (TREE_CODE (arg1) == ADDR_EXPR)
8899 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8900 &bitsize, &bitpos1, &offset1, &mode,
8901 &unsignedp, &volatilep, false);
8902 if (TREE_CODE (base1) == INDIRECT_REF)
8903 base1 = TREE_OPERAND (base1, 0);
8904 else
8905 indirect_base1 = true;
8907 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8909 base1 = TREE_OPERAND (arg1, 0);
8910 STRIP_SIGN_NOPS (base1);
8911 if (TREE_CODE (base1) == ADDR_EXPR)
8913 base1 = TREE_OPERAND (base1, 0);
8914 indirect_base1 = true;
8916 offset1 = TREE_OPERAND (arg1, 1);
8917 if (host_integerp (offset1, 0))
8919 HOST_WIDE_INT off = size_low_cst (offset1);
8920 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8921 * BITS_PER_UNIT)
8922 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8924 bitpos1 = off * BITS_PER_UNIT;
8925 offset1 = NULL_TREE;
8930 /* A local variable can never be pointed to by
8931 the default SSA name of an incoming parameter. */
8932 if ((TREE_CODE (arg0) == ADDR_EXPR
8933 && indirect_base0
8934 && TREE_CODE (base0) == VAR_DECL
8935 && auto_var_in_fn_p (base0, current_function_decl)
8936 && !indirect_base1
8937 && TREE_CODE (base1) == SSA_NAME
8938 && SSA_NAME_IS_DEFAULT_DEF (base1)
8939 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8940 || (TREE_CODE (arg1) == ADDR_EXPR
8941 && indirect_base1
8942 && TREE_CODE (base1) == VAR_DECL
8943 && auto_var_in_fn_p (base1, current_function_decl)
8944 && !indirect_base0
8945 && TREE_CODE (base0) == SSA_NAME
8946 && SSA_NAME_IS_DEFAULT_DEF (base0)
8947 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8949 if (code == NE_EXPR)
8950 return constant_boolean_node (1, type);
8951 else if (code == EQ_EXPR)
8952 return constant_boolean_node (0, type);
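   /* E.g. in  int f (int *p) { int l; return p == &l; }  the incoming
      value of P (a default-definition SSA name) can never point to the
      local L, so the comparison folds to false (and P != &l to true).  */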
8954 /* If we have equivalent bases we might be able to simplify. */
8955 else if (indirect_base0 == indirect_base1
8956 && operand_equal_p (base0, base1, 0))
8958 /* We can fold this expression to a constant if the non-constant
8959 offset parts are equal. */
8960 if ((offset0 == offset1
8961 || (offset0 && offset1
8962 && operand_equal_p (offset0, offset1, 0)))
8963 && (code == EQ_EXPR
8964 || code == NE_EXPR
8965 || (indirect_base0 && DECL_P (base0))
8966 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8969 if (code != EQ_EXPR
8970 && code != NE_EXPR
8971 && bitpos0 != bitpos1
8972 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8973 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8974 fold_overflow_warning (("assuming pointer wraparound does not "
8975 "occur when comparing P +- C1 with "
8976 "P +- C2"),
8977 WARN_STRICT_OVERFLOW_CONDITIONAL);
8979 switch (code)
8981 case EQ_EXPR:
8982 return constant_boolean_node (bitpos0 == bitpos1, type);
8983 case NE_EXPR:
8984 return constant_boolean_node (bitpos0 != bitpos1, type);
8985 case LT_EXPR:
8986 return constant_boolean_node (bitpos0 < bitpos1, type);
8987 case LE_EXPR:
8988 return constant_boolean_node (bitpos0 <= bitpos1, type);
8989 case GE_EXPR:
8990 return constant_boolean_node (bitpos0 >= bitpos1, type);
8991 case GT_EXPR:
8992 return constant_boolean_node (bitpos0 > bitpos1, type);
8993 default:;
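   /* For instance, &s.f4 > &s.f0 for two fields of the same struct S
      compares the constant bit positions of the members and folds to
      a constant here.  */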
8996 /* We can simplify the comparison to a comparison of the variable
8997 offset parts if the constant offset parts are equal.
8998 Be careful to use signed size type here because otherwise we
8999 mess with array offsets in the wrong way. This is possible
 9000      because pointer arithmetic is restricted to remain within an
9001 object and overflow on pointer differences is undefined as of
9002 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9003 else if (bitpos0 == bitpos1
9004 && ((code == EQ_EXPR || code == NE_EXPR)
9005 || (indirect_base0 && DECL_P (base0))
9006 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9008 /* By converting to signed size type we cover middle-end pointer
9009 arithmetic which operates on unsigned pointer types of size
9010 type size and ARRAY_REF offsets which are properly sign or
9011 zero extended from their type in case it is narrower than
9012 size type. */
9013 if (offset0 == NULL_TREE)
9014 offset0 = build_int_cst (ssizetype, 0);
9015 else
9016 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9017 if (offset1 == NULL_TREE)
9018 offset1 = build_int_cst (ssizetype, 0);
9019 else
9020 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9022 if (code != EQ_EXPR
9023 && code != NE_EXPR
9024 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9025 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9026 fold_overflow_warning (("assuming pointer wraparound does not "
9027 "occur when comparing P +- C1 with "
9028 "P +- C2"),
9029 WARN_STRICT_OVERFLOW_COMPARISON);
9031 return fold_build2_loc (loc, code, type, offset0, offset1);
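   /* E.g. P +p I == P +p J reduces to a comparison of the
      ssizetype-converted offsets, i.e. of I and J.  */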
9034 /* For non-equal bases we can simplify if they are addresses
9035 of local binding decls or constants. */
9036 else if (indirect_base0 && indirect_base1
9037 /* We know that !operand_equal_p (base0, base1, 0)
9038 because the if condition was false. But make
9039 sure two decls are not the same. */
9040 && base0 != base1
9041 && TREE_CODE (arg0) == ADDR_EXPR
9042 && TREE_CODE (arg1) == ADDR_EXPR
9043 && (((TREE_CODE (base0) == VAR_DECL
9044 || TREE_CODE (base0) == PARM_DECL)
9045 && (targetm.binds_local_p (base0)
9046 || CONSTANT_CLASS_P (base1)))
9047 || CONSTANT_CLASS_P (base0))
9048 && (((TREE_CODE (base1) == VAR_DECL
9049 || TREE_CODE (base1) == PARM_DECL)
9050 && (targetm.binds_local_p (base1)
9051 || CONSTANT_CLASS_P (base0)))
9052 || CONSTANT_CLASS_P (base1)))
9054 if (code == EQ_EXPR)
9055 return omit_two_operands_loc (loc, type, boolean_false_node,
9056 arg0, arg1);
9057 else if (code == NE_EXPR)
9058 return omit_two_operands_loc (loc, type, boolean_true_node,
9059 arg0, arg1);
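   /* E.g. for distinct locals A and B, &a == &b folds to false and
      &a != &b to true, while omit_two_operands_loc keeps any side
      effects of the operands.  */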
9061 /* For equal offsets we can simplify to a comparison of the
9062 base addresses. */
9063 else if (bitpos0 == bitpos1
9064 && (indirect_base0
9065 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9066 && (indirect_base1
9067 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9068 && ((offset0 == offset1)
9069 || (offset0 && offset1
9070 && operand_equal_p (offset0, offset1, 0))))
9072 if (indirect_base0)
9073 base0 = build_fold_addr_expr_loc (loc, base0);
9074 if (indirect_base1)
9075 base1 = build_fold_addr_expr_loc (loc, base1);
9076 return fold_build2_loc (loc, code, type, base0, base1);
9080 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9081 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9082 the resulting offset is smaller in absolute value than the
9083 original one. */
9084 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9085 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9086 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9087 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9088 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9089 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9090 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9092 tree const1 = TREE_OPERAND (arg0, 1);
9093 tree const2 = TREE_OPERAND (arg1, 1);
9094 tree variable1 = TREE_OPERAND (arg0, 0);
9095 tree variable2 = TREE_OPERAND (arg1, 0);
9096 tree cst;
9097 const char * const warnmsg = G_("assuming signed overflow does not "
9098 "occur when combining constants around "
9099 "a comparison");
9101 /* Put the constant on the side where it doesn't overflow and is
9102 of lower absolute value than before. */
9103 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9104 ? MINUS_EXPR : PLUS_EXPR,
9105 const2, const1);
9106 if (!TREE_OVERFLOW (cst)
9107 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9109 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9110 return fold_build2_loc (loc, code, type,
9111 variable1,
9112 fold_build2_loc (loc,
9113 TREE_CODE (arg1), TREE_TYPE (arg1),
9114 variable2, cst));
9117 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9118 ? MINUS_EXPR : PLUS_EXPR,
9119 const1, const2);
9120 if (!TREE_OVERFLOW (cst)
9121 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9123 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9124 return fold_build2_loc (loc, code, type,
9125 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9126 variable1, cst),
9127 variable2);
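   /* Illustration: for signed X and Y, (X + 3) < (Y + 5) becomes
      X < Y + 2; the combined constant 2 is smaller in absolute value
      than 5, so no new overflow can be introduced.  */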
9131 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9132 signed arithmetic case. That form is created by the compiler
9133 often enough for folding it to be of value. One example is in
9134 computing loop trip counts after Operator Strength Reduction. */
9135 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9136 && TREE_CODE (arg0) == MULT_EXPR
9137 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9139 && integer_zerop (arg1))
9141 tree const1 = TREE_OPERAND (arg0, 1);
9142 tree const2 = arg1; /* zero */
9143 tree variable1 = TREE_OPERAND (arg0, 0);
9144 enum tree_code cmp_code = code;
9146 /* Handle unfolded multiplication by zero. */
9147 if (integer_zerop (const1))
9148 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9150 fold_overflow_warning (("assuming signed overflow does not occur when "
9151 "eliminating multiplication in comparison "
9152 "with zero"),
9153 WARN_STRICT_OVERFLOW_COMPARISON);
9155 /* If const1 is negative we swap the sense of the comparison. */
9156 if (tree_int_cst_sgn (const1) < 0)
9157 cmp_code = swap_tree_comparison (cmp_code);
9159 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
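   /* E.g. for signed X, X * 4 > 0 folds to X > 0, and
      X * -2 > 0 folds to X < 0.  */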
9162 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9163 if (tem)
9164 return tem;
9166 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9168 tree targ0 = strip_float_extensions (arg0);
9169 tree targ1 = strip_float_extensions (arg1);
9170 tree newtype = TREE_TYPE (targ0);
9172 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9173 newtype = TREE_TYPE (targ1);
9175 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9176 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9177 return fold_build2_loc (loc, code, type,
9178 fold_convert_loc (loc, newtype, targ0),
9179 fold_convert_loc (loc, newtype, targ1));
9181 /* (-a) CMP (-b) -> b CMP a */
9182 if (TREE_CODE (arg0) == NEGATE_EXPR
9183 && TREE_CODE (arg1) == NEGATE_EXPR)
9184 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9185 TREE_OPERAND (arg0, 0));
9187 if (TREE_CODE (arg1) == REAL_CST)
9189 REAL_VALUE_TYPE cst;
9190 cst = TREE_REAL_CST (arg1);
9192 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9193 if (TREE_CODE (arg0) == NEGATE_EXPR)
9194 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9195 TREE_OPERAND (arg0, 0),
9196 build_real (TREE_TYPE (arg1),
9197 real_value_negate (&cst)));
9199 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9200 /* a CMP (-0) -> a CMP 0 */
9201 if (REAL_VALUE_MINUS_ZERO (cst))
9202 return fold_build2_loc (loc, code, type, arg0,
9203 build_real (TREE_TYPE (arg1), dconst0));
9205 /* x != NaN is always true, other ops are always false. */
9206 if (REAL_VALUE_ISNAN (cst)
9207 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9209 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9210 return omit_one_operand_loc (loc, type, tem, arg0);
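   /* So, e.g., X == __builtin_nan ("") folds to false and
      X != __builtin_nan ("") to true, with X still evaluated for
      its side effects.  */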
9213 /* Fold comparisons against infinity. */
9214 if (REAL_VALUE_ISINF (cst)
9215 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9217 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9218 if (tem != NULL_TREE)
9219 return tem;
9223 /* If this is a comparison of a real constant with a PLUS_EXPR
9224 or a MINUS_EXPR of a real constant, we can convert it into a
9225 comparison with a revised real constant as long as no overflow
9226 occurs when unsafe_math_optimizations are enabled. */
9227 if (flag_unsafe_math_optimizations
9228 && TREE_CODE (arg1) == REAL_CST
9229 && (TREE_CODE (arg0) == PLUS_EXPR
9230 || TREE_CODE (arg0) == MINUS_EXPR)
9231 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9232 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9233 ? MINUS_EXPR : PLUS_EXPR,
9234 arg1, TREE_OPERAND (arg0, 1)))
9235 && !TREE_OVERFLOW (tem))
9236 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
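   /* Illustration: with -funsafe-math-optimizations, X + 1.5 < 3.0
      becomes X < 1.5, provided folding 3.0 - 1.5 does not
      overflow.  */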
9238 /* Likewise, we can simplify a comparison of a real constant with
9239 a MINUS_EXPR whose first operand is also a real constant, i.e.
9240 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9241 floating-point types only if -fassociative-math is set. */
9242 if (flag_associative_math
9243 && TREE_CODE (arg1) == REAL_CST
9244 && TREE_CODE (arg0) == MINUS_EXPR
9245 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9246 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9247 arg1))
9248 && !TREE_OVERFLOW (tem))
9249 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9250 TREE_OPERAND (arg0, 1), tem);
9252 /* Fold comparisons against built-in math functions. */
9253 if (TREE_CODE (arg1) == REAL_CST
9254 && flag_unsafe_math_optimizations
9255 && ! flag_errno_math)
9257 enum built_in_function fcode = builtin_mathfn_code (arg0);
9259 if (fcode != END_BUILTINS)
9261 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9262 if (tem != NULL_TREE)
9263 return tem;
9268 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9269 && CONVERT_EXPR_P (arg0))
9271 /* If we are widening one operand of an integer comparison,
9272 see if the other operand is similarly being widened. Perhaps we
9273 can do the comparison in the narrower type. */
9274 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9275 if (tem)
9276 return tem;
9278 /* Or if we are changing signedness. */
9279 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9280 if (tem)
9281 return tem;
9284 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9285 constant, we can simplify it. */
9286 if (TREE_CODE (arg1) == INTEGER_CST
9287 && (TREE_CODE (arg0) == MIN_EXPR
9288 || TREE_CODE (arg0) == MAX_EXPR)
9289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9291 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9292 if (tem)
9293 return tem;
9296 /* Simplify comparison of something with itself. (For IEEE
9297 floating-point, we can only do some of these simplifications.) */
9298 if (operand_equal_p (arg0, arg1, 0))
9300 switch (code)
9302 case EQ_EXPR:
9303 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9304 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9305 return constant_boolean_node (1, type);
9306 break;
9308 case GE_EXPR:
9309 case LE_EXPR:
9310 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9311 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9312 return constant_boolean_node (1, type);
9313 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9315 case NE_EXPR:
9316 /* For NE, we can only do this simplification if integer
9317 or we don't honor IEEE floating point NaNs. */
9318 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9319 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9320 break;
9321 /* ... fall through ... */
9322 case GT_EXPR:
9323 case LT_EXPR:
9324 return constant_boolean_node (0, type);
9325 default:
9326 gcc_unreachable ();
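   /* E.g. X == X folds to true for integers; for IEEE floats that may
      honor NaNs, X >= X degrades to X == X instead, while X < X still
      folds to false.  */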
9330 /* If we are comparing an expression that just has comparisons
9331 of two integer values, arithmetic expressions of those comparisons,
9332 and constants, we can simplify it. There are only three cases
9333 to check: the two values can either be equal, the first can be
9334 greater, or the second can be greater. Fold the expression for
9335 those three values. Since each value must be 0 or 1, we have
9336 eight possibilities, each of which corresponds to the constant 0
9337 or 1 or one of the six possible comparisons.
9339 This handles common cases like (a > b) == 0 but also handles
9340 expressions like ((x > y) - (y > x)) > 0, which supposedly
9341 occur in macroized code. */
9343 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9345 tree cval1 = 0, cval2 = 0;
9346 int save_p = 0;
9348 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9349 /* Don't handle degenerate cases here; they should already
9350 have been handled anyway. */
9351 && cval1 != 0 && cval2 != 0
9352 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9353 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9354 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9355 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9356 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9357 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9358 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9360 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9361 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9363 /* We can't just pass T to eval_subst in case cval1 or cval2
9364 was the same as ARG1. */
9366 tree high_result
9367 = fold_build2_loc (loc, code, type,
9368 eval_subst (loc, arg0, cval1, maxval,
9369 cval2, minval),
9370 arg1);
9371 tree equal_result
9372 = fold_build2_loc (loc, code, type,
9373 eval_subst (loc, arg0, cval1, maxval,
9374 cval2, maxval),
9375 arg1);
9376 tree low_result
9377 = fold_build2_loc (loc, code, type,
9378 eval_subst (loc, arg0, cval1, minval,
9379 cval2, maxval),
9380 arg1);
9382 /* All three of these results should be 0 or 1. Confirm they are.
9383 Then use those values to select the proper code to use. */
9385 if (TREE_CODE (high_result) == INTEGER_CST
9386 && TREE_CODE (equal_result) == INTEGER_CST
9387 && TREE_CODE (low_result) == INTEGER_CST)
9389 /* Make a 3-bit mask with the high-order bit being the
 9390        value for `>', the next for `=', and the low for `<'. */
9391 switch ((integer_onep (high_result) * 4)
9392 + (integer_onep (equal_result) * 2)
9393 + integer_onep (low_result))
9395 case 0:
9396 /* Always false. */
9397 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9398 case 1:
9399 code = LT_EXPR;
9400 break;
9401 case 2:
9402 code = EQ_EXPR;
9403 break;
9404 case 3:
9405 code = LE_EXPR;
9406 break;
9407 case 4:
9408 code = GT_EXPR;
9409 break;
9410 case 5:
9411 code = NE_EXPR;
9412 break;
9413 case 6:
9414 code = GE_EXPR;
9415 break;
9416 case 7:
9417 /* Always true. */
9418 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9421 if (save_p)
9423 tem = save_expr (build2 (code, type, cval1, cval2));
9424 SET_EXPR_LOCATION (tem, loc);
9425 return tem;
9427 return fold_build2_loc (loc, code, type, cval1, cval2);
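   /* Worked example: for (a > b) == 0 the three trial evaluations
      yield high_result 0, equal_result 1 and low_result 1, i.e.
      mask 3, so the whole expression folds to a <= b.  */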
9432 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9433 into a single range test. */
9434 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9435 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9436 && TREE_CODE (arg1) == INTEGER_CST
9437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9438 && !integer_zerop (TREE_OPERAND (arg0, 1))
9439 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9440 && !TREE_OVERFLOW (arg1))
9442 tem = fold_div_compare (loc, code, type, arg0, arg1);
9443 if (tem != NULL_TREE)
9444 return tem;
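   /* E.g. for unsigned X, X / 4 == 2 holds exactly for X in [8, 11],
      so fold_div_compare can rewrite it as a single range test such
      as X - 8 <= 3.  */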
9447 /* Fold ~X op ~Y as Y op X. */
9448 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9449 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9451 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9452 return fold_build2_loc (loc, code, type,
9453 fold_convert_loc (loc, cmp_type,
9454 TREE_OPERAND (arg1, 0)),
9455 TREE_OPERAND (arg0, 0));
9458 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9459 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9460 && TREE_CODE (arg1) == INTEGER_CST)
9462 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9463 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9464 TREE_OPERAND (arg0, 0),
9465 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9466 fold_convert_loc (loc, cmp_type, arg1)));
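   /* E.g. ~X < 5 becomes X > ~5, i.e. X > -6 for signed X, since
      bitwise NOT reverses the order.  */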
9469 return NULL_TREE;
9473 /* Subroutine of fold_binary. Optimize complex multiplications of the
9474 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9475 argument EXPR represents the expression "z" of type TYPE. */
9477 static tree
9478 fold_mult_zconjz (location_t loc, tree type, tree expr)
9480 tree itype = TREE_TYPE (type);
9481 tree rpart, ipart, tem;
9483 if (TREE_CODE (expr) == COMPLEX_EXPR)
9485 rpart = TREE_OPERAND (expr, 0);
9486 ipart = TREE_OPERAND (expr, 1);
9488 else if (TREE_CODE (expr) == COMPLEX_CST)
9490 rpart = TREE_REALPART (expr);
9491 ipart = TREE_IMAGPART (expr);
9493 else
9495 expr = save_expr (expr);
9496 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9497 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9500 rpart = save_expr (rpart);
9501 ipart = save_expr (ipart);
9502 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9503 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9504 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9505 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9506 build_zero_cst (itype));
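 /* Thus for z = a + b*i, z * conj(z) becomes the complex value
    (a*a + b*b) + 0i, avoiding a general complex multiplication.  */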
9510 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9511 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9512 guarantees that P and N have the same least significant log2(M) bits.
9513 N is not otherwise constrained. In particular, N is not normalized to
9514 0 <= N < M as is common. In general, the precise value of P is unknown.
9515 M is chosen as large as possible such that constant N can be determined.
9517 Returns M and sets *RESIDUE to N.
9519 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9520 account. This is not always possible due to PR 35705.
9523 static unsigned HOST_WIDE_INT
9524 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9525 bool allow_func_align)
9527 enum tree_code code;
9529 *residue = 0;
9531 code = TREE_CODE (expr);
9532 if (code == ADDR_EXPR)
9534 unsigned int bitalign;
9535 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9536 *residue /= BITS_PER_UNIT;
9537 return bitalign / BITS_PER_UNIT;
9539 else if (code == POINTER_PLUS_EXPR)
9541 tree op0, op1;
9542 unsigned HOST_WIDE_INT modulus;
9543 enum tree_code inner_code;
9545 op0 = TREE_OPERAND (expr, 0);
9546 STRIP_NOPS (op0);
9547 modulus = get_pointer_modulus_and_residue (op0, residue,
9548 allow_func_align);
9550 op1 = TREE_OPERAND (expr, 1);
9551 STRIP_NOPS (op1);
9552 inner_code = TREE_CODE (op1);
9553 if (inner_code == INTEGER_CST)
9555 *residue += TREE_INT_CST_LOW (op1);
9556 return modulus;
9558 else if (inner_code == MULT_EXPR)
9560 op1 = TREE_OPERAND (op1, 1);
9561 if (TREE_CODE (op1) == INTEGER_CST)
9563 unsigned HOST_WIDE_INT align;
9565 /* Compute the greatest power-of-2 divisor of op1. */
9566 align = TREE_INT_CST_LOW (op1);
9567 align &= -align;
 9569              /* If align is non-zero and less than modulus, replace
 9570                 modulus with align.  If align is 0, then either op1 is 0
9571 or the greatest power-of-2 divisor of op1 doesn't fit in an
9572 unsigned HOST_WIDE_INT. In either case, no additional
9573 constraint is imposed. */
9574 if (align)
9575 modulus = MIN (modulus, align);
9577 return modulus;
9582 /* If we get here, we were unable to determine anything useful about the
9583 expression. */
9584 return 1;
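 /* Example: for &buf[6] with BUF known to be 16-byte aligned, this
    returns modulus 16 and sets *RESIDUE to 6; the pointer value is
    congruent to 6 modulo 16.  */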
9587 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9588 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9590 static bool
9591 vec_cst_ctor_to_array (tree arg, tree *elts)
9593 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9595 if (TREE_CODE (arg) == VECTOR_CST)
9597 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9598 elts[i] = VECTOR_CST_ELT (arg, i);
9600 else if (TREE_CODE (arg) == CONSTRUCTOR)
9602 constructor_elt *elt;
9604 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9605 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9606 return false;
9607 else
9608 elts[i] = elt->value;
9610 else
9611 return false;
9612 for (; i < nelts; i++)
9613 elts[i]
9614 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9615 return true;
9618 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9619 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9620 NULL_TREE otherwise. */
9622 static tree
9623 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9625 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9626 tree *elts;
9627 bool need_ctor = false;
9629 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9630 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9631 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9632 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9633 return NULL_TREE;
9635 elts = XALLOCAVEC (tree, nelts * 3);
9636 if (!vec_cst_ctor_to_array (arg0, elts)
9637 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9638 return NULL_TREE;
9640 for (i = 0; i < nelts; i++)
9642 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9643 need_ctor = true;
9644 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9647 if (need_ctor)
9649 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9650 for (i = 0; i < nelts; i++)
9651 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9652 return build_constructor (type, v);
9654 else
9655 return build_vector (type, &elts[2 * nelts]);
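 /* E.g. permuting the constant vectors { 1, 2 } and { 3, 4 } with
    selector { 0, 3 } yields { 1, 4 }.  */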
 9658 /* Try to fold a pointer difference of type TYPE between two address
 9659    expressions of array references AREF0 and AREF1 using location LOC.
 9660    Return a simplified expression for the difference or NULL_TREE. */
9662 static tree
9663 fold_addr_of_array_ref_difference (location_t loc, tree type,
9664 tree aref0, tree aref1)
9666 tree base0 = TREE_OPERAND (aref0, 0);
9667 tree base1 = TREE_OPERAND (aref1, 0);
9668 tree base_offset = build_int_cst (type, 0);
9670 /* If the bases are array references as well, recurse. If the bases
9671 are pointer indirections compute the difference of the pointers.
9672 If the bases are equal, we are set. */
9673 if ((TREE_CODE (base0) == ARRAY_REF
9674 && TREE_CODE (base1) == ARRAY_REF
9675 && (base_offset
9676 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9677 || (INDIRECT_REF_P (base0)
9678 && INDIRECT_REF_P (base1)
9679 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9680 TREE_OPERAND (base0, 0),
9681 TREE_OPERAND (base1, 0))))
9682 || operand_equal_p (base0, base1, 0))
9684 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9685 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9686 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9687 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9688 return fold_build2_loc (loc, PLUS_EXPR, type,
9689 base_offset,
9690 fold_build2_loc (loc, MULT_EXPR, type,
9691 diff, esz));
9693 return NULL_TREE;
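 /* E.g. for int a[], the address difference &a[i] - &a[j] (in bytes)
    folds to 0 + (i - j) * 4 on a target with 4-byte int.  */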
9696 /* If the real or vector real constant CST of type TYPE has an exact
9697 inverse, return it, else return NULL. */
9699 static tree
9700 exact_inverse (tree type, tree cst)
9702 REAL_VALUE_TYPE r;
9703 tree unit_type, *elts;
9704 enum machine_mode mode;
9705 unsigned vec_nelts, i;
9707 switch (TREE_CODE (cst))
9709 case REAL_CST:
9710 r = TREE_REAL_CST (cst);
9712 if (exact_real_inverse (TYPE_MODE (type), &r))
9713 return build_real (type, r);
9715 return NULL_TREE;
9717 case VECTOR_CST:
9718 vec_nelts = VECTOR_CST_NELTS (cst);
9719 elts = XALLOCAVEC (tree, vec_nelts);
9720 unit_type = TREE_TYPE (type);
9721 mode = TYPE_MODE (unit_type);
9723 for (i = 0; i < vec_nelts; i++)
9725 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9726 if (!exact_real_inverse (mode, &r))
9727 return NULL_TREE;
9728 elts[i] = build_real (unit_type, r);
9731 return build_vector (type, elts);
9733 default:
9734 return NULL_TREE;
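 /* E.g. 4.0 yields the exact inverse 0.25, so a division by 4.0 can
    safely become a multiplication by 0.25, whereas 3.0 yields
    NULL_TREE since 1/3 is not exactly representable in binary.  */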
9738 /* Fold a binary expression of code CODE and type TYPE with operands
9739 OP0 and OP1. LOC is the location of the resulting expression.
9740 Return the folded expression if folding is successful. Otherwise,
9741 return NULL_TREE. */
9743 tree
9744 fold_binary_loc (location_t loc,
9745 enum tree_code code, tree type, tree op0, tree op1)
9747 enum tree_code_class kind = TREE_CODE_CLASS (code);
9748 tree arg0, arg1, tem;
9749 tree t1 = NULL_TREE;
9750 bool strict_overflow_p;
9752 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9753 && TREE_CODE_LENGTH (code) == 2
9754 && op0 != NULL_TREE
9755 && op1 != NULL_TREE);
9757 arg0 = op0;
9758 arg1 = op1;
9760 /* Strip any conversions that don't change the mode. This is
9761 safe for every expression, except for a comparison expression
9762 because its signedness is derived from its operands. So, in
9763 the latter case, only strip conversions that don't change the
9764 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9765 preserved.
9767 Note that this is done as an internal manipulation within the
9768 constant folder, in order to find the simplest representation
 9769      of the arguments so that their form can be studied.  In any
 9770      case, the appropriate type conversions should be put back in
9771 the tree that will get out of the constant folder. */
9773 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9775 STRIP_SIGN_NOPS (arg0);
9776 STRIP_SIGN_NOPS (arg1);
9778 else
9780 STRIP_NOPS (arg0);
9781 STRIP_NOPS (arg1);
9784 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9785 constant but we can't do arithmetic on them. */
9786 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9787 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9788 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9789 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9790 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9791 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9793 if (kind == tcc_binary)
9795 /* Make sure type and arg0 have the same saturating flag. */
9796 gcc_assert (TYPE_SATURATING (type)
9797 == TYPE_SATURATING (TREE_TYPE (arg0)));
9798 tem = const_binop (code, arg0, arg1);
9800 else if (kind == tcc_comparison)
9801 tem = fold_relational_const (code, type, arg0, arg1);
9802 else
9803 tem = NULL_TREE;
9805 if (tem != NULL_TREE)
9807 if (TREE_TYPE (tem) != type)
9808 tem = fold_convert_loc (loc, type, tem);
9809 return tem;
9813 /* If this is a commutative operation, and ARG0 is a constant, move it
9814 to ARG1 to reduce the number of tests below. */
9815 if (commutative_tree_code (code)
9816 && tree_swap_operands_p (arg0, arg1, true))
9817 return fold_build2_loc (loc, code, type, op1, op0);
9819 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9821 First check for cases where an arithmetic operation is applied to a
9822 compound, conditional, or comparison operation. Push the arithmetic
9823 operation inside the compound or conditional to see if any folding
9824 can then be done. Convert comparison to conditional for this purpose.
 9825      This also optimizes non-constant cases that used to be done in
9826 expand_expr.
 9828      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
 9829      where one of the operands is a comparison and the other is a comparison, a
9830 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9831 code below would make the expression more complex. Change it to a
9832 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9833 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9835 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9836 || code == EQ_EXPR || code == NE_EXPR)
9837 && TREE_CODE (type) != VECTOR_TYPE
9838 && ((truth_value_p (TREE_CODE (arg0))
9839 && (truth_value_p (TREE_CODE (arg1))
9840 || (TREE_CODE (arg1) == BIT_AND_EXPR
9841 && integer_onep (TREE_OPERAND (arg1, 1)))))
9842 || (truth_value_p (TREE_CODE (arg1))
9843 && (truth_value_p (TREE_CODE (arg0))
9844 || (TREE_CODE (arg0) == BIT_AND_EXPR
9845 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9847 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9848 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9849 : TRUTH_XOR_EXPR,
9850 boolean_type_node,
9851 fold_convert_loc (loc, boolean_type_node, arg0),
9852 fold_convert_loc (loc, boolean_type_node, arg1));
9854 if (code == EQ_EXPR)
9855 tem = invert_truthvalue_loc (loc, tem);
9857 return fold_convert_loc (loc, type, tem);
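   /* E.g. (a < b) & (c < d) becomes the truth operation
      (a < b) && (c < d), and (a < b) == (c < d) becomes the
      inversion of (a < b) ^ (c < d).  */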
9860 if (TREE_CODE_CLASS (code) == tcc_binary
9861 || TREE_CODE_CLASS (code) == tcc_comparison)
9863 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9865 tem = fold_build2_loc (loc, code, type,
9866 fold_convert_loc (loc, TREE_TYPE (op0),
9867 TREE_OPERAND (arg0, 1)), op1);
9868 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9869 tem);
9871 if (TREE_CODE (arg1) == COMPOUND_EXPR
9872 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9874 tem = fold_build2_loc (loc, code, type, op0,
9875 fold_convert_loc (loc, TREE_TYPE (op1),
9876 TREE_OPERAND (arg1, 1)));
9877 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9878 tem);
9881 if (TREE_CODE (arg0) == COND_EXPR
9882 || TREE_CODE (arg0) == VEC_COND_EXPR
9883 || COMPARISON_CLASS_P (arg0))
9885 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9886 arg0, arg1,
9887 /*cond_first_p=*/1);
9888 if (tem != NULL_TREE)
9889 return tem;
9892 if (TREE_CODE (arg1) == COND_EXPR
9893 || TREE_CODE (arg1) == VEC_COND_EXPR
9894 || COMPARISON_CLASS_P (arg1))
9896 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9897 arg1, arg0,
9898 /*cond_first_p=*/0);
9899 if (tem != NULL_TREE)
9900 return tem;
9904 switch (code)
9906 case MEM_REF:
9907 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9908 if (TREE_CODE (arg0) == ADDR_EXPR
9909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9911 tree iref = TREE_OPERAND (arg0, 0);
9912 return fold_build2 (MEM_REF, type,
9913 TREE_OPERAND (iref, 0),
9914 int_const_binop (PLUS_EXPR, arg1,
9915 TREE_OPERAND (iref, 1)));
9918 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9919 if (TREE_CODE (arg0) == ADDR_EXPR
9920 && handled_component_p (TREE_OPERAND (arg0, 0)))
9922 tree base;
9923 HOST_WIDE_INT coffset;
9924 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9925 &coffset);
9926 if (!base)
9927 return NULL_TREE;
9928 return fold_build2 (MEM_REF, type,
9929 build_fold_addr_expr (base),
9930 int_const_binop (PLUS_EXPR, arg1,
9931 size_int (coffset)));
9934 return NULL_TREE;
9936 case POINTER_PLUS_EXPR:
9937 /* 0 +p index -> (type)index */
9938 if (integer_zerop (arg0))
9939 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9941 /* PTR +p 0 -> PTR */
9942 if (integer_zerop (arg1))
9943 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9945 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9946 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9947 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9948 return fold_convert_loc (loc, type,
9949 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9950 fold_convert_loc (loc, sizetype,
9951 arg1),
9952 fold_convert_loc (loc, sizetype,
9953 arg0)));
9955 /* (PTR +p B) +p A -> PTR +p (B + A) */
9956 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9958 tree inner;
9959 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9960 tree arg00 = TREE_OPERAND (arg0, 0);
9961 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9962 arg01, fold_convert_loc (loc, sizetype, arg1));
9963 return fold_convert_loc (loc, type,
9964 fold_build_pointer_plus_loc (loc,
9965 arg00, inner));
9968 /* PTR_CST +p CST -> CST1 */
9969 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9970 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9971 fold_convert_loc (loc, type, arg1));
 9973       /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
 9974          of the array.  The loop optimizer sometimes produces this type of
 9975          expression.  */
9976 if (TREE_CODE (arg0) == ADDR_EXPR)
9978 tem = try_move_mult_to_index (loc, arg0,
9979 fold_convert_loc (loc,
9980 ssizetype, arg1));
9981 if (tem)
9982 return fold_convert_loc (loc, type, tem);
9985 return NULL_TREE;
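   /* E.g. (p +p 4) +p 8 reassociates to p +p 12 via the
      (PTR +p B) +p A rule above.  */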
9987 case PLUS_EXPR:
9988 /* A + (-B) -> A - B */
9989 if (TREE_CODE (arg1) == NEGATE_EXPR)
9990 return fold_build2_loc (loc, MINUS_EXPR, type,
9991 fold_convert_loc (loc, type, arg0),
9992 fold_convert_loc (loc, type,
9993 TREE_OPERAND (arg1, 0)));
9994 /* (-A) + B -> B - A */
9995 if (TREE_CODE (arg0) == NEGATE_EXPR
9996 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9997 return fold_build2_loc (loc, MINUS_EXPR, type,
9998 fold_convert_loc (loc, type, arg1),
9999 fold_convert_loc (loc, type,
10000 TREE_OPERAND (arg0, 0)));
10002 if (INTEGRAL_TYPE_P (type))
10004 /* Convert ~A + 1 to -A. */
10005 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10006 && integer_onep (arg1))
10007 return fold_build1_loc (loc, NEGATE_EXPR, type,
10008 fold_convert_loc (loc, type,
10009 TREE_OPERAND (arg0, 0)));
10011 /* ~X + X is -1. */
10012 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10013 && !TYPE_OVERFLOW_TRAPS (type))
10015 tree tem = TREE_OPERAND (arg0, 0);
10017 STRIP_NOPS (tem);
10018 if (operand_equal_p (tem, arg1, 0))
10020 t1 = build_int_cst_type (type, -1);
10021 return omit_one_operand_loc (loc, type, t1, arg1);
10025 /* X + ~X is -1. */
10026 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10027 && !TYPE_OVERFLOW_TRAPS (type))
10029 tree tem = TREE_OPERAND (arg1, 0);
10031 STRIP_NOPS (tem);
10032 if (operand_equal_p (arg0, tem, 0))
10034 t1 = build_int_cst_type (type, -1);
10035 return omit_one_operand_loc (loc, type, t1, arg0);
10039 /* X + (X / CST) * -CST is X % CST. */
10040 if (TREE_CODE (arg1) == MULT_EXPR
10041 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10042 && operand_equal_p (arg0,
10043 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10045 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10046 tree cst1 = TREE_OPERAND (arg1, 1);
10047 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10048 cst1, cst0);
10049 if (sum && integer_zerop (sum))
10050 return fold_convert_loc (loc, type,
10051 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10052 TREE_TYPE (arg0), arg0,
10053 cst0));
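   /* E.g. X + (X / 16) * -16 folds to X % 16; the two constants sum
      to zero, so the addend equals -(X - X % 16).  */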
10057 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10058 one. Make sure the type is not saturating and has the signedness of
10059 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10060 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10061 if ((TREE_CODE (arg0) == MULT_EXPR
10062 || TREE_CODE (arg1) == MULT_EXPR)
10063 && !TYPE_SATURATING (type)
10064 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10065 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10066 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10068 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10069 if (tem)
10070 return tem;
10073 if (! FLOAT_TYPE_P (type))
10075 if (integer_zerop (arg1))
10076 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10078 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10079 with a constant, and the two constants have no bits in common,
10080 we should treat this as a BIT_IOR_EXPR since this may produce more
10081 simplifications. */
10082 if (TREE_CODE (arg0) == BIT_AND_EXPR
10083 && TREE_CODE (arg1) == BIT_AND_EXPR
10084 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10085 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10086 && integer_zerop (const_binop (BIT_AND_EXPR,
10087 TREE_OPERAND (arg0, 1),
10088 TREE_OPERAND (arg1, 1))))
10090 code = BIT_IOR_EXPR;
10091 goto bit_ior;
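   /* E.g. (x & 0xF0) + (y & 0x0F) can produce no carries between the
      two terms, so it is handled as (x & 0xF0) | (y & 0x0F).  */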
10094 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10095 (plus (plus (mult) (mult)) (foo)) so that we can
10096 take advantage of the factoring cases below. */
10097 if (TYPE_OVERFLOW_WRAPS (type)
10098 && (((TREE_CODE (arg0) == PLUS_EXPR
10099 || TREE_CODE (arg0) == MINUS_EXPR)
10100 && TREE_CODE (arg1) == MULT_EXPR)
10101 || ((TREE_CODE (arg1) == PLUS_EXPR
10102 || TREE_CODE (arg1) == MINUS_EXPR)
10103 && TREE_CODE (arg0) == MULT_EXPR)))
10105 tree parg0, parg1, parg, marg;
10106 enum tree_code pcode;
10108 if (TREE_CODE (arg1) == MULT_EXPR)
10109 parg = arg0, marg = arg1;
10110 else
10111 parg = arg1, marg = arg0;
10112 pcode = TREE_CODE (parg);
10113 parg0 = TREE_OPERAND (parg, 0);
10114 parg1 = TREE_OPERAND (parg, 1);
10115 STRIP_NOPS (parg0);
10116 STRIP_NOPS (parg1);
10118 if (TREE_CODE (parg0) == MULT_EXPR
10119 && TREE_CODE (parg1) != MULT_EXPR)
10120 return fold_build2_loc (loc, pcode, type,
10121 fold_build2_loc (loc, PLUS_EXPR, type,
10122 fold_convert_loc (loc, type,
10123 parg0),
10124 fold_convert_loc (loc, type,
10125 marg)),
10126 fold_convert_loc (loc, type, parg1));
10127 if (TREE_CODE (parg0) != MULT_EXPR
10128 && TREE_CODE (parg1) == MULT_EXPR)
10129 return
10130 fold_build2_loc (loc, PLUS_EXPR, type,
10131 fold_convert_loc (loc, type, parg0),
10132 fold_build2_loc (loc, pcode, type,
10133 fold_convert_loc (loc, type, marg),
10134 fold_convert_loc (loc, type,
10135 parg1)));
10138 else
10140 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10141 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10142 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10144 /* Likewise if the operands are reversed. */
10145 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10146 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10148 /* Convert X + -C into X - C. */
10149 if (TREE_CODE (arg1) == REAL_CST
10150 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10152 tem = fold_negate_const (arg1, type);
10153 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10154 return fold_build2_loc (loc, MINUS_EXPR, type,
10155 fold_convert_loc (loc, type, arg0),
10156 fold_convert_loc (loc, type, tem));
10159 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10160 to __complex__ ( x, y ). This is not the same for SNaNs or
10161 if signed zeros are involved. */
10162 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10163 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10164 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10166 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10167 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10168 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10169 bool arg0rz = false, arg0iz = false;
10170 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10171 || (arg0i && (arg0iz = real_zerop (arg0i))))
10173 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10174 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10175 if (arg0rz && arg1i && real_zerop (arg1i))
10177 tree rp = arg1r ? arg1r
10178 : build1 (REALPART_EXPR, rtype, arg1);
10179 tree ip = arg0i ? arg0i
10180 : build1 (IMAGPART_EXPR, rtype, arg0);
10181 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10183 else if (arg0iz && arg1r && real_zerop (arg1r))
10185 tree rp = arg0r ? arg0r
10186 : build1 (REALPART_EXPR, rtype, arg0);
10187 tree ip = arg1i ? arg1i
10188 : build1 (IMAGPART_EXPR, rtype, arg1);
10189 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10194 if (flag_unsafe_math_optimizations
10195 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10196 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10197 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10198 return tem;
10200 /* Convert x+x into x*2.0. */
10201 if (operand_equal_p (arg0, arg1, 0)
10202 && SCALAR_FLOAT_TYPE_P (type))
10203 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10204 build_real (type, dconst2));
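   /* This rewrite is exact in binary floating point (X + X and
      X * 2.0 round identically), so it needs no unsafe-math guard.  */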
10206 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10207 We associate floats only if the user has specified
10208 -fassociative-math. */
10209 if (flag_associative_math
10210 && TREE_CODE (arg1) == PLUS_EXPR
10211 && TREE_CODE (arg0) != MULT_EXPR)
10213 tree tree10 = TREE_OPERAND (arg1, 0);
10214 tree tree11 = TREE_OPERAND (arg1, 1);
10215 if (TREE_CODE (tree11) == MULT_EXPR
10216 && TREE_CODE (tree10) == MULT_EXPR)
10218 tree tree0;
10219 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10220 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10223 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10224 We associate floats only if the user has specified
10225 -fassociative-math. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg0) == PLUS_EXPR
10228 && TREE_CODE (arg1) != MULT_EXPR)
10230 tree tree00 = TREE_OPERAND (arg0, 0);
10231 tree tree01 = TREE_OPERAND (arg0, 1);
10232 if (TREE_CODE (tree01) == MULT_EXPR
10233 && TREE_CODE (tree00) == MULT_EXPR)
10235 tree tree0;
10236 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10237 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10242 bit_rotate:
10243 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10244 is a rotate of A by C1 bits. */
10245 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10246 is a rotate of A by B bits. */
10248 enum tree_code code0, code1;
10249 tree rtype;
10250 code0 = TREE_CODE (arg0);
10251 code1 = TREE_CODE (arg1);
10252 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10253 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10254 && operand_equal_p (TREE_OPERAND (arg0, 0),
10255 TREE_OPERAND (arg1, 0), 0)
10256 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10257 TYPE_UNSIGNED (rtype))
10258 /* Only create rotates in complete modes. Other cases are not
10259 expanded properly. */
10260 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10262 tree tree01, tree11;
10263 enum tree_code code01, code11;
10265 tree01 = TREE_OPERAND (arg0, 1);
10266 tree11 = TREE_OPERAND (arg1, 1);
10267 STRIP_NOPS (tree01);
10268 STRIP_NOPS (tree11);
10269 code01 = TREE_CODE (tree01);
10270 code11 = TREE_CODE (tree11);
10271 if (code01 == INTEGER_CST
10272 && code11 == INTEGER_CST
10273 && TREE_INT_CST_HIGH (tree01) == 0
10274 && TREE_INT_CST_HIGH (tree11) == 0
10275 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10276 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10278 tem = build2_loc (loc, LROTATE_EXPR,
10279 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10280 TREE_OPERAND (arg0, 0),
10281 code0 == LSHIFT_EXPR ? tree01 : tree11);
10282 return fold_convert_loc (loc, type, tem);
10284 else if (code11 == MINUS_EXPR)
10286 tree tree110, tree111;
10287 tree110 = TREE_OPERAND (tree11, 0);
10288 tree111 = TREE_OPERAND (tree11, 1);
10289 STRIP_NOPS (tree110);
10290 STRIP_NOPS (tree111);
10291 if (TREE_CODE (tree110) == INTEGER_CST
10292 && 0 == compare_tree_int (tree110,
10293 TYPE_PRECISION
10294 (TREE_TYPE (TREE_OPERAND
10295 (arg0, 0))))
10296 && operand_equal_p (tree01, tree111, 0))
10297 return
10298 fold_convert_loc (loc, type,
10299 build2 ((code0 == LSHIFT_EXPR
10300 ? LROTATE_EXPR
10301 : RROTATE_EXPR),
10302 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10303 TREE_OPERAND (arg0, 0), tree01));
10305 else if (code01 == MINUS_EXPR)
10307 tree tree010, tree011;
10308 tree010 = TREE_OPERAND (tree01, 0);
10309 tree011 = TREE_OPERAND (tree01, 1);
10310 STRIP_NOPS (tree010);
10311 STRIP_NOPS (tree011);
10312 if (TREE_CODE (tree010) == INTEGER_CST
10313 && 0 == compare_tree_int (tree010,
10314 TYPE_PRECISION
10315 (TREE_TYPE (TREE_OPERAND
10316 (arg0, 0))))
10317 && operand_equal_p (tree11, tree011, 0))
10318 return fold_convert_loc
10319 (loc, type,
10320 build2 ((code0 != LSHIFT_EXPR
10321 ? LROTATE_EXPR
10322 : RROTATE_EXPR),
10323 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10324 TREE_OPERAND (arg0, 0), tree11));
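   /* E.g. for unsigned 32-bit X, both (X << 3) + (X >> 29) and
      (X << n) + (X >> (32 - n)) are recognized as rotates of X.  */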
10329 associate:
 10330     /* In most languages, we can't associate operations on floats through
10331 parentheses. Rather than remember where the parentheses were, we
10332 don't associate floats at all, unless the user has specified
10333 -fassociative-math.
10334 And, we need to make sure type is not saturating. */
10336 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10337 && !TYPE_SATURATING (type))
10339 tree var0, con0, lit0, minus_lit0;
10340 tree var1, con1, lit1, minus_lit1;
10341 bool ok = true;
10343 /* Split both trees into variables, constants, and literals. Then
10344 associate each group together, the constants with literals,
10345 then the result with variables. This increases the chances of
10346 literals being recombined later and of generating relocatable
10347 expressions for the sum of a constant and literal. */
10348 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10349 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10350 code == MINUS_EXPR);
10352 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10353 if (code == MINUS_EXPR)
10354 code = PLUS_EXPR;
10356 /* With undefined overflow we can only associate constants with one
10357 variable, and constants whose association doesn't overflow. */
10358 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10359 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10361 if (var0 && var1)
10363 tree tmp0 = var0;
10364 tree tmp1 = var1;
10366 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10367 tmp0 = TREE_OPERAND (tmp0, 0);
10368 if (CONVERT_EXPR_P (tmp0)
10369 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10371 <= TYPE_PRECISION (type)))
10372 tmp0 = TREE_OPERAND (tmp0, 0);
10373 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10374 tmp1 = TREE_OPERAND (tmp1, 0);
10375 if (CONVERT_EXPR_P (tmp1)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10378 <= TYPE_PRECISION (type)))
10379 tmp1 = TREE_OPERAND (tmp1, 0);
10380 /* The only case we can still associate with two variables
10381 is if they are the same, modulo negation and bit-pattern
10382 preserving conversions. */
10383 if (!operand_equal_p (tmp0, tmp1, 0))
10384 ok = false;
10387 if (ok && lit0 && lit1)
10389 tree tmp0 = fold_convert (type, lit0);
10390 tree tmp1 = fold_convert (type, lit1);
10392 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10393 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10394 ok = false;
10398 /* Only do something if we found more than two objects. Otherwise,
10399 nothing has changed and we risk infinite recursion. */
10400 if (ok
10401 && (2 < ((var0 != 0) + (var1 != 0)
10402 + (con0 != 0) + (con1 != 0)
10403 + (lit0 != 0) + (lit1 != 0)
10404 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10406 var0 = associate_trees (loc, var0, var1, code, type);
10407 con0 = associate_trees (loc, con0, con1, code, type);
10408 lit0 = associate_trees (loc, lit0, lit1, code, type);
10409 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10411 /* Preserve the MINUS_EXPR if the negative part of the literal is
10412 greater than the positive part. Otherwise, the multiplicative
 10413          folding code (i.e. extract_muldiv) may be fooled when
10414 unsigned constants are subtracted, like in the following
10415 example: ((X*2 + 4) - 8U)/2. */
10416 if (minus_lit0 && lit0)
10418 if (TREE_CODE (lit0) == INTEGER_CST
10419 && TREE_CODE (minus_lit0) == INTEGER_CST
10420 && tree_int_cst_lt (lit0, minus_lit0))
10422 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10423 MINUS_EXPR, type);
10424 lit0 = 0;
10426 else
10428 lit0 = associate_trees (loc, lit0, minus_lit0,
10429 MINUS_EXPR, type);
10430 minus_lit0 = 0;
10433 if (minus_lit0)
10435 if (con0 == 0)
10436 return
10437 fold_convert_loc (loc, type,
10438 associate_trees (loc, var0, minus_lit0,
10439 MINUS_EXPR, type));
10440 else
10442 con0 = associate_trees (loc, con0, minus_lit0,
10443 MINUS_EXPR, type);
10444 return
10445 fold_convert_loc (loc, type,
10446 associate_trees (loc, var0, con0,
10447 PLUS_EXPR, type));
10451 con0 = associate_trees (loc, con0, lit0, code, type);
10452 return
10453 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10454 code, type));
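   /* E.g. (X + 1) + (Y + 2) splits into variables { X, Y } and
      literals { 1, 2 } and is rebuilt as (X + Y) + 3, subject to the
      overflow checks above when overflow is undefined.  */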
10458 return NULL_TREE;
10460 case MINUS_EXPR:
10461 /* Pointer simplifications for subtraction, simple reassociations. */
10462 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10464 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10465 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10466 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10468 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10469 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10470 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10471 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10472 return fold_build2_loc (loc, PLUS_EXPR, type,
10473 fold_build2_loc (loc, MINUS_EXPR, type,
10474 arg00, arg10),
10475 fold_build2_loc (loc, MINUS_EXPR, type,
10476 arg01, arg11));
10478 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10479 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10481 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10482 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10483 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10484 fold_convert_loc (loc, type, arg1));
10485 if (tmp)
10486 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10489 /* A - (-B) -> A + B */
10490 if (TREE_CODE (arg1) == NEGATE_EXPR)
10491 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10492 fold_convert_loc (loc, type,
10493 TREE_OPERAND (arg1, 0)));
10494 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10495 if (TREE_CODE (arg0) == NEGATE_EXPR
10496 && (FLOAT_TYPE_P (type)
10497 || INTEGRAL_TYPE_P (type))
10498 && negate_expr_p (arg1)
10499 && reorder_operands_p (arg0, arg1))
10500 return fold_build2_loc (loc, MINUS_EXPR, type,
10501 fold_convert_loc (loc, type,
10502 negate_expr (arg1)),
10503 fold_convert_loc (loc, type,
10504 TREE_OPERAND (arg0, 0)));
10505 /* Convert -A - 1 to ~A. */
10506 if (INTEGRAL_TYPE_P (type)
10507 && TREE_CODE (arg0) == NEGATE_EXPR
10508 && integer_onep (arg1)
10509 && !TYPE_OVERFLOW_TRAPS (type))
10510 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10511 fold_convert_loc (loc, type,
10512 TREE_OPERAND (arg0, 0)));
10514 /* Convert -1 - A to ~A. */
10515 if (INTEGRAL_TYPE_P (type)
10516 && integer_all_onesp (arg0))
10517 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10520 /* X - (X / CST) * CST is X % CST. */
10521 if (INTEGRAL_TYPE_P (type)
10522 && TREE_CODE (arg1) == MULT_EXPR
10523 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10524 && operand_equal_p (arg0,
10525 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10526 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10527 TREE_OPERAND (arg1, 1), 0))
10528 return
10529 fold_convert_loc (loc, type,
10530 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10531 arg0, TREE_OPERAND (arg1, 1)));
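   /* E.g. X - (X / 7) * 7 folds directly to X % 7.  */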
10533 if (! FLOAT_TYPE_P (type))
10535 if (integer_zerop (arg0))
10536 return negate_expr (fold_convert_loc (loc, type, arg1));
10537 if (integer_zerop (arg1))
10538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10540 /* Fold A - (A & B) into ~B & A. */
10541 if (!TREE_SIDE_EFFECTS (arg0)
10542 && TREE_CODE (arg1) == BIT_AND_EXPR)
10544 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10546 tree arg10 = fold_convert_loc (loc, type,
10547 TREE_OPERAND (arg1, 0));
10548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10549 fold_build1_loc (loc, BIT_NOT_EXPR,
10550 type, arg10),
10551 fold_convert_loc (loc, type, arg0));
10553 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10555 tree arg11 = fold_convert_loc (loc,
10556 type, TREE_OPERAND (arg1, 1));
10557 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10558 fold_build1_loc (loc, BIT_NOT_EXPR,
10559 type, arg11),
10560 fold_convert_loc (loc, type, arg0));
10564 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10565 any power of 2 minus 1. */
10566 if (TREE_CODE (arg0) == BIT_AND_EXPR
10567 && TREE_CODE (arg1) == BIT_AND_EXPR
10568 && operand_equal_p (TREE_OPERAND (arg0, 0),
10569 TREE_OPERAND (arg1, 0), 0))
10571 tree mask0 = TREE_OPERAND (arg0, 1);
10572 tree mask1 = TREE_OPERAND (arg1, 1);
10573 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10575 if (operand_equal_p (tem, mask1, 0))
10577 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10578 TREE_OPERAND (arg0, 0), mask1);
10579 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
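   /* E.g. with B = 0xF, (A & ~0xF) - (A & 0xF) becomes
      (A ^ 0xF) - 0xF.  */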
10584 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10585 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10588 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10589 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10590 (-ARG1 + ARG0) reduces to -ARG1. */
10591 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10592 return negate_expr (fold_convert_loc (loc, type, arg1));
10594 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10595 __complex__ ( x, -y ). This is not the same for SNaNs or if
10596 signed zeros are involved. */
10597 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10599 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10601 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10602 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10603 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10604 bool arg0rz = false, arg0iz = false;
10605 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10606 || (arg0i && (arg0iz = real_zerop (arg0i))))
10608 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10609 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10610 if (arg0rz && arg1i && real_zerop (arg1i))
10612 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10613 arg1r ? arg1r
10614 : build1 (REALPART_EXPR, rtype, arg1));
10615 tree ip = arg0i ? arg0i
10616 : build1 (IMAGPART_EXPR, rtype, arg0);
10617 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10619 else if (arg0iz && arg1r && real_zerop (arg1r))
10621 tree rp = arg0r ? arg0r
10622 : build1 (REALPART_EXPR, rtype, arg0);
10623 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10624 arg1i ? arg1i
10625 : build1 (IMAGPART_EXPR, rtype, arg1));
10626 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10631 /* Fold &x - &x. This can happen from &x.foo - &x.
10632 This is unsafe for certain floats even in non-IEEE formats.
 10633      In IEEE, it is unsafe because it gives the wrong result for NaNs.
10634 Also note that operand_equal_p is always false if an operand
10635 is volatile. */
10637 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10638 && operand_equal_p (arg0, arg1, 0))
10639 return build_zero_cst (type);
10641 /* A - B -> A + (-B) if B is easily negatable. */
10642 if (negate_expr_p (arg1)
10643 && ((FLOAT_TYPE_P (type)
10644 /* Avoid this transformation if B is a positive REAL_CST. */
10645 && (TREE_CODE (arg1) != REAL_CST
10646 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10647 || INTEGRAL_TYPE_P (type)))
10648 return fold_build2_loc (loc, PLUS_EXPR, type,
10649 fold_convert_loc (loc, type, arg0),
10650 fold_convert_loc (loc, type,
10651 negate_expr (arg1)));
10653 /* Try folding difference of addresses. */
10655 HOST_WIDE_INT diff;
10657 if ((TREE_CODE (arg0) == ADDR_EXPR
10658 || TREE_CODE (arg1) == ADDR_EXPR)
10659 && ptr_difference_const (arg0, arg1, &diff))
10660 return build_int_cst_type (type, diff);
10663 /* Fold &a[i] - &a[j] to i-j. */
10664 if (TREE_CODE (arg0) == ADDR_EXPR
10665 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10666 && TREE_CODE (arg1) == ADDR_EXPR
10667 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10669 tree tem = fold_addr_of_array_ref_difference (loc, type,
10670 TREE_OPERAND (arg0, 0),
10671 TREE_OPERAND (arg1, 0));
10672 if (tem)
10673 return tem;
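/* Illustrative example (added for clarity): given "int a[10];",
   &a[7] - &a[2]
   folds to the constant 5 (an element count, not a byte count).  */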
10676 if (FLOAT_TYPE_P (type)
10677 && flag_unsafe_math_optimizations
10678 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10679 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10680 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10681 return tem;
10683 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same
10684 or one of them being 1. Make sure the type is not saturating and has the signedness of
10685 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10686 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10687 if ((TREE_CODE (arg0) == MULT_EXPR
10688 || TREE_CODE (arg1) == MULT_EXPR)
10689 && !TYPE_SATURATING (type)
10690 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10691 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10692 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10694 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10695 if (tem)
10696 return tem;
10699 goto associate;
10701 case MULT_EXPR:
10702 /* (-A) * (-B) -> A * B */
10703 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10704 return fold_build2_loc (loc, MULT_EXPR, type,
10705 fold_convert_loc (loc, type,
10706 TREE_OPERAND (arg0, 0)),
10707 fold_convert_loc (loc, type,
10708 negate_expr (arg1)));
10709 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10710 return fold_build2_loc (loc, MULT_EXPR, type,
10711 fold_convert_loc (loc, type,
10712 negate_expr (arg0)),
10713 fold_convert_loc (loc, type,
10714 TREE_OPERAND (arg1, 0)));
10716 if (! FLOAT_TYPE_P (type))
10718 if (integer_zerop (arg1))
10719 return omit_one_operand_loc (loc, type, arg1, arg0);
10720 if (integer_onep (arg1))
10721 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10722 /* Transform x * -1 into -x. Make sure to do the negation
10723 on the original operand with conversions not stripped
10724 because we can only strip non-sign-changing conversions. */
10725 if (integer_all_onesp (arg1))
10726 return fold_convert_loc (loc, type, negate_expr (op0));
10727 /* Transform x * -C into -x * C if x is easily negatable. */
10728 if (TREE_CODE (arg1) == INTEGER_CST
10729 && tree_int_cst_sgn (arg1) == -1
10730 && negate_expr_p (arg0)
10731 && (tem = negate_expr (arg1)) != arg1
10732 && !TREE_OVERFLOW (tem))
10733 return fold_build2_loc (loc, MULT_EXPR, type,
10734 fold_convert_loc (loc, type,
10735 negate_expr (arg0)),
10736 tem);
10738 /* (a * (1 << b)) is (a << b) */
10739 if (TREE_CODE (arg1) == LSHIFT_EXPR
10740 && integer_onep (TREE_OPERAND (arg1, 0)))
10741 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10742 TREE_OPERAND (arg1, 1));
10743 if (TREE_CODE (arg0) == LSHIFT_EXPR
10744 && integer_onep (TREE_OPERAND (arg0, 0)))
10745 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10746 TREE_OPERAND (arg0, 1));
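/* Illustrative example (added for clarity; integer types only, per the
   check above): for a variable shift count b,
   a * (1 << b) -> a << b
   so a multiplication by a variable power of 2 becomes a shift.  */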
10748 /* (A + A) * C -> A * 2 * C */
10749 if (TREE_CODE (arg0) == PLUS_EXPR
10750 && TREE_CODE (arg1) == INTEGER_CST
10751 && operand_equal_p (TREE_OPERAND (arg0, 0),
10752 TREE_OPERAND (arg0, 1), 0))
10753 return fold_build2_loc (loc, MULT_EXPR, type,
10754 omit_one_operand_loc (loc, type,
10755 TREE_OPERAND (arg0, 0),
10756 TREE_OPERAND (arg0, 1)),
10757 fold_build2_loc (loc, MULT_EXPR, type,
10758 build_int_cst (type, 2), arg1));
10760 strict_overflow_p = false;
10761 if (TREE_CODE (arg1) == INTEGER_CST
10762 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10763 &strict_overflow_p)))
10765 if (strict_overflow_p)
10766 fold_overflow_warning (("assuming signed overflow does not "
10767 "occur when simplifying "
10768 "multiplication"),
10769 WARN_STRICT_OVERFLOW_MISC);
10770 return fold_convert_loc (loc, type, tem);
10773 /* Optimize z * conj(z) for integer complex numbers. */
10774 if (TREE_CODE (arg0) == CONJ_EXPR
10775 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10776 return fold_mult_zconjz (loc, type, arg1);
10777 if (TREE_CODE (arg1) == CONJ_EXPR
10778 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10779 return fold_mult_zconjz (loc, type, arg0);
10781 else
10783 /* Maybe fold x * 0 to 0. The expressions aren't the same
10784 when x is NaN, since x * 0 is also NaN. Nor are they the
10785 same in modes with signed zeros, since multiplying a
10786 negative value by 0 gives -0, not +0. */
10787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10788 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10789 && real_zerop (arg1))
10790 return omit_one_operand_loc (loc, type, arg1, arg0);
10791 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10792 Likewise for complex arithmetic with signed zeros. */
10793 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10794 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10795 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10796 && real_onep (arg1))
10797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10799 /* Transform x * -1.0 into -x. */
10800 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10801 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10802 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10803 && real_minus_onep (arg1))
10804 return fold_convert_loc (loc, type, negate_expr (arg0));
10806 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10807 the result for floating point types due to rounding, so it is applied
10808 only if -fassociative-math was specified. */
10809 if (flag_associative_math
10810 && TREE_CODE (arg0) == RDIV_EXPR
10811 && TREE_CODE (arg1) == REAL_CST
10812 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10814 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10815 arg1);
10816 if (tem)
10817 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10818 TREE_OPERAND (arg0, 1));
10821 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10822 if (operand_equal_p (arg0, arg1, 0))
10824 tree tem = fold_strip_sign_ops (arg0);
10825 if (tem != NULL_TREE)
10827 tem = fold_convert_loc (loc, type, tem);
10828 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10832 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10833 This is not the same for NaNs or if signed zeros are
10834 involved. */
10835 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10836 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10837 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10838 && TREE_CODE (arg1) == COMPLEX_CST
10839 && real_zerop (TREE_REALPART (arg1)))
10841 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10842 if (real_onep (TREE_IMAGPART (arg1)))
10843 return
10844 fold_build2_loc (loc, COMPLEX_EXPR, type,
10845 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10846 rtype, arg0)),
10847 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10848 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10849 return
10850 fold_build2_loc (loc, COMPLEX_EXPR, type,
10851 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10852 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10853 rtype, arg0)));
10856 /* Optimize z * conj(z) for floating point complex numbers.
10857 Guarded by flag_unsafe_math_optimizations as non-finite
10858 imaginary components don't produce scalar results. */
10859 if (flag_unsafe_math_optimizations
10860 && TREE_CODE (arg0) == CONJ_EXPR
10861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10862 return fold_mult_zconjz (loc, type, arg1);
10863 if (flag_unsafe_math_optimizations
10864 && TREE_CODE (arg1) == CONJ_EXPR
10865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10866 return fold_mult_zconjz (loc, type, arg0);
10868 if (flag_unsafe_math_optimizations)
10870 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10871 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10873 /* Optimizations of root(...)*root(...). */
10874 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10876 tree rootfn, arg;
10877 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10878 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10880 /* Optimize sqrt(x)*sqrt(x) as x. */
10881 if (BUILTIN_SQRT_P (fcode0)
10882 && operand_equal_p (arg00, arg10, 0)
10883 && ! HONOR_SNANS (TYPE_MODE (type)))
10884 return arg00;
10886 /* Optimize root(x)*root(y) as root(x*y). */
10887 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10888 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10889 return build_call_expr_loc (loc, rootfn, 1, arg);
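/* Illustrative example (added for clarity; requires
   -funsafe-math-optimizations):
   sqrt (2.0) * sqrt (8.0) -> sqrt (16.0) == 4.0
   This is unsafe in general: for x < 0 and y < 0, sqrt (x) * sqrt (y)
   is NaN, while the fused sqrt (x * y) need not be.  */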
10892 /* Optimize expN(x)*expN(y) as expN(x+y). */
10893 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10895 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10896 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10897 CALL_EXPR_ARG (arg0, 0),
10898 CALL_EXPR_ARG (arg1, 0));
10899 return build_call_expr_loc (loc, expfn, 1, arg);
10902 /* Optimizations of pow(...)*pow(...). */
10903 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10904 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10905 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10907 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10908 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10909 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10910 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10912 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10913 if (operand_equal_p (arg01, arg11, 0))
10915 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10916 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10917 arg00, arg10);
10918 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10921 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10922 if (operand_equal_p (arg00, arg10, 0))
10924 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10925 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10926 arg01, arg11);
10927 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
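/* Illustrative examples (added for clarity) of the two pow folds above:
   pow (x, 3.0) * pow (y, 3.0) -> pow (x * y, 3.0)
   pow (x, 2.0) * pow (x, 3.0) -> pow (x, 5.0)  */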
10931 /* Optimize tan(x)*cos(x) as sin(x). */
10932 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10933 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10934 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10935 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10936 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10937 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10938 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10939 CALL_EXPR_ARG (arg1, 0), 0))
10941 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10943 if (sinfn != NULL_TREE)
10944 return build_call_expr_loc (loc, sinfn, 1,
10945 CALL_EXPR_ARG (arg0, 0));
10948 /* Optimize x*pow(x,c) as pow(x,c+1). */
10949 if (fcode1 == BUILT_IN_POW
10950 || fcode1 == BUILT_IN_POWF
10951 || fcode1 == BUILT_IN_POWL)
10953 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10954 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10955 if (TREE_CODE (arg11) == REAL_CST
10956 && !TREE_OVERFLOW (arg11)
10957 && operand_equal_p (arg0, arg10, 0))
10959 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10960 REAL_VALUE_TYPE c;
10961 tree arg;
10963 c = TREE_REAL_CST (arg11);
10964 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10965 arg = build_real (type, c);
10966 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10970 /* Optimize pow(x,c)*x as pow(x,c+1). */
10971 if (fcode0 == BUILT_IN_POW
10972 || fcode0 == BUILT_IN_POWF
10973 || fcode0 == BUILT_IN_POWL)
10975 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10976 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10977 if (TREE_CODE (arg01) == REAL_CST
10978 && !TREE_OVERFLOW (arg01)
10979 && operand_equal_p (arg1, arg00, 0))
10981 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10982 REAL_VALUE_TYPE c;
10983 tree arg;
10985 c = TREE_REAL_CST (arg01);
10986 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10987 arg = build_real (type, c);
10988 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10992 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10993 if (!in_gimple_form
10994 && optimize
10995 && operand_equal_p (arg0, arg1, 0))
10997 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10999 if (powfn)
11001 tree arg = build_real (type, dconst2);
11002 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11007 goto associate;
11009 case BIT_IOR_EXPR:
11010 bit_ior:
11011 if (integer_all_onesp (arg1))
11012 return omit_one_operand_loc (loc, type, arg1, arg0);
11013 if (integer_zerop (arg1))
11014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11015 if (operand_equal_p (arg0, arg1, 0))
11016 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11018 /* ~X | X is -1. */
11019 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11022 t1 = build_zero_cst (type);
11023 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11024 return omit_one_operand_loc (loc, type, t1, arg1);
11027 /* X | ~X is -1. */
11028 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11031 t1 = build_zero_cst (type);
11032 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11033 return omit_one_operand_loc (loc, type, t1, arg0);
11036 /* Canonicalize (X & C1) | C2. */
11037 if (TREE_CODE (arg0) == BIT_AND_EXPR
11038 && TREE_CODE (arg1) == INTEGER_CST
11039 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11041 double_int c1, c2, c3, msk;
11042 int width = TYPE_PRECISION (type), w;
11043 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11044 c2 = tree_to_double_int (arg1);
11046 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11047 if ((c1 & c2) == c1)
11048 return omit_one_operand_loc (loc, type, arg1,
11049 TREE_OPERAND (arg0, 0));
11051 msk = double_int::mask (width);
11053 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11054 if (msk.and_not (c1 | c2).is_zero ())
11055 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11056 TREE_OPERAND (arg0, 0), arg1);
11058 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11059 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11060 mode which allows further optimizations. */
11061 c1 &= msk;
11062 c2 &= msk;
11063 c3 = c1.and_not (c2);
11064 for (w = BITS_PER_UNIT;
11065 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11066 w <<= 1)
11068 unsigned HOST_WIDE_INT mask
11069 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11070 if (((c1.low | c2.low) & mask) == mask
11071 && (c1.low & ~mask) == 0 && c1.high == 0)
11073 c3 = double_int::from_uhwi (mask);
11074 break;
11077 if (c3 != c1)
11078 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11079 fold_build2_loc (loc, BIT_AND_EXPR, type,
11080 TREE_OPERAND (arg0, 0),
11081 double_int_to_tree (type,
11082 c3)),
11083 arg1);
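/* Illustrative example (added for clarity; assuming 32-bit int): for
   (x & 0xfe) | 0x0f, C1 & ~C2 == 0xf0, but C1 can instead be widened
   to the 8-bit mode mask, giving (x & 0xff) | 0x0f. By contrast,
   (x & 0xff) | 0x0f is left alone, since 0xff is already a mode mask.  */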
11086 /* (X & Y) | Y is (X, Y). */
11087 if (TREE_CODE (arg0) == BIT_AND_EXPR
11088 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11089 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11090 /* (X & Y) | X is (Y, X). */
11091 if (TREE_CODE (arg0) == BIT_AND_EXPR
11092 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11093 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11094 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11095 /* X | (X & Y) is (Y, X). */
11096 if (TREE_CODE (arg1) == BIT_AND_EXPR
11097 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11098 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11099 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11100 /* X | (Y & X) is (Y, X). */
11101 if (TREE_CODE (arg1) == BIT_AND_EXPR
11102 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11103 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11104 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11106 /* (X & ~Y) | (~X & Y) is X ^ Y */
11107 if (TREE_CODE (arg0) == BIT_AND_EXPR
11108 && TREE_CODE (arg1) == BIT_AND_EXPR)
11110 tree a0, a1, l0, l1, n0, n1;
11112 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11113 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11115 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11116 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11118 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11119 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11121 if ((operand_equal_p (n0, a0, 0)
11122 && operand_equal_p (n1, a1, 0))
11123 || (operand_equal_p (n0, a1, 0)
11124 && operand_equal_p (n1, a0, 0)))
11125 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
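/* Illustrative example (added for clarity): with m an arbitrary mask,
   (x & ~m) | (~x & m) -> x ^ m
   since each result bit is set exactly where x and m differ.  */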
11128 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11129 if (t1 != NULL_TREE)
11130 return t1;
11132 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11134 This results in more efficient code for machines without a NAND
11135 instruction. Combine will canonicalize to the first form
11136 which will allow use of NAND instructions provided by the
11137 backend if they exist. */
11138 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11139 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11141 return
11142 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11143 build2 (BIT_AND_EXPR, type,
11144 fold_convert_loc (loc, type,
11145 TREE_OPERAND (arg0, 0)),
11146 fold_convert_loc (loc, type,
11147 TREE_OPERAND (arg1, 0))));
11150 /* See if this can be simplified into a rotate first. If that
11151 is unsuccessful continue in the association code. */
11152 goto bit_rotate;
11154 case BIT_XOR_EXPR:
11155 if (integer_zerop (arg1))
11156 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11157 if (integer_all_onesp (arg1))
11158 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11159 if (operand_equal_p (arg0, arg1, 0))
11160 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11162 /* ~X ^ X is -1. */
11163 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11164 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11166 t1 = build_zero_cst (type);
11167 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11168 return omit_one_operand_loc (loc, type, t1, arg1);
11171 /* X ^ ~X is -1. */
11172 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11175 t1 = build_zero_cst (type);
11176 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11177 return omit_one_operand_loc (loc, type, t1, arg0);
11180 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11181 with a constant, and the two constants have no bits in common,
11182 we should treat this as a BIT_IOR_EXPR since this may produce more
11183 simplifications. */
11184 if (TREE_CODE (arg0) == BIT_AND_EXPR
11185 && TREE_CODE (arg1) == BIT_AND_EXPR
11186 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11187 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11188 && integer_zerop (const_binop (BIT_AND_EXPR,
11189 TREE_OPERAND (arg0, 1),
11190 TREE_OPERAND (arg1, 1))))
11192 code = BIT_IOR_EXPR;
11193 goto bit_ior;
11196 /* (X | Y) ^ X -> Y & ~X. */
11197 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11198 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11200 tree t2 = TREE_OPERAND (arg0, 1);
11201 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11202 arg1);
11203 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11204 fold_convert_loc (loc, type, t2),
11205 fold_convert_loc (loc, type, t1));
11206 return t1;
11209 /* (Y | X) ^ X -> Y & ~X. */
11210 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11211 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11213 tree t2 = TREE_OPERAND (arg0, 0);
11214 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11215 arg1);
11216 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11217 fold_convert_loc (loc, type, t2),
11218 fold_convert_loc (loc, type, t1));
11219 return t1;
11222 /* X ^ (X | Y) -> Y & ~X. */
11223 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11224 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11226 tree t2 = TREE_OPERAND (arg1, 1);
11227 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11228 arg0);
11229 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11230 fold_convert_loc (loc, type, t2),
11231 fold_convert_loc (loc, type, t1));
11232 return t1;
11235 /* X ^ (Y | X) -> Y & ~X. */
11236 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11237 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11239 tree t2 = TREE_OPERAND (arg1, 0);
11240 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11241 arg0);
11242 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11243 fold_convert_loc (loc, type, t2),
11244 fold_convert_loc (loc, type, t1));
11245 return t1;
11248 /* Convert ~X ^ ~Y to X ^ Y. */
11249 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11250 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11251 return fold_build2_loc (loc, code, type,
11252 fold_convert_loc (loc, type,
11253 TREE_OPERAND (arg0, 0)),
11254 fold_convert_loc (loc, type,
11255 TREE_OPERAND (arg1, 0)));
11257 /* Convert ~X ^ C to X ^ ~C. */
11258 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11259 && TREE_CODE (arg1) == INTEGER_CST)
11260 return fold_build2_loc (loc, code, type,
11261 fold_convert_loc (loc, type,
11262 TREE_OPERAND (arg0, 0)),
11263 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11265 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11266 if (TREE_CODE (arg0) == BIT_AND_EXPR
11267 && integer_onep (TREE_OPERAND (arg0, 1))
11268 && integer_onep (arg1))
11269 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11270 build_zero_cst (TREE_TYPE (arg0)));
11272 /* Fold (X & Y) ^ Y as ~X & Y. */
11273 if (TREE_CODE (arg0) == BIT_AND_EXPR
11274 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11276 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11277 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11278 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11279 fold_convert_loc (loc, type, arg1));
11281 /* Fold (X & Y) ^ X as ~Y & X. */
11282 if (TREE_CODE (arg0) == BIT_AND_EXPR
11283 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11284 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11286 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11287 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11288 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11289 fold_convert_loc (loc, type, arg1));
11291 /* Fold X ^ (X & Y) as X & ~Y. */
11292 if (TREE_CODE (arg1) == BIT_AND_EXPR
11293 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11295 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11296 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11297 fold_convert_loc (loc, type, arg0),
11298 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11300 /* Fold X ^ (Y & X) as ~Y & X. */
11301 if (TREE_CODE (arg1) == BIT_AND_EXPR
11302 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11303 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11305 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11306 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11307 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11308 fold_convert_loc (loc, type, arg0));
11311 /* See if this can be simplified into a rotate first. If that
11312 is unsuccessful continue in the association code. */
11313 goto bit_rotate;
11315 case BIT_AND_EXPR:
11316 if (integer_all_onesp (arg1))
11317 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11318 if (integer_zerop (arg1))
11319 return omit_one_operand_loc (loc, type, arg1, arg0);
11320 if (operand_equal_p (arg0, arg1, 0))
11321 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11323 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11324 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11325 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11326 || (TREE_CODE (arg0) == EQ_EXPR
11327 && integer_zerop (TREE_OPERAND (arg0, 1))))
11328 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11329 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11331 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11332 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11333 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11334 || (TREE_CODE (arg1) == EQ_EXPR
11335 && integer_zerop (TREE_OPERAND (arg1, 1))))
11336 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11337 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11339 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11340 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11341 && TREE_CODE (arg1) == INTEGER_CST
11342 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11344 tree tmp1 = fold_convert_loc (loc, type, arg1);
11345 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11346 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11347 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11348 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11349 return
11350 fold_convert_loc (loc, type,
11351 fold_build2_loc (loc, BIT_IOR_EXPR,
11352 type, tmp2, tmp3));
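/* Illustrative example (added for clarity):
   (x | 0xf0) & 0xff -> (x & 0xff) | (0xf0 & 0xff),
   i.e. (x & 0xff) | 0xf0.  */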
11355 /* (X | Y) & Y is (X, Y). */
11356 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11357 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11358 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11359 /* (X | Y) & X is (Y, X). */
11360 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11361 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11362 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11363 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11364 /* X & (X | Y) is (Y, X). */
11365 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11367 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11368 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11369 /* X & (Y | X) is (Y, X). */
11370 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11371 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11372 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11373 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11375 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11376 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11377 && integer_onep (TREE_OPERAND (arg0, 1))
11378 && integer_onep (arg1))
11380 tree tem2;
11381 tem = TREE_OPERAND (arg0, 0);
11382 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11383 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11384 tem, tem2);
11385 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11386 build_zero_cst (TREE_TYPE (tem)));
11388 /* Fold ~X & 1 as (X & 1) == 0. */
11389 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11390 && integer_onep (arg1))
11392 tree tem2;
11393 tem = TREE_OPERAND (arg0, 0);
11394 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11395 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11396 tem, tem2);
11397 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11398 build_zero_cst (TREE_TYPE (tem)));
11400 /* Fold !X & 1 as X == 0. */
11401 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11402 && integer_onep (arg1))
11404 tem = TREE_OPERAND (arg0, 0);
11405 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11406 build_zero_cst (TREE_TYPE (tem)));
11409 /* Fold (X ^ Y) & Y as ~X & Y. */
11410 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11411 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11413 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11414 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11415 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11416 fold_convert_loc (loc, type, arg1));
11418 /* Fold (X ^ Y) & X as ~Y & X. */
11419 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11421 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11423 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11424 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11425 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11426 fold_convert_loc (loc, type, arg1));
11428 /* Fold X & (X ^ Y) as X & ~Y. */
11429 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11432 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11433 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11434 fold_convert_loc (loc, type, arg0),
11435 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11437 /* Fold X & (Y ^ X) as ~Y & X. */
11438 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11439 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11440 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11442 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11443 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11444 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11445 fold_convert_loc (loc, type, arg0));
11448 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11449 multiple of 1 << CST. */
11450 if (TREE_CODE (arg1) == INTEGER_CST)
11452 double_int cst1 = tree_to_double_int (arg1);
11453 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11454 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11455 if ((cst1 & ncst1) == ncst1
11456 && multiple_of_p (type, arg0,
11457 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11458 return fold_convert_loc (loc, type, arg0);
11461 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11462 bits from CST2. */
11463 if (TREE_CODE (arg1) == INTEGER_CST
11464 && TREE_CODE (arg0) == MULT_EXPR
11465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11467 int arg1tz
11468 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11469 if (arg1tz > 0)
11471 double_int arg1mask, masked;
11472 arg1mask = ~double_int::mask (arg1tz);
11473 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11474 TYPE_UNSIGNED (type));
11475 masked = arg1mask & tree_to_double_int (arg1);
11476 if (masked.is_zero ())
11477 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11478 arg0, arg1);
11479 else if (masked != tree_to_double_int (arg1))
11480 return fold_build2_loc (loc, code, type, op0,
11481 double_int_to_tree (type, masked));
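/* Illustrative examples (added for clarity): x * 8 has its three low
   bits known zero, so
   (x * 8) & 7 -> 0
   (x * 8) & 0x1f -> (x * 8) & 0x18  */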
11485 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11486 ((A & N) + B) & M -> (A + B) & M
11487 Similarly if (N & M) == 0,
11488 ((A | N) + B) & M -> (A + B) & M
11489 and for - instead of + (or unary - instead of +)
11490 and/or ^ instead of |.
11491 If B is constant and (B & M) == 0, fold into A & M. */
11492 if (host_integerp (arg1, 1))
11494 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11495 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11496 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11497 && (TREE_CODE (arg0) == PLUS_EXPR
11498 || TREE_CODE (arg0) == MINUS_EXPR
11499 || TREE_CODE (arg0) == NEGATE_EXPR)
11500 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11501 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11503 tree pmop[2];
11504 int which = 0;
11505 unsigned HOST_WIDE_INT cst0;
11507 /* Now we know that arg0 is (C + D) or (C - D) or
11508 -C and arg1 (M) is == (1LL << cst) - 1.
11509 Store C into PMOP[0] and D into PMOP[1]. */
11510 pmop[0] = TREE_OPERAND (arg0, 0);
11511 pmop[1] = NULL;
11512 if (TREE_CODE (arg0) != NEGATE_EXPR)
11514 pmop[1] = TREE_OPERAND (arg0, 1);
11515 which = 1;
11518 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11519 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11520 & cst1) != cst1)
11521 which = -1;
11523 for (; which >= 0; which--)
11524 switch (TREE_CODE (pmop[which]))
11526 case BIT_AND_EXPR:
11527 case BIT_IOR_EXPR:
11528 case BIT_XOR_EXPR:
11529 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11530 != INTEGER_CST)
11531 break;
11532 /* tree_low_cst is not used here, because we don't care
11533 about the upper bits. */
11534 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11535 cst0 &= cst1;
11536 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11538 if (cst0 != cst1)
11539 break;
11541 else if (cst0 != 0)
11542 break;
11543 /* If C or D is of the form (A & N) where
11544 (N & M) == M, or of the form (A | N) or
11545 (A ^ N) where (N & M) == 0, replace it with A. */
11546 pmop[which] = TREE_OPERAND (pmop[which], 0);
11547 break;
11548 case INTEGER_CST:
11549 /* If C or D is a N where (N & M) == 0, it can be
11550 omitted (assumed 0). */
11551 if ((TREE_CODE (arg0) == PLUS_EXPR
11552 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11553 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11554 pmop[which] = NULL;
11555 break;
11556 default:
11557 break;
11560 /* Only build anything new if we optimized one or both arguments
11561 above. */
11562 if (pmop[0] != TREE_OPERAND (arg0, 0)
11563 || (TREE_CODE (arg0) != NEGATE_EXPR
11564 && pmop[1] != TREE_OPERAND (arg0, 1)))
11566 tree utype = TREE_TYPE (arg0);
11567 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11569 /* Perform the operations in a type that has defined
11570 overflow behavior. */
11571 utype = unsigned_type_for (TREE_TYPE (arg0));
11572 if (pmop[0] != NULL)
11573 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11574 if (pmop[1] != NULL)
11575 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11578 if (TREE_CODE (arg0) == NEGATE_EXPR)
11579 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11580 else if (TREE_CODE (arg0) == PLUS_EXPR)
11582 if (pmop[0] != NULL && pmop[1] != NULL)
11583 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11584 pmop[0], pmop[1]);
11585 else if (pmop[0] != NULL)
11586 tem = pmop[0];
11587 else if (pmop[1] != NULL)
11588 tem = pmop[1];
11589 else
11590 return build_int_cst (type, 0);
11592 else if (pmop[0] == NULL)
11593 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11594 else
11595 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11596 pmop[0], pmop[1]);
11597 /* TEM is now the new binary +, - or unary - replacement. */
11598 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11599 fold_convert_loc (loc, utype, arg1));
11600 return fold_convert_loc (loc, type, tem);
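/* Illustrative examples (added for clarity) with M == 0xff, so that
   cst1 + 1 is a power of 2:
   ((a & 0xff) + b) & 0xff -> (a + b) & 0xff (N & M == M)
   ((a | 0x100) + b) & 0xff -> (a + b) & 0xff (N & M == 0)  */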
11605 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11606 if (t1 != NULL_TREE)
11607 return t1;
11608 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11609 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11610 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11612 unsigned int prec
11613 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11615 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11616 && (~TREE_INT_CST_LOW (arg1)
11617 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11618 return
11619 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11622 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11624 This results in more efficient code for machines without a NOR
11625 instruction. Combine will canonicalize to the first form
11626 which will allow use of NOR instructions provided by the
11627 backend if they exist. */
11628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11629 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11631 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11632 build2 (BIT_IOR_EXPR, type,
11633 fold_convert_loc (loc, type,
11634 TREE_OPERAND (arg0, 0)),
11635 fold_convert_loc (loc, type,
11636 TREE_OPERAND (arg1, 0))));
11639 /* If arg0 is derived from the address of an object or function, we may
11640 be able to fold this expression using the object or function's
11641 alignment. */
11642 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11644 unsigned HOST_WIDE_INT modulus, residue;
11645 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11647 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11648 integer_onep (arg1));
11650 /* This works because modulus is a power of 2. If this weren't the
11651 case, we'd have to replace it by its greatest power-of-2
11652 divisor: modulus & -modulus. */
11653 if (low < modulus)
11654 return build_int_cst (type, residue & low);
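/* Illustrative example (added for clarity): if p is the address of an
   object with known 8-byte alignment, then modulus == 8 and
   residue == 0, so the GIMPLE-level expression
   p & 7 -> 0  */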
11657 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11658 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11659 if the new mask might be further optimized. */
11660 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11661 || TREE_CODE (arg0) == RSHIFT_EXPR)
11662 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11663 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11664 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11665 < TYPE_PRECISION (TREE_TYPE (arg0))
11666 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11667 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11669 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11670 unsigned HOST_WIDE_INT mask
11671 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11672 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11673 tree shift_type = TREE_TYPE (arg0);
11675 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11676 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11677 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11678 && TYPE_PRECISION (TREE_TYPE (arg0))
11679 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11681 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11682 tree arg00 = TREE_OPERAND (arg0, 0);
11683 /* See if more bits can be proven as zero because of
11684 zero extension. */
11685 if (TREE_CODE (arg00) == NOP_EXPR
11686 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11688 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11689 if (TYPE_PRECISION (inner_type)
11690 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11691 && TYPE_PRECISION (inner_type) < prec)
11693 prec = TYPE_PRECISION (inner_type);
11694 /* See if we can shorten the right shift. */
11695 if (shiftc < prec)
11696 shift_type = inner_type;
11699 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11700 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11701 zerobits <<= prec - shiftc;
11702 /* For arithmetic shift if sign bit could be set, zerobits
11703 can contain actually sign bits, so no transformation is
11704 possible, unless MASK masks them all away. In that
11705 case the shift needs to be converted into logical shift. */
11706 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11707 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11709 if ((mask & zerobits) == 0)
11710 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11711 else
11712 zerobits = 0;
11716 /* ((X << 16) & 0xff00) is (X, 0). */
11717 if ((mask & zerobits) == mask)
11718 return omit_one_operand_loc (loc, type,
11719 build_int_cst (type, 0), arg0);
11721 newmask = mask | zerobits;
11722 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11724 unsigned int prec;
11726 /* Only do the transformation if NEWMASK is some integer
11727 mode's mask. */
11728 for (prec = BITS_PER_UNIT;
11729 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11730 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11731 break;
11732 if (prec < HOST_BITS_PER_WIDE_INT
11733 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11735 tree newmaskt;
11737 if (shift_type != TREE_TYPE (arg0))
11739 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11740 fold_convert_loc (loc, shift_type,
11741 TREE_OPERAND (arg0, 0)),
11742 TREE_OPERAND (arg0, 1));
11743 tem = fold_convert_loc (loc, type, tem);
11745 else
11746 tem = op0;
11747 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11748 if (!tree_int_cst_equal (newmaskt, arg1))
11749 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
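/* Illustrative examples (added for clarity; unsigned 32-bit x): a
   logical right shift by 24 leaves only bits 0-7 possibly nonzero, so
   (x >> 24) & 0xff00 -> 0
   (x >> 24) & 0x1ff -> x >> 24
   (in the second case the mask widens to 0xffffffff and drops out).  */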
11754 goto associate;
11756 case RDIV_EXPR:
11757 /* Don't touch a floating-point divide by zero unless the mode
11758 of the constant can represent infinity. */
11759 if (TREE_CODE (arg1) == REAL_CST
11760 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11761 && real_zerop (arg1))
11762 return NULL_TREE;
11764 /* Optimize A / A to 1.0 if we don't care about
11765 NaNs or Infinities. Skip the transformation
11766 for non-real operands. */
11767 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11768 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11769 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11770 && operand_equal_p (arg0, arg1, 0))
11772 tree r = build_real (TREE_TYPE (arg0), dconst1);
11774 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11777 /* The complex version of the above A / A optimization. */
11778 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11779 && operand_equal_p (arg0, arg1, 0))
11781 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11782 if (! HONOR_NANS (TYPE_MODE (elem_type))
11783 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11785 tree r = build_real (elem_type, dconst1);
11786 /* omit_two_operands will call fold_convert for us. */
11787 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11791 /* (-A) / (-B) -> A / B */
11792 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11793 return fold_build2_loc (loc, RDIV_EXPR, type,
11794 TREE_OPERAND (arg0, 0),
11795 negate_expr (arg1));
11796 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11797 return fold_build2_loc (loc, RDIV_EXPR, type,
11798 negate_expr (arg0),
11799 TREE_OPERAND (arg1, 0));
11801 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11802 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11803 && real_onep (arg1))
11804 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11806 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11807 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11808 && real_minus_onep (arg1))
11809 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11810 negate_expr (arg0)));
11812 /* If ARG1 is a constant, we can convert this to a multiply by the
11813 reciprocal. This does not have the same rounding properties,
11814 so only do this if -freciprocal-math. We can actually
11815 always safely do it if ARG1 is a power of two, but it's hard to
11816 tell if it is or not in a portable manner. */
11817 if (optimize
11818 && (TREE_CODE (arg1) == REAL_CST
11819 || (TREE_CODE (arg1) == COMPLEX_CST
11820 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11821 || (TREE_CODE (arg1) == VECTOR_CST
11822 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11824 if (flag_reciprocal_math
11825 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11826 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11827 /* Find the reciprocal if optimizing and the result is exact.
11828 TODO: Complex reciprocal not implemented. */
11829 if (TREE_CODE (arg1) != COMPLEX_CST)
11831 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11833 if (inverse)
11834 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
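/* Illustrative example (added for clarity): division by a power of two
   has an exact reciprocal in binary floating point, so even without
   -freciprocal-math,
   x / 4.0 -> x * 0.25  */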
11837 /* Convert A/B/C to A/(B*C). */
11838 if (flag_reciprocal_math
11839 && TREE_CODE (arg0) == RDIV_EXPR)
11840 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11841 fold_build2_loc (loc, MULT_EXPR, type,
11842 TREE_OPERAND (arg0, 1), arg1));
11844 /* Convert A/(B/C) to (A/B)*C. */
11845 if (flag_reciprocal_math
11846 && TREE_CODE (arg1) == RDIV_EXPR)
11847 return fold_build2_loc (loc, MULT_EXPR, type,
11848 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11849 TREE_OPERAND (arg1, 0)),
11850 TREE_OPERAND (arg1, 1));
11852 /* Convert C1/(X*C2) into (C1/C2)/X. */
11853 if (flag_reciprocal_math
11854 && TREE_CODE (arg1) == MULT_EXPR
11855 && TREE_CODE (arg0) == REAL_CST
11856 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11858 tree tem = const_binop (RDIV_EXPR, arg0,
11859 TREE_OPERAND (arg1, 1));
11860 if (tem)
11861 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11862 TREE_OPERAND (arg1, 0));
11865 if (flag_unsafe_math_optimizations)
11867 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11868 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11870 /* Optimize sin(x)/cos(x) as tan(x). */
11871 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11872 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11873 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11874 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11875 CALL_EXPR_ARG (arg1, 0), 0))
11877 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11879 if (tanfn != NULL_TREE)
11880 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11883 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11884 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11885 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11886 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11887 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11888 CALL_EXPR_ARG (arg1, 0), 0))
11890 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11892 if (tanfn != NULL_TREE)
11894 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11895 CALL_EXPR_ARG (arg0, 0));
11896 return fold_build2_loc (loc, RDIV_EXPR, type,
11897 build_real (type, dconst1), tmp);
11901 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11902 NaNs or Infinities. */
11903 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11904 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11905 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11907 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11908 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11910 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11911 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11912 && operand_equal_p (arg00, arg01, 0))
11914 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11916 if (cosfn != NULL_TREE)
11917 return build_call_expr_loc (loc, cosfn, 1, arg00);
11921 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11922 NaNs or Infinities. */
11923 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11924 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11925 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11927 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11928 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11930 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11931 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11932 && operand_equal_p (arg00, arg01, 0))
11934 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11936 if (cosfn != NULL_TREE)
11938 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11939 return fold_build2_loc (loc, RDIV_EXPR, type,
11940 build_real (type, dconst1),
11941 tmp);
11946 /* Optimize pow(x,c)/x as pow(x,c-1). */
11947 if (fcode0 == BUILT_IN_POW
11948 || fcode0 == BUILT_IN_POWF
11949 || fcode0 == BUILT_IN_POWL)
11951 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11952 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11953 if (TREE_CODE (arg01) == REAL_CST
11954 && !TREE_OVERFLOW (arg01)
11955 && operand_equal_p (arg1, arg00, 0))
11957 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11958 REAL_VALUE_TYPE c;
11959 tree arg;
11961 c = TREE_REAL_CST (arg01);
11962 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11963 arg = build_real (type, c);
11964 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11968 /* Optimize a/root(b/c) into a*root(c/b). */
11969 if (BUILTIN_ROOT_P (fcode1))
11971 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11973 if (TREE_CODE (rootarg) == RDIV_EXPR)
11975 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11976 tree b = TREE_OPERAND (rootarg, 0);
11977 tree c = TREE_OPERAND (rootarg, 1);
11979 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11981 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11982 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11986 /* Optimize x/expN(y) into x*expN(-y). */
11987 if (BUILTIN_EXPONENT_P (fcode1))
11989 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11990 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11991 arg1 = build_call_expr_loc (loc,
11992 expfn, 1,
11993 fold_convert_loc (loc, type, arg));
11994 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11997 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11998 if (fcode1 == BUILT_IN_POW
11999 || fcode1 == BUILT_IN_POWF
12000 || fcode1 == BUILT_IN_POWL)
12002 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12003 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12004 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12005 tree neg11 = fold_convert_loc (loc, type,
12006 negate_expr (arg11));
12007 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12008 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12011 return NULL_TREE;
12013 case TRUNC_DIV_EXPR:
12014 /* Optimize (X & (-A)) / A where A is a power of 2,
12015 to X >> log2(A) */
12016 if (TREE_CODE (arg0) == BIT_AND_EXPR
12017 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12018 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12020 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12021 arg1, TREE_OPERAND (arg0, 1));
12022 if (sum && integer_zerop (sum)) {
12023 unsigned long pow2;
12025 if (TREE_INT_CST_LOW (arg1))
12026 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12027 else
12028 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12029 + HOST_BITS_PER_WIDE_INT;
12031 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12032 TREE_OPERAND (arg0, 0),
12033 build_int_cst (integer_type_node, pow2));
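/* Illustrative example (added for clarity; signed x, per the
   !TYPE_UNSIGNED check above):
   (x & -16) / 16 -> x >> 4
   valid because x & -16 already has its four low bits clear.  */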
12037 /* Fall through */
12039 case FLOOR_DIV_EXPR:
12040 /* Simplify A / (B << N) where A and B are positive and B is
12041 a power of 2, to A >> (N + log2(B)). */
12042 strict_overflow_p = false;
12043 if (TREE_CODE (arg1) == LSHIFT_EXPR
12044 && (TYPE_UNSIGNED (type)
12045 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12047 tree sval = TREE_OPERAND (arg1, 0);
12048 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12050 tree sh_cnt = TREE_OPERAND (arg1, 1);
12051 unsigned long pow2;
12053 if (TREE_INT_CST_LOW (sval))
12054 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12055 else
12056 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12057 + HOST_BITS_PER_WIDE_INT;
12059 if (strict_overflow_p)
12060 fold_overflow_warning (("assuming signed overflow does not "
12061 "occur when simplifying A / (B << N)"),
12062 WARN_STRICT_OVERFLOW_MISC);
12064 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12065 sh_cnt,
12066 build_int_cst (TREE_TYPE (sh_cnt),
12067 pow2));
12068 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12069 fold_convert_loc (loc, type, arg0), sh_cnt);
12073 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12074 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12075 if (INTEGRAL_TYPE_P (type)
12076 && TYPE_UNSIGNED (type)
12077 && code == FLOOR_DIV_EXPR)
12078 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12080 /* Fall through */
12082 case ROUND_DIV_EXPR:
12083 case CEIL_DIV_EXPR:
12084 case EXACT_DIV_EXPR:
12085 if (integer_onep (arg1))
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12087 if (integer_zerop (arg1))
12088 return NULL_TREE;
12089 /* X / -1 is -X. */
12090 if (!TYPE_UNSIGNED (type)
12091 && TREE_CODE (arg1) == INTEGER_CST
12092 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12093 && TREE_INT_CST_HIGH (arg1) == -1)
12094 return fold_convert_loc (loc, type, negate_expr (arg0));
12096 /* Convert -A / -B to A / B when the type is signed and overflow is
12097 undefined. */
12098 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12099 && TREE_CODE (arg0) == NEGATE_EXPR
12100 && negate_expr_p (arg1))
12102 if (INTEGRAL_TYPE_P (type))
12103 fold_overflow_warning (("assuming signed overflow does not occur "
12104 "when distributing negation across "
12105 "division"),
12106 WARN_STRICT_OVERFLOW_MISC);
12107 return fold_build2_loc (loc, code, type,
12108 fold_convert_loc (loc, type,
12109 TREE_OPERAND (arg0, 0)),
12110 fold_convert_loc (loc, type,
12111 negate_expr (arg1)));
12113 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12114 && TREE_CODE (arg1) == NEGATE_EXPR
12115 && negate_expr_p (arg0))
12117 if (INTEGRAL_TYPE_P (type))
12118 fold_overflow_warning (("assuming signed overflow does not occur "
12119 "when distributing negation across "
12120 "division"),
12121 WARN_STRICT_OVERFLOW_MISC);
12122 return fold_build2_loc (loc, code, type,
12123 fold_convert_loc (loc, type,
12124 negate_expr (arg0)),
12125 fold_convert_loc (loc, type,
12126 TREE_OPERAND (arg1, 0)));
12129 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12130 operation, EXACT_DIV_EXPR.
12132 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12133 At one time others generated faster code; it's not clear whether they
12134 still do after the last round of changes to the DIV code in expmed.c. */
12135 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12136 && multiple_of_p (type, arg0, arg1))
12137 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12139 strict_overflow_p = false;
12140 if (TREE_CODE (arg1) == INTEGER_CST
12141 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12142 &strict_overflow_p)))
12144 if (strict_overflow_p)
12145 fold_overflow_warning (("assuming signed overflow does not occur "
12146 "when simplifying division"),
12147 WARN_STRICT_OVERFLOW_MISC);
12148 return fold_convert_loc (loc, type, tem);
12151 return NULL_TREE;
12153 case CEIL_MOD_EXPR:
12154 case FLOOR_MOD_EXPR:
12155 case ROUND_MOD_EXPR:
12156 case TRUNC_MOD_EXPR:
12157 /* X % 1 is always zero, but be sure to preserve any side
12158 effects in X. */
12159 if (integer_onep (arg1))
12160 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12162 /* For X % 0, return X % 0 unchanged so that we can get the
12163 proper warnings and errors. */
12164 if (integer_zerop (arg1))
12165 return NULL_TREE;
12167 /* 0 % X is always zero, but be sure to preserve any side
12168 effects in X. Place this after checking for X == 0. */
12169 if (integer_zerop (arg0))
12170 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12172 /* X % -1 is zero. */
12173 if (!TYPE_UNSIGNED (type)
12174 && TREE_CODE (arg1) == INTEGER_CST
12175 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12176 && TREE_INT_CST_HIGH (arg1) == -1)
12177 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12179 /* X % -C is the same as X % C. */
12180 if (code == TRUNC_MOD_EXPR
12181 && !TYPE_UNSIGNED (type)
12182 && TREE_CODE (arg1) == INTEGER_CST
12183 && !TREE_OVERFLOW (arg1)
12184 && TREE_INT_CST_HIGH (arg1) < 0
12185 && !TYPE_OVERFLOW_TRAPS (type)
12186 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12187 && !sign_bit_p (arg1, arg1))
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, type, arg0),
12190 fold_convert_loc (loc, type,
12191 negate_expr (arg1)));
12193 /* X % -Y is the same as X % Y. */
12194 if (code == TRUNC_MOD_EXPR
12195 && !TYPE_UNSIGNED (type)
12196 && TREE_CODE (arg1) == NEGATE_EXPR
12197 && !TYPE_OVERFLOW_TRAPS (type))
12198 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12199 fold_convert_loc (loc, type,
12200 TREE_OPERAND (arg1, 0)));
12202 strict_overflow_p = false;
12203 if (TREE_CODE (arg1) == INTEGER_CST
12204 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12205 &strict_overflow_p)))
12207 if (strict_overflow_p)
12208 fold_overflow_warning (("assuming signed overflow does not occur "
12209 "when simplifying modulus"),
12210 WARN_STRICT_OVERFLOW_MISC);
12211 return fold_convert_loc (loc, type, tem);
12214 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12215 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12216 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12217 && (TYPE_UNSIGNED (type)
12218 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12220 tree c = arg1;
12221 /* Also optimize A % (C << N) where C is a power of 2,
12222 to A & ((C << N) - 1). */
12223 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12224 c = TREE_OPERAND (arg1, 0);
12226 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12228 tree mask
12229 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12230 build_int_cst (TREE_TYPE (arg1), 1));
12231 if (strict_overflow_p)
12232 fold_overflow_warning (("assuming signed overflow does not "
12233 "occur when simplifying "
12234 "X % (power of two)"),
12235 WARN_STRICT_OVERFLOW_MISC);
12236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12237 fold_convert_loc (loc, type, arg0),
12238 fold_convert_loc (loc, type, mask));
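/* Illustrative examples (added for clarity; x unsigned or known
   nonnegative):
   x % 8 -> x & 7
   x % (4 << n) -> x & ((4 << n) - 1)  */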
12242 return NULL_TREE;
12244 case LROTATE_EXPR:
12245 case RROTATE_EXPR:
12246 if (integer_all_onesp (arg0))
12247 return omit_one_operand_loc (loc, type, arg0, arg1);
12248 goto shift;
12250 case RSHIFT_EXPR:
12251 /* Optimize -1 >> x for arithmetic right shifts. */
12252 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12253 && tree_expr_nonnegative_p (arg1))
12254 return omit_one_operand_loc (loc, type, arg0, arg1);
12255 /* ... fall through ... */
12257 case LSHIFT_EXPR:
12258 shift:
12259 if (integer_zerop (arg1))
12260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12261 if (integer_zerop (arg0))
12262 return omit_one_operand_loc (loc, type, arg0, arg1);
12264 /* Since negative shift count is not well-defined,
12265 don't try to compute it in the compiler. */
12266 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12267 return NULL_TREE;
12269 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12270 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12271 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12272 && host_integerp (TREE_OPERAND (arg0, 1), false)
12273 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12275 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12276 + TREE_INT_CST_LOW (arg1));
12278 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12279 being well defined. */
12280 if (low >= TYPE_PRECISION (type))
12282 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12283 low = low % TYPE_PRECISION (type);
12284 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12285 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12286 TREE_OPERAND (arg0, 0));
12287 else
12288 low = TYPE_PRECISION (type) - 1;
12291 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12292 build_int_cst (type, low));
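      /* Illustrative note, not in the upstream source: "(x << 3) << 5"
         folds here to "x << 8"; for 32-bit unsigned X, "(x << 20) << 20"
         folds to 0, while an over-wide rotate count is reduced modulo
         the precision.  */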
12295 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12296 into x & ((unsigned)-1 >> c) for unsigned types. */
12297 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12298 || (TYPE_UNSIGNED (type)
12299 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12300 && host_integerp (arg1, false)
12301 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12302 && host_integerp (TREE_OPERAND (arg0, 1), false)
12303 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12305 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12306 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12307 tree lshift;
12308 tree arg00;
12310 if (low0 == low1)
12312 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12314 lshift = build_int_cst (type, -1);
12315 lshift = int_const_binop (code, lshift, arg1);
12317 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
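      /* Illustrative note, not in the upstream source: for 32-bit X,
         "(x >> 4) << 4" folds here to "x & 0xfffffff0"; for unsigned X,
         "(x << 4) >> 4" folds to "x & 0x0fffffff".  */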
12321 /* Rewrite an LROTATE_EXPR by a constant into an
12322 RROTATE_EXPR by a new constant. */
12323 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12325 tree tem = build_int_cst (TREE_TYPE (arg1),
12326 TYPE_PRECISION (type));
12327 tem = const_binop (MINUS_EXPR, tem, arg1);
12328 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
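      /* Illustrative note, not in the upstream source: in a 32-bit type,
         a left rotate by 5 is rewritten here as a right rotate by 27.  */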
12331 /* If we have a rotate of a bit operation with the rotate count and
12332 the second operand of the bit operation both constant,
12333 permute the two operations. */
12334 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12335 && (TREE_CODE (arg0) == BIT_AND_EXPR
12336 || TREE_CODE (arg0) == BIT_IOR_EXPR
12337 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12339 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12340 fold_build2_loc (loc, code, type,
12341 TREE_OPERAND (arg0, 0), arg1),
12342 fold_build2_loc (loc, code, type,
12343 TREE_OPERAND (arg0, 1), arg1));
12345 /* Two consecutive rotates adding up to the precision of the
12346 type can be ignored. */
12347 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12348 && TREE_CODE (arg0) == RROTATE_EXPR
12349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12350 && TREE_INT_CST_HIGH (arg1) == 0
12351 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12352 && ((TREE_INT_CST_LOW (arg1)
12353 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12354 == (unsigned int) TYPE_PRECISION (type)))
12355 return TREE_OPERAND (arg0, 0);
12357 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12358 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12359 if the latter can be further optimized. */
12360 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12361 && TREE_CODE (arg0) == BIT_AND_EXPR
12362 && TREE_CODE (arg1) == INTEGER_CST
12363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12365 tree mask = fold_build2_loc (loc, code, type,
12366 fold_convert_loc (loc, type,
12367 TREE_OPERAND (arg0, 1)),
12368 arg1);
12369 tree shift = fold_build2_loc (loc, code, type,
12370 fold_convert_loc (loc, type,
12371 TREE_OPERAND (arg0, 0)),
12372 arg1);
12373 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12374 if (tem)
12375 return tem;
12378 return NULL_TREE;
12380 case MIN_EXPR:
12381 if (operand_equal_p (arg0, arg1, 0))
12382 return omit_one_operand_loc (loc, type, arg0, arg1);
12383 if (INTEGRAL_TYPE_P (type)
12384 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12385 return omit_one_operand_loc (loc, type, arg1, arg0);
12386 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12387 if (tem)
12388 return tem;
12389 goto associate;
12391 case MAX_EXPR:
12392 if (operand_equal_p (arg0, arg1, 0))
12393 return omit_one_operand_loc (loc, type, arg0, arg1);
12394 if (INTEGRAL_TYPE_P (type)
12395 && TYPE_MAX_VALUE (type)
12396 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12397 return omit_one_operand_loc (loc, type, arg1, arg0);
12398 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12399 if (tem)
12400 return tem;
12401 goto associate;
12403 case TRUTH_ANDIF_EXPR:
12404 /* Note that the operands of this must be ints
12405 and their values must be 0 or 1.
12406 ("true" is a fixed value perhaps depending on the language.) */
12407 /* If first arg is constant zero, return it. */
12408 if (integer_zerop (arg0))
12409 return fold_convert_loc (loc, type, arg0);
12410 case TRUTH_AND_EXPR:
12411 /* If either arg is constant true, drop it. */
12412 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12414 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12415 /* Preserve sequence points. */
12416 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12418 /* If second arg is constant zero, result is zero, but first arg
12419 must be evaluated. */
12420 if (integer_zerop (arg1))
12421 return omit_one_operand_loc (loc, type, arg1, arg0);
12422 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12423 case will be handled here. */
12424 if (integer_zerop (arg0))
12425 return omit_one_operand_loc (loc, type, arg0, arg1);
12427 /* !X && X is always false. */
12428 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12430 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12431 /* X && !X is always false. */
12432 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12433 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12434 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12436 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12437 means A >= Y && A != MAX, but in this case we know that
12438 A < X <= MAX. */
12440 if (!TREE_SIDE_EFFECTS (arg0)
12441 && !TREE_SIDE_EFFECTS (arg1))
12443 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12444 if (tem && !operand_equal_p (tem, arg0, 0))
12445 return fold_build2_loc (loc, code, type, tem, arg1);
12447 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12448 if (tem && !operand_equal_p (tem, arg1, 0))
12449 return fold_build2_loc (loc, code, type, arg0, tem);
12452 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12453 != NULL_TREE)
12454 return tem;
12456 return NULL_TREE;
12458 case TRUTH_ORIF_EXPR:
12459 /* Note that the operands of this must be ints
12460 and their values must be 0 or true.
12461 ("true" is a fixed value perhaps depending on the language.) */
12462 /* If first arg is constant true, return it. */
12463 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12464 return fold_convert_loc (loc, type, arg0);
12465 case TRUTH_OR_EXPR:
12466 /* If either arg is constant zero, drop it. */
12467 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12468 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12469 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12470 /* Preserve sequence points. */
12471 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12472 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12473 /* If second arg is constant true, result is true, but we must
12474 evaluate first arg. */
12475 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12476 return omit_one_operand_loc (loc, type, arg1, arg0);
12477 /* Likewise for first arg, but note this only occurs here for
12478 TRUTH_OR_EXPR. */
12479 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12480 return omit_one_operand_loc (loc, type, arg0, arg1);
12482 /* !X || X is always true. */
12483 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12485 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12486 /* X || !X is always true. */
12487 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12489 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
 12491 /* (X && !Y) || (!X && Y) is X ^ Y.  */
12492 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12493 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12495 tree a0, a1, l0, l1, n0, n1;
12497 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12498 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12500 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12501 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12503 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12504 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12506 if ((operand_equal_p (n0, a0, 0)
12507 && operand_equal_p (n1, a1, 0))
12508 || (operand_equal_p (n0, a1, 0)
12509 && operand_equal_p (n1, a0, 0)))
12510 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
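      /* Illustrative note, not in the upstream source: both
         "(x && !y) || (!x && y)" and "(!x && y) || (x && !y)" are
         recognized here and folded to a logical XOR of X and Y.  */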
12513 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12514 != NULL_TREE)
12515 return tem;
12517 return NULL_TREE;
12519 case TRUTH_XOR_EXPR:
12520 /* If the second arg is constant zero, drop it. */
12521 if (integer_zerop (arg1))
12522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12523 /* If the second arg is constant true, this is a logical inversion. */
12524 if (integer_onep (arg1))
12526 /* Only call invert_truthvalue if operand is a truth value. */
12527 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12528 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12529 else
12530 tem = invert_truthvalue_loc (loc, arg0);
12531 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12533 /* Identical arguments cancel to zero. */
12534 if (operand_equal_p (arg0, arg1, 0))
12535 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12537 /* !X ^ X is always true. */
12538 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12539 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12540 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12542 /* X ^ !X is always true. */
12543 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12544 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12545 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12547 return NULL_TREE;
12549 case EQ_EXPR:
12550 case NE_EXPR:
12551 STRIP_NOPS (arg0);
12552 STRIP_NOPS (arg1);
12554 tem = fold_comparison (loc, code, type, op0, op1);
12555 if (tem != NULL_TREE)
12556 return tem;
12558 /* bool_var != 0 becomes bool_var. */
12559 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12560 && code == NE_EXPR)
12561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12563 /* bool_var == 1 becomes bool_var. */
12564 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12565 && code == EQ_EXPR)
12566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12568 /* bool_var != 1 becomes !bool_var. */
12569 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12570 && code == NE_EXPR)
12571 return fold_convert_loc (loc, type,
12572 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12573 TREE_TYPE (arg0), arg0));
12575 /* bool_var == 0 becomes !bool_var. */
12576 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12577 && code == EQ_EXPR)
12578 return fold_convert_loc (loc, type,
12579 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12580 TREE_TYPE (arg0), arg0));
 12582 /* !exp != 0 becomes !exp.  */
12583 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12584 && code == NE_EXPR)
12585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12587 /* If this is an equality comparison of the address of two non-weak,
12588 unaliased symbols neither of which are extern (since we do not
12589 have access to attributes for externs), then we know the result. */
12590 if (TREE_CODE (arg0) == ADDR_EXPR
12591 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12592 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12593 && ! lookup_attribute ("alias",
12594 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12595 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12596 && TREE_CODE (arg1) == ADDR_EXPR
12597 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12598 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12599 && ! lookup_attribute ("alias",
12600 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12601 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12603 /* We know that we're looking at the address of two
12604 non-weak, unaliased, static _DECL nodes.
12606 It is both wasteful and incorrect to call operand_equal_p
12607 to compare the two ADDR_EXPR nodes. It is wasteful in that
12608 all we need to do is test pointer equality for the arguments
12609 to the two ADDR_EXPR nodes. It is incorrect to use
12610 operand_equal_p as that function is NOT equivalent to a
12611 C equality test. It can in fact return false for two
12612 objects which would test as equal using the C equality
12613 operator. */
12614 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12615 return constant_boolean_node (equal
12616 ? code == EQ_EXPR : code != EQ_EXPR,
12617 type);
12620 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12621 a MINUS_EXPR of a constant, we can convert it into a comparison with
12622 a revised constant as long as no overflow occurs. */
12623 if (TREE_CODE (arg1) == INTEGER_CST
12624 && (TREE_CODE (arg0) == PLUS_EXPR
12625 || TREE_CODE (arg0) == MINUS_EXPR)
12626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12627 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12628 ? MINUS_EXPR : PLUS_EXPR,
12629 fold_convert_loc (loc, TREE_TYPE (arg0),
12630 arg1),
12631 TREE_OPERAND (arg0, 1)))
12632 && !TREE_OVERFLOW (tem))
12633 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12635 /* Similarly for a NEGATE_EXPR. */
12636 if (TREE_CODE (arg0) == NEGATE_EXPR
12637 && TREE_CODE (arg1) == INTEGER_CST
12638 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12639 arg1)))
12640 && TREE_CODE (tem) == INTEGER_CST
12641 && !TREE_OVERFLOW (tem))
12642 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12644 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12645 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12646 && TREE_CODE (arg1) == INTEGER_CST
12647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12648 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12649 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12650 fold_convert_loc (loc,
12651 TREE_TYPE (arg0),
12652 arg1),
12653 TREE_OPERAND (arg0, 1)));
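      /* Illustrative note, not in the upstream source: "(x ^ 3) == 5"
         folds here to "x == 6", since 3 ^ 5 == 6.  */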
12655 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12656 if ((TREE_CODE (arg0) == PLUS_EXPR
12657 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12658 || TREE_CODE (arg0) == MINUS_EXPR)
12659 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12660 0)),
12661 arg1, 0)
12662 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12663 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12665 tree val = TREE_OPERAND (arg0, 1);
12666 return omit_two_operands_loc (loc, type,
12667 fold_build2_loc (loc, code, type,
12668 val,
12669 build_int_cst (TREE_TYPE (val),
12670 0)),
12671 TREE_OPERAND (arg0, 0), arg1);
12674 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12675 if (TREE_CODE (arg0) == MINUS_EXPR
12676 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12677 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12678 1)),
12679 arg1, 0)
12680 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12682 return omit_two_operands_loc (loc, type,
12683 code == NE_EXPR
12684 ? boolean_true_node : boolean_false_node,
12685 TREE_OPERAND (arg0, 1), arg1);
12688 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12689 for !=. Don't do this for ordered comparisons due to overflow. */
12690 if (TREE_CODE (arg0) == MINUS_EXPR
12691 && integer_zerop (arg1))
12692 return fold_build2_loc (loc, code, type,
12693 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12695 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12696 if (TREE_CODE (arg0) == ABS_EXPR
12697 && (integer_zerop (arg1) || real_zerop (arg1)))
12698 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12700 /* If this is an EQ or NE comparison with zero and ARG0 is
12701 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12702 two operations, but the latter can be done in one less insn
12703 on machines that have only two-operand insns or on which a
12704 constant cannot be the first operand. */
12705 if (TREE_CODE (arg0) == BIT_AND_EXPR
12706 && integer_zerop (arg1))
12708 tree arg00 = TREE_OPERAND (arg0, 0);
12709 tree arg01 = TREE_OPERAND (arg0, 1);
12710 if (TREE_CODE (arg00) == LSHIFT_EXPR
12711 && integer_onep (TREE_OPERAND (arg00, 0)))
12713 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12714 arg01, TREE_OPERAND (arg00, 1));
12715 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12716 build_int_cst (TREE_TYPE (arg0), 1));
12717 return fold_build2_loc (loc, code, type,
12718 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12719 arg1);
12721 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12722 && integer_onep (TREE_OPERAND (arg01, 0)))
12724 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12725 arg00, TREE_OPERAND (arg01, 1));
12726 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12727 build_int_cst (TREE_TYPE (arg0), 1));
12728 return fold_build2_loc (loc, code, type,
12729 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12730 arg1);
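      /* Illustrative note, not in the upstream source:
         "((1 << n) & x) != 0" folds here to "((x >> n) & 1) != 0",
         which the comment above notes can save an insn on
         two-operand machines.  */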
12734 /* If this is an NE or EQ comparison of zero against the result of a
12735 signed MOD operation whose second operand is a power of 2, make
12736 the MOD operation unsigned since it is simpler and equivalent. */
12737 if (integer_zerop (arg1)
12738 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12739 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12740 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12741 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12742 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12743 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12745 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12746 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12747 fold_convert_loc (loc, newtype,
12748 TREE_OPERAND (arg0, 0)),
12749 fold_convert_loc (loc, newtype,
12750 TREE_OPERAND (arg0, 1)));
12752 return fold_build2_loc (loc, code, type, newmod,
12753 fold_convert_loc (loc, newtype, arg1));
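      /* Illustrative note, not in the upstream source: for signed X,
         "x % 4 == 0" is rewritten here as "(unsigned) x % 4U == 0".  */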
12756 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12757 C1 is a valid shift constant, and C2 is a power of two, i.e.
12758 a single bit. */
12759 if (TREE_CODE (arg0) == BIT_AND_EXPR
12760 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12761 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12762 == INTEGER_CST
12763 && integer_pow2p (TREE_OPERAND (arg0, 1))
12764 && integer_zerop (arg1))
12766 tree itype = TREE_TYPE (arg0);
12767 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12768 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12770 /* Check for a valid shift count. */
12771 if (TREE_INT_CST_HIGH (arg001) == 0
12772 && TREE_INT_CST_LOW (arg001) < prec)
12774 tree arg01 = TREE_OPERAND (arg0, 1);
12775 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12776 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12777 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12778 can be rewritten as (X & (C2 << C1)) != 0. */
12779 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12781 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12782 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12783 return fold_build2_loc (loc, code, type, tem,
12784 fold_convert_loc (loc, itype, arg1));
12786 /* Otherwise, for signed (arithmetic) shifts,
12787 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12788 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12789 else if (!TYPE_UNSIGNED (itype))
12790 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12791 arg000, build_int_cst (itype, 0));
 12792 /* Otherwise, for unsigned (logical) shifts,
12793 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12794 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12795 else
12796 return omit_one_operand_loc (loc, type,
12797 code == EQ_EXPR ? integer_one_node
12798 : integer_zero_node,
12799 arg000);
12803 /* If we have (A & C) == C where C is a power of 2, convert this into
12804 (A & C) != 0. Similarly for NE_EXPR. */
12805 if (TREE_CODE (arg0) == BIT_AND_EXPR
12806 && integer_pow2p (TREE_OPERAND (arg0, 1))
12807 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12808 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12809 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12810 integer_zero_node));
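      /* Illustrative note, not in the upstream source: "(x & 8) == 8"
         folds here to "(x & 8) != 0", and "(x & 8) != 8" to
         "(x & 8) == 0".  */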
12812 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12813 bit, then fold the expression into A < 0 or A >= 0. */
12814 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12815 if (tem)
12816 return tem;
12818 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12819 Similarly for NE_EXPR. */
12820 if (TREE_CODE (arg0) == BIT_AND_EXPR
12821 && TREE_CODE (arg1) == INTEGER_CST
12822 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12824 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12825 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12826 TREE_OPERAND (arg0, 1));
12827 tree dandnotc
12828 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12829 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12830 notc);
12831 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12832 if (integer_nonzerop (dandnotc))
12833 return omit_one_operand_loc (loc, type, rslt, arg0);
12836 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12837 Similarly for NE_EXPR. */
12838 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12839 && TREE_CODE (arg1) == INTEGER_CST
12840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12842 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12843 tree candnotd
12844 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12845 TREE_OPERAND (arg0, 1),
12846 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12847 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12848 if (integer_nonzerop (candnotd))
12849 return omit_one_operand_loc (loc, type, rslt, arg0);
12852 /* If this is a comparison of a field, we may be able to simplify it. */
12853 if ((TREE_CODE (arg0) == COMPONENT_REF
12854 || TREE_CODE (arg0) == BIT_FIELD_REF)
12855 /* Handle the constant case even without -O
12856 to make sure the warnings are given. */
12857 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12859 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12860 if (t1)
12861 return t1;
12864 /* Optimize comparisons of strlen vs zero to a compare of the
12865 first character of the string vs zero. To wit,
12866 strlen(ptr) == 0 => *ptr == 0
12867 strlen(ptr) != 0 => *ptr != 0
12868 Other cases should reduce to one of these two (or a constant)
12869 due to the return value of strlen being unsigned. */
12870 if (TREE_CODE (arg0) == CALL_EXPR
12871 && integer_zerop (arg1))
12873 tree fndecl = get_callee_fndecl (arg0);
12875 if (fndecl
12876 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12877 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12878 && call_expr_nargs (arg0) == 1
12879 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12881 tree iref = build_fold_indirect_ref_loc (loc,
12882 CALL_EXPR_ARG (arg0, 0));
12883 return fold_build2_loc (loc, code, type, iref,
12884 build_int_cst (TREE_TYPE (iref), 0));
12888 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12889 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12890 if (TREE_CODE (arg0) == RSHIFT_EXPR
12891 && integer_zerop (arg1)
12892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12894 tree arg00 = TREE_OPERAND (arg0, 0);
12895 tree arg01 = TREE_OPERAND (arg0, 1);
12896 tree itype = TREE_TYPE (arg00);
12897 if (TREE_INT_CST_HIGH (arg01) == 0
12898 && TREE_INT_CST_LOW (arg01)
12899 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12901 if (TYPE_UNSIGNED (itype))
12903 itype = signed_type_for (itype);
12904 arg00 = fold_convert_loc (loc, itype, arg00);
12906 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12907 type, arg00, build_zero_cst (itype));
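      /* Illustrative note, not in the upstream source: for 32-bit signed
         X, "(x >> 31) != 0" folds here to "x < 0" and "(x >> 31) == 0"
         to "x >= 0"; unsigned X is first converted to the corresponding
         signed type.  */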
12911 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12912 if (integer_zerop (arg1)
12913 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12914 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12915 TREE_OPERAND (arg0, 1));
12917 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12918 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12919 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12920 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12921 build_zero_cst (TREE_TYPE (arg0)));
12922 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12923 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12925 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12926 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12927 build_zero_cst (TREE_TYPE (arg0)));
12929 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12930 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12931 && TREE_CODE (arg1) == INTEGER_CST
12932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12933 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12934 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12935 TREE_OPERAND (arg0, 1), arg1));
12937 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12938 (X & C) == 0 when C is a single bit. */
12939 if (TREE_CODE (arg0) == BIT_AND_EXPR
12940 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12941 && integer_zerop (arg1)
12942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12944 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12945 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12946 TREE_OPERAND (arg0, 1));
12947 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12948 type, tem,
12949 fold_convert_loc (loc, TREE_TYPE (arg0),
12950 arg1));
12953 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12954 constant C is a power of two, i.e. a single bit. */
12955 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12956 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12957 && integer_zerop (arg1)
12958 && integer_pow2p (TREE_OPERAND (arg0, 1))
12959 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12960 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12962 tree arg00 = TREE_OPERAND (arg0, 0);
12963 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12964 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12967 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 12968 when C is a power of two, i.e. a single bit. */
12969 if (TREE_CODE (arg0) == BIT_AND_EXPR
12970 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12971 && integer_zerop (arg1)
12972 && integer_pow2p (TREE_OPERAND (arg0, 1))
12973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12974 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12976 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12977 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12978 arg000, TREE_OPERAND (arg0, 1));
12979 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12980 tem, build_int_cst (TREE_TYPE (tem), 0));
12983 if (integer_zerop (arg1)
12984 && tree_expr_nonzero_p (arg0))
 12986 tree res = constant_boolean_node (code == NE_EXPR, type);
12987 return omit_one_operand_loc (loc, type, res, arg0);
12990 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12991 if (TREE_CODE (arg0) == NEGATE_EXPR
12992 && TREE_CODE (arg1) == NEGATE_EXPR)
12993 return fold_build2_loc (loc, code, type,
12994 TREE_OPERAND (arg0, 0),
12995 fold_convert_loc (loc, TREE_TYPE (arg0),
12996 TREE_OPERAND (arg1, 0)));
 12998 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12999 if (TREE_CODE (arg0) == BIT_AND_EXPR
13000 && TREE_CODE (arg1) == BIT_AND_EXPR)
13002 tree arg00 = TREE_OPERAND (arg0, 0);
13003 tree arg01 = TREE_OPERAND (arg0, 1);
13004 tree arg10 = TREE_OPERAND (arg1, 0);
13005 tree arg11 = TREE_OPERAND (arg1, 1);
13006 tree itype = TREE_TYPE (arg0);
13008 if (operand_equal_p (arg01, arg11, 0))
13009 return fold_build2_loc (loc, code, type,
13010 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13011 fold_build2_loc (loc,
13012 BIT_XOR_EXPR, itype,
13013 arg00, arg10),
13014 arg01),
13015 build_zero_cst (itype));
13017 if (operand_equal_p (arg01, arg10, 0))
13018 return fold_build2_loc (loc, code, type,
13019 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13020 fold_build2_loc (loc,
13021 BIT_XOR_EXPR, itype,
13022 arg00, arg11),
13023 arg01),
13024 build_zero_cst (itype));
13026 if (operand_equal_p (arg00, arg11, 0))
13027 return fold_build2_loc (loc, code, type,
13028 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13029 fold_build2_loc (loc,
13030 BIT_XOR_EXPR, itype,
13031 arg01, arg10),
13032 arg00),
13033 build_zero_cst (itype));
13035 if (operand_equal_p (arg00, arg10, 0))
13036 return fold_build2_loc (loc, code, type,
13037 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13038 fold_build2_loc (loc,
13039 BIT_XOR_EXPR, itype,
13040 arg01, arg11),
13041 arg00),
13042 build_zero_cst (itype));
13045 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13046 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13048 tree arg00 = TREE_OPERAND (arg0, 0);
13049 tree arg01 = TREE_OPERAND (arg0, 1);
13050 tree arg10 = TREE_OPERAND (arg1, 0);
13051 tree arg11 = TREE_OPERAND (arg1, 1);
13052 tree itype = TREE_TYPE (arg0);
13054 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13055 operand_equal_p guarantees no side-effects so we don't need
13056 to use omit_one_operand on Z. */
13057 if (operand_equal_p (arg01, arg11, 0))
13058 return fold_build2_loc (loc, code, type, arg00,
13059 fold_convert_loc (loc, TREE_TYPE (arg00),
13060 arg10));
13061 if (operand_equal_p (arg01, arg10, 0))
13062 return fold_build2_loc (loc, code, type, arg00,
13063 fold_convert_loc (loc, TREE_TYPE (arg00),
13064 arg11));
13065 if (operand_equal_p (arg00, arg11, 0))
13066 return fold_build2_loc (loc, code, type, arg01,
13067 fold_convert_loc (loc, TREE_TYPE (arg01),
13068 arg10));
13069 if (operand_equal_p (arg00, arg10, 0))
13070 return fold_build2_loc (loc, code, type, arg01,
13071 fold_convert_loc (loc, TREE_TYPE (arg01),
13072 arg11));
13074 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13075 if (TREE_CODE (arg01) == INTEGER_CST
13076 && TREE_CODE (arg11) == INTEGER_CST)
13078 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13079 fold_convert_loc (loc, itype, arg11));
13080 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13081 return fold_build2_loc (loc, code, type, tem,
13082 fold_convert_loc (loc, itype, arg10));
13086 /* Attempt to simplify equality/inequality comparisons of complex
13087 values. Only lower the comparison if the result is known or
13088 can be simplified to a single scalar comparison. */
13089 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13090 || TREE_CODE (arg0) == COMPLEX_CST)
13091 && (TREE_CODE (arg1) == COMPLEX_EXPR
13092 || TREE_CODE (arg1) == COMPLEX_CST))
13094 tree real0, imag0, real1, imag1;
13095 tree rcond, icond;
13097 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13099 real0 = TREE_OPERAND (arg0, 0);
13100 imag0 = TREE_OPERAND (arg0, 1);
13102 else
13104 real0 = TREE_REALPART (arg0);
13105 imag0 = TREE_IMAGPART (arg0);
13108 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13110 real1 = TREE_OPERAND (arg1, 0);
13111 imag1 = TREE_OPERAND (arg1, 1);
13113 else
13115 real1 = TREE_REALPART (arg1);
13116 imag1 = TREE_IMAGPART (arg1);
13119 rcond = fold_binary_loc (loc, code, type, real0, real1);
13120 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13122 if (integer_zerop (rcond))
13124 if (code == EQ_EXPR)
13125 return omit_two_operands_loc (loc, type, boolean_false_node,
13126 imag0, imag1);
13127 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13129 else
13131 if (code == NE_EXPR)
13132 return omit_two_operands_loc (loc, type, boolean_true_node,
13133 imag0, imag1);
13134 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13138 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13139 if (icond && TREE_CODE (icond) == INTEGER_CST)
13141 if (integer_zerop (icond))
13143 if (code == EQ_EXPR)
13144 return omit_two_operands_loc (loc, type, boolean_false_node,
13145 real0, real1);
13146 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13148 else
13150 if (code == NE_EXPR)
13151 return omit_two_operands_loc (loc, type, boolean_true_node,
13152 real0, real1);
13153 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13158 return NULL_TREE;
13160 case LT_EXPR:
13161 case GT_EXPR:
13162 case LE_EXPR:
13163 case GE_EXPR:
13164 tem = fold_comparison (loc, code, type, op0, op1);
13165 if (tem != NULL_TREE)
13166 return tem;
13168 /* Transform comparisons of the form X +- C CMP X. */
13169 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13171 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13172 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13173 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13174 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13176 tree arg01 = TREE_OPERAND (arg0, 1);
13177 enum tree_code code0 = TREE_CODE (arg0);
13178 int is_positive;
13180 if (TREE_CODE (arg01) == REAL_CST)
13181 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13182 else
13183 is_positive = tree_int_cst_sgn (arg01);
13185 /* (X - c) > X becomes false. */
13186 if (code == GT_EXPR
13187 && ((code0 == MINUS_EXPR && is_positive >= 0)
13188 || (code0 == PLUS_EXPR && is_positive <= 0)))
13190 if (TREE_CODE (arg01) == INTEGER_CST
13191 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13192 fold_overflow_warning (("assuming signed overflow does not "
13193 "occur when assuming that (X - c) > X "
13194 "is always false"),
13195 WARN_STRICT_OVERFLOW_ALL);
13196 return constant_boolean_node (0, type);
13199 /* Likewise (X + c) < X becomes false. */
13200 if (code == LT_EXPR
13201 && ((code0 == PLUS_EXPR && is_positive >= 0)
13202 || (code0 == MINUS_EXPR && is_positive <= 0)))
13204 if (TREE_CODE (arg01) == INTEGER_CST
13205 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13206 fold_overflow_warning (("assuming signed overflow does not "
13207 "occur when assuming that "
13208 "(X + c) < X is always false"),
13209 WARN_STRICT_OVERFLOW_ALL);
13210 return constant_boolean_node (0, type);
13213 /* Convert (X - c) <= X to true. */
13214 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13215 && code == LE_EXPR
13216 && ((code0 == MINUS_EXPR && is_positive >= 0)
13217 || (code0 == PLUS_EXPR && is_positive <= 0)))
13219 if (TREE_CODE (arg01) == INTEGER_CST
13220 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13221 fold_overflow_warning (("assuming signed overflow does not "
13222 "occur when assuming that "
13223 "(X - c) <= X is always true"),
13224 WARN_STRICT_OVERFLOW_ALL);
13225 return constant_boolean_node (1, type);
13228 /* Convert (X + c) >= X to true. */
13229 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13230 && code == GE_EXPR
13231 && ((code0 == PLUS_EXPR && is_positive >= 0)
13232 || (code0 == MINUS_EXPR && is_positive <= 0)))
13234 if (TREE_CODE (arg01) == INTEGER_CST
13235 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13236 fold_overflow_warning (("assuming signed overflow does not "
13237 "occur when assuming that "
13238 "(X + c) >= X is always true"),
13239 WARN_STRICT_OVERFLOW_ALL);
13240 return constant_boolean_node (1, type);
13243 if (TREE_CODE (arg01) == INTEGER_CST)
13245 /* Convert X + c > X and X - c < X to true for integers. */
13246 if (code == GT_EXPR
13247 && ((code0 == PLUS_EXPR && is_positive > 0)
13248 || (code0 == MINUS_EXPR && is_positive < 0)))
13250 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13251 fold_overflow_warning (("assuming signed overflow does "
13252 "not occur when assuming that "
13253 "(X + c) > X is always true"),
13254 WARN_STRICT_OVERFLOW_ALL);
13255 return constant_boolean_node (1, type);
13258 if (code == LT_EXPR
13259 && ((code0 == MINUS_EXPR && is_positive > 0)
13260 || (code0 == PLUS_EXPR && is_positive < 0)))
13262 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13263 fold_overflow_warning (("assuming signed overflow does "
13264 "not occur when assuming that "
13265 "(X - c) < X is always true"),
13266 WARN_STRICT_OVERFLOW_ALL);
13267 return constant_boolean_node (1, type);
13270 /* Convert X + c <= X and X - c >= X to false for integers. */
13271 if (code == LE_EXPR
13272 && ((code0 == PLUS_EXPR && is_positive > 0)
13273 || (code0 == MINUS_EXPR && is_positive < 0)))
13275 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13276 fold_overflow_warning (("assuming signed overflow does "
13277 "not occur when assuming that "
13278 "(X + c) <= X is always false"),
13279 WARN_STRICT_OVERFLOW_ALL);
13280 return constant_boolean_node (0, type);
13283 if (code == GE_EXPR
13284 && ((code0 == MINUS_EXPR && is_positive > 0)
13285 || (code0 == PLUS_EXPR && is_positive < 0)))
13287 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13288 fold_overflow_warning (("assuming signed overflow does "
13289 "not occur when assuming that "
13290 "(X - c) >= X is always false"),
13291 WARN_STRICT_OVERFLOW_ALL);
13292 return constant_boolean_node (0, type);
13297 /* Comparisons with the highest or lowest possible integer of
13298 the specified precision will have known values. */
13300 tree arg1_type = TREE_TYPE (arg1);
13301 unsigned int width = TYPE_PRECISION (arg1_type);
13303 if (TREE_CODE (arg1) == INTEGER_CST
13304 && width <= HOST_BITS_PER_DOUBLE_INT
13305 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13307 HOST_WIDE_INT signed_max_hi;
13308 unsigned HOST_WIDE_INT signed_max_lo;
13309 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13311 if (width <= HOST_BITS_PER_WIDE_INT)
13313 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13314 - 1;
13315 signed_max_hi = 0;
13316 max_hi = 0;
13318 if (TYPE_UNSIGNED (arg1_type))
13320 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13321 min_lo = 0;
13322 min_hi = 0;
13324 else
13326 max_lo = signed_max_lo;
13327 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13328 min_hi = -1;
13331 else
13333 width -= HOST_BITS_PER_WIDE_INT;
13334 signed_max_lo = -1;
13335 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13336 - 1;
13337 max_lo = -1;
13338 min_lo = 0;
13340 if (TYPE_UNSIGNED (arg1_type))
13342 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13343 min_hi = 0;
13345 else
13347 max_hi = signed_max_hi;
13348 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13352 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13353 && TREE_INT_CST_LOW (arg1) == max_lo)
13354 switch (code)
13356 case GT_EXPR:
13357 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13359 case GE_EXPR:
13360 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13362 case LE_EXPR:
13363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13365 case LT_EXPR:
13366 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13368 /* The GE_EXPR and LT_EXPR cases above are not normally
13369 reached because of previous transformations. */
13371 default:
13372 break;
13374 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13375 == max_hi
13376 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13377 switch (code)
13379 case GT_EXPR:
13380 arg1 = const_binop (PLUS_EXPR, arg1,
13381 build_int_cst (TREE_TYPE (arg1), 1));
13382 return fold_build2_loc (loc, EQ_EXPR, type,
13383 fold_convert_loc (loc,
13384 TREE_TYPE (arg1), arg0),
13385 arg1);
13386 case LE_EXPR:
13387 arg1 = const_binop (PLUS_EXPR, arg1,
13388 build_int_cst (TREE_TYPE (arg1), 1));
13389 return fold_build2_loc (loc, NE_EXPR, type,
13390 fold_convert_loc (loc, TREE_TYPE (arg1),
13391 arg0),
13392 arg1);
13393 default:
13394 break;
13396 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13397 == min_hi
13398 && TREE_INT_CST_LOW (arg1) == min_lo)
13399 switch (code)
13401 case LT_EXPR:
13402 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13404 case LE_EXPR:
13405 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13407 case GE_EXPR:
13408 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13410 case GT_EXPR:
13411 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13413 default:
13414 break;
13416 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13417 == min_hi
13418 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13419 switch (code)
13421 case GE_EXPR:
13422 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13423 return fold_build2_loc (loc, NE_EXPR, type,
13424 fold_convert_loc (loc,
13425 TREE_TYPE (arg1), arg0),
13426 arg1);
13427 case LT_EXPR:
13428 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13429 return fold_build2_loc (loc, EQ_EXPR, type,
13430 fold_convert_loc (loc, TREE_TYPE (arg1),
13431 arg0),
13432 arg1);
13433 default:
13434 break;
13437 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13438 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13439 && TYPE_UNSIGNED (arg1_type)
13440 /* We will flip the signedness of the comparison operator
13441 associated with the mode of arg1, so the sign bit is
13442 specified by this mode. Check that arg1 is the signed
13443 max associated with this sign bit. */
13444 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13445 /* signed_type does not work on pointer types. */
13446 && INTEGRAL_TYPE_P (arg1_type))
13448 /* The following case also applies to X < signed_max+1
 13449 and X >= signed_max+1 because of previous transformations. */
13450 if (code == LE_EXPR || code == GT_EXPR)
13452 tree st;
13453 st = signed_type_for (TREE_TYPE (arg1));
13454 return fold_build2_loc (loc,
13455 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13456 type, fold_convert_loc (loc, st, arg0),
13457 build_int_cst (st, 0));
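      /* Illustrative note, not in the upstream source: for 32-bit
         unsigned X, "x <= 0x7fffffff" folds here to "(int) x >= 0"
         and "x > 0x7fffffff" to "(int) x < 0".  */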
13463 /* If we are comparing an ABS_EXPR with a constant, we can
13464 convert all the cases into explicit comparisons, but they may
13465 well not be faster than doing the ABS and one comparison.
13466 But ABS (X) <= C is a range comparison, which becomes a subtraction
13467 and a comparison, and is probably faster. */
13468 if (code == LE_EXPR
13469 && TREE_CODE (arg1) == INTEGER_CST
13470 && TREE_CODE (arg0) == ABS_EXPR
13471 && ! TREE_SIDE_EFFECTS (arg0)
13472 && (0 != (tem = negate_expr (arg1)))
13473 && TREE_CODE (tem) == INTEGER_CST
13474 && !TREE_OVERFLOW (tem))
13475 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13476 build2 (GE_EXPR, type,
13477 TREE_OPERAND (arg0, 0), tem),
13478 build2 (LE_EXPR, type,
13479 TREE_OPERAND (arg0, 0), arg1));
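      /* Illustrative note, not in the upstream source: "abs (x) <= 7"
         is expanded here to "x >= -7 && x <= 7", the range comparison
         that, per the comment above, becomes a subtraction and a
         single compare.  */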
13481 /* Convert ABS_EXPR<x> >= 0 to true. */
13482 strict_overflow_p = false;
13483 if (code == GE_EXPR
13484 && (integer_zerop (arg1)
13485 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13486 && real_zerop (arg1)))
13487 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13489 if (strict_overflow_p)
13490 fold_overflow_warning (("assuming signed overflow does not occur "
13491 "when simplifying comparison of "
13492 "absolute value and zero"),
13493 WARN_STRICT_OVERFLOW_CONDITIONAL);
13494 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13497 /* Convert ABS_EXPR<x> < 0 to false. */
13498 strict_overflow_p = false;
13499 if (code == LT_EXPR
13500 && (integer_zerop (arg1) || real_zerop (arg1))
13501 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13503 if (strict_overflow_p)
13504 fold_overflow_warning (("assuming signed overflow does not occur "
13505 "when simplifying comparison of "
13506 "absolute value and zero"),
13507 WARN_STRICT_OVERFLOW_CONDITIONAL);
13508 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13511 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13512 and similarly for >= into !=. */
13513 if ((code == LT_EXPR || code == GE_EXPR)
13514 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13515 && TREE_CODE (arg1) == LSHIFT_EXPR
13516 && integer_onep (TREE_OPERAND (arg1, 0)))
13517 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13518 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13519 TREE_OPERAND (arg1, 1)),
13520 build_zero_cst (TREE_TYPE (arg0)));
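      /* Illustrative note, not in the upstream source: for unsigned X,
         "x < (1 << y)" folds here to "(x >> y) == 0" and
         "x >= (1 << y)" to "(x >> y) != 0".  */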
13522 if ((code == LT_EXPR || code == GE_EXPR)
13523 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13524 && CONVERT_EXPR_P (arg1)
13525 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13526 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13528 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13529 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13530 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13531 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13532 build_zero_cst (TREE_TYPE (arg0)));
13535 return NULL_TREE;
13537 case UNORDERED_EXPR:
13538 case ORDERED_EXPR:
13539 case UNLT_EXPR:
13540 case UNLE_EXPR:
13541 case UNGT_EXPR:
13542 case UNGE_EXPR:
13543 case UNEQ_EXPR:
13544 case LTGT_EXPR:
13545 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13547 t1 = fold_relational_const (code, type, arg0, arg1);
13548 if (t1 != NULL_TREE)
13549 return t1;
13552 /* If the first operand is NaN, the result is constant. */
13553 if (TREE_CODE (arg0) == REAL_CST
13554 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13555 && (code != LTGT_EXPR || ! flag_trapping_math))
13557 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13558 ? integer_zero_node
13559 : integer_one_node;
13560 return omit_one_operand_loc (loc, type, t1, arg1);
13563 /* If the second operand is NaN, the result is constant. */
13564 if (TREE_CODE (arg1) == REAL_CST
13565 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13566 && (code != LTGT_EXPR || ! flag_trapping_math))
13568 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13569 ? integer_zero_node
13570 : integer_one_node;
13571 return omit_one_operand_loc (loc, type, t1, arg0);
13574 /* Simplify unordered comparison of something with itself. */
13575 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13576 && operand_equal_p (arg0, arg1, 0))
13577 return constant_boolean_node (1, type);
13579 if (code == LTGT_EXPR
13580 && !flag_trapping_math
13581 && operand_equal_p (arg0, arg1, 0))
13582 return constant_boolean_node (0, type);
13584 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13586 tree targ0 = strip_float_extensions (arg0);
13587 tree targ1 = strip_float_extensions (arg1);
13588 tree newtype = TREE_TYPE (targ0);
13590 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13591 newtype = TREE_TYPE (targ1);
13593 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13594 return fold_build2_loc (loc, code, type,
13595 fold_convert_loc (loc, newtype, targ0),
13596 fold_convert_loc (loc, newtype, targ1));
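      /* Illustrative note, not in the upstream source: an unordered
         comparison of "(double) f1" with "(double) f2", where both
         operands were promoted from float, is performed here directly
         on f1 and f2 in float.  */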
13599 return NULL_TREE;
13601 case COMPOUND_EXPR:
13602 /* When pedantic, a compound expression can be neither an lvalue
13603 nor an integer constant expression. */
13604 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13605 return NULL_TREE;
 13606 /* Don't let (0, 0) be a null pointer constant. */
13607 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13608 : fold_convert_loc (loc, type, arg1);
13609 return pedantic_non_lvalue_loc (loc, tem);
13611 case COMPLEX_EXPR:
13612 if ((TREE_CODE (arg0) == REAL_CST
13613 && TREE_CODE (arg1) == REAL_CST)
13614 || (TREE_CODE (arg0) == INTEGER_CST
13615 && TREE_CODE (arg1) == INTEGER_CST))
13616 return build_complex (type, arg0, arg1);
13617 if (TREE_CODE (arg0) == REALPART_EXPR
13618 && TREE_CODE (arg1) == IMAGPART_EXPR
13619 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13620 && operand_equal_p (TREE_OPERAND (arg0, 0),
13621 TREE_OPERAND (arg1, 0), 0))
13622 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13623 TREE_OPERAND (arg1, 0));
13624 return NULL_TREE;
13626 case ASSERT_EXPR:
13627 /* An ASSERT_EXPR should never be passed to fold_binary. */
13628 gcc_unreachable ();
13630 case VEC_PACK_TRUNC_EXPR:
13631 case VEC_PACK_FIX_TRUNC_EXPR:
13633 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13634 tree *elts;
13636 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13637 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13638 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13639 return NULL_TREE;
13641 elts = XALLOCAVEC (tree, nelts);
13642 if (!vec_cst_ctor_to_array (arg0, elts)
13643 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13644 return NULL_TREE;
13646 for (i = 0; i < nelts; i++)
13648 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13649 ? NOP_EXPR : FIX_TRUNC_EXPR,
13650 TREE_TYPE (type), elts[i]);
13651 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13652 return NULL_TREE;
13655 return build_vector (type, elts);
13658 case VEC_WIDEN_MULT_LO_EXPR:
13659 case VEC_WIDEN_MULT_HI_EXPR:
13660 case VEC_WIDEN_MULT_EVEN_EXPR:
13661 case VEC_WIDEN_MULT_ODD_EXPR:
13663 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13664 unsigned int out, ofs, scale;
13665 tree *elts;
13667 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13668 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13669 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13670 return NULL_TREE;
13672 elts = XALLOCAVEC (tree, nelts * 4);
13673 if (!vec_cst_ctor_to_array (arg0, elts)
13674 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13675 return NULL_TREE;
13677 if (code == VEC_WIDEN_MULT_LO_EXPR)
13678 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13679 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13680 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13681 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13682 scale = 1, ofs = 0;
13683 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13684 scale = 1, ofs = 1;
13686 for (out = 0; out < nelts; out++)
13688 unsigned int in1 = (out << scale) + ofs;
13689 unsigned int in2 = in1 + nelts * 2;
13690 tree t1, t2;
13692 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13693 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13695 if (t1 == NULL_TREE || t2 == NULL_TREE)
13696 return NULL_TREE;
13697 elts[out] = const_binop (MULT_EXPR, t1, t2);
13698 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13699 return NULL_TREE;
13702 return build_vector (type, elts);
13705 default:
13706 return NULL_TREE;
13707 } /* switch (code) */
13710 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13711 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13712 of GOTO_EXPR. */
13714 static tree
13715 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13717 switch (TREE_CODE (*tp))
13719 case LABEL_EXPR:
13720 return *tp;
13722 case GOTO_EXPR:
13723 *walk_subtrees = 0;
13725 /* ... fall through ... */
13727 default:
13728 return NULL_TREE;
13732 /* Return whether the sub-tree ST contains a label which is accessible from
13733 outside the sub-tree. */
13735 static bool
13736 contains_label_p (tree st)
13738 return
 13739 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13742 /* Fold a ternary expression of code CODE and type TYPE with operands
13743 OP0, OP1, and OP2. Return the folded expression if folding is
13744 successful. Otherwise, return NULL_TREE. */
13746 tree
13747 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13748 tree op0, tree op1, tree op2)
13750 tree tem;
13751 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13752 enum tree_code_class kind = TREE_CODE_CLASS (code);
13754 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13755 && TREE_CODE_LENGTH (code) == 3);
13757 /* Strip any conversions that don't change the mode. This is safe
13758 for every expression, except for a comparison expression because
13759 its signedness is derived from its operands. So, in the latter
13760 case, only strip conversions that don't change the signedness.
13762 Note that this is done as an internal manipulation within the
13763 constant folder, in order to find the simplest representation of
13764 the arguments so that their form can be studied. In any cases,
13765 the appropriate type conversions should be put back in the tree
13766 that will get out of the constant folder. */
13767 if (op0)
13769 arg0 = op0;
13770 STRIP_NOPS (arg0);
13773 if (op1)
13775 arg1 = op1;
13776 STRIP_NOPS (arg1);
13779 if (op2)
13781 arg2 = op2;
13782 STRIP_NOPS (arg2);
13785 switch (code)
13787 case COMPONENT_REF:
13788 if (TREE_CODE (arg0) == CONSTRUCTOR
13789 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13791 unsigned HOST_WIDE_INT idx;
13792 tree field, value;
13793 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13794 if (field == arg1)
13795 return value;
13797 return NULL_TREE;
13799 case COND_EXPR:
13800 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13801 so all simple results must be passed through pedantic_non_lvalue. */
13802 if (TREE_CODE (arg0) == INTEGER_CST)
13804 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13805 tem = integer_zerop (arg0) ? op2 : op1;
13806 /* Only optimize constant conditions when the selected branch
13807 has the same type as the COND_EXPR. This avoids optimizing
13808 away "c ? x : throw", where the throw has a void type.
 13809 Avoid throwing away an operand that contains a label. */
13810 if ((!TREE_SIDE_EFFECTS (unused_op)
13811 || !contains_label_p (unused_op))
13812 && (! VOID_TYPE_P (TREE_TYPE (tem))
13813 || VOID_TYPE_P (type)))
13814 return pedantic_non_lvalue_loc (loc, tem);
13815 return NULL_TREE;
13817 if (operand_equal_p (arg1, op2, 0))
13818 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13820 /* If we have A op B ? A : C, we may be able to convert this to a
13821 simpler expression, depending on the operation and the values
13822 of B and C. Signed zeros prevent all of these transformations,
13823 for reasons given above each one.
13825 Also try swapping the arguments and inverting the conditional. */
13826 if (COMPARISON_CLASS_P (arg0)
13827 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13828 arg1, TREE_OPERAND (arg0, 1))
13829 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13831 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13832 if (tem)
13833 return tem;
13836 if (COMPARISON_CLASS_P (arg0)
13837 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13838 op2,
13839 TREE_OPERAND (arg0, 1))
13840 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13842 location_t loc0 = expr_location_or (arg0, loc);
13843 tem = fold_truth_not_expr (loc0, arg0);
13844 if (tem && COMPARISON_CLASS_P (tem))
13846 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13847 if (tem)
13848 return tem;
13852 /* If the second operand is simpler than the third, swap them
13853 since that produces better jump optimization results. */
13854 if (truth_value_p (TREE_CODE (arg0))
13855 && tree_swap_operands_p (op1, op2, false))
13857 location_t loc0 = expr_location_or (arg0, loc);
13858 /* See if this can be inverted. If it can't, possibly because
13859 it was a floating-point inequality comparison, don't do
13860 anything. */
13861 tem = fold_truth_not_expr (loc0, arg0);
13862 if (tem)
13863 return fold_build3_loc (loc, code, type, tem, op2, op1);
13866 /* Convert A ? 1 : 0 to simply A. */
13867 if (integer_onep (op1)
13868 && integer_zerop (op2)
13869 /* If we try to convert OP0 to our type, the
13870 call to fold will try to move the conversion inside
13871 a COND, which will recurse. In that case, the COND_EXPR
13872 is probably the best choice, so leave it alone. */
13873 && type == TREE_TYPE (arg0))
13874 return pedantic_non_lvalue_loc (loc, arg0);
13876 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13877 over COND_EXPR in cases such as floating point comparisons. */
13878 if (integer_zerop (op1)
13879 && integer_onep (op2)
13880 && truth_value_p (TREE_CODE (arg0)))
13881 return pedantic_non_lvalue_loc (loc,
13882 fold_convert_loc (loc, type,
13883 invert_truthvalue_loc (loc,
13884 arg0)));
13886 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13887 if (TREE_CODE (arg0) == LT_EXPR
13888 && integer_zerop (TREE_OPERAND (arg0, 1))
13889 && integer_zerop (op2)
13890 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13892 /* sign_bit_p only checks ARG1 bits within A's precision.
13893 If <sign bit of A> has wider type than A, bits outside
13894 of A's precision in <sign bit of A> need to be checked.
13895 If they are all 0, this optimization needs to be done
13896 in A's unsigned type; if they are all 1, in A's signed type;
13897 otherwise it can't be done. */
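/* Worked example (illustrative, not from the original source):
   for "(signed char) c < 0 ? 128 : 0" folded in type "int", the
   constant 128 is the sign bit of an 8-bit value and its bits
   above bit 7 are all zero, so the AND is done in the unsigned
   8-bit type: the result is "(int) ((unsigned char) c & 128)". */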
13898 if (TYPE_PRECISION (TREE_TYPE (tem))
13899 < TYPE_PRECISION (TREE_TYPE (arg1))
13900 && TYPE_PRECISION (TREE_TYPE (tem))
13901 < TYPE_PRECISION (type))
13903 unsigned HOST_WIDE_INT mask_lo;
13904 HOST_WIDE_INT mask_hi;
13905 int inner_width, outer_width;
13906 tree tem_type;
13908 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13909 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13910 if (outer_width > TYPE_PRECISION (type))
13911 outer_width = TYPE_PRECISION (type);
13913 if (outer_width > HOST_BITS_PER_WIDE_INT)
13915 mask_hi = ((unsigned HOST_WIDE_INT) -1
13916 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13917 mask_lo = -1;
13919 else
13921 mask_hi = 0;
13922 mask_lo = ((unsigned HOST_WIDE_INT) -1
13923 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13925 if (inner_width > HOST_BITS_PER_WIDE_INT)
13927 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13928 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13929 mask_lo = 0;
13931 else
13932 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13933 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13935 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13936 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13938 tem_type = signed_type_for (TREE_TYPE (tem));
13939 tem = fold_convert_loc (loc, tem_type, tem);
13941 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13942 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13944 tem_type = unsigned_type_for (TREE_TYPE (tem));
13945 tem = fold_convert_loc (loc, tem_type, tem);
13947 else
13948 tem = NULL;
13951 if (tem)
13952 return
13953 fold_convert_loc (loc, type,
13954 fold_build2_loc (loc, BIT_AND_EXPR,
13955 TREE_TYPE (tem), tem,
13956 fold_convert_loc (loc,
13957 TREE_TYPE (tem),
13958 arg1)));
13961 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13962 already handled above. */
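/* E.g. "((a >> 3) & 1) ? 8 : 0" folds to "a & 8": bit 3 of A is
   exactly what the condition tests (illustrative example, not
   from the original source). */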
13963 if (TREE_CODE (arg0) == BIT_AND_EXPR
13964 && integer_onep (TREE_OPERAND (arg0, 1))
13965 && integer_zerop (op2)
13966 && integer_pow2p (arg1))
13968 tree tem = TREE_OPERAND (arg0, 0);
13969 STRIP_NOPS (tem);
13970 if (TREE_CODE (tem) == RSHIFT_EXPR
13971 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13972 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13973 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13974 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13975 TREE_OPERAND (tem, 0), arg1);
13978 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13979 is probably obsolete because the first operand should be a
13980 truth value (that's why we have the two cases above), but let's
13981 leave it in until we can confirm this for all front-ends. */
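/* E.g. "(a & 4) != 0 ? 4 : 0" becomes "a & 4" (illustrative
   example, not from the original source). */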
13982 if (integer_zerop (op2)
13983 && TREE_CODE (arg0) == NE_EXPR
13984 && integer_zerop (TREE_OPERAND (arg0, 1))
13985 && integer_pow2p (arg1)
13986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13987 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13988 arg1, OEP_ONLY_CONST))
13989 return pedantic_non_lvalue_loc (loc,
13990 fold_convert_loc (loc, type,
13991 TREE_OPERAND (arg0, 0)));
13993 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13994 if (integer_zerop (op2)
13995 && truth_value_p (TREE_CODE (arg0))
13996 && truth_value_p (TREE_CODE (arg1)))
13997 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13998 fold_convert_loc (loc, type, arg0),
13999 arg1);
14001 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14002 if (integer_onep (op2)
14003 && truth_value_p (TREE_CODE (arg0))
14004 && truth_value_p (TREE_CODE (arg1)))
14006 location_t loc0 = expr_location_or (arg0, loc);
14007 /* Only perform transformation if ARG0 is easily inverted. */
14008 tem = fold_truth_not_expr (loc0, arg0);
14009 if (tem)
14010 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14011 fold_convert_loc (loc, type, tem),
14012 arg1);
14015 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14016 if (integer_zerop (arg1)
14017 && truth_value_p (TREE_CODE (arg0))
14018 && truth_value_p (TREE_CODE (op2)))
14020 location_t loc0 = expr_location_or (arg0, loc);
14021 /* Only perform transformation if ARG0 is easily inverted. */
14022 tem = fold_truth_not_expr (loc0, arg0);
14023 if (tem)
14024 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14025 fold_convert_loc (loc, type, tem),
14026 op2);
14029 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14030 if (integer_onep (arg1)
14031 && truth_value_p (TREE_CODE (arg0))
14032 && truth_value_p (TREE_CODE (op2)))
14033 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14034 fold_convert_loc (loc, type, arg0),
14035 op2);
14037 return NULL_TREE;
14039 case CALL_EXPR:
14040 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14041 of fold_ternary on them. */
14042 gcc_unreachable ();
14044 case BIT_FIELD_REF:
14045 if ((TREE_CODE (arg0) == VECTOR_CST
14046 || (TREE_CODE (arg0) == CONSTRUCTOR
14047 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14048 && (type == TREE_TYPE (TREE_TYPE (arg0))
14049 || (TREE_CODE (type) == VECTOR_TYPE
14050 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14052 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14053 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14054 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14055 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14057 if (n != 0
14058 && (idx % width) == 0
14059 && (n % width) == 0
14060 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14062 idx = idx / width;
14063 n = n / width;
14064 if (TREE_CODE (type) == VECTOR_TYPE)
14066 if (TREE_CODE (arg0) == VECTOR_CST)
14068 tree *vals = XALLOCAVEC (tree, n);
14069 unsigned i;
14070 for (i = 0; i < n; ++i)
14071 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14072 return build_vector (type, vals);
14074 else
14076 VEC(constructor_elt, gc) *vals;
14077 unsigned i;
14078 if (CONSTRUCTOR_NELTS (arg0) == 0)
14079 return build_constructor (type, NULL);
14080 if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14081 0)->value))
14082 != VECTOR_TYPE)
14084 vals = VEC_alloc (constructor_elt, gc, n);
14085 for (i = 0;
14086 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14087 ++i)
14088 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14089 CONSTRUCTOR_ELT
14090 (arg0, idx + i)->value);
14091 return build_constructor (type, vals);
14095 else if (n == 1)
14097 if (TREE_CODE (arg0) == VECTOR_CST)
14098 return VECTOR_CST_ELT (arg0, idx);
14099 else if (CONSTRUCTOR_NELTS (arg0) == 0)
14100 return build_zero_cst (type);
14101 else if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
14102 0)->value))
14103 != VECTOR_TYPE)
14105 if (idx < CONSTRUCTOR_NELTS (arg0))
14106 return CONSTRUCTOR_ELT (arg0, idx)->value;
14107 return build_zero_cst (type);
14113 /* A BIT_FIELD_REF that references the full argument can be stripped. */
14114 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14115 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14116 && integer_zerop (op2))
14117 return fold_convert_loc (loc, type, arg0);
14119 /* On constants we can use native encode/interpret to constant
14120 fold (nearly) all BIT_FIELD_REFs. */
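/* Sketch of the mechanism (illustrative commentary): the constant
   is serialized into a byte buffer by native_encode_expr, and the
   referenced bytes are read back as TYPE by native_interpret_expr,
   so e.g. a BIT_FIELD_REF of bits 32..63 of a 64-bit constant can
   be reinterpreted as a 32-bit value. */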
14121 if (CONSTANT_CLASS_P (arg0)
14122 && can_native_interpret_type_p (type)
14123 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14124 /* This limitation should not be necessary; we just need to
14125 round this up to the mode size. */
14126 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14127 /* Need bit-shifting of the buffer to relax the following. */
14128 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14130 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14131 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14132 unsigned HOST_WIDE_INT clen;
14133 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14134 /* ??? We cannot tell native_encode_expr to start at
14135 an arbitrary byte, so limit ourselves to a reasonable
14136 amount of work. */
14137 if (clen <= 4096)
14139 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14140 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14141 if (len > 0
14142 && len * BITS_PER_UNIT >= bitpos + bitsize)
14144 tree v = native_interpret_expr (type,
14145 b + bitpos / BITS_PER_UNIT,
14146 bitsize / BITS_PER_UNIT);
14147 if (v)
14148 return v;
14153 return NULL_TREE;
14155 case FMA_EXPR:
14156 /* For integers we can decompose the FMA if possible. */
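/* E.g. FMA_EXPR <3, 4, c> becomes 12 + c, and FMA_EXPR <a, b, 0>
   becomes a * b (illustrative examples, not from the original
   source). */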
14157 if (TREE_CODE (arg0) == INTEGER_CST
14158 && TREE_CODE (arg1) == INTEGER_CST)
14159 return fold_build2_loc (loc, PLUS_EXPR, type,
14160 const_binop (MULT_EXPR, arg0, arg1), arg2);
14161 if (integer_zerop (arg2))
14162 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14164 return fold_fma (loc, type, arg0, arg1, arg2);
14166 case VEC_PERM_EXPR:
14167 if (TREE_CODE (arg2) == VECTOR_CST)
14169 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14170 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14171 tree t;
14172 bool need_mask_canon = false;
14173 bool all_in_vec0 = true;
14174 bool all_in_vec1 = true;
14175 bool maybe_identity = true;
14176 bool single_arg = (op0 == op1);
14177 bool changed = false;
14179 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14180 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14181 for (i = 0; i < nelts; i++)
14183 tree val = VECTOR_CST_ELT (arg2, i);
14184 if (TREE_CODE (val) != INTEGER_CST)
14185 return NULL_TREE;
14187 sel[i] = TREE_INT_CST_LOW (val) & mask;
14188 if (TREE_INT_CST_HIGH (val)
14189 || ((unsigned HOST_WIDE_INT)
14190 TREE_INT_CST_LOW (val) != sel[i]))
14191 need_mask_canon = true;
14193 if (sel[i] < nelts)
14194 all_in_vec1 = false;
14195 else
14196 all_in_vec0 = false;
14198 if ((sel[i] & (nelts-1)) != i)
14199 maybe_identity = false;
14202 if (maybe_identity)
14204 if (all_in_vec0)
14205 return op0;
14206 if (all_in_vec1)
14207 return op1;
14210 if (all_in_vec0)
14211 op1 = op0;
14212 else if (all_in_vec1)
14214 op0 = op1;
14215 for (i = 0; i < nelts; i++)
14216 sel[i] -= nelts;
14217 need_mask_canon = true;
14220 if ((TREE_CODE (op0) == VECTOR_CST
14221 || TREE_CODE (op0) == CONSTRUCTOR)
14222 && (TREE_CODE (op1) == VECTOR_CST
14223 || TREE_CODE (op1) == CONSTRUCTOR))
14225 t = fold_vec_perm (type, op0, op1, sel);
14226 if (t != NULL_TREE)
14227 return t;
14230 if (op0 == op1 && !single_arg)
14231 changed = true;
14233 if (need_mask_canon && arg2 == op2)
14235 tree *tsel = XALLOCAVEC (tree, nelts);
14236 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14237 for (i = 0; i < nelts; i++)
14238 tsel[i] = build_int_cst (eltype, sel[i]);
14239 op2 = build_vector (TREE_TYPE (arg2), tsel);
14240 changed = true;
14243 if (changed)
14244 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14246 return NULL_TREE;
14248 default:
14249 return NULL_TREE;
14250 } /* switch (code) */
14253 /* Perform constant folding and related simplification of EXPR.
14254 The related simplifications include x*1 => x, x*0 => 0, etc.,
14255 and application of the associative law.
14256 NOP_EXPR conversions may be removed freely (as long as we
14257 are careful not to change the type of the overall expression).
14258 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14259 but we can constant-fold them if they have constant operands. */
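/* Usage sketch (illustrative, not from the original source):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
                        build_int_cst (integer_type_node, 2),
                        build_int_cst (integer_type_node, 3));
     tree folded = fold (sum);   -- yields the INTEGER_CST 5

   A tree that cannot be simplified is returned unchanged. */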
14261 #ifdef ENABLE_FOLD_CHECKING
14262 # define fold(x) fold_1 (x)
14263 static tree fold_1 (tree);
14264 static
14265 #endif
14266 tree
14267 fold (tree expr)
14269 const tree t = expr;
14270 enum tree_code code = TREE_CODE (t);
14271 enum tree_code_class kind = TREE_CODE_CLASS (code);
14272 tree tem;
14273 location_t loc = EXPR_LOCATION (expr);
14275 /* Return right away if a constant. */
14276 if (kind == tcc_constant)
14277 return t;
14279 /* CALL_EXPR-like objects with variable numbers of operands are
14280 treated specially. */
14281 if (kind == tcc_vl_exp)
14283 if (code == CALL_EXPR)
14285 tem = fold_call_expr (loc, expr, false);
14286 return tem ? tem : expr;
14288 return expr;
14291 if (IS_EXPR_CODE_CLASS (kind))
14293 tree type = TREE_TYPE (t);
14294 tree op0, op1, op2;
14296 switch (TREE_CODE_LENGTH (code))
14298 case 1:
14299 op0 = TREE_OPERAND (t, 0);
14300 tem = fold_unary_loc (loc, code, type, op0);
14301 return tem ? tem : expr;
14302 case 2:
14303 op0 = TREE_OPERAND (t, 0);
14304 op1 = TREE_OPERAND (t, 1);
14305 tem = fold_binary_loc (loc, code, type, op0, op1);
14306 return tem ? tem : expr;
14307 case 3:
14308 op0 = TREE_OPERAND (t, 0);
14309 op1 = TREE_OPERAND (t, 1);
14310 op2 = TREE_OPERAND (t, 2);
14311 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14312 return tem ? tem : expr;
14313 default:
14314 break;
14318 switch (code)
14320 case ARRAY_REF:
14322 tree op0 = TREE_OPERAND (t, 0);
14323 tree op1 = TREE_OPERAND (t, 1);
14325 if (TREE_CODE (op1) == INTEGER_CST
14326 && TREE_CODE (op0) == CONSTRUCTOR
14327 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14329 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14330 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14331 unsigned HOST_WIDE_INT begin = 0;
14333 /* Find a matching index by means of a binary search. */
14334 while (begin != end)
14336 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14337 tree index = VEC_index (constructor_elt, elts, middle).index;
14339 if (TREE_CODE (index) == INTEGER_CST
14340 && tree_int_cst_lt (index, op1))
14341 begin = middle + 1;
14342 else if (TREE_CODE (index) == INTEGER_CST
14343 && tree_int_cst_lt (op1, index))
14344 end = middle;
14345 else if (TREE_CODE (index) == RANGE_EXPR
14346 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14347 begin = middle + 1;
14348 else if (TREE_CODE (index) == RANGE_EXPR
14349 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14350 end = middle;
14351 else
14352 return VEC_index (constructor_elt, elts, middle).value;
14356 return t;
14359 case CONST_DECL:
14360 return fold (DECL_INITIAL (t));
14362 default:
14363 return t;
14364 } /* switch (code) */
14367 #ifdef ENABLE_FOLD_CHECKING
14368 #undef fold
14370 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14371 hash_table <pointer_hash <tree_node> >);
14372 static void fold_check_failed (const_tree, const_tree);
14373 void print_fold_checksum (const_tree);
14375 /* When --enable-checking=fold, compute a digest of expr before
14376 and after the actual fold call to verify that fold did not
14377 accidentally change the original expr. */
14379 tree
14380 fold (tree expr)
14382 tree ret;
14383 struct md5_ctx ctx;
14384 unsigned char checksum_before[16], checksum_after[16];
14385 hash_table <pointer_hash <tree_node> > ht;
14387 ht.create (32);
14388 md5_init_ctx (&ctx);
14389 fold_checksum_tree (expr, &ctx, ht);
14390 md5_finish_ctx (&ctx, checksum_before);
14391 ht.empty ();
14393 ret = fold_1 (expr);
14395 md5_init_ctx (&ctx);
14396 fold_checksum_tree (expr, &ctx, ht);
14397 md5_finish_ctx (&ctx, checksum_after);
14398 ht.dispose ();
14400 if (memcmp (checksum_before, checksum_after, 16))
14401 fold_check_failed (expr, ret);
14403 return ret;
14406 void
14407 print_fold_checksum (const_tree expr)
14409 struct md5_ctx ctx;
14410 unsigned char checksum[16], cnt;
14411 hash_table <pointer_hash <tree_node> > ht;
14413 ht.create (32);
14414 md5_init_ctx (&ctx);
14415 fold_checksum_tree (expr, &ctx, ht);
14416 md5_finish_ctx (&ctx, checksum);
14417 ht.dispose ();
14418 for (cnt = 0; cnt < 16; ++cnt)
14419 fprintf (stderr, "%02x", checksum[cnt]);
14420 putc ('\n', stderr);
14423 static void
14424 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14426 internal_error ("fold check: original tree changed by fold");
14429 static void
14430 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14431 hash_table <pointer_hash <tree_node> > ht)
14433 tree_node **slot;
14434 enum tree_code code;
14435 union tree_node buf;
14436 int i, len;
14438 recursive_label:
14439 if (expr == NULL)
14440 return;
14441 slot = ht.find_slot (expr, INSERT);
14442 if (*slot != NULL)
14443 return;
14444 *slot = CONST_CAST_TREE (expr);
14445 code = TREE_CODE (expr);
14446 if (TREE_CODE_CLASS (code) == tcc_declaration
14447 && DECL_ASSEMBLER_NAME_SET_P (expr))
14449 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14450 memcpy ((char *) &buf, expr, tree_size (expr));
14451 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14452 expr = (tree) &buf;
14454 else if (TREE_CODE_CLASS (code) == tcc_type
14455 && (TYPE_POINTER_TO (expr)
14456 || TYPE_REFERENCE_TO (expr)
14457 || TYPE_CACHED_VALUES_P (expr)
14458 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14459 || TYPE_NEXT_VARIANT (expr)))
14461 /* Allow these fields to be modified. */
14462 tree tmp;
14463 memcpy ((char *) &buf, expr, tree_size (expr));
14464 expr = tmp = (tree) &buf;
14465 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14466 TYPE_POINTER_TO (tmp) = NULL;
14467 TYPE_REFERENCE_TO (tmp) = NULL;
14468 TYPE_NEXT_VARIANT (tmp) = NULL;
14469 if (TYPE_CACHED_VALUES_P (tmp))
14471 TYPE_CACHED_VALUES_P (tmp) = 0;
14472 TYPE_CACHED_VALUES (tmp) = NULL;
14475 md5_process_bytes (expr, tree_size (expr), ctx);
14476 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14477 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14478 if (TREE_CODE_CLASS (code) != tcc_type
14479 && TREE_CODE_CLASS (code) != tcc_declaration
14480 && code != TREE_LIST
14481 && code != SSA_NAME
14482 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14483 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14484 switch (TREE_CODE_CLASS (code))
14486 case tcc_constant:
14487 switch (code)
14489 case STRING_CST:
14490 md5_process_bytes (TREE_STRING_POINTER (expr),
14491 TREE_STRING_LENGTH (expr), ctx);
14492 break;
14493 case COMPLEX_CST:
14494 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14495 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14496 break;
14497 case VECTOR_CST:
14498 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14499 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14500 break;
14501 default:
14502 break;
14504 break;
14505 case tcc_exceptional:
14506 switch (code)
14508 case TREE_LIST:
14509 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14510 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14511 expr = TREE_CHAIN (expr);
14512 goto recursive_label;
14513 break;
14514 case TREE_VEC:
14515 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14516 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14517 break;
14518 default:
14519 break;
14521 break;
14522 case tcc_expression:
14523 case tcc_reference:
14524 case tcc_comparison:
14525 case tcc_unary:
14526 case tcc_binary:
14527 case tcc_statement:
14528 case tcc_vl_exp:
14529 len = TREE_OPERAND_LENGTH (expr);
14530 for (i = 0; i < len; ++i)
14531 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14532 break;
14533 case tcc_declaration:
14534 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14535 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14536 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14538 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14539 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14540 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14541 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14542 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14544 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14545 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14547 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14549 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14550 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14551 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14553 break;
14554 case tcc_type:
14555 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14556 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14557 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14558 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14559 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14560 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14561 if (INTEGRAL_TYPE_P (expr)
14562 || SCALAR_FLOAT_TYPE_P (expr))
14564 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14565 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14567 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14568 if (TREE_CODE (expr) == RECORD_TYPE
14569 || TREE_CODE (expr) == UNION_TYPE
14570 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14571 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14572 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14573 break;
14574 default:
14575 break;
14579 /* Helper function for outputting the checksum of a tree T. When
14580 debugging with gdb, you can "define mynext" to be "next" followed
14581 by "call debug_fold_checksum (op0)", then just step through until the
14582 outputs differ. */
14584 DEBUG_FUNCTION void
14585 debug_fold_checksum (const_tree t)
14587 int i;
14588 unsigned char checksum[16];
14589 struct md5_ctx ctx;
14590 hash_table <pointer_hash <tree_node> > ht;
14591 ht.create (32);
14593 md5_init_ctx (&ctx);
14594 fold_checksum_tree (t, &ctx, ht);
14595 md5_finish_ctx (&ctx, checksum);
14596 ht.empty ();
14598 for (i = 0; i < 16; i++)
14599 fprintf (stderr, "%d ", checksum[i]);
14601 fprintf (stderr, "\n");
14604 #endif
14606 /* Fold a unary tree expression with code CODE of type TYPE with an
14607 operand OP0. LOC is the location of the resulting expression.
14608 Return a folded expression if successful. Otherwise, return a tree
14609 expression with code CODE of type TYPE with an operand OP0. */
14611 tree
14612 fold_build1_stat_loc (location_t loc,
14613 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14615 tree tem;
14616 #ifdef ENABLE_FOLD_CHECKING
14617 unsigned char checksum_before[16], checksum_after[16];
14618 struct md5_ctx ctx;
14619 hash_table <pointer_hash <tree_node> > ht;
14621 ht.create (32);
14622 md5_init_ctx (&ctx);
14623 fold_checksum_tree (op0, &ctx, ht);
14624 md5_finish_ctx (&ctx, checksum_before);
14625 ht.empty ();
14626 #endif
14628 tem = fold_unary_loc (loc, code, type, op0);
14629 if (!tem)
14630 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14632 #ifdef ENABLE_FOLD_CHECKING
14633 md5_init_ctx (&ctx);
14634 fold_checksum_tree (op0, &ctx, ht);
14635 md5_finish_ctx (&ctx, checksum_after);
14636 ht.dispose ();
14638 if (memcmp (checksum_before, checksum_after, 16))
14639 fold_check_failed (op0, tem);
14640 #endif
14641 return tem;
14644 /* Fold a binary tree expression with code CODE of type TYPE with
14645 operands OP0 and OP1. LOC is the location of the resulting
14646 expression. Return a folded expression if successful. Otherwise,
14647 return a tree expression with code CODE of type TYPE with operands
14648 OP0 and OP1. */
14650 tree
14651 fold_build2_stat_loc (location_t loc,
14652 enum tree_code code, tree type, tree op0, tree op1
14653 MEM_STAT_DECL)
14655 tree tem;
14656 #ifdef ENABLE_FOLD_CHECKING
14657 unsigned char checksum_before_op0[16],
14658 checksum_before_op1[16],
14659 checksum_after_op0[16],
14660 checksum_after_op1[16];
14661 struct md5_ctx ctx;
14662 hash_table <pointer_hash <tree_node> > ht;
14664 ht.create (32);
14665 md5_init_ctx (&ctx);
14666 fold_checksum_tree (op0, &ctx, ht);
14667 md5_finish_ctx (&ctx, checksum_before_op0);
14668 ht.empty ();
14670 md5_init_ctx (&ctx);
14671 fold_checksum_tree (op1, &ctx, ht);
14672 md5_finish_ctx (&ctx, checksum_before_op1);
14673 ht.empty ();
14674 #endif
14676 tem = fold_binary_loc (loc, code, type, op0, op1);
14677 if (!tem)
14678 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14680 #ifdef ENABLE_FOLD_CHECKING
14681 md5_init_ctx (&ctx);
14682 fold_checksum_tree (op0, &ctx, ht);
14683 md5_finish_ctx (&ctx, checksum_after_op0);
14684 ht.empty ();
14686 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14687 fold_check_failed (op0, tem);
14689 md5_init_ctx (&ctx);
14690 fold_checksum_tree (op1, &ctx, ht);
14691 md5_finish_ctx (&ctx, checksum_after_op1);
14692 ht.dispose ();
14694 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14695 fold_check_failed (op1, tem);
14696 #endif
14697 return tem;
14700 /* Fold a ternary tree expression with code CODE of type TYPE with
14701 operands OP0, OP1, and OP2. Return a folded expression if
14702 successful. Otherwise, return a tree expression with code CODE of
14703 type TYPE with operands OP0, OP1, and OP2. */
14705 tree
14706 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14707 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14709 tree tem;
14710 #ifdef ENABLE_FOLD_CHECKING
14711 unsigned char checksum_before_op0[16],
14712 checksum_before_op1[16],
14713 checksum_before_op2[16],
14714 checksum_after_op0[16],
14715 checksum_after_op1[16],
14716 checksum_after_op2[16];
14717 struct md5_ctx ctx;
14718 hash_table <pointer_hash <tree_node> > ht;
14720 ht.create (32);
14721 md5_init_ctx (&ctx);
14722 fold_checksum_tree (op0, &ctx, ht);
14723 md5_finish_ctx (&ctx, checksum_before_op0);
14724 ht.empty ();
14726 md5_init_ctx (&ctx);
14727 fold_checksum_tree (op1, &ctx, ht);
14728 md5_finish_ctx (&ctx, checksum_before_op1);
14729 ht.empty ();
14731 md5_init_ctx (&ctx);
14732 fold_checksum_tree (op2, &ctx, ht);
14733 md5_finish_ctx (&ctx, checksum_before_op2);
14734 ht.empty ();
14735 #endif
14737 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14738 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14739 if (!tem)
14740 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14742 #ifdef ENABLE_FOLD_CHECKING
14743 md5_init_ctx (&ctx);
14744 fold_checksum_tree (op0, &ctx, ht);
14745 md5_finish_ctx (&ctx, checksum_after_op0);
14746 ht.empty ();
14748 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14749 fold_check_failed (op0, tem);
14751 md5_init_ctx (&ctx);
14752 fold_checksum_tree (op1, &ctx, ht);
14753 md5_finish_ctx (&ctx, checksum_after_op1);
14754 ht.empty ();
14756 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14757 fold_check_failed (op1, tem);
14759 md5_init_ctx (&ctx);
14760 fold_checksum_tree (op2, &ctx, ht);
14761 md5_finish_ctx (&ctx, checksum_after_op2);
14762 ht.dispose ();
14764 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14765 fold_check_failed (op2, tem);
14766 #endif
14767 return tem;
14770 /* Fold a CALL_EXPR expression of type TYPE with function FN and the
14771 NARGS arguments in ARGARRAY, and a null static chain.
14772 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14773 of type TYPE from the given operands as constructed by build_call_array. */
14775 tree
14776 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14777 int nargs, tree *argarray)
14779 tree tem;
14780 #ifdef ENABLE_FOLD_CHECKING
14781 unsigned char checksum_before_fn[16],
14782 checksum_before_arglist[16],
14783 checksum_after_fn[16],
14784 checksum_after_arglist[16];
14785 struct md5_ctx ctx;
14786 hash_table <pointer_hash <tree_node> > ht;
14787 int i;
14789 ht.create (32);
14790 md5_init_ctx (&ctx);
14791 fold_checksum_tree (fn, &ctx, ht);
14792 md5_finish_ctx (&ctx, checksum_before_fn);
14793 ht.empty ();
14795 md5_init_ctx (&ctx);
14796 for (i = 0; i < nargs; i++)
14797 fold_checksum_tree (argarray[i], &ctx, ht);
14798 md5_finish_ctx (&ctx, checksum_before_arglist);
14799 ht.empty ();
14800 #endif
14802 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14804 #ifdef ENABLE_FOLD_CHECKING
14805 md5_init_ctx (&ctx);
14806 fold_checksum_tree (fn, &ctx, ht);
14807 md5_finish_ctx (&ctx, checksum_after_fn);
14808 ht.empty ();
14810 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14811 fold_check_failed (fn, tem);
14813 md5_init_ctx (&ctx);
14814 for (i = 0; i < nargs; i++)
14815 fold_checksum_tree (argarray[i], &ctx, ht);
14816 md5_finish_ctx (&ctx, checksum_after_arglist);
14817 ht.dispose ();
14819 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14820 fold_check_failed (NULL_TREE, tem);
14821 #endif
14822 return tem;
14825 /* Perform constant folding and related simplification of initializer
14826 expressions. The following functions behave identically to "fold_buildN"
14827 but ignore potential run-time traps and exceptions that fold must preserve. */
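/* Usage sketch (illustrative commentary): "1.0 / 3.0" is inexact,
   so under -frounding-math fold normally refuses to evaluate it at
   compile time; in a static initializer it must become a constant,
   so these wrappers temporarily clear flag_rounding_math and the
   other flags via START_FOLD_INIT below, then restore them with
   END_FOLD_INIT. */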
14829 #define START_FOLD_INIT \
14830 int saved_signaling_nans = flag_signaling_nans;\
14831 int saved_trapping_math = flag_trapping_math;\
14832 int saved_rounding_math = flag_rounding_math;\
14833 int saved_trapv = flag_trapv;\
14834 int saved_folding_initializer = folding_initializer;\
14835 flag_signaling_nans = 0;\
14836 flag_trapping_math = 0;\
14837 flag_rounding_math = 0;\
14838 flag_trapv = 0;\
14839 folding_initializer = 1;
14841 #define END_FOLD_INIT \
14842 flag_signaling_nans = saved_signaling_nans;\
14843 flag_trapping_math = saved_trapping_math;\
14844 flag_rounding_math = saved_rounding_math;\
14845 flag_trapv = saved_trapv;\
14846 folding_initializer = saved_folding_initializer;
14848 tree
14849 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14850 tree type, tree op)
14852 tree result;
14853 START_FOLD_INIT;
14855 result = fold_build1_loc (loc, code, type, op);
14857 END_FOLD_INIT;
14858 return result;
14861 tree
14862 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14863 tree type, tree op0, tree op1)
14865 tree result;
14866 START_FOLD_INIT;
14868 result = fold_build2_loc (loc, code, type, op0, op1);
14870 END_FOLD_INIT;
14871 return result;
14874 tree
14875 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14876 tree type, tree op0, tree op1, tree op2)
14878 tree result;
14879 START_FOLD_INIT;
14881 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14883 END_FOLD_INIT;
14884 return result;
14887 tree
14888 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14889 int nargs, tree *argarray)
14891 tree result;
14892 START_FOLD_INIT;
14894 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14896 END_FOLD_INIT;
14897 return result;
14900 #undef START_FOLD_INIT
14901 #undef END_FOLD_INIT
14903 /* Determine whether the first argument is a multiple of the second. Return 0
14904 if it is not, or if we cannot easily determine that it is.
14906 An example of the sort of thing we care about (at this point; this routine
14907 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14908 fold cases do now) is discovering that
14910 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14912 is a multiple of
14914 SAVE_EXPR (J * 8)
14916 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14918 This code also handles discovering that
14920 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14922 is a multiple of 8 so we don't have to worry about dealing with a
14923 possible remainder.
14925 Note that we *look* inside a SAVE_EXPR only to determine how it was
14926 calculated; it is not safe for fold to do much of anything else with the
14927 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14928 at run time. For example, the latter example above *cannot* be implemented
14929 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14930 evaluation time of the original SAVE_EXPR is not necessarily the same at
14931 the time the new expression is evaluated. The only optimization of this
14932 sort that would be valid is changing
14934 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14936 divided by 8 to
14938 SAVE_EXPR (I) * SAVE_EXPR (J)
14940 (where the same SAVE_EXPR (J) is used in the original and the
14941 transformed version). */
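/* For example (illustrative, not from the original source):
   multiple_of_p (sizetype, <tree for J * 8>, size_int (4))
   returns 1: the MULT_EXPR case below sees that the constant
   operand 8 is itself a multiple of 4. */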
14943 int
14944 multiple_of_p (tree type, const_tree top, const_tree bottom)
14946 if (operand_equal_p (top, bottom, 0))
14947 return 1;
14949 if (TREE_CODE (type) != INTEGER_TYPE)
14950 return 0;
14952 switch (TREE_CODE (top))
14954 case BIT_AND_EXPR:
14955 /* A bitwise AND provides a power-of-two multiple. If the mask is
14956 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
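/* E.g. "x & ~7" is a multiple of 8, because the mask ~7 is itself
   a multiple of 8 (illustrative example only). */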
14957 if (!integer_pow2p (bottom))
14958 return 0;
14959 /* FALLTHRU */
14961 case MULT_EXPR:
14962 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14963 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14965 case PLUS_EXPR:
14966 case MINUS_EXPR:
14967 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14968 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14970 case LSHIFT_EXPR:
14971 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14973 tree op1, t1;
14975 op1 = TREE_OPERAND (top, 1);
14976 /* const_binop may not detect overflow correctly,
14977 so check for it explicitly here. */
14978 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14979 > TREE_INT_CST_LOW (op1)
14980 && TREE_INT_CST_HIGH (op1) == 0
14981 && 0 != (t1 = fold_convert (type,
14982 const_binop (LSHIFT_EXPR,
14983 size_one_node,
14984 op1)))
14985 && !TREE_OVERFLOW (t1))
14986 return multiple_of_p (type, t1, bottom);
14988 return 0;
14990 case NOP_EXPR:
14991 /* Can't handle conversions from a non-integral or wider integral type. */
14992 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14993 || (TYPE_PRECISION (type)
14994 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14995 return 0;
14997 /* ... fall through ... */
14999 case SAVE_EXPR:
15000 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15002 case COND_EXPR:
15003 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15004 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15006 case INTEGER_CST:
15007 if (TREE_CODE (bottom) != INTEGER_CST
15008 || integer_zerop (bottom)
15009 || (TYPE_UNSIGNED (type)
15010 && (tree_int_cst_sgn (top) < 0
15011 || tree_int_cst_sgn (bottom) < 0)))
15012 return 0;
15013 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15014 top, bottom));
15016 default:
15017 return 0;
15021 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15023 static bool
15024 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15026 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15027 && truth_value_p (code))
15028 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
15029 have a signed:1 type (where the values are -1 and 0). */
15030 return true;
15031 return false;
15034 /* Return true if (CODE OP0) is known to be non-negative. If the return
15035 value is based on the assumption that signed overflow is undefined,
15036 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15037 *STRICT_OVERFLOW_P. */
15039 bool
15040 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15041 bool *strict_overflow_p)
15043 if (TYPE_UNSIGNED (type))
15044 return true;
15046 switch (code)
15048 case ABS_EXPR:
15049 /* We can't return 1 if flag_wrapv is set because
15050 ABS_EXPR<INT_MIN> = INT_MIN. */
15051 if (!INTEGRAL_TYPE_P (type))
15052 return true;
15053 if (TYPE_OVERFLOW_UNDEFINED (type))
15055 *strict_overflow_p = true;
15056 return true;
15058 break;
15060 case NON_LVALUE_EXPR:
15061 case FLOAT_EXPR:
15062 case FIX_TRUNC_EXPR:
15063 return tree_expr_nonnegative_warnv_p (op0,
15064 strict_overflow_p);
15066 case NOP_EXPR:
15068 tree inner_type = TREE_TYPE (op0);
15069 tree outer_type = type;
15071 if (TREE_CODE (outer_type) == REAL_TYPE)
15073 if (TREE_CODE (inner_type) == REAL_TYPE)
15074 return tree_expr_nonnegative_warnv_p (op0,
15075 strict_overflow_p);
15076 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15078 if (TYPE_UNSIGNED (inner_type))
15079 return true;
15080 return tree_expr_nonnegative_warnv_p (op0,
15081 strict_overflow_p);
15084 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15086 if (TREE_CODE (inner_type) == REAL_TYPE)
15087 return tree_expr_nonnegative_warnv_p (op0,
15088 strict_overflow_p);
15089 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15090 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15091 && TYPE_UNSIGNED (inner_type);
15094 break;
15096 default:
15097 return tree_simple_nonnegative_warnv_p (code, type);
15100 /* We don't know the sign of `t', so be conservative and return false. */
15101 return false;
15104 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15105 value is based on the assumption that signed overflow is undefined,
15106 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15107 *STRICT_OVERFLOW_P. */
15109 bool
15110 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15111 tree op1, bool *strict_overflow_p)
15113 if (TYPE_UNSIGNED (type))
15114 return true;
15116 switch (code)
15118 case POINTER_PLUS_EXPR:
15119 case PLUS_EXPR:
15120 if (FLOAT_TYPE_P (type))
15121 return (tree_expr_nonnegative_warnv_p (op0,
15122 strict_overflow_p)
15123 && tree_expr_nonnegative_warnv_p (op1,
15124 strict_overflow_p));
15126 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15127 both unsigned and at least 2 bits shorter than the result. */
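/* E.g. two 8-bit unsigned values added in a 32-bit int need at
   most 9 bits, so the sum is nonnegative (illustrative example
   only). */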
15128 if (TREE_CODE (type) == INTEGER_TYPE
15129 && TREE_CODE (op0) == NOP_EXPR
15130 && TREE_CODE (op1) == NOP_EXPR)
15132 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15133 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15134 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15135 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15137 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15138 TYPE_PRECISION (inner2)) + 1;
15139 return prec < TYPE_PRECISION (type);
15142 break;
15144 case MULT_EXPR:
15145 if (FLOAT_TYPE_P (type))
15147 /* x * x for floating point x is always non-negative. */
15148 if (operand_equal_p (op0, op1, 0))
15149 return true;
15150 return (tree_expr_nonnegative_warnv_p (op0,
15151 strict_overflow_p)
15152 && tree_expr_nonnegative_warnv_p (op1,
15153 strict_overflow_p));
15156 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15157 both unsigned and their combined precision is less than that of the result. */
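/* E.g. two 8-bit unsigned values multiplied in a 32-bit int need
   at most 16 bits, so the product is nonnegative (illustrative
   example only). */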
15158 if (TREE_CODE (type) == INTEGER_TYPE
15159 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15160 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15162 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15163 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15164 : TREE_TYPE (op0);
15165 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15166 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15167 : TREE_TYPE (op1);
15169 bool unsigned0 = TYPE_UNSIGNED (inner0);
15170 bool unsigned1 = TYPE_UNSIGNED (inner1);
15172 if (TREE_CODE (op0) == INTEGER_CST)
15173 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15175 if (TREE_CODE (op1) == INTEGER_CST)
15176 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15178 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15179 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15181 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15182 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15183 : TYPE_PRECISION (inner0);
15185 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15186 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15187 : TYPE_PRECISION (inner1);
15189 return precision0 + precision1 < TYPE_PRECISION (type);
15192 return false;
15194 case BIT_AND_EXPR:
15195 case MAX_EXPR:
15196 return (tree_expr_nonnegative_warnv_p (op0,
15197 strict_overflow_p)
15198 || tree_expr_nonnegative_warnv_p (op1,
15199 strict_overflow_p));
15201 case BIT_IOR_EXPR:
15202 case BIT_XOR_EXPR:
15203 case MIN_EXPR:
15204 case RDIV_EXPR:
15205 case TRUNC_DIV_EXPR:
15206 case CEIL_DIV_EXPR:
15207 case FLOOR_DIV_EXPR:
15208 case ROUND_DIV_EXPR:
15209 return (tree_expr_nonnegative_warnv_p (op0,
15210 strict_overflow_p)
15211 && tree_expr_nonnegative_warnv_p (op1,
15212 strict_overflow_p));
15214 case TRUNC_MOD_EXPR:
15215 case CEIL_MOD_EXPR:
15216 case FLOOR_MOD_EXPR:
15217 case ROUND_MOD_EXPR:
15218 return tree_expr_nonnegative_warnv_p (op0,
15219 strict_overflow_p);
15220 default:
15221 return tree_simple_nonnegative_warnv_p (code, type);
15224 /* We don't know the sign of `t', so be conservative and return false. */
15225 return false;
15228 /* Return true if T is known to be non-negative. If the return
15229 value is based on the assumption that signed overflow is undefined,
15230 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15231 *STRICT_OVERFLOW_P. */
15233 bool
15234 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15236 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15237 return true;
15239 switch (TREE_CODE (t))
15241 case INTEGER_CST:
15242 return tree_int_cst_sgn (t) >= 0;
15244 case REAL_CST:
15245 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15247 case FIXED_CST:
15248 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15250 case COND_EXPR:
15251 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15252 strict_overflow_p)
15253 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15254 strict_overflow_p));
15255 default:
15256 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15257 TREE_TYPE (t));
15259 /* We don't know the sign of `t', so be conservative and return false. */
15260 return false;
15263 /* Return true if T is known to be non-negative. If the return
15264 value is based on the assumption that signed overflow is undefined,
15265 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15266 *STRICT_OVERFLOW_P. */
15268 bool
15269 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15270 tree arg0, tree arg1, bool *strict_overflow_p)
15272 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15273 switch (DECL_FUNCTION_CODE (fndecl))
15275 CASE_FLT_FN (BUILT_IN_ACOS):
15276 CASE_FLT_FN (BUILT_IN_ACOSH):
15277 CASE_FLT_FN (BUILT_IN_CABS):
15278 CASE_FLT_FN (BUILT_IN_COSH):
15279 CASE_FLT_FN (BUILT_IN_ERFC):
15280 CASE_FLT_FN (BUILT_IN_EXP):
15281 CASE_FLT_FN (BUILT_IN_EXP10):
15282 CASE_FLT_FN (BUILT_IN_EXP2):
15283 CASE_FLT_FN (BUILT_IN_FABS):
15284 CASE_FLT_FN (BUILT_IN_FDIM):
15285 CASE_FLT_FN (BUILT_IN_HYPOT):
15286 CASE_FLT_FN (BUILT_IN_POW10):
15287 CASE_INT_FN (BUILT_IN_FFS):
15288 CASE_INT_FN (BUILT_IN_PARITY):
15289 CASE_INT_FN (BUILT_IN_POPCOUNT):
15290 case BUILT_IN_BSWAP32:
15291 case BUILT_IN_BSWAP64:
15292 /* Always true. */
15293 return true;
15295 CASE_FLT_FN (BUILT_IN_SQRT):
15296 /* sqrt(-0.0) is -0.0. */
15297 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15298 return true;
15299 return tree_expr_nonnegative_warnv_p (arg0,
15300 strict_overflow_p);
15302 CASE_FLT_FN (BUILT_IN_ASINH):
15303 CASE_FLT_FN (BUILT_IN_ATAN):
15304 CASE_FLT_FN (BUILT_IN_ATANH):
15305 CASE_FLT_FN (BUILT_IN_CBRT):
15306 CASE_FLT_FN (BUILT_IN_CEIL):
15307 CASE_FLT_FN (BUILT_IN_ERF):
15308 CASE_FLT_FN (BUILT_IN_EXPM1):
15309 CASE_FLT_FN (BUILT_IN_FLOOR):
15310 CASE_FLT_FN (BUILT_IN_FMOD):
15311 CASE_FLT_FN (BUILT_IN_FREXP):
15312 CASE_FLT_FN (BUILT_IN_ICEIL):
15313 CASE_FLT_FN (BUILT_IN_IFLOOR):
15314 CASE_FLT_FN (BUILT_IN_IRINT):
15315 CASE_FLT_FN (BUILT_IN_IROUND):
15316 CASE_FLT_FN (BUILT_IN_LCEIL):
15317 CASE_FLT_FN (BUILT_IN_LDEXP):
15318 CASE_FLT_FN (BUILT_IN_LFLOOR):
15319 CASE_FLT_FN (BUILT_IN_LLCEIL):
15320 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15321 CASE_FLT_FN (BUILT_IN_LLRINT):
15322 CASE_FLT_FN (BUILT_IN_LLROUND):
15323 CASE_FLT_FN (BUILT_IN_LRINT):
15324 CASE_FLT_FN (BUILT_IN_LROUND):
15325 CASE_FLT_FN (BUILT_IN_MODF):
15326 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15327 CASE_FLT_FN (BUILT_IN_RINT):
15328 CASE_FLT_FN (BUILT_IN_ROUND):
15329 CASE_FLT_FN (BUILT_IN_SCALB):
15330 CASE_FLT_FN (BUILT_IN_SCALBLN):
15331 CASE_FLT_FN (BUILT_IN_SCALBN):
15332 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15333 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15334 CASE_FLT_FN (BUILT_IN_SINH):
15335 CASE_FLT_FN (BUILT_IN_TANH):
15336 CASE_FLT_FN (BUILT_IN_TRUNC):
15337 /* True if the 1st argument is nonnegative. */
15338 return tree_expr_nonnegative_warnv_p (arg0,
15339 strict_overflow_p);
15341 CASE_FLT_FN (BUILT_IN_FMAX):
15342 /* True if the 1st OR 2nd arguments are nonnegative. */
15343 return (tree_expr_nonnegative_warnv_p (arg0,
15344 strict_overflow_p)
15345 || (tree_expr_nonnegative_warnv_p (arg1,
15346 strict_overflow_p)));
15348 CASE_FLT_FN (BUILT_IN_FMIN):
15349 /* True if the 1st AND 2nd arguments are nonnegative. */
15350 return (tree_expr_nonnegative_warnv_p (arg0,
15351 strict_overflow_p)
15352 && (tree_expr_nonnegative_warnv_p (arg1,
15353 strict_overflow_p)));
15355 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15356 /* True if the 2nd argument is nonnegative. */
15357 return tree_expr_nonnegative_warnv_p (arg1,
15358 strict_overflow_p);
15360 CASE_FLT_FN (BUILT_IN_POWI):
15361 /* True if the 1st argument is nonnegative or the second
15362 argument is an even integer. */
15363 if (TREE_CODE (arg1) == INTEGER_CST
15364 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15365 return true;
15366 return tree_expr_nonnegative_warnv_p (arg0,
15367 strict_overflow_p);
15369 CASE_FLT_FN (BUILT_IN_POW):
15370 /* True if the 1st argument is nonnegative or the second
15371 argument is an even integer valued real. */
15372 if (TREE_CODE (arg1) == REAL_CST)
15374 REAL_VALUE_TYPE c;
15375 HOST_WIDE_INT n;
15377 c = TREE_REAL_CST (arg1);
15378 n = real_to_integer (&c);
15379 if ((n & 1) == 0)
15381 REAL_VALUE_TYPE cint;
15382 real_from_integer (&cint, VOIDmode, n,
15383 n < 0 ? -1 : 0, 0);
15384 if (real_identical (&c, &cint))
15385 return true;
15388 return tree_expr_nonnegative_warnv_p (arg0,
15389 strict_overflow_p);
15391 default:
15392 break;
15394 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15395 type);
15398 /* Return true if T is known to be non-negative. If the return
15399 value is based on the assumption that signed overflow is undefined,
15400 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15401 *STRICT_OVERFLOW_P. */
15403 bool
15404 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15406 enum tree_code code = TREE_CODE (t);
15407 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15408 return true;
15410 switch (code)
15412 case TARGET_EXPR:
15414 tree temp = TARGET_EXPR_SLOT (t);
15415 t = TARGET_EXPR_INITIAL (t);
15417 /* If the initializer is non-void, then it's a normal expression
15418 that will be assigned to the slot. */
15419 if (!VOID_TYPE_P (t))
15420 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15422 /* Otherwise, the initializer sets the slot in some way. One common
15423 way is an assignment statement at the end of the initializer. */
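/* Illustrative example (the slot name D.1 is made up): for
   TARGET_EXPR <D.1, { ...; D.1 = x * x; }> the loop below walks
   to the final "D.1 = x * x" and tests whether "x * x" is
   nonnegative. */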
15424 while (1)
15426 if (TREE_CODE (t) == BIND_EXPR)
15427 t = expr_last (BIND_EXPR_BODY (t));
15428 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15429 || TREE_CODE (t) == TRY_CATCH_EXPR)
15430 t = expr_last (TREE_OPERAND (t, 0));
15431 else if (TREE_CODE (t) == STATEMENT_LIST)
15432 t = expr_last (t);
15433 else
15434 break;
15436 if (TREE_CODE (t) == MODIFY_EXPR
15437 && TREE_OPERAND (t, 0) == temp)
15438 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15439 strict_overflow_p);
15441 return false;
15444 case CALL_EXPR:
15446 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15447 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15449 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15450 get_callee_fndecl (t),
15451 arg0,
15452 arg1,
15453 strict_overflow_p);
15455 case COMPOUND_EXPR:
15456 case MODIFY_EXPR:
15457 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15458 strict_overflow_p);
15459 case BIND_EXPR:
15460 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15461 strict_overflow_p);
15462 case SAVE_EXPR:
15463 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15464 strict_overflow_p);
15466 default:
15467 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15468 TREE_TYPE (t));
15471 /* We don't know the sign of `t', so be conservative and return false. */
15472 return false;
15475 /* Return true if T is known to be non-negative. If the return
15476 value is based on the assumption that signed overflow is undefined,
15477 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15478 *STRICT_OVERFLOW_P. */
15480 bool
15481 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15483 enum tree_code code;
15484 if (t == error_mark_node)
15485 return false;
15487 code = TREE_CODE (t);
15488 switch (TREE_CODE_CLASS (code))
15490 case tcc_binary:
15491 case tcc_comparison:
15492 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15493 TREE_TYPE (t),
15494 TREE_OPERAND (t, 0),
15495 TREE_OPERAND (t, 1),
15496 strict_overflow_p);
15498 case tcc_unary:
15499 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15500 TREE_TYPE (t),
15501 TREE_OPERAND (t, 0),
15502 strict_overflow_p);
15504 case tcc_constant:
15505 case tcc_declaration:
15506 case tcc_reference:
15507 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15509 default:
15510 break;
15513 switch (code)
15515 case TRUTH_AND_EXPR:
15516 case TRUTH_OR_EXPR:
15517 case TRUTH_XOR_EXPR:
15518 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15519 TREE_TYPE (t),
15520 TREE_OPERAND (t, 0),
15521 TREE_OPERAND (t, 1),
15522 strict_overflow_p);
15523 case TRUTH_NOT_EXPR:
15524 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15525 TREE_TYPE (t),
15526 TREE_OPERAND (t, 0),
15527 strict_overflow_p);
15529 case COND_EXPR:
15530 case CONSTRUCTOR:
15531 case OBJ_TYPE_REF:
15532 case ASSERT_EXPR:
15533 case ADDR_EXPR:
15534 case WITH_SIZE_EXPR:
15535 case SSA_NAME:
15536 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15538 default:
15539 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15543 /* Return true if `t' is known to be non-negative. Handle warnings
15544 about undefined signed overflow. */
15546 bool
15547 tree_expr_nonnegative_p (tree t)
15549 bool ret, strict_overflow_p;
15551 strict_overflow_p = false;
15552 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15553 if (strict_overflow_p)
15554 fold_overflow_warning (("assuming signed overflow does not occur when "
15555 "determining that expression is always "
15556 "non-negative"),
15557 WARN_STRICT_OVERFLOW_MISC);
15558 return ret;
15562 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15563 For floating point we further ensure that T is not denormal.
15564 Similar logic is present in nonzero_address_p in rtlanal.c.
15566 If the return value is based on the assumption that signed overflow
15567 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15568 change *STRICT_OVERFLOW_P. */
15570 bool
15571 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15572 bool *strict_overflow_p)
15574 switch (code)
15576 case ABS_EXPR:
15577 return tree_expr_nonzero_warnv_p (op0,
15578 strict_overflow_p);
15580 case NOP_EXPR:
15582 tree inner_type = TREE_TYPE (op0);
15583 tree outer_type = type;
15585 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15586 && tree_expr_nonzero_warnv_p (op0,
15587 strict_overflow_p));
15589 break;
15591 case NON_LVALUE_EXPR:
15592 return tree_expr_nonzero_warnv_p (op0,
15593 strict_overflow_p);
15595 default:
15596 break;
15599 return false;
15602 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15603 For floating point we further ensure that T is not denormal.
15604 Similar logic is present in nonzero_address_p in rtlanal.c.
15606 If the return value is based on the assumption that signed overflow
15607 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15608 change *STRICT_OVERFLOW_P. */
15610 bool
15611 tree_binary_nonzero_warnv_p (enum tree_code code,
15612 tree type,
15613 tree op0,
15614 tree op1, bool *strict_overflow_p)
15616 bool sub_strict_overflow_p;
15617 switch (code)
15619 case POINTER_PLUS_EXPR:
15620 case PLUS_EXPR:
15621 if (TYPE_OVERFLOW_UNDEFINED (type))
15623 /* In the presence of negative values it is hard
15624 to say anything. */
15625 sub_strict_overflow_p = false;
15626 if (!tree_expr_nonnegative_warnv_p (op0,
15627 &sub_strict_overflow_p)
15628 || !tree_expr_nonnegative_warnv_p (op1,
15629 &sub_strict_overflow_p))
15630 return false;
15631 /* One of the operands must be positive and the other non-negative. */
15632 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15633 overflows, on a two's-complement machine the sum of two
15634 nonnegative numbers can never be zero. */
15635 return (tree_expr_nonzero_warnv_p (op0,
15636 strict_overflow_p)
15637 || tree_expr_nonzero_warnv_p (op1,
15638 strict_overflow_p));
15640 break;
15642 case MULT_EXPR:
15643 if (TYPE_OVERFLOW_UNDEFINED (type))
15645 if (tree_expr_nonzero_warnv_p (op0,
15646 strict_overflow_p)
15647 && tree_expr_nonzero_warnv_p (op1,
15648 strict_overflow_p))
15650 *strict_overflow_p = true;
15651 return true;
15654 break;
15656 case MIN_EXPR:
15657 sub_strict_overflow_p = false;
15658 if (tree_expr_nonzero_warnv_p (op0,
15659 &sub_strict_overflow_p)
15660 && tree_expr_nonzero_warnv_p (op1,
15661 &sub_strict_overflow_p))
15663 if (sub_strict_overflow_p)
15664 *strict_overflow_p = true;
15666 break;
15668 case MAX_EXPR:
15669 sub_strict_overflow_p = false;
15670 if (tree_expr_nonzero_warnv_p (op0,
15671 &sub_strict_overflow_p))
15673 if (sub_strict_overflow_p)
15674 *strict_overflow_p = true;
15676 /* When both operands are nonzero, then MAX must be too. */
15677 if (tree_expr_nonzero_warnv_p (op1,
15678 strict_overflow_p))
15679 return true;
15681 /* MAX where operand 0 is positive is positive. */
15682 return tree_expr_nonnegative_warnv_p (op0,
15683 strict_overflow_p);
15685 /* MAX where operand 1 is positive is positive. */
15686 else if (tree_expr_nonzero_warnv_p (op1,
15687 &sub_strict_overflow_p)
15688 && tree_expr_nonnegative_warnv_p (op1,
15689 &sub_strict_overflow_p))
15691 if (sub_strict_overflow_p)
15692 *strict_overflow_p = true;
15693 return true;
15695 break;
15697 case BIT_IOR_EXPR:
15698 return (tree_expr_nonzero_warnv_p (op1,
15699 strict_overflow_p)
15700 || tree_expr_nonzero_warnv_p (op0,
15701 strict_overflow_p));
15703 default:
15704 break;
15707 return false;
15710 /* Return true when T is an address and is known to be nonzero.
15711 For floating point we further ensure that T is not denormal.
15712 Similar logic is present in nonzero_address_p in rtlanal.c.
15714 If the return value is based on the assumption that signed overflow
15715 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15716 change *STRICT_OVERFLOW_P. */
15718 bool
15719 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15721 bool sub_strict_overflow_p;
15722 switch (TREE_CODE (t))
15724 case INTEGER_CST:
15725 return !integer_zerop (t);
15727 case ADDR_EXPR:
15729 tree base = TREE_OPERAND (t, 0);
15730 if (!DECL_P (base))
15731 base = get_base_address (base);
15733 if (!base)
15734 return false;
15736 /* Weak declarations may link to NULL. Other things may also be NULL,
15737 so protect with -fdelete-null-pointer-checks; variables allocated
15738 on the stack, however, are never NULL. */
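/* E.g. the address of "extern int x __attribute__ ((weak))" may
   legitimately be NULL, so it is not treated as nonzero, while
   the address of a local (stack) variable always is
   (illustrative example, not from the original source). */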
15739 if (DECL_P (base)
15740 && (flag_delete_null_pointer_checks
15741 || (DECL_CONTEXT (base)
15742 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15743 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15744 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15746 /* Constants are never weak. */
15747 if (CONSTANT_CLASS_P (base))
15748 return true;
15750 return false;
15753 case COND_EXPR:
15754 sub_strict_overflow_p = false;
15755 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15756 &sub_strict_overflow_p)
15757 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15758 &sub_strict_overflow_p))
15760 if (sub_strict_overflow_p)
15761 *strict_overflow_p = true;
15762 return true;
15764 break;
15766 default:
15767 break;
15769 return false;
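#if 0
/* A sketch of why DECL_WEAK blocks the fold above; it assumes a target
   supporting __attribute__ ((weak)).  The address of an undefined weak
   symbol may compare equal to NULL at run time, so a test like the one
   below must not be folded to a constant.  */
extern void weak_fn (void) __attribute__ ((weak));

static int
weak_fn_is_present (void)
{
  return weak_fn != 0;	/* genuinely unknown until link/run time */
}
#endif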
/* Return true when the expression T is known to be nonzero.  Only
   integral and pointer types are handled; doing something useful for
   floating point would need more work.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
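#if 0
/* A sketch of the CALL_EXPR arm above: alloca_call_p lets the folder
   treat the result of alloca as nonzero, so a test like this one can
   be folded to 1.  */
static int
alloca_result_is_nonnull (unsigned int n)
{
  return __builtin_alloca (n) != 0;
}
#endif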
/* Return true when T is known to be nonzero.  Emit a warning when the
   answer relies on signed overflow being undefined.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, return the
   constant; otherwise return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, return the
   constant; otherwise return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
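#if 0
/* A hedged usage sketch for the wrappers above: folding 2 + 3 in
   'int' yields the INTEGER_CST 5, whereas folding an expression whose
   simplified form is not TREE_CONSTANT yields NULL_TREE.  */
static tree
fold_two_plus_three (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				  two, three);
}
#endif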
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed; otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the lower bound to sizetype to avoid problems
	     with constant folding.  (E.g. suppose the lower bound is 1
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
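#if 0
/* A source-level view of the fold above: an indexed read from a string
   literal becomes a character constant.  */
static int
read_from_literal (void)
{
  return "hello"[1];	/* folded to 'e' */
}
#endif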
/* Return the tree for neg (ARG0) when ARG0 is known to be an integer,
   real or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
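#if 0
/* A standalone sketch, assuming 32-bit two's complement, of the one
   interesting overflow in the INTEGER_CST arm above: negating the most
   negative value reproduces the same bit pattern, which is why the
   overflow flag is forced for signed types.  */
#include <stdint.h>
#include <assert.h>

static void
negate_min_wraps (void)
{
  uint32_t min = (uint32_t) 1 << 31;	/* bit pattern of INT32_MIN */
  assert (0u - min == min);		/* -INT32_MIN is unrepresentable */
}
#endif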
/* Return the tree for abs (ARG0) when ARG0 is known to be an integer
   or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
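#if 0
/* The reduction used above, written out for plain integers: every
   ordered comparison is expressed through '<' and '==' alone, by
   swapping operands and/or inverting the result.  */
static int lt (int a, int b) { return a < b; }
static int eq (int a, int b) { return a == b; }
static int gt (int a, int b) { return lt (b, a); }	/* swap */
static int ge (int a, int b) { return !lt (a, b); }	/* invert */
static int le (int a, int b) { return !lt (b, a); }	/* swap + invert */
static int ne (int a, int b) { return !eq (a, b); }	/* invert */
#endif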
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a RETURN_EXPR, check whether its operand, or the
     right-hand side of the MODIFY_EXPR inside it, has side effects.  If
     either one does not, we don't need to wrap the expression in a cleanup
     point expression.  Note we don't check the left-hand side of the
     MODIFY_EXPR because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
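#if 0
/* Source-level views of the main patterns folded above.  The
   declarations of 'a' and 'z' are illustrative, not part of this
   file.  */
static int a[4];
static _Complex double z;

static int    first_elt (void) { return *(int *) &a; }	       /* a[0] */
static double real_part (void) { return *(double *) &z; }      /* __real__ z */
static double imag_part (void) { return ((double *) &z)[1]; }  /* __imag__ z */
#endif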
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
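#if 0
/* The effect of the stripping above at source level: when a value is
   ignored, only subexpressions with side effects must survive, so the
   statement below reduces to just the call to f.  */
extern int f (void);

static void
ignored (int x)
{
  (void) (f () + x);	/* folds to f () */
}
#endif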
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
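#if 0
/* A standalone sketch of the power-of-two fast path above, assuming
   DIVISOR is a power of two and unsigned wrap-around arithmetic: add
   DIVISOR - 1, then mask off the low bits; no division is needed.  */
#include <stdint.h>

static uint64_t
round_up_pow2 (uint64_t value, uint64_t divisor)
{
  return (value + divisor - 1) & ~(divisor - 1);
}
#endif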
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
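#if 0
/* The corresponding sketch for rounding down: masking alone suffices,
   since ~(divisor - 1) == -divisor when DIVISOR is a power of two.  */
#include <stdint.h>

static uint64_t
round_down_pow2 (uint64_t value, uint64_t divisor)
{
  return value & ~(divisor - 1);
}
#endif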
/* Return a pointer to the base of the object addressed by EXP, and
   extract information about the offset of the access: the constant
   bit offset is stored to *PBITPOS and any variable part to *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
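#if 0
/* What ptr_difference_const recognizes, at source level: two addresses
   sharing the same core differ by a compile-time constant.  The array
   'buf' is an illustrative declaration.  */
static int buf[10];

static long
constant_pointer_difference (void)
{
  return (char *) &buf[5] - (char *) &buf[2];	/* 3 * sizeof (int) */
}
#endif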
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign function call and return its first
	       argument, evaluating the second only for side effects.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
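#if 0
/* The identity behind fold_strip_sign_ops at source level: under fabs
   the signs of intermediate factors are irrelevant, so for instance
   fabs (-x * y) may be simplified to fabs (x * y).  The sketch assumes
   sign-dependent rounding is not being honored.  */
#include <math.h>

static double
magnitude (double x, double y)
{
  return fabs (-x * y);	/* foldable to fabs (x * y) */
}
#endif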