/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
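/* For example: the encoding assigns one bit per primitive outcome --
   LT = 1, EQ = 2, GT = 4, UNORD = 8 -- so each composite code above is
   the bitwise OR of its parts: COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ
   == 3 and COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
   == 13.  ANDing or ORing two comparisons of the same operands thus
   reduces to a bitwise AND or OR of their compcodes.  */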
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
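/* For example, with integer constants this yields 12 / 4 -> 3, while
   13 / 4 leaves a remainder of 1 and therefore returns NULL_TREE.  */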
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
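/* A typical caller brackets speculative folding like so (a sketch;
   RES_IS_USED and STMT are hypothetical locals of the caller):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);

   so that a -Wstrict-overflow diagnostic is only emitted when the
   folded result is actually kept.  */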
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
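/* In two's complement the only problematic value is the most negative
   one: for a 32-bit int, negating INT_MIN (0x80000000) overflows, so
   this returns false for it and true for every other signed
   constant.  */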
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
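/* For instance, negating the constant 5 yields -5 directly, negating
   (a - b) yields (b - a) when that reassociation is allowed, and an
   expression with no cheaper form (e.g. a bare variable x) falls back
   to being wrapped as NEGATE_EXPR <x> above.  */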
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
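/* For example, splitting IN = x + 3 with CODE == PLUS_EXPR stores 3 in
   *LITP and returns x as the variable part, while IN = x - 3 stores 3
   in *MINUS_LITP instead; ~x splits as variable -x with 1 in
   *MINUS_LITP, matching the -x - 1 it denotes.  */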
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
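/* E.g. int_const_binop (PLUS_EXPR, build_int_cst (integer_type_node, 2),
   build_int_cst (integer_type_node, 5)) yields the INTEGER_CST 7, while
   a CODE that int_const_binop_1 does not handle yields NULL_TREE.  */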
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
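/* Worked example of the wide-range branch above: for (1 + 2i) / (3 + 4i)
   we have |br| = 3 < |bi| = 4, so ratio = 3/4, div = 4 + 3*(3/4) = 25/4,
   tr = (1*(3/4) + 2) / (25/4) = 11/25 and ti = (2*(3/4) - 1) / (25/4)
   = 2/25, i.e. the exact quotient 0.44 + 0.08i.  */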
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
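/* E.g. for sizetype constants 4 and 10, the difference 10 - 4 = 6 is
   computed in the unsigned type and converted to ssizetype afterwards,
   so 4 - 10 comes out as 0 - (10 - 4) = -6 rather than as a huge
   wrapped unsigned value.  */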
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
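/* Concretely, converting the REAL_CST 3.75 to int truncates to 3;
   1.0e30 saturates to INT_MAX and a NaN becomes 0, in the latter two
   cases with TREE_OVERFLOW set on the result.  */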
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the discarded fractional bits
     are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
2067 TREE_OPERAND (arg, 0));
2068 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2069 TREE_OPERAND (arg, 1));
2070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2073 arg = save_expr (arg);
2074 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2075 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2076 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2077 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2078 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2081 default:
2082 gcc_unreachable ();
2085 case VECTOR_TYPE:
2086 if (integer_zerop (arg))
2087 return build_zero_vector (type);
2088 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2089 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2090 || TREE_CODE (orig) == VECTOR_TYPE);
2091 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2093 case VOID_TYPE:
2094 tem = fold_ignored_result (arg);
2095 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2097 default:
2098 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2099 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2100 gcc_unreachable ();
2102 fold_convert_exit:
2103 protected_set_expr_location_unshare (tem, loc);
2104 return tem;
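/* Illustrative examples for the dispatch above: converting a complex
   value C to a scalar type keeps only the real part, i.e. "(int) C"
   becomes "(int) REALPART_EXPR <C>", while converting a scalar X to a
   COMPLEX_TYPE builds "COMPLEX_EXPR <(T) X, (T) 0>" with a converted
   zero imaginary part.  */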
2107 /* Return false if expr can be assumed not to be an lvalue, true
2108 otherwise. */
2110 static bool
2111 maybe_lvalue_p (const_tree x)
2113 /* We only need to wrap lvalue tree codes. */
2114 switch (TREE_CODE (x))
2116 case VAR_DECL:
2117 case PARM_DECL:
2118 case RESULT_DECL:
2119 case LABEL_DECL:
2120 case FUNCTION_DECL:
2121 case SSA_NAME:
2123 case COMPONENT_REF:
2124 case MEM_REF:
2125 case INDIRECT_REF:
2126 case ARRAY_REF:
2127 case ARRAY_RANGE_REF:
2128 case BIT_FIELD_REF:
2129 case OBJ_TYPE_REF:
2131 case REALPART_EXPR:
2132 case IMAGPART_EXPR:
2133 case PREINCREMENT_EXPR:
2134 case PREDECREMENT_EXPR:
2135 case SAVE_EXPR:
2136 case TRY_CATCH_EXPR:
2137 case WITH_CLEANUP_EXPR:
2138 case COMPOUND_EXPR:
2139 case MODIFY_EXPR:
2140 case TARGET_EXPR:
2141 case COND_EXPR:
2142 case BIND_EXPR:
2143 break;
2145 default:
2146 /* Assume the worst for front-end tree codes. */
2147 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2148 break;
2149 return false;
2152 return true;
2155 /* Return an expr equal to X but certainly not valid as an lvalue. */
2157 tree
2158 non_lvalue_loc (location_t loc, tree x)
2160 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2161 us. */
2162 if (in_gimple_form)
2163 return x;
2165 if (! maybe_lvalue_p (x))
2166 return x;
2167 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2170 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2171 Zero means allow extended lvalues. */
2173 int pedantic_lvalues;
2175 /* When pedantic, return an expr equal to X but certainly not valid as a
2176 pedantic lvalue. Otherwise, return X. */
2178 static tree
2179 pedantic_non_lvalue_loc (location_t loc, tree x)
2181 if (pedantic_lvalues)
2182 return non_lvalue_loc (loc, x);
2184 return protected_set_expr_location_unshare (x, loc);
2187 /* Given a tree comparison code, return the code that is the logical inverse.
2188 It is generally not safe to do this for floating-point comparisons, except
2189 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2190 ERROR_MARK in this case. */
2192 enum tree_code
2193 invert_tree_comparison (enum tree_code code, bool honor_nans)
2195 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2196 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2197 return ERROR_MARK;
2199 switch (code)
2201 case EQ_EXPR:
2202 return NE_EXPR;
2203 case NE_EXPR:
2204 return EQ_EXPR;
2205 case GT_EXPR:
2206 return honor_nans ? UNLE_EXPR : LE_EXPR;
2207 case GE_EXPR:
2208 return honor_nans ? UNLT_EXPR : LT_EXPR;
2209 case LT_EXPR:
2210 return honor_nans ? UNGE_EXPR : GE_EXPR;
2211 case LE_EXPR:
2212 return honor_nans ? UNGT_EXPR : GT_EXPR;
2213 case LTGT_EXPR:
2214 return UNEQ_EXPR;
2215 case UNEQ_EXPR:
2216 return LTGT_EXPR;
2217 case UNGT_EXPR:
2218 return LE_EXPR;
2219 case UNGE_EXPR:
2220 return LT_EXPR;
2221 case UNLT_EXPR:
2222 return GE_EXPR;
2223 case UNLE_EXPR:
2224 return GT_EXPR;
2225 case ORDERED_EXPR:
2226 return UNORDERED_EXPR;
2227 case UNORDERED_EXPR:
2228 return ORDERED_EXPR;
2229 default:
2230 gcc_unreachable ();
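/* Illustrative example: inverting "a < b" yields "a >= b" when NaNs
   need not be honored, but UNGE_EXPR when they must, since !(a < b)
   is also true for unordered operands.  With -ftrapping-math the
   inversion is refused (ERROR_MARK) for everything except EQ, NE,
   ORDERED and UNORDERED, because the unordered variant would not
   trap where the original comparison does.  */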
2234 /* Similar, but return the comparison that results if the operands are
2235 swapped. This is safe for floating-point. */
2237 enum tree_code
2238 swap_tree_comparison (enum tree_code code)
2240 switch (code)
2242 case EQ_EXPR:
2243 case NE_EXPR:
2244 case ORDERED_EXPR:
2245 case UNORDERED_EXPR:
2246 case LTGT_EXPR:
2247 case UNEQ_EXPR:
2248 return code;
2249 case GT_EXPR:
2250 return LT_EXPR;
2251 case GE_EXPR:
2252 return LE_EXPR;
2253 case LT_EXPR:
2254 return GT_EXPR;
2255 case LE_EXPR:
2256 return GE_EXPR;
2257 case UNGT_EXPR:
2258 return UNLT_EXPR;
2259 case UNGE_EXPR:
2260 return UNLE_EXPR;
2261 case UNLT_EXPR:
2262 return UNGT_EXPR;
2263 case UNLE_EXPR:
2264 return UNGE_EXPR;
2265 default:
2266 gcc_unreachable ();
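/* Illustrative example: "a < b" swaps to "b > a", and symmetric codes
   such as EQ_EXPR or UNORDERED_EXPR are returned unchanged.  Unlike
   inversion, swapping the operands never changes behavior on NaNs.  */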
2271 /* Convert a comparison tree code from an enum tree_code representation
2272 into a compcode bit-based encoding. This function is the inverse of
2273 compcode_to_comparison. */
2275 static enum comparison_code
2276 comparison_to_compcode (enum tree_code code)
2278 switch (code)
2280 case LT_EXPR:
2281 return COMPCODE_LT;
2282 case EQ_EXPR:
2283 return COMPCODE_EQ;
2284 case LE_EXPR:
2285 return COMPCODE_LE;
2286 case GT_EXPR:
2287 return COMPCODE_GT;
2288 case NE_EXPR:
2289 return COMPCODE_NE;
2290 case GE_EXPR:
2291 return COMPCODE_GE;
2292 case ORDERED_EXPR:
2293 return COMPCODE_ORD;
2294 case UNORDERED_EXPR:
2295 return COMPCODE_UNORD;
2296 case UNLT_EXPR:
2297 return COMPCODE_UNLT;
2298 case UNEQ_EXPR:
2299 return COMPCODE_UNEQ;
2300 case UNLE_EXPR:
2301 return COMPCODE_UNLE;
2302 case UNGT_EXPR:
2303 return COMPCODE_UNGT;
2304 case LTGT_EXPR:
2305 return COMPCODE_LTGT;
2306 case UNGE_EXPR:
2307 return COMPCODE_UNGE;
2308 default:
2309 gcc_unreachable ();
2313 /* Convert a compcode bit-based encoding of a comparison operator back
2314 to GCC's enum tree_code representation. This function is the
2315 inverse of comparison_to_compcode. */
2317 static enum tree_code
2318 compcode_to_comparison (enum comparison_code code)
2320 switch (code)
2322 case COMPCODE_LT:
2323 return LT_EXPR;
2324 case COMPCODE_EQ:
2325 return EQ_EXPR;
2326 case COMPCODE_LE:
2327 return LE_EXPR;
2328 case COMPCODE_GT:
2329 return GT_EXPR;
2330 case COMPCODE_NE:
2331 return NE_EXPR;
2332 case COMPCODE_GE:
2333 return GE_EXPR;
2334 case COMPCODE_ORD:
2335 return ORDERED_EXPR;
2336 case COMPCODE_UNORD:
2337 return UNORDERED_EXPR;
2338 case COMPCODE_UNLT:
2339 return UNLT_EXPR;
2340 case COMPCODE_UNEQ:
2341 return UNEQ_EXPR;
2342 case COMPCODE_UNLE:
2343 return UNLE_EXPR;
2344 case COMPCODE_UNGT:
2345 return UNGT_EXPR;
2346 case COMPCODE_LTGT:
2347 return LTGT_EXPR;
2348 case COMPCODE_UNGE:
2349 return UNGE_EXPR;
2350 default:
2351 gcc_unreachable ();
2355 /* Return a tree for the comparison which is the combination of
2356 doing the AND or OR (depending on CODE) of the two operations LCODE
2357 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2358 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2359 if this makes the transformation invalid. */
2361 tree
2362 combine_comparisons (location_t loc,
2363 enum tree_code code, enum tree_code lcode,
2364 enum tree_code rcode, tree truth_type,
2365 tree ll_arg, tree lr_arg)
2367 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2368 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2369 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2370 int compcode;
2372 switch (code)
2374 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2375 compcode = lcompcode & rcompcode;
2376 break;
2378 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2379 compcode = lcompcode | rcompcode;
2380 break;
2382 default:
2383 return NULL_TREE;
2386 if (!honor_nans)
2388 /* Eliminate unordered comparisons, as well as LTGT and ORD
2389 which are not used unless the mode has NaNs. */
2390 compcode &= ~COMPCODE_UNORD;
2391 if (compcode == COMPCODE_LTGT)
2392 compcode = COMPCODE_NE;
2393 else if (compcode == COMPCODE_ORD)
2394 compcode = COMPCODE_TRUE;
2396 else if (flag_trapping_math)
2398 /* Check that the original operation and the optimized ones will trap
2399 under the same condition. */
2400 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2401 && (lcompcode != COMPCODE_EQ)
2402 && (lcompcode != COMPCODE_ORD);
2403 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2404 && (rcompcode != COMPCODE_EQ)
2405 && (rcompcode != COMPCODE_ORD);
2406 bool trap = (compcode & COMPCODE_UNORD) == 0
2407 && (compcode != COMPCODE_EQ)
2408 && (compcode != COMPCODE_ORD);
2410 /* In a short-circuited boolean expression the LHS might be
2411 such that the RHS, if evaluated, will never trap. For
2412 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2413 if neither x nor y is NaN. (This is a mixed blessing: for
2414 example, the expression above will never trap, hence
2415 optimizing it to x < y would be invalid). */
2416 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2417 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2418 rtrap = false;
2420 /* If the comparison was short-circuited, and only the RHS
2421 trapped, we may now generate a spurious trap. */
2422 if (rtrap && !ltrap
2423 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2424 return NULL_TREE;
2426 /* If we changed the conditions that cause a trap, we lose. */
2427 if ((ltrap || rtrap) != trap)
2428 return NULL_TREE;
2431 if (compcode == COMPCODE_TRUE)
2432 return constant_boolean_node (true, truth_type);
2433 else if (compcode == COMPCODE_FALSE)
2434 return constant_boolean_node (false, truth_type);
2435 else
2437 enum tree_code tcode;
2439 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2440 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
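/* Illustrative example: for "(a < b) || (a == b)" the bit-wise OR of
   COMPCODE_LT and COMPCODE_EQ is COMPCODE_LE, so the pair folds to
   "a <= b"; for "(a < b) && (a > b)" the AND is COMPCODE_FALSE and a
   constant false is returned.  For floating-point operands with NaNs
   under -ftrapping-math, the trap analysis above may instead force a
   NULL_TREE result.  */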
2444 /* Return nonzero if two operands (typically of the same tree node)
2445 are necessarily equal. If either argument has side-effects this
2446 function returns zero. FLAGS modifies behavior as follows:
2448 If OEP_ONLY_CONST is set, only return nonzero for constants.
2449 This function tests whether the operands are indistinguishable;
2450 it does not test whether they are equal using C's == operation.
2451 The distinction is important for IEEE floating point, because
2452 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2453 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2455 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2456 even though it may hold multiple values during a function.
2457 This is because a GCC tree node guarantees that nothing else is
2458 executed between the evaluation of its "operands" (which may often
2459 be evaluated in arbitrary order). Hence if the operands themselves
2460 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2461 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2462 unset means assuming isochronic (or instantaneous) tree equivalence.
2463 Unless comparing arbitrary expression trees, such as from different
2464 statements, this flag can usually be left unset.
2466 If OEP_PURE_SAME is set, then pure functions with identical arguments
2467 are considered the same. It is used when the caller has other ways
2468 to ensure that global memory is unchanged in between. */
2471 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2473 /* If either is ERROR_MARK, they aren't equal. */
2474 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2475 || TREE_TYPE (arg0) == error_mark_node
2476 || TREE_TYPE (arg1) == error_mark_node)
2477 return 0;
2479 /* Similar, if either does not have a type (like a released SSA name),
2480 they aren't equal. */
2481 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2482 return 0;
2484 /* Check equality of integer constants before bailing out due to
2485 precision differences. */
2486 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2487 return tree_int_cst_equal (arg0, arg1);
2489 /* If the two types differ in signedness, then we can't consider
2490 them equal. We must check this before the STRIP_NOPS calls
2491 because they may change the signedness of the arguments. As pointers
2492 strictly don't have a signedness, require either two pointers or
2493 two non-pointers as well. */
2494 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2495 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2496 return 0;
2498 /* We cannot consider pointers to different address spaces equal. */
2499 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2500 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2501 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2502 return 0;
2504 /* If the two types differ in precision, then it is not safe
2505 to strip NOPs. */
2506 if (element_precision (TREE_TYPE (arg0))
2507 != element_precision (TREE_TYPE (arg1)))
2508 return 0;
2510 STRIP_NOPS (arg0);
2511 STRIP_NOPS (arg1);
2513 /* In case both args are comparisons but with different comparison
2514 code, try to swap the comparison operands of one arg to produce
2515 a match and compare that variant. */
2516 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2517 && COMPARISON_CLASS_P (arg0)
2518 && COMPARISON_CLASS_P (arg1))
2520 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2522 if (TREE_CODE (arg0) == swap_code)
2523 return operand_equal_p (TREE_OPERAND (arg0, 0),
2524 TREE_OPERAND (arg1, 1), flags)
2525 && operand_equal_p (TREE_OPERAND (arg0, 1),
2526 TREE_OPERAND (arg1, 0), flags);
2529 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2530 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2531 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2532 return 0;
2534 /* This is needed for conversions and for COMPONENT_REF.
2535 Might as well play it safe and always test this. */
2536 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2537 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2538 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2539 return 0;
2541 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2542 We don't care about side effects in that case because the SAVE_EXPR
2543 takes care of that for us. In all other cases, two expressions are
2544 equal if they have no side effects. If we have two identical
2545 expressions with side effects that should be treated the same due
2546 to the only side effects being identical SAVE_EXPR's, that will
2547 be detected in the recursive calls below.
2548 If we are taking an invariant address of two identical objects
2549 they are necessarily equal as well. */
2550 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2551 && (TREE_CODE (arg0) == SAVE_EXPR
2552 || (flags & OEP_CONSTANT_ADDRESS_OF)
2553 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2554 return 1;
2556 /* Next handle constant cases, those for which we can return 1 even
2557 if ONLY_CONST is set. */
2558 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2559 switch (TREE_CODE (arg0))
2561 case INTEGER_CST:
2562 return tree_int_cst_equal (arg0, arg1);
2564 case FIXED_CST:
2565 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2566 TREE_FIXED_CST (arg1));
2568 case REAL_CST:
2569 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2570 TREE_REAL_CST (arg1)))
2571 return 1;
2574 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2576 /* If we do not distinguish between signed and unsigned zero,
2577 consider them equal. */
2578 if (real_zerop (arg0) && real_zerop (arg1))
2579 return 1;
2581 return 0;
2583 case VECTOR_CST:
2585 unsigned i;
2587 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2588 return 0;
2590 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2592 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2593 VECTOR_CST_ELT (arg1, i), flags))
2594 return 0;
2596 return 1;
2599 case COMPLEX_CST:
2600 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2601 flags)
2602 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2603 flags));
2605 case STRING_CST:
2606 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2607 && ! memcmp (TREE_STRING_POINTER (arg0),
2608 TREE_STRING_POINTER (arg1),
2609 TREE_STRING_LENGTH (arg0)));
2611 case ADDR_EXPR:
2612 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2613 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2614 ? OEP_CONSTANT_ADDRESS_OF : 0);
2615 default:
2616 break;
2619 if (flags & OEP_ONLY_CONST)
2620 return 0;
2622 /* Define macros to test an operand from arg0 and arg1 for equality and a
2623 variant that allows null and views null as being different from any
2624 non-null value. In the latter case, if either is null, then both
2625 must be; otherwise, do the normal comparison. */
2626 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2627 TREE_OPERAND (arg1, N), flags)
2629 #define OP_SAME_WITH_NULL(N) \
2630 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2631 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2633 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2635 case tcc_unary:
2636 /* Two conversions are equal only if signedness and modes match. */
2637 switch (TREE_CODE (arg0))
2639 CASE_CONVERT:
2640 case FIX_TRUNC_EXPR:
2641 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2642 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2643 return 0;
2644 break;
2645 default:
2646 break;
2649 return OP_SAME (0);
2652 case tcc_comparison:
2653 case tcc_binary:
2654 if (OP_SAME (0) && OP_SAME (1))
2655 return 1;
2657 /* For commutative ops, allow the other order. */
2658 return (commutative_tree_code (TREE_CODE (arg0))
2659 && operand_equal_p (TREE_OPERAND (arg0, 0),
2660 TREE_OPERAND (arg1, 1), flags)
2661 && operand_equal_p (TREE_OPERAND (arg0, 1),
2662 TREE_OPERAND (arg1, 0), flags));
2664 case tcc_reference:
2665 /* If either of the pointer (or reference) expressions we are
2666 dereferencing contains a side effect, these cannot be equal,
2667 but their addresses can be. */
2668 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2669 && (TREE_SIDE_EFFECTS (arg0)
2670 || TREE_SIDE_EFFECTS (arg1)))
2671 return 0;
2673 switch (TREE_CODE (arg0))
2675 case INDIRECT_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 return OP_SAME (0);
2679 case REALPART_EXPR:
2680 case IMAGPART_EXPR:
2681 return OP_SAME (0);
2683 case TARGET_MEM_REF:
2684 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2685 /* Require equal extra operands and then fall through to MEM_REF
2686 handling of the two common operands. */
2687 if (!OP_SAME_WITH_NULL (2)
2688 || !OP_SAME_WITH_NULL (3)
2689 || !OP_SAME_WITH_NULL (4))
2690 return 0;
2691 /* Fallthru. */
2692 case MEM_REF:
2693 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2694 /* Require equal access sizes, and similar pointer types.
2695 We can have incomplete types for array references of
2696 variable-sized arrays from the Fortran frontend
2697 though. Also verify the types are compatible. */
2698 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2699 || (TYPE_SIZE (TREE_TYPE (arg0))
2700 && TYPE_SIZE (TREE_TYPE (arg1))
2701 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2702 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2703 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2704 && alias_ptr_types_compatible_p
2705 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2706 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2707 && OP_SAME (0) && OP_SAME (1));
2709 case ARRAY_REF:
2710 case ARRAY_RANGE_REF:
2711 /* Operands 2 and 3 may be null.
2712 Compare the array index by value first if it is constant, as the
2713 indexes may have different types but the same value here. */
2714 if (!OP_SAME (0))
2715 return 0;
2716 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2717 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2718 TREE_OPERAND (arg1, 1))
2719 || OP_SAME (1))
2720 && OP_SAME_WITH_NULL (2)
2721 && OP_SAME_WITH_NULL (3));
2723 case COMPONENT_REF:
2724 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2725 may be NULL when we're called to compare MEM_EXPRs. */
2726 if (!OP_SAME_WITH_NULL (0)
2727 || !OP_SAME (1))
2728 return 0;
2729 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2730 return OP_SAME_WITH_NULL (2);
2732 case BIT_FIELD_REF:
2733 if (!OP_SAME (0))
2734 return 0;
2735 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2736 return OP_SAME (1) && OP_SAME (2);
2738 default:
2739 return 0;
2742 case tcc_expression:
2743 switch (TREE_CODE (arg0))
2745 case ADDR_EXPR:
2746 case TRUTH_NOT_EXPR:
2747 return OP_SAME (0);
2749 case TRUTH_ANDIF_EXPR:
2750 case TRUTH_ORIF_EXPR:
2751 return OP_SAME (0) && OP_SAME (1);
2753 case FMA_EXPR:
2754 case WIDEN_MULT_PLUS_EXPR:
2755 case WIDEN_MULT_MINUS_EXPR:
2756 if (!OP_SAME (2))
2757 return 0;
2758 /* The multiplication operands are commutative. */
2759 /* FALLTHRU */
2761 case TRUTH_AND_EXPR:
2762 case TRUTH_OR_EXPR:
2763 case TRUTH_XOR_EXPR:
2764 if (OP_SAME (0) && OP_SAME (1))
2765 return 1;
2767 /* Otherwise take into account this is a commutative operation. */
2768 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2769 TREE_OPERAND (arg1, 1), flags)
2770 && operand_equal_p (TREE_OPERAND (arg0, 1),
2771 TREE_OPERAND (arg1, 0), flags));
2773 case COND_EXPR:
2774 case VEC_COND_EXPR:
2775 case DOT_PROD_EXPR:
2776 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2778 default:
2779 return 0;
2782 case tcc_vl_exp:
2783 switch (TREE_CODE (arg0))
2785 case CALL_EXPR:
2786 /* If the CALL_EXPRs call different functions, then they
2787 clearly cannot be equal. */
2788 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2789 flags))
2790 return 0;
2793 unsigned int cef = call_expr_flags (arg0);
2794 if (flags & OEP_PURE_SAME)
2795 cef &= ECF_CONST | ECF_PURE;
2796 else
2797 cef &= ECF_CONST;
2798 if (!cef)
2799 return 0;
2802 /* Now see if all the arguments are the same. */
2804 const_call_expr_arg_iterator iter0, iter1;
2805 const_tree a0, a1;
2806 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2807 a1 = first_const_call_expr_arg (arg1, &iter1);
2808 a0 && a1;
2809 a0 = next_const_call_expr_arg (&iter0),
2810 a1 = next_const_call_expr_arg (&iter1))
2811 if (! operand_equal_p (a0, a1, flags))
2812 return 0;
2814 /* If we get here and both argument lists are exhausted
2815 then the CALL_EXPRs are equal. */
2816 return ! (a0 || a1);
2818 default:
2819 return 0;
2822 case tcc_declaration:
2823 /* Consider __builtin_sqrt equal to sqrt. */
2824 return (TREE_CODE (arg0) == FUNCTION_DECL
2825 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2826 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2827 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2829 default:
2830 return 0;
2833 #undef OP_SAME
2834 #undef OP_SAME_WITH_NULL
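/* Illustrative examples: operand_equal_p considers "a + b" equal to
   "b + a" (commutative codes try both operand orders), while the
   REAL_CSTs 0.0 and -0.0 compare unequal whenever signed zeros are
   honored.  With OEP_ONLY_CONST, even a VAR_DECL compared against
   itself yields 0.  */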
2837 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2838 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2840 When in doubt, return 0. */
2842 static int
2843 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2845 int unsignedp1, unsignedpo;
2846 tree primarg0, primarg1, primother;
2847 unsigned int correct_width;
2849 if (operand_equal_p (arg0, arg1, 0))
2850 return 1;
2852 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2853 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2854 return 0;
2856 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2857 and see if the inner values are the same. This removes any
2858 signedness comparison, which doesn't matter here. */
2859 primarg0 = arg0, primarg1 = arg1;
2860 STRIP_NOPS (primarg0);
2861 STRIP_NOPS (primarg1);
2862 if (operand_equal_p (primarg0, primarg1, 0))
2863 return 1;
2865 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2866 actual comparison operand, ARG0.
2868 First throw away any conversions to wider types
2869 already present in the operands. */
2871 primarg1 = get_narrower (arg1, &unsignedp1);
2872 primother = get_narrower (other, &unsignedpo);
2874 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2875 if (unsignedp1 == unsignedpo
2876 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2877 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2879 tree type = TREE_TYPE (arg0);
2881 /* Make sure the shorter operand is extended the right way
2882 to match the longer operand. */
2883 primarg1 = fold_convert (signed_or_unsigned_type_for
2884 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2886 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2887 return 1;
2890 return 0;
2893 /* See if ARG is an expression that is either a comparison or is performing
2894 arithmetic on comparisons. The comparisons must only be comparing
2895 two different values, which will be stored in *CVAL1 and *CVAL2; if
2896 they are nonzero it means that some operands have already been found.
2897 No variables may be used anywhere else in the expression except in the
2898 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2899 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2901 If this is true, return 1. Otherwise, return zero. */
2903 static int
2904 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2906 enum tree_code code = TREE_CODE (arg);
2907 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2909 /* We can handle some of the tcc_expression cases here. */
2910 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2911 tclass = tcc_unary;
2912 else if (tclass == tcc_expression
2913 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2914 || code == COMPOUND_EXPR))
2915 tclass = tcc_binary;
2917 else if (tclass == tcc_expression && code == SAVE_EXPR
2918 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2920 /* If we've already found a CVAL1 or CVAL2, this expression is
2921 too complex to handle. */
2922 if (*cval1 || *cval2)
2923 return 0;
2925 tclass = tcc_unary;
2926 *save_p = 1;
2929 switch (tclass)
2931 case tcc_unary:
2932 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2934 case tcc_binary:
2935 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2936 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2937 cval1, cval2, save_p));
2939 case tcc_constant:
2940 return 1;
2942 case tcc_expression:
2943 if (code == COND_EXPR)
2944 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2945 cval1, cval2, save_p)
2946 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2947 cval1, cval2, save_p)
2948 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2949 cval1, cval2, save_p));
2950 return 0;
2952 case tcc_comparison:
2953 /* First see if we can handle the first operand, then the second. For
2954 the second operand, we know *CVAL1 can't be zero. Each side of the
2955 comparison must be one of the two values; test for the case where
2956 this isn't true by failing if the two operands
2957 are the same. */
2959 if (operand_equal_p (TREE_OPERAND (arg, 0),
2960 TREE_OPERAND (arg, 1), 0))
2961 return 0;
2963 if (*cval1 == 0)
2964 *cval1 = TREE_OPERAND (arg, 0);
2965 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2967 else if (*cval2 == 0)
2968 *cval2 = TREE_OPERAND (arg, 0);
2969 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2971 else
2972 return 0;
2974 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2976 else if (*cval2 == 0)
2977 *cval2 = TREE_OPERAND (arg, 1);
2978 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2980 else
2981 return 0;
2983 return 1;
2985 default:
2986 return 0;
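/* Illustrative example: for ARG "x < y || x == y" this sets
   *CVAL1 = x and *CVAL2 = y and returns 1; a third distinct operand,
   as in "x < y || z == 1", makes it return 0.  */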
2990 /* ARG is a tree that is known to contain just arithmetic operations and
2991 comparisons. Evaluate the operations in the tree substituting NEW0 for
2992 any occurrence of OLD0 as an operand of a comparison and likewise for
2993 NEW1 and OLD1. */
2995 static tree
2996 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2997 tree old1, tree new1)
2999 tree type = TREE_TYPE (arg);
3000 enum tree_code code = TREE_CODE (arg);
3001 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3003 /* We can handle some of the tcc_expression cases here. */
3004 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3005 tclass = tcc_unary;
3006 else if (tclass == tcc_expression
3007 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3008 tclass = tcc_binary;
3010 switch (tclass)
3012 case tcc_unary:
3013 return fold_build1_loc (loc, code, type,
3014 eval_subst (loc, TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1));
3017 case tcc_binary:
3018 return fold_build2_loc (loc, code, type,
3019 eval_subst (loc, TREE_OPERAND (arg, 0),
3020 old0, new0, old1, new1),
3021 eval_subst (loc, TREE_OPERAND (arg, 1),
3022 old0, new0, old1, new1));
3024 case tcc_expression:
3025 switch (code)
3027 case SAVE_EXPR:
3028 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3029 old1, new1);
3031 case COMPOUND_EXPR:
3032 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3033 old1, new1);
3035 case COND_EXPR:
3036 return fold_build3_loc (loc, code, type,
3037 eval_subst (loc, TREE_OPERAND (arg, 0),
3038 old0, new0, old1, new1),
3039 eval_subst (loc, TREE_OPERAND (arg, 1),
3040 old0, new0, old1, new1),
3041 eval_subst (loc, TREE_OPERAND (arg, 2),
3042 old0, new0, old1, new1));
3043 default:
3044 break;
3046 /* Fall through - ??? */
3048 case tcc_comparison:
3050 tree arg0 = TREE_OPERAND (arg, 0);
3051 tree arg1 = TREE_OPERAND (arg, 1);
3053 /* We need to check both for exact equality and tree equality. The
3054 former will be true if the operand has a side-effect. In that
3055 case, we know the operand occurred exactly once. */
3057 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3058 arg0 = new0;
3059 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3060 arg0 = new1;
3062 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3063 arg1 = new0;
3064 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3065 arg1 = new1;
3067 return fold_build2_loc (loc, code, type, arg0, arg1);
3070 default:
3071 return arg;
3075 /* Return a tree for the case when the result of an expression is RESULT
3076 converted to TYPE and OMITTED was previously an operand of the expression
3077 but is now not needed (e.g., we folded OMITTED * 0).
3079 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3080 the conversion of RESULT to TYPE. */
3082 tree
3083 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3085 tree t = fold_convert_loc (loc, type, result);
3087 /* If the resulting operand is an empty statement, just return the omitted
3088 statement cast to void. */
3089 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3090 return build1_loc (loc, NOP_EXPR, void_type_node,
3091 fold_ignored_result (omitted));
3093 if (TREE_SIDE_EFFECTS (omitted))
3094 return build2_loc (loc, COMPOUND_EXPR, type,
3095 fold_ignored_result (omitted), t);
3097 return non_lvalue_loc (loc, t);
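/* Illustrative example: when fold simplifies "foo () * 0", the call
   cannot simply be dropped, so omit_one_operand_loc produces
   "(foo (), 0)", a COMPOUND_EXPR that keeps the side effects; if the
   omitted operand has no side effects, only the conversion of RESULT
   to TYPE remains.  */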
3100 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3102 static tree
3103 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3104 tree omitted)
3106 tree t = fold_convert_loc (loc, type, result);
3108 /* If the resulting operand is an empty statement, just return the omitted
3109 statement cast to void. */
3110 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3111 return build1_loc (loc, NOP_EXPR, void_type_node,
3112 fold_ignored_result (omitted));
3114 if (TREE_SIDE_EFFECTS (omitted))
3115 return build2_loc (loc, COMPOUND_EXPR, type,
3116 fold_ignored_result (omitted), t);
3118 return pedantic_non_lvalue_loc (loc, t);
3121 /* Return a tree for the case when the result of an expression is RESULT
3122 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3123 of the expression but are now not needed.
3125 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3126 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3127 evaluated before OMITTED2. Otherwise, if neither has side effects,
3128 just do the conversion of RESULT to TYPE. */
3130 tree
3131 omit_two_operands_loc (location_t loc, tree type, tree result,
3132 tree omitted1, tree omitted2)
3134 tree t = fold_convert_loc (loc, type, result);
3136 if (TREE_SIDE_EFFECTS (omitted2))
3137 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3138 if (TREE_SIDE_EFFECTS (omitted1))
3139 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3141 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3145 /* Return a simplified tree node for the truth-negation of ARG. This
3146 never alters ARG itself. We assume that ARG is an operation that
3147 returns a truth value (0 or 1).
3149 FIXME: one would think we would fold the result, but it causes
3150 problems with the dominator optimizer. */
3152 static tree
3153 fold_truth_not_expr (location_t loc, tree arg)
3155 tree type = TREE_TYPE (arg);
3156 enum tree_code code = TREE_CODE (arg);
3157 location_t loc1, loc2;
3159 /* If this is a comparison, we can simply invert it, except for
3160 floating-point non-equality comparisons, in which case we just
3161 enclose a TRUTH_NOT_EXPR around what we have. */
3163 if (TREE_CODE_CLASS (code) == tcc_comparison)
3165 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3166 if (FLOAT_TYPE_P (op_type)
3167 && flag_trapping_math
3168 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3169 && code != NE_EXPR && code != EQ_EXPR)
3170 return NULL_TREE;
3172 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3173 if (code == ERROR_MARK)
3174 return NULL_TREE;
3176 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3177 TREE_OPERAND (arg, 1));
3180 switch (code)
3182 case INTEGER_CST:
3183 return constant_boolean_node (integer_zerop (arg), type);
3185 case TRUTH_AND_EXPR:
3186 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3187 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3188 return build2_loc (loc, TRUTH_OR_EXPR, type,
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3190 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3192 case TRUTH_OR_EXPR:
3193 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3194 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3195 return build2_loc (loc, TRUTH_AND_EXPR, type,
3196 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3197 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3199 case TRUTH_XOR_EXPR:
3200 /* Here we can invert either operand. We invert the first operand
3201 unless the second operand is a TRUTH_NOT_EXPR in which case our
3202 result is the XOR of the first operand with the inside of the
3203 negation of the second operand. */
3205 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3206 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3207 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3208 else
3209 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3210 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3211 TREE_OPERAND (arg, 1));
3213 case TRUTH_ANDIF_EXPR:
3214 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3215 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3216 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3217 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3218 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3220 case TRUTH_ORIF_EXPR:
3221 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3222 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3223 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3224 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3225 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3227 case TRUTH_NOT_EXPR:
3228 return TREE_OPERAND (arg, 0);
3230 case COND_EXPR:
3232 tree arg1 = TREE_OPERAND (arg, 1);
3233 tree arg2 = TREE_OPERAND (arg, 2);
3235 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3236 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3238 /* A COND_EXPR may have a throw as one operand, which
3239 then has void type. Just leave void operands
3240 as they are. */
3241 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3242 VOID_TYPE_P (TREE_TYPE (arg1))
3243 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3244 VOID_TYPE_P (TREE_TYPE (arg2))
3245 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3248 case COMPOUND_EXPR:
3249 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3250 return build2_loc (loc, COMPOUND_EXPR, type,
3251 TREE_OPERAND (arg, 0),
3252 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3254 case NON_LVALUE_EXPR:
3255 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3256 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3258 CASE_CONVERT:
3259 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3260 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3262 /* ... fall through ... */
3264 case FLOAT_EXPR:
3265 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3266 return build1_loc (loc, TREE_CODE (arg), type,
3267 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3269 case BIT_AND_EXPR:
3270 if (!integer_onep (TREE_OPERAND (arg, 1)))
3271 return NULL_TREE;
3272 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3274 case SAVE_EXPR:
3275 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3277 case CLEANUP_POINT_EXPR:
3278 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3279 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3280 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3282 default:
3283 return NULL_TREE;
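/* Illustrative example: the negation of "a && b" is built as
   "!a || !b" (and symmetrically for TRUTH_OR_EXPR), following
   De Morgan's laws.  A floating-point "a < b" under -ftrapping-math
   makes the function return NULL_TREE, so the caller keeps an
   explicit TRUTH_NOT_EXPR instead.  */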
3287 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3288 assume that ARG is an operation that returns a truth value (0 or 1
3289 for scalars, 0 or -1 for vectors). Return the folded expression if
3290 folding is successful. Otherwise, return NULL_TREE. */
3292 static tree
3293 fold_invert_truthvalue (location_t loc, tree arg)
3295 tree type = TREE_TYPE (arg);
3296 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3297 ? BIT_NOT_EXPR
3298 : TRUTH_NOT_EXPR,
3299 type, arg);
3302 /* Return a simplified tree node for the truth-negation of ARG. This
3303 never alters ARG itself. We assume that ARG is an operation that
3304 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3306 tree
3307 invert_truthvalue_loc (location_t loc, tree arg)
3309 if (TREE_CODE (arg) == ERROR_MARK)
3310 return arg;
3312 tree type = TREE_TYPE (arg);
3313 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3314 ? BIT_NOT_EXPR
3315 : TRUTH_NOT_EXPR,
3316 type, arg);
3319 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3320 operands are another bit-wise operation with a common input. If so,
3321 distribute the bit operations to save an operation and possibly two if
3322 constants are involved. For example, convert
3323 (A | B) & (A | C) into A | (B & C)
3324 Further simplification will occur if B and C are constants.
3326 If this optimization cannot be done, 0 will be returned. */
3328 static tree
3329 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3330 tree arg0, tree arg1)
3332 tree common;
3333 tree left, right;
3335 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3336 || TREE_CODE (arg0) == code
3337 || (TREE_CODE (arg0) != BIT_AND_EXPR
3338 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3339 return 0;
3341 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3343 common = TREE_OPERAND (arg0, 0);
3344 left = TREE_OPERAND (arg0, 1);
3345 right = TREE_OPERAND (arg1, 1);
3347 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3349 common = TREE_OPERAND (arg0, 0);
3350 left = TREE_OPERAND (arg0, 1);
3351 right = TREE_OPERAND (arg1, 0);
3353 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3355 common = TREE_OPERAND (arg0, 1);
3356 left = TREE_OPERAND (arg0, 0);
3357 right = TREE_OPERAND (arg1, 1);
3359 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3361 common = TREE_OPERAND (arg0, 1);
3362 left = TREE_OPERAND (arg0, 0);
3363 right = TREE_OPERAND (arg1, 0);
3365 else
3366 return 0;
3368 common = fold_convert_loc (loc, type, common);
3369 left = fold_convert_loc (loc, type, left);
3370 right = fold_convert_loc (loc, type, right);
3371 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3372 fold_build2_loc (loc, code, type, left, right));
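/* Illustrative example with constants: "(x | 3) & (x | 5)" becomes
   "x | (3 & 5)", which the inner fold_build2_loc call further folds
   to "x | 1", saving one bit-wise operation.  */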
3375 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3376 with code CODE. This optimization is unsafe. */
3377 static tree
3378 distribute_real_division (location_t loc, enum tree_code code, tree type,
3379 tree arg0, tree arg1)
3381 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3382 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3384 /* (A / C) +- (B / C) -> (A +- B) / C. */
3385 if (mul0 == mul1
3386 && operand_equal_p (TREE_OPERAND (arg0, 1),
3387 TREE_OPERAND (arg1, 1), 0))
3388 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3389 fold_build2_loc (loc, code, type,
3390 TREE_OPERAND (arg0, 0),
3391 TREE_OPERAND (arg1, 0)),
3392 TREE_OPERAND (arg0, 1));
3394 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3395 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3396 TREE_OPERAND (arg1, 0), 0)
3397 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3398 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3400 REAL_VALUE_TYPE r0, r1;
3401 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3402 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3403 if (!mul0)
3404 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3405 if (!mul1)
3406 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3407 real_arithmetic (&r0, code, &r0, &r1);
3408 return fold_build2_loc (loc, MULT_EXPR, type,
3409 TREE_OPERAND (arg0, 0),
3410 build_real (type, r0));
3413 return NULL_TREE;
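/* Illustrative example: "a / c + b / c" becomes "(a + b) / c", and
   "x / 2.0 + x / 4.0" becomes "x * 0.75".  This is unsafe for exact
   IEEE semantics (rounding of the intermediate reciprocals can change
   the result), which is why the comment above flags it and callers
   must guard its use.  */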
3416 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3417 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3419 static tree
3420 make_bit_field_ref (location_t loc, tree inner, tree type,
3421 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3423 tree result, bftype;
3425 if (bitpos == 0)
3427 tree size = TYPE_SIZE (TREE_TYPE (inner));
3428 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3429 || POINTER_TYPE_P (TREE_TYPE (inner)))
3430 && tree_fits_shwi_p (size)
3431 && tree_to_shwi (size) == bitsize)
3432 return fold_convert_loc (loc, type, inner);
3435 bftype = type;
3436 if (TYPE_PRECISION (bftype) != bitsize
3437 || TYPE_UNSIGNED (bftype) == !unsignedp)
3438 bftype = build_nonstandard_integer_type (bitsize, 0);
3440 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3441 size_int (bitsize), bitsize_int (bitpos));
3443 if (bftype != type)
3444 result = fold_convert_loc (loc, type, result);
3446 return result;
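/* Illustrative example: a call with BITSIZE 8 and BITPOS 16 yields
   "BIT_FIELD_REF <inner, 8, 16>" in an 8-bit integer type, converted
   to TYPE if needed; with BITPOS 0 and BITSIZE equal to the full
   width of an integral INNER, a plain conversion of INNER is
   returned instead.  */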
3449 /* Optimize a bit-field compare.
3451 There are two cases: First is a compare against a constant and the
3452 second is a comparison of two items where the fields are at the same
3453 bit position relative to the start of a chunk (byte, halfword, word)
3454 large enough to contain it. In these cases we can avoid the shift
3455 implicit in bitfield extractions.
3457 For constants, we emit a compare of the shifted constant with the
3458 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3459 compared. For two fields at the same position, we do the ANDs with the
3460 similar mask and compare the result of the ANDs.
3462 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3463 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3464 are the left and right operands of the comparison, respectively.
3466 If the optimization described above can be done, we return the resulting
3467 tree. Otherwise we return zero. */
3469 static tree
3470 optimize_bit_field_compare (location_t loc, enum tree_code code,
3471 tree compare_type, tree lhs, tree rhs)
3473 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3474 tree type = TREE_TYPE (lhs);
3475 tree signed_type, unsigned_type;
3476 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3477 enum machine_mode lmode, rmode, nmode;
3478 int lunsignedp, runsignedp;
3479 int lvolatilep = 0, rvolatilep = 0;
3480 tree linner, rinner = NULL_TREE;
3481 tree mask;
3482 tree offset;
3484 /* Get all the information about the extractions being done. If the bit size
3485 is the same as the size of the underlying object, we aren't doing an
3486 extraction at all and so can do nothing. We also don't want to
3487 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3488 then will no longer be able to replace it. */
3489 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3490 &lunsignedp, &lvolatilep, false);
3491 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3492 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3493 return 0;
3495 if (!const_p)
3497 /* If this is not a constant, we can only do something if bit positions,
3498 sizes, and signedness are the same. */
3499 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3500 &runsignedp, &rvolatilep, false);
3502 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3503 || lunsignedp != runsignedp || offset != 0
3504 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3505 return 0;
3508 /* See if we can find a mode to refer to this field. We should be able to,
3509 but fail if we can't. */
3510 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3511 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3512 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3513 TYPE_ALIGN (TREE_TYPE (rinner))),
3514 word_mode, false);
3515 if (nmode == VOIDmode)
3516 return 0;
3518 /* Set signed and unsigned types of the precision of this mode for the
3519 shifts below. */
3520 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3521 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3523 /* Compute the bit position and size for the new reference and our offset
3524 within it. If the new reference is the same size as the original, we
3525 won't optimize anything, so return zero. */
3526 nbitsize = GET_MODE_BITSIZE (nmode);
3527 nbitpos = lbitpos & ~ (nbitsize - 1);
3528 lbitpos -= nbitpos;
3529 if (nbitsize == lbitsize)
3530 return 0;
3532 if (BYTES_BIG_ENDIAN)
3533 lbitpos = nbitsize - lbitsize - lbitpos;
3535 /* Make the mask to be used against the extracted field. */
3536 mask = build_int_cst_type (unsigned_type, -1);
3537 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3538 mask = const_binop (RSHIFT_EXPR, mask,
3539 size_int (nbitsize - lbitsize - lbitpos));
3541 if (! const_p)
3542 /* If not comparing with constant, just rework the comparison
3543 and return. */
3544 return fold_build2_loc (loc, code, compare_type,
3545 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3546 make_bit_field_ref (loc, linner,
3547 unsigned_type,
3548 nbitsize, nbitpos,
3549 1),
3550 mask),
3551 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3552 make_bit_field_ref (loc, rinner,
3553 unsigned_type,
3554 nbitsize, nbitpos,
3555 1),
3556 mask));
3558 /* Otherwise, we are handling the constant case. See if the constant is too
3559 big for the field. Warn and return a tree for 0 (false) if so. We do
3560 this not only for its own sake, but to avoid having to test for this
3561 error case below. If we didn't, we might generate wrong code.
3563 For unsigned fields, the constant shifted right by the field length should
3564 be all zero. For signed fields, the high-order bits should agree with
3565 the sign bit. */
3567 if (lunsignedp)
3569 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3570 fold_convert_loc (loc,
3571 unsigned_type, rhs),
3572 size_int (lbitsize))))
3574 warning (0, "comparison is always %d due to width of bit-field",
3575 code == NE_EXPR);
3576 return constant_boolean_node (code == NE_EXPR, compare_type);
3579 else
3581 tree tem = const_binop (RSHIFT_EXPR,
3582 fold_convert_loc (loc, signed_type, rhs),
3583 size_int (lbitsize - 1));
3584 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3586 warning (0, "comparison is always %d due to width of bit-field",
3587 code == NE_EXPR);
3588 return constant_boolean_node (code == NE_EXPR, compare_type);
3592 /* Single-bit compares should always be against zero. */
3593 if (lbitsize == 1 && ! integer_zerop (rhs))
3595 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3596 rhs = build_int_cst (type, 0);
3599 /* Make a new bitfield reference, shift the constant over the
3600 appropriate number of bits and mask it with the computed mask
3601 (in case this was a signed field). If we changed it, make a new one. */
3602 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3604 rhs = const_binop (BIT_AND_EXPR,
3605 const_binop (LSHIFT_EXPR,
3606 fold_convert_loc (loc, unsigned_type, rhs),
3607 size_int (lbitpos)),
3608 mask);
3610 lhs = build2_loc (loc, code, compare_type,
3611 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3612 return lhs;
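/* Illustrative example (hypothetical layout): given
     struct S { unsigned a : 3; unsigned b : 5; } s;
   a little-endian target may fold "s.b == 3" into roughly
     (BIT_FIELD_REF <s, 8, 0> & 0xf8) == 0x18
   i.e. one byte-sized load, a mask and a compare, avoiding the shift
   implicit in extracting the bit-field.  The exact positions and
   masks depend on BYTES_BIG_ENDIAN and on the mode chosen by
   get_best_mode.  */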
3615 /* Subroutine for fold_truth_andor_1: decode a field reference.
3617 If EXP is a comparison reference, we return the innermost reference.
3619 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3620 set to the starting bit number.
3622 If the innermost field can be completely contained in a mode-sized
3623 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3625 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3626 otherwise it is not changed.
3628 *PUNSIGNEDP is set to the signedness of the field.
3630 *PMASK is set to the mask used. This is either contained in a
3631 BIT_AND_EXPR or derived from the width of the field.
3633 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3635 Return 0 if this is not a component reference or is one that we can't
3636 do anything with. */
3638 static tree
3639 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3640 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3641 int *punsignedp, int *pvolatilep,
3642 tree *pmask, tree *pand_mask)
3644 tree outer_type = 0;
3645 tree and_mask = 0;
3646 tree mask, inner, offset;
3647 tree unsigned_type;
3648 unsigned int precision;
3650 /* All the optimizations using this function assume integer fields.
3651 There are problems with FP fields since the type_for_size call
3652 below can fail for, e.g., XFmode. */
3653 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3654 return 0;
3656 /* We are interested in the bare arrangement of bits, so strip everything
3657 that doesn't affect the machine mode. However, record the type of the
3658 outermost expression if it may matter below. */
3659 if (CONVERT_EXPR_P (exp)
3660 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3661 outer_type = TREE_TYPE (exp);
3662 STRIP_NOPS (exp);
3664 if (TREE_CODE (exp) == BIT_AND_EXPR)
3666 and_mask = TREE_OPERAND (exp, 1);
3667 exp = TREE_OPERAND (exp, 0);
3668 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3669 if (TREE_CODE (and_mask) != INTEGER_CST)
3670 return 0;
3673 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3674 punsignedp, pvolatilep, false);
3675 if ((inner == exp && and_mask == 0)
3676 || *pbitsize < 0 || offset != 0
3677 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3678 return 0;
3680 /* If the number of bits in the reference is the same as the bitsize of
3681 the outer type, then the outer type gives the signedness. Otherwise
3682 (in case of a small bitfield) the signedness is unchanged. */
3683 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3684 *punsignedp = TYPE_UNSIGNED (outer_type);
3686 /* Compute the mask to access the bitfield. */
3687 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3688 precision = TYPE_PRECISION (unsigned_type);
3690 mask = build_int_cst_type (unsigned_type, -1);
3692 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3693 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3695 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3696 if (and_mask != 0)
3697 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3698 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3700 *pmask = mask;
3701 *pand_mask = and_mask;
3702 return inner;
3705 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3706 bit positions. */
3708 static int
3709 all_ones_mask_p (const_tree mask, int size)
3711 tree type = TREE_TYPE (mask);
3712 unsigned int precision = TYPE_PRECISION (type);
3713 tree tmask;
3715 tmask = build_int_cst_type (signed_type_for (type), -1);
3717 return
3718 tree_int_cst_equal (mask,
3719 const_binop (RSHIFT_EXPR,
3720 const_binop (LSHIFT_EXPR, tmask,
3721 size_int (precision - size)),
3722 size_int (precision - size)));
3725 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3726 represents the sign bit of EXP's type. If EXP represents a sign
3727 or zero extension, also test VAL against the unextended type.
3728 The return value is the (sub)expression whose sign bit is VAL,
3729 or NULL_TREE otherwise. */
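/* For example: if EXP has 32-bit int type, VAL must be 1 << 31 for
   this to return EXP; if EXP is (int) s with S a 16-bit short, VAL is
   additionally tested against 1 << 15, the sign bit of the narrower
   type, through the recursive call below.  */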
3731 static tree
3732 sign_bit_p (tree exp, const_tree val)
3734 unsigned HOST_WIDE_INT mask_lo, lo;
3735 HOST_WIDE_INT mask_hi, hi;
3736 int width;
3737 tree t;
3739 /* Tree EXP must have an integral type. */
3740 t = TREE_TYPE (exp);
3741 if (! INTEGRAL_TYPE_P (t))
3742 return NULL_TREE;
3744 /* Tree VAL must be an integer constant. */
3745 if (TREE_CODE (val) != INTEGER_CST
3746 || TREE_OVERFLOW (val))
3747 return NULL_TREE;
3749 width = TYPE_PRECISION (t);
3750 if (width > HOST_BITS_PER_WIDE_INT)
3752 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3753 lo = 0;
3755 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3756 mask_lo = -1;
3758 else
3760 hi = 0;
3761 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3763 mask_hi = 0;
3764 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3767 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3768 treat VAL as if it were unsigned. */
3769 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3770 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3771 return exp;
3773 /* Handle extension from a narrower type. */
3774 if (TREE_CODE (exp) == NOP_EXPR
3775 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3776 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3778 return NULL_TREE;
3781 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3782 to be evaluated unconditionally. */
3784 static int
3785 simple_operand_p (const_tree exp)
3787 /* Strip any conversions that don't change the machine mode. */
3788 STRIP_NOPS (exp);
3790 return (CONSTANT_CLASS_P (exp)
3791 || TREE_CODE (exp) == SSA_NAME
3792 || (DECL_P (exp)
3793 && ! TREE_ADDRESSABLE (exp)
3794 && ! TREE_THIS_VOLATILE (exp)
3795 && ! DECL_NONLOCAL (exp)
3796 /* Don't regard global variables as simple. They may be
3797 allocated in ways unknown to the compiler (shared memory,
3798 #pragma weak, etc). */
3799 && ! TREE_PUBLIC (exp)
3800 && ! DECL_EXTERNAL (exp)
3801 /* Weakrefs are not safe to be read, since they can be NULL.
3802 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3803 have DECL_WEAK flag set. */
3804 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3805 /* Loading a static variable is unduly expensive, but global
3806 registers aren't expensive. */
3807 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3810 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3811 to be evaluated unconditionally.
3812 In addition to simple_operand_p, we assume that comparisons, conversions,
3813 and logic-not operations are simple if their operands are simple, too. */
3815 static bool
3816 simple_operand_p_2 (tree exp)
3818 enum tree_code code;
3820 if (TREE_SIDE_EFFECTS (exp)
3821 || tree_could_trap_p (exp))
3822 return false;
3824 while (CONVERT_EXPR_P (exp))
3825 exp = TREE_OPERAND (exp, 0);
3827 code = TREE_CODE (exp);
3829 if (TREE_CODE_CLASS (code) == tcc_comparison)
3830 return (simple_operand_p (TREE_OPERAND (exp, 0))
3831 && simple_operand_p (TREE_OPERAND (exp, 1)));
3833 if (code == TRUTH_NOT_EXPR)
3834 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3836 return simple_operand_p (exp);
3840 /* The following functions are subroutines to fold_range_test and allow it to
3841 try to change a logical combination of comparisons into a range test.
3843 For example, both
3844 X == 2 || X == 3 || X == 4 || X == 5
3845 and
3846 X >= 2 && X <= 5
3847 are converted to
3848 (unsigned) (X - 2) <= 3
3850 We describe each set of comparisons as being either inside or outside
3851 a range, using a variable named like IN_P, and then describe the
3852 range with a lower and upper bound. If one of the bounds is omitted,
3853 it represents either the highest or lowest value of the type.
3855 In the comments below, we represent a range by two numbers in brackets
3856 preceded by a "+" to designate being inside that range, or a "-" to
3857 designate being outside that range, so the condition can be inverted by
3858 flipping the prefix. An omitted bound is represented by a "-". For
3859 example, "- [-, 10]" means being outside the range starting at the lowest
3860 possible value and ending at 10, in other words, being greater than 10.
3861 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3862 always false.
3864 We set up things so that the missing bounds are handled in a consistent
3865 manner so neither a missing bound nor "true" and "false" need to be
3866 handled using a special case. */
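/* A worked instance of the conversion above: with 32-bit unsigned
   arithmetic, X in [2, 5] becomes (unsigned) (X - 2) <= 3.  For X >= 2
   the subtraction yields 0 through 3; for X < 2 it wraps around to at
   least 0xfffffffe, far above 3, so one unsigned comparison checks
   both of the original bounds at once.  */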
3868 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3869 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3870 and UPPER1_P are nonzero if the respective argument is an upper bound
3871 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3872 must be specified for a comparison. ARG1 will be converted to ARG0's
3873 type if both are specified. */
3875 static tree
3876 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3877 tree arg1, int upper1_p)
3879 tree tem;
3880 int result;
3881 int sgn0, sgn1;
3883 /* If neither arg represents infinity, do the normal operation.
3884 Else, if not a comparison, return infinity. Else handle the special
3885 comparison rules. Note that most of the cases below won't occur, but
3886 are handled for consistency. */
3888 if (arg0 != 0 && arg1 != 0)
3890 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3891 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3892 STRIP_NOPS (tem);
3893 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3896 if (TREE_CODE_CLASS (code) != tcc_comparison)
3897 return 0;
3899 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3900 for neither. In real mathematics we could not assume that open-ended
3901 ranges are the same. But this is computer arithmetic, where numbers are
3902 finite, so we can treat each unbounded bound as if it were a value Z
3903 beyond any representable number (-Z for a lower bound). This permits
3904 us to treat unbounded ranges as equal. */
3905 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3906 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3907 switch (code)
3909 case EQ_EXPR:
3910 result = sgn0 == sgn1;
3911 break;
3912 case NE_EXPR:
3913 result = sgn0 != sgn1;
3914 break;
3915 case LT_EXPR:
3916 result = sgn0 < sgn1;
3917 break;
3918 case LE_EXPR:
3919 result = sgn0 <= sgn1;
3920 break;
3921 case GT_EXPR:
3922 result = sgn0 > sgn1;
3923 break;
3924 case GE_EXPR:
3925 result = sgn0 >= sgn1;
3926 break;
3927 default:
3928 gcc_unreachable ();
3931 return constant_boolean_node (result, type);
3934 /* Helper routine for make_range. Perform one step for it, return
3935 new expression if the loop should continue or NULL_TREE if it should
3936 stop. */
3938 tree
3939 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3940 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3941 bool *strict_overflow_p)
3943 tree arg0_type = TREE_TYPE (arg0);
3944 tree n_low, n_high, low = *p_low, high = *p_high;
3945 int in_p = *p_in_p, n_in_p;
3947 switch (code)
3949 case TRUTH_NOT_EXPR:
3950 /* We can only do something if the range is testing for zero. */
3951 if (low == NULL_TREE || high == NULL_TREE
3952 || ! integer_zerop (low) || ! integer_zerop (high))
3953 return NULL_TREE;
3954 *p_in_p = ! in_p;
3955 return arg0;
3957 case EQ_EXPR: case NE_EXPR:
3958 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3959 /* We can only do something if the range is testing for zero
3960 and if the second operand is an integer constant. Note that
3961 saying something is "in" the range we make is done by
3962 complementing IN_P since it will set in the initial case of
3963 being not equal to zero; "out" is leaving it alone. */
3964 if (low == NULL_TREE || high == NULL_TREE
3965 || ! integer_zerop (low) || ! integer_zerop (high)
3966 || TREE_CODE (arg1) != INTEGER_CST)
3967 return NULL_TREE;
3969 switch (code)
3971 case NE_EXPR: /* - [c, c] */
3972 low = high = arg1;
3973 break;
3974 case EQ_EXPR: /* + [c, c] */
3975 in_p = ! in_p, low = high = arg1;
3976 break;
3977 case GT_EXPR: /* - [-, c] */
3978 low = 0, high = arg1;
3979 break;
3980 case GE_EXPR: /* + [c, -] */
3981 in_p = ! in_p, low = arg1, high = 0;
3982 break;
3983 case LT_EXPR: /* - [c, -] */
3984 low = arg1, high = 0;
3985 break;
3986 case LE_EXPR: /* + [-, c] */
3987 in_p = ! in_p, low = 0, high = arg1;
3988 break;
3989 default:
3990 gcc_unreachable ();
3993 /* If this is an unsigned comparison, we also know that EXP is
3994 greater than or equal to zero. We base the range tests we make
3995 on that fact, so we record it here so we can parse existing
3996 range tests. We test arg0_type since often the return type
3997 of, e.g. EQ_EXPR, is boolean. */
3998 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4000 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4001 in_p, low, high, 1,
4002 build_int_cst (arg0_type, 0),
4003 NULL_TREE))
4004 return NULL_TREE;
4006 in_p = n_in_p, low = n_low, high = n_high;
4008 /* If the high bound is missing, but we have a nonzero low
4009 bound, reverse the range so it goes from zero to the low bound
4010 minus 1. */
4011 if (high == 0 && low && ! integer_zerop (low))
4013 in_p = ! in_p;
4014 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4015 integer_one_node, 0);
4016 low = build_int_cst (arg0_type, 0);
4020 *p_low = low;
4021 *p_high = high;
4022 *p_in_p = in_p;
4023 return arg0;
4025 case NEGATE_EXPR:
4026 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4027 low and high are non-NULL, then normalize will DTRT. */
4028 if (!TYPE_UNSIGNED (arg0_type)
4029 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4031 if (low == NULL_TREE)
4032 low = TYPE_MIN_VALUE (arg0_type);
4033 if (high == NULL_TREE)
4034 high = TYPE_MAX_VALUE (arg0_type);
4037 /* (-x) IN [a,b] -> x in [-b, -a] */
4038 n_low = range_binop (MINUS_EXPR, exp_type,
4039 build_int_cst (exp_type, 0),
4040 0, high, 1);
4041 n_high = range_binop (MINUS_EXPR, exp_type,
4042 build_int_cst (exp_type, 0),
4043 0, low, 0);
4044 if (n_high != 0 && TREE_OVERFLOW (n_high))
4045 return NULL_TREE;
4046 goto normalize;
4048 case BIT_NOT_EXPR:
4049 /* ~ X -> -X - 1 */
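      /* (Two's-complement identity: e.g. ~5 == -5 - 1 == -6.  The
	 MINUS_EXPR built here is then handled by a later iteration of
	 the caller's loop.)  */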
4050 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4051 build_int_cst (exp_type, 1));
4053 case PLUS_EXPR:
4054 case MINUS_EXPR:
4055 if (TREE_CODE (arg1) != INTEGER_CST)
4056 return NULL_TREE;
4058 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4059 move a constant to the other side. */
4060 if (!TYPE_UNSIGNED (arg0_type)
4061 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4062 return NULL_TREE;
4064 /* If EXP is signed, any overflow in the computation is undefined,
4065 so we don't worry about it so long as our computations on
4066 the bounds don't overflow. For unsigned, overflow is defined
4067 and this is exactly the right thing. */
4068 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4069 arg0_type, low, 0, arg1, 0);
4070 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4071 arg0_type, high, 1, arg1, 0);
4072 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4073 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4074 return NULL_TREE;
4076 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4077 *strict_overflow_p = true;
4079 normalize:
4080 /* Check for an unsigned range which has wrapped around the maximum
4081 value thus making n_high < n_low, and normalize it. */
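      /* Worked example: for unsigned 8-bit X, testing X + 20 in [5, 250]
	 gives n_low == 241 and n_high == 230 once the constant is moved
	 across, so the range has wrapped; it is rewritten as X not in
	 [231, 240], which denotes exactly the same set of values.  */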
4082 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4084 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4085 integer_one_node, 0);
4086 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4087 integer_one_node, 0);
4089 /* If the range is of the form +/- [ x+1, x ], we won't
4090 be able to normalize it. But then, it represents the
4091 whole range or the empty set, so make it
4092 +/- [ -, - ]. */
4093 if (tree_int_cst_equal (n_low, low)
4094 && tree_int_cst_equal (n_high, high))
4095 low = high = 0;
4096 else
4097 in_p = ! in_p;
4099 else
4100 low = n_low, high = n_high;
4102 *p_low = low;
4103 *p_high = high;
4104 *p_in_p = in_p;
4105 return arg0;
4107 CASE_CONVERT:
4108 case NON_LVALUE_EXPR:
4109 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4110 return NULL_TREE;
4112 if (! INTEGRAL_TYPE_P (arg0_type)
4113 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4114 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4115 return NULL_TREE;
4117 n_low = low, n_high = high;
4119 if (n_low != 0)
4120 n_low = fold_convert_loc (loc, arg0_type, n_low);
4122 if (n_high != 0)
4123 n_high = fold_convert_loc (loc, arg0_type, n_high);
4125 /* If we're converting arg0 from an unsigned type to exp,
4126 a signed type, we will be doing the comparison as unsigned.
4127 The tests above have already verified that LOW and HIGH
4128 are both positive.
4130 So we have to ensure that we will handle large unsigned
4131 values the same way that the current signed bounds treat
4132 negative values. */
4134 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4136 tree high_positive;
4137 tree equiv_type;
4138 /* For fixed-point modes, we need to pass the saturating flag
4139 as the 2nd parameter. */
4140 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4141 equiv_type
4142 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4143 TYPE_SATURATING (arg0_type));
4144 else
4145 equiv_type
4146 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4148 /* A range without an upper bound is, naturally, unbounded.
4149 Since convert would have cropped a very large value, use
4150 the max value for the destination type. */
4151 high_positive
4152 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4153 : TYPE_MAX_VALUE (arg0_type);
4155 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4156 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4157 fold_convert_loc (loc, arg0_type,
4158 high_positive),
4159 build_int_cst (arg0_type, 1));
4161 /* If the low bound is specified, "and" the range with the
4162 range for which the original unsigned value will be
4163 positive. */
4164 if (low != 0)
4166 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4167 1, fold_convert_loc (loc, arg0_type,
4168 integer_zero_node),
4169 high_positive))
4170 return NULL_TREE;
4172 in_p = (n_in_p == in_p);
4174 else
4176 /* Otherwise, "or" the range with the range of the input
4177 that will be interpreted as negative. */
4178 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4179 1, fold_convert_loc (loc, arg0_type,
4180 integer_zero_node),
4181 high_positive))
4182 return NULL_TREE;
4184 in_p = (in_p != n_in_p);
4188 *p_low = n_low;
4189 *p_high = n_high;
4190 *p_in_p = in_p;
4191 return arg0;
4193 default:
4194 return NULL_TREE;
4198 /* Given EXP, a logical expression, set the range it is testing into
4199 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4200 actually being tested. *PLOW and *PHIGH will be made of the same
4201 type as the returned expression. If EXP is not a comparison, we
4202 will most likely not be returning a useful value and range. Set
4203 *STRICT_OVERFLOW_P to true if the return value is only valid
4204 because signed overflow is undefined; otherwise, do not change
4205 *STRICT_OVERFLOW_P. */
4207 tree
4208 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4209 bool *strict_overflow_p)
4211 enum tree_code code;
4212 tree arg0, arg1 = NULL_TREE;
4213 tree exp_type, nexp;
4214 int in_p;
4215 tree low, high;
4216 location_t loc = EXPR_LOCATION (exp);
4218 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4219 and see if we can refine the range. Some of the cases below may not
4220 happen, but it doesn't seem worth worrying about this. We keep
4221 iterating while make_range_step can refine the range; once it
4222 returns NULL_TREE, we stop. */
4224 in_p = 0;
4225 low = high = build_int_cst (TREE_TYPE (exp), 0);
4227 while (1)
4229 code = TREE_CODE (exp);
4230 exp_type = TREE_TYPE (exp);
4231 arg0 = NULL_TREE;
4233 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4235 if (TREE_OPERAND_LENGTH (exp) > 0)
4236 arg0 = TREE_OPERAND (exp, 0);
4237 if (TREE_CODE_CLASS (code) == tcc_binary
4238 || TREE_CODE_CLASS (code) == tcc_comparison
4239 || (TREE_CODE_CLASS (code) == tcc_expression
4240 && TREE_OPERAND_LENGTH (exp) > 1))
4241 arg1 = TREE_OPERAND (exp, 1);
4243 if (arg0 == NULL_TREE)
4244 break;
4246 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4247 &high, &in_p, strict_overflow_p);
4248 if (nexp == NULL_TREE)
4249 break;
4250 exp = nexp;
4253 /* If EXP is a constant, we can evaluate whether this is true or false. */
4254 if (TREE_CODE (exp) == INTEGER_CST)
4256 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4257 exp, 0, low, 0))
4258 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4259 exp, 1, high, 1)));
4260 low = high = 0;
4261 exp = 0;
4264 *pin_p = in_p, *plow = low, *phigh = high;
4265 return exp;
4268 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4269 type, TYPE, return an expression to test if EXP is in (or out of, depending
4270 on IN_P) the range. Return 0 if the test couldn't be created. */
4272 tree
4273 build_range_check (location_t loc, tree type, tree exp, int in_p,
4274 tree low, tree high)
4276 tree etype = TREE_TYPE (exp), value;
4278 #ifdef HAVE_canonicalize_funcptr_for_compare
4279 /* Disable this optimization for function pointer expressions
4280 on targets that require function pointer canonicalization. */
4281 if (HAVE_canonicalize_funcptr_for_compare
4282 && TREE_CODE (etype) == POINTER_TYPE
4283 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4284 return NULL_TREE;
4285 #endif
4287 if (! in_p)
4289 value = build_range_check (loc, type, exp, 1, low, high);
4290 if (value != 0)
4291 return invert_truthvalue_loc (loc, value);
4293 return 0;
4296 if (low == 0 && high == 0)
4297 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4299 if (low == 0)
4300 return fold_build2_loc (loc, LE_EXPR, type, exp,
4301 fold_convert_loc (loc, etype, high));
4303 if (high == 0)
4304 return fold_build2_loc (loc, GE_EXPR, type, exp,
4305 fold_convert_loc (loc, etype, low));
4307 if (operand_equal_p (low, high, 0))
4308 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4309 fold_convert_loc (loc, etype, low));
4311 if (integer_zerop (low))
4313 if (! TYPE_UNSIGNED (etype))
4315 etype = unsigned_type_for (etype);
4316 high = fold_convert_loc (loc, etype, high);
4317 exp = fold_convert_loc (loc, etype, exp);
4319 return build_range_check (loc, type, exp, 1, 0, high);
4322 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
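  /* (For unsigned char C, the values 128 through 255 map to negative
     signed chars, so C >= 1 && C <= 127 holds exactly when
     (signed char) C > 0.)  */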
4323 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4325 unsigned HOST_WIDE_INT lo;
4326 HOST_WIDE_INT hi;
4327 int prec;
4329 prec = TYPE_PRECISION (etype);
4330 if (prec <= HOST_BITS_PER_WIDE_INT)
4332 hi = 0;
4333 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4335 else
4337 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4338 lo = HOST_WIDE_INT_M1U;
4341 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4343 if (TYPE_UNSIGNED (etype))
4345 tree signed_etype = signed_type_for (etype);
4346 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4347 etype
4348 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4349 else
4350 etype = signed_etype;
4351 exp = fold_convert_loc (loc, etype, exp);
4353 return fold_build2_loc (loc, GT_EXPR, type, exp,
4354 build_int_cst (etype, 0));
4358 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4359 This requires wrap-around arithmetic for the type of the expression.
4360 First make sure that arithmetic in this type is valid, then make sure
4361 that it wraps around. */
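  /* Illustrative instance: with wrap-around unsigned arithmetic,
     LOW <= C && C <= HIGH becomes the single comparison
     C - LOW <= HIGH - LOW; e.g. C in [10, 20] becomes C - 10 <= 10,
     since any C below 10 wraps to a huge value when 10 is subtracted.  */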
4362 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4363 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4364 TYPE_UNSIGNED (etype));
4366 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4368 tree utype, minv, maxv;
4370 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4371 for the type in question, as we rely on this here. */
4372 utype = unsigned_type_for (etype);
4373 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4374 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4375 integer_one_node, 1);
4376 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4378 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4379 minv, 1, maxv, 1)))
4380 etype = utype;
4381 else
4382 return 0;
4385 high = fold_convert_loc (loc, etype, high);
4386 low = fold_convert_loc (loc, etype, low);
4387 exp = fold_convert_loc (loc, etype, exp);
4389 value = const_binop (MINUS_EXPR, high, low);
4392 if (POINTER_TYPE_P (etype))
4394 if (value != 0 && !TREE_OVERFLOW (value))
4396 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4397 return build_range_check (loc, type,
4398 fold_build_pointer_plus_loc (loc, exp, low),
4399 1, build_int_cst (etype, 0), value);
4401 return 0;
4404 if (value != 0 && !TREE_OVERFLOW (value))
4405 return build_range_check (loc, type,
4406 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4407 1, build_int_cst (etype, 0), value);
4409 return 0;
4412 /* Return the predecessor of VAL in its type, handling the infinite case. */
4414 static tree
4415 range_predecessor (tree val)
4417 tree type = TREE_TYPE (val);
4419 if (INTEGRAL_TYPE_P (type)
4420 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4421 return 0;
4422 else
4423 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4426 /* Return the successor of VAL in its type, handling the infinite case. */
4428 static tree
4429 range_successor (tree val)
4431 tree type = TREE_TYPE (val);
4433 if (INTEGRAL_TYPE_P (type)
4434 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4435 return 0;
4436 else
4437 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4440 /* Given two ranges, see if we can merge them into one. Return 1 if we
4441 can, 0 if we can't. Set the output range into the specified parameters. */
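/* Illustrative examples: with both ranges "in" (the logical-AND case),
   merging + [2, 10] with + [5, 15] yields the overlap + [5, 10], while
   merging the disjoint + [2, 5] with + [7, 10] yields the always-false
   range - [-, -].  */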
4443 bool
4444 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4445 tree high0, int in1_p, tree low1, tree high1)
4447 int no_overlap;
4448 int subset;
4449 int temp;
4450 tree tem;
4451 int in_p;
4452 tree low, high;
4453 int lowequal = ((low0 == 0 && low1 == 0)
4454 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4455 low0, 0, low1, 0)));
4456 int highequal = ((high0 == 0 && high1 == 0)
4457 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4458 high0, 1, high1, 1)));
4460 /* Make range 0 be the range that starts first, or ends last if they
4461 start at the same value. Swap them if it isn't. */
4462 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4463 low0, 0, low1, 0))
4464 || (lowequal
4465 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4466 high1, 1, high0, 1))))
4468 temp = in0_p, in0_p = in1_p, in1_p = temp;
4469 tem = low0, low0 = low1, low1 = tem;
4470 tem = high0, high0 = high1, high1 = tem;
4473 /* Now flag two cases, whether the ranges are disjoint or whether the
4474 second range is totally subsumed in the first. Note that the tests
4475 below are simplified by the ones above. */
4476 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4477 high0, 1, low1, 0));
4478 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4479 high1, 1, high0, 1));
4481 /* We now have four cases, depending on whether we are including or
4482 excluding the two ranges. */
4483 if (in0_p && in1_p)
4485 /* If they don't overlap, the result is false. If the second range
4486 is a subset it is the result. Otherwise, the range is from the start
4487 of the second to the end of the first. */
4488 if (no_overlap)
4489 in_p = 0, low = high = 0;
4490 else if (subset)
4491 in_p = 1, low = low1, high = high1;
4492 else
4493 in_p = 1, low = low1, high = high0;
4496 else if (in0_p && ! in1_p)
4498 /* If they don't overlap, the result is the first range. If they are
4499 equal, the result is false. If the second range is a subset of the
4500 first, and the ranges begin at the same place, we go from just after
4501 the end of the second range to the end of the first. If the second
4502 range is not a subset of the first, or if it is a subset and both
4503 ranges end at the same place, the range starts at the start of the
4504 first range and ends just before the second range.
4505 Otherwise, we can't describe this as a single range. */
4506 if (no_overlap)
4507 in_p = 1, low = low0, high = high0;
4508 else if (lowequal && highequal)
4509 in_p = 0, low = high = 0;
4510 else if (subset && lowequal)
4512 low = range_successor (high1);
4513 high = high0;
4514 in_p = 1;
4515 if (low == 0)
4517 /* We are in the weird situation where high0 > high1 but
4518 high1 has no successor. Punt. */
4519 return 0;
4522 else if (! subset || highequal)
4524 low = low0;
4525 high = range_predecessor (low1);
4526 in_p = 1;
4527 if (high == 0)
4529 /* low0 < low1 but low1 has no predecessor. Punt. */
4530 return 0;
4533 else
4534 return 0;
4537 else if (! in0_p && in1_p)
4539 /* If they don't overlap, the result is the second range. If the second
4540 is a subset of the first, the result is false. Otherwise,
4541 the range starts just after the first range and ends at the
4542 end of the second. */
4543 if (no_overlap)
4544 in_p = 1, low = low1, high = high1;
4545 else if (subset || highequal)
4546 in_p = 0, low = high = 0;
4547 else
4549 low = range_successor (high0);
4550 high = high1;
4551 in_p = 1;
4552 if (low == 0)
4554 /* high1 > high0 but high0 has no successor. Punt. */
4555 return 0;
4560 else
4562 /* The case where we are excluding both ranges. Here the complex case
4563 is if they don't overlap. In that case, the only time we have a
4564 range is if they are adjacent. If the second is a subset of the
4565 first, the result is the first. Otherwise, the range to exclude
4566 starts at the beginning of the first range and ends at the end of the
4567 second. */
4568 if (no_overlap)
4570 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4571 range_successor (high0),
4572 1, low1, 0)))
4573 in_p = 0, low = low0, high = high1;
4574 else
4576 /* Canonicalize - [min, x] into - [-, x]. */
4577 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4578 switch (TREE_CODE (TREE_TYPE (low0)))
4580 case ENUMERAL_TYPE:
4581 if (TYPE_PRECISION (TREE_TYPE (low0))
4582 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4583 break;
4584 /* FALLTHROUGH */
4585 case INTEGER_TYPE:
4586 if (tree_int_cst_equal (low0,
4587 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4588 low0 = 0;
4589 break;
4590 case POINTER_TYPE:
4591 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4592 && integer_zerop (low0))
4593 low0 = 0;
4594 break;
4595 default:
4596 break;
4599 /* Canonicalize - [x, max] into - [x, -]. */
4600 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4601 switch (TREE_CODE (TREE_TYPE (high1)))
4603 case ENUMERAL_TYPE:
4604 if (TYPE_PRECISION (TREE_TYPE (high1))
4605 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4606 break;
4607 /* FALLTHROUGH */
4608 case INTEGER_TYPE:
4609 if (tree_int_cst_equal (high1,
4610 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4611 high1 = 0;
4612 break;
4613 case POINTER_TYPE:
4614 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4615 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4616 high1, 1,
4617 integer_one_node, 1)))
4618 high1 = 0;
4619 break;
4620 default:
4621 break;
4624 /* The ranges might be also adjacent between the maximum and
4625 minimum values of the given type. For
4626 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4627 return + [x + 1, y - 1]. */
4628 if (low0 == 0 && high1 == 0)
4630 low = range_successor (high0);
4631 high = range_predecessor (low1);
4632 if (low == 0 || high == 0)
4633 return 0;
4635 in_p = 1;
4637 else
4638 return 0;
4641 else if (subset)
4642 in_p = 0, low = low0, high = high0;
4643 else
4644 in_p = 0, low = low0, high = high1;
4647 *pin_p = in_p, *plow = low, *phigh = high;
4648 return 1;
4652 /* Subroutine of fold, looking inside expressions of the form
4653 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4654 of the COND_EXPR. This function is also used to optimize
4655 A op B ? C : A by reversing the comparison first.
4657 Return a folded expression whose code is not a COND_EXPR
4658 anymore, or NULL_TREE if no folding opportunity is found. */
4660 static tree
4661 fold_cond_expr_with_comparison (location_t loc, tree type,
4662 tree arg0, tree arg1, tree arg2)
4664 enum tree_code comp_code = TREE_CODE (arg0);
4665 tree arg00 = TREE_OPERAND (arg0, 0);
4666 tree arg01 = TREE_OPERAND (arg0, 1);
4667 tree arg1_type = TREE_TYPE (arg1);
4668 tree tem;
4670 STRIP_NOPS (arg1);
4671 STRIP_NOPS (arg2);
4673 /* If we have A op 0 ? A : -A, consider applying the following
4674 transformations:
4676 A == 0? A : -A same as -A
4677 A != 0? A : -A same as A
4678 A >= 0? A : -A same as abs (A)
4679 A > 0? A : -A same as abs (A)
4680 A <= 0? A : -A same as -abs (A)
4681 A < 0? A : -A same as -abs (A)
4683 None of these transformations work for modes with signed
4684 zeros. If A is +/-0, the first two transformations will
4685 change the sign of the result (from +0 to -0, or vice
4686 versa). The last four will fix the sign of the result,
4687 even though the original expressions could be positive or
4688 negative, depending on the sign of A.
4690 Note that all these transformations are correct if A is
4691 NaN, since the two alternatives (A and -A) are also NaNs. */
4692 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4693 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4694 ? real_zerop (arg01)
4695 : integer_zerop (arg01))
4696 && ((TREE_CODE (arg2) == NEGATE_EXPR
4697 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4698 /* In the case that A is of the form X-Y, '-A' (arg2) may
4699 have already been folded to Y-X, check for that. */
4700 || (TREE_CODE (arg1) == MINUS_EXPR
4701 && TREE_CODE (arg2) == MINUS_EXPR
4702 && operand_equal_p (TREE_OPERAND (arg1, 0),
4703 TREE_OPERAND (arg2, 1), 0)
4704 && operand_equal_p (TREE_OPERAND (arg1, 1),
4705 TREE_OPERAND (arg2, 0), 0))))
4706 switch (comp_code)
4708 case EQ_EXPR:
4709 case UNEQ_EXPR:
4710 tem = fold_convert_loc (loc, arg1_type, arg1);
4711 return pedantic_non_lvalue_loc (loc,
4712 fold_convert_loc (loc, type,
4713 negate_expr (tem)));
4714 case NE_EXPR:
4715 case LTGT_EXPR:
4716 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4717 case UNGE_EXPR:
4718 case UNGT_EXPR:
4719 if (flag_trapping_math)
4720 break;
4721 /* Fall through. */
4722 case GE_EXPR:
4723 case GT_EXPR:
4724 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4725 arg1 = fold_convert_loc (loc, signed_type_for
4726 (TREE_TYPE (arg1)), arg1);
4727 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4728 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4729 case UNLE_EXPR:
4730 case UNLT_EXPR:
4731 if (flag_trapping_math)
4732 break;
4733 case LE_EXPR:
4734 case LT_EXPR:
4735 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4736 arg1 = fold_convert_loc (loc, signed_type_for
4737 (TREE_TYPE (arg1)), arg1);
4738 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4739 return negate_expr (fold_convert_loc (loc, type, tem));
4740 default:
4741 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4742 break;
4745 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4746 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4747 both transformations are correct when A is NaN: A != 0
4748 is then true, and A == 0 is false. */
4750 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4751 && integer_zerop (arg01) && integer_zerop (arg2))
4753 if (comp_code == NE_EXPR)
4754 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4755 else if (comp_code == EQ_EXPR)
4756 return build_zero_cst (type);
4759 /* Try some transformations of A op B ? A : B.
4761 A == B? A : B same as B
4762 A != B? A : B same as A
4763 A >= B? A : B same as max (A, B)
4764 A > B? A : B same as max (B, A)
4765 A <= B? A : B same as min (A, B)
4766 A < B? A : B same as min (B, A)
4768 As above, these transformations don't work in the presence
4769 of signed zeros. For example, if A and B are zeros of
4770 opposite sign, the first two transformations will change
4771 the sign of the result. In the last four, the original
4772 expressions give different results for (A=+0, B=-0) and
4773 (A=-0, B=+0), but the transformed expressions do not.
4775 The first two transformations are correct if either A or B
4776 is a NaN. In the first transformation, the condition will
4777 be false, and B will indeed be chosen. In the case of the
4778 second transformation, the condition A != B will be true,
4779 and A will be chosen.
4781 The conversions to max() and min() are not correct if B is
4782 a number and A is not. The conditions in the original
4783 expressions will be false, so all four give B. The min()
4784 and max() versions would give a NaN instead. */
4785 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4786 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4787 /* Avoid these transformations if the COND_EXPR may be used
4788 as an lvalue in the C++ front-end. PR c++/19199. */
4789 && (in_gimple_form
4790 || VECTOR_TYPE_P (type)
4791 || (strcmp (lang_hooks.name, "GNU C++") != 0
4792 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4793 || ! maybe_lvalue_p (arg1)
4794 || ! maybe_lvalue_p (arg2)))
4796 tree comp_op0 = arg00;
4797 tree comp_op1 = arg01;
4798 tree comp_type = TREE_TYPE (comp_op0);
4800 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4801 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4803 comp_type = type;
4804 comp_op0 = arg1;
4805 comp_op1 = arg2;
4808 switch (comp_code)
4810 case EQ_EXPR:
4811 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4812 case NE_EXPR:
4813 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4814 case LE_EXPR:
4815 case LT_EXPR:
4816 case UNLE_EXPR:
4817 case UNLT_EXPR:
4818 /* In C++ a ?: expression can be an lvalue, so put the
4819 operand which will be used if they are equal first
4820 so that we can convert this back to the
4821 corresponding COND_EXPR. */
4822 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4824 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4825 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4826 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4827 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4828 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4829 comp_op1, comp_op0);
4830 return pedantic_non_lvalue_loc (loc,
4831 fold_convert_loc (loc, type, tem));
4833 break;
4834 case GE_EXPR:
4835 case GT_EXPR:
4836 case UNGE_EXPR:
4837 case UNGT_EXPR:
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4841 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4842 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4843 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4845 comp_op1, comp_op0);
4846 return pedantic_non_lvalue_loc (loc,
4847 fold_convert_loc (loc, type, tem));
4849 break;
4850 case UNEQ_EXPR:
4851 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4852 return pedantic_non_lvalue_loc (loc,
4853 fold_convert_loc (loc, type, arg2));
4854 break;
4855 case LTGT_EXPR:
4856 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4857 return pedantic_non_lvalue_loc (loc,
4858 fold_convert_loc (loc, type, arg1));
4859 break;
4860 default:
4861 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4862 break;
4866 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4867 we might still be able to simplify this. For example,
4868 if C1 is one less or one more than C2, this might have started
4869 out as a MIN or MAX and been transformed by this function.
4870 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
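/* Illustrative instance: "A < 11 ? A : 10" has C1 == C2 + 1, so it may
   have started out as MIN (A, 10) and is turned back into that by the
   LT_EXPR case below.  */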
4872 if (INTEGRAL_TYPE_P (type)
4873 && TREE_CODE (arg01) == INTEGER_CST
4874 && TREE_CODE (arg2) == INTEGER_CST)
4875 switch (comp_code)
4877 case EQ_EXPR:
4878 if (TREE_CODE (arg1) == INTEGER_CST)
4879 break;
4880 /* We can replace A with C1 in this case. */
4881 arg1 = fold_convert_loc (loc, type, arg01);
4882 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4884 case LT_EXPR:
4885 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4886 MIN_EXPR, to preserve the signedness of the comparison. */
4887 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4888 OEP_ONLY_CONST)
4889 && operand_equal_p (arg01,
4890 const_binop (PLUS_EXPR, arg2,
4891 build_int_cst (type, 1)),
4892 OEP_ONLY_CONST))
4894 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4895 fold_convert_loc (loc, TREE_TYPE (arg00),
4896 arg2));
4897 return pedantic_non_lvalue_loc (loc,
4898 fold_convert_loc (loc, type, tem));
4900 break;
4902 case LE_EXPR:
4903 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4904 as above. */
4905 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4906 OEP_ONLY_CONST)
4907 && operand_equal_p (arg01,
4908 const_binop (MINUS_EXPR, arg2,
4909 build_int_cst (type, 1)),
4910 OEP_ONLY_CONST))
4912 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4913 fold_convert_loc (loc, TREE_TYPE (arg00),
4914 arg2));
4915 return pedantic_non_lvalue_loc (loc,
4916 fold_convert_loc (loc, type, tem));
4918 break;
4920 case GT_EXPR:
4921 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4922 MAX_EXPR, to preserve the signedness of the comparison. */
4923 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4924 OEP_ONLY_CONST)
4925 && operand_equal_p (arg01,
4926 const_binop (MINUS_EXPR, arg2,
4927 build_int_cst (type, 1)),
4928 OEP_ONLY_CONST))
4930 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4931 fold_convert_loc (loc, TREE_TYPE (arg00),
4932 arg2));
4933 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4935 break;
4937 case GE_EXPR:
4938 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4939 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4940 OEP_ONLY_CONST)
4941 && operand_equal_p (arg01,
4942 const_binop (PLUS_EXPR, arg2,
4943 build_int_cst (type, 1)),
4944 OEP_ONLY_CONST))
4946 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4947 fold_convert_loc (loc, TREE_TYPE (arg00),
4948 arg2));
4949 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4951 break;
4952 case NE_EXPR:
4953 break;
4954 default:
4955 gcc_unreachable ();
4958 return NULL_TREE;
4963 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4964 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4965 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4966 false) >= 2)
4967 #endif
4969 /* EXP is some logical combination of boolean tests. See if we can
4970 merge it into some range test. Return the new tree if so. */
4972 static tree
4973 fold_range_test (location_t loc, enum tree_code code, tree type,
4974 tree op0, tree op1)
4976 int or_op = (code == TRUTH_ORIF_EXPR
4977 || code == TRUTH_OR_EXPR);
4978 int in0_p, in1_p, in_p;
4979 tree low0, low1, low, high0, high1, high;
4980 bool strict_overflow_p = false;
4981 tree tem, lhs, rhs;
4982 const char * const warnmsg = G_("assuming signed overflow does not occur "
4983 "when simplifying range test");
4985 if (!INTEGRAL_TYPE_P (type))
4986 return 0;
4988 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4989 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4991 /* If this is an OR operation, invert both sides; we will invert
4992 again at the end. */
4993 if (or_op)
4994 in0_p = ! in0_p, in1_p = ! in1_p;
4996 /* If both expressions are the same, if we can merge the ranges, and we
4997 can build the range test, return it or it inverted. If one of the
4998 ranges is always true or always false, consider it to be the same
4999 expression as the other. */
5000 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5001 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5002 in1_p, low1, high1)
5003 && 0 != (tem = (build_range_check (loc, type,
5004 lhs != 0 ? lhs
5005 : rhs != 0 ? rhs : integer_zero_node,
5006 in_p, low, high))))
5008 if (strict_overflow_p)
5009 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5010 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5013 /* On machines where branches are expensive, if this is a
5014 short-circuited branch and the underlying object on both sides
5015 is the same, make a non-short-circuit operation. */
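  /* (Illustrative: for "X > 0 && X < 10" both halves test the same X,
     so when branches are costly enough we can emit a TRUTH_AND_EXPR
     that evaluates both comparisons unconditionally instead of the
     short-circuiting TRUTH_ANDIF_EXPR.)  */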
5016 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5017 && lhs != 0 && rhs != 0
5018 && (code == TRUTH_ANDIF_EXPR
5019 || code == TRUTH_ORIF_EXPR)
5020 && operand_equal_p (lhs, rhs, 0))
5022 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5023 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5024 which cases we can't do this. */
5025 if (simple_operand_p (lhs))
5026 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5027 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5028 type, op0, op1);
5030 else if (!lang_hooks.decls.global_bindings_p ()
5031 && !CONTAINS_PLACEHOLDER_P (lhs))
5033 tree common = save_expr (lhs);
5035 if (0 != (lhs = build_range_check (loc, type, common,
5036 or_op ? ! in0_p : in0_p,
5037 low0, high0))
5038 && (0 != (rhs = build_range_check (loc, type, common,
5039 or_op ? ! in1_p : in1_p,
5040 low1, high1))))
5042 if (strict_overflow_p)
5043 fold_overflow_warning (warnmsg,
5044 WARN_STRICT_OVERFLOW_COMPARISON);
5045 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5046 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5047 type, lhs, rhs);
5052 return 0;
5055 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5056 bit value. Arrange things so the extra bits will be set to zero if and
5057 only if C is sign-extended to its full width. If MASK is nonzero,
5058 it is an INTEGER_CST that should be AND'ed with the extra bits. */
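/* A worked example, assuming a 32-bit mode, P == 8, and no MASK: for
   C == 0xffffff80 the extracted sign bit is 1, TEMP becomes 0xffffff00
   after the two shifts, and C ^ TEMP == 0x80 -- the extra bits come out
   zero precisely because C was the sign extension of 0x80.  For
   C == 0x00000080, which is not sign-extended, the same steps give
   0xffffff80 instead.  */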
5060 static tree
5061 unextend (tree c, int p, int unsignedp, tree mask)
5063 tree type = TREE_TYPE (c);
5064 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5065 tree temp;
5067 if (p == modesize || unsignedp)
5068 return c;
5070 /* We work by getting just the sign bit into the low-order bit, then
5071 into the high-order bit, then sign-extend. We then XOR that value
5072 with C. */
5073 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5074 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5076 /* We must use a signed type in order to get an arithmetic right shift.
5077 However, we must also avoid introducing accidental overflows, so that
5078 a subsequent call to integer_zerop will work. Hence we must
5079 do the type conversion here. At this point, the constant is either
5080 zero or one, and the conversion to a signed type can never overflow.
5081 We could get an overflow if this conversion is done anywhere else. */
5082 if (TYPE_UNSIGNED (type))
5083 temp = fold_convert (signed_type_for (type), temp);
5085 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5086 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5087 if (mask != 0)
5088 temp = const_binop (BIT_AND_EXPR, temp,
5089 fold_convert (TREE_TYPE (c), mask));
5090 /* If necessary, convert the type back to match the type of C. */
5091 if (TYPE_UNSIGNED (type))
5092 temp = fold_convert (type, temp);
5094 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5097 /* For an expression that has the form
5098 (A && B) || ~B
5099 or
5100 (A || B) && ~B,
5101 we can drop one of the inner expressions and simplify to
5102 A || ~B
5103 or
5104 A && ~B
5105 LOC is the location of the resulting expression. OP is the inner
5106 logical operation; the left-hand side in the examples above, while CMPOP
5107 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5108 removing a condition that guards another, as in
5109 (A != NULL && A->...) || A == NULL
5110 which we must not transform. If RHS_ONLY is true, only eliminate the
5111 right-most operand of the inner logical operation. */
5113 static tree
5114 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5115 bool rhs_only)
5117 tree type = TREE_TYPE (cmpop);
5118 enum tree_code code = TREE_CODE (cmpop);
5119 enum tree_code truthop_code = TREE_CODE (op);
5120 tree lhs = TREE_OPERAND (op, 0);
5121 tree rhs = TREE_OPERAND (op, 1);
5122 tree orig_lhs = lhs, orig_rhs = rhs;
5123 enum tree_code rhs_code = TREE_CODE (rhs);
5124 enum tree_code lhs_code = TREE_CODE (lhs);
5125 enum tree_code inv_code;
5127 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5128 return NULL_TREE;
5130 if (TREE_CODE_CLASS (code) != tcc_comparison)
5131 return NULL_TREE;
5133 if (rhs_code == truthop_code)
5135 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5136 if (newrhs != NULL_TREE)
5138 rhs = newrhs;
5139 rhs_code = TREE_CODE (rhs);
5142 if (lhs_code == truthop_code && !rhs_only)
5144 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5145 if (newlhs != NULL_TREE)
5147 lhs = newlhs;
5148 lhs_code = TREE_CODE (lhs);
5152 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5153 if (inv_code == rhs_code
5154 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5155 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5156 return lhs;
5157 if (!rhs_only && inv_code == lhs_code
5158 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5159 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5160 return rhs;
5161 if (rhs != orig_rhs || lhs != orig_lhs)
5162 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5163 lhs, rhs);
5164 return NULL_TREE;
5167 /* Find ways of folding logical expressions of LHS and RHS:
5168 Try to merge two comparisons to the same innermost item.
5169 Look for range tests like "ch >= '0' && ch <= '9'".
5170 Look for combinations of simple terms on machines with expensive branches
5171 and evaluate the RHS unconditionally.
5173 For example, if we have p->a == 2 && p->b == 4 and we can make an
5174 object large enough to span both A and B, we can do this with a comparison
5175 against the object ANDed with the a mask.
5177 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5178 operations to do this with one comparison.
5180 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5181 function and the one above.
5183 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5184 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5186 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5187 two operands.
5189 We return the simplified tree or 0 if no optimization is possible. */
5191 static tree
5192 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5193 tree lhs, tree rhs)
5195 /* If this is the "or" of two comparisons, we can do something if
5196 the comparisons are NE_EXPR. If this is the "and", we can do something
5197 if the comparisons are EQ_EXPR. I.e.,
5198 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5200 WANTED_CODE is this operation code. For single bit fields, we can
5201 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5202 comparison for one-bit fields. */
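/* (Illustrative: for a one-bit field B, "B == 0" and "B != 1" select
   exactly the same values, which is what lets a mismatched comparison
   code be flipped rather than rejected.)  */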
5204 enum tree_code wanted_code;
5205 enum tree_code lcode, rcode;
5206 tree ll_arg, lr_arg, rl_arg, rr_arg;
5207 tree ll_inner, lr_inner, rl_inner, rr_inner;
5208 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5209 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5210 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5211 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5212 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5213 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5214 enum machine_mode lnmode, rnmode;
5215 tree ll_mask, lr_mask, rl_mask, rr_mask;
5216 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5217 tree l_const, r_const;
5218 tree lntype, rntype, result;
5219 HOST_WIDE_INT first_bit, end_bit;
5220 int volatilep;
5222 /* Start by getting the comparison codes. Fail if anything is volatile.
5223 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5224 it were surrounded with a NE_EXPR. */
5226 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5227 return 0;
5229 lcode = TREE_CODE (lhs);
5230 rcode = TREE_CODE (rhs);
5232 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5234 lhs = build2 (NE_EXPR, truth_type, lhs,
5235 build_int_cst (TREE_TYPE (lhs), 0));
5236 lcode = NE_EXPR;
5239 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5241 rhs = build2 (NE_EXPR, truth_type, rhs,
5242 build_int_cst (TREE_TYPE (rhs), 0));
5243 rcode = NE_EXPR;
5246 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5247 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5248 return 0;
5250 ll_arg = TREE_OPERAND (lhs, 0);
5251 lr_arg = TREE_OPERAND (lhs, 1);
5252 rl_arg = TREE_OPERAND (rhs, 0);
5253 rr_arg = TREE_OPERAND (rhs, 1);
5255 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5256 if (simple_operand_p (ll_arg)
5257 && simple_operand_p (lr_arg))
5259 if (operand_equal_p (ll_arg, rl_arg, 0)
5260 && operand_equal_p (lr_arg, rr_arg, 0))
5262 result = combine_comparisons (loc, code, lcode, rcode,
5263 truth_type, ll_arg, lr_arg);
5264 if (result)
5265 return result;
5267 else if (operand_equal_p (ll_arg, rr_arg, 0)
5268 && operand_equal_p (lr_arg, rl_arg, 0))
5270 result = combine_comparisons (loc, code, lcode,
5271 swap_tree_comparison (rcode),
5272 truth_type, ll_arg, lr_arg);
5273 if (result)
5274 return result;
5278 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5279 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5281 /* If the RHS can be evaluated unconditionally and its operands are
5282 simple, it wins to evaluate the RHS unconditionally on machines
5283 with expensive branches. In this case, this isn't a comparison
5284 that can be merged. */
5286 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5287 false) >= 2
5288 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5289 && simple_operand_p (rl_arg)
5290 && simple_operand_p (rr_arg))
5292 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5293 if (code == TRUTH_OR_EXPR
5294 && lcode == NE_EXPR && integer_zerop (lr_arg)
5295 && rcode == NE_EXPR && integer_zerop (rr_arg)
5296 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5297 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5298 return build2_loc (loc, NE_EXPR, truth_type,
5299 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5300 ll_arg, rl_arg),
5301 build_int_cst (TREE_TYPE (ll_arg), 0));
5303 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5304 if (code == TRUTH_AND_EXPR
5305 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5306 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5307 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5308 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5309 return build2_loc (loc, EQ_EXPR, truth_type,
5310 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5311 ll_arg, rl_arg),
5312 build_int_cst (TREE_TYPE (ll_arg), 0));
5315 /* See if the comparisons can be merged. Then get all the parameters for
5316 each side. */
5318 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5319 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5320 return 0;
5322 volatilep = 0;
5323 ll_inner = decode_field_reference (loc, ll_arg,
5324 &ll_bitsize, &ll_bitpos, &ll_mode,
5325 &ll_unsignedp, &volatilep, &ll_mask,
5326 &ll_and_mask);
5327 lr_inner = decode_field_reference (loc, lr_arg,
5328 &lr_bitsize, &lr_bitpos, &lr_mode,
5329 &lr_unsignedp, &volatilep, &lr_mask,
5330 &lr_and_mask);
5331 rl_inner = decode_field_reference (loc, rl_arg,
5332 &rl_bitsize, &rl_bitpos, &rl_mode,
5333 &rl_unsignedp, &volatilep, &rl_mask,
5334 &rl_and_mask);
5335 rr_inner = decode_field_reference (loc, rr_arg,
5336 &rr_bitsize, &rr_bitpos, &rr_mode,
5337 &rr_unsignedp, &volatilep, &rr_mask,
5338 &rr_and_mask);
5340 /* The inner operation on the lhs of each comparison must be the
5341 same if we are to be able to do anything.
5342 Then see if we have constants. If not, the same must be true for
5343 the rhs's. */
5344 if (volatilep || ll_inner == 0 || rl_inner == 0
5345 || ! operand_equal_p (ll_inner, rl_inner, 0))
5346 return 0;
5348 if (TREE_CODE (lr_arg) == INTEGER_CST
5349 && TREE_CODE (rr_arg) == INTEGER_CST)
5350 l_const = lr_arg, r_const = rr_arg;
5351 else if (lr_inner == 0 || rr_inner == 0
5352 || ! operand_equal_p (lr_inner, rr_inner, 0))
5353 return 0;
5354 else
5355 l_const = r_const = 0;
5357 /* If either comparison code is not correct for our logical operation,
5358 fail. However, we can convert a one-bit comparison against zero into
5359 the opposite comparison against that bit being set in the field. */
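  /* (Illustrative: "(X & 8) == 0" tests the same thing as
     "(X & 8) != 8", which is how a comparison with the wrong code for
     our logical operation can still be salvaged below.)  */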
5361 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5362 if (lcode != wanted_code)
5364 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5366 /* Make the left operand unsigned, since we are only interested
5367 in the value of one bit. Otherwise we are doing the wrong
5368 thing below. */
5369 ll_unsignedp = 1;
5370 l_const = ll_mask;
5372 else
5373 return 0;
5376 /* This is analogous to the code for l_const above. */
5377 if (rcode != wanted_code)
5379 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5381 rl_unsignedp = 1;
5382 r_const = rl_mask;
5384 else
5385 return 0;
5388 /* See if we can find a mode that contains both fields being compared on
5389 the left. If we can't, fail. Otherwise, update all constants and masks
5390 to be relative to a field of that size. */
5391 first_bit = MIN (ll_bitpos, rl_bitpos);
5392 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5393 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5394 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5395 volatilep);
5396 if (lnmode == VOIDmode)
5397 return 0;
5399 lnbitsize = GET_MODE_BITSIZE (lnmode);
5400 lnbitpos = first_bit & ~ (lnbitsize - 1);
5401 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5402 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5404 if (BYTES_BIG_ENDIAN)
5406 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5407 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5410 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5411 size_int (xll_bitpos));
5412 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5413 size_int (xrl_bitpos));
5415 if (l_const)
5417 l_const = fold_convert_loc (loc, lntype, l_const);
5418 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5419 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5420 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5421 fold_build1_loc (loc, BIT_NOT_EXPR,
5422 lntype, ll_mask))))
5424 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5426 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5429 if (r_const)
5431 r_const = fold_convert_loc (loc, lntype, r_const);
5432 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5433 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5434 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5435 fold_build1_loc (loc, BIT_NOT_EXPR,
5436 lntype, rl_mask))))
5438 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5440 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5444 /* If the right sides are not constant, do the same for them. Also,
5445 disallow this optimization if a size or signedness mismatch occurs
5446 between the left and right sides. */
5447 if (l_const == 0)
5449 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5450 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5451 /* Make sure the two fields on the right
5452 correspond to the left without being swapped. */
5453 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5454 return 0;
5456 first_bit = MIN (lr_bitpos, rr_bitpos);
5457 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5458 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5459 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5460 volatilep);
5461 if (rnmode == VOIDmode)
5462 return 0;
5464 rnbitsize = GET_MODE_BITSIZE (rnmode);
5465 rnbitpos = first_bit & ~ (rnbitsize - 1);
5466 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5467 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5469 if (BYTES_BIG_ENDIAN)
5471 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5472 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5475 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5476 rntype, lr_mask),
5477 size_int (xlr_bitpos));
5478 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5479 rntype, rr_mask),
5480 size_int (xrr_bitpos));
5482 /* Make a mask that corresponds to both fields being compared.
5483 Do this for both items being compared. If the operands are the
5484 same size and the bits being compared are in the same position
5485 then we can do this by masking both and comparing the masked
5486 results. */
5487 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5488 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5489 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5491 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5492 ll_unsignedp || rl_unsignedp);
5493 if (! all_ones_mask_p (ll_mask, lnbitsize))
5494 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5496 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5497 lr_unsignedp || rr_unsignedp);
5498 if (! all_ones_mask_p (lr_mask, rnbitsize))
5499 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5501 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5504 /* There is still another way we can do something: If both pairs of
5505 fields being compared are adjacent, we may be able to make a wider
5506 field containing them both.
5508 Note that we still must mask the lhs/rhs expressions. Furthermore,
5509 the mask must be shifted to account for the shift done by
5510 make_bit_field_ref. */
5511 if ((ll_bitsize + ll_bitpos == rl_bitpos
5512 && lr_bitsize + lr_bitpos == rr_bitpos)
5513 || (ll_bitpos == rl_bitpos + rl_bitsize
5514 && lr_bitpos == rr_bitpos + rr_bitsize))
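/* For example, two adjacent 8-bit fields at bit positions 0 and 8 on
each side can be fetched together as a single 16-bit reference. */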
5516 tree type;
5518 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5519 ll_bitsize + rl_bitsize,
5520 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5521 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5522 lr_bitsize + rr_bitsize,
5523 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5525 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5526 size_int (MIN (xll_bitpos, xrl_bitpos)));
5527 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5528 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5530 /* Convert to the smaller type before masking out unwanted bits. */
5531 type = lntype;
5532 if (lntype != rntype)
5534 if (lnbitsize > rnbitsize)
5536 lhs = fold_convert_loc (loc, rntype, lhs);
5537 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5538 type = rntype;
5540 else if (lnbitsize < rnbitsize)
5542 rhs = fold_convert_loc (loc, lntype, rhs);
5543 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5544 type = lntype;
5548 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5549 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5551 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5552 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5554 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5557 return 0;
5560 /* Handle the case of comparisons with constants. If there is something in
5561 common between the masks, those bits of the constants must be the same.
5562 If not, the condition is always false. Test for this to avoid generating
5563 incorrect code below. */
5564 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5565 if (! integer_zerop (result)
5566 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5567 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5569 if (wanted_code == NE_EXPR)
5571 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5572 return constant_boolean_node (true, truth_type);
5574 else
5576 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5577 return constant_boolean_node (false, truth_type);
5581 /* Construct the expression we will return. First get the component
5582 reference we will make. Unless the mask is all ones the width of
5583 that field, perform the mask operation. Then compare with the
5584 merged constant. */
5585 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5586 ll_unsignedp || rl_unsignedp);
5588 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5589 if (! all_ones_mask_p (ll_mask, lnbitsize))
5590 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5592 return build2_loc (loc, wanted_code, truth_type, result,
5593 const_binop (BIT_IOR_EXPR, l_const, r_const));
5596 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5597 constant. */
5599 static tree
5600 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5601 tree op0, tree op1)
5603 tree arg0 = op0;
5604 enum tree_code op_code;
5605 tree comp_const;
5606 tree minmax_const;
5607 int consts_equal, consts_lt;
5608 tree inner;
5610 STRIP_SIGN_NOPS (arg0);
5612 op_code = TREE_CODE (arg0);
5613 minmax_const = TREE_OPERAND (arg0, 1);
5614 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5615 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5616 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5617 inner = TREE_OPERAND (arg0, 0);
5619 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5620 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5621 || TREE_CODE (comp_const) != INTEGER_CST
5622 || TREE_OVERFLOW (comp_const)
5623 || TREE_CODE (minmax_const) != INTEGER_CST
5624 || TREE_OVERFLOW (minmax_const))
5625 return NULL_TREE;
5627 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5628 and GT_EXPR, doing the rest with recursive calls using logical
5629 simplifications. */
5630 switch (code)
5632 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5634 tree tem
5635 = optimize_minmax_comparison (loc,
5636 invert_tree_comparison (code, false),
5637 type, op0, op1);
5638 if (tem)
5639 return invert_truthvalue_loc (loc, tem);
5640 return NULL_TREE;
5643 case GE_EXPR:
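/* X >= C is equivalent to X == C || X > C, so handle each half with a
recursive call and join them with TRUTH_ORIF_EXPR. */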
5644 return
5645 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5646 optimize_minmax_comparison
5647 (loc, EQ_EXPR, type, arg0, comp_const),
5648 optimize_minmax_comparison
5649 (loc, GT_EXPR, type, arg0, comp_const));
5651 case EQ_EXPR:
5652 if (op_code == MAX_EXPR && consts_equal)
5653 /* MAX (X, 0) == 0 -> X <= 0 */
5654 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5656 else if (op_code == MAX_EXPR && consts_lt)
5657 /* MAX (X, 0) == 5 -> X == 5 */
5658 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5660 else if (op_code == MAX_EXPR)
5661 /* MAX (X, 0) == -1 -> false */
5662 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5664 else if (consts_equal)
5665 /* MIN (X, 0) == 0 -> X >= 0 */
5666 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5668 else if (consts_lt)
5669 /* MIN (X, 0) == 5 -> false */
5670 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5672 else
5673 /* MIN (X, 0) == -1 -> X == -1 */
5674 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5676 case GT_EXPR:
5677 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5678 /* MAX (X, 0) > 0 -> X > 0
5679 MAX (X, 0) > 5 -> X > 5 */
5680 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5682 else if (op_code == MAX_EXPR)
5683 /* MAX (X, 0) > -1 -> true */
5684 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5686 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5687 /* MIN (X, 0) > 0 -> false
5688 MIN (X, 0) > 5 -> false */
5689 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5691 else
5692 /* MIN (X, 0) > -1 -> X > -1 */
5693 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5695 default:
5696 return NULL_TREE;
5700 /* T is an integer expression that is being multiplied by, divided by, or
5701 taken modulo a constant C (CODE says which operation and what kind of
5702 divide or modulus). See if we can eliminate that operation by folding it with
5703 other operations already in T. WIDE_TYPE, if non-null, is a type that
5704 should be used for the computation if wider than our type.
5706 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5707 (X * 2) + (Y * 4). We must, however, be assured that either the original
5708 expression would not overflow or that overflow is undefined for the type
5709 in the language in question.
5711 If we return a non-null expression, it is an equivalent form of the
5712 original computation, but need not be in the original type.
5714 We set *STRICT_OVERFLOW_P to true if the return value depends on
5715 signed overflow being undefined. Otherwise we do not change
5716 *STRICT_OVERFLOW_P. */
5718 static tree
5719 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5720 bool *strict_overflow_p)
5722 /* To avoid exponential search depth, refuse to allow recursion past
5723 three levels. Beyond that (1) it's highly unlikely that we'll find
5724 something interesting and (2) we've probably processed it before
5725 when we built the inner expression. */
5727 static int depth;
5728 tree ret;
5730 if (depth > 3)
5731 return NULL;
5733 depth++;
5734 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5735 depth--;
5737 return ret;
5740 static tree
5741 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5742 bool *strict_overflow_p)
5744 tree type = TREE_TYPE (t);
5745 enum tree_code tcode = TREE_CODE (t);
5746 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5747 > GET_MODE_SIZE (TYPE_MODE (type)))
5748 ? wide_type : type);
5749 tree t1, t2;
5750 int same_p = tcode == code;
5751 tree op0 = NULL_TREE, op1 = NULL_TREE;
5752 bool sub_strict_overflow_p;
5754 /* Don't deal with constants of zero here; they confuse the code below. */
5755 if (integer_zerop (c))
5756 return NULL_TREE;
5758 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5759 op0 = TREE_OPERAND (t, 0);
5761 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5762 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5764 /* Note that we need not handle conditional operations here since fold
5765 already handles those cases. So just do arithmetic here. */
5766 switch (tcode)
5768 case INTEGER_CST:
5769 /* For a constant, we can always simplify if we are a multiply
5770 or (for divide and modulus) if it is a multiple of our constant. */
5771 if (code == MULT_EXPR
5772 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5773 return const_binop (code, fold_convert (ctype, t),
5774 fold_convert (ctype, c));
5775 break;
5777 CASE_CONVERT: case NON_LVALUE_EXPR:
5778 /* If op0 is an expression ... */
5779 if ((COMPARISON_CLASS_P (op0)
5780 || UNARY_CLASS_P (op0)
5781 || BINARY_CLASS_P (op0)
5782 || VL_EXP_CLASS_P (op0)
5783 || EXPRESSION_CLASS_P (op0))
5784 /* ... and has wrapping overflow, and its type is smaller
5785 than ctype, then we cannot pass through as widening. */
5786 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5787 && (TYPE_PRECISION (ctype)
5788 > TYPE_PRECISION (TREE_TYPE (op0))))
5789 /* ... or this is a truncation (t is narrower than op0),
5790 then we cannot pass through this narrowing. */
5791 || (TYPE_PRECISION (type)
5792 < TYPE_PRECISION (TREE_TYPE (op0)))
5793 /* ... or signedness changes for division or modulus,
5794 then we cannot pass through this conversion. */
5795 || (code != MULT_EXPR
5796 && (TYPE_UNSIGNED (ctype)
5797 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5798 /* ... or OP0's type has undefined overflow while the type it is
5799 converted to does not, then we cannot do the operation in the inner type
5800 as that would introduce undefined overflow. */
5801 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5802 && !TYPE_OVERFLOW_UNDEFINED (type))))
5803 break;
5805 /* Pass the constant down and see if we can make a simplification. If
5806 we can, replace this expression with the inner simplification for
5807 possible later conversion to our or some other type. */
5808 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5809 && TREE_CODE (t2) == INTEGER_CST
5810 && !TREE_OVERFLOW (t2)
5811 && (0 != (t1 = extract_muldiv (op0, t2, code,
5812 code == MULT_EXPR
5813 ? ctype : NULL_TREE,
5814 strict_overflow_p))))
5815 return t1;
5816 break;
5818 case ABS_EXPR:
5819 /* If widening the type changes it from signed to unsigned, then we
5820 must avoid building ABS_EXPR itself as unsigned. */
5821 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5823 tree cstype = (*signed_type_for) (ctype);
5824 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5825 != 0)
5827 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5828 return fold_convert (ctype, t1);
5830 break;
5832 /* If the constant is negative, we cannot simplify this. */
5833 if (tree_int_cst_sgn (c) == -1)
5834 break;
5835 /* FALLTHROUGH */
5836 case NEGATE_EXPR:
5837 /* For division and modulus, type can't be unsigned, as e.g.
5838 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5839 For signed types, even with wrapping overflow, this is fine. */
5840 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5841 break;
5842 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5843 != 0)
5844 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5845 break;
5847 case MIN_EXPR: case MAX_EXPR:
5848 /* If widening the type changes the signedness, then we can't perform
5849 this optimization as that changes the result. */
5850 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5851 break;
5853 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5854 sub_strict_overflow_p = false;
5855 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5856 &sub_strict_overflow_p)) != 0
5857 && (t2 = extract_muldiv (op1, c, code, wide_type,
5858 &sub_strict_overflow_p)) != 0)
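/* A negative constant reverses the ordering, so MIN and MAX must be
swapped: MIN (a, b) / -5 == MAX (a / -5, b / -5). */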
5860 if (tree_int_cst_sgn (c) < 0)
5861 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5862 if (sub_strict_overflow_p)
5863 *strict_overflow_p = true;
5864 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5865 fold_convert (ctype, t2));
5867 break;
5869 case LSHIFT_EXPR: case RSHIFT_EXPR:
5870 /* If the second operand is constant, this is a multiplication
5871 or floor division, by a power of two, so we can treat it that
5872 way unless the multiplier or divisor overflows. Signed
5873 left-shift overflow is implementation-defined rather than
5874 undefined in C90, so do not convert signed left shift into
5875 multiplication. */
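/* For instance, X << 3 is rewritten as X * 8, and X >> 3 as the floor
division X / 8, before recursing. */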
5876 if (TREE_CODE (op1) == INTEGER_CST
5877 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5878 /* const_binop may not detect overflow correctly,
5879 so check for it explicitly here. */
5880 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5881 && TREE_INT_CST_HIGH (op1) == 0
5882 && 0 != (t1 = fold_convert (ctype,
5883 const_binop (LSHIFT_EXPR,
5884 size_one_node,
5885 op1)))
5886 && !TREE_OVERFLOW (t1))
5887 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5888 ? MULT_EXPR : FLOOR_DIV_EXPR,
5889 ctype,
5890 fold_convert (ctype, op0),
5891 t1),
5892 c, code, wide_type, strict_overflow_p);
5893 break;
5895 case PLUS_EXPR: case MINUS_EXPR:
5896 /* See if we can eliminate the operation on both sides. If we can, we
5897 can return a new PLUS or MINUS. If we can't, the only remaining
5898 cases where we can do anything are if the second operand is a
5899 constant. */
5900 sub_strict_overflow_p = false;
5901 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5902 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5903 if (t1 != 0 && t2 != 0
5904 && (code == MULT_EXPR
5905 /* If not multiplication, we can only do this if both operands
5906 are divisible by c. */
5907 || (multiple_of_p (ctype, op0, c)
5908 && multiple_of_p (ctype, op1, c))))
5910 if (sub_strict_overflow_p)
5911 *strict_overflow_p = true;
5912 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5913 fold_convert (ctype, t2));
5916 /* If this was a subtraction, negate OP1 and set it to be an addition.
5917 This simplifies the logic below. */
5918 if (tcode == MINUS_EXPR)
5920 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5921 /* If OP1 was not easily negatable, the constant may be OP0. */
5922 if (TREE_CODE (op0) == INTEGER_CST)
5924 tree tem = op0;
5925 op0 = op1;
5926 op1 = tem;
5927 tem = t1;
5928 t1 = t2;
5929 t2 = tem;
5933 if (TREE_CODE (op1) != INTEGER_CST)
5934 break;
5936 /* If either OP1 or C are negative, this optimization is not safe for
5937 some of the division and remainder types while for others we need
5938 to change the code. */
5939 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5941 if (code == CEIL_DIV_EXPR)
5942 code = FLOOR_DIV_EXPR;
5943 else if (code == FLOOR_DIV_EXPR)
5944 code = CEIL_DIV_EXPR;
5945 else if (code != MULT_EXPR
5946 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5947 break;
5950 /* If it's a multiply or a division/modulus operation of a multiple
5951 of our constant, do the operation and verify it doesn't overflow. */
5952 if (code == MULT_EXPR
5953 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5955 op1 = const_binop (code, fold_convert (ctype, op1),
5956 fold_convert (ctype, c));
5957 /* We allow the constant to overflow with wrapping semantics. */
5958 if (op1 == 0
5959 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5960 break;
5962 else
5963 break;
5965 /* If we have an unsigned type, we cannot widen the operation since it
5966 will change the result if the original computation overflowed. */
5967 if (TYPE_UNSIGNED (ctype) && ctype != type)
5968 break;
5970 /* If we were able to eliminate our operation from the first side,
5971 apply our operation to the second side and reform the PLUS. */
5972 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5975 /* The last case is if we are a multiply. In that case, we can
5976 apply the distributive law to commute the multiply and addition
5977 if the multiplication of the constants doesn't overflow
5978 and overflow is defined. With undefined overflow
5979 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5980 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5981 return fold_build2 (tcode, ctype,
5982 fold_build2 (code, ctype,
5983 fold_convert (ctype, op0),
5984 fold_convert (ctype, c)),
5985 op1);
5987 break;
5989 case MULT_EXPR:
5990 /* We have a special case here if we are doing something like
5991 (C * 8) % 4 since we know that's zero. */
5992 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5993 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5994 /* If the multiplication can overflow we cannot optimize this. */
5995 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5996 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5997 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5999 *strict_overflow_p = true;
6000 return omit_one_operand (type, integer_zero_node, op0);
6003 /* ... fall through ... */
6005 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6006 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6007 /* If we can extract our operation from the LHS, do so and return a
6008 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6009 do something only if the second operand is a constant. */
6010 if (same_p
6011 && (t1 = extract_muldiv (op0, c, code, wide_type,
6012 strict_overflow_p)) != 0)
6013 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6014 fold_convert (ctype, op1));
6015 else if (tcode == MULT_EXPR && code == MULT_EXPR
6016 && (t1 = extract_muldiv (op1, c, code, wide_type,
6017 strict_overflow_p)) != 0)
6018 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6019 fold_convert (ctype, t1));
6020 else if (TREE_CODE (op1) != INTEGER_CST)
6021 return 0;
6023 /* If these are the same operation types, we can associate them
6024 assuming no overflow. */
6025 if (tcode == code)
6027 double_int mul;
6028 bool overflow_p;
6029 unsigned prec = TYPE_PRECISION (ctype);
6030 bool uns = TYPE_UNSIGNED (ctype);
6031 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6032 double_int dic = tree_to_double_int (c).ext (prec, uns);
6033 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6034 overflow_p = ((!uns && overflow_p)
6035 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6036 if (!double_int_fits_to_tree_p (ctype, mul)
6037 && ((uns && tcode != MULT_EXPR) || !uns))
6038 overflow_p = 1;
6039 if (!overflow_p)
6040 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6041 double_int_to_tree (ctype, mul));
6044 /* If these operations "cancel" each other, we have the main
6045 optimizations of this pass, which occur when either constant is a
6046 multiple of the other, in which case we replace this with an
6047 operation of either CODE or TCODE.
6049 If we have an unsigned type, we cannot do this since it will change
6050 the result if the original computation overflowed. */
6051 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6052 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6053 || (tcode == MULT_EXPR
6054 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6055 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6056 && code != MULT_EXPR)))
6058 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6060 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6061 *strict_overflow_p = true;
6062 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6063 fold_convert (ctype,
6064 const_binop (TRUNC_DIV_EXPR,
6065 op1, c)));
6067 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6069 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6070 *strict_overflow_p = true;
6071 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6072 fold_convert (ctype,
6073 const_binop (TRUNC_DIV_EXPR,
6074 c, op1)));
6077 break;
6079 default:
6080 break;
6083 return 0;
6086 /* Return a node which has the indicated constant VALUE (either 0 or
6087 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6088 and is of the indicated TYPE. */
6090 tree
6091 constant_boolean_node (bool value, tree type)
6093 if (type == integer_type_node)
6094 return value ? integer_one_node : integer_zero_node;
6095 else if (type == boolean_type_node)
6096 return value ? boolean_true_node : boolean_false_node;
6097 else if (TREE_CODE (type) == VECTOR_TYPE)
6098 return build_vector_from_val (type,
6099 build_int_cst (TREE_TYPE (type),
6100 value ? -1 : 0));
6101 else
6102 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6106 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6107 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6108 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6109 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6110 COND is the first argument to CODE; otherwise (as in the example
6111 given here), it is the second argument. TYPE is the type of the
6112 original expression. Return NULL_TREE if no simplification is
6113 possible. */
6115 static tree
6116 fold_binary_op_with_conditional_arg (location_t loc,
6117 enum tree_code code,
6118 tree type, tree op0, tree op1,
6119 tree cond, tree arg, int cond_first_p)
6121 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6122 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6123 tree test, true_value, false_value;
6124 tree lhs = NULL_TREE;
6125 tree rhs = NULL_TREE;
6126 enum tree_code cond_code = COND_EXPR;
6128 if (TREE_CODE (cond) == COND_EXPR
6129 || TREE_CODE (cond) == VEC_COND_EXPR)
6131 test = TREE_OPERAND (cond, 0);
6132 true_value = TREE_OPERAND (cond, 1);
6133 false_value = TREE_OPERAND (cond, 2);
6134 /* If this operand throws an exception, then it does not make
6135 sense to try to perform a logical or arithmetic operation
6136 involving it. */
6137 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6138 lhs = true_value;
6139 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6140 rhs = false_value;
6142 else
6144 tree testtype = TREE_TYPE (cond);
6145 test = cond;
6146 true_value = constant_boolean_node (true, testtype);
6147 false_value = constant_boolean_node (false, testtype);
6150 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6151 cond_code = VEC_COND_EXPR;
6153 /* This transformation is only worthwhile if we don't have to wrap ARG
6154 in a SAVE_EXPR and the operation can be simplified without recursing
6155 on at least one of the branches once it's pushed inside the COND_EXPR. */
6156 if (!TREE_CONSTANT (arg)
6157 && (TREE_SIDE_EFFECTS (arg)
6158 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6159 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6160 return NULL_TREE;
6162 arg = fold_convert_loc (loc, arg_type, arg);
6163 if (lhs == 0)
6165 true_value = fold_convert_loc (loc, cond_type, true_value);
6166 if (cond_first_p)
6167 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6168 else
6169 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6171 if (rhs == 0)
6173 false_value = fold_convert_loc (loc, cond_type, false_value);
6174 if (cond_first_p)
6175 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6176 else
6177 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6180 /* Check that we have simplified at least one of the branches. */
6181 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6182 return NULL_TREE;
6184 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6188 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6190 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6191 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6192 ADDEND is the same as X.
6194 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6195 and finite. The problematic cases are when X is zero, and its mode
6196 has signed zeros. In the case of rounding towards -infinity,
6197 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6198 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6200 bool
6201 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6203 if (!real_zerop (addend))
6204 return false;
6206 /* Don't allow the fold with -fsignaling-nans. */
6207 if (HONOR_SNANS (TYPE_MODE (type)))
6208 return false;
6210 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6211 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6212 return true;
6214 /* In a vector or complex, we would need to check the sign of all zeros. */
6215 if (TREE_CODE (addend) != REAL_CST)
6216 return false;
6218 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6219 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6220 negate = !negate;
6222 /* The mode has signed zeros, and we have to honor their sign.
6223 In this situation, there is only one case we can return true for.
6224 X - 0 is the same as X unless rounding towards -infinity is
6225 supported. */
6226 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6229 /* Subroutine of fold() that checks comparisons of built-in math
6230 functions against real constants.
6232 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6233 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6234 is the type of the result and ARG0 and ARG1 are the operands of the
6235 comparison. ARG1 must be a TREE_REAL_CST.
6237 The function returns the constant folded tree if a simplification
6238 can be made, and NULL_TREE otherwise. */
6240 static tree
6241 fold_mathfn_compare (location_t loc,
6242 enum built_in_function fcode, enum tree_code code,
6243 tree type, tree arg0, tree arg1)
6245 REAL_VALUE_TYPE c;
6247 if (BUILTIN_SQRT_P (fcode))
6249 tree arg = CALL_EXPR_ARG (arg0, 0);
6250 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6252 c = TREE_REAL_CST (arg1);
6253 if (REAL_VALUE_NEGATIVE (c))
6255 /* sqrt(x) < y is always false, if y is negative. */
6256 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6257 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6259 /* sqrt(x) > y is always true, if y is negative and we
6260 don't care about NaNs, i.e. negative values of x. */
6261 if (code == NE_EXPR || !HONOR_NANS (mode))
6262 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6264 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6265 return fold_build2_loc (loc, GE_EXPR, type, arg,
6266 build_real (TREE_TYPE (arg), dconst0));
6268 else if (code == GT_EXPR || code == GE_EXPR)
6270 REAL_VALUE_TYPE c2;
6272 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6273 real_convert (&c2, mode, &c2);
6275 if (REAL_VALUE_ISINF (c2))
6277 /* sqrt(x) > y is x == +Inf, when y is very large. */
6278 if (HONOR_INFINITIES (mode))
6279 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6280 build_real (TREE_TYPE (arg), c2));
6282 /* sqrt(x) > y is always false, when y is very large
6283 and we don't care about infinities. */
6284 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6287 /* sqrt(x) > c is the same as x > c*c. */
6288 return fold_build2_loc (loc, code, type, arg,
6289 build_real (TREE_TYPE (arg), c2));
6291 else if (code == LT_EXPR || code == LE_EXPR)
6293 REAL_VALUE_TYPE c2;
6295 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6296 real_convert (&c2, mode, &c2);
6298 if (REAL_VALUE_ISINF (c2))
6300 /* sqrt(x) < y is always true, when y is a very large
6301 value and we don't care about NaNs or Infinities. */
6302 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6303 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6305 /* sqrt(x) < y is x != +Inf when y is very large and we
6306 don't care about NaNs. */
6307 if (! HONOR_NANS (mode))
6308 return fold_build2_loc (loc, NE_EXPR, type, arg,
6309 build_real (TREE_TYPE (arg), c2));
6311 /* sqrt(x) < y is x >= 0 when y is very large and we
6312 don't care about Infinities. */
6313 if (! HONOR_INFINITIES (mode))
6314 return fold_build2_loc (loc, GE_EXPR, type, arg,
6315 build_real (TREE_TYPE (arg), dconst0));
6317 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6318 arg = save_expr (arg);
6319 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6320 fold_build2_loc (loc, GE_EXPR, type, arg,
6321 build_real (TREE_TYPE (arg),
6322 dconst0)),
6323 fold_build2_loc (loc, NE_EXPR, type, arg,
6324 build_real (TREE_TYPE (arg),
6325 c2)));
6328 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6329 if (! HONOR_NANS (mode))
6330 return fold_build2_loc (loc, code, type, arg,
6331 build_real (TREE_TYPE (arg), c2));
6333 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6334 arg = save_expr (arg);
6335 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6336 fold_build2_loc (loc, GE_EXPR, type, arg,
6337 build_real (TREE_TYPE (arg),
6338 dconst0)),
6339 fold_build2_loc (loc, code, type, arg,
6340 build_real (TREE_TYPE (arg),
6341 c2)));
6345 return NULL_TREE;
6348 /* Subroutine of fold() that optimizes comparisons against Infinities,
6349 either +Inf or -Inf.
6351 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6352 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6353 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6355 The function returns the constant folded tree if a simplification
6356 can be made, and NULL_TREE otherwise. */
6358 static tree
6359 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6360 tree arg0, tree arg1)
6362 enum machine_mode mode;
6363 REAL_VALUE_TYPE max;
6364 tree temp;
6365 bool neg;
6367 mode = TYPE_MODE (TREE_TYPE (arg0));
6369 /* For negative infinity swap the sense of the comparison. */
6370 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6371 if (neg)
6372 code = swap_tree_comparison (code);
6374 switch (code)
6376 case GT_EXPR:
6377 /* x > +Inf is always false, if we ignore sNaNs. */
6378 if (HONOR_SNANS (mode))
6379 return NULL_TREE;
6380 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6382 case LE_EXPR:
6383 /* x <= +Inf is always true, if we don't care about NaNs. */
6384 if (! HONOR_NANS (mode))
6385 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6387 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6388 arg0 = save_expr (arg0);
6389 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6391 case EQ_EXPR:
6392 case GE_EXPR:
6393 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6394 real_maxval (&max, neg, mode);
6395 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6396 arg0, build_real (TREE_TYPE (arg0), max));
6398 case LT_EXPR:
6399 /* x < +Inf is always equal to x <= DBL_MAX. */
6400 real_maxval (&max, neg, mode);
6401 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6402 arg0, build_real (TREE_TYPE (arg0), max));
6404 case NE_EXPR:
6405 /* x != +Inf is always equal to !(x > DBL_MAX). */
6406 real_maxval (&max, neg, mode);
6407 if (! HONOR_NANS (mode))
6408 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6409 arg0, build_real (TREE_TYPE (arg0), max));
6411 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
6413 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6415 default:
6416 break;
6419 return NULL_TREE;
6422 /* Subroutine of fold() that optimizes comparisons of a division by
6423 a nonzero integer constant against an integer constant, i.e.
6424 X/C1 op C2.
6426 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6427 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6428 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6430 The function returns the constant folded tree if a simplification
6431 can be made, and NULL_TREE otherwise. */
6433 static tree
6434 fold_div_compare (location_t loc,
6435 enum tree_code code, tree type, tree arg0, tree arg1)
6437 tree prod, tmp, hi, lo;
6438 tree arg00 = TREE_OPERAND (arg0, 0);
6439 tree arg01 = TREE_OPERAND (arg0, 1);
6440 double_int val;
6441 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6442 bool neg_overflow;
6443 bool overflow;
6445 /* We have to do this the hard way to detect unsigned overflow.
6446 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6447 val = TREE_INT_CST (arg01)
6448 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6449 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6450 neg_overflow = false;
6452 if (unsigned_p)
6454 tmp = int_const_binop (MINUS_EXPR, arg01,
6455 build_int_cst (TREE_TYPE (arg01), 1));
6456 lo = prod;
6458 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6459 val = TREE_INT_CST (prod)
6460 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6461 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6462 -1, overflow | TREE_OVERFLOW (prod));
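/* E.g. for unsigned X / 3 == 2 this gives prod == 6 and tmp == 2, so
the test becomes the range check 6 <= X && X <= 8. */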
6464 else if (tree_int_cst_sgn (arg01) >= 0)
6466 tmp = int_const_binop (MINUS_EXPR, arg01,
6467 build_int_cst (TREE_TYPE (arg01), 1));
6468 switch (tree_int_cst_sgn (arg1))
6470 case -1:
6471 neg_overflow = true;
6472 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6473 hi = prod;
6474 break;
6476 case 0:
6477 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6478 hi = tmp;
6479 break;
6481 case 1:
6482 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6483 lo = prod;
6484 break;
6486 default:
6487 gcc_unreachable ();
6490 else
6492 /* A negative divisor reverses the relational operators. */
6493 code = swap_tree_comparison (code);
6495 tmp = int_const_binop (PLUS_EXPR, arg01,
6496 build_int_cst (TREE_TYPE (arg01), 1));
6497 switch (tree_int_cst_sgn (arg1))
6499 case -1:
6500 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6501 lo = prod;
6502 break;
6504 case 0:
6505 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6506 lo = tmp;
6507 break;
6509 case 1:
6510 neg_overflow = true;
6511 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6512 hi = prod;
6513 break;
6515 default:
6516 gcc_unreachable ();
6520 switch (code)
6522 case EQ_EXPR:
6523 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6524 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6525 if (TREE_OVERFLOW (hi))
6526 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6527 if (TREE_OVERFLOW (lo))
6528 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6529 return build_range_check (loc, type, arg00, 1, lo, hi);
6531 case NE_EXPR:
6532 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6533 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6534 if (TREE_OVERFLOW (hi))
6535 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6536 if (TREE_OVERFLOW (lo))
6537 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6538 return build_range_check (loc, type, arg00, 0, lo, hi);
6540 case LT_EXPR:
6541 if (TREE_OVERFLOW (lo))
6543 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6544 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6548 case LE_EXPR:
6549 if (TREE_OVERFLOW (hi))
6551 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6552 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6556 case GT_EXPR:
6557 if (TREE_OVERFLOW (hi))
6559 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6560 return omit_one_operand_loc (loc, type, tmp, arg00);
6562 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6564 case GE_EXPR:
6565 if (TREE_OVERFLOW (lo))
6567 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6568 return omit_one_operand_loc (loc, type, tmp, arg00);
6570 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6572 default:
6573 break;
6576 return NULL_TREE;
6580 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6581 equality/inequality test, then return a simplified form of the test
6582 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6583 desired result type. */
6585 static tree
6586 fold_single_bit_test_into_sign_test (location_t loc,
6587 enum tree_code code, tree arg0, tree arg1,
6588 tree result_type)
6590 /* If this is testing a single bit, we can optimize the test. */
6591 if ((code == NE_EXPR || code == EQ_EXPR)
6592 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6593 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6595 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6596 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6597 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6599 if (arg00 != NULL_TREE
6600 /* This is only a win if casting to a signed type is cheap,
6601 i.e. when arg00's type is not a partial mode. */
6602 && TYPE_PRECISION (TREE_TYPE (arg00))
6603 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6605 tree stype = signed_type_for (TREE_TYPE (arg00));
6606 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6607 result_type,
6608 fold_convert_loc (loc, stype, arg00),
6609 build_int_cst (stype, 0));
6613 return NULL_TREE;
6616 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6617 equality/inequality test, then return a simplified form of
6618 the test using shifts and logical operations. Otherwise return
6619 NULL. RESULT_TYPE is the desired result type. */
6621 tree
6622 fold_single_bit_test (location_t loc, enum tree_code code,
6623 tree arg0, tree arg1, tree result_type)
6625 /* If this is testing a single bit, we can optimize the test. */
6626 if ((code == NE_EXPR || code == EQ_EXPR)
6627 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6628 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6630 tree inner = TREE_OPERAND (arg0, 0);
6631 tree type = TREE_TYPE (arg0);
6632 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6633 enum machine_mode operand_mode = TYPE_MODE (type);
6634 int ops_unsigned;
6635 tree signed_type, unsigned_type, intermediate_type;
6636 tree tem, one;
6638 /* First, see if we can fold the single bit test into a sign-bit
6639 test. */
6640 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6641 result_type);
6642 if (tem)
6643 return tem;
6645 /* Otherwise we have (A & C) != 0 where C is a single bit,
6646 convert that into ((A >> C2) & 1), where C2 = log2(C).
6647 Similarly for (A & C) == 0. */
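/* Concretely, (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0
becomes (((A >> 3) ^ 1) & 1). */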
6649 /* If INNER is a right shift of a constant and it plus BITNUM does
6650 not overflow, adjust BITNUM and INNER. */
6651 if (TREE_CODE (inner) == RSHIFT_EXPR
6652 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6653 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6654 && bitnum < TYPE_PRECISION (type)
6655 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6656 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6658 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6659 inner = TREE_OPERAND (inner, 0);
6662 /* If we are going to be able to omit the AND below, we must do our
6663 operations as unsigned. If we must use the AND, we have a choice.
6664 Normally unsigned is faster, but for some machines signed is. */
6665 #ifdef LOAD_EXTEND_OP
6666 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6667 && !flag_syntax_only) ? 0 : 1;
6668 #else
6669 ops_unsigned = 1;
6670 #endif
6672 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6673 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6674 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6675 inner = fold_convert_loc (loc, intermediate_type, inner);
6677 if (bitnum != 0)
6678 inner = build2 (RSHIFT_EXPR, intermediate_type,
6679 inner, size_int (bitnum));
6681 one = build_int_cst (intermediate_type, 1);
6683 if (code == EQ_EXPR)
6684 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6686 /* Put the AND last so it can combine with more things. */
6687 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6689 /* Make sure to return the proper type. */
6690 inner = fold_convert_loc (loc, result_type, inner);
6692 return inner;
6694 return NULL_TREE;
6697 /* Check whether we are allowed to reorder operands arg0 and arg1,
6698 such that the evaluation of arg1 occurs before arg0. */
6700 static bool
6701 reorder_operands_p (const_tree arg0, const_tree arg1)
6703 if (! flag_evaluation_order)
6704 return true;
6705 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6706 return true;
6707 return ! TREE_SIDE_EFFECTS (arg0)
6708 && ! TREE_SIDE_EFFECTS (arg1);
6711 /* Test whether it is preferable to swap two operands, ARG0 and
6712 ARG1, for example because ARG0 is an integer constant and ARG1
6713 isn't. If REORDER is true, only recommend swapping if we can
6714 evaluate the operands in reverse order. */
6716 bool
6717 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6719 STRIP_SIGN_NOPS (arg0);
6720 STRIP_SIGN_NOPS (arg1);
6722 if (TREE_CODE (arg1) == INTEGER_CST)
6723 return 0;
6724 if (TREE_CODE (arg0) == INTEGER_CST)
6725 return 1;
6727 if (TREE_CODE (arg1) == REAL_CST)
6728 return 0;
6729 if (TREE_CODE (arg0) == REAL_CST)
6730 return 1;
6732 if (TREE_CODE (arg1) == FIXED_CST)
6733 return 0;
6734 if (TREE_CODE (arg0) == FIXED_CST)
6735 return 1;
6737 if (TREE_CODE (arg1) == COMPLEX_CST)
6738 return 0;
6739 if (TREE_CODE (arg0) == COMPLEX_CST)
6740 return 1;
6742 if (TREE_CONSTANT (arg1))
6743 return 0;
6744 if (TREE_CONSTANT (arg0))
6745 return 1;
6747 if (optimize_function_for_size_p (cfun))
6748 return 0;
6750 if (reorder && flag_evaluation_order
6751 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6752 return 0;
6754 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6755 for commutative and comparison operators. Ensuring a canonical
6756 form allows the optimizers to find additional redundancies without
6757 having to explicitly check for both orderings. */
6758 if (TREE_CODE (arg0) == SSA_NAME
6759 && TREE_CODE (arg1) == SSA_NAME
6760 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6761 return 1;
6763 /* Put SSA_NAMEs last. */
6764 if (TREE_CODE (arg1) == SSA_NAME)
6765 return 0;
6766 if (TREE_CODE (arg0) == SSA_NAME)
6767 return 1;
6769 /* Put variables last. */
6770 if (DECL_P (arg1))
6771 return 0;
6772 if (DECL_P (arg0))
6773 return 1;
6775 return 0;
6778 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6779 ARG0 is extended to a wider type. */
6781 static tree
6782 fold_widened_comparison (location_t loc, enum tree_code code,
6783 tree type, tree arg0, tree arg1)
6785 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6786 tree arg1_unw;
6787 tree shorter_type, outer_type;
6788 tree min, max;
6789 bool above, below;
6791 if (arg0_unw == arg0)
6792 return NULL_TREE;
6793 shorter_type = TREE_TYPE (arg0_unw);
6795 #ifdef HAVE_canonicalize_funcptr_for_compare
6796 /* Disable this optimization if we're casting a function pointer
6797 type on targets that require function pointer canonicalization. */
6798 if (HAVE_canonicalize_funcptr_for_compare
6799 && TREE_CODE (shorter_type) == POINTER_TYPE
6800 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6801 return NULL_TREE;
6802 #endif
6804 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6805 return NULL_TREE;
6807 arg1_unw = get_unwidened (arg1, NULL_TREE);
6809 /* If possible, express the comparison in the shorter mode. */
6810 if ((code == EQ_EXPR || code == NE_EXPR
6811 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6812 && (TREE_TYPE (arg1_unw) == shorter_type
6813 || ((TYPE_PRECISION (shorter_type)
6814 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6815 && (TYPE_UNSIGNED (shorter_type)
6816 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6817 || (TREE_CODE (arg1_unw) == INTEGER_CST
6818 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6819 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6820 && int_fits_type_p (arg1_unw, shorter_type))))
6821 return fold_build2_loc (loc, code, type, arg0_unw,
6822 fold_convert_loc (loc, shorter_type, arg1_unw));
6824 if (TREE_CODE (arg1_unw) != INTEGER_CST
6825 || TREE_CODE (shorter_type) != INTEGER_TYPE
6826 || !int_fits_type_p (arg1_unw, shorter_type))
6827 return NULL_TREE;
6829 /* If we are comparing with an integer that does not fit into the range
6830 of the shorter type, the result is known. */
6831 outer_type = TREE_TYPE (arg1_unw);
6832 min = lower_bound_in_type (outer_type, shorter_type);
6833 max = upper_bound_in_type (outer_type, shorter_type);
6835 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6836 max, arg1_unw));
6837 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6838 arg1_unw, min));
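/* E.g. if SHORTER_TYPE is unsigned char and arg1 is 300, ABOVE is set:
x == 300 folds to 0, x != 300 folds to 1, and x < 300 folds to 1. */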
6840 switch (code)
6842 case EQ_EXPR:
6843 if (above || below)
6844 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6845 break;
6847 case NE_EXPR:
6848 if (above || below)
6849 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6850 break;
6852 case LT_EXPR:
6853 case LE_EXPR:
6854 if (above)
6855 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6856 else if (below)
6857 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6859 case GT_EXPR:
6860 case GE_EXPR:
6861 if (above)
6862 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6863 else if (below)
6864 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6866 default:
6867 break;
6870 return NULL_TREE;
6873 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6874 ARG0 just the signedness is changed. */
6876 static tree
6877 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6878 tree arg0, tree arg1)
6880 tree arg0_inner;
6881 tree inner_type, outer_type;
6883 if (!CONVERT_EXPR_P (arg0))
6884 return NULL_TREE;
6886 outer_type = TREE_TYPE (arg0);
6887 arg0_inner = TREE_OPERAND (arg0, 0);
6888 inner_type = TREE_TYPE (arg0_inner);
6890 #ifdef HAVE_canonicalize_funcptr_for_compare
6891 /* Disable this optimization if we're casting a function pointer
6892 type on targets that require function pointer canonicalization. */
6893 if (HAVE_canonicalize_funcptr_for_compare
6894 && TREE_CODE (inner_type) == POINTER_TYPE
6895 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6896 return NULL_TREE;
6897 #endif
6899 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6900 return NULL_TREE;
6902 if (TREE_CODE (arg1) != INTEGER_CST
6903 && !(CONVERT_EXPR_P (arg1)
6904 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6905 return NULL_TREE;
6907 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6908 && code != NE_EXPR
6909 && code != EQ_EXPR)
6910 return NULL_TREE;
6912 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6913 return NULL_TREE;
6915 if (TREE_CODE (arg1) == INTEGER_CST)
6916 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6917 0, TREE_OVERFLOW (arg1));
6918 else
6919 arg1 = fold_convert_loc (loc, inner_type, arg1);
6921 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6924 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6925 the step of the array. Reconstructs s and delta in the case of s *
6926 delta being an integer constant (and thus already folded). ADDR is
6927 the address. OP1 is the multiplicative expression. If the
6928 function succeeds, the new address expression is returned.
6929 Otherwise NULL_TREE is returned. LOC is the location of the
6930 resulting expression. */
6932 static tree
6933 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6935 tree s, delta, step;
6936 tree ref = TREE_OPERAND (addr, 0), pref;
6937 tree ret, pos;
6938 tree itype;
6939 bool mdim = false;
6941 /* Strip the nops that might be added when converting op1 to sizetype. */
6942 STRIP_NOPS (op1);
6944 /* Canonicalize op1 into a possibly non-constant delta
6945 and an INTEGER_CST s. */
6946 if (TREE_CODE (op1) == MULT_EXPR)
6948 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6950 STRIP_NOPS (arg0);
6951 STRIP_NOPS (arg1);
6953 if (TREE_CODE (arg0) == INTEGER_CST)
6955 s = arg0;
6956 delta = arg1;
6958 else if (TREE_CODE (arg1) == INTEGER_CST)
6960 s = arg1;
6961 delta = arg0;
6963 else
6964 return NULL_TREE;
6966 else if (TREE_CODE (op1) == INTEGER_CST)
6968 delta = op1;
6969 s = NULL_TREE;
6971 else
6973 /* Treat op1 as delta * 1. */
6974 delta = op1;
6975 s = integer_one_node;
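/* At this point OP1 has been split as s * delta, with s an INTEGER_CST,
s == 1, or s == NULL_TREE when OP1 itself is constant; e.g. for
&a[i] p+ 4 * j we get s == 4 and delta == j. */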
6978 /* Handle &x.array the same as we would handle &x.array[0]. */
6979 if (TREE_CODE (ref) == COMPONENT_REF
6980 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6982 tree domain;
6984 /* Remember if this was a multi-dimensional array. */
6985 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6986 mdim = true;
6988 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6989 if (! domain)
6990 goto cont;
6991 itype = TREE_TYPE (domain);
6993 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6994 if (TREE_CODE (step) != INTEGER_CST)
6995 goto cont;
6997 if (s)
6999 if (! tree_int_cst_equal (step, s))
7000 goto cont;
7002 else
7004 /* Check whether delta is a multiple of step. */
7005 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7006 if (! tmp)
7007 goto cont;
7008 delta = tmp;
7011 /* Only fold here if we can verify we do not overflow one
7012 dimension of a multi-dimensional array. */
7013 if (mdim)
7015 tree tmp;
7017 if (!TYPE_MIN_VALUE (domain)
7018 || !TYPE_MAX_VALUE (domain)
7019 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7020 goto cont;
7022 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7023 fold_convert_loc (loc, itype,
7024 TYPE_MIN_VALUE (domain)),
7025 fold_convert_loc (loc, itype, delta));
7026 if (TREE_CODE (tmp) != INTEGER_CST
7027 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7028 goto cont;
7031 /* We found a suitable component reference. */
7033 pref = TREE_OPERAND (addr, 0);
7034 ret = copy_node (pref);
7035 SET_EXPR_LOCATION (ret, loc);
7037 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7038 fold_build2_loc
7039 (loc, PLUS_EXPR, itype,
7040 fold_convert_loc (loc, itype,
7041 TYPE_MIN_VALUE
7042 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7043 fold_convert_loc (loc, itype, delta)),
7044 NULL_TREE, NULL_TREE);
7045 return build_fold_addr_expr_loc (loc, ret);
7048 cont:
7050 for (;; ref = TREE_OPERAND (ref, 0))
7052 if (TREE_CODE (ref) == ARRAY_REF)
7054 tree domain;
7056 /* Remember if this was a multi-dimensional array. */
7057 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7058 mdim = true;
7060 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7061 if (! domain)
7062 continue;
7063 itype = TREE_TYPE (domain);
7065 step = array_ref_element_size (ref);
7066 if (TREE_CODE (step) != INTEGER_CST)
7067 continue;
7069 if (s)
7071 if (! tree_int_cst_equal (step, s))
7072 continue;
7074 else
7076 /* Check whether delta is a multiple of step. */
7077 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7078 if (! tmp)
7079 continue;
7080 delta = tmp;
7083 /* Only fold here if we can verify we do not overflow one
7084 dimension of a multi-dimensional array. */
7085 if (mdim)
7087 tree tmp;
7089 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7090 || !TYPE_MAX_VALUE (domain)
7091 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7092 continue;
7094 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7095 fold_convert_loc (loc, itype,
7096 TREE_OPERAND (ref, 1)),
7097 fold_convert_loc (loc, itype, delta));
7098 if (!tmp
7099 || TREE_CODE (tmp) != INTEGER_CST
7100 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7101 continue;
7104 break;
7106 else
7107 mdim = false;
7109 if (!handled_component_p (ref))
7110 return NULL_TREE;
7113 /* We found the suitable array reference. So copy everything up to it,
7114 and replace the index. */
7116 pref = TREE_OPERAND (addr, 0);
7117 ret = copy_node (pref);
7118 SET_EXPR_LOCATION (ret, loc);
7119 pos = ret;
7121 while (pref != ref)
7123 pref = TREE_OPERAND (pref, 0);
7124 TREE_OPERAND (pos, 0) = copy_node (pref);
7125 pos = TREE_OPERAND (pos, 0);
7128 TREE_OPERAND (pos, 1)
7129 = fold_build2_loc (loc, PLUS_EXPR, itype,
7130 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7131 fold_convert_loc (loc, itype, delta));
7132 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7136 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7137 means A >= Y && A != MAX, but in this case we know that
7138 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7140 static tree
7141 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7143 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7145 if (TREE_CODE (bound) == LT_EXPR)
7146 a = TREE_OPERAND (bound, 0);
7147 else if (TREE_CODE (bound) == GT_EXPR)
7148 a = TREE_OPERAND (bound, 1);
7149 else
7150 return NULL_TREE;
7152 typea = TREE_TYPE (a);
7153 if (!INTEGRAL_TYPE_P (typea)
7154 && !POINTER_TYPE_P (typea))
7155 return NULL_TREE;
7157 if (TREE_CODE (ineq) == LT_EXPR)
7159 a1 = TREE_OPERAND (ineq, 1);
7160 y = TREE_OPERAND (ineq, 0);
7162 else if (TREE_CODE (ineq) == GT_EXPR)
7164 a1 = TREE_OPERAND (ineq, 0);
7165 y = TREE_OPERAND (ineq, 1);
7167 else
7168 return NULL_TREE;
7170 if (TREE_TYPE (a1) != typea)
7171 return NULL_TREE;
7173 if (POINTER_TYPE_P (typea))
7175 /* Convert the pointer types into integer before taking the difference. */
7176 tree ta = fold_convert_loc (loc, ssizetype, a);
7177 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7178 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7180 else
7181 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7183 if (!diff || !integer_onep (diff))
7184 return NULL_TREE;
7186 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7189 /* Fold a sum or difference of at least one multiplication.
7190 Returns the folded tree or NULL if no simplification could be made. */
7192 static tree
7193 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7194 tree arg0, tree arg1)
7196 tree arg00, arg01, arg10, arg11;
7197 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7199 /* (A * C) +- (B * C) -> (A+-B) * C.
7200 (A * C) +- A -> A * (C+-1).
7201 We are most concerned about the case where C is a constant,
7202 but other combinations show up during loop reduction. Since
7203 it is not difficult, try all four possibilities. */
7205 if (TREE_CODE (arg0) == MULT_EXPR)
7207 arg00 = TREE_OPERAND (arg0, 0);
7208 arg01 = TREE_OPERAND (arg0, 1);
7210 else if (TREE_CODE (arg0) == INTEGER_CST)
7212 arg00 = build_one_cst (type);
7213 arg01 = arg0;
7215 else
7217 /* We cannot generate constant 1 for fract. */
7218 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7219 return NULL_TREE;
7220 arg00 = arg0;
7221 arg01 = build_one_cst (type);
7223 if (TREE_CODE (arg1) == MULT_EXPR)
7225 arg10 = TREE_OPERAND (arg1, 0);
7226 arg11 = TREE_OPERAND (arg1, 1);
7228 else if (TREE_CODE (arg1) == INTEGER_CST)
7230 arg10 = build_one_cst (type);
7231 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7232 the purpose of this canonicalization. */
7233 if (TREE_INT_CST_HIGH (arg1) == -1
7234 && negate_expr_p (arg1)
7235 && code == PLUS_EXPR)
7237 arg11 = negate_expr (arg1);
7238 code = MINUS_EXPR;
7240 else
7241 arg11 = arg1;
7243 else
7245 /* We cannot generate constant 1 for fract. */
7246 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7247 return NULL_TREE;
7248 arg10 = arg1;
7249 arg11 = build_one_cst (type);
7251 same = NULL_TREE;
7253 if (operand_equal_p (arg01, arg11, 0))
7254 same = arg01, alt0 = arg00, alt1 = arg10;
7255 else if (operand_equal_p (arg00, arg10, 0))
7256 same = arg00, alt0 = arg01, alt1 = arg11;
7257 else if (operand_equal_p (arg00, arg11, 0))
7258 same = arg00, alt0 = arg01, alt1 = arg10;
7259 else if (operand_equal_p (arg01, arg10, 0))
7260 same = arg01, alt0 = arg00, alt1 = arg11;
7262 /* No identical multiplicands; see if we can find a common
7263 power-of-two factor in non-power-of-two multiplies. This
7264 can help in multi-dimensional array access. */
7265 else if (tree_fits_shwi_p (arg01)
7266 && tree_fits_shwi_p (arg11))
7268 HOST_WIDE_INT int01, int11, tmp;
7269 bool swap = false;
7270 tree maybe_same;
7271 int01 = tree_to_shwi (arg01);
7272 int11 = tree_to_shwi (arg11);
7274 /* Move min of absolute values to int11. */
7275 if (absu_hwi (int01) < absu_hwi (int11))
7277 tmp = int01, int01 = int11, int11 = tmp;
7278 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7279 maybe_same = arg01;
7280 swap = true;
7282 else
7283 maybe_same = arg11;
7285 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7286 /* The remainder should not be a constant, otherwise we
7287 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7288 increase the number of multiplications needed. */
7289 && TREE_CODE (arg10) != INTEGER_CST)
7291 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7292 build_int_cst (TREE_TYPE (arg00),
7293 int01 / int11));
7294 alt1 = arg10;
7295 same = maybe_same;
7296 if (swap)
7297 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
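/* Editorial illustration (not from the original sources) of the
   power-of-two path above: in  i * 12 + j * 4  the constants share the
   power-of-two factor 4, so

     int idx (int i, int j) { return i * 12 + j * 4; }

   folds to the equivalent  return (i * 3 + j) * 4;  a shape that is
   typical of row-major multi-dimensional array indexing.  */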
7301 if (same)
7302 return fold_build2_loc (loc, MULT_EXPR, type,
7303 fold_build2_loc (loc, code, type,
7304 fold_convert_loc (loc, type, alt0),
7305 fold_convert_loc (loc, type, alt1)),
7306 fold_convert_loc (loc, type, same));
7308 return NULL_TREE;
7311 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7312 specified by EXPR into the buffer PTR of length LEN bytes.
7313 Return the number of bytes placed in the buffer, or zero
7314 upon failure. */
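/* Editorial illustration (not from the original sources): on a typical
   little-endian target with 8-bit bytes, encoding the 32-bit INTEGER_CST
   0x11223344 fills the buffer least significant byte first,

     ptr[0] = 0x44;  ptr[1] = 0x33;  ptr[2] = 0x22;  ptr[3] = 0x11;

   while a big-endian target stores 0x11, 0x22, 0x33, 0x44 instead.  */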
7316 static int
7317 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7319 tree type = TREE_TYPE (expr);
7320 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7321 int byte, offset, word, words;
7322 unsigned char value;
7324 if (total_bytes > len)
7325 return 0;
7326 words = total_bytes / UNITS_PER_WORD;
7328 for (byte = 0; byte < total_bytes; byte++)
7330 int bitpos = byte * BITS_PER_UNIT;
7331 if (bitpos < HOST_BITS_PER_WIDE_INT)
7332 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7333 else
7334 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7335 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7337 if (total_bytes > UNITS_PER_WORD)
7339 word = byte / UNITS_PER_WORD;
7340 if (WORDS_BIG_ENDIAN)
7341 word = (words - 1) - word;
7342 offset = word * UNITS_PER_WORD;
7343 if (BYTES_BIG_ENDIAN)
7344 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7345 else
7346 offset += byte % UNITS_PER_WORD;
7348 else
7349 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7350 ptr[offset] = value;
7352 return total_bytes;
7356 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7357 specified by EXPR into the buffer PTR of length LEN bytes.
7358 Return the number of bytes placed in the buffer, or zero
7359 upon failure. */
7361 static int
7362 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7364 tree type = TREE_TYPE (expr);
7365 enum machine_mode mode = TYPE_MODE (type);
7366 int total_bytes = GET_MODE_SIZE (mode);
7367 FIXED_VALUE_TYPE value;
7368 tree i_value, i_type;
7370 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7371 return 0;
7373 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7375 if (NULL_TREE == i_type
7376 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7377 return 0;
7379 value = TREE_FIXED_CST (expr);
7380 i_value = double_int_to_tree (i_type, value.data);
7382 return native_encode_int (i_value, ptr, len);
7386 /* Subroutine of native_encode_expr. Encode the REAL_CST
7387 specified by EXPR into the buffer PTR of length LEN bytes.
7388 Return the number of bytes placed in the buffer, or zero
7389 upon failure. */
7391 static int
7392 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7394 tree type = TREE_TYPE (expr);
7395 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7396 int byte, offset, word, words, bitpos;
7397 unsigned char value;
7399 /* There are always 32 bits in each long, no matter the size of
7400 the host's long.  We handle floating point representations with
7401 up to 192 bits. */
7402 long tmp[6];
7404 if (total_bytes > len)
7405 return 0;
7406 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7408 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7410 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7411 bitpos += BITS_PER_UNIT)
7413 byte = (bitpos / BITS_PER_UNIT) & 3;
7414 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7416 if (UNITS_PER_WORD < 4)
7418 word = byte / UNITS_PER_WORD;
7419 if (WORDS_BIG_ENDIAN)
7420 word = (words - 1) - word;
7421 offset = word * UNITS_PER_WORD;
7422 if (BYTES_BIG_ENDIAN)
7423 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7424 else
7425 offset += byte % UNITS_PER_WORD;
7427 else
7428 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7429 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7431 return total_bytes;
7434 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7435 specified by EXPR into the buffer PTR of length LEN bytes.
7436 Return the number of bytes placed in the buffer, or zero
7437 upon failure. */
7439 static int
7440 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7442 int rsize, isize;
7443 tree part;
7445 part = TREE_REALPART (expr);
7446 rsize = native_encode_expr (part, ptr, len);
7447 if (rsize == 0)
7448 return 0;
7449 part = TREE_IMAGPART (expr);
7450 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7451 if (isize != rsize)
7452 return 0;
7453 return rsize + isize;
7457 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7458 specified by EXPR into the buffer PTR of length LEN bytes.
7459 Return the number of bytes placed in the buffer, or zero
7460 upon failure. */
7462 static int
7463 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7465 unsigned i, count;
7466 int size, offset;
7467 tree itype, elem;
7469 offset = 0;
7470 count = VECTOR_CST_NELTS (expr);
7471 itype = TREE_TYPE (TREE_TYPE (expr));
7472 size = GET_MODE_SIZE (TYPE_MODE (itype));
7473 for (i = 0; i < count; i++)
7475 elem = VECTOR_CST_ELT (expr, i);
7476 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7477 return 0;
7478 offset += size;
7480 return offset;
7484 /* Subroutine of native_encode_expr. Encode the STRING_CST
7485 specified by EXPR into the buffer PTR of length LEN bytes.
7486 Return the number of bytes placed in the buffer, or zero
7487 upon failure. */
7489 static int
7490 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7492 tree type = TREE_TYPE (expr);
7493 HOST_WIDE_INT total_bytes;
7495 if (TREE_CODE (type) != ARRAY_TYPE
7496 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7497 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7498 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7499 return 0;
7500 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7501 if (total_bytes > len)
7502 return 0;
7503 if (TREE_STRING_LENGTH (expr) < total_bytes)
7505 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7506 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7507 total_bytes - TREE_STRING_LENGTH (expr));
7509 else
7510 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7511 return total_bytes;
7515 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7516 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7517 buffer PTR of length LEN bytes. Return the number of bytes
7518 placed in the buffer, or zero upon failure. */
7520 int
7521 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7523 switch (TREE_CODE (expr))
7525 case INTEGER_CST:
7526 return native_encode_int (expr, ptr, len);
7528 case REAL_CST:
7529 return native_encode_real (expr, ptr, len);
7531 case FIXED_CST:
7532 return native_encode_fixed (expr, ptr, len);
7534 case COMPLEX_CST:
7535 return native_encode_complex (expr, ptr, len);
7537 case VECTOR_CST:
7538 return native_encode_vector (expr, ptr, len);
7540 case STRING_CST:
7541 return native_encode_string (expr, ptr, len);
7543 default:
7544 return 0;
7549 /* Subroutine of native_interpret_expr. Interpret the contents of
7550 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7551 If the buffer cannot be interpreted, return NULL_TREE. */
7553 static tree
7554 native_interpret_int (tree type, const unsigned char *ptr, int len)
7556 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7557 double_int result;
7559 if (total_bytes > len
7560 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7561 return NULL_TREE;
7563 result = double_int::from_buffer (ptr, total_bytes);
7565 return double_int_to_tree (type, result);
7569 /* Subroutine of native_interpret_expr. Interpret the contents of
7570 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7571 If the buffer cannot be interpreted, return NULL_TREE. */
7573 static tree
7574 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7576 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7577 double_int result;
7578 FIXED_VALUE_TYPE fixed_value;
7580 if (total_bytes > len
7581 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7582 return NULL_TREE;
7584 result = double_int::from_buffer (ptr, total_bytes);
7585 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7587 return build_fixed (type, fixed_value);
7591 /* Subroutine of native_interpret_expr. Interpret the contents of
7592 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7593 If the buffer cannot be interpreted, return NULL_TREE. */
7595 static tree
7596 native_interpret_real (tree type, const unsigned char *ptr, int len)
7598 enum machine_mode mode = TYPE_MODE (type);
7599 int total_bytes = GET_MODE_SIZE (mode);
7600 int byte, offset, word, words, bitpos;
7601 unsigned char value;
7602 /* There are always 32 bits in each long, no matter the size of
7603 the host's long.  We handle floating point representations with
7604 up to 192 bits. */
7605 REAL_VALUE_TYPE r;
7606 long tmp[6];
7608 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7609 if (total_bytes > len || total_bytes > 24)
7610 return NULL_TREE;
7611 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7613 memset (tmp, 0, sizeof (tmp));
7614 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7615 bitpos += BITS_PER_UNIT)
7617 byte = (bitpos / BITS_PER_UNIT) & 3;
7618 if (UNITS_PER_WORD < 4)
7620 word = byte / UNITS_PER_WORD;
7621 if (WORDS_BIG_ENDIAN)
7622 word = (words - 1) - word;
7623 offset = word * UNITS_PER_WORD;
7624 if (BYTES_BIG_ENDIAN)
7625 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7626 else
7627 offset += byte % UNITS_PER_WORD;
7629 else
7630 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7631 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7633 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7636 real_from_target (&r, tmp, mode);
7637 return build_real (type, r);
7641 /* Subroutine of native_interpret_expr. Interpret the contents of
7642 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7643 If the buffer cannot be interpreted, return NULL_TREE. */
7645 static tree
7646 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7648 tree etype, rpart, ipart;
7649 int size;
7651 etype = TREE_TYPE (type);
7652 size = GET_MODE_SIZE (TYPE_MODE (etype));
7653 if (size * 2 > len)
7654 return NULL_TREE;
7655 rpart = native_interpret_expr (etype, ptr, size);
7656 if (!rpart)
7657 return NULL_TREE;
7658 ipart = native_interpret_expr (etype, ptr+size, size);
7659 if (!ipart)
7660 return NULL_TREE;
7661 return build_complex (type, rpart, ipart);
7665 /* Subroutine of native_interpret_expr. Interpret the contents of
7666 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7667 If the buffer cannot be interpreted, return NULL_TREE. */
7669 static tree
7670 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7672 tree etype, elem;
7673 int i, size, count;
7674 tree *elements;
7676 etype = TREE_TYPE (type);
7677 size = GET_MODE_SIZE (TYPE_MODE (etype));
7678 count = TYPE_VECTOR_SUBPARTS (type);
7679 if (size * count > len)
7680 return NULL_TREE;
7682 elements = XALLOCAVEC (tree, count);
7683 for (i = count - 1; i >= 0; i--)
7685 elem = native_interpret_expr (etype, ptr+(i*size), size);
7686 if (!elem)
7687 return NULL_TREE;
7688 elements[i] = elem;
7690 return build_vector (type, elements);
7694 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7695 the buffer PTR of length LEN as a constant of type TYPE. For
7696 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7697 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7698 return NULL_TREE. */
7700 tree
7701 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7703 switch (TREE_CODE (type))
7705 case INTEGER_TYPE:
7706 case ENUMERAL_TYPE:
7707 case BOOLEAN_TYPE:
7708 case POINTER_TYPE:
7709 case REFERENCE_TYPE:
7710 return native_interpret_int (type, ptr, len);
7712 case REAL_TYPE:
7713 return native_interpret_real (type, ptr, len);
7715 case FIXED_POINT_TYPE:
7716 return native_interpret_fixed (type, ptr, len);
7718 case COMPLEX_TYPE:
7719 return native_interpret_complex (type, ptr, len);
7721 case VECTOR_TYPE:
7722 return native_interpret_vector (type, ptr, len);
7724 default:
7725 return NULL_TREE;
7729 /* Returns true if we can interpret the contents of a native encoding
7730 as TYPE. */
7732 static bool
7733 can_native_interpret_type_p (tree type)
7735 switch (TREE_CODE (type))
7737 case INTEGER_TYPE:
7738 case ENUMERAL_TYPE:
7739 case BOOLEAN_TYPE:
7740 case POINTER_TYPE:
7741 case REFERENCE_TYPE:
7742 case FIXED_POINT_TYPE:
7743 case REAL_TYPE:
7744 case COMPLEX_TYPE:
7745 case VECTOR_TYPE:
7746 return true;
7747 default:
7748 return false;
7752 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7753 TYPE at compile-time. If we're unable to perform the conversion
7754 return NULL_TREE. */
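/* Editorial illustration (not from the original sources): on a target
   where float is 32-bit IEEE single precision, folding

     VIEW_CONVERT_EXPR<int>(1.0f)

   encodes the REAL_CST into the buffer and reinterprets the bytes,
   yielding the INTEGER_CST 0x3f800000 (1065353216), the bit pattern
   of 1.0f viewed as an int.  */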
7756 static tree
7757 fold_view_convert_expr (tree type, tree expr)
7759 /* We support up to 512-bit values (for V8DFmode). */
7760 unsigned char buffer[64];
7761 int len;
7763 /* Check that the host and target are sane. */
7764 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7765 return NULL_TREE;
7767 len = native_encode_expr (expr, buffer, sizeof (buffer));
7768 if (len == 0)
7769 return NULL_TREE;
7771 return native_interpret_expr (type, buffer, len);
7774 /* Build an expression for the address of T. Folds away INDIRECT_REF
7775 to avoid confusing the gimplify process. */
7777 tree
7778 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7780 /* The size of the object is not relevant when talking about its address. */
7781 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7782 t = TREE_OPERAND (t, 0);
7784 if (TREE_CODE (t) == INDIRECT_REF)
7786 t = TREE_OPERAND (t, 0);
7788 if (TREE_TYPE (t) != ptrtype)
7789 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7791 else if (TREE_CODE (t) == MEM_REF
7792 && integer_zerop (TREE_OPERAND (t, 1)))
7793 return TREE_OPERAND (t, 0);
7794 else if (TREE_CODE (t) == MEM_REF
7795 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7796 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7797 TREE_OPERAND (t, 0),
7798 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7799 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7801 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7803 if (TREE_TYPE (t) != ptrtype)
7804 t = fold_convert_loc (loc, ptrtype, t);
7806 else
7807 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7809 return t;
7812 /* Build an expression for the address of T. */
7814 tree
7815 build_fold_addr_expr_loc (location_t loc, tree t)
7817 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7819 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7822 static bool vec_cst_ctor_to_array (tree, tree *);
7824 /* Fold a unary expression of code CODE and type TYPE with operand
7825 OP0. Return the folded expression if folding is successful.
7826 Otherwise, return NULL_TREE. */
7828 tree
7829 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7831 tree tem;
7832 tree arg0;
7833 enum tree_code_class kind = TREE_CODE_CLASS (code);
7835 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7836 && TREE_CODE_LENGTH (code) == 1);
7838 arg0 = op0;
7839 if (arg0)
7841 if (CONVERT_EXPR_CODE_P (code)
7842 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7844 /* Don't use STRIP_NOPS, because signedness of argument type
7845 matters. */
7846 STRIP_SIGN_NOPS (arg0);
7848 else
7850 /* Strip any conversions that don't change the mode. This
7851 is safe for every expression, except for a comparison
7852 expression because its signedness is derived from its
7853 operands.
7855 Note that this is done as an internal manipulation within
7856 the constant folder, in order to find the simplest
7857 representation of the arguments so that their form can be
7858 studied.  In any case, the appropriate type conversions
7859 should be put back in the tree that will get out of the
7860 constant folder. */
7861 STRIP_NOPS (arg0);
7865 if (TREE_CODE_CLASS (code) == tcc_unary)
7867 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7868 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7869 fold_build1_loc (loc, code, type,
7870 fold_convert_loc (loc, TREE_TYPE (op0),
7871 TREE_OPERAND (arg0, 1))));
7872 else if (TREE_CODE (arg0) == COND_EXPR)
7874 tree arg01 = TREE_OPERAND (arg0, 1);
7875 tree arg02 = TREE_OPERAND (arg0, 2);
7876 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7877 arg01 = fold_build1_loc (loc, code, type,
7878 fold_convert_loc (loc,
7879 TREE_TYPE (op0), arg01));
7880 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7881 arg02 = fold_build1_loc (loc, code, type,
7882 fold_convert_loc (loc,
7883 TREE_TYPE (op0), arg02));
7884 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7885 arg01, arg02);
7887 /* If this was a conversion, and all we did was to move it
7888 inside the COND_EXPR, bring it back out.  But leave it if
7889 it is a conversion from integer to integer and the
7890 result precision is no wider than a word since such a
7891 conversion is cheap and may be optimized away by combine,
7892 while it couldn't if it were outside the COND_EXPR. Then return
7893 so we don't get into an infinite recursion loop taking the
7894 conversion out and then back in. */
7896 if ((CONVERT_EXPR_CODE_P (code)
7897 || code == NON_LVALUE_EXPR)
7898 && TREE_CODE (tem) == COND_EXPR
7899 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7900 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7901 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7902 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7903 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7904 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7905 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7906 && (INTEGRAL_TYPE_P
7907 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7908 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7909 || flag_syntax_only))
7910 tem = build1_loc (loc, code, type,
7911 build3 (COND_EXPR,
7912 TREE_TYPE (TREE_OPERAND
7913 (TREE_OPERAND (tem, 1), 0)),
7914 TREE_OPERAND (tem, 0),
7915 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7916 TREE_OPERAND (TREE_OPERAND (tem, 2),
7917 0)));
7918 return tem;
7922 switch (code)
7924 case PAREN_EXPR:
7925 /* Re-association barriers around constants and other re-association
7926 barriers can be removed. */
7927 if (CONSTANT_CLASS_P (op0)
7928 || TREE_CODE (op0) == PAREN_EXPR)
7929 return fold_convert_loc (loc, type, op0);
7930 return NULL_TREE;
7932 CASE_CONVERT:
7933 case FLOAT_EXPR:
7934 case FIX_TRUNC_EXPR:
7935 if (TREE_TYPE (op0) == type)
7936 return op0;
7938 if (COMPARISON_CLASS_P (op0))
7940 /* If we have (type) (a CMP b) and type is an integral type, return
7941 new expression involving the new type. Canonicalize
7942 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7943 non-integral type.
7944 Do not fold the result, as that would not simplify further;
7945 folding again would only recurse. */
7946 if (TREE_CODE (type) == BOOLEAN_TYPE)
7947 return build2_loc (loc, TREE_CODE (op0), type,
7948 TREE_OPERAND (op0, 0),
7949 TREE_OPERAND (op0, 1));
7950 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7951 && TREE_CODE (type) != VECTOR_TYPE)
7952 return build3_loc (loc, COND_EXPR, type, op0,
7953 constant_boolean_node (true, type),
7954 constant_boolean_node (false, type));
7957 /* Handle cases of two conversions in a row. */
7958 if (CONVERT_EXPR_P (op0))
7960 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7961 tree inter_type = TREE_TYPE (op0);
7962 int inside_int = INTEGRAL_TYPE_P (inside_type);
7963 int inside_ptr = POINTER_TYPE_P (inside_type);
7964 int inside_float = FLOAT_TYPE_P (inside_type);
7965 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7966 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7967 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7968 int inter_int = INTEGRAL_TYPE_P (inter_type);
7969 int inter_ptr = POINTER_TYPE_P (inter_type);
7970 int inter_float = FLOAT_TYPE_P (inter_type);
7971 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7972 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7973 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7974 int final_int = INTEGRAL_TYPE_P (type);
7975 int final_ptr = POINTER_TYPE_P (type);
7976 int final_float = FLOAT_TYPE_P (type);
7977 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7978 unsigned int final_prec = TYPE_PRECISION (type);
7979 int final_unsignedp = TYPE_UNSIGNED (type);
7981 /* In addition to the cases of two conversions in a row
7982 handled below, if we are converting something to its own
7983 type via an object of identical or wider precision, neither
7984 conversion is needed. */
7985 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7986 && (((inter_int || inter_ptr) && final_int)
7987 || (inter_float && final_float))
7988 && inter_prec >= final_prec)
7989 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7991 /* Likewise, if the intermediate and initial types are either both
7992 float or both integer, we don't need the middle conversion if the
7993 former is wider than the latter and doesn't change the signedness
7994 (for integers). Avoid this if the final type is a pointer since
7995 then we sometimes need the middle conversion. Likewise if the
7996 final type has a precision not equal to the size of its mode. */
7997 if (((inter_int && inside_int)
7998 || (inter_float && inside_float)
7999 || (inter_vec && inside_vec))
8000 && inter_prec >= inside_prec
8001 && (inter_float || inter_vec
8002 || inter_unsignedp == inside_unsignedp)
8003 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8004 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8005 && ! final_ptr
8006 && (! final_vec || inter_prec == inside_prec))
8007 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8009 /* If we have a sign-extension of a zero-extended value, we can
8010 replace that by a single zero-extension. Likewise if the
8011 final conversion does not change precision we can drop the
8012 intermediate conversion. */
8013 if (inside_int && inter_int && final_int
8014 && ((inside_prec < inter_prec && inter_prec < final_prec
8015 && inside_unsignedp && !inter_unsignedp)
8016 || final_prec == inter_prec))
8017 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8019 /* Two conversions in a row are not needed unless:
8020 - some conversion is floating-point (overstrict for now), or
8021 - some conversion is a vector (overstrict for now), or
8022 - the intermediate type is narrower than both initial and
8023 final, or
8024 - the intermediate type and innermost type differ in signedness,
8025 and the outermost type is wider than the intermediate, or
8026 - the initial type is a pointer type and the precisions of the
8027 intermediate and final types differ, or
8028 - the final type is a pointer type and the precisions of the
8029 initial and intermediate types differ. */
8030 if (! inside_float && ! inter_float && ! final_float
8031 && ! inside_vec && ! inter_vec && ! final_vec
8032 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8033 && ! (inside_int && inter_int
8034 && inter_unsignedp != inside_unsignedp
8035 && inter_prec < final_prec)
8036 && ((inter_unsignedp && inter_prec > inside_prec)
8037 == (final_unsignedp && final_prec > inter_prec))
8038 && ! (inside_ptr && inter_prec != final_prec)
8039 && ! (final_ptr && inside_prec != inter_prec)
8040 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8041 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8042 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
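/* Editorial illustration (not from the original sources), assuming
   16-bit short, 32-bit int and 64-bit long: the rules above let

     long f (short s) { return (long) (int) s; }

   drop the intermediate cast and fold to  return (long) s;  whereas in

     int g (short s) { return (int) (unsigned char) s; }

   the narrower middle conversion changes the value and must stay.  */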
8045 /* Handle (T *)&A.B.C for A being of type T and B and C
8046 living at offset zero. This occurs frequently in
8047 C++ upcasting and then accessing the base. */
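/* Editorial illustration (not from the original sources):

     struct A { int i; };
     struct B { struct A a; };
     struct B *up (struct B *p) { return (struct B *) &p->a.i; }

   Both a and a.i live at offset zero in struct B, so the cast address
   may fold back to plain  p.  */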
8048 if (TREE_CODE (op0) == ADDR_EXPR
8049 && POINTER_TYPE_P (type)
8050 && handled_component_p (TREE_OPERAND (op0, 0)))
8052 HOST_WIDE_INT bitsize, bitpos;
8053 tree offset;
8054 enum machine_mode mode;
8055 int unsignedp, volatilep;
8056 tree base = TREE_OPERAND (op0, 0);
8057 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8058 &mode, &unsignedp, &volatilep, false);
8059 /* If the reference was to a (constant) zero offset, we can use
8060 the address of the base if it has the same base type
8061 as the result type and the pointer type is unqualified. */
8062 if (! offset && bitpos == 0
8063 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8064 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8065 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8066 return fold_convert_loc (loc, type,
8067 build_fold_addr_expr_loc (loc, base));
8070 if (TREE_CODE (op0) == MODIFY_EXPR
8071 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8072 /* Detect assigning a bitfield. */
8073 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8074 && DECL_BIT_FIELD
8075 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8077 /* Don't leave an assignment inside a conversion
8078 unless assigning a bitfield. */
8079 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8080 /* First do the assignment, then return converted constant. */
8081 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8082 TREE_NO_WARNING (tem) = 1;
8083 TREE_USED (tem) = 1;
8084 return tem;
8087 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8088 constants (if x has signed type, the sign bit cannot be set
8089 in c). This folds extension into the BIT_AND_EXPR.
8090 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8091 very likely don't have maximal range for their precision and this
8092 transformation effectively doesn't preserve non-maximal ranges. */
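/* Editorial illustration (not from the original sources): 0x7f cannot
   have the sign bit of a signed char set, so

     int h (signed char x) { return (int) (x & 0x7f); }

   computes the same value as  ((int) x) & 0x7f,  letting the widening
   conversion be folded into the BIT_AND_EXPR.  */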
8093 if (TREE_CODE (type) == INTEGER_TYPE
8094 && TREE_CODE (op0) == BIT_AND_EXPR
8095 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8097 tree and_expr = op0;
8098 tree and0 = TREE_OPERAND (and_expr, 0);
8099 tree and1 = TREE_OPERAND (and_expr, 1);
8100 int change = 0;
8102 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8103 || (TYPE_PRECISION (type)
8104 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8105 change = 1;
8106 else if (TYPE_PRECISION (TREE_TYPE (and1))
8107 <= HOST_BITS_PER_WIDE_INT
8108 && tree_fits_uhwi_p (and1))
8110 unsigned HOST_WIDE_INT cst;
8112 cst = tree_to_uhwi (and1);
8113 cst &= HOST_WIDE_INT_M1U
8114 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8115 change = (cst == 0);
8116 #ifdef LOAD_EXTEND_OP
8117 if (change
8118 && !flag_syntax_only
8119 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8120 == ZERO_EXTEND))
8122 tree uns = unsigned_type_for (TREE_TYPE (and0));
8123 and0 = fold_convert_loc (loc, uns, and0);
8124 and1 = fold_convert_loc (loc, uns, and1);
8126 #endif
8128 if (change)
8130 tem = force_fit_type_double (type, tree_to_double_int (and1),
8131 0, TREE_OVERFLOW (and1));
8132 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8133 fold_convert_loc (loc, type, and0), tem);
8137 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8138 when one of the new casts will fold away. Conservatively we assume
8139 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8140 if (POINTER_TYPE_P (type)
8141 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8142 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8143 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8144 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8145 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8147 tree arg00 = TREE_OPERAND (arg0, 0);
8148 tree arg01 = TREE_OPERAND (arg0, 1);
8150 return fold_build_pointer_plus_loc
8151 (loc, fold_convert_loc (loc, type, arg00), arg01);
8154 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8155 of the same precision, and X is an integer type not narrower than
8156 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8157 if (INTEGRAL_TYPE_P (type)
8158 && TREE_CODE (op0) == BIT_NOT_EXPR
8159 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8160 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8161 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8163 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8164 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8165 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8166 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8167 fold_convert_loc (loc, type, tem));
8170 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8171 type of X and Y (integer types only). */
8172 if (INTEGRAL_TYPE_P (type)
8173 && TREE_CODE (op0) == MULT_EXPR
8174 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8175 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8177 /* Be careful not to introduce new overflows. */
8178 tree mult_type;
8179 if (TYPE_OVERFLOW_WRAPS (type))
8180 mult_type = type;
8181 else
8182 mult_type = unsigned_type_for (type);
8184 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8186 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8187 fold_convert_loc (loc, mult_type,
8188 TREE_OPERAND (op0, 0)),
8189 fold_convert_loc (loc, mult_type,
8190 TREE_OPERAND (op0, 1)));
8191 return fold_convert_loc (loc, type, tem);
8195 tem = fold_convert_const (code, type, op0);
8196 return tem ? tem : NULL_TREE;
8198 case ADDR_SPACE_CONVERT_EXPR:
8199 if (integer_zerop (arg0))
8200 return fold_convert_const (code, type, arg0);
8201 return NULL_TREE;
8203 case FIXED_CONVERT_EXPR:
8204 tem = fold_convert_const (code, type, arg0);
8205 return tem ? tem : NULL_TREE;
8207 case VIEW_CONVERT_EXPR:
8208 if (TREE_TYPE (op0) == type)
8209 return op0;
8210 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8211 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8212 type, TREE_OPERAND (op0, 0));
8213 if (TREE_CODE (op0) == MEM_REF)
8214 return fold_build2_loc (loc, MEM_REF, type,
8215 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8217 /* For integral conversions with the same precision or pointer
8218 conversions use a NOP_EXPR instead. */
8219 if ((INTEGRAL_TYPE_P (type)
8220 || POINTER_TYPE_P (type))
8221 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8222 || POINTER_TYPE_P (TREE_TYPE (op0)))
8223 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8224 return fold_convert_loc (loc, type, op0);
8226 /* Strip inner integral conversions that do not change the precision. */
8227 if (CONVERT_EXPR_P (op0)
8228 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8229 || POINTER_TYPE_P (TREE_TYPE (op0)))
8230 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8231 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8232 && (TYPE_PRECISION (TREE_TYPE (op0))
8233 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8234 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8235 type, TREE_OPERAND (op0, 0));
8237 return fold_view_convert_expr (type, op0);
8239 case NEGATE_EXPR:
8240 tem = fold_negate_expr (loc, arg0);
8241 if (tem)
8242 return fold_convert_loc (loc, type, tem);
8243 return NULL_TREE;
8245 case ABS_EXPR:
8246 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8247 return fold_abs_const (arg0, type);
8248 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8249 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8250 /* Convert fabs((double)float) into (double)fabsf(float). */
8251 else if (TREE_CODE (arg0) == NOP_EXPR
8252 && TREE_CODE (type) == REAL_TYPE)
8254 tree targ0 = strip_float_extensions (arg0);
8255 if (targ0 != arg0)
8256 return fold_convert_loc (loc, type,
8257 fold_build1_loc (loc, ABS_EXPR,
8258 TREE_TYPE (targ0),
8259 targ0));
8261 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8262 else if (TREE_CODE (arg0) == ABS_EXPR)
8263 return arg0;
8264 else if (tree_expr_nonnegative_p (arg0))
8265 return arg0;
8267 /* Strip sign ops from argument. */
8268 if (TREE_CODE (type) == REAL_TYPE)
8270 tem = fold_strip_sign_ops (arg0);
8271 if (tem)
8272 return fold_build1_loc (loc, ABS_EXPR, type,
8273 fold_convert_loc (loc, type, tem));
8275 return NULL_TREE;
8277 case CONJ_EXPR:
8278 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8279 return fold_convert_loc (loc, type, arg0);
8280 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8282 tree itype = TREE_TYPE (type);
8283 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8284 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8285 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8286 negate_expr (ipart));
8288 if (TREE_CODE (arg0) == COMPLEX_CST)
8290 tree itype = TREE_TYPE (type);
8291 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8292 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8293 return build_complex (type, rpart, negate_expr (ipart));
8295 if (TREE_CODE (arg0) == CONJ_EXPR)
8296 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8297 return NULL_TREE;
8299 case BIT_NOT_EXPR:
8300 if (TREE_CODE (arg0) == INTEGER_CST)
8301 return fold_not_const (arg0, type);
8302 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8303 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8304 /* Convert ~ (-A) to A - 1. */
8305 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8306 return fold_build2_loc (loc, MINUS_EXPR, type,
8307 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8308 build_int_cst (type, 1));
8309 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8310 else if (INTEGRAL_TYPE_P (type)
8311 && ((TREE_CODE (arg0) == MINUS_EXPR
8312 && integer_onep (TREE_OPERAND (arg0, 1)))
8313 || (TREE_CODE (arg0) == PLUS_EXPR
8314 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8315 return fold_build1_loc (loc, NEGATE_EXPR, type,
8316 fold_convert_loc (loc, type,
8317 TREE_OPERAND (arg0, 0)));
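/* Editorial worked example (not from the original sources), using the
   two's complement identity ~X == -X - 1: for A == 5,
   ~(-A) == ~(-5) == 4 == A - 1, and ~(A - 1) == ~4 == -5 == -A.  */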
8318 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8319 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8320 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8321 fold_convert_loc (loc, type,
8322 TREE_OPERAND (arg0, 0)))))
8323 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8324 fold_convert_loc (loc, type,
8325 TREE_OPERAND (arg0, 1)));
8326 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8327 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8328 fold_convert_loc (loc, type,
8329 TREE_OPERAND (arg0, 1)))))
8330 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8331 fold_convert_loc (loc, type,
8332 TREE_OPERAND (arg0, 0)), tem);
8333 /* Perform BIT_NOT_EXPR on each element individually. */
8334 else if (TREE_CODE (arg0) == VECTOR_CST)
8336 tree *elements;
8337 tree elem;
8338 unsigned count = VECTOR_CST_NELTS (arg0), i;
8340 elements = XALLOCAVEC (tree, count);
8341 for (i = 0; i < count; i++)
8343 elem = VECTOR_CST_ELT (arg0, i);
8344 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8345 if (elem == NULL_TREE)
8346 break;
8347 elements[i] = elem;
8349 if (i == count)
8350 return build_vector (type, elements);
8352 else if (COMPARISON_CLASS_P (arg0)
8353 && (VECTOR_TYPE_P (type)
8354 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8356 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8357 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8358 HONOR_NANS (TYPE_MODE (op_type)));
8359 if (subcode != ERROR_MARK)
8360 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8361 TREE_OPERAND (arg0, 1));
8365 return NULL_TREE;
8367 case TRUTH_NOT_EXPR:
8368 /* Note that the operand of this must be an int
8369 and its values must be 0 or 1.
8370 ("true" is a fixed value perhaps depending on the language,
8371 but we don't handle values other than 1 correctly yet.) */
8372 tem = fold_truth_not_expr (loc, arg0);
8373 if (!tem)
8374 return NULL_TREE;
8375 return fold_convert_loc (loc, type, tem);
8377 case REALPART_EXPR:
8378 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8379 return fold_convert_loc (loc, type, arg0);
8380 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8381 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8382 TREE_OPERAND (arg0, 1));
8383 if (TREE_CODE (arg0) == COMPLEX_CST)
8384 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8385 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8387 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8388 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8389 fold_build1_loc (loc, REALPART_EXPR, itype,
8390 TREE_OPERAND (arg0, 0)),
8391 fold_build1_loc (loc, REALPART_EXPR, itype,
8392 TREE_OPERAND (arg0, 1)));
8393 return fold_convert_loc (loc, type, tem);
8395 if (TREE_CODE (arg0) == CONJ_EXPR)
8397 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8398 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8399 TREE_OPERAND (arg0, 0));
8400 return fold_convert_loc (loc, type, tem);
8402 if (TREE_CODE (arg0) == CALL_EXPR)
8404 tree fn = get_callee_fndecl (arg0);
8405 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8406 switch (DECL_FUNCTION_CODE (fn))
8408 CASE_FLT_FN (BUILT_IN_CEXPI):
8409 fn = mathfn_built_in (type, BUILT_IN_COS);
8410 if (fn)
8411 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8412 break;
8414 default:
8415 break;
8418 return NULL_TREE;
8420 case IMAGPART_EXPR:
8421 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8422 return build_zero_cst (type);
8423 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8424 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8425 TREE_OPERAND (arg0, 0));
8426 if (TREE_CODE (arg0) == COMPLEX_CST)
8427 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8428 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8430 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8431 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8432 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8433 TREE_OPERAND (arg0, 0)),
8434 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8435 TREE_OPERAND (arg0, 1)));
8436 return fold_convert_loc (loc, type, tem);
8438 if (TREE_CODE (arg0) == CONJ_EXPR)
8440 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8441 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8442 return fold_convert_loc (loc, type, negate_expr (tem));
8444 if (TREE_CODE (arg0) == CALL_EXPR)
8446 tree fn = get_callee_fndecl (arg0);
8447 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8448 switch (DECL_FUNCTION_CODE (fn))
8450 CASE_FLT_FN (BUILT_IN_CEXPI):
8451 fn = mathfn_built_in (type, BUILT_IN_SIN);
8452 if (fn)
8453 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8454 break;
8456 default:
8457 break;
8460 return NULL_TREE;
8462 case INDIRECT_REF:
8463 /* Fold *&X to X if X is an lvalue. */
8464 if (TREE_CODE (op0) == ADDR_EXPR)
8466 tree op00 = TREE_OPERAND (op0, 0);
8467 if ((TREE_CODE (op00) == VAR_DECL
8468 || TREE_CODE (op00) == PARM_DECL
8469 || TREE_CODE (op00) == RESULT_DECL)
8470 && !TREE_READONLY (op00))
8471 return op00;
8473 return NULL_TREE;
8475 case VEC_UNPACK_LO_EXPR:
8476 case VEC_UNPACK_HI_EXPR:
8477 case VEC_UNPACK_FLOAT_LO_EXPR:
8478 case VEC_UNPACK_FLOAT_HI_EXPR:
8480 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8481 tree *elts;
8482 enum tree_code subcode;
8484 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8485 if (TREE_CODE (arg0) != VECTOR_CST)
8486 return NULL_TREE;
8488 elts = XALLOCAVEC (tree, nelts * 2);
8489 if (!vec_cst_ctor_to_array (arg0, elts))
8490 return NULL_TREE;
8492 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8493 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8494 elts += nelts;
8496 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8497 subcode = NOP_EXPR;
8498 else
8499 subcode = FLOAT_EXPR;
8501 for (i = 0; i < nelts; i++)
8503 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8504 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8505 return NULL_TREE;
8508 return build_vector (type, elts);
8511 case REDUC_MIN_EXPR:
8512 case REDUC_MAX_EXPR:
8513 case REDUC_PLUS_EXPR:
8515 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8516 tree *elts;
8517 enum tree_code subcode;
8519 if (TREE_CODE (op0) != VECTOR_CST)
8520 return NULL_TREE;
8522 elts = XALLOCAVEC (tree, nelts);
8523 if (!vec_cst_ctor_to_array (op0, elts))
8524 return NULL_TREE;
8526 switch (code)
8528 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8529 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8530 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8531 default: gcc_unreachable ();
8534 for (i = 1; i < nelts; i++)
8536 elts[0] = const_binop (subcode, elts[0], elts[i]);
8537 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8538 return NULL_TREE;
8539 elts[i] = build_zero_cst (TREE_TYPE (type));
8542 return build_vector (type, elts);
8545 default:
8546 return NULL_TREE;
8547 } /* switch (code) */
8551 /* If the operation was a conversion, do _not_ mark a resulting constant
8552 with TREE_OVERFLOW if the original constant was not. These conversions
8553 have implementation defined behavior and retaining the TREE_OVERFLOW
8554 flag here would confuse later passes such as VRP. */
8555 tree
8556 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8557 tree type, tree op0)
8559 tree res = fold_unary_loc (loc, code, type, op0);
8560 if (res
8561 && TREE_CODE (res) == INTEGER_CST
8562 && TREE_CODE (op0) == INTEGER_CST
8563 && CONVERT_EXPR_CODE_P (code))
8564 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8566 return res;
8569 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8570 operands OP0 and OP1. LOC is the location of the resulting expression.
8571 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8572 Return the folded expression if folding is successful. Otherwise,
8573 return NULL_TREE. */
8574 static tree
8575 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8576 tree arg0, tree arg1, tree op0, tree op1)
8578 tree tem;
8580 /* We only do these simplifications if we are optimizing. */
8581 if (!optimize)
8582 return NULL_TREE;
8584 /* Check for things like (A || B) && (A || C). We can convert this
8585 to A || (B && C). Note that either operator can be any of the four
8586 truth and/or operations and the transformation will still be
8587 valid. Also note that we only care about order for the
8588 ANDIF and ORIF operators. If B contains side effects, this
8589 might change the truth-value of A. */
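/* Editorial illustration (not from the original sources): when b and c
   are free of side effects,

     int f (int a, int b, int c) { return (a || b) && (a || c); }

   folds to the equivalent

     int g (int a, int b, int c) { return a || (b && c); }

   which tests a only once.  */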
8590 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8591 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8592 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8593 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8594 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8595 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8597 tree a00 = TREE_OPERAND (arg0, 0);
8598 tree a01 = TREE_OPERAND (arg0, 1);
8599 tree a10 = TREE_OPERAND (arg1, 0);
8600 tree a11 = TREE_OPERAND (arg1, 1);
8601 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8602 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8603 && (code == TRUTH_AND_EXPR
8604 || code == TRUTH_OR_EXPR));
8606 if (operand_equal_p (a00, a10, 0))
8607 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8608 fold_build2_loc (loc, code, type, a01, a11));
8609 else if (commutative && operand_equal_p (a00, a11, 0))
8610 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8611 fold_build2_loc (loc, code, type, a01, a10));
8612 else if (commutative && operand_equal_p (a01, a10, 0))
8613 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8614 fold_build2_loc (loc, code, type, a00, a11));
8616 /* This case is tricky because we must either have commutative
8617 operators or else A10 must not have side-effects. */
8619 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8620 && operand_equal_p (a01, a11, 0))
8621 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8622 fold_build2_loc (loc, code, type, a00, a10),
8623 a01);
8626 /* See if we can build a range comparison. */
8627 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8628 return tem;
8630 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8631 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8633 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8634 if (tem)
8635 return fold_build2_loc (loc, code, type, tem, arg1);
8638 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8639 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8641 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8642 if (tem)
8643 return fold_build2_loc (loc, code, type, arg0, tem);
8646 /* Check for the possibility of merging component references. If our
8647 lhs is another similar operation, try to merge its rhs with our
8648 rhs. Then try to merge our lhs and rhs. */
8649 if (TREE_CODE (arg0) == code
8650 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8651 TREE_OPERAND (arg0, 1), arg1)))
8652 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8654 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8655 return tem;
8657 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8658 && (code == TRUTH_AND_EXPR
8659 || code == TRUTH_ANDIF_EXPR
8660 || code == TRUTH_OR_EXPR
8661 || code == TRUTH_ORIF_EXPR))
8663 enum tree_code ncode, icode;
8665 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8666 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8667 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8669 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8670 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8671 We don't want to pack more than two leaves into a non-IF AND/OR
8672 expression.
8673 If the tree code of the left-hand operand isn't an AND/OR-IF code and is
8674 not equal to IF-CODE, then we don't want to add the right-hand operand.
8675 If the inner right-hand side of the left-hand operand has
8676 side-effects, or isn't simple, then we can't add to it,
8677 as otherwise we might destroy the if-sequence. */
8678 if (TREE_CODE (arg0) == icode
8679 && simple_operand_p_2 (arg1)
8680 /* Needed for sequence points, to handle trapping and
8681 side-effects. */
8682 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8684 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8685 arg1);
8686 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8687 tem);
8689 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8690 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8691 else if (TREE_CODE (arg1) == icode
8692 && simple_operand_p_2 (arg0)
8693 /* Needed for sequence points, to handle trapping and
8694 side-effects. */
8695 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8697 tem = fold_build2_loc (loc, ncode, type,
8698 arg0, TREE_OPERAND (arg1, 0));
8699 return fold_build2_loc (loc, icode, type, tem,
8700 TREE_OPERAND (arg1, 1));
8702 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8703 into (A OR B).
8704 For sequence point consistency, we need to check for trapping
8705 and side-effects. */
8706 else if (code == icode && simple_operand_p_2 (arg0)
8707 && simple_operand_p_2 (arg1))
8708 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8711 return NULL_TREE;
8714 /* Fold a binary expression of code CODE and type TYPE with operands
8715 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8716 Return the folded expression if folding is successful. Otherwise,
8717 return NULL_TREE. */
8719 static tree
8720 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8722 enum tree_code compl_code;
8724 if (code == MIN_EXPR)
8725 compl_code = MAX_EXPR;
8726 else if (code == MAX_EXPR)
8727 compl_code = MIN_EXPR;
8728 else
8729 gcc_unreachable ();
8731 /* MIN (MAX (a, b), b) == b. */
8732 if (TREE_CODE (op0) == compl_code
8733 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8734 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8736 /* MIN (MAX (b, a), b) == b. */
8737 if (TREE_CODE (op0) == compl_code
8738 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8739 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8740 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8742 /* MIN (a, MAX (a, b)) == a. */
8743 if (TREE_CODE (op1) == compl_code
8744 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8745 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8746 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8748 /* MIN (a, MAX (b, a)) == a. */
8749 if (TREE_CODE (op1) == compl_code
8750 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8751 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8752 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8754 return NULL_TREE;
8757 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8758 by changing CODE to reduce the magnitude of constants involved in
8759 ARG0 of the comparison.
8760 Returns a canonicalized comparison tree if a simplification was
8761 possible, otherwise returns NULL_TREE.
8762 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8763 valid if signed overflow is undefined. */
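/* Editorial illustration (not from the original sources), valid only
   because signed overflow is assumed undefined: the rules below rewrite

     a + 2 >  b   into   a + 1 >= b
     3 <= b       into   2 < b       (operands then swapped)

   each step shrinking the magnitude of the constant by one.  */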
8765 static tree
8766 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8767 tree arg0, tree arg1,
8768 bool *strict_overflow_p)
8770 enum tree_code code0 = TREE_CODE (arg0);
8771 tree t, cst0 = NULL_TREE;
8772 int sgn0;
8773 bool swap = false;
8775 /* Match A +- CST code arg1 and CST code arg1. We can change the
8776 first form only if overflow is undefined. */
8777 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8778 /* In principle pointers also have undefined overflow behavior,
8779 but that causes problems elsewhere. */
8780 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8781 && (code0 == MINUS_EXPR
8782 || code0 == PLUS_EXPR)
8783 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8784 || code0 == INTEGER_CST))
8785 return NULL_TREE;
8787 /* Identify the constant in arg0 and its sign. */
8788 if (code0 == INTEGER_CST)
8789 cst0 = arg0;
8790 else
8791 cst0 = TREE_OPERAND (arg0, 1);
8792 sgn0 = tree_int_cst_sgn (cst0);
8794 /* Overflowed constants and zero will cause problems. */
8795 if (integer_zerop (cst0)
8796 || TREE_OVERFLOW (cst0))
8797 return NULL_TREE;
8799 /* See if we can reduce the magnitude of the constant in
8800 arg0 by changing the comparison code. */
8801 if (code0 == INTEGER_CST)
8803 /* CST <= arg1 -> CST-1 < arg1. */
8804 if (code == LE_EXPR && sgn0 == 1)
8805 code = LT_EXPR;
8806 /* -CST < arg1 -> -CST-1 <= arg1. */
8807 else if (code == LT_EXPR && sgn0 == -1)
8808 code = LE_EXPR;
8809 /* CST > arg1 -> CST-1 >= arg1. */
8810 else if (code == GT_EXPR && sgn0 == 1)
8811 code = GE_EXPR;
8812 /* -CST >= arg1 -> -CST-1 > arg1. */
8813 else if (code == GE_EXPR && sgn0 == -1)
8814 code = GT_EXPR;
8815 else
8816 return NULL_TREE;
8817 /* arg1 code' CST' might be more canonical. */
8818 swap = true;
8820 else
8822 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8823 if (code == LT_EXPR
8824 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8825 code = LE_EXPR;
8826 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8827 else if (code == GT_EXPR
8828 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8829 code = GE_EXPR;
8830 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8831 else if (code == LE_EXPR
8832 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8833 code = LT_EXPR;
8834 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8835 else if (code == GE_EXPR
8836 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8837 code = GT_EXPR;
8838 else
8839 return NULL_TREE;
8840 *strict_overflow_p = true;
8843 /* Now build the constant reduced in magnitude. But not if that
8844 would produce one outside of its type's range. */
8845 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8846 && ((sgn0 == 1
8847 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8848 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8849 || (sgn0 == -1
8850 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8851 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8852 /* We cannot swap the comparison here as that would cause us to
8853 endlessly recurse. */
8854 return NULL_TREE;
8856 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8857 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8858 if (code0 != INTEGER_CST)
8859 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8860 t = fold_convert (TREE_TYPE (arg1), t);
8862 /* If swapping might yield a more canonical form, do so. */
8863 if (swap)
8864 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8865 else
8866 return fold_build2_loc (loc, code, type, t, arg1);
8869 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8870 overflow further. Try to decrease the magnitude of constants involved
8871 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8872 and put sole constants at the second argument position.
8873 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8875 static tree
8876 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8877 tree arg0, tree arg1)
8879 tree t;
8880 bool strict_overflow_p;
8881 const char * const warnmsg = G_("assuming signed overflow does not occur "
8882 "when reducing constant in comparison");
8884 /* Try canonicalization by simplifying arg0. */
8885 strict_overflow_p = false;
8886 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8887 &strict_overflow_p);
8888 if (t)
8890 if (strict_overflow_p)
8891 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8892 return t;
8895 /* Try canonicalization by simplifying arg1 using the swapped
8896 comparison. */
8897 code = swap_tree_comparison (code);
8898 strict_overflow_p = false;
8899 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8900 &strict_overflow_p);
8901 if (t && strict_overflow_p)
8902 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8903 return t;
8906 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8907 space. This is used to avoid issuing overflow warnings for
8908 expressions like &p->x which cannot wrap. */
8910 static bool
8911 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8913 double_int di_offset, total;
8915 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8916 return true;
8918 if (bitpos < 0)
8919 return true;
8921 if (offset == NULL_TREE)
8922 di_offset = double_int_zero;
8923 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8924 return true;
8925 else
8926 di_offset = TREE_INT_CST (offset);
8928 bool overflow;
8929 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8930 total = di_offset.add_with_sign (units, true, &overflow);
8931 if (overflow)
8932 return true;
8934 if (total.high != 0)
8935 return true;
8937 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8938 if (size <= 0)
8939 return true;
8941 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8942 array. */
8943 if (TREE_CODE (base) == ADDR_EXPR)
8945 HOST_WIDE_INT base_size;
8947 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8948 if (base_size > 0 && size < base_size)
8949 size = base_size;
8952 return total.low > (unsigned HOST_WIDE_INT) size;
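/* Usage sketch (assuming a typical target with 32-bit int and 8-bit
   units): for "struct S { int a; int b; } *p", the address &p->b has
   base "p", no variable offset and bitpos 32, so
   pointer_may_wrap_p (p, NULL_TREE, 32) compares the 4-byte offset
   against sizeof (struct S) == 8 and returns false, i.e. the address
   cannot wrap.  A negative bitpos or a non-constant offset makes the
   function conservatively return true.  */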
8955 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8956 of sizetype kind. This makes sure to properly sign-extend the
8957 constant. */
8959 static HOST_WIDE_INT
8960 size_low_cst (const_tree t)
8962 double_int d = tree_to_double_int (t);
8963 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
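/* For example, on a target where sizetype is 64 bits wide, the unsigned
   constant 0xffffffffffffffff denotes the byte offset -1; size_low_cst
   sign-extends it and returns the HOST_WIDE_INT -1 instead of a huge
   positive value.  */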
8966 /* Subroutine of fold_binary. This routine performs all of the
8967 transformations that are common to the equality/inequality
8968 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8969 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers should use
8970 fold_binary rather than calling this routine directly. Fold a comparison with
8971 tree code CODE and type TYPE with operands OP0 and OP1. Return
8972 the folded comparison or NULL_TREE. */
8974 static tree
8975 fold_comparison (location_t loc, enum tree_code code, tree type,
8976 tree op0, tree op1)
8978 tree arg0, arg1, tem;
8980 arg0 = op0;
8981 arg1 = op1;
8983 STRIP_SIGN_NOPS (arg0);
8984 STRIP_SIGN_NOPS (arg1);
8986 tem = fold_relational_const (code, type, arg0, arg1);
8987 if (tem != NULL_TREE)
8988 return tem;
8990 /* If one arg is a real or integer constant, put it last. */
8991 if (tree_swap_operands_p (arg0, arg1, true))
8992 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8994 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8995 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8996 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8997 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8999 && (TREE_CODE (arg1) == INTEGER_CST
9000 && !TREE_OVERFLOW (arg1)))
9002 tree const1 = TREE_OPERAND (arg0, 1);
9003 tree const2 = arg1;
9004 tree variable = TREE_OPERAND (arg0, 0);
9005 tree lhs;
9006 int lhs_add;
9007 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9009 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9010 TREE_TYPE (arg1), const2, const1);
9012 /* If the constant operation overflowed this can be
9013 simplified as a comparison against INT_MAX/INT_MIN. */
9014 if (TREE_CODE (lhs) == INTEGER_CST
9015 && TREE_OVERFLOW (lhs))
9017 int const1_sgn = tree_int_cst_sgn (const1);
9018 enum tree_code code2 = code;
9020 /* Get the sign of the constant on the lhs if the
9021 operation were VARIABLE + CONST1. */
9022 if (TREE_CODE (arg0) == MINUS_EXPR)
9023 const1_sgn = -const1_sgn;
9025 /* The sign of the constant determines if we overflowed
9026 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9027 Canonicalize to the INT_MIN overflow by swapping the comparison
9028 if necessary. */
9029 if (const1_sgn == -1)
9030 code2 = swap_tree_comparison (code);
9032 /* We now can look at the canonicalized case
9033 VARIABLE + 1 CODE2 INT_MIN
9034 and decide on the result. */
9035 if (code2 == LT_EXPR
9036 || code2 == LE_EXPR
9037 || code2 == EQ_EXPR)
9038 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9039 else if (code2 == NE_EXPR
9040 || code2 == GE_EXPR
9041 || code2 == GT_EXPR)
9042 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9045 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9046 && (TREE_CODE (lhs) != INTEGER_CST
9047 || !TREE_OVERFLOW (lhs)))
9049 if (code != EQ_EXPR && code != NE_EXPR)
9050 fold_overflow_warning ("assuming signed overflow does not occur "
9051 "when changing X +- C1 cmp C2 to "
9052 "X cmp C1 +- C2",
9053 WARN_STRICT_OVERFLOW_COMPARISON);
9054 return fold_build2_loc (loc, code, type, variable, lhs);
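/* Illustrative examples, assuming a signed int X with undefined
   overflow: "X + 5 < 10" is rewritten to "X < 5" here; and for
   "X - 1 < INT_MAX" the combined constant INT_MAX + 1 overflows, the
   overflow is canonicalized to the INT_MIN case, and the whole
   comparison folds to true.  */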
9058 /* For comparisons of pointers we can decompose them into a compile-time
9059 comparison of the base objects and the offsets into the object.
9060 This requires at least one operand being an ADDR_EXPR or a
9061 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9062 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9063 && (TREE_CODE (arg0) == ADDR_EXPR
9064 || TREE_CODE (arg1) == ADDR_EXPR
9065 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9066 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9068 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9069 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9070 enum machine_mode mode;
9071 int volatilep, unsignedp;
9072 bool indirect_base0 = false, indirect_base1 = false;
9074 /* Get base and offset for the access. Strip ADDR_EXPR for
9075 get_inner_reference, but put it back by stripping INDIRECT_REF
9076 off the base object if possible. indirect_baseN will be true
9077 if baseN is not an address but refers to the object itself. */
9078 base0 = arg0;
9079 if (TREE_CODE (arg0) == ADDR_EXPR)
9081 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9082 &bitsize, &bitpos0, &offset0, &mode,
9083 &unsignedp, &volatilep, false);
9084 if (TREE_CODE (base0) == INDIRECT_REF)
9085 base0 = TREE_OPERAND (base0, 0);
9086 else
9087 indirect_base0 = true;
9089 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9091 base0 = TREE_OPERAND (arg0, 0);
9092 STRIP_SIGN_NOPS (base0);
9093 if (TREE_CODE (base0) == ADDR_EXPR)
9095 base0 = TREE_OPERAND (base0, 0);
9096 indirect_base0 = true;
9098 offset0 = TREE_OPERAND (arg0, 1);
9099 if (tree_fits_shwi_p (offset0))
9101 HOST_WIDE_INT off = size_low_cst (offset0);
9102 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9103 * BITS_PER_UNIT)
9104 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9106 bitpos0 = off * BITS_PER_UNIT;
9107 offset0 = NULL_TREE;
9112 base1 = arg1;
9113 if (TREE_CODE (arg1) == ADDR_EXPR)
9115 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9116 &bitsize, &bitpos1, &offset1, &mode,
9117 &unsignedp, &volatilep, false);
9118 if (TREE_CODE (base1) == INDIRECT_REF)
9119 base1 = TREE_OPERAND (base1, 0);
9120 else
9121 indirect_base1 = true;
9123 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9125 base1 = TREE_OPERAND (arg1, 0);
9126 STRIP_SIGN_NOPS (base1);
9127 if (TREE_CODE (base1) == ADDR_EXPR)
9129 base1 = TREE_OPERAND (base1, 0);
9130 indirect_base1 = true;
9132 offset1 = TREE_OPERAND (arg1, 1);
9133 if (tree_fits_shwi_p (offset1))
9135 HOST_WIDE_INT off = size_low_cst (offset1);
9136 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9137 * BITS_PER_UNIT)
9138 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9140 bitpos1 = off * BITS_PER_UNIT;
9141 offset1 = NULL_TREE;
9146 /* A local variable can never be pointed to by
9147 the default SSA name of an incoming parameter. */
9148 if ((TREE_CODE (arg0) == ADDR_EXPR
9149 && indirect_base0
9150 && TREE_CODE (base0) == VAR_DECL
9151 && auto_var_in_fn_p (base0, current_function_decl)
9152 && !indirect_base1
9153 && TREE_CODE (base1) == SSA_NAME
9154 && SSA_NAME_IS_DEFAULT_DEF (base1)
9155 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9156 || (TREE_CODE (arg1) == ADDR_EXPR
9157 && indirect_base1
9158 && TREE_CODE (base1) == VAR_DECL
9159 && auto_var_in_fn_p (base1, current_function_decl)
9160 && !indirect_base0
9161 && TREE_CODE (base0) == SSA_NAME
9162 && SSA_NAME_IS_DEFAULT_DEF (base0)
9163 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9165 if (code == NE_EXPR)
9166 return constant_boolean_node (1, type);
9167 else if (code == EQ_EXPR)
9168 return constant_boolean_node (0, type);
9170 /* If we have equivalent bases we might be able to simplify. */
9171 else if (indirect_base0 == indirect_base1
9172 && operand_equal_p (base0, base1, 0))
9174 /* We can fold this expression to a constant if the non-constant
9175 offset parts are equal. */
9176 if ((offset0 == offset1
9177 || (offset0 && offset1
9178 && operand_equal_p (offset0, offset1, 0)))
9179 && (code == EQ_EXPR
9180 || code == NE_EXPR
9181 || (indirect_base0 && DECL_P (base0))
9182 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9185 if (code != EQ_EXPR
9186 && code != NE_EXPR
9187 && bitpos0 != bitpos1
9188 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9189 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9190 fold_overflow_warning (("assuming pointer wraparound does not "
9191 "occur when comparing P +- C1 with "
9192 "P +- C2"),
9193 WARN_STRICT_OVERFLOW_CONDITIONAL);
9195 switch (code)
9197 case EQ_EXPR:
9198 return constant_boolean_node (bitpos0 == bitpos1, type);
9199 case NE_EXPR:
9200 return constant_boolean_node (bitpos0 != bitpos1, type);
9201 case LT_EXPR:
9202 return constant_boolean_node (bitpos0 < bitpos1, type);
9203 case LE_EXPR:
9204 return constant_boolean_node (bitpos0 <= bitpos1, type);
9205 case GE_EXPR:
9206 return constant_boolean_node (bitpos0 >= bitpos1, type);
9207 case GT_EXPR:
9208 return constant_boolean_node (bitpos0 > bitpos1, type);
9209 default:;
9212 /* We can simplify the comparison to a comparison of the variable
9213 offset parts if the constant offset parts are equal.
9214 Be careful to use signed sizetype here because otherwise we
9215 mess with array offsets in the wrong way. This is possible
9216 because pointer arithmetic is restricted to remain within an
9217 object and overflow on pointer differences is undefined as of
9218 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9219 else if (bitpos0 == bitpos1
9220 && ((code == EQ_EXPR || code == NE_EXPR)
9221 || (indirect_base0 && DECL_P (base0))
9222 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9224 /* By converting to signed sizetype we cover middle-end pointer
9225 arithmetic which operates on unsigned pointer types of size
9226 type size and ARRAY_REF offsets which are properly sign or
9227 zero extended from their type in case it is narrower than
9228 sizetype. */
9229 if (offset0 == NULL_TREE)
9230 offset0 = build_int_cst (ssizetype, 0);
9231 else
9232 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9233 if (offset1 == NULL_TREE)
9234 offset1 = build_int_cst (ssizetype, 0);
9235 else
9236 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9238 if (code != EQ_EXPR
9239 && code != NE_EXPR
9240 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9241 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9242 fold_overflow_warning (("assuming pointer wraparound does not "
9243 "occur when comparing P +- C1 with "
9244 "P +- C2"),
9245 WARN_STRICT_OVERFLOW_COMPARISON);
9247 return fold_build2_loc (loc, code, type, offset0, offset1);
9250 /* For non-equal bases we can simplify if they are addresses
9251 of local binding decls or constants. */
9252 else if (indirect_base0 && indirect_base1
9253 /* We know that !operand_equal_p (base0, base1, 0)
9254 because the if condition was false. But make
9255 sure two decls are not the same. */
9256 && base0 != base1
9257 && TREE_CODE (arg0) == ADDR_EXPR
9258 && TREE_CODE (arg1) == ADDR_EXPR
9259 && (((TREE_CODE (base0) == VAR_DECL
9260 || TREE_CODE (base0) == PARM_DECL)
9261 && (targetm.binds_local_p (base0)
9262 || CONSTANT_CLASS_P (base1)))
9263 || CONSTANT_CLASS_P (base0))
9264 && (((TREE_CODE (base1) == VAR_DECL
9265 || TREE_CODE (base1) == PARM_DECL)
9266 && (targetm.binds_local_p (base1)
9267 || CONSTANT_CLASS_P (base0)))
9268 || CONSTANT_CLASS_P (base1)))
9270 if (code == EQ_EXPR)
9271 return omit_two_operands_loc (loc, type, boolean_false_node,
9272 arg0, arg1);
9273 else if (code == NE_EXPR)
9274 return omit_two_operands_loc (loc, type, boolean_true_node,
9275 arg0, arg1);
9277 /* For equal offsets we can simplify to a comparison of the
9278 base addresses. */
9279 else if (bitpos0 == bitpos1
9280 && (indirect_base0
9281 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9282 && (indirect_base1
9283 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9284 && ((offset0 == offset1)
9285 || (offset0 && offset1
9286 && operand_equal_p (offset0, offset1, 0))))
9288 if (indirect_base0)
9289 base0 = build_fold_addr_expr_loc (loc, base0);
9290 if (indirect_base1)
9291 base1 = build_fold_addr_expr_loc (loc, base1);
9292 return fold_build2_loc (loc, code, type, base0, base1);
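/* Sketch of the constant case above, assuming 32-bit int: for
   "int a[8];", the comparison "&a[1] < &a[3]" decomposes to equal
   bases with bitpos0 == 32 and bitpos1 == 96, so it folds to the
   constant true via the LT_EXPR arm of the switch.  */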
9296 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9297 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9298 the resulting offset is smaller in absolute value than the
9299 original one. */
9300 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9301 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9302 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9303 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9304 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9305 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9306 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9308 tree const1 = TREE_OPERAND (arg0, 1);
9309 tree const2 = TREE_OPERAND (arg1, 1);
9310 tree variable1 = TREE_OPERAND (arg0, 0);
9311 tree variable2 = TREE_OPERAND (arg1, 0);
9312 tree cst;
9313 const char * const warnmsg = G_("assuming signed overflow does not "
9314 "occur when combining constants around "
9315 "a comparison");
9317 /* Put the constant on the side where it doesn't overflow and is
9318 of lower absolute value than before. */
9319 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9320 ? MINUS_EXPR : PLUS_EXPR,
9321 const2, const1);
9322 if (!TREE_OVERFLOW (cst)
9323 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9325 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9326 return fold_build2_loc (loc, code, type,
9327 variable1,
9328 fold_build2_loc (loc,
9329 TREE_CODE (arg1), TREE_TYPE (arg1),
9330 variable2, cst));
9333 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9334 ? MINUS_EXPR : PLUS_EXPR,
9335 const1, const2);
9336 if (!TREE_OVERFLOW (cst)
9337 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9339 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9340 return fold_build2_loc (loc, code, type,
9341 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9342 variable1, cst),
9343 variable2);
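/* For example, with signed X and Y: "X + 2 < Y + 5" becomes
   "X < Y + 3", since |3| < |5| and computing 5 - 2 does not overflow;
   the constant is moved to whichever side shrinks it.  */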
9347 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9348 signed arithmetic case. That form is created by the compiler
9349 often enough for folding it to be of value. One example is in
9350 computing loop trip counts after Operator Strength Reduction. */
9351 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9352 && TREE_CODE (arg0) == MULT_EXPR
9353 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9354 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9355 && integer_zerop (arg1))
9357 tree const1 = TREE_OPERAND (arg0, 1);
9358 tree const2 = arg1; /* zero */
9359 tree variable1 = TREE_OPERAND (arg0, 0);
9360 enum tree_code cmp_code = code;
9362 /* Handle unfolded multiplication by zero. */
9363 if (integer_zerop (const1))
9364 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9366 fold_overflow_warning (("assuming signed overflow does not occur when "
9367 "eliminating multiplication in comparison "
9368 "with zero"),
9369 WARN_STRICT_OVERFLOW_COMPARISON);
9371 /* If const1 is negative we swap the sense of the comparison. */
9372 if (tree_int_cst_sgn (const1) < 0)
9373 cmp_code = swap_tree_comparison (cmp_code);
9375 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
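/* E.g. for signed X, "X * 4 > 0" becomes "X > 0", and "X * -4 > 0"
   becomes "X < 0" because the negative factor flips the sense of the
   comparison.  */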
9378 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9379 if (tem)
9380 return tem;
9382 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9384 tree targ0 = strip_float_extensions (arg0);
9385 tree targ1 = strip_float_extensions (arg1);
9386 tree newtype = TREE_TYPE (targ0);
9388 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9389 newtype = TREE_TYPE (targ1);
9391 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9392 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9393 return fold_build2_loc (loc, code, type,
9394 fold_convert_loc (loc, newtype, targ0),
9395 fold_convert_loc (loc, newtype, targ1));
9397 /* (-a) CMP (-b) -> b CMP a */
9398 if (TREE_CODE (arg0) == NEGATE_EXPR
9399 && TREE_CODE (arg1) == NEGATE_EXPR)
9400 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9401 TREE_OPERAND (arg0, 0));
9403 if (TREE_CODE (arg1) == REAL_CST)
9405 REAL_VALUE_TYPE cst;
9406 cst = TREE_REAL_CST (arg1);
9408 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9409 if (TREE_CODE (arg0) == NEGATE_EXPR)
9410 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9411 TREE_OPERAND (arg0, 0),
9412 build_real (TREE_TYPE (arg1),
9413 real_value_negate (&cst)));
9415 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9416 /* a CMP (-0) -> a CMP 0 */
9417 if (REAL_VALUE_MINUS_ZERO (cst))
9418 return fold_build2_loc (loc, code, type, arg0,
9419 build_real (TREE_TYPE (arg1), dconst0));
9421 /* x != NaN is always true, other ops are always false. */
9422 if (REAL_VALUE_ISNAN (cst)
9423 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9425 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9426 return omit_one_operand_loc (loc, type, tem, arg0);
9429 /* Fold comparisons against infinity. */
9430 if (REAL_VALUE_ISINF (cst)
9431 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9433 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9434 if (tem != NULL_TREE)
9435 return tem;
9439 /* If this is a comparison of a real constant with a PLUS_EXPR
9440 or a MINUS_EXPR of a real constant, we can convert it into a
9441 comparison with a revised real constant as long as no overflow
9442 occurs when unsafe_math_optimizations are enabled. */
9443 if (flag_unsafe_math_optimizations
9444 && TREE_CODE (arg1) == REAL_CST
9445 && (TREE_CODE (arg0) == PLUS_EXPR
9446 || TREE_CODE (arg0) == MINUS_EXPR)
9447 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9448 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9449 ? MINUS_EXPR : PLUS_EXPR,
9450 arg1, TREE_OPERAND (arg0, 1)))
9451 && !TREE_OVERFLOW (tem))
9452 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9454 /* Likewise, we can simplify a comparison of a real constant with
9455 a MINUS_EXPR whose first operand is also a real constant, i.e.
9456 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9457 floating-point types only if -fassociative-math is set. */
9458 if (flag_associative_math
9459 && TREE_CODE (arg1) == REAL_CST
9460 && TREE_CODE (arg0) == MINUS_EXPR
9461 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9462 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9463 arg1))
9464 && !TREE_OVERFLOW (tem))
9465 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9466 TREE_OPERAND (arg0, 1), tem);
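/* A worked example under -fassociative-math: "(10.0 - x) < 4.0"
   yields tem = 10.0 - 4.0 = 6.0 and is rewritten with the swapped
   comparison as "x > 6.0".  */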
9468 /* Fold comparisons against built-in math functions. */
9469 if (TREE_CODE (arg1) == REAL_CST
9470 && flag_unsafe_math_optimizations
9471 && ! flag_errno_math)
9473 enum built_in_function fcode = builtin_mathfn_code (arg0);
9475 if (fcode != END_BUILTINS)
9477 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9478 if (tem != NULL_TREE)
9479 return tem;
9484 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9485 && CONVERT_EXPR_P (arg0))
9487 /* If we are widening one operand of an integer comparison,
9488 see if the other operand is similarly being widened. Perhaps we
9489 can do the comparison in the narrower type. */
9490 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9491 if (tem)
9492 return tem;
9494 /* Or if we are changing signedness. */
9495 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9496 if (tem)
9497 return tem;
9500 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9501 constant, we can simplify it. */
9502 if (TREE_CODE (arg1) == INTEGER_CST
9503 && (TREE_CODE (arg0) == MIN_EXPR
9504 || TREE_CODE (arg0) == MAX_EXPR)
9505 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9507 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9508 if (tem)
9509 return tem;
9512 /* Simplify comparison of something with itself. (For IEEE
9513 floating-point, we can only do some of these simplifications.) */
9514 if (operand_equal_p (arg0, arg1, 0))
9516 switch (code)
9518 case EQ_EXPR:
9519 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9520 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9521 return constant_boolean_node (1, type);
9522 break;
9524 case GE_EXPR:
9525 case LE_EXPR:
9526 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9527 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9528 return constant_boolean_node (1, type);
9529 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9531 case NE_EXPR:
9532 /* For NE, we can only do this simplification if the type is
9533 integral or we don't honor IEEE floating point NaNs. */
9534 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9535 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9536 break;
9537 /* ... fall through ... */
9538 case GT_EXPR:
9539 case LT_EXPR:
9540 return constant_boolean_node (0, type);
9541 default:
9542 gcc_unreachable ();
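/* E.g. for an integer X, "X == X" and "X <= X" fold to 1 while
   "X < X" folds to 0; for a float f with NaNs honored, "f == f" is
   left alone because it is false when f is a NaN, and "f <= f"
   degrades to "f == f".  */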
9546 /* If we are comparing an expression that just has comparisons
9547 of two integer values, arithmetic expressions of those comparisons,
9548 and constants, we can simplify it. There are only three cases
9549 to check: the two values can either be equal, the first can be
9550 greater, or the second can be greater. Fold the expression for
9551 those three values. Since each value must be 0 or 1, we have
9552 eight possibilities, each of which corresponds to the constant 0
9553 or 1 or one of the six possible comparisons.
9555 This handles common cases like (a > b) == 0 but also handles
9556 expressions like ((x > y) - (y > x)) > 0, which supposedly
9557 occur in macroized code. */
9559 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9561 tree cval1 = 0, cval2 = 0;
9562 int save_p = 0;
9564 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9565 /* Don't handle degenerate cases here; they should already
9566 have been handled anyway. */
9567 && cval1 != 0 && cval2 != 0
9568 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9569 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9570 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9571 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9572 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9573 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9574 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9576 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9577 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9579 /* We can't just pass T to eval_subst in case cval1 or cval2
9580 was the same as ARG1. */
9582 tree high_result
9583 = fold_build2_loc (loc, code, type,
9584 eval_subst (loc, arg0, cval1, maxval,
9585 cval2, minval),
9586 arg1);
9587 tree equal_result
9588 = fold_build2_loc (loc, code, type,
9589 eval_subst (loc, arg0, cval1, maxval,
9590 cval2, maxval),
9591 arg1);
9592 tree low_result
9593 = fold_build2_loc (loc, code, type,
9594 eval_subst (loc, arg0, cval1, minval,
9595 cval2, maxval),
9596 arg1);
9598 /* All three of these results should be 0 or 1. Confirm they are.
9599 Then use those values to select the proper code to use. */
9601 if (TREE_CODE (high_result) == INTEGER_CST
9602 && TREE_CODE (equal_result) == INTEGER_CST
9603 && TREE_CODE (low_result) == INTEGER_CST)
9605 /* Make a 3-bit mask with the high-order bit being the
9606 value for `>', the next for '=', and the low for '<'. */
9607 switch ((integer_onep (high_result) * 4)
9608 + (integer_onep (equal_result) * 2)
9609 + integer_onep (low_result))
9611 case 0:
9612 /* Always false. */
9613 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9614 case 1:
9615 code = LT_EXPR;
9616 break;
9617 case 2:
9618 code = EQ_EXPR;
9619 break;
9620 case 3:
9621 code = LE_EXPR;
9622 break;
9623 case 4:
9624 code = GT_EXPR;
9625 break;
9626 case 5:
9627 code = NE_EXPR;
9628 break;
9629 case 6:
9630 code = GE_EXPR;
9631 break;
9632 case 7:
9633 /* Always true. */
9634 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9637 if (save_p)
9639 tem = save_expr (build2 (code, type, cval1, cval2));
9640 SET_EXPR_LOCATION (tem, loc);
9641 return tem;
9643 return fold_build2_loc (loc, code, type, cval1, cval2);
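/* Worked example: for integers a and b, "(a > b) == 0" evaluates to
   0, 1, 1 for the high/equal/low substitutions, which selects mask
   value 3 and thus LE_EXPR, folding the whole expression to
   "a <= b".  */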
9648 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9649 into a single range test. */
9650 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9651 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9652 && TREE_CODE (arg1) == INTEGER_CST
9653 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9654 && !integer_zerop (TREE_OPERAND (arg0, 1))
9655 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9656 && !TREE_OVERFLOW (arg1))
9658 tem = fold_div_compare (loc, code, type, arg0, arg1);
9659 if (tem != NULL_TREE)
9660 return tem;
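/* For instance, with unsigned X, "X / 4 == 2" holds exactly for
   X in [8, 11], so fold_div_compare can express it as a single range
   test instead of a division.  */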
9663 /* Fold ~X op ~Y as Y op X. */
9664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9665 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9667 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9668 return fold_build2_loc (loc, code, type,
9669 fold_convert_loc (loc, cmp_type,
9670 TREE_OPERAND (arg1, 0)),
9671 TREE_OPERAND (arg0, 0));
9674 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9675 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9676 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9678 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9679 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9680 TREE_OPERAND (arg0, 0),
9681 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9682 fold_convert_loc (loc, cmp_type, arg1)));
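/* E.g. "~X < ~Y" becomes "Y < X", and "~X == 5" becomes "X == -6",
   since ~5 is -6 in two's complement.  */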
9685 return NULL_TREE;
9689 /* Subroutine of fold_binary. Optimize complex multiplications of the
9690 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9691 argument EXPR represents the expression "z" of type TYPE. */
9693 static tree
9694 fold_mult_zconjz (location_t loc, tree type, tree expr)
9696 tree itype = TREE_TYPE (type);
9697 tree rpart, ipart, tem;
9699 if (TREE_CODE (expr) == COMPLEX_EXPR)
9701 rpart = TREE_OPERAND (expr, 0);
9702 ipart = TREE_OPERAND (expr, 1);
9704 else if (TREE_CODE (expr) == COMPLEX_CST)
9706 rpart = TREE_REALPART (expr);
9707 ipart = TREE_IMAGPART (expr);
9709 else
9711 expr = save_expr (expr);
9712 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9713 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9716 rpart = save_expr (rpart);
9717 ipart = save_expr (ipart);
9718 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9719 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9720 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9721 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9722 build_zero_cst (itype));
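/* Sketch: for "_Complex double z" with real part r and imaginary
   part i, z * conj(z) is rewritten as COMPLEX_EXPR <r*r + i*i, 0.0>,
   matching pow (r, 2) + pow (i, 2) with a zero imaginary part.  */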
9726 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9727 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9728 guarantees that P and N have the same least significant log2(M) bits.
9729 N is not otherwise constrained. In particular, N is not normalized to
9730 0 <= N < M as is common. In general, the precise value of P is unknown.
9731 M is chosen as large as possible such that constant N can be determined.
9733 Returns M and sets *RESIDUE to N.
9735 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9736 account. This is not always possible due to PR 35705.
9739 static unsigned HOST_WIDE_INT
9740 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9741 bool allow_func_align)
9743 enum tree_code code;
9745 *residue = 0;
9747 code = TREE_CODE (expr);
9748 if (code == ADDR_EXPR)
9750 unsigned int bitalign;
9751 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9752 *residue /= BITS_PER_UNIT;
9753 return bitalign / BITS_PER_UNIT;
9755 else if (code == POINTER_PLUS_EXPR)
9757 tree op0, op1;
9758 unsigned HOST_WIDE_INT modulus;
9759 enum tree_code inner_code;
9761 op0 = TREE_OPERAND (expr, 0);
9762 STRIP_NOPS (op0);
9763 modulus = get_pointer_modulus_and_residue (op0, residue,
9764 allow_func_align);
9766 op1 = TREE_OPERAND (expr, 1);
9767 STRIP_NOPS (op1);
9768 inner_code = TREE_CODE (op1);
9769 if (inner_code == INTEGER_CST)
9771 *residue += TREE_INT_CST_LOW (op1);
9772 return modulus;
9774 else if (inner_code == MULT_EXPR)
9776 op1 = TREE_OPERAND (op1, 1);
9777 if (TREE_CODE (op1) == INTEGER_CST)
9779 unsigned HOST_WIDE_INT align;
9781 /* Compute the greatest power-of-2 divisor of op1. */
9782 align = TREE_INT_CST_LOW (op1);
9783 align &= -align;
9785 /* If align is nonzero and less than modulus, replace
9786 modulus with align. If align is 0, then either op1 is 0
9787 or the greatest power-of-2 divisor of op1 doesn't fit in an
9788 unsigned HOST_WIDE_INT. In either case, no additional
9789 constraint is imposed. */
9790 if (align)
9791 modulus = MIN (modulus, align);
9793 return modulus;
9798 /* If we get here, we were unable to determine anything useful about the
9799 expression. */
9800 return 1;
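/* A hypothetical walk-through: for "int a[4] __attribute__
   ((aligned (16)));", the nested expression
   POINTER_PLUS_EXPR <POINTER_PLUS_EXPR <&a, 4>, i * 8> yields
   modulus 16 and residue 0 from the ADDR_EXPR, residue 4 after the
   constant, and modulus MIN (16, 8) == 8 from the MULT_EXPR, i.e.
   the pointer is known to equal 4 modulo 8.  */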
9803 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9804 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9806 static bool
9807 vec_cst_ctor_to_array (tree arg, tree *elts)
9809 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9811 if (TREE_CODE (arg) == VECTOR_CST)
9813 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9814 elts[i] = VECTOR_CST_ELT (arg, i);
9816 else if (TREE_CODE (arg) == CONSTRUCTOR)
9818 constructor_elt *elt;
9820 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9821 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9822 return false;
9823 else
9824 elts[i] = elt->value;
9826 else
9827 return false;
9828 for (; i < nelts; i++)
9829 elts[i]
9830 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9831 return true;
9834 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9835 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9836 NULL_TREE otherwise. */
9838 static tree
9839 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9841 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9842 tree *elts;
9843 bool need_ctor = false;
9845 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9846 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9847 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9848 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9849 return NULL_TREE;
9851 elts = XALLOCAVEC (tree, nelts * 3);
9852 if (!vec_cst_ctor_to_array (arg0, elts)
9853 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9854 return NULL_TREE;
9856 for (i = 0; i < nelts; i++)
9858 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9859 need_ctor = true;
9860 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9863 if (need_ctor)
9865 vec<constructor_elt, va_gc> *v;
9866 vec_alloc (v, nelts);
9867 for (i = 0; i < nelts; i++)
9868 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9869 return build_constructor (type, v);
9871 else
9872 return build_vector (type, &elts[2 * nelts]);
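/* E.g. permuting {1, 2, 3, 4} and {5, 6, 7, 8} with the selector
   {0, 4, 1, 5} (elements 0-3 index ARG0, 4-7 index ARG1) produces the
   VECTOR_CST {1, 5, 2, 6}; a CONSTRUCTOR is built instead only when
   some selected element is not a constant.  */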
9875 /* Try to fold a pointer difference of type TYPE between two address
9876 expressions of array references AREF0 and AREF1 using location LOC. Return a
9877 simplified expression for the difference or NULL_TREE. */
9879 static tree
9880 fold_addr_of_array_ref_difference (location_t loc, tree type,
9881 tree aref0, tree aref1)
9883 tree base0 = TREE_OPERAND (aref0, 0);
9884 tree base1 = TREE_OPERAND (aref1, 0);
9885 tree base_offset = build_int_cst (type, 0);
9887 /* If the bases are array references as well, recurse. If the bases
9888 are pointer indirections compute the difference of the pointers.
9889 If the bases are equal, we are set. */
9890 if ((TREE_CODE (base0) == ARRAY_REF
9891 && TREE_CODE (base1) == ARRAY_REF
9892 && (base_offset
9893 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9894 || (INDIRECT_REF_P (base0)
9895 && INDIRECT_REF_P (base1)
9896 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9897 TREE_OPERAND (base0, 0),
9898 TREE_OPERAND (base1, 0))))
9899 || operand_equal_p (base0, base1, 0))
9901 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9902 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9903 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9904 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9905 return fold_build2_loc (loc, PLUS_EXPR, type,
9906 base_offset,
9907 fold_build2_loc (loc, MULT_EXPR, type,
9908 diff, esz));
9910 return NULL_TREE;
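/* Sketch, assuming 4-byte int: for "int a[16];", the difference
   "&a[i] - &a[j]" has equal bases, so it folds to the zero base
   offset plus "(i - j) * 4", the index difference scaled by the
   element size.  */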
9913 /* If the real or vector real constant CST of type TYPE has an exact
9914 inverse, return it, else return NULL. */
9916 static tree
9917 exact_inverse (tree type, tree cst)
9919 REAL_VALUE_TYPE r;
9920 tree unit_type, *elts;
9921 enum machine_mode mode;
9922 unsigned vec_nelts, i;
9924 switch (TREE_CODE (cst))
9926 case REAL_CST:
9927 r = TREE_REAL_CST (cst);
9929 if (exact_real_inverse (TYPE_MODE (type), &r))
9930 return build_real (type, r);
9932 return NULL_TREE;
9934 case VECTOR_CST:
9935 vec_nelts = VECTOR_CST_NELTS (cst);
9936 elts = XALLOCAVEC (tree, vec_nelts);
9937 unit_type = TREE_TYPE (type);
9938 mode = TYPE_MODE (unit_type);
9940 for (i = 0; i < vec_nelts; i++)
9942 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9943 if (!exact_real_inverse (mode, &r))
9944 return NULL_TREE;
9945 elts[i] = build_real (unit_type, r);
9948 return build_vector (type, elts);
9950 default:
9951 return NULL_TREE;
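/* E.g. the REAL_CST 0.25 has the exact inverse 4.0, so that is
   returned; 0.1 has no exactly representable inverse and yields
   NULL_TREE.  For a VECTOR_CST every element must have an exact
   inverse.  */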
9955 /* Mask out the tz least significant bits of X of type TYPE where
9956 tz is the number of trailing zeroes in Y. */
9957 static double_int
9958 mask_with_tz (tree type, double_int x, double_int y)
9960 int tz = y.trailing_zeros ();
9962 if (tz > 0)
9964 double_int mask;
9966 mask = ~double_int::mask (tz);
9967 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9968 return mask & x;
9970 return x;
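/* E.g. with y == 8 (three trailing zero bits), mask_with_tz clears
   the three least significant bits of x, so x == 22 (0b10110)
   becomes 16 (0b10000).  */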
9973 /* Return true when T is an address and is known to be nonzero.
9974 For floating point we further ensure that T is not denormal.
9975 Similar logic is present in nonzero_address in rtlanal.c.
9977 If the return value is based on the assumption that signed overflow
9978 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9979 change *STRICT_OVERFLOW_P. */
9981 static bool
9982 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9984 tree type = TREE_TYPE (t);
9985 enum tree_code code;
9987 /* Doing something useful for floating point would need more work. */
9988 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9989 return false;
9991 code = TREE_CODE (t);
9992 switch (TREE_CODE_CLASS (code))
9994 case tcc_unary:
9995 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9996 strict_overflow_p);
9997 case tcc_binary:
9998 case tcc_comparison:
9999 return tree_binary_nonzero_warnv_p (code, type,
10000 TREE_OPERAND (t, 0),
10001 TREE_OPERAND (t, 1),
10002 strict_overflow_p);
10003 case tcc_constant:
10004 case tcc_declaration:
10005 case tcc_reference:
10006 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10008 default:
10009 break;
10012 switch (code)
10014 case TRUTH_NOT_EXPR:
10015 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10016 strict_overflow_p);
10018 case TRUTH_AND_EXPR:
10019 case TRUTH_OR_EXPR:
10020 case TRUTH_XOR_EXPR:
10021 return tree_binary_nonzero_warnv_p (code, type,
10022 TREE_OPERAND (t, 0),
10023 TREE_OPERAND (t, 1),
10024 strict_overflow_p);
10026 case COND_EXPR:
10027 case CONSTRUCTOR:
10028 case OBJ_TYPE_REF:
10029 case ASSERT_EXPR:
10030 case ADDR_EXPR:
10031 case WITH_SIZE_EXPR:
10032 case SSA_NAME:
10033 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10035 case COMPOUND_EXPR:
10036 case MODIFY_EXPR:
10037 case BIND_EXPR:
10038 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10039 strict_overflow_p);
10041 case SAVE_EXPR:
10042 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10043 strict_overflow_p);
10045 case CALL_EXPR:
10047 tree fndecl = get_callee_fndecl (t);
10048 if (!fndecl) return false;
10049 if (flag_delete_null_pointer_checks && !flag_check_new
10050 && DECL_IS_OPERATOR_NEW (fndecl)
10051 && !TREE_NOTHROW (fndecl))
10052 return true;
10053 if (flag_delete_null_pointer_checks
10054 && lookup_attribute ("returns_nonnull",
10055 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10056 return true;
10057 return alloca_call_p (t);
10060 default:
10061 break;
10063 return false;
10066 /* Return true when T is an address and is known to be nonzero.
10067 Handle warnings about undefined signed overflow. */
10069 static bool
10070 tree_expr_nonzero_p (tree t)
10072 bool ret, strict_overflow_p;
10074 strict_overflow_p = false;
10075 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10076 if (strict_overflow_p)
10077 fold_overflow_warning (("assuming signed overflow does not occur when "
10078 "determining that expression is always "
10079 "non-zero"),
10080 WARN_STRICT_OVERFLOW_MISC);
10081 return ret;
10084 /* Fold a binary expression of code CODE and type TYPE with operands
10085 OP0 and OP1. LOC is the location of the resulting expression.
10086 Return the folded expression if folding is successful. Otherwise,
10087 return NULL_TREE. */
10089 tree
10090 fold_binary_loc (location_t loc,
10091 enum tree_code code, tree type, tree op0, tree op1)
10093 enum tree_code_class kind = TREE_CODE_CLASS (code);
10094 tree arg0, arg1, tem;
10095 tree t1 = NULL_TREE;
10096 bool strict_overflow_p;
10097 unsigned int prec;
10099 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10100 && TREE_CODE_LENGTH (code) == 2
10101 && op0 != NULL_TREE
10102 && op1 != NULL_TREE);
10104 arg0 = op0;
10105 arg1 = op1;
10107 /* Strip any conversions that don't change the mode. This is
10108 safe for every expression, except for a comparison expression
10109 because its signedness is derived from its operands. So, in
10110 the latter case, only strip conversions that don't change the
10111 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10112 preserved.
10114 Note that this is done as an internal manipulation within the
10115 constant folder, in order to find the simplest representation
10116 of the arguments so that their form can be studied. In any
10117 case, the appropriate type conversions should be put back in
10118 the tree that will get out of the constant folder. */
10120 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10122 STRIP_SIGN_NOPS (arg0);
10123 STRIP_SIGN_NOPS (arg1);
10125 else
10127 STRIP_NOPS (arg0);
10128 STRIP_NOPS (arg1);
10131 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10132 constant but we can't do arithmetic on them. */
10133 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10134 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10135 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10136 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10137 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10138 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10139 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10141 if (kind == tcc_binary)
10143 /* Make sure type and arg0 have the same saturating flag. */
10144 gcc_assert (TYPE_SATURATING (type)
10145 == TYPE_SATURATING (TREE_TYPE (arg0)));
10146 tem = const_binop (code, arg0, arg1);
10148 else if (kind == tcc_comparison)
10149 tem = fold_relational_const (code, type, arg0, arg1);
10150 else
10151 tem = NULL_TREE;
10153 if (tem != NULL_TREE)
10155 if (TREE_TYPE (tem) != type)
10156 tem = fold_convert_loc (loc, type, tem);
10157 return tem;
10161 /* If this is a commutative operation, and ARG0 is a constant, move it
10162 to ARG1 to reduce the number of tests below. */
10163 if (commutative_tree_code (code)
10164 && tree_swap_operands_p (arg0, arg1, true))
10165 return fold_build2_loc (loc, code, type, op1, op0);
10167 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10169 First check for cases where an arithmetic operation is applied to a
10170 compound, conditional, or comparison operation. Push the arithmetic
10171 operation inside the compound or conditional to see if any folding
10172 can then be done. Convert comparison to conditional for this purpose.
10173 This also optimizes non-constant cases that used to be done in
10174 expand_expr.
10176 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10177 one of the operands is a comparison and the other is a comparison, a
10178 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10179 code below would make the expression more complex. Change it to a
10180 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10181 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10183 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10184 || code == EQ_EXPR || code == NE_EXPR)
10185 && TREE_CODE (type) != VECTOR_TYPE
10186 && ((truth_value_p (TREE_CODE (arg0))
10187 && (truth_value_p (TREE_CODE (arg1))
10188 || (TREE_CODE (arg1) == BIT_AND_EXPR
10189 && integer_onep (TREE_OPERAND (arg1, 1)))))
10190 || (truth_value_p (TREE_CODE (arg1))
10191 && (truth_value_p (TREE_CODE (arg0))
10192 || (TREE_CODE (arg0) == BIT_AND_EXPR
10193 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10195 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10196 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10197 : TRUTH_XOR_EXPR,
10198 boolean_type_node,
10199 fold_convert_loc (loc, boolean_type_node, arg0),
10200 fold_convert_loc (loc, boolean_type_node, arg1));
10202 if (code == EQ_EXPR)
10203 tem = invert_truthvalue_loc (loc, tem);
10205 return fold_convert_loc (loc, type, tem);
10208 if (TREE_CODE_CLASS (code) == tcc_binary
10209 || TREE_CODE_CLASS (code) == tcc_comparison)
10211 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10213 tem = fold_build2_loc (loc, code, type,
10214 fold_convert_loc (loc, TREE_TYPE (op0),
10215 TREE_OPERAND (arg0, 1)), op1);
10216 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10217 tem);
10219 if (TREE_CODE (arg1) == COMPOUND_EXPR
10220 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10222 tem = fold_build2_loc (loc, code, type, op0,
10223 fold_convert_loc (loc, TREE_TYPE (op1),
10224 TREE_OPERAND (arg1, 1)));
10225 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10226 tem);
10229 if (TREE_CODE (arg0) == COND_EXPR
10230 || TREE_CODE (arg0) == VEC_COND_EXPR
10231 || COMPARISON_CLASS_P (arg0))
10233 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10234 arg0, arg1,
10235 /*cond_first_p=*/1);
10236 if (tem != NULL_TREE)
10237 return tem;
10240 if (TREE_CODE (arg1) == COND_EXPR
10241 || TREE_CODE (arg1) == VEC_COND_EXPR
10242 || COMPARISON_CLASS_P (arg1))
10244 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10245 arg1, arg0,
10246 /*cond_first_p=*/0);
10247 if (tem != NULL_TREE)
10248 return tem;
10252 switch (code)
10254 case MEM_REF:
10255 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10256 if (TREE_CODE (arg0) == ADDR_EXPR
10257 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10259 tree iref = TREE_OPERAND (arg0, 0);
10260 return fold_build2 (MEM_REF, type,
10261 TREE_OPERAND (iref, 0),
10262 int_const_binop (PLUS_EXPR, arg1,
10263 TREE_OPERAND (iref, 1)));
10266 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10267 if (TREE_CODE (arg0) == ADDR_EXPR
10268 && handled_component_p (TREE_OPERAND (arg0, 0)))
10270 tree base;
10271 HOST_WIDE_INT coffset;
10272 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10273 &coffset);
10274 if (!base)
10275 return NULL_TREE;
10276 return fold_build2 (MEM_REF, type,
10277 build_fold_addr_expr (base),
10278 int_const_binop (PLUS_EXPR, arg1,
10279 size_int (coffset)));
10282 return NULL_TREE;
10284 case POINTER_PLUS_EXPR:
10285 /* 0 +p index -> (type)index */
10286 if (integer_zerop (arg0))
10287 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10289 /* PTR +p 0 -> PTR */
10290 if (integer_zerop (arg1))
10291 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10293 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10294 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10295 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10296 return fold_convert_loc (loc, type,
10297 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10298 fold_convert_loc (loc, sizetype,
10299 arg1),
10300 fold_convert_loc (loc, sizetype,
10301 arg0)));
10303 /* (PTR +p B) +p A -> PTR +p (B + A) */
10304 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10306 tree inner;
10307 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10308 tree arg00 = TREE_OPERAND (arg0, 0);
10309 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10310 arg01, fold_convert_loc (loc, sizetype, arg1));
10311 return fold_convert_loc (loc, type,
10312 fold_build_pointer_plus_loc (loc,
10313 arg00, inner));
10316 /* PTR_CST +p CST -> CST1 */
10317 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10318 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10319 fold_convert_loc (loc, type, arg1));
10321 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10322 of the array. The loop optimizer sometimes produces this type of
10323 expression. */
10324 if (TREE_CODE (arg0) == ADDR_EXPR)
10326 tem = try_move_mult_to_index (loc, arg0,
10327 fold_convert_loc (loc,
10328 ssizetype, arg1));
10329 if (tem)
10330 return fold_convert_loc (loc, type, tem);
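/* Sketch: with "int a[8];" and 4-byte int, "&a[1] +p i * 4" can
   become "&a[1 + i]" because the multiplier matches the array's
   element size.  */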
10333 return NULL_TREE;
10335 case PLUS_EXPR:
10336 /* A + (-B) -> A - B */
10337 if (TREE_CODE (arg1) == NEGATE_EXPR)
10338 return fold_build2_loc (loc, MINUS_EXPR, type,
10339 fold_convert_loc (loc, type, arg0),
10340 fold_convert_loc (loc, type,
10341 TREE_OPERAND (arg1, 0)));
10342 /* (-A) + B -> B - A */
10343 if (TREE_CODE (arg0) == NEGATE_EXPR
10344 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10345 return fold_build2_loc (loc, MINUS_EXPR, type,
10346 fold_convert_loc (loc, type, arg1),
10347 fold_convert_loc (loc, type,
10348 TREE_OPERAND (arg0, 0)));
10350 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10352 /* Convert ~A + 1 to -A. */
10353 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10354 && integer_onep (arg1))
10355 return fold_build1_loc (loc, NEGATE_EXPR, type,
10356 fold_convert_loc (loc, type,
10357 TREE_OPERAND (arg0, 0)));
10359 /* ~X + X is -1. */
10360 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10361 && !TYPE_OVERFLOW_TRAPS (type))
10363 tree tem = TREE_OPERAND (arg0, 0);
10365 STRIP_NOPS (tem);
10366 if (operand_equal_p (tem, arg1, 0))
10368 t1 = build_all_ones_cst (type);
10369 return omit_one_operand_loc (loc, type, t1, arg1);
10373 /* X + ~X is -1. */
10374 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10375 && !TYPE_OVERFLOW_TRAPS (type))
10377 tree tem = TREE_OPERAND (arg1, 0);
10379 STRIP_NOPS (tem);
10380 if (operand_equal_p (arg0, tem, 0))
10382 t1 = build_all_ones_cst (type);
10383 return omit_one_operand_loc (loc, type, t1, arg0);
10387 /* X + (X / CST) * -CST is X % CST. */
10388 if (TREE_CODE (arg1) == MULT_EXPR
10389 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10390 && operand_equal_p (arg0,
10391 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10393 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10394 tree cst1 = TREE_OPERAND (arg1, 1);
10395 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10396 cst1, cst0);
10397 if (sum && integer_zerop (sum))
10398 return fold_convert_loc (loc, type,
10399 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10400 TREE_TYPE (arg0), arg0,
10401 cst0));
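/* E.g. "X + (X / 16) * -16" folds to "X % 16": cst0 == 16 and
   cst1 == -16 sum to zero, which is exactly the truncating-division
   identity X == (X / 16) * 16 + X % 16 rearranged.  */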
10405 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10406 one. Make sure the type is not saturating and has the signedness of
10407 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10408 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10409 if ((TREE_CODE (arg0) == MULT_EXPR
10410 || TREE_CODE (arg1) == MULT_EXPR)
10411 && !TYPE_SATURATING (type)
10412 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10413 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10414 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10416 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10417 if (tem)
10418 return tem;
10421 if (! FLOAT_TYPE_P (type))
10423 if (integer_zerop (arg1))
10424 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10426 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10427 with a constant, and the two constants have no bits in common,
10428 we should treat this as a BIT_IOR_EXPR since this may produce more
10429 simplifications. */
10430 if (TREE_CODE (arg0) == BIT_AND_EXPR
10431 && TREE_CODE (arg1) == BIT_AND_EXPR
10432 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10433 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10434 && integer_zerop (const_binop (BIT_AND_EXPR,
10435 TREE_OPERAND (arg0, 1),
10436 TREE_OPERAND (arg1, 1))))
10438 code = BIT_IOR_EXPR;
10439 goto bit_ior;
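/* E.g. "(X & 0xf0) + (Y & 0x0f)" is handled as
   "(X & 0xf0) | (Y & 0x0f)" because 0xf0 & 0x0f == 0, so no carries
   can occur.  */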
10442 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10443 (plus (plus (mult) (mult)) (foo)) so that we can
10444 take advantage of the factoring cases below. */
10445 if (TYPE_OVERFLOW_WRAPS (type)
10446 && (((TREE_CODE (arg0) == PLUS_EXPR
10447 || TREE_CODE (arg0) == MINUS_EXPR)
10448 && TREE_CODE (arg1) == MULT_EXPR)
10449 || ((TREE_CODE (arg1) == PLUS_EXPR
10450 || TREE_CODE (arg1) == MINUS_EXPR)
10451 && TREE_CODE (arg0) == MULT_EXPR)))
10453 tree parg0, parg1, parg, marg;
10454 enum tree_code pcode;
10456 if (TREE_CODE (arg1) == MULT_EXPR)
10457 parg = arg0, marg = arg1;
10458 else
10459 parg = arg1, marg = arg0;
10460 pcode = TREE_CODE (parg);
10461 parg0 = TREE_OPERAND (parg, 0);
10462 parg1 = TREE_OPERAND (parg, 1);
10463 STRIP_NOPS (parg0);
10464 STRIP_NOPS (parg1);
10466 if (TREE_CODE (parg0) == MULT_EXPR
10467 && TREE_CODE (parg1) != MULT_EXPR)
10468 return fold_build2_loc (loc, pcode, type,
10469 fold_build2_loc (loc, PLUS_EXPR, type,
10470 fold_convert_loc (loc, type,
10471 parg0),
10472 fold_convert_loc (loc, type,
10473 marg)),
10474 fold_convert_loc (loc, type, parg1));
10475 if (TREE_CODE (parg0) != MULT_EXPR
10476 && TREE_CODE (parg1) == MULT_EXPR)
10477 return
10478 fold_build2_loc (loc, PLUS_EXPR, type,
10479 fold_convert_loc (loc, type, parg0),
10480 fold_build2_loc (loc, pcode, type,
10481 fold_convert_loc (loc, type, marg),
10482 fold_convert_loc (loc, type,
10483 parg1)));
10486 else
10488 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10489 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10490 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10492 /* Likewise if the operands are reversed. */
10493 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10496 /* Convert X + -C into X - C. */
10497 if (TREE_CODE (arg1) == REAL_CST
10498 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10500 tem = fold_negate_const (arg1, type);
10501 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10502 return fold_build2_loc (loc, MINUS_EXPR, type,
10503 fold_convert_loc (loc, type, arg0),
10504 fold_convert_loc (loc, type, tem));
10507 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10508 to __complex__ ( x, y ). This is not the same for SNaNs or
10509 if signed zeros are involved. */
10510 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10511 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10512 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10514 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10515 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10516 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10517 bool arg0rz = false, arg0iz = false;
10518 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10519 || (arg0i && (arg0iz = real_zerop (arg0i))))
10521 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10522 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10523 if (arg0rz && arg1i && real_zerop (arg1i))
10525 tree rp = arg1r ? arg1r
10526 : build1 (REALPART_EXPR, rtype, arg1);
10527 tree ip = arg0i ? arg0i
10528 : build1 (IMAGPART_EXPR, rtype, arg0);
10529 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10531 else if (arg0iz && arg1r && real_zerop (arg1r))
10533 tree rp = arg0r ? arg0r
10534 : build1 (REALPART_EXPR, rtype, arg0);
10535 tree ip = arg1i ? arg1i
10536 : build1 (IMAGPART_EXPR, rtype, arg1);
10537 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10542 if (flag_unsafe_math_optimizations
10543 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10544 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10545 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10546 return tem;
10548 /* Convert x+x into x*2.0. */
10549 if (operand_equal_p (arg0, arg1, 0)
10550 && SCALAR_FLOAT_TYPE_P (type))
10551 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10552 build_real (type, dconst2));
10554 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10555 We associate floats only if the user has specified
10556 -fassociative-math. */
10557 if (flag_associative_math
10558 && TREE_CODE (arg1) == PLUS_EXPR
10559 && TREE_CODE (arg0) != MULT_EXPR)
10561 tree tree10 = TREE_OPERAND (arg1, 0);
10562 tree tree11 = TREE_OPERAND (arg1, 1);
10563 if (TREE_CODE (tree11) == MULT_EXPR
10564 && TREE_CODE (tree10) == MULT_EXPR)
10566 tree tree0;
10567 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10568 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10571 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10572 We associate floats only if the user has specified
10573 -fassociative-math. */
10574 if (flag_associative_math
10575 && TREE_CODE (arg0) == PLUS_EXPR
10576 && TREE_CODE (arg1) != MULT_EXPR)
10578 tree tree00 = TREE_OPERAND (arg0, 0);
10579 tree tree01 = TREE_OPERAND (arg0, 1);
10580 if (TREE_CODE (tree01) == MULT_EXPR
10581 && TREE_CODE (tree00) == MULT_EXPR)
10583 tree tree0;
10584 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10585 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10590 bit_rotate:
10591 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10592 is a rotate of A by C1 bits. */
10593 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10594 is a rotate of A by B bits. */
10596 enum tree_code code0, code1;
10597 tree rtype;
10598 code0 = TREE_CODE (arg0);
10599 code1 = TREE_CODE (arg1);
10600 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10601 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10602 && operand_equal_p (TREE_OPERAND (arg0, 0),
10603 TREE_OPERAND (arg1, 0), 0)
10604 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10605 TYPE_UNSIGNED (rtype))
10606 /* Only create rotates in complete modes. Other cases are not
10607 expanded properly. */
10608 && (element_precision (rtype)
10609 == element_precision (TYPE_MODE (rtype))))
10611 tree tree01, tree11;
10612 enum tree_code code01, code11;
10614 tree01 = TREE_OPERAND (arg0, 1);
10615 tree11 = TREE_OPERAND (arg1, 1);
10616 STRIP_NOPS (tree01);
10617 STRIP_NOPS (tree11);
10618 code01 = TREE_CODE (tree01);
10619 code11 = TREE_CODE (tree11);
10620 if (code01 == INTEGER_CST
10621 && code11 == INTEGER_CST
10622 && TREE_INT_CST_HIGH (tree01) == 0
10623 && TREE_INT_CST_HIGH (tree11) == 0
10624 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10625 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10627 tem = build2_loc (loc, LROTATE_EXPR,
10628 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10629 TREE_OPERAND (arg0, 0),
10630 code0 == LSHIFT_EXPR ? tree01 : tree11);
10631 return fold_convert_loc (loc, type, tem);
10633 else if (code11 == MINUS_EXPR)
10635 tree tree110, tree111;
10636 tree110 = TREE_OPERAND (tree11, 0);
10637 tree111 = TREE_OPERAND (tree11, 1);
10638 STRIP_NOPS (tree110);
10639 STRIP_NOPS (tree111);
10640 if (TREE_CODE (tree110) == INTEGER_CST
10641 && 0 == compare_tree_int (tree110,
10642 element_precision
10643 (TREE_TYPE (TREE_OPERAND
10644 (arg0, 0))))
10645 && operand_equal_p (tree01, tree111, 0))
10646 return
10647 fold_convert_loc (loc, type,
10648 build2 ((code0 == LSHIFT_EXPR
10649 ? LROTATE_EXPR
10650 : RROTATE_EXPR),
10651 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10652 TREE_OPERAND (arg0, 0), tree01));
10654 else if (code01 == MINUS_EXPR)
10656 tree tree010, tree011;
10657 tree010 = TREE_OPERAND (tree01, 0);
10658 tree011 = TREE_OPERAND (tree01, 1);
10659 STRIP_NOPS (tree010);
10660 STRIP_NOPS (tree011);
10661 if (TREE_CODE (tree010) == INTEGER_CST
10662 && 0 == compare_tree_int (tree010,
10663 element_precision
10664 (TREE_TYPE (TREE_OPERAND
10665 (arg0, 0))))
10666 && operand_equal_p (tree11, tree011, 0))
10667 return fold_convert_loc
10668 (loc, type,
10669 build2 ((code0 != LSHIFT_EXPR
10670 ? LROTATE_EXPR
10671 : RROTATE_EXPR),
10672 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10673 TREE_OPERAND (arg0, 0), tree11));
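/* E.g. for 32-bit unsigned X, "(X << 3) + (X >> 29)" matches the
   constant case (3 + 29 == 32) and becomes a left-rotate of X by 3;
   "(X << B) + (X >> (32 - B))" matches the MINUS_EXPR case and
   likewise becomes a rotate by B.  */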
10678 associate:
10679 /* In most languages, we can't associate operations on floats through
10680 parentheses. Rather than remember where the parentheses were, we
10681 don't associate floats at all, unless the user has specified
10682 -fassociative-math.
10683 And, we need to make sure type is not saturating. */
10685 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10686 && !TYPE_SATURATING (type))
10688 tree var0, con0, lit0, minus_lit0;
10689 tree var1, con1, lit1, minus_lit1;
10690 tree atype = type;
10691 bool ok = true;
10693 /* Split both trees into variables, constants, and literals. Then
10694 associate each group together, the constants with literals,
10695 then the result with variables. This increases the chances of
10696 literals being recombined later and of generating relocatable
10697 expressions for the sum of a constant and literal. */
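/* For instance (variable names illustrative): folding (x + 1) + (y + 2)
   splits into variables {x, y} and literals {1, 2}; regrouping then
   yields (x + y) + 3, so the two literals meet and fold.  */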
10698 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10699 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10700 code == MINUS_EXPR);
10702 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10703 if (code == MINUS_EXPR)
10704 code = PLUS_EXPR;
10706 /* With undefined overflow prefer doing association in a type
10707 which wraps on overflow, if that is one of the operand types. */
10708 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10709 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10711 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10712 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10713 atype = TREE_TYPE (arg0);
10714 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10715 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10716 atype = TREE_TYPE (arg1);
10717 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10720 /* With undefined overflow we can only associate constants with one
10721 variable, and constants whose association doesn't overflow. */
10722 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10723 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10725 if (var0 && var1)
10727 tree tmp0 = var0;
10728 tree tmp1 = var1;
10730 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10731 tmp0 = TREE_OPERAND (tmp0, 0);
10732 if (CONVERT_EXPR_P (tmp0)
10733 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10734 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10735 <= TYPE_PRECISION (atype)))
10736 tmp0 = TREE_OPERAND (tmp0, 0);
10737 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10738 tmp1 = TREE_OPERAND (tmp1, 0);
10739 if (CONVERT_EXPR_P (tmp1)
10740 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10741 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10742 <= TYPE_PRECISION (atype)))
10743 tmp1 = TREE_OPERAND (tmp1, 0);
10744 /* The only case we can still associate with two variables
10745 is if they are the same, modulo negation and bit-pattern
10746 preserving conversions. */
10747 if (!operand_equal_p (tmp0, tmp1, 0))
10748 ok = false;
10752 /* Only do something if we found more than two objects. Otherwise,
10753 nothing has changed and we risk infinite recursion. */
10754 if (ok
10755 && (2 < ((var0 != 0) + (var1 != 0)
10756 + (con0 != 0) + (con1 != 0)
10757 + (lit0 != 0) + (lit1 != 0)
10758 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10760 bool any_overflows = false;
10761 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10762 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10763 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10764 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10765 var0 = associate_trees (loc, var0, var1, code, atype);
10766 con0 = associate_trees (loc, con0, con1, code, atype);
10767 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10768 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10769 code, atype);
10771 /* Preserve the MINUS_EXPR if the negative part of the literal is
10772 greater than the positive part. Otherwise, the multiplicative
10773 folding code (i.e. extract_muldiv) may be fooled when unsigned
10774 constants are subtracted, as in the following
10775 example: ((X*2 + 4) - 8U)/2. */
10776 if (minus_lit0 && lit0)
10778 if (TREE_CODE (lit0) == INTEGER_CST
10779 && TREE_CODE (minus_lit0) == INTEGER_CST
10780 && tree_int_cst_lt (lit0, minus_lit0))
10782 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10783 MINUS_EXPR, atype);
10784 lit0 = 0;
10786 else
10788 lit0 = associate_trees (loc, lit0, minus_lit0,
10789 MINUS_EXPR, atype);
10790 minus_lit0 = 0;
10794 /* Don't introduce overflows through reassociation. */
10795 if (!any_overflows
10796 && ((lit0 && TREE_OVERFLOW (lit0))
10797 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10798 return NULL_TREE;
10800 if (minus_lit0)
10802 if (con0 == 0)
10803 return
10804 fold_convert_loc (loc, type,
10805 associate_trees (loc, var0, minus_lit0,
10806 MINUS_EXPR, atype));
10807 else
10809 con0 = associate_trees (loc, con0, minus_lit0,
10810 MINUS_EXPR, atype);
10811 return
10812 fold_convert_loc (loc, type,
10813 associate_trees (loc, var0, con0,
10814 PLUS_EXPR, atype));
10818 con0 = associate_trees (loc, con0, lit0, code, atype);
10819 return
10820 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10821 code, atype));
10825 return NULL_TREE;
10827 case MINUS_EXPR:
10828 /* Pointer simplifications for subtraction, simple reassociations. */
10829 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10831 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10832 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10833 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10835 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10836 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10837 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10838 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10839 return fold_build2_loc (loc, PLUS_EXPR, type,
10840 fold_build2_loc (loc, MINUS_EXPR, type,
10841 arg00, arg10),
10842 fold_build2_loc (loc, MINUS_EXPR, type,
10843 arg01, arg11));
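/* An illustrative instance (pointer and offsets assumed): for char *p,
   (p p+ 12) - (p p+ 4) becomes (p - p) + (12 - 4); the p - p part then
   folds to 0 via the &x - &x case below, leaving the constant 8.  */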
10845 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10846 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10848 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10849 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10850 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10851 fold_convert_loc (loc, type, arg1));
10852 if (tmp)
10853 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10856 /* A - (-B) -> A + B */
10857 if (TREE_CODE (arg1) == NEGATE_EXPR)
10858 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10859 fold_convert_loc (loc, type,
10860 TREE_OPERAND (arg1, 0)));
10861 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10862 if (TREE_CODE (arg0) == NEGATE_EXPR
10863 && negate_expr_p (arg1)
10864 && reorder_operands_p (arg0, arg1))
10865 return fold_build2_loc (loc, MINUS_EXPR, type,
10866 fold_convert_loc (loc, type,
10867 negate_expr (arg1)),
10868 fold_convert_loc (loc, type,
10869 TREE_OPERAND (arg0, 0)));
10870 /* Convert -A - 1 to ~A. */
10871 if (TREE_CODE (type) != COMPLEX_TYPE
10872 && TREE_CODE (arg0) == NEGATE_EXPR
10873 && integer_onep (arg1)
10874 && !TYPE_OVERFLOW_TRAPS (type))
10875 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10876 fold_convert_loc (loc, type,
10877 TREE_OPERAND (arg0, 0)));
10879 /* Convert -1 - A to ~A. */
10880 if (TREE_CODE (type) != COMPLEX_TYPE
10881 && integer_all_onesp (arg0))
10882 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10885 /* X - (X / Y) * Y is X % Y. */
10886 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10887 && TREE_CODE (arg1) == MULT_EXPR
10888 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10889 && operand_equal_p (arg0,
10890 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10891 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10892 TREE_OPERAND (arg1, 1), 0))
10893 return
10894 fold_convert_loc (loc, type,
10895 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10896 arg0, TREE_OPERAND (arg1, 1)));
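/* E.g. for an integral x (name illustrative), x - (x / 8) * 8 folds
   to x % 8; both sides use truncating division, so the identity is
   exact for negative x as well.  */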
10898 if (! FLOAT_TYPE_P (type))
10900 if (integer_zerop (arg0))
10901 return negate_expr (fold_convert_loc (loc, type, arg1));
10902 if (integer_zerop (arg1))
10903 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10905 /* Fold A - (A & B) into ~B & A. */
10906 if (!TREE_SIDE_EFFECTS (arg0)
10907 && TREE_CODE (arg1) == BIT_AND_EXPR)
10909 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10911 tree arg10 = fold_convert_loc (loc, type,
10912 TREE_OPERAND (arg1, 0));
10913 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10914 fold_build1_loc (loc, BIT_NOT_EXPR,
10915 type, arg10),
10916 fold_convert_loc (loc, type, arg0));
10918 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10920 tree arg11 = fold_convert_loc (loc,
10921 type, TREE_OPERAND (arg1, 1));
10922 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10923 fold_build1_loc (loc, BIT_NOT_EXPR,
10924 type, arg11),
10925 fold_convert_loc (loc, type, arg0));
10929 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10930 any power of 2 minus 1. */
10931 if (TREE_CODE (arg0) == BIT_AND_EXPR
10932 && TREE_CODE (arg1) == BIT_AND_EXPR
10933 && operand_equal_p (TREE_OPERAND (arg0, 0),
10934 TREE_OPERAND (arg1, 0), 0))
10936 tree mask0 = TREE_OPERAND (arg0, 1);
10937 tree mask1 = TREE_OPERAND (arg1, 1);
10938 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10940 if (operand_equal_p (tem, mask1, 0))
10942 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10943 TREE_OPERAND (arg0, 0), mask1);
10944 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
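/* A worked instance with an assumed mask: for B == 7, a power of 2
   minus 1, (a & ~7) - (a & 7) becomes (a ^ 7) - 7. Writing H = a & ~7
   and L = a & 7, we have a ^ 7 == H + (7 - L), so both sides equal
   H - L.  */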
10949 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10950 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10951 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10953 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10954 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10955 (-ARG1 + ARG0) reduces to -ARG1. */
10956 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10957 return negate_expr (fold_convert_loc (loc, type, arg1));
10959 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10960 __complex__ ( x, -y ). This is not the same for SNaNs or if
10961 signed zeros are involved. */
10962 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10963 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10964 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10966 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10967 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10968 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10969 bool arg0rz = false, arg0iz = false;
10970 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10971 || (arg0i && (arg0iz = real_zerop (arg0i))))
10973 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10974 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10975 if (arg0rz && arg1i && real_zerop (arg1i))
10977 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10978 arg1r ? arg1r
10979 : build1 (REALPART_EXPR, rtype, arg1));
10980 tree ip = arg0i ? arg0i
10981 : build1 (IMAGPART_EXPR, rtype, arg0);
10982 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10984 else if (arg0iz && arg1r && real_zerop (arg1r))
10986 tree rp = arg0r ? arg0r
10987 : build1 (REALPART_EXPR, rtype, arg0);
10988 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10989 arg1i ? arg1i
10990 : build1 (IMAGPART_EXPR, rtype, arg1));
10991 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10996 /* Fold &x - &x. This can happen from &x.foo - &x.
10997 This is unsafe for certain floats even in non-IEEE formats.
10998 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10999 Also note that operand_equal_p is always false if an operand
11000 is volatile. */
11002 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11003 && operand_equal_p (arg0, arg1, 0))
11004 return build_zero_cst (type);
11006 /* A - B -> A + (-B) if B is easily negatable. */
11007 if (negate_expr_p (arg1)
11008 && ((FLOAT_TYPE_P (type)
11009 /* Avoid this transformation if B is a positive REAL_CST. */
11010 && (TREE_CODE (arg1) != REAL_CST
11011 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11012 || INTEGRAL_TYPE_P (type)))
11013 return fold_build2_loc (loc, PLUS_EXPR, type,
11014 fold_convert_loc (loc, type, arg0),
11015 fold_convert_loc (loc, type,
11016 negate_expr (arg1)));
11018 /* Try folding difference of addresses. */
11020 HOST_WIDE_INT diff;
11022 if ((TREE_CODE (arg0) == ADDR_EXPR
11023 || TREE_CODE (arg1) == ADDR_EXPR)
11024 && ptr_difference_const (arg0, arg1, &diff))
11025 return build_int_cst_type (type, diff);
11028 /* Fold &a[i] - &a[j] to i-j. */
11029 if (TREE_CODE (arg0) == ADDR_EXPR
11030 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11031 && TREE_CODE (arg1) == ADDR_EXPR
11032 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11034 tree tem = fold_addr_of_array_ref_difference (loc, type,
11035 TREE_OPERAND (arg0, 0),
11036 TREE_OPERAND (arg1, 0));
11037 if (tem)
11038 return tem;
11041 if (FLOAT_TYPE_P (type)
11042 && flag_unsafe_math_optimizations
11043 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11044 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11045 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11046 return tem;
11048 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11049 one. Make sure the type is not saturating and has the signedness of
11050 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11051 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11052 if ((TREE_CODE (arg0) == MULT_EXPR
11053 || TREE_CODE (arg1) == MULT_EXPR)
11054 && !TYPE_SATURATING (type)
11055 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11056 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11057 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11059 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11060 if (tem)
11061 return tem;
11064 goto associate;
11066 case MULT_EXPR:
11067 /* (-A) * (-B) -> A * B */
11068 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11069 return fold_build2_loc (loc, MULT_EXPR, type,
11070 fold_convert_loc (loc, type,
11071 TREE_OPERAND (arg0, 0)),
11072 fold_convert_loc (loc, type,
11073 negate_expr (arg1)));
11074 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11075 return fold_build2_loc (loc, MULT_EXPR, type,
11076 fold_convert_loc (loc, type,
11077 negate_expr (arg0)),
11078 fold_convert_loc (loc, type,
11079 TREE_OPERAND (arg1, 0)));
11081 if (! FLOAT_TYPE_P (type))
11083 if (integer_zerop (arg1))
11084 return omit_one_operand_loc (loc, type, arg1, arg0);
11085 if (integer_onep (arg1))
11086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11087 /* Transform x * -1 into -x. Make sure to do the negation
11088 on the original operand with conversions not stripped
11089 because we can only strip non-sign-changing conversions. */
11090 if (integer_minus_onep (arg1))
11091 return fold_convert_loc (loc, type, negate_expr (op0));
11092 /* Transform x * -C into -x * C if x is easily negatable. */
11093 if (TREE_CODE (arg1) == INTEGER_CST
11094 && tree_int_cst_sgn (arg1) == -1
11095 && negate_expr_p (arg0)
11096 && (tem = negate_expr (arg1)) != arg1
11097 && !TREE_OVERFLOW (tem))
11098 return fold_build2_loc (loc, MULT_EXPR, type,
11099 fold_convert_loc (loc, type,
11100 negate_expr (arg0)),
11101 tem);
11103 /* (a * (1 << b)) is (a << b) */
11104 if (TREE_CODE (arg1) == LSHIFT_EXPR
11105 && integer_onep (TREE_OPERAND (arg1, 0)))
11106 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11107 TREE_OPERAND (arg1, 1));
11108 if (TREE_CODE (arg0) == LSHIFT_EXPR
11109 && integer_onep (TREE_OPERAND (arg0, 0)))
11110 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11111 TREE_OPERAND (arg0, 1));
11113 /* (A + A) * C -> A * 2 * C */
11114 if (TREE_CODE (arg0) == PLUS_EXPR
11115 && TREE_CODE (arg1) == INTEGER_CST
11116 && operand_equal_p (TREE_OPERAND (arg0, 0),
11117 TREE_OPERAND (arg0, 1), 0))
11118 return fold_build2_loc (loc, MULT_EXPR, type,
11119 omit_one_operand_loc (loc, type,
11120 TREE_OPERAND (arg0, 0),
11121 TREE_OPERAND (arg0, 1)),
11122 fold_build2_loc (loc, MULT_EXPR, type,
11123 build_int_cst (type, 2), arg1));
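/* For example (constant assumed), (a + a) * 3 is rewritten above as
   a * (2 * 3), whose inner multiplication immediately constant-folds,
   giving a * 6.  */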
11125 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11126 sign-changing only. */
11127 if (TREE_CODE (arg1) == INTEGER_CST
11128 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11129 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11130 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11132 strict_overflow_p = false;
11133 if (TREE_CODE (arg1) == INTEGER_CST
11134 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11135 &strict_overflow_p)))
11137 if (strict_overflow_p)
11138 fold_overflow_warning (("assuming signed overflow does not "
11139 "occur when simplifying "
11140 "multiplication"),
11141 WARN_STRICT_OVERFLOW_MISC);
11142 return fold_convert_loc (loc, type, tem);
11145 /* Optimize z * conj(z) for integer complex numbers. */
11146 if (TREE_CODE (arg0) == CONJ_EXPR
11147 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11148 return fold_mult_zconjz (loc, type, arg1);
11149 if (TREE_CODE (arg1) == CONJ_EXPR
11150 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11151 return fold_mult_zconjz (loc, type, arg0);
11153 else
11155 /* Maybe fold x * 0 to 0. The expressions aren't the same
11156 when x is NaN, since x * 0 is also NaN. Nor are they the
11157 same in modes with signed zeros, since multiplying a
11158 negative value by 0 gives -0, not +0. */
11159 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11160 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11161 && real_zerop (arg1))
11162 return omit_one_operand_loc (loc, type, arg1, arg0);
11163 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11164 Likewise for complex arithmetic with signed zeros. */
11165 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11166 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11167 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11168 && real_onep (arg1))
11169 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11171 /* Transform x * -1.0 into -x. */
11172 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11173 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11174 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11175 && real_minus_onep (arg1))
11176 return fold_convert_loc (loc, type, negate_expr (arg0));
11178 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11179 the result for floating-point types due to rounding, so it is applied
11180 only if -fassociative-math was specified. */
11181 if (flag_associative_math
11182 && TREE_CODE (arg0) == RDIV_EXPR
11183 && TREE_CODE (arg1) == REAL_CST
11184 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11186 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11187 arg1);
11188 if (tem)
11189 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11190 TREE_OPERAND (arg0, 1));
11193 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11194 if (operand_equal_p (arg0, arg1, 0))
11196 tree tem = fold_strip_sign_ops (arg0);
11197 if (tem != NULL_TREE)
11199 tem = fold_convert_loc (loc, type, tem);
11200 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11204 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11205 This is not the same for NaNs or if signed zeros are
11206 involved. */
11207 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11208 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11209 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11210 && TREE_CODE (arg1) == COMPLEX_CST
11211 && real_zerop (TREE_REALPART (arg1)))
11213 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11214 if (real_onep (TREE_IMAGPART (arg1)))
11215 return
11216 fold_build2_loc (loc, COMPLEX_EXPR, type,
11217 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11218 rtype, arg0)),
11219 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11220 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11221 return
11222 fold_build2_loc (loc, COMPLEX_EXPR, type,
11223 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11224 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11225 rtype, arg0)));
11228 /* Optimize z * conj(z) for floating point complex numbers.
11229 Guarded by flag_unsafe_math_optimizations as non-finite
11230 imaginary components don't produce scalar results. */
11231 if (flag_unsafe_math_optimizations
11232 && TREE_CODE (arg0) == CONJ_EXPR
11233 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11234 return fold_mult_zconjz (loc, type, arg1);
11235 if (flag_unsafe_math_optimizations
11236 && TREE_CODE (arg1) == CONJ_EXPR
11237 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11238 return fold_mult_zconjz (loc, type, arg0);
11240 if (flag_unsafe_math_optimizations)
11242 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11243 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11245 /* Optimizations of root(...)*root(...). */
11246 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11248 tree rootfn, arg;
11249 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11250 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11252 /* Optimize sqrt(x)*sqrt(x) as x. */
11253 if (BUILTIN_SQRT_P (fcode0)
11254 && operand_equal_p (arg00, arg10, 0)
11255 && ! HONOR_SNANS (TYPE_MODE (type)))
11256 return arg00;
11258 /* Optimize root(x)*root(y) as root(x*y). */
11259 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11260 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11261 return build_call_expr_loc (loc, rootfn, 1, arg);
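/* Illustratively, under -funsafe-math-optimizations this turns
   sqrt (x) * sqrt (y) into sqrt (x * y), and sqrt (x) * sqrt (x)
   directly into x when SNaNs need not be honored (operands x and y
   are assumed names).  */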
11264 /* Optimize expN(x)*expN(y) as expN(x+y). */
11265 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11267 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11268 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11269 CALL_EXPR_ARG (arg0, 0),
11270 CALL_EXPR_ARG (arg1, 0));
11271 return build_call_expr_loc (loc, expfn, 1, arg);
11274 /* Optimizations of pow(...)*pow(...). */
11275 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11276 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11277 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11279 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11280 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11281 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11282 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11284 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11285 if (operand_equal_p (arg01, arg11, 0))
11287 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11288 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11289 arg00, arg10);
11290 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11293 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11294 if (operand_equal_p (arg00, arg10, 0))
11296 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11297 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11298 arg01, arg11);
11299 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11303 /* Optimize tan(x)*cos(x) as sin(x). */
11304 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11305 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11306 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11307 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11308 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11309 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11310 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11311 CALL_EXPR_ARG (arg1, 0), 0))
11313 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11315 if (sinfn != NULL_TREE)
11316 return build_call_expr_loc (loc, sinfn, 1,
11317 CALL_EXPR_ARG (arg0, 0));
11320 /* Optimize x*pow(x,c) as pow(x,c+1). */
11321 if (fcode1 == BUILT_IN_POW
11322 || fcode1 == BUILT_IN_POWF
11323 || fcode1 == BUILT_IN_POWL)
11325 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11326 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11327 if (TREE_CODE (arg11) == REAL_CST
11328 && !TREE_OVERFLOW (arg11)
11329 && operand_equal_p (arg0, arg10, 0))
11331 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11332 REAL_VALUE_TYPE c;
11333 tree arg;
11335 c = TREE_REAL_CST (arg11);
11336 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11337 arg = build_real (type, c);
11338 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11342 /* Optimize pow(x,c)*x as pow(x,c+1). */
11343 if (fcode0 == BUILT_IN_POW
11344 || fcode0 == BUILT_IN_POWF
11345 || fcode0 == BUILT_IN_POWL)
11347 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11348 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11349 if (TREE_CODE (arg01) == REAL_CST
11350 && !TREE_OVERFLOW (arg01)
11351 && operand_equal_p (arg1, arg00, 0))
11353 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11354 REAL_VALUE_TYPE c;
11355 tree arg;
11357 c = TREE_REAL_CST (arg01);
11358 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11359 arg = build_real (type, c);
11360 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11364 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11365 if (!in_gimple_form
11366 && optimize
11367 && operand_equal_p (arg0, arg1, 0))
11369 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11371 if (powfn)
11373 tree arg = build_real (type, dconst2);
11374 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11379 goto associate;
11381 case BIT_IOR_EXPR:
11382 bit_ior:
11383 if (integer_all_onesp (arg1))
11384 return omit_one_operand_loc (loc, type, arg1, arg0);
11385 if (integer_zerop (arg1))
11386 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11387 if (operand_equal_p (arg0, arg1, 0))
11388 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11390 /* ~X | X is -1. */
11391 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11392 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11394 t1 = build_zero_cst (type);
11395 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11396 return omit_one_operand_loc (loc, type, t1, arg1);
11399 /* X | ~X is -1. */
11400 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11403 t1 = build_zero_cst (type);
11404 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11405 return omit_one_operand_loc (loc, type, t1, arg0);
11408 /* Canonicalize (X & C1) | C2. */
11409 if (TREE_CODE (arg0) == BIT_AND_EXPR
11410 && TREE_CODE (arg1) == INTEGER_CST
11411 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11413 double_int c1, c2, c3, msk;
11414 int width = TYPE_PRECISION (type), w;
11415 bool try_simplify = true;
11417 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11418 c2 = tree_to_double_int (arg1);
11420 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11421 if ((c1 & c2) == c1)
11422 return omit_one_operand_loc (loc, type, arg1,
11423 TREE_OPERAND (arg0, 0));
11425 msk = double_int::mask (width);
11427 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11428 if (msk.and_not (c1 | c2).is_zero ())
11429 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11430 TREE_OPERAND (arg0, 0), arg1);
11432 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11433 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11434 mode which allows further optimizations. */
11435 c1 &= msk;
11436 c2 &= msk;
11437 c3 = c1.and_not (c2);
11438 for (w = BITS_PER_UNIT;
11439 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11440 w <<= 1)
11442 unsigned HOST_WIDE_INT mask
11443 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11444 if (((c1.low | c2.low) & mask) == mask
11445 && (c1.low & ~mask) == 0 && c1.high == 0)
11447 c3 = double_int::from_uhwi (mask);
11448 break;
11452 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11453 with the (X * CST1) & CST2 optimization from the BIT_AND_EXPR case
11454 and could end up in an infinite recursion. */
11455 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11456 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11457 == INTEGER_CST)
11459 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11460 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11462 try_simplify = (masked != c1);
11465 if (try_simplify && c3 != c1)
11466 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11467 fold_build2_loc (loc, BIT_AND_EXPR, type,
11468 TREE_OPERAND (arg0, 0),
11469 double_int_to_tree (type,
11470 c3)),
11471 arg1);
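/* Two worked instances with assumed masks: (x & 0x03) | 0x0F becomes
   just 0x0F (modulo x's side effects), since (0x03 & 0x0F) == 0x03;
   and (x & 0x30) | 0x10 shrinks C1 to C1 & ~C2, giving
   (x & 0x20) | 0x10.  */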
11474 /* (X & Y) | Y is (X, Y). */
11475 if (TREE_CODE (arg0) == BIT_AND_EXPR
11476 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11477 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11478 /* (X & Y) | X is (Y, X). */
11479 if (TREE_CODE (arg0) == BIT_AND_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11481 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11482 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11483 /* X | (X & Y) is (Y, X). */
11484 if (TREE_CODE (arg1) == BIT_AND_EXPR
11485 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11486 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11487 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11488 /* X | (Y & X) is (Y, X). */
11489 if (TREE_CODE (arg1) == BIT_AND_EXPR
11490 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11491 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11492 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11494 /* (X & ~Y) | (~X & Y) is X ^ Y */
11495 if (TREE_CODE (arg0) == BIT_AND_EXPR
11496 && TREE_CODE (arg1) == BIT_AND_EXPR)
11498 tree a0, a1, l0, l1, n0, n1;
11500 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11501 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11503 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11504 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11506 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11507 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11509 if ((operand_equal_p (n0, a0, 0)
11510 && operand_equal_p (n1, a1, 0))
11511 || (operand_equal_p (n0, a1, 0)
11512 && operand_equal_p (n1, a0, 0)))
11513 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
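/* E.g. (a & ~b) | (~a & b) is recognized here and folded to a ^ b
   (operand names illustrative); the same holds with the two AND
   operands swapped.  */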
11516 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11517 if (t1 != NULL_TREE)
11518 return t1;
11520 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11522 This results in more efficient code for machines without a NAND
11523 instruction. Combine will canonicalize to the first form
11524 which will allow use of NAND instructions provided by the
11525 backend if they exist. */
11526 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11527 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11529 return
11530 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11531 build2 (BIT_AND_EXPR, type,
11532 fold_convert_loc (loc, type,
11533 TREE_OPERAND (arg0, 0)),
11534 fold_convert_loc (loc, type,
11535 TREE_OPERAND (arg1, 0))));
11538 /* See if this can be simplified into a rotate first. If that
11539 is unsuccessful continue in the association code. */
11540 goto bit_rotate;
11542 case BIT_XOR_EXPR:
11543 if (integer_zerop (arg1))
11544 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11545 if (integer_all_onesp (arg1))
11546 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11547 if (operand_equal_p (arg0, arg1, 0))
11548 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11550 /* ~X ^ X is -1. */
11551 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11552 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11554 t1 = build_zero_cst (type);
11555 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11556 return omit_one_operand_loc (loc, type, t1, arg1);
11559 /* X ^ ~X is -1. */
11560 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11561 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11563 t1 = build_zero_cst (type);
11564 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11565 return omit_one_operand_loc (loc, type, t1, arg0);
11568 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11569 with a constant, and the two constants have no bits in common,
11570 we should treat this as a BIT_IOR_EXPR since this may produce more
11571 simplifications. */
11572 if (TREE_CODE (arg0) == BIT_AND_EXPR
11573 && TREE_CODE (arg1) == BIT_AND_EXPR
11574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11575 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11576 && integer_zerop (const_binop (BIT_AND_EXPR,
11577 TREE_OPERAND (arg0, 1),
11578 TREE_OPERAND (arg1, 1))))
11580 code = BIT_IOR_EXPR;
11581 goto bit_ior;
11584 /* (X | Y) ^ X -> Y & ~X */
11585 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11586 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11588 tree t2 = TREE_OPERAND (arg0, 1);
11589 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11590 arg1);
11591 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11592 fold_convert_loc (loc, type, t2),
11593 fold_convert_loc (loc, type, t1));
11594 return t1;
11597 /* (Y | X) ^ X -> Y & ~X */
11598 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11599 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11601 tree t2 = TREE_OPERAND (arg0, 0);
11602 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11603 arg1);
11604 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11605 fold_convert_loc (loc, type, t2),
11606 fold_convert_loc (loc, type, t1));
11607 return t1;
11610 /* X ^ (X | Y) -> Y & ~X */
11611 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11612 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11614 tree t2 = TREE_OPERAND (arg1, 1);
11615 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11616 arg0);
11617 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11618 fold_convert_loc (loc, type, t2),
11619 fold_convert_loc (loc, type, t1));
11620 return t1;
11623 /* X ^ (Y | X) -> Y & ~X */
11624 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11625 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11627 tree t2 = TREE_OPERAND (arg1, 0);
11628 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11629 arg0);
11630 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11631 fold_convert_loc (loc, type, t2),
11632 fold_convert_loc (loc, type, t1));
11633 return t1;
11636 /* Convert ~X ^ ~Y to X ^ Y. */
11637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11638 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11639 return fold_build2_loc (loc, code, type,
11640 fold_convert_loc (loc, type,
11641 TREE_OPERAND (arg0, 0)),
11642 fold_convert_loc (loc, type,
11643 TREE_OPERAND (arg1, 0)));
11645 /* Convert ~X ^ C to X ^ ~C. */
11646 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11647 && TREE_CODE (arg1) == INTEGER_CST)
11648 return fold_build2_loc (loc, code, type,
11649 fold_convert_loc (loc, type,
11650 TREE_OPERAND (arg0, 0)),
11651 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11653 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11654 if (TREE_CODE (arg0) == BIT_AND_EXPR
11655 && integer_onep (TREE_OPERAND (arg0, 1))
11656 && integer_onep (arg1))
11657 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11658 build_zero_cst (TREE_TYPE (arg0)));
11660 /* Fold (X & Y) ^ Y as ~X & Y. */
11661 if (TREE_CODE (arg0) == BIT_AND_EXPR
11662 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11664 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11665 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11666 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11667 fold_convert_loc (loc, type, arg1));
11669 /* Fold (X & Y) ^ X as ~Y & X. */
11670 if (TREE_CODE (arg0) == BIT_AND_EXPR
11671 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11672 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11674 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11675 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11676 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11677 fold_convert_loc (loc, type, arg1));
11679 /* Fold X ^ (X & Y) as X & ~Y. */
11680 if (TREE_CODE (arg1) == BIT_AND_EXPR
11681 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11683 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11684 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11685 fold_convert_loc (loc, type, arg0),
11686 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11688 /* Fold X ^ (Y & X) as ~Y & X. */
11689 if (TREE_CODE (arg1) == BIT_AND_EXPR
11690 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11691 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11693 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11694 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11695 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11696 fold_convert_loc (loc, type, arg0));
11699 /* See if this can be simplified into a rotate first. If that
11700 is unsuccessful continue in the association code. */
11701 goto bit_rotate;
11703 case BIT_AND_EXPR:
11704 if (integer_all_onesp (arg1))
11705 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11706 if (integer_zerop (arg1))
11707 return omit_one_operand_loc (loc, type, arg1, arg0);
11708 if (operand_equal_p (arg0, arg1, 0))
11709 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11711 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11712 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11713 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11714 || (TREE_CODE (arg0) == EQ_EXPR
11715 && integer_zerop (TREE_OPERAND (arg0, 1))))
11716 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11717 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11719 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11720 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11721 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11722 || (TREE_CODE (arg1) == EQ_EXPR
11723 && integer_zerop (TREE_OPERAND (arg1, 1))))
11724 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11725 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11727 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11728 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11729 && TREE_CODE (arg1) == INTEGER_CST
11730 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11732 tree tmp1 = fold_convert_loc (loc, type, arg1);
11733 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11734 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11735 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11736 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11737 return
11738 fold_convert_loc (loc, type,
11739 fold_build2_loc (loc, BIT_IOR_EXPR,
11740 type, tmp2, tmp3));
11743 /* (X | Y) & Y is (X, Y). */
11744 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11745 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11746 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11747 /* (X | Y) & X is (Y, X). */
11748 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11749 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11750 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11751 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11752 /* X & (X | Y) is (Y, X). */
11753 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11754 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11755 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11756 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11757 /* X & (Y | X) is (Y, X). */
11758 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11759 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11760 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11761 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11763 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11764 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11765 && integer_onep (TREE_OPERAND (arg0, 1))
11766 && integer_onep (arg1))
11768 tree tem2;
11769 tem = TREE_OPERAND (arg0, 0);
11770 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11771 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11772 tem, tem2);
11773 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11774 build_zero_cst (TREE_TYPE (tem)));
11776 /* Fold ~X & 1 as (X & 1) == 0. */
11777 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11778 && integer_onep (arg1))
11780 tree tem2;
11781 tem = TREE_OPERAND (arg0, 0);
11782 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11783 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11784 tem, tem2);
11785 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11786 build_zero_cst (TREE_TYPE (tem)));
11788 /* Fold !X & 1 as X == 0. */
11789 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11790 && integer_onep (arg1))
11792 tem = TREE_OPERAND (arg0, 0);
11793 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11794 build_zero_cst (TREE_TYPE (tem)));
11797 /* Fold (X ^ Y) & Y as ~X & Y. */
11798 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11799 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11801 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11802 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11803 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11804 fold_convert_loc (loc, type, arg1));
11806 /* Fold (X ^ Y) & X as ~Y & X. */
11807 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11809 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11811 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11812 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11813 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11814 fold_convert_loc (loc, type, arg1));
11816 /* Fold X & (X ^ Y) as X & ~Y. */
11817 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11820 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11821 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11822 fold_convert_loc (loc, type, arg0),
11823 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11825 /* Fold X & (Y ^ X) as ~Y & X. */
11826 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11827 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11828 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11830 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11831 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11832 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11833 fold_convert_loc (loc, type, arg0));
11836 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11837 multiple of 1 << CST. */
11838 if (TREE_CODE (arg1) == INTEGER_CST)
11840 double_int cst1 = tree_to_double_int (arg1);
11841 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11842 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11843 if ((cst1 & ncst1) == ncst1
11844 && multiple_of_p (type, arg0,
11845 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11846 return fold_convert_loc (loc, type, arg0);
11849 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11850 bits from CST2. */
11851 if (TREE_CODE (arg1) == INTEGER_CST
11852 && TREE_CODE (arg0) == MULT_EXPR
11853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11855 double_int masked
11856 = mask_with_tz (type, tree_to_double_int (arg1),
11857 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11859 if (masked.is_zero ())
11860 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11861 arg0, arg1);
11862 else if (masked != tree_to_double_int (arg1))
11863 return fold_build2_loc (loc, code, type, op0,
11864 double_int_to_tree (type, masked));
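/* Concretely (constants assumed): (x * 4) & 3 folds to 0, because
   x * 4 always has its two low bits clear; and (x * 4) & 7 drops the
   known-zero mask bits, folding to (x * 4) & 4.  */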
11867 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11868 ((A & N) + B) & M -> (A + B) & M
11869 Similarly if (N & M) == 0,
11870 ((A | N) + B) & M -> (A + B) & M
11871 and for - instead of + (or unary - instead of +)
11872 and/or ^ instead of |.
11873 If B is constant and (B & M) == 0, fold into A & M. */
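/* A concrete instance with assumed masks: for M == 0xFF and N == 0xFF,
   ((a & 0xFF) + b) & 0xFF folds to (a + b) & 0xFF, since bits above
   the low byte cannot affect the masked result.  */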
11874 if (tree_fits_uhwi_p (arg1))
11876 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11877 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11878 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11879 && (TREE_CODE (arg0) == PLUS_EXPR
11880 || TREE_CODE (arg0) == MINUS_EXPR
11881 || TREE_CODE (arg0) == NEGATE_EXPR)
11882 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11883 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11885 tree pmop[2];
11886 int which = 0;
11887 unsigned HOST_WIDE_INT cst0;
11889 /* Now we know that arg0 is (C + D), (C - D) or -C,
11890 and arg1 (M) is (1LL << cst) - 1.
11891 Store C into PMOP[0] and D into PMOP[1]. */
11892 pmop[0] = TREE_OPERAND (arg0, 0);
11893 pmop[1] = NULL;
11894 if (TREE_CODE (arg0) != NEGATE_EXPR)
11896 pmop[1] = TREE_OPERAND (arg0, 1);
11897 which = 1;
11900 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11901 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11902 & cst1) != cst1)
11903 which = -1;
11905 for (; which >= 0; which--)
11906 switch (TREE_CODE (pmop[which]))
11908 case BIT_AND_EXPR:
11909 case BIT_IOR_EXPR:
11910 case BIT_XOR_EXPR:
11911 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11912 != INTEGER_CST)
11913 break;
11914 /* tree_to_[su]hwi not used, because we don't care about
11915 the upper bits. */
11916 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11917 cst0 &= cst1;
11918 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11920 if (cst0 != cst1)
11921 break;
11923 else if (cst0 != 0)
11924 break;
11925 /* If C or D is of the form (A & N) where
11926 (N & M) == M, or of the form (A | N) or
11927 (A ^ N) where (N & M) == 0, replace it with A. */
11928 pmop[which] = TREE_OPERAND (pmop[which], 0);
11929 break;
11930 case INTEGER_CST:
11931 /* If C or D is a constant N where (N & M) == 0, it can be
11932 omitted (assumed 0). */
11933 if ((TREE_CODE (arg0) == PLUS_EXPR
11934 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11935 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11936 pmop[which] = NULL;
11937 break;
11938 default:
11939 break;
11942 /* Only build anything new if we optimized one or both arguments
11943 above. */
11944 if (pmop[0] != TREE_OPERAND (arg0, 0)
11945 || (TREE_CODE (arg0) != NEGATE_EXPR
11946 && pmop[1] != TREE_OPERAND (arg0, 1)))
11948 tree utype = TREE_TYPE (arg0);
11949 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11951 /* Perform the operations in a type that has defined
11952 overflow behavior. */
11953 utype = unsigned_type_for (TREE_TYPE (arg0));
11954 if (pmop[0] != NULL)
11955 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11956 if (pmop[1] != NULL)
11957 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11960 if (TREE_CODE (arg0) == NEGATE_EXPR)
11961 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11962 else if (TREE_CODE (arg0) == PLUS_EXPR)
11964 if (pmop[0] != NULL && pmop[1] != NULL)
11965 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11966 pmop[0], pmop[1]);
11967 else if (pmop[0] != NULL)
11968 tem = pmop[0];
11969 else if (pmop[1] != NULL)
11970 tem = pmop[1];
11971 else
11972 return build_int_cst (type, 0);
11974 else if (pmop[0] == NULL)
11975 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11976 else
11977 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11978 pmop[0], pmop[1]);
11979 /* TEM is now the new binary +, - or unary - replacement. */
11980 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11981 fold_convert_loc (loc, utype, arg1));
11982 return fold_convert_loc (loc, type, tem);
11987 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11988 if (t1 != NULL_TREE)
11989 return t1;
11990 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11991 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11992 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11994 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11996 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11997 && (~TREE_INT_CST_LOW (arg1)
11998 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11999 return
12000 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12003 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12005 This results in more efficient code for machines without a NOR
12006 instruction. Combine will canonicalize to the first form
12007 which will allow use of NOR instructions provided by the
12008 backend if they exist. */
12009 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12010 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12012 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12013 build2 (BIT_IOR_EXPR, type,
12014 fold_convert_loc (loc, type,
12015 TREE_OPERAND (arg0, 0)),
12016 fold_convert_loc (loc, type,
12017 TREE_OPERAND (arg1, 0))));
12020 /* If arg0 is derived from the address of an object or function, we may
12021 be able to fold this expression using the object or function's
12022 alignment. */
12023 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12025 unsigned HOST_WIDE_INT modulus, residue;
12026 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12028 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12029 integer_onep (arg1));
12031 /* This works because modulus is a power of 2. If this weren't the
12032 case, we'd have to replace it by its greatest power-of-2
12033 divisor: modulus & -modulus. */
12034 if (low < modulus)
12035 return build_int_cst (type, residue & low);
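/* An illustrative sketch (alignment assumed): if arg0 is the address
   of an object with 8-byte alignment, modulus is 8 and residue 0, so
   masking that address with 7 folds to the constant 0 here because
   low (7) < modulus (8).  */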
12038 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12039 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12040 if the new mask might be further optimized. */
12041 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12042 || TREE_CODE (arg0) == RSHIFT_EXPR)
12043 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12044 && TREE_CODE (arg1) == INTEGER_CST
12045 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12046 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12047 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12048 < TYPE_PRECISION (TREE_TYPE (arg0))))
12050 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12051 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12052 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12053 tree shift_type = TREE_TYPE (arg0);
12055 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12056 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12057 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12058 && TYPE_PRECISION (TREE_TYPE (arg0))
12059 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12061 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12062 tree arg00 = TREE_OPERAND (arg0, 0);
12063 /* See if more bits can be proven as zero because of
12064 zero extension. */
12065 if (TREE_CODE (arg00) == NOP_EXPR
12066 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12068 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12069 if (TYPE_PRECISION (inner_type)
12070 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12071 && TYPE_PRECISION (inner_type) < prec)
12073 prec = TYPE_PRECISION (inner_type);
12074 /* See if we can shorten the right shift. */
12075 if (shiftc < prec)
12076 shift_type = inner_type;
12079 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12080 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12081 zerobits <<= prec - shiftc;
12082 /* For an arithmetic shift, if the sign bit could be set, zerobits
12083 can actually contain sign bits, so no transformation is
12084 possible unless MASK masks them all away. In that
12085 case the shift needs to be converted into a logical shift. */
12086 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12087 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12089 if ((mask & zerobits) == 0)
12090 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12091 else
12092 zerobits = 0;
12096 /* ((X << 16) & 0xff00) is (X, 0). */
12097 if ((mask & zerobits) == mask)
12098 return omit_one_operand_loc (loc, type,
12099 build_int_cst (type, 0), arg0);
12101 newmask = mask | zerobits;
12102 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12104 /* Only do the transformation if NEWMASK is some integer
12105 mode's mask. */
12106 for (prec = BITS_PER_UNIT;
12107 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12108 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12109 break;
12110 if (prec < HOST_BITS_PER_WIDE_INT
12111 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12113 tree newmaskt;
12115 if (shift_type != TREE_TYPE (arg0))
12117 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12118 fold_convert_loc (loc, shift_type,
12119 TREE_OPERAND (arg0, 0)),
12120 TREE_OPERAND (arg0, 1));
12121 tem = fold_convert_loc (loc, type, tem);
12123 else
12124 tem = op0;
12125 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12126 if (!tree_int_cst_equal (newmaskt, arg1))
12127 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12132 goto associate;
12134 case RDIV_EXPR:
12135 /* Don't touch a floating-point divide by zero unless the mode
12136 of the constant can represent infinity. */
12137 if (TREE_CODE (arg1) == REAL_CST
12138 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12139 && real_zerop (arg1))
12140 return NULL_TREE;
12142 /* Optimize A / A to 1.0 if we don't care about
12143 NaNs or Infinities. Skip the transformation
12144 for non-real operands. */
12145 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12146 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12147 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12148 && operand_equal_p (arg0, arg1, 0))
12150 tree r = build_real (TREE_TYPE (arg0), dconst1);
12152 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12155 /* The complex version of the above A / A optimization. */
12156 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12157 && operand_equal_p (arg0, arg1, 0))
12159 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12160 if (! HONOR_NANS (TYPE_MODE (elem_type))
12161 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12163 tree r = build_real (elem_type, dconst1);
12164 /* omit_two_operands will call fold_convert for us. */
12165 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12169 /* (-A) / (-B) -> A / B */
12170 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12171 return fold_build2_loc (loc, RDIV_EXPR, type,
12172 TREE_OPERAND (arg0, 0),
12173 negate_expr (arg1));
12174 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12175 return fold_build2_loc (loc, RDIV_EXPR, type,
12176 negate_expr (arg0),
12177 TREE_OPERAND (arg1, 0));
12179 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12180 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12181 && real_onep (arg1))
12182 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12184 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12185 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12186 && real_minus_onep (arg1))
12187 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12188 negate_expr (arg0)));
12190 /* If ARG1 is a constant, we can convert this to a multiply by the
12191 reciprocal. This does not have the same rounding properties,
12192 so only do this if -freciprocal-math. We can actually
12193 always safely do it if ARG1 is a power of two, but it's hard to
12194 tell if it is or not in a portable manner. */
12195 if (optimize
12196 && (TREE_CODE (arg1) == REAL_CST
12197 || (TREE_CODE (arg1) == COMPLEX_CST
12198 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12199 || (TREE_CODE (arg1) == VECTOR_CST
12200 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12202 if (flag_reciprocal_math
12203 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12204 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12205 /* Find the reciprocal if optimizing and the result is exact.
12206 TODO: Complex reciprocal not implemented. */
12207 if (TREE_CODE (arg1) != COMPLEX_CST)
12209 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12211 if (inverse)
12212 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
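/* For example, under -freciprocal-math x / 5.0 becomes x * 0.2;
   independently of that flag, x / 2.0 becomes x * 0.5 via
   exact_inverse, since the reciprocal of a power of two is exactly
   representable.  */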
12215 /* Convert A/B/C to A/(B*C). */
12216 if (flag_reciprocal_math
12217 && TREE_CODE (arg0) == RDIV_EXPR)
12218 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12219 fold_build2_loc (loc, MULT_EXPR, type,
12220 TREE_OPERAND (arg0, 1), arg1));
12222 /* Convert A/(B/C) to (A/B)*C. */
12223 if (flag_reciprocal_math
12224 && TREE_CODE (arg1) == RDIV_EXPR)
12225 return fold_build2_loc (loc, MULT_EXPR, type,
12226 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12227 TREE_OPERAND (arg1, 0)),
12228 TREE_OPERAND (arg1, 1));
12230 /* Convert C1/(X*C2) into (C1/C2)/X. */
12231 if (flag_reciprocal_math
12232 && TREE_CODE (arg1) == MULT_EXPR
12233 && TREE_CODE (arg0) == REAL_CST
12234 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12236 tree tem = const_binop (RDIV_EXPR, arg0,
12237 TREE_OPERAND (arg1, 1));
12238 if (tem)
12239 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12240 TREE_OPERAND (arg1, 0));
12243 if (flag_unsafe_math_optimizations)
12245 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12246 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12248 /* Optimize sin(x)/cos(x) as tan(x). */
12249 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12250 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12251 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12252 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12253 CALL_EXPR_ARG (arg1, 0), 0))
12255 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12257 if (tanfn != NULL_TREE)
12258 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12261 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12262 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12263 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12264 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12265 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12266 CALL_EXPR_ARG (arg1, 0), 0))
12268 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12270 if (tanfn != NULL_TREE)
12272 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12273 CALL_EXPR_ARG (arg0, 0));
12274 return fold_build2_loc (loc, RDIV_EXPR, type,
12275 build_real (type, dconst1), tmp);
12279 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12280 NaNs or Infinities. */
12281 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12282 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12283 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12285 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12286 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12288 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12289 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12290 && operand_equal_p (arg00, arg01, 0))
12292 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12294 if (cosfn != NULL_TREE)
12295 return build_call_expr_loc (loc, cosfn, 1, arg00);
12299 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12300 NaNs or Infinities. */
12301 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12302 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12303 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12305 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12306 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12308 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12309 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12310 && operand_equal_p (arg00, arg01, 0))
12312 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12314 if (cosfn != NULL_TREE)
12316 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12317 return fold_build2_loc (loc, RDIV_EXPR, type,
12318 build_real (type, dconst1),
12319 tmp);
12324 /* Optimize pow(x,c)/x as pow(x,c-1). */
12325 if (fcode0 == BUILT_IN_POW
12326 || fcode0 == BUILT_IN_POWF
12327 || fcode0 == BUILT_IN_POWL)
12329 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12330 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12331 if (TREE_CODE (arg01) == REAL_CST
12332 && !TREE_OVERFLOW (arg01)
12333 && operand_equal_p (arg1, arg00, 0))
12335 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12336 REAL_VALUE_TYPE c;
12337 tree arg;
12339 c = TREE_REAL_CST (arg01);
12340 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12341 arg = build_real (type, c);
12342 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
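/* E.g. "pow (x, 3.5) / x" becomes "pow (x, 2.5)".  */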
12346 /* Optimize a/root(b/c) into a*root(c/b). */
12347 if (BUILTIN_ROOT_P (fcode1))
12349 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12351 if (TREE_CODE (rootarg) == RDIV_EXPR)
12353 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12354 tree b = TREE_OPERAND (rootarg, 0);
12355 tree c = TREE_OPERAND (rootarg, 1);
12357 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12359 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12360 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12364 /* Optimize x/expN(y) into x*expN(-y). */
12365 if (BUILTIN_EXPONENT_P (fcode1))
12367 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12368 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12369 arg1 = build_call_expr_loc (loc,
12370 expfn, 1,
12371 fold_convert_loc (loc, type, arg));
12372 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
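/* E.g. "x / exp (y)" becomes "x * exp (-y)", replacing the division
   with a multiplication.  */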
12375 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12376 if (fcode1 == BUILT_IN_POW
12377 || fcode1 == BUILT_IN_POWF
12378 || fcode1 == BUILT_IN_POWL)
12380 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12381 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12382 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12383 tree neg11 = fold_convert_loc (loc, type,
12384 negate_expr (arg11));
12385 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12386 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12389 return NULL_TREE;
12391 case TRUNC_DIV_EXPR:
12392 /* Optimize (X & (-A)) / A where A is a power of 2,
12393 to X >> log2(A). */
12394 if (TREE_CODE (arg0) == BIT_AND_EXPR
12395 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12396 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12398 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12399 arg1, TREE_OPERAND (arg0, 1));
12400 if (sum && integer_zerop (sum)) {
12401 unsigned long pow2;
12403 if (TREE_INT_CST_LOW (arg1))
12404 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12405 else
12406 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12407 + HOST_BITS_PER_WIDE_INT;
12409 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12410 TREE_OPERAND (arg0, 0),
12411 build_int_cst (integer_type_node, pow2));
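/* E.g. "(x & -16) / 16" becomes "x >> 4": the BIT_AND clears the low
   four bits, so the division is exact and equals the arithmetic
   shift even for negative x.  */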
12415 /* Fall through */
12417 case FLOOR_DIV_EXPR:
12418 /* Simplify A / (B << N) where A and B are positive and B is
12419 a power of 2, to A >> (N + log2(B)). */
12420 strict_overflow_p = false;
12421 if (TREE_CODE (arg1) == LSHIFT_EXPR
12422 && (TYPE_UNSIGNED (type)
12423 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12425 tree sval = TREE_OPERAND (arg1, 0);
12426 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12428 tree sh_cnt = TREE_OPERAND (arg1, 1);
12429 unsigned long pow2;
12431 if (TREE_INT_CST_LOW (sval))
12432 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12433 else
12434 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12435 + HOST_BITS_PER_WIDE_INT;
12437 if (strict_overflow_p)
12438 fold_overflow_warning (("assuming signed overflow does not "
12439 "occur when simplifying A / (B << N)"),
12440 WARN_STRICT_OVERFLOW_MISC);
12442 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12443 sh_cnt,
12444 build_int_cst (TREE_TYPE (sh_cnt),
12445 pow2));
12446 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12447 fold_convert_loc (loc, type, arg0), sh_cnt);
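/* E.g. for unsigned a, "a / (2 << n)" becomes "a >> (n + 1)".  */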
12451 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12452 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12453 if (INTEGRAL_TYPE_P (type)
12454 && TYPE_UNSIGNED (type)
12455 && code == FLOOR_DIV_EXPR)
12456 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12458 /* Fall through */
12460 case ROUND_DIV_EXPR:
12461 case CEIL_DIV_EXPR:
12462 case EXACT_DIV_EXPR:
12463 if (integer_onep (arg1))
12464 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12465 if (integer_zerop (arg1))
12466 return NULL_TREE;
12467 /* X / -1 is -X. */
12468 if (!TYPE_UNSIGNED (type)
12469 && TREE_CODE (arg1) == INTEGER_CST
12470 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12471 && TREE_INT_CST_HIGH (arg1) == -1)
12472 return fold_convert_loc (loc, type, negate_expr (arg0));
12474 /* Convert -A / -B to A / B when the type is signed and overflow is
12475 undefined. */
12476 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12477 && TREE_CODE (arg0) == NEGATE_EXPR
12478 && negate_expr_p (arg1))
12480 if (INTEGRAL_TYPE_P (type))
12481 fold_overflow_warning (("assuming signed overflow does not occur "
12482 "when distributing negation across "
12483 "division"),
12484 WARN_STRICT_OVERFLOW_MISC);
12485 return fold_build2_loc (loc, code, type,
12486 fold_convert_loc (loc, type,
12487 TREE_OPERAND (arg0, 0)),
12488 fold_convert_loc (loc, type,
12489 negate_expr (arg1)));
12491 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12492 && TREE_CODE (arg1) == NEGATE_EXPR
12493 && negate_expr_p (arg0))
12495 if (INTEGRAL_TYPE_P (type))
12496 fold_overflow_warning (("assuming signed overflow does not occur "
12497 "when distributing negation across "
12498 "division"),
12499 WARN_STRICT_OVERFLOW_MISC);
12500 return fold_build2_loc (loc, code, type,
12501 fold_convert_loc (loc, type,
12502 negate_expr (arg0)),
12503 fold_convert_loc (loc, type,
12504 TREE_OPERAND (arg1, 0)));
12507 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12508 operation, EXACT_DIV_EXPR.
12510 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12511 At one time others generated faster code, but it's not clear whether
12512 they still do after the last round of changes to the DIV code in expmed.c. */
12513 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12514 && multiple_of_p (type, arg0, arg1))
12515 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12517 strict_overflow_p = false;
12518 if (TREE_CODE (arg1) == INTEGER_CST
12519 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12520 &strict_overflow_p)))
12522 if (strict_overflow_p)
12523 fold_overflow_warning (("assuming signed overflow does not occur "
12524 "when simplifying division"),
12525 WARN_STRICT_OVERFLOW_MISC);
12526 return fold_convert_loc (loc, type, tem);
12529 return NULL_TREE;
12531 case CEIL_MOD_EXPR:
12532 case FLOOR_MOD_EXPR:
12533 case ROUND_MOD_EXPR:
12534 case TRUNC_MOD_EXPR:
12535 /* X % 1 is always zero, but be sure to preserve any side
12536 effects in X. */
12537 if (integer_onep (arg1))
12538 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12540 /* For X % 0, return X % 0 unchanged so that we can get the
12541 proper warnings and errors. */
12542 if (integer_zerop (arg1))
12543 return NULL_TREE;
12545 /* 0 % X is always zero, but be sure to preserve any side
12546 effects in X. Place this after checking for X == 0. */
12547 if (integer_zerop (arg0))
12548 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12550 /* X % -1 is zero. */
12551 if (!TYPE_UNSIGNED (type)
12552 && TREE_CODE (arg1) == INTEGER_CST
12553 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12554 && TREE_INT_CST_HIGH (arg1) == -1)
12555 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12557 /* X % -C is the same as X % C. */
12558 if (code == TRUNC_MOD_EXPR
12559 && !TYPE_UNSIGNED (type)
12560 && TREE_CODE (arg1) == INTEGER_CST
12561 && !TREE_OVERFLOW (arg1)
12562 && TREE_INT_CST_HIGH (arg1) < 0
12563 && !TYPE_OVERFLOW_TRAPS (type)
12564 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12565 && !sign_bit_p (arg1, arg1))
12566 return fold_build2_loc (loc, code, type,
12567 fold_convert_loc (loc, type, arg0),
12568 fold_convert_loc (loc, type,
12569 negate_expr (arg1)));
12571 /* X % -Y is the same as X % Y. */
12572 if (code == TRUNC_MOD_EXPR
12573 && !TYPE_UNSIGNED (type)
12574 && TREE_CODE (arg1) == NEGATE_EXPR
12575 && !TYPE_OVERFLOW_TRAPS (type))
12576 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12577 fold_convert_loc (loc, type,
12578 TREE_OPERAND (arg1, 0)));
12580 strict_overflow_p = false;
12581 if (TREE_CODE (arg1) == INTEGER_CST
12582 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12583 &strict_overflow_p)))
12585 if (strict_overflow_p)
12586 fold_overflow_warning (("assuming signed overflow does not occur "
12587 "when simplifying modulus"),
12588 WARN_STRICT_OVERFLOW_MISC);
12589 return fold_convert_loc (loc, type, tem);
12592 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12593 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12594 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12595 && (TYPE_UNSIGNED (type)
12596 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12598 tree c = arg1;
12599 /* Also optimize A % (C << N) where C is a power of 2,
12600 to A & ((C << N) - 1). */
12601 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12602 c = TREE_OPERAND (arg1, 0);
12604 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12606 tree mask
12607 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12608 build_int_cst (TREE_TYPE (arg1), 1));
12609 if (strict_overflow_p)
12610 fold_overflow_warning (("assuming signed overflow does not "
12611 "occur when simplifying "
12612 "X % (power of two)"),
12613 WARN_STRICT_OVERFLOW_MISC);
12614 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12615 fold_convert_loc (loc, type, arg0),
12616 fold_convert_loc (loc, type, mask));
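/* E.g. for unsigned x, "x % 16" becomes "x & 15" and "x % (4 << n)"
   becomes "x & ((4 << n) - 1)".  */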
12620 return NULL_TREE;
12622 case LROTATE_EXPR:
12623 case RROTATE_EXPR:
12624 if (integer_all_onesp (arg0))
12625 return omit_one_operand_loc (loc, type, arg0, arg1);
12626 goto shift;
12628 case RSHIFT_EXPR:
12629 /* Optimize -1 >> x for arithmetic right shifts. */
12630 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12631 && tree_expr_nonnegative_p (arg1))
12632 return omit_one_operand_loc (loc, type, arg0, arg1);
12633 /* ... fall through ... */
12635 case LSHIFT_EXPR:
12636 shift:
12637 if (integer_zerop (arg1))
12638 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12639 if (integer_zerop (arg0))
12640 return omit_one_operand_loc (loc, type, arg0, arg1);
12642 /* Prefer vector1 << scalar to vector1 << vector2
12643 if vector2 is uniform. */
12644 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12645 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12646 return fold_build2_loc (loc, code, type, op0, tem);
12648 /* Since a negative shift count is not well-defined,
12649 don't try to compute it in the compiler. */
12650 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12651 return NULL_TREE;
12653 prec = element_precision (type);
12655 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12656 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12657 && tree_to_uhwi (arg1) < prec
12658 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12659 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12661 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12662 + tree_to_uhwi (arg1));
12664 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12665 being well defined. */
12666 if (low >= prec)
12668 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12669 low = low % prec;
12670 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12671 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12672 TREE_OPERAND (arg0, 0));
12673 else
12674 low = prec - 1;
12677 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12678 build_int_cst (TREE_TYPE (arg1), low));
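/* E.g. "(x << 3) << 5" becomes "x << 8".  When the combined count
   reaches the precision, the result is 0 for logical and left
   shifts, the count is clamped to prec - 1 for arithmetic right
   shifts, and it is reduced modulo prec for rotates.  */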
12681 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12682 into x & ((unsigned)-1 >> c) for unsigned types. */
12683 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12684 || (TYPE_UNSIGNED (type)
12685 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12686 && tree_fits_uhwi_p (arg1)
12687 && tree_to_uhwi (arg1) < prec
12688 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12689 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12691 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12692 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12693 tree lshift;
12694 tree arg00;
12696 if (low0 == low1)
12698 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12700 lshift = build_minus_one_cst (type);
12701 lshift = const_binop (code, lshift, arg1);
12703 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
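/* E.g. in a 32-bit type, "(x >> 4) << 4" becomes "x & 0xfffffff0",
   and for unsigned x, "(x << 4) >> 4" becomes "x & 0x0fffffff".  */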
12707 /* Rewrite an LROTATE_EXPR by a constant into an
12708 RROTATE_EXPR by a new constant. */
12709 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12711 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12712 tem = const_binop (MINUS_EXPR, tem, arg1);
12713 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
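/* E.g. in a 32-bit type, a rotate left by 5 becomes a rotate right
   by 27.  */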
12716 /* If we have a rotate of a bit operation with the rotate count and
12717 the second operand of the bit operation both constant,
12718 permute the two operations. */
12719 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12720 && (TREE_CODE (arg0) == BIT_AND_EXPR
12721 || TREE_CODE (arg0) == BIT_IOR_EXPR
12722 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12723 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12724 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12725 fold_build2_loc (loc, code, type,
12726 TREE_OPERAND (arg0, 0), arg1),
12727 fold_build2_loc (loc, code, type,
12728 TREE_OPERAND (arg0, 1), arg1));
12730 /* Two consecutive rotates adding up to the precision of the
12731 type can be ignored. */
12732 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12733 && TREE_CODE (arg0) == RROTATE_EXPR
12734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12735 && TREE_INT_CST_HIGH (arg1) == 0
12736 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12737 && ((TREE_INT_CST_LOW (arg1)
12738 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12739 == prec))
12740 return TREE_OPERAND (arg0, 0);
12742 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12743 and (X & C2) >> C1 into (X >> C1) & (C2 >> C1),
12744 if the latter can be further optimized. */
12745 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12746 && TREE_CODE (arg0) == BIT_AND_EXPR
12747 && TREE_CODE (arg1) == INTEGER_CST
12748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12750 tree mask = fold_build2_loc (loc, code, type,
12751 fold_convert_loc (loc, type,
12752 TREE_OPERAND (arg0, 1)),
12753 arg1);
12754 tree shift = fold_build2_loc (loc, code, type,
12755 fold_convert_loc (loc, type,
12756 TREE_OPERAND (arg0, 0)),
12757 arg1);
12758 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12759 if (tem)
12760 return tem;
12763 return NULL_TREE;
12765 case MIN_EXPR:
12766 if (operand_equal_p (arg0, arg1, 0))
12767 return omit_one_operand_loc (loc, type, arg0, arg1);
12768 if (INTEGRAL_TYPE_P (type)
12769 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12770 return omit_one_operand_loc (loc, type, arg1, arg0);
12771 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12772 if (tem)
12773 return tem;
12774 goto associate;
12776 case MAX_EXPR:
12777 if (operand_equal_p (arg0, arg1, 0))
12778 return omit_one_operand_loc (loc, type, arg0, arg1);
12779 if (INTEGRAL_TYPE_P (type)
12780 && TYPE_MAX_VALUE (type)
12781 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12782 return omit_one_operand_loc (loc, type, arg1, arg0);
12783 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12784 if (tem)
12785 return tem;
12786 goto associate;
12788 case TRUTH_ANDIF_EXPR:
12789 /* Note that the operands of this must be ints
12790 and their values must be 0 or 1.
12791 ("true" is a fixed value perhaps depending on the language.) */
12792 /* If first arg is constant zero, return it. */
12793 if (integer_zerop (arg0))
12794 return fold_convert_loc (loc, type, arg0);
12795 case TRUTH_AND_EXPR:
12796 /* If either arg is constant true, drop it. */
12797 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12798 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12799 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12800 /* Preserve sequence points. */
12801 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12802 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12803 /* If second arg is constant zero, result is zero, but first arg
12804 must be evaluated. */
12805 if (integer_zerop (arg1))
12806 return omit_one_operand_loc (loc, type, arg1, arg0);
12807 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12808 case will be handled here. */
12809 if (integer_zerop (arg0))
12810 return omit_one_operand_loc (loc, type, arg0, arg1);
12812 /* !X && X is always false. */
12813 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12814 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12815 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12816 /* X && !X is always false. */
12817 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12819 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12821 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12822 means A >= Y && A != MAX, but in this case we know that
12823 A < X <= MAX. */
12825 if (!TREE_SIDE_EFFECTS (arg0)
12826 && !TREE_SIDE_EFFECTS (arg1))
12828 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12829 if (tem && !operand_equal_p (tem, arg0, 0))
12830 return fold_build2_loc (loc, code, type, tem, arg1);
12832 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12833 if (tem && !operand_equal_p (tem, arg1, 0))
12834 return fold_build2_loc (loc, code, type, arg0, tem);
12837 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12838 != NULL_TREE)
12839 return tem;
12841 return NULL_TREE;
12843 case TRUTH_ORIF_EXPR:
12844 /* Note that the operands of this must be ints
12845 and their values must be 0 or true.
12846 ("true" is a fixed value perhaps depending on the language.) */
12847 /* If first arg is constant true, return it. */
12848 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12849 return fold_convert_loc (loc, type, arg0);
12850 case TRUTH_OR_EXPR:
12851 /* If either arg is constant zero, drop it. */
12852 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12853 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12854 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12855 /* Preserve sequence points. */
12856 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12857 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12858 /* If second arg is constant true, result is true, but we must
12859 evaluate first arg. */
12860 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12861 return omit_one_operand_loc (loc, type, arg1, arg0);
12862 /* Likewise for first arg, but note this only occurs here for
12863 TRUTH_OR_EXPR. */
12864 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12865 return omit_one_operand_loc (loc, type, arg0, arg1);
12867 /* !X || X is always true. */
12868 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12869 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12870 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12871 /* X || !X is always true. */
12872 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12873 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12874 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12876 /* (X && !Y) || (!X && Y) is X ^ Y */
12877 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12878 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12880 tree a0, a1, l0, l1, n0, n1;
12882 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12883 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12885 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12886 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12888 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12889 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12891 if ((operand_equal_p (n0, a0, 0)
12892 && operand_equal_p (n1, a1, 0))
12893 || (operand_equal_p (n0, a1, 0)
12894 && operand_equal_p (n1, a0, 0)))
12895 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12898 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12899 != NULL_TREE)
12900 return tem;
12902 return NULL_TREE;
12904 case TRUTH_XOR_EXPR:
12905 /* If the second arg is constant zero, drop it. */
12906 if (integer_zerop (arg1))
12907 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12908 /* If the second arg is constant true, this is a logical inversion. */
12909 if (integer_onep (arg1))
12911 tem = invert_truthvalue_loc (loc, arg0);
12912 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12914 /* Identical arguments cancel to zero. */
12915 if (operand_equal_p (arg0, arg1, 0))
12916 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12918 /* !X ^ X is always true. */
12919 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12921 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12923 /* X ^ !X is always true. */
12924 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12925 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12926 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12928 return NULL_TREE;
12930 case EQ_EXPR:
12931 case NE_EXPR:
12932 STRIP_NOPS (arg0);
12933 STRIP_NOPS (arg1);
12935 tem = fold_comparison (loc, code, type, op0, op1);
12936 if (tem != NULL_TREE)
12937 return tem;
12939 /* bool_var != 0 becomes bool_var. */
12940 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12941 && code == NE_EXPR)
12942 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12944 /* bool_var == 1 becomes bool_var. */
12945 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12946 && code == EQ_EXPR)
12947 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12949 /* bool_var != 1 becomes !bool_var. */
12950 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12951 && code == NE_EXPR)
12952 return fold_convert_loc (loc, type,
12953 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12954 TREE_TYPE (arg0), arg0));
12956 /* bool_var == 0 becomes !bool_var. */
12957 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12958 && code == EQ_EXPR)
12959 return fold_convert_loc (loc, type,
12960 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12961 TREE_TYPE (arg0), arg0));
12963 /* !exp != 0 becomes !exp */
12964 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12965 && code == NE_EXPR)
12966 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12968 /* If this is an equality comparison of the address of two non-weak,
12969 unaliased symbols neither of which are extern (since we do not
12970 have access to attributes for externs), then we know the result. */
12971 if (TREE_CODE (arg0) == ADDR_EXPR
12972 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12973 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12974 && ! lookup_attribute ("alias",
12975 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12976 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12977 && TREE_CODE (arg1) == ADDR_EXPR
12978 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12979 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12980 && ! lookup_attribute ("alias",
12981 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12982 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12984 /* We know that we're looking at the address of two
12985 non-weak, unaliased, static _DECL nodes.
12987 It is both wasteful and incorrect to call operand_equal_p
12988 to compare the two ADDR_EXPR nodes. It is wasteful in that
12989 all we need to do is test pointer equality for the arguments
12990 to the two ADDR_EXPR nodes. It is incorrect to use
12991 operand_equal_p as that function is NOT equivalent to a
12992 C equality test. It can in fact return false for two
12993 objects which would test as equal using the C equality
12994 operator. */
12995 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12996 return constant_boolean_node (equal
12997 ? code == EQ_EXPR : code != EQ_EXPR,
12998 type);
13001 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13002 a MINUS_EXPR of a constant, we can convert it into a comparison with
13003 a revised constant as long as no overflow occurs. */
13004 if (TREE_CODE (arg1) == INTEGER_CST
13005 && (TREE_CODE (arg0) == PLUS_EXPR
13006 || TREE_CODE (arg0) == MINUS_EXPR)
13007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13008 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13009 ? MINUS_EXPR : PLUS_EXPR,
13010 fold_convert_loc (loc, TREE_TYPE (arg0),
13011 arg1),
13012 TREE_OPERAND (arg0, 1)))
13013 && !TREE_OVERFLOW (tem))
13014 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13016 /* Similarly for a NEGATE_EXPR. */
13017 if (TREE_CODE (arg0) == NEGATE_EXPR
13018 && TREE_CODE (arg1) == INTEGER_CST
13019 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13020 arg1)))
13021 && TREE_CODE (tem) == INTEGER_CST
13022 && !TREE_OVERFLOW (tem))
13023 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13025 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13026 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13027 && TREE_CODE (arg1) == INTEGER_CST
13028 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13029 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13030 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13031 fold_convert_loc (loc,
13032 TREE_TYPE (arg0),
13033 arg1),
13034 TREE_OPERAND (arg0, 1)));
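/* E.g. "(x ^ 5) == 3" becomes "x == 6", since 5 ^ 3 == 6.  */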
13036 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13037 if ((TREE_CODE (arg0) == PLUS_EXPR
13038 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13039 || TREE_CODE (arg0) == MINUS_EXPR)
13040 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13041 0)),
13042 arg1, 0)
13043 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13044 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13046 tree val = TREE_OPERAND (arg0, 1);
13047 return omit_two_operands_loc (loc, type,
13048 fold_build2_loc (loc, code, type,
13049 val,
13050 build_int_cst (TREE_TYPE (val),
13051 0)),
13052 TREE_OPERAND (arg0, 0), arg1);
13055 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13056 if (TREE_CODE (arg0) == MINUS_EXPR
13057 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13058 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13059 1)),
13060 arg1, 0)
13061 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13063 return omit_two_operands_loc (loc, type,
13064 code == NE_EXPR
13065 ? boolean_true_node : boolean_false_node,
13066 TREE_OPERAND (arg0, 1), arg1);
13069 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13070 for !=. Don't do this for ordered comparisons due to overflow. */
13071 if (TREE_CODE (arg0) == MINUS_EXPR
13072 && integer_zerop (arg1))
13073 return fold_build2_loc (loc, code, type,
13074 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13076 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13077 if (TREE_CODE (arg0) == ABS_EXPR
13078 && (integer_zerop (arg1) || real_zerop (arg1)))
13079 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13081 /* If this is an EQ or NE comparison with zero and ARG0 is
13082 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13083 two operations, but the latter can be done in one less insn
13084 on machines that have only two-operand insns or on which a
13085 constant cannot be the first operand. */
13086 if (TREE_CODE (arg0) == BIT_AND_EXPR
13087 && integer_zerop (arg1))
13089 tree arg00 = TREE_OPERAND (arg0, 0);
13090 tree arg01 = TREE_OPERAND (arg0, 1);
13091 if (TREE_CODE (arg00) == LSHIFT_EXPR
13092 && integer_onep (TREE_OPERAND (arg00, 0)))
13094 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13095 arg01, TREE_OPERAND (arg00, 1));
13096 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13097 build_int_cst (TREE_TYPE (arg0), 1));
13098 return fold_build2_loc (loc, code, type,
13099 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13100 arg1);
13102 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13103 && integer_onep (TREE_OPERAND (arg01, 0)))
13105 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13106 arg00, TREE_OPERAND (arg01, 1));
13107 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13108 build_int_cst (TREE_TYPE (arg0), 1));
13109 return fold_build2_loc (loc, code, type,
13110 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13111 arg1);
13115 /* If this is an NE or EQ comparison of zero against the result of a
13116 signed MOD operation whose second operand is a power of 2, make
13117 the MOD operation unsigned since it is simpler and equivalent. */
13118 if (integer_zerop (arg1)
13119 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13120 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13121 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13122 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13123 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13124 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13126 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13127 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13128 fold_convert_loc (loc, newtype,
13129 TREE_OPERAND (arg0, 0)),
13130 fold_convert_loc (loc, newtype,
13131 TREE_OPERAND (arg0, 1)));
13133 return fold_build2_loc (loc, code, type, newmod,
13134 fold_convert_loc (loc, newtype, arg1));
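/* E.g. for signed x, "x % 4 == 0" becomes "(unsigned) x % 4 == 0",
   which then folds further to "(x & 3) == 0" via the unsigned
   X % (power of two) rule.  */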
13137 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13138 C1 is a valid shift constant, and C2 is a power of two, i.e.
13139 a single bit. */
13140 if (TREE_CODE (arg0) == BIT_AND_EXPR
13141 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13142 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13143 == INTEGER_CST
13144 && integer_pow2p (TREE_OPERAND (arg0, 1))
13145 && integer_zerop (arg1))
13147 tree itype = TREE_TYPE (arg0);
13148 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13149 prec = TYPE_PRECISION (itype);
13151 /* Check for a valid shift count. */
13152 if (TREE_INT_CST_HIGH (arg001) == 0
13153 && TREE_INT_CST_LOW (arg001) < prec)
13155 tree arg01 = TREE_OPERAND (arg0, 1);
13156 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13157 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13158 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13159 can be rewritten as (X & (C2 << C1)) != 0. */
13160 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13162 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13163 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13164 return fold_build2_loc (loc, code, type, tem,
13165 fold_convert_loc (loc, itype, arg1));
13167 /* Otherwise, for signed (arithmetic) shifts,
13168 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13169 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13170 else if (!TYPE_UNSIGNED (itype))
13171 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13172 arg000, build_int_cst (itype, 0));
13173 /* Otherwise, for unsigned (logical) shifts,
13174 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13175 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13176 else
13177 return omit_one_operand_loc (loc, type,
13178 code == EQ_EXPR ? integer_one_node
13179 : integer_zero_node,
13180 arg000);
13184 /* If we have (A & C) == C where C is a power of 2, convert this into
13185 (A & C) != 0. Similarly for NE_EXPR. */
13186 if (TREE_CODE (arg0) == BIT_AND_EXPR
13187 && integer_pow2p (TREE_OPERAND (arg0, 1))
13188 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13189 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13190 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13191 integer_zero_node));
13193 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13194 bit, then fold the expression into A < 0 or A >= 0. */
13195 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13196 if (tem)
13197 return tem;
13199 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13200 Similarly for NE_EXPR. */
13201 if (TREE_CODE (arg0) == BIT_AND_EXPR
13202 && TREE_CODE (arg1) == INTEGER_CST
13203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13205 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13206 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13207 TREE_OPERAND (arg0, 1));
13208 tree dandnotc
13209 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13210 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13211 notc);
13212 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13213 if (integer_nonzerop (dandnotc))
13214 return omit_one_operand_loc (loc, type, rslt, arg0);
13217 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13218 Similarly for NE_EXPR. */
13219 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13220 && TREE_CODE (arg1) == INTEGER_CST
13221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13223 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13224 tree candnotd
13225 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13226 TREE_OPERAND (arg0, 1),
13227 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13228 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13229 if (integer_nonzerop (candnotd))
13230 return omit_one_operand_loc (loc, type, rslt, arg0);
13233 /* If this is a comparison of a field, we may be able to simplify it. */
13234 if ((TREE_CODE (arg0) == COMPONENT_REF
13235 || TREE_CODE (arg0) == BIT_FIELD_REF)
13236 /* Handle the constant case even without -O
13237 to make sure the warnings are given. */
13238 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13240 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13241 if (t1)
13242 return t1;
13245 /* Optimize comparisons of strlen vs zero to a compare of the
13246 first character of the string vs zero. To wit,
13247 strlen(ptr) == 0 => *ptr == 0
13248 strlen(ptr) != 0 => *ptr != 0
13249 Other cases should reduce to one of these two (or a constant)
13250 due to the return value of strlen being unsigned. */
13251 if (TREE_CODE (arg0) == CALL_EXPR
13252 && integer_zerop (arg1))
13254 tree fndecl = get_callee_fndecl (arg0);
13256 if (fndecl
13257 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13258 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13259 && call_expr_nargs (arg0) == 1
13260 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13262 tree iref = build_fold_indirect_ref_loc (loc,
13263 CALL_EXPR_ARG (arg0, 0));
13264 return fold_build2_loc (loc, code, type, iref,
13265 build_int_cst (TREE_TYPE (iref), 0));
13269 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13270 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13271 if (TREE_CODE (arg0) == RSHIFT_EXPR
13272 && integer_zerop (arg1)
13273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13275 tree arg00 = TREE_OPERAND (arg0, 0);
13276 tree arg01 = TREE_OPERAND (arg0, 1);
13277 tree itype = TREE_TYPE (arg00);
13278 if (TREE_INT_CST_HIGH (arg01) == 0
13279 && TREE_INT_CST_LOW (arg01)
13280 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13282 if (TYPE_UNSIGNED (itype))
13284 itype = signed_type_for (itype);
13285 arg00 = fold_convert_loc (loc, itype, arg00);
13287 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13288 type, arg00, build_zero_cst (itype));
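/* E.g. for 32-bit int x, "(x >> 31) != 0" becomes "x < 0" and
   "(x >> 31) == 0" becomes "x >= 0".  */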
13292 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13293 if (integer_zerop (arg1)
13294 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13295 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13296 TREE_OPERAND (arg0, 1));
13298 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13299 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13300 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13301 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13302 build_zero_cst (TREE_TYPE (arg0)));
13303 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13304 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13305 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13306 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13307 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13308 build_zero_cst (TREE_TYPE (arg0)));
13310 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13311 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13312 && TREE_CODE (arg1) == INTEGER_CST
13313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13314 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13315 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13316 TREE_OPERAND (arg0, 1), arg1));
13318 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13319 (X & C) == 0 when C is a single bit. */
13320 if (TREE_CODE (arg0) == BIT_AND_EXPR
13321 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13322 && integer_zerop (arg1)
13323 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13325 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13326 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13327 TREE_OPERAND (arg0, 1));
13328 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13329 type, tem,
13330 fold_convert_loc (loc, TREE_TYPE (arg0),
13331 arg1));
13334 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13335 constant C is a power of two, i.e. a single bit. */
13336 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13337 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13338 && integer_zerop (arg1)
13339 && integer_pow2p (TREE_OPERAND (arg0, 1))
13340 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13341 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13343 tree arg00 = TREE_OPERAND (arg0, 0);
13344 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13345 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13348 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13349 when C is a power of two, i.e. a single bit. */
13350 if (TREE_CODE (arg0) == BIT_AND_EXPR
13351 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13352 && integer_zerop (arg1)
13353 && integer_pow2p (TREE_OPERAND (arg0, 1))
13354 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13355 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13357 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13358 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13359 arg000, TREE_OPERAND (arg0, 1));
13360 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13361 tem, build_int_cst (TREE_TYPE (tem), 0));
13364 if (integer_zerop (arg1)
13365 && tree_expr_nonzero_p (arg0))
13367 tree res = constant_boolean_node (code == NE_EXPR, type);
13368 return omit_one_operand_loc (loc, type, res, arg0);
13371 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13372 if (TREE_CODE (arg0) == NEGATE_EXPR
13373 && TREE_CODE (arg1) == NEGATE_EXPR)
13374 return fold_build2_loc (loc, code, type,
13375 TREE_OPERAND (arg0, 0),
13376 fold_convert_loc (loc, TREE_TYPE (arg0),
13377 TREE_OPERAND (arg1, 0)));
13379 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13380 if (TREE_CODE (arg0) == BIT_AND_EXPR
13381 && TREE_CODE (arg1) == BIT_AND_EXPR)
13383 tree arg00 = TREE_OPERAND (arg0, 0);
13384 tree arg01 = TREE_OPERAND (arg0, 1);
13385 tree arg10 = TREE_OPERAND (arg1, 0);
13386 tree arg11 = TREE_OPERAND (arg1, 1);
13387 tree itype = TREE_TYPE (arg0);
13389 if (operand_equal_p (arg01, arg11, 0))
13390 return fold_build2_loc (loc, code, type,
13391 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13392 fold_build2_loc (loc,
13393 BIT_XOR_EXPR, itype,
13394 arg00, arg10),
13395 arg01),
13396 build_zero_cst (itype));
13398 if (operand_equal_p (arg01, arg10, 0))
13399 return fold_build2_loc (loc, code, type,
13400 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13401 fold_build2_loc (loc,
13402 BIT_XOR_EXPR, itype,
13403 arg00, arg11),
13404 arg01),
13405 build_zero_cst (itype));
13407 if (operand_equal_p (arg00, arg11, 0))
13408 return fold_build2_loc (loc, code, type,
13409 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13410 fold_build2_loc (loc,
13411 BIT_XOR_EXPR, itype,
13412 arg01, arg10),
13413 arg00),
13414 build_zero_cst (itype));
13416 if (operand_equal_p (arg00, arg10, 0))
13417 return fold_build2_loc (loc, code, type,
13418 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13419 fold_build2_loc (loc,
13420 BIT_XOR_EXPR, itype,
13421 arg01, arg11),
13422 arg00),
13423 build_zero_cst (itype));
13426 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13427 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13429 tree arg00 = TREE_OPERAND (arg0, 0);
13430 tree arg01 = TREE_OPERAND (arg0, 1);
13431 tree arg10 = TREE_OPERAND (arg1, 0);
13432 tree arg11 = TREE_OPERAND (arg1, 1);
13433 tree itype = TREE_TYPE (arg0);
13435 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13436 operand_equal_p guarantees no side-effects so we don't need
13437 to use omit_one_operand on Z. */
13438 if (operand_equal_p (arg01, arg11, 0))
13439 return fold_build2_loc (loc, code, type, arg00,
13440 fold_convert_loc (loc, TREE_TYPE (arg00),
13441 arg10));
13442 if (operand_equal_p (arg01, arg10, 0))
13443 return fold_build2_loc (loc, code, type, arg00,
13444 fold_convert_loc (loc, TREE_TYPE (arg00),
13445 arg11));
13446 if (operand_equal_p (arg00, arg11, 0))
13447 return fold_build2_loc (loc, code, type, arg01,
13448 fold_convert_loc (loc, TREE_TYPE (arg01),
13449 arg10));
13450 if (operand_equal_p (arg00, arg10, 0))
13451 return fold_build2_loc (loc, code, type, arg01,
13452 fold_convert_loc (loc, TREE_TYPE (arg01),
13453 arg11));
13455 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13456 if (TREE_CODE (arg01) == INTEGER_CST
13457 && TREE_CODE (arg11) == INTEGER_CST)
13459 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13460 fold_convert_loc (loc, itype, arg11));
13461 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13462 return fold_build2_loc (loc, code, type, tem,
13463 fold_convert_loc (loc, itype, arg10));
13467 /* Attempt to simplify equality/inequality comparisons of complex
13468 values. Only lower the comparison if the result is known or
13469 can be simplified to a single scalar comparison. */
13470 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13471 || TREE_CODE (arg0) == COMPLEX_CST)
13472 && (TREE_CODE (arg1) == COMPLEX_EXPR
13473 || TREE_CODE (arg1) == COMPLEX_CST))
13475 tree real0, imag0, real1, imag1;
13476 tree rcond, icond;
13478 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13480 real0 = TREE_OPERAND (arg0, 0);
13481 imag0 = TREE_OPERAND (arg0, 1);
13483 else
13485 real0 = TREE_REALPART (arg0);
13486 imag0 = TREE_IMAGPART (arg0);
13489 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13491 real1 = TREE_OPERAND (arg1, 0);
13492 imag1 = TREE_OPERAND (arg1, 1);
13494 else
13496 real1 = TREE_REALPART (arg1);
13497 imag1 = TREE_IMAGPART (arg1);
13500 rcond = fold_binary_loc (loc, code, type, real0, real1);
13501 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13503 if (integer_zerop (rcond))
13505 if (code == EQ_EXPR)
13506 return omit_two_operands_loc (loc, type, boolean_false_node,
13507 imag0, imag1);
13508 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13510 else
13512 if (code == NE_EXPR)
13513 return omit_two_operands_loc (loc, type, boolean_true_node,
13514 imag0, imag1);
13515 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13519 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13520 if (icond && TREE_CODE (icond) == INTEGER_CST)
13522 if (integer_zerop (icond))
13524 if (code == EQ_EXPR)
13525 return omit_two_operands_loc (loc, type, boolean_false_node,
13526 real0, real1);
13527 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13529 else
13531 if (code == NE_EXPR)
13532 return omit_two_operands_loc (loc, type, boolean_true_node,
13533 real0, real1);
13534 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13539 return NULL_TREE;
13541 case LT_EXPR:
13542 case GT_EXPR:
13543 case LE_EXPR:
13544 case GE_EXPR:
13545 tem = fold_comparison (loc, code, type, op0, op1);
13546 if (tem != NULL_TREE)
13547 return tem;
13549 /* Transform comparisons of the form X +- C CMP X. */
13550 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13551 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13552 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13553 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13554 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13555 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13557 tree arg01 = TREE_OPERAND (arg0, 1);
13558 enum tree_code code0 = TREE_CODE (arg0);
13559 int is_positive;
13561 if (TREE_CODE (arg01) == REAL_CST)
13562 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13563 else
13564 is_positive = tree_int_cst_sgn (arg01);
13566 /* (X - c) > X becomes false. */
13567 if (code == GT_EXPR
13568 && ((code0 == MINUS_EXPR && is_positive >= 0)
13569 || (code0 == PLUS_EXPR && is_positive <= 0)))
13571 if (TREE_CODE (arg01) == INTEGER_CST
13572 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13573 fold_overflow_warning (("assuming signed overflow does not "
13574 "occur when assuming that (X - c) > X "
13575 "is always false"),
13576 WARN_STRICT_OVERFLOW_ALL);
13577 return constant_boolean_node (0, type);
13580 /* Likewise (X + c) < X becomes false. */
13581 if (code == LT_EXPR
13582 && ((code0 == PLUS_EXPR && is_positive >= 0)
13583 || (code0 == MINUS_EXPR && is_positive <= 0)))
13585 if (TREE_CODE (arg01) == INTEGER_CST
13586 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13587 fold_overflow_warning (("assuming signed overflow does not "
13588 "occur when assuming that "
13589 "(X + c) < X is always false"),
13590 WARN_STRICT_OVERFLOW_ALL);
13591 return constant_boolean_node (0, type);
13594 /* Convert (X - c) <= X to true. */
13595 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13596 && code == LE_EXPR
13597 && ((code0 == MINUS_EXPR && is_positive >= 0)
13598 || (code0 == PLUS_EXPR && is_positive <= 0)))
13600 if (TREE_CODE (arg01) == INTEGER_CST
13601 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13602 fold_overflow_warning (("assuming signed overflow does not "
13603 "occur when assuming that "
13604 "(X - c) <= X is always true"),
13605 WARN_STRICT_OVERFLOW_ALL);
13606 return constant_boolean_node (1, type);
13609 /* Convert (X + c) >= X to true. */
13610 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13611 && code == GE_EXPR
13612 && ((code0 == PLUS_EXPR && is_positive >= 0)
13613 || (code0 == MINUS_EXPR && is_positive <= 0)))
13615 if (TREE_CODE (arg01) == INTEGER_CST
13616 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13617 fold_overflow_warning (("assuming signed overflow does not "
13618 "occur when assuming that "
13619 "(X + c) >= X is always true"),
13620 WARN_STRICT_OVERFLOW_ALL);
13621 return constant_boolean_node (1, type);
13624 if (TREE_CODE (arg01) == INTEGER_CST)
13626 /* Convert X + c > X and X - c < X to true for integers. */
13627 if (code == GT_EXPR
13628 && ((code0 == PLUS_EXPR && is_positive > 0)
13629 || (code0 == MINUS_EXPR && is_positive < 0)))
13631 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13632 fold_overflow_warning (("assuming signed overflow does "
13633 "not occur when assuming that "
13634 "(X + c) > X is always true"),
13635 WARN_STRICT_OVERFLOW_ALL);
13636 return constant_boolean_node (1, type);
13639 if (code == LT_EXPR
13640 && ((code0 == MINUS_EXPR && is_positive > 0)
13641 || (code0 == PLUS_EXPR && is_positive < 0)))
13643 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13644 fold_overflow_warning (("assuming signed overflow does "
13645 "not occur when assuming that "
13646 "(X - c) < X is always true"),
13647 WARN_STRICT_OVERFLOW_ALL);
13648 return constant_boolean_node (1, type);
13651 /* Convert X + c <= X and X - c >= X to false for integers. */
13652 if (code == LE_EXPR
13653 && ((code0 == PLUS_EXPR && is_positive > 0)
13654 || (code0 == MINUS_EXPR && is_positive < 0)))
13656 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13657 fold_overflow_warning (("assuming signed overflow does "
13658 "not occur when assuming that "
13659 "(X + c) <= X is always false"),
13660 WARN_STRICT_OVERFLOW_ALL);
13661 return constant_boolean_node (0, type);
13664 if (code == GE_EXPR
13665 && ((code0 == MINUS_EXPR && is_positive > 0)
13666 || (code0 == PLUS_EXPR && is_positive < 0)))
13668 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13669 fold_overflow_warning (("assuming signed overflow does "
13670 "not occur when assuming that "
13671 "(X - c) >= X is always false"),
13672 WARN_STRICT_OVERFLOW_ALL);
13673 return constant_boolean_node (0, type);
13678 /* Comparisons with the highest or lowest possible integer of
13679 the specified precision will have known values. */
13681 tree arg1_type = TREE_TYPE (arg1);
13682 unsigned int width = TYPE_PRECISION (arg1_type);
13684 if (TREE_CODE (arg1) == INTEGER_CST
13685 && width <= HOST_BITS_PER_DOUBLE_INT
13686 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13688 HOST_WIDE_INT signed_max_hi;
13689 unsigned HOST_WIDE_INT signed_max_lo;
13690 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13692 if (width <= HOST_BITS_PER_WIDE_INT)
13694 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13695 - 1;
13696 signed_max_hi = 0;
13697 max_hi = 0;
13699 if (TYPE_UNSIGNED (arg1_type))
13701 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13702 min_lo = 0;
13703 min_hi = 0;
13705 else
13707 max_lo = signed_max_lo;
13708 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13709 min_hi = -1;
13712 else
13714 width -= HOST_BITS_PER_WIDE_INT;
13715 signed_max_lo = -1;
13716 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13717 - 1;
13718 max_lo = -1;
13719 min_lo = 0;
13721 if (TYPE_UNSIGNED (arg1_type))
13723 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13724 min_hi = 0;
13726 else
13728 max_hi = signed_max_hi;
13729 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13733 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13734 && TREE_INT_CST_LOW (arg1) == max_lo)
13735 switch (code)
13737 case GT_EXPR:
13738 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13740 case GE_EXPR:
13741 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13743 case LE_EXPR:
13744 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13746 case LT_EXPR:
13747 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13749 /* The GE_EXPR and LT_EXPR cases above are not normally
13750 reached because of previous transformations. */
13752 default:
13753 break;
13755 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13756 == max_hi
13757 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13758 switch (code)
13760 case GT_EXPR:
13761 arg1 = const_binop (PLUS_EXPR, arg1,
13762 build_int_cst (TREE_TYPE (arg1), 1));
13763 return fold_build2_loc (loc, EQ_EXPR, type,
13764 fold_convert_loc (loc,
13765 TREE_TYPE (arg1), arg0),
13766 arg1);
13767 case LE_EXPR:
13768 arg1 = const_binop (PLUS_EXPR, arg1,
13769 build_int_cst (TREE_TYPE (arg1), 1));
13770 return fold_build2_loc (loc, NE_EXPR, type,
13771 fold_convert_loc (loc, TREE_TYPE (arg1),
13772 arg0),
13773 arg1);
13774 default:
13775 break;
13777 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13778 == min_hi
13779 && TREE_INT_CST_LOW (arg1) == min_lo)
13780 switch (code)
13782 case LT_EXPR:
13783 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13785 case LE_EXPR:
13786 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13788 case GE_EXPR:
13789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13791 case GT_EXPR:
13792 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13794 default:
13795 break;
13797 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13798 == min_hi
13799 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13800 switch (code)
13802 case GE_EXPR:
13803 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13804 return fold_build2_loc (loc, NE_EXPR, type,
13805 fold_convert_loc (loc,
13806 TREE_TYPE (arg1), arg0),
13807 arg1);
13808 case LT_EXPR:
13809 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13810 return fold_build2_loc (loc, EQ_EXPR, type,
13811 fold_convert_loc (loc, TREE_TYPE (arg1),
13812 arg0),
13813 arg1);
13814 default:
13815 break;
13818 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13819 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13820 && TYPE_UNSIGNED (arg1_type)
13821 /* We will flip the signedness of the comparison operator
13822 associated with the mode of arg1, so the sign bit is
13823 specified by this mode. Check that arg1 is the signed
13824 max associated with this sign bit. */
13825 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13826 /* signed_type does not work on pointer types. */
13827 && INTEGRAL_TYPE_P (arg1_type))
13829 /* The following case also applies to X < signed_max+1
13830 and X >= signed_max+1 because of previous transformations. */
13831 if (code == LE_EXPR || code == GT_EXPR)
13833 tree st = signed_type_for (arg1_type);
13834 return fold_build2_loc (loc,
13835 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13836 type, fold_convert_loc (loc, st, arg0),
13837 build_int_cst (st, 0));
13843 /* If we are comparing an ABS_EXPR with a constant, we can
13844 convert all the cases into explicit comparisons, but they may
13845 well not be faster than doing the ABS and one comparison.
13846 But ABS (X) <= C is a range comparison, which becomes a subtraction
13847 and a comparison, and is probably faster. */
13848 if (code == LE_EXPR
13849 && TREE_CODE (arg1) == INTEGER_CST
13850 && TREE_CODE (arg0) == ABS_EXPR
13851 && ! TREE_SIDE_EFFECTS (arg0)
13852 && (0 != (tem = negate_expr (arg1)))
13853 && TREE_CODE (tem) == INTEGER_CST
13854 && !TREE_OVERFLOW (tem))
13855 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13856 build2 (GE_EXPR, type,
13857 TREE_OPERAND (arg0, 0), tem),
13858 build2 (LE_EXPR, type,
13859 TREE_OPERAND (arg0, 0), arg1));
13861 /* Convert ABS_EXPR<x> >= 0 to true. */
13862 strict_overflow_p = false;
13863 if (code == GE_EXPR
13864 && (integer_zerop (arg1)
13865 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13866 && real_zerop (arg1)))
13867 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13869 if (strict_overflow_p)
13870 fold_overflow_warning (("assuming signed overflow does not occur "
13871 "when simplifying comparison of "
13872 "absolute value and zero"),
13873 WARN_STRICT_OVERFLOW_CONDITIONAL);
13874 return omit_one_operand_loc (loc, type,
13875 constant_boolean_node (true, type),
13876 arg0);
13879 /* Convert ABS_EXPR<x> < 0 to false. */
13880 strict_overflow_p = false;
13881 if (code == LT_EXPR
13882 && (integer_zerop (arg1) || real_zerop (arg1))
13883 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13885 if (strict_overflow_p)
13886 fold_overflow_warning (("assuming signed overflow does not occur "
13887 "when simplifying comparison of "
13888 "absolute value and zero"),
13889 WARN_STRICT_OVERFLOW_CONDITIONAL);
13890 return omit_one_operand_loc (loc, type,
13891 constant_boolean_node (false, type),
13892 arg0);
13895 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13896 and similarly for >= into !=. */
13897 if ((code == LT_EXPR || code == GE_EXPR)
13898 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13899 && TREE_CODE (arg1) == LSHIFT_EXPR
13900 && integer_onep (TREE_OPERAND (arg1, 0)))
13901 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13902 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13903 TREE_OPERAND (arg1, 1)),
13904 build_zero_cst (TREE_TYPE (arg0)));
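/* Worked example: for unsigned X, X < (1 << Y) holds exactly when no
   bit of X at position Y or above is set, hence
     X < (1 << Y)   folds to   (X >> Y) == 0
     X >= (1 << Y)  folds to   (X >> Y) != 0
   e.g. X == 19 (10011b), Y == 3: 19 >> 3 == 2 != 0, and indeed
   19 >= 8.  */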
13906 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13907 otherwise Y might be >= # of bits in X's type and thus e.g.
13908 (unsigned char) (1 << Y) for Y == 15 might be 0.
13909 If the cast is widening, then 1 << Y should have unsigned type,
13910 otherwise if Y is the number of bits in the signed shift type minus 1,
13911 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13912 Y == 31 might be 0xffffffff80000000. */
13913 if ((code == LT_EXPR || code == GE_EXPR)
13914 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13915 && CONVERT_EXPR_P (arg1)
13916 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13917 && (TYPE_PRECISION (TREE_TYPE (arg1))
13918 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13919 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13920 || (TYPE_PRECISION (TREE_TYPE (arg1))
13921 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13922 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13924 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13925 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13926 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13927 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13928 build_zero_cst (TREE_TYPE (arg0)));
13931 return NULL_TREE;
13933 case UNORDERED_EXPR:
13934 case ORDERED_EXPR:
13935 case UNLT_EXPR:
13936 case UNLE_EXPR:
13937 case UNGT_EXPR:
13938 case UNGE_EXPR:
13939 case UNEQ_EXPR:
13940 case LTGT_EXPR:
13941 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13943 t1 = fold_relational_const (code, type, arg0, arg1);
13944 if (t1 != NULL_TREE)
13945 return t1;
13948 /* If the first operand is NaN, the result is constant. */
13949 if (TREE_CODE (arg0) == REAL_CST
13950 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13951 && (code != LTGT_EXPR || ! flag_trapping_math))
13953 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13954 ? integer_zero_node
13955 : integer_one_node;
13956 return omit_one_operand_loc (loc, type, t1, arg1);
13959 /* If the second operand is NaN, the result is constant. */
13960 if (TREE_CODE (arg1) == REAL_CST
13961 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13962 && (code != LTGT_EXPR || ! flag_trapping_math))
13964 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13965 ? integer_zero_node
13966 : integer_one_node;
13967 return omit_one_operand_loc (loc, type, t1, arg0);
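/* Summary of the two NaN cases above: once either operand is a known
   NaN, ORDERED and LTGT are false and all the UN* comparisons are
   true, independent of the other operand; LTGT is excluded under
   -ftrapping-math because evaluating it could raise an exception.  */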
13970 /* Simplify unordered comparison of something with itself. */
13971 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13972 && operand_equal_p (arg0, arg1, 0))
13973 return constant_boolean_node (1, type);
13975 if (code == LTGT_EXPR
13976 && !flag_trapping_math
13977 && operand_equal_p (arg0, arg1, 0))
13978 return constant_boolean_node (0, type);
13980 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13982 tree targ0 = strip_float_extensions (arg0);
13983 tree targ1 = strip_float_extensions (arg1);
13984 tree newtype = TREE_TYPE (targ0);
13986 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13987 newtype = TREE_TYPE (targ1);
13989 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13990 return fold_build2_loc (loc, code, type,
13991 fold_convert_loc (loc, newtype, targ0),
13992 fold_convert_loc (loc, newtype, targ1));
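/* Worked example: for float A and B, (double) A < (double) B folds
   to A < B, because widening a float to double is exact and
   therefore preserves the ordering of the operands.  */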
13995 return NULL_TREE;
13997 case COMPOUND_EXPR:
13998 /* When pedantic, a compound expression can be neither an lvalue
13999 nor an integer constant expression. */
14000 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14001 return NULL_TREE;
14002 /* Don't let (0, 0) be a null pointer constant. */
14003 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14004 : fold_convert_loc (loc, type, arg1);
14005 return pedantic_non_lvalue_loc (loc, tem);
14007 case COMPLEX_EXPR:
14008 if ((TREE_CODE (arg0) == REAL_CST
14009 && TREE_CODE (arg1) == REAL_CST)
14010 || (TREE_CODE (arg0) == INTEGER_CST
14011 && TREE_CODE (arg1) == INTEGER_CST))
14012 return build_complex (type, arg0, arg1);
14013 if (TREE_CODE (arg0) == REALPART_EXPR
14014 && TREE_CODE (arg1) == IMAGPART_EXPR
14015 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14016 && operand_equal_p (TREE_OPERAND (arg0, 0),
14017 TREE_OPERAND (arg1, 0), 0))
14018 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14019 TREE_OPERAND (arg1, 0));
14020 return NULL_TREE;
14022 case ASSERT_EXPR:
14023 /* An ASSERT_EXPR should never be passed to fold_binary. */
14024 gcc_unreachable ();
14026 case VEC_PACK_TRUNC_EXPR:
14027 case VEC_PACK_FIX_TRUNC_EXPR:
14029 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14030 tree *elts;
14032 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14033 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14034 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14035 return NULL_TREE;
14037 elts = XALLOCAVEC (tree, nelts);
14038 if (!vec_cst_ctor_to_array (arg0, elts)
14039 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14040 return NULL_TREE;
14042 for (i = 0; i < nelts; i++)
14044 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14045 ? NOP_EXPR : FIX_TRUNC_EXPR,
14046 TREE_TYPE (type), elts[i]);
14047 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14048 return NULL_TREE;
14051 return build_vector (type, elts);
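/* Worked example (a sketch of the constant folding above): packing
   the two v2si constants { 0x10001, 0x10002 } and { 3, 4 } with
   VEC_PACK_TRUNC_EXPR into a v4hi yields { 1, 2, 3, 4 }, each
   element truncated to the 16-bit element type of the result.  */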
14054 case VEC_WIDEN_MULT_LO_EXPR:
14055 case VEC_WIDEN_MULT_HI_EXPR:
14056 case VEC_WIDEN_MULT_EVEN_EXPR:
14057 case VEC_WIDEN_MULT_ODD_EXPR:
14059 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14060 unsigned int out, ofs, scale;
14061 tree *elts;
14063 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14064 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14065 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14066 return NULL_TREE;
14068 elts = XALLOCAVEC (tree, nelts * 4);
14069 if (!vec_cst_ctor_to_array (arg0, elts)
14070 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14071 return NULL_TREE;
14073 if (code == VEC_WIDEN_MULT_LO_EXPR)
14074 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14075 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14076 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14077 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14078 scale = 1, ofs = 0;
14079 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14080 scale = 1, ofs = 1;
14082 for (out = 0; out < nelts; out++)
14084 unsigned int in1 = (out << scale) + ofs;
14085 unsigned int in2 = in1 + nelts * 2;
14086 tree t1, t2;
14088 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14089 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14091 if (t1 == NULL_TREE || t2 == NULL_TREE)
14092 return NULL_TREE;
14093 elts[out] = const_binop (MULT_EXPR, t1, t2);
14094 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14095 return NULL_TREE;
14098 return build_vector (type, elts);
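/* Worked example of the lane selection above, for v4hi operands
   A = { 1, 2, 3, 4 } and B = { 5, 6, 7, 8 } producing a v2si:
     VEC_WIDEN_MULT_EVEN_EXPR  -> { 1*5, 3*7 } = { 5, 21 }
     VEC_WIDEN_MULT_ODD_EXPR   -> { 2*6, 4*8 } = { 12, 32 }
     VEC_WIDEN_MULT_LO_EXPR    -> { 1*5, 2*6 } on little-endian
   with IN1 = (OUT << SCALE) + OFS indexing into A and IN2 the
   corresponding element of B.  */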
14101 default:
14102 return NULL_TREE;
14103 } /* switch (code) */
14106 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14107 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14108 of GOTO_EXPR. */
14110 static tree
14111 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14113 switch (TREE_CODE (*tp))
14115 case LABEL_EXPR:
14116 return *tp;
14118 case GOTO_EXPR:
14119 *walk_subtrees = 0;
14121 /* ... fall through ... */
14123 default:
14124 return NULL_TREE;
14128 /* Return whether the sub-tree ST contains a label which is accessible from
14129 outside the sub-tree. */
14131 static bool
14132 contains_label_p (tree st)
14134 return
14135 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
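/* Example of why this matters: when folding 0 ? ({ lab: x; }) : y,
   the dead arm may not simply be dropped if some goto outside the
   expression targets "lab"; contains_label_p lets the COND_EXPR
   folding below detect and keep such arms.  */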
14138 /* Fold a ternary expression of code CODE and type TYPE with operands
14139 OP0, OP1, and OP2. Return the folded expression if folding is
14140 successful. Otherwise, return NULL_TREE. */
14142 tree
14143 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14144 tree op0, tree op1, tree op2)
14146 tree tem;
14147 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14148 enum tree_code_class kind = TREE_CODE_CLASS (code);
14150 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14151 && TREE_CODE_LENGTH (code) == 3);
14153 /* Strip any conversions that don't change the mode. This is safe
14154 for every expression, except for a comparison expression because
14155 its signedness is derived from its operands. So, in the latter
14156 case, only strip conversions that don't change the signedness.
14158 Note that this is done as an internal manipulation within the
14159 constant folder, in order to find the simplest representation of
14160 the arguments so that their form can be studied. In any case,
14161 the appropriate type conversions should be put back in the tree
14162 that will get out of the constant folder. */
14163 if (op0)
14165 arg0 = op0;
14166 STRIP_NOPS (arg0);
14169 if (op1)
14171 arg1 = op1;
14172 STRIP_NOPS (arg1);
14175 if (op2)
14177 arg2 = op2;
14178 STRIP_NOPS (arg2);
14181 switch (code)
14183 case COMPONENT_REF:
14184 if (TREE_CODE (arg0) == CONSTRUCTOR
14185 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14187 unsigned HOST_WIDE_INT idx;
14188 tree field, value;
14189 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14190 if (field == arg1)
14191 return value;
14193 return NULL_TREE;
14195 case COND_EXPR:
14196 case VEC_COND_EXPR:
14197 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14198 so all simple results must be passed through pedantic_non_lvalue. */
14199 if (TREE_CODE (arg0) == INTEGER_CST)
14201 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14202 tem = integer_zerop (arg0) ? op2 : op1;
14203 /* Only optimize constant conditions when the selected branch
14204 has the same type as the COND_EXPR. This avoids optimizing
14205 away "c ? x : throw", where the throw has a void type.
14206 Avoid throwing away the operand that contains a label. */
14207 if ((!TREE_SIDE_EFFECTS (unused_op)
14208 || !contains_label_p (unused_op))
14209 && (! VOID_TYPE_P (TREE_TYPE (tem))
14210 || VOID_TYPE_P (type)))
14211 return pedantic_non_lvalue_loc (loc, tem);
14212 return NULL_TREE;
14214 else if (TREE_CODE (arg0) == VECTOR_CST)
14216 if (integer_all_onesp (arg0))
14217 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14218 if (integer_zerop (arg0))
14219 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14221 if ((TREE_CODE (arg1) == VECTOR_CST
14222 || TREE_CODE (arg1) == CONSTRUCTOR)
14223 && (TREE_CODE (arg2) == VECTOR_CST
14224 || TREE_CODE (arg2) == CONSTRUCTOR))
14226 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14227 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14228 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14229 for (i = 0; i < nelts; i++)
14231 tree val = VECTOR_CST_ELT (arg0, i);
14232 if (integer_all_onesp (val))
14233 sel[i] = i;
14234 else if (integer_zerop (val))
14235 sel[i] = nelts + i;
14236 else /* Currently unreachable. */
14237 return NULL_TREE;
14239 tree t = fold_vec_perm (type, arg1, arg2, sel);
14240 if (t != NULL_TREE)
14241 return t;
14245 if (operand_equal_p (arg1, op2, 0))
14246 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14248 /* If we have A op B ? A : C, we may be able to convert this to a
14249 simpler expression, depending on the operation and the values
14250 of B and C. Signed zeros prevent all of these transformations,
14251 for reasons given above each one.
14253 Also try swapping the arguments and inverting the conditional. */
14254 if (COMPARISON_CLASS_P (arg0)
14255 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14256 arg1, TREE_OPERAND (arg0, 1))
14257 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14259 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14260 if (tem)
14261 return tem;
14264 if (COMPARISON_CLASS_P (arg0)
14265 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14266 op2,
14267 TREE_OPERAND (arg0, 1))
14268 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14270 location_t loc0 = expr_location_or (arg0, loc);
14271 tem = fold_invert_truthvalue (loc0, arg0);
14272 if (tem && COMPARISON_CLASS_P (tem))
14274 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14275 if (tem)
14276 return tem;
14280 /* If the second operand is simpler than the third, swap them
14281 since that produces better jump optimization results. */
14282 if (truth_value_p (TREE_CODE (arg0))
14283 && tree_swap_operands_p (op1, op2, false))
14285 location_t loc0 = expr_location_or (arg0, loc);
14286 /* See if this can be inverted. If it can't, possibly because
14287 it was a floating-point inequality comparison, don't do
14288 anything. */
14289 tem = fold_invert_truthvalue (loc0, arg0);
14290 if (tem)
14291 return fold_build3_loc (loc, code, type, tem, op2, op1);
14294 /* Convert A ? 1 : 0 to simply A. */
14295 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14296 : (integer_onep (op1)
14297 && !VECTOR_TYPE_P (type)))
14298 && integer_zerop (op2)
14299 /* If we try to convert OP0 to our type, the
14300 call to fold will try to move the conversion inside
14301 a COND, which will recurse. In that case, the COND_EXPR
14302 is probably the best choice, so leave it alone. */
14303 && type == TREE_TYPE (arg0))
14304 return pedantic_non_lvalue_loc (loc, arg0);
14306 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14307 over COND_EXPR in cases such as floating point comparisons. */
14308 if (integer_zerop (op1)
14309 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14310 : (integer_onep (op2)
14311 && !VECTOR_TYPE_P (type)))
14312 && truth_value_p (TREE_CODE (arg0)))
14313 return pedantic_non_lvalue_loc (loc,
14314 fold_convert_loc (loc, type,
14315 invert_truthvalue_loc (loc,
14316 arg0)));
14318 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14319 if (TREE_CODE (arg0) == LT_EXPR
14320 && integer_zerop (TREE_OPERAND (arg0, 1))
14321 && integer_zerop (op2)
14322 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14324 /* sign_bit_p looks through both zero and sign extensions,
14325 but for this optimization only sign extensions are
14326 usable. */
14327 tree tem2 = TREE_OPERAND (arg0, 0);
14328 while (tem != tem2)
14330 if (TREE_CODE (tem2) != NOP_EXPR
14331 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14333 tem = NULL_TREE;
14334 break;
14336 tem2 = TREE_OPERAND (tem2, 0);
14338 /* sign_bit_p only checks ARG1 bits within A's precision.
14339 If <sign bit of A> has wider type than A, bits outside
14340 of A's precision in <sign bit of A> need to be checked.
14341 If they are all 0, this optimization needs to be done
14342 in unsigned A's type; if they are all 1, in signed A's type;
14343 otherwise this can't be done. */
14344 if (tem
14345 && TYPE_PRECISION (TREE_TYPE (tem))
14346 < TYPE_PRECISION (TREE_TYPE (arg1))
14347 && TYPE_PRECISION (TREE_TYPE (tem))
14348 < TYPE_PRECISION (type))
14350 unsigned HOST_WIDE_INT mask_lo;
14351 HOST_WIDE_INT mask_hi;
14352 int inner_width, outer_width;
14353 tree tem_type;
14355 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14356 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14357 if (outer_width > TYPE_PRECISION (type))
14358 outer_width = TYPE_PRECISION (type);
14360 if (outer_width > HOST_BITS_PER_WIDE_INT)
14362 mask_hi = (HOST_WIDE_INT_M1U
14363 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14364 mask_lo = -1;
14366 else
14368 mask_hi = 0;
14369 mask_lo = (HOST_WIDE_INT_M1U
14370 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14372 if (inner_width > HOST_BITS_PER_WIDE_INT)
14374 mask_hi &= ~(HOST_WIDE_INT_M1U
14375 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14376 mask_lo = 0;
14378 else
14379 mask_lo &= ~(HOST_WIDE_INT_M1U
14380 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14382 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14383 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14385 tem_type = signed_type_for (TREE_TYPE (tem));
14386 tem = fold_convert_loc (loc, tem_type, tem);
14388 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14389 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14391 tem_type = unsigned_type_for (TREE_TYPE (tem));
14392 tem = fold_convert_loc (loc, tem_type, tem);
14394 else
14395 tem = NULL;
14398 if (tem)
14399 return
14400 fold_convert_loc (loc, type,
14401 fold_build2_loc (loc, BIT_AND_EXPR,
14402 TREE_TYPE (tem), tem,
14403 fold_convert_loc (loc,
14404 TREE_TYPE (tem),
14405 arg1)));
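/* Worked example: for a 32-bit signed A,
     A < 0 ? INT_MIN : 0   folds to   A & INT_MIN
   because INT_MIN has only the sign bit set: the AND yields INT_MIN
   exactly when A is negative and 0 otherwise.  */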
14408 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14409 already handled above. */
14410 if (TREE_CODE (arg0) == BIT_AND_EXPR
14411 && integer_onep (TREE_OPERAND (arg0, 1))
14412 && integer_zerop (op2)
14413 && integer_pow2p (arg1))
14415 tree tem = TREE_OPERAND (arg0, 0);
14416 STRIP_NOPS (tem);
14417 if (TREE_CODE (tem) == RSHIFT_EXPR
14418 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14419 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14420 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14421 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14422 TREE_OPERAND (tem, 0), arg1);
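/* Worked example: with N == 3,
     (A >> 3) & 1 ? 8 : 0   folds to   A & 8
   since both sides select exactly bit 3 of A.  */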
14425 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14426 is probably obsolete because the first operand should be a
14427 truth value (that's why we have the two cases above), but let's
14428 leave it in until we can confirm this for all front-ends. */
14429 if (integer_zerop (op2)
14430 && TREE_CODE (arg0) == NE_EXPR
14431 && integer_zerop (TREE_OPERAND (arg0, 1))
14432 && integer_pow2p (arg1)
14433 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14434 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14435 arg1, OEP_ONLY_CONST))
14436 return pedantic_non_lvalue_loc (loc,
14437 fold_convert_loc (loc, type,
14438 TREE_OPERAND (arg0, 0)));
14440 /* Disable the transformations below for vectors, since
14441 fold_binary_op_with_conditional_arg may undo them immediately,
14442 yielding an infinite loop. */
14443 if (code == VEC_COND_EXPR)
14444 return NULL_TREE;
14446 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14447 if (integer_zerop (op2)
14448 && truth_value_p (TREE_CODE (arg0))
14449 && truth_value_p (TREE_CODE (arg1))
14450 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14451 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14452 : TRUTH_ANDIF_EXPR,
14453 type, fold_convert_loc (loc, type, arg0), arg1);
14455 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14456 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14457 && truth_value_p (TREE_CODE (arg0))
14458 && truth_value_p (TREE_CODE (arg1))
14459 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14461 location_t loc0 = expr_location_or (arg0, loc);
14462 /* Only perform transformation if ARG0 is easily inverted. */
14463 tem = fold_invert_truthvalue (loc0, arg0);
14464 if (tem)
14465 return fold_build2_loc (loc, code == VEC_COND_EXPR
14466 ? BIT_IOR_EXPR
14467 : TRUTH_ORIF_EXPR,
14468 type, fold_convert_loc (loc, type, tem),
14469 arg1);
14472 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14473 if (integer_zerop (arg1)
14474 && truth_value_p (TREE_CODE (arg0))
14475 && truth_value_p (TREE_CODE (op2))
14476 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14478 location_t loc0 = expr_location_or (arg0, loc);
14479 /* Only perform transformation if ARG0 is easily inverted. */
14480 tem = fold_invert_truthvalue (loc0, arg0);
14481 if (tem)
14482 return fold_build2_loc (loc, code == VEC_COND_EXPR
14483 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14484 type, fold_convert_loc (loc, type, tem),
14485 op2);
14488 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14489 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14490 && truth_value_p (TREE_CODE (arg0))
14491 && truth_value_p (TREE_CODE (op2))
14492 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14493 return fold_build2_loc (loc, code == VEC_COND_EXPR
14494 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14495 type, fold_convert_loc (loc, type, arg0), op2);
14497 return NULL_TREE;
14499 case CALL_EXPR:
14500 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14501 of fold_ternary on them. */
14502 gcc_unreachable ();
14504 case BIT_FIELD_REF:
14505 if ((TREE_CODE (arg0) == VECTOR_CST
14506 || (TREE_CODE (arg0) == CONSTRUCTOR
14507 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14508 && (type == TREE_TYPE (TREE_TYPE (arg0))
14509 || (TREE_CODE (type) == VECTOR_TYPE
14510 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14512 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14513 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14514 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14515 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14517 if (n != 0
14518 && (idx % width) == 0
14519 && (n % width) == 0
14520 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14522 idx = idx / width;
14523 n = n / width;
14525 if (TREE_CODE (arg0) == VECTOR_CST)
14527 if (n == 1)
14528 return VECTOR_CST_ELT (arg0, idx);
14530 tree *vals = XALLOCAVEC (tree, n);
14531 for (unsigned i = 0; i < n; ++i)
14532 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14533 return build_vector (type, vals);
14536 /* Constructor elements can be subvectors. */
14537 unsigned HOST_WIDE_INT k = 1;
14538 if (CONSTRUCTOR_NELTS (arg0) != 0)
14540 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14541 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14542 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14545 /* We keep an exact subset of the constructor elements. */
14546 if ((idx % k) == 0 && (n % k) == 0)
14548 if (CONSTRUCTOR_NELTS (arg0) == 0)
14549 return build_constructor (type, NULL);
14550 idx /= k;
14551 n /= k;
14552 if (n == 1)
14554 if (idx < CONSTRUCTOR_NELTS (arg0))
14555 return CONSTRUCTOR_ELT (arg0, idx)->value;
14556 return build_zero_cst (type);
14559 vec<constructor_elt, va_gc> *vals;
14560 vec_alloc (vals, n);
14561 for (unsigned i = 0;
14562 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14563 ++i)
14564 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14565 CONSTRUCTOR_ELT
14566 (arg0, idx + i)->value);
14567 return build_constructor (type, vals);
14569 /* The bitfield references a single constructor element. */
14570 else if (idx + n <= (idx / k + 1) * k)
14572 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14573 return build_zero_cst (type);
14574 else if (n == k)
14575 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14576 else
14577 return fold_build3_loc (loc, code, type,
14578 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14579 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14584 /* A bit-field-ref that references the full argument can be stripped. */
14585 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14586 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14587 && integer_zerop (op2))
14588 return fold_convert_loc (loc, type, arg0);
14590 /* On constants we can use native encode/interpret to constant
14591 fold (nearly) all BIT_FIELD_REFs. */
14592 if (CONSTANT_CLASS_P (arg0)
14593 && can_native_interpret_type_p (type)
14594 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14595 /* This limitation should not be necessary; we just need to
14596 round this up to the mode size. */
14597 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14598 /* Need bit-shifting of the buffer to relax the following. */
14599 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14601 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14602 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14603 unsigned HOST_WIDE_INT clen;
14604 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14605 /* ??? We cannot tell native_encode_expr to start at
14606 an arbitrary byte, so limit ourselves to a reasonable amount
14607 of work. */
14608 if (clen <= 4096)
14610 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14611 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14612 if (len > 0
14613 && len * BITS_PER_UNIT >= bitpos + bitsize)
14615 tree v = native_interpret_expr (type,
14616 b + bitpos / BITS_PER_UNIT,
14617 bitsize / BITS_PER_UNIT);
14618 if (v)
14619 return v;
14624 return NULL_TREE;
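/* Sketch of the native encode/interpret path above: for a v4si
   constant, BIT_FIELD_REF <cst, 32, 32> encodes the vector into a
   byte buffer and then reinterprets bytes 4..7 as a 32-bit value.
   Both the position and the size must be byte-aligned for this to
   work with the current buffer handling.  */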
14626 case FMA_EXPR:
14627 /* For integers we can decompose the FMA if possible. */
14628 if (TREE_CODE (arg0) == INTEGER_CST
14629 && TREE_CODE (arg1) == INTEGER_CST)
14630 return fold_build2_loc (loc, PLUS_EXPR, type,
14631 const_binop (MULT_EXPR, arg0, arg1), arg2);
14632 if (integer_zerop (arg2))
14633 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14635 return fold_fma (loc, type, arg0, arg1, arg2);
14637 case VEC_PERM_EXPR:
14638 if (TREE_CODE (arg2) == VECTOR_CST)
14640 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14641 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14642 tree t;
14643 bool need_mask_canon = false;
14644 bool all_in_vec0 = true;
14645 bool all_in_vec1 = true;
14646 bool maybe_identity = true;
14647 bool single_arg = (op0 == op1);
14648 bool changed = false;
14650 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14651 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14652 for (i = 0; i < nelts; i++)
14654 tree val = VECTOR_CST_ELT (arg2, i);
14655 if (TREE_CODE (val) != INTEGER_CST)
14656 return NULL_TREE;
14658 sel[i] = TREE_INT_CST_LOW (val) & mask;
14659 if (TREE_INT_CST_HIGH (val)
14660 || ((unsigned HOST_WIDE_INT)
14661 TREE_INT_CST_LOW (val) != sel[i]))
14662 need_mask_canon = true;
14664 if (sel[i] < nelts)
14665 all_in_vec1 = false;
14666 else
14667 all_in_vec0 = false;
14669 if ((sel[i] & (nelts-1)) != i)
14670 maybe_identity = false;
14673 if (maybe_identity)
14675 if (all_in_vec0)
14676 return op0;
14677 if (all_in_vec1)
14678 return op1;
14681 if (all_in_vec0)
14682 op1 = op0;
14683 else if (all_in_vec1)
14685 op0 = op1;
14686 for (i = 0; i < nelts; i++)
14687 sel[i] -= nelts;
14688 need_mask_canon = true;
14691 if ((TREE_CODE (op0) == VECTOR_CST
14692 || TREE_CODE (op0) == CONSTRUCTOR)
14693 && (TREE_CODE (op1) == VECTOR_CST
14694 || TREE_CODE (op1) == CONSTRUCTOR))
14696 t = fold_vec_perm (type, op0, op1, sel);
14697 if (t != NULL_TREE)
14698 return t;
14701 if (op0 == op1 && !single_arg)
14702 changed = true;
14704 if (need_mask_canon && arg2 == op2)
14706 tree *tsel = XALLOCAVEC (tree, nelts);
14707 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14708 for (i = 0; i < nelts; i++)
14709 tsel[i] = build_int_cst (eltype, sel[i]);
14710 op2 = build_vector (TREE_TYPE (arg2), tsel);
14711 changed = true;
14714 if (changed)
14715 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14717 return NULL_TREE;
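/* Worked example: with NELTS == 4 and a constant mask { 4, 5, 6, 7 },
   every index selects OP1 and (SEL[i] & 3) == i, so the permutation
   is recognized as the identity on OP1 and OP1 is returned directly;
   a mask mixing both operands instead goes through fold_vec_perm or
   is canonicalized in place.  */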
14719 default:
14720 return NULL_TREE;
14721 } /* switch (code) */
14724 /* Perform constant folding and related simplification of EXPR.
14725 The related simplifications include x*1 => x, x*0 => 0, etc.,
14726 and application of the associative law.
14727 NOP_EXPR conversions may be removed freely (as long as we
14728 are careful not to change the type of the overall expression).
14729 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14730 but we can constant-fold them if they have constant operands. */
14732 #ifdef ENABLE_FOLD_CHECKING
14733 # define fold(x) fold_1 (x)
14734 static tree fold_1 (tree);
14735 static
14736 #endif
14737 tree
14738 fold (tree expr)
14740 const tree t = expr;
14741 enum tree_code code = TREE_CODE (t);
14742 enum tree_code_class kind = TREE_CODE_CLASS (code);
14743 tree tem;
14744 location_t loc = EXPR_LOCATION (expr);
14746 /* Return right away if a constant. */
14747 if (kind == tcc_constant)
14748 return t;
14750 /* CALL_EXPR-like objects with variable numbers of operands are
14751 treated specially. */
14752 if (kind == tcc_vl_exp)
14754 if (code == CALL_EXPR)
14756 tem = fold_call_expr (loc, expr, false);
14757 return tem ? tem : expr;
14759 return expr;
14762 if (IS_EXPR_CODE_CLASS (kind))
14764 tree type = TREE_TYPE (t);
14765 tree op0, op1, op2;
14767 switch (TREE_CODE_LENGTH (code))
14769 case 1:
14770 op0 = TREE_OPERAND (t, 0);
14771 tem = fold_unary_loc (loc, code, type, op0);
14772 return tem ? tem : expr;
14773 case 2:
14774 op0 = TREE_OPERAND (t, 0);
14775 op1 = TREE_OPERAND (t, 1);
14776 tem = fold_binary_loc (loc, code, type, op0, op1);
14777 return tem ? tem : expr;
14778 case 3:
14779 op0 = TREE_OPERAND (t, 0);
14780 op1 = TREE_OPERAND (t, 1);
14781 op2 = TREE_OPERAND (t, 2);
14782 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14783 return tem ? tem : expr;
14784 default:
14785 break;
14789 switch (code)
14791 case ARRAY_REF:
14793 tree op0 = TREE_OPERAND (t, 0);
14794 tree op1 = TREE_OPERAND (t, 1);
14796 if (TREE_CODE (op1) == INTEGER_CST
14797 && TREE_CODE (op0) == CONSTRUCTOR
14798 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14800 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14801 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14802 unsigned HOST_WIDE_INT begin = 0;
14804 /* Find a matching index by means of a binary search. */
14805 while (begin != end)
14807 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14808 tree index = (*elts)[middle].index;
14810 if (TREE_CODE (index) == INTEGER_CST
14811 && tree_int_cst_lt (index, op1))
14812 begin = middle + 1;
14813 else if (TREE_CODE (index) == INTEGER_CST
14814 && tree_int_cst_lt (op1, index))
14815 end = middle;
14816 else if (TREE_CODE (index) == RANGE_EXPR
14817 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14818 begin = middle + 1;
14819 else if (TREE_CODE (index) == RANGE_EXPR
14820 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14821 end = middle;
14822 else
14823 return (*elts)[middle].value;
14827 return t;
14830 /* Return a VECTOR_CST if possible. */
14831 case CONSTRUCTOR:
14833 tree type = TREE_TYPE (t);
14834 if (TREE_CODE (type) != VECTOR_TYPE)
14835 return t;
14837 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14838 unsigned HOST_WIDE_INT idx, pos = 0;
14839 tree value;
14841 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14843 if (!CONSTANT_CLASS_P (value))
14844 return t;
14845 if (TREE_CODE (value) == VECTOR_CST)
14847 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14848 vec[pos++] = VECTOR_CST_ELT (value, i);
14850 else
14851 vec[pos++] = value;
14853 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14854 vec[pos] = build_zero_cst (TREE_TYPE (type));
14856 return build_vector (type, vec);
14859 case CONST_DECL:
14860 return fold (DECL_INITIAL (t));
14862 default:
14863 return t;
14864 } /* switch (code) */
14867 #ifdef ENABLE_FOLD_CHECKING
14868 #undef fold
14870 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14871 hash_table <pointer_hash <tree_node> >);
14872 static void fold_check_failed (const_tree, const_tree);
14873 void print_fold_checksum (const_tree);
14875 /* When --enable-checking=fold, compute a digest of expr before
14876 and after the actual fold call, to verify that fold did not
14877 accidentally change the original expr. */
14879 tree
14880 fold (tree expr)
14882 tree ret;
14883 struct md5_ctx ctx;
14884 unsigned char checksum_before[16], checksum_after[16];
14885 hash_table <pointer_hash <tree_node> > ht;
14887 ht.create (32);
14888 md5_init_ctx (&ctx);
14889 fold_checksum_tree (expr, &ctx, ht);
14890 md5_finish_ctx (&ctx, checksum_before);
14891 ht.empty ();
14893 ret = fold_1 (expr);
14895 md5_init_ctx (&ctx);
14896 fold_checksum_tree (expr, &ctx, ht);
14897 md5_finish_ctx (&ctx, checksum_after);
14898 ht.dispose ();
14900 if (memcmp (checksum_before, checksum_after, 16))
14901 fold_check_failed (expr, ret);
14903 return ret;
14906 void
14907 print_fold_checksum (const_tree expr)
14909 struct md5_ctx ctx;
14910 unsigned char checksum[16], cnt;
14911 hash_table <pointer_hash <tree_node> > ht;
14913 ht.create (32);
14914 md5_init_ctx (&ctx);
14915 fold_checksum_tree (expr, &ctx, ht);
14916 md5_finish_ctx (&ctx, checksum);
14917 ht.dispose ();
14918 for (cnt = 0; cnt < 16; ++cnt)
14919 fprintf (stderr, "%02x", checksum[cnt]);
14920 putc ('\n', stderr);
14923 static void
14924 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14926 internal_error ("fold check: original tree changed by fold");
14929 static void
14930 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14931 hash_table <pointer_hash <tree_node> > ht)
14933 tree_node **slot;
14934 enum tree_code code;
14935 union tree_node buf;
14936 int i, len;
14938 recursive_label:
14939 if (expr == NULL)
14940 return;
14941 slot = ht.find_slot (expr, INSERT);
14942 if (*slot != NULL)
14943 return;
14944 *slot = CONST_CAST_TREE (expr);
14945 code = TREE_CODE (expr);
14946 if (TREE_CODE_CLASS (code) == tcc_declaration
14947 && DECL_ASSEMBLER_NAME_SET_P (expr))
14949 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14950 memcpy ((char *) &buf, expr, tree_size (expr));
14951 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14952 expr = (tree) &buf;
14954 else if (TREE_CODE_CLASS (code) == tcc_type
14955 && (TYPE_POINTER_TO (expr)
14956 || TYPE_REFERENCE_TO (expr)
14957 || TYPE_CACHED_VALUES_P (expr)
14958 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14959 || TYPE_NEXT_VARIANT (expr)))
14961 /* Allow these fields to be modified. */
14962 tree tmp;
14963 memcpy ((char *) &buf, expr, tree_size (expr));
14964 expr = tmp = (tree) &buf;
14965 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14966 TYPE_POINTER_TO (tmp) = NULL;
14967 TYPE_REFERENCE_TO (tmp) = NULL;
14968 TYPE_NEXT_VARIANT (tmp) = NULL;
14969 if (TYPE_CACHED_VALUES_P (tmp))
14971 TYPE_CACHED_VALUES_P (tmp) = 0;
14972 TYPE_CACHED_VALUES (tmp) = NULL;
14975 md5_process_bytes (expr, tree_size (expr), ctx);
14976 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14977 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14978 if (TREE_CODE_CLASS (code) != tcc_type
14979 && TREE_CODE_CLASS (code) != tcc_declaration
14980 && code != TREE_LIST
14981 && code != SSA_NAME
14982 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14983 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14984 switch (TREE_CODE_CLASS (code))
14986 case tcc_constant:
14987 switch (code)
14989 case STRING_CST:
14990 md5_process_bytes (TREE_STRING_POINTER (expr),
14991 TREE_STRING_LENGTH (expr), ctx);
14992 break;
14993 case COMPLEX_CST:
14994 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14995 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14996 break;
14997 case VECTOR_CST:
14998 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14999 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15000 break;
15001 default:
15002 break;
15004 break;
15005 case tcc_exceptional:
15006 switch (code)
15008 case TREE_LIST:
15009 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15010 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15011 expr = TREE_CHAIN (expr);
15012 goto recursive_label;
15013 break;
15014 case TREE_VEC:
15015 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15016 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15017 break;
15018 default:
15019 break;
15021 break;
15022 case tcc_expression:
15023 case tcc_reference:
15024 case tcc_comparison:
15025 case tcc_unary:
15026 case tcc_binary:
15027 case tcc_statement:
15028 case tcc_vl_exp:
15029 len = TREE_OPERAND_LENGTH (expr);
15030 for (i = 0; i < len; ++i)
15031 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15032 break;
15033 case tcc_declaration:
15034 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15035 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15036 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15038 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15039 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15040 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15041 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15042 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15044 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15045 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15047 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15049 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15050 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15051 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15053 break;
15054 case tcc_type:
15055 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15056 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15057 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15058 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15059 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15060 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15061 if (INTEGRAL_TYPE_P (expr)
15062 || SCALAR_FLOAT_TYPE_P (expr))
15064 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15065 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15067 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15068 if (TREE_CODE (expr) == RECORD_TYPE
15069 || TREE_CODE (expr) == UNION_TYPE
15070 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15071 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15072 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15073 break;
15074 default:
15075 break;
15079 /* Helper function for outputting the checksum of a tree T. When
15080 debugging with gdb, you can "define mynext" to be "next" followed
15081 by "call debug_fold_checksum (op0)", then just trace down till the
15082 outputs differ. */
15084 DEBUG_FUNCTION void
15085 debug_fold_checksum (const_tree t)
15087 int i;
15088 unsigned char checksum[16];
15089 struct md5_ctx ctx;
15090 hash_table <pointer_hash <tree_node> > ht;
15091 ht.create (32);
15093 md5_init_ctx (&ctx);
15094 fold_checksum_tree (t, &ctx, ht);
15095 md5_finish_ctx (&ctx, checksum);
15096 ht.empty ();
15098 for (i = 0; i < 16; i++)
15099 fprintf (stderr, "%d ", checksum[i]);
15101 fprintf (stderr, "\n");
15104 #endif
15106 /* Fold a unary tree expression with code CODE of type TYPE with an
15107 operand OP0. LOC is the location of the resulting expression.
15108 Return a folded expression if successful. Otherwise, return a tree
15109 expression with code CODE of type TYPE with an operand OP0. */
15111 tree
15112 fold_build1_stat_loc (location_t loc,
15113 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15115 tree tem;
15116 #ifdef ENABLE_FOLD_CHECKING
15117 unsigned char checksum_before[16], checksum_after[16];
15118 struct md5_ctx ctx;
15119 hash_table <pointer_hash <tree_node> > ht;
15121 ht.create (32);
15122 md5_init_ctx (&ctx);
15123 fold_checksum_tree (op0, &ctx, ht);
15124 md5_finish_ctx (&ctx, checksum_before);
15125 ht.empty ();
15126 #endif
15128 tem = fold_unary_loc (loc, code, type, op0);
15129 if (!tem)
15130 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15132 #ifdef ENABLE_FOLD_CHECKING
15133 md5_init_ctx (&ctx);
15134 fold_checksum_tree (op0, &ctx, ht);
15135 md5_finish_ctx (&ctx, checksum_after);
15136 ht.dispose ();
15138 if (memcmp (checksum_before, checksum_after, 16))
15139 fold_check_failed (op0, tem);
15140 #endif
15141 return tem;
15144 /* Fold a binary tree expression with code CODE of type TYPE with
15145 operands OP0 and OP1. LOC is the location of the resulting
15146 expression. Return a folded expression if successful. Otherwise,
15147 return a tree expression with code CODE of type TYPE with operands
15148 OP0 and OP1. */
15150 tree
15151 fold_build2_stat_loc (location_t loc,
15152 enum tree_code code, tree type, tree op0, tree op1
15153 MEM_STAT_DECL)
15155 tree tem;
15156 #ifdef ENABLE_FOLD_CHECKING
15157 unsigned char checksum_before_op0[16],
15158 checksum_before_op1[16],
15159 checksum_after_op0[16],
15160 checksum_after_op1[16];
15161 struct md5_ctx ctx;
15162 hash_table <pointer_hash <tree_node> > ht;
15164 ht.create (32);
15165 md5_init_ctx (&ctx);
15166 fold_checksum_tree (op0, &ctx, ht);
15167 md5_finish_ctx (&ctx, checksum_before_op0);
15168 ht.empty ();
15170 md5_init_ctx (&ctx);
15171 fold_checksum_tree (op1, &ctx, ht);
15172 md5_finish_ctx (&ctx, checksum_before_op1);
15173 ht.empty ();
15174 #endif
15176 tem = fold_binary_loc (loc, code, type, op0, op1);
15177 if (!tem)
15178 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15180 #ifdef ENABLE_FOLD_CHECKING
15181 md5_init_ctx (&ctx);
15182 fold_checksum_tree (op0, &ctx, ht);
15183 md5_finish_ctx (&ctx, checksum_after_op0);
15184 ht.empty ();
15186 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15187 fold_check_failed (op0, tem);
15189 md5_init_ctx (&ctx);
15190 fold_checksum_tree (op1, &ctx, ht);
15191 md5_finish_ctx (&ctx, checksum_after_op1);
15192 ht.dispose ();
15194 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15195 fold_check_failed (op1, tem);
15196 #endif
15197 return tem;
15200 /* Fold a ternary tree expression with code CODE of type TYPE with
15201 operands OP0, OP1, and OP2. Return a folded expression if
15202 successful. Otherwise, return a tree expression with code CODE of
15203 type TYPE with operands OP0, OP1, and OP2. */
15205 tree
15206 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15207 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15209 tree tem;
15210 #ifdef ENABLE_FOLD_CHECKING
15211 unsigned char checksum_before_op0[16],
15212 checksum_before_op1[16],
15213 checksum_before_op2[16],
15214 checksum_after_op0[16],
15215 checksum_after_op1[16],
15216 checksum_after_op2[16];
15217 struct md5_ctx ctx;
15218 hash_table <pointer_hash <tree_node> > ht;
15220 ht.create (32);
15221 md5_init_ctx (&ctx);
15222 fold_checksum_tree (op0, &ctx, ht);
15223 md5_finish_ctx (&ctx, checksum_before_op0);
15224 ht.empty ();
15226 md5_init_ctx (&ctx);
15227 fold_checksum_tree (op1, &ctx, ht);
15228 md5_finish_ctx (&ctx, checksum_before_op1);
15229 ht.empty ();
15231 md5_init_ctx (&ctx);
15232 fold_checksum_tree (op2, &ctx, ht);
15233 md5_finish_ctx (&ctx, checksum_before_op2);
15234 ht.empty ();
15235 #endif
15237 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15238 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15239 if (!tem)
15240 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15242 #ifdef ENABLE_FOLD_CHECKING
15243 md5_init_ctx (&ctx);
15244 fold_checksum_tree (op0, &ctx, ht);
15245 md5_finish_ctx (&ctx, checksum_after_op0);
15246 ht.empty ();
15248 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15249 fold_check_failed (op0, tem);
15251 md5_init_ctx (&ctx);
15252 fold_checksum_tree (op1, &ctx, ht);
15253 md5_finish_ctx (&ctx, checksum_after_op1);
15254 ht.empty ();
15256 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15257 fold_check_failed (op1, tem);
15259 md5_init_ctx (&ctx);
15260 fold_checksum_tree (op2, &ctx, ht);
15261 md5_finish_ctx (&ctx, checksum_after_op2);
15262 ht.dispose ();
15264 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15265 fold_check_failed (op2, tem);
15266 #endif
15267 return tem;
15270 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15271 arguments in ARGARRAY, and a null static chain.
15272 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15273 of type TYPE from the given operands as constructed by build_call_array. */
15275 tree
15276 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15277 int nargs, tree *argarray)
15279 tree tem;
15280 #ifdef ENABLE_FOLD_CHECKING
15281 unsigned char checksum_before_fn[16],
15282 checksum_before_arglist[16],
15283 checksum_after_fn[16],
15284 checksum_after_arglist[16];
15285 struct md5_ctx ctx;
15286 hash_table <pointer_hash <tree_node> > ht;
15287 int i;
15289 ht.create (32);
15290 md5_init_ctx (&ctx);
15291 fold_checksum_tree (fn, &ctx, ht);
15292 md5_finish_ctx (&ctx, checksum_before_fn);
15293 ht.empty ();
15295 md5_init_ctx (&ctx);
15296 for (i = 0; i < nargs; i++)
15297 fold_checksum_tree (argarray[i], &ctx, ht);
15298 md5_finish_ctx (&ctx, checksum_before_arglist);
15299 ht.empty ();
15300 #endif
15302 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15304 #ifdef ENABLE_FOLD_CHECKING
15305 md5_init_ctx (&ctx);
15306 fold_checksum_tree (fn, &ctx, ht);
15307 md5_finish_ctx (&ctx, checksum_after_fn);
15308 ht.empty ();
15310 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15311 fold_check_failed (fn, tem);
15313 md5_init_ctx (&ctx);
15314 for (i = 0; i < nargs; i++)
15315 fold_checksum_tree (argarray[i], &ctx, ht);
15316 md5_finish_ctx (&ctx, checksum_after_arglist);
15317 ht.dispose ();
15319 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15320 fold_check_failed (NULL_TREE, tem);
15321 #endif
15322 return tem;
15325 /* Perform constant folding and related simplification of initializer
15326 expression EXPR. These behave identically to "fold_buildN" but ignore
15327 potential run-time traps and exceptions that fold must preserve. */
15329 #define START_FOLD_INIT \
15330 int saved_signaling_nans = flag_signaling_nans;\
15331 int saved_trapping_math = flag_trapping_math;\
15332 int saved_rounding_math = flag_rounding_math;\
15333 int saved_trapv = flag_trapv;\
15334 int saved_folding_initializer = folding_initializer;\
15335 flag_signaling_nans = 0;\
15336 flag_trapping_math = 0;\
15337 flag_rounding_math = 0;\
15338 flag_trapv = 0;\
15339 folding_initializer = 1;
15341 #define END_FOLD_INIT \
15342 flag_signaling_nans = saved_signaling_nans;\
15343 flag_trapping_math = saved_trapping_math;\
15344 flag_rounding_math = saved_rounding_math;\
15345 flag_trapv = saved_trapv;\
15346 folding_initializer = saved_folding_initializer;
15348 tree
15349 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15350 tree type, tree op)
15352 tree result;
15353 START_FOLD_INIT;
15355 result = fold_build1_loc (loc, code, type, op);
15357 END_FOLD_INIT;
15358 return result;
15361 tree
15362 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15363 tree type, tree op0, tree op1)
15365 tree result;
15366 START_FOLD_INIT;
15368 result = fold_build2_loc (loc, code, type, op0, op1);
15370 END_FOLD_INIT;
15371 return result;
15374 tree
15375 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15376 int nargs, tree *argarray)
15378 tree result;
15379 START_FOLD_INIT;
15381 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15383 END_FOLD_INIT;
15384 return result;
15387 #undef START_FOLD_INIT
15388 #undef END_FOLD_INIT
15390 /* Determine if the first argument is a multiple of the second argument.
15391 Return 0 if it is not, or if we cannot easily determine that it is.
15393 An example of the sort of thing we care about (at this point; this routine
15394 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15395 fold cases do now) is discovering that
15397 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15399 is a multiple of
15401 SAVE_EXPR (J * 8)
15403 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15405 This code also handles discovering that
15407 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15409 is a multiple of 8, so we don't have to worry about a possible
15410 remainder.
15412 Note that we *look* inside a SAVE_EXPR only to determine how it was
15413 calculated; it is not safe for fold to do much of anything else with the
15414 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15415 at run time. For example, the latter example above *cannot* be implemented
15416 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15417 evaluation time of the original SAVE_EXPR is not necessarily the same at
15418 the time the new expression is evaluated. The only optimization of this
15419 sort that would be valid is changing
15421 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15423 divided by 8 to
15425 SAVE_EXPR (I) * SAVE_EXPR (J)
15427 (where the same SAVE_EXPR (J) is used in the original and the
15428 transformed version). */
15430 int
15431 multiple_of_p (tree type, const_tree top, const_tree bottom)
15433 if (operand_equal_p (top, bottom, 0))
15434 return 1;
15436 if (TREE_CODE (type) != INTEGER_TYPE)
15437 return 0;
15439 switch (TREE_CODE (top))
15441 case BIT_AND_EXPR:
15442 /* Bitwise and provides a power of two multiple. If the mask is
15443 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15444 if (!integer_pow2p (bottom))
15445 return 0;
15446 /* FALLTHRU */
15448 case MULT_EXPR:
15449 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15450 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15452 case PLUS_EXPR:
15453 case MINUS_EXPR:
15454 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15455 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15457 case LSHIFT_EXPR:
15458 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15460 tree op1, t1;
15462 op1 = TREE_OPERAND (top, 1);
15463 /* const_binop may not detect overflow correctly,
15464 so check for it explicitly here. */
15465 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15466 > TREE_INT_CST_LOW (op1)
15467 && TREE_INT_CST_HIGH (op1) == 0
15468 && 0 != (t1 = fold_convert (type,
15469 const_binop (LSHIFT_EXPR,
15470 size_one_node,
15471 op1)))
15472 && !TREE_OVERFLOW (t1))
15473 return multiple_of_p (type, t1, bottom);
15475 return 0;
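/* Worked example: for TOP == N << 4, T1 == 1 << 4 == 16, and the
   recursive call asks whether 16 is a multiple of BOTTOM; with
   BOTTOM == 8 it is, so N << 4 is known to be a multiple of 8
   regardless of N.  */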
15477 case NOP_EXPR:
15478 /* Can't handle conversions from non-integral or wider integral type. */
15479 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15480 || (TYPE_PRECISION (type)
15481 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15482 return 0;
15484 /* ... fall through ... */
15486 case SAVE_EXPR:
15487 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15489 case COND_EXPR:
15490 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15491 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15493 case INTEGER_CST:
15494 if (TREE_CODE (bottom) != INTEGER_CST
15495 || integer_zerop (bottom)
15496 || (TYPE_UNSIGNED (type)
15497 && (tree_int_cst_sgn (top) < 0
15498 || tree_int_cst_sgn (bottom) < 0)))
15499 return 0;
15500 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15501 top, bottom));
15503 default:
15504 return 0;
15508 /* Return true if CODE or TYPE is known to be non-negative. */
15510 static bool
15511 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15513 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15514 && truth_value_p (code))
15515 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15516 have a signed:1 type (where the values are -1 and 0). */
15517 return true;
15518 return false;
15521 /* Return true if (CODE OP0) is known to be non-negative. If the return
15522 value is based on the assumption that signed overflow is undefined,
15523 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15524 *STRICT_OVERFLOW_P. */
15526 bool
15527 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15528 bool *strict_overflow_p)
15530 if (TYPE_UNSIGNED (type))
15531 return true;
15533 switch (code)
15535 case ABS_EXPR:
15536 /* We can't return 1 if flag_wrapv is set because
15537 ABS_EXPR<INT_MIN> = INT_MIN. */
15538 if (!INTEGRAL_TYPE_P (type))
15539 return true;
15540 if (TYPE_OVERFLOW_UNDEFINED (type))
15542 *strict_overflow_p = true;
15543 return true;
15545 break;
15547 case NON_LVALUE_EXPR:
15548 case FLOAT_EXPR:
15549 case FIX_TRUNC_EXPR:
15550 return tree_expr_nonnegative_warnv_p (op0,
15551 strict_overflow_p);
15553 case NOP_EXPR:
15555 tree inner_type = TREE_TYPE (op0);
15556 tree outer_type = type;
15558 if (TREE_CODE (outer_type) == REAL_TYPE)
15560 if (TREE_CODE (inner_type) == REAL_TYPE)
15561 return tree_expr_nonnegative_warnv_p (op0,
15562 strict_overflow_p);
15563 if (INTEGRAL_TYPE_P (inner_type))
15565 if (TYPE_UNSIGNED (inner_type))
15566 return true;
15567 return tree_expr_nonnegative_warnv_p (op0,
15568 strict_overflow_p);
15571 else if (INTEGRAL_TYPE_P (outer_type))
15573 if (TREE_CODE (inner_type) == REAL_TYPE)
15574 return tree_expr_nonnegative_warnv_p (op0,
15575 strict_overflow_p);
15576 if (INTEGRAL_TYPE_P (inner_type))
15577 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15578 && TYPE_UNSIGNED (inner_type);
15581 break;
15583 default:
15584 return tree_simple_nonnegative_warnv_p (code, type);
15588 /* We don't know the sign of `t', so be conservative and return false. */
15588 return false;
15591 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15592 value is based on the assumption that signed overflow is undefined,
15593 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15594 *STRICT_OVERFLOW_P. */
15596 bool
15597 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15598 tree op1, bool *strict_overflow_p)
15600 if (TYPE_UNSIGNED (type))
15601 return true;
15603 switch (code)
15605 case POINTER_PLUS_EXPR:
15606 case PLUS_EXPR:
15607 if (FLOAT_TYPE_P (type))
15608 return (tree_expr_nonnegative_warnv_p (op0,
15609 strict_overflow_p)
15610 && tree_expr_nonnegative_warnv_p (op1,
15611 strict_overflow_p));
15613 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15614 both unsigned and at least 2 bits shorter than the result. */
15615 if (TREE_CODE (type) == INTEGER_TYPE
15616 && TREE_CODE (op0) == NOP_EXPR
15617 && TREE_CODE (op1) == NOP_EXPR)
15619 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15620 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15621 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15622 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15624 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15625 TYPE_PRECISION (inner2)) + 1;
15626 return prec < TYPE_PRECISION (type);
15629 break;
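/* Worked example: (int) (unsigned char) X + (int) (unsigned char) Y
   has PREC == MAX (8, 8) + 1 == 9 < 32, and indeed the sum is at
   most 255 + 255 == 510, which cannot wrap into the sign bit of a
   32-bit int.  */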
15631 case MULT_EXPR:
15632 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15634 /* x * x is always non-negative for floating point x
15635 or without overflow. */
15636 if (operand_equal_p (op0, op1, 0)
15637 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15638 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15640 if (TYPE_OVERFLOW_UNDEFINED (type))
15641 *strict_overflow_p = true;
15642 return true;
15646 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15647 both unsigned and their combined precision is less than the result's. */
15648 if (TREE_CODE (type) == INTEGER_TYPE
15649 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15650 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15652 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15653 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15654 : TREE_TYPE (op0);
15655 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15656 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15657 : TREE_TYPE (op1);
15659 bool unsigned0 = TYPE_UNSIGNED (inner0);
15660 bool unsigned1 = TYPE_UNSIGNED (inner1);
15662 if (TREE_CODE (op0) == INTEGER_CST)
15663 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15665 if (TREE_CODE (op1) == INTEGER_CST)
15666 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15668 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15669 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15671 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15672 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15673 : TYPE_PRECISION (inner0);
15675 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15676 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15677 : TYPE_PRECISION (inner1);
15679 return precision0 + precision1 < TYPE_PRECISION (type);
15682 return false;
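/* Worked example: (int) (unsigned char) X * (int) (unsigned char) Y
   has PRECISION0 + PRECISION1 == 16 < 32, and indeed the product is
   at most 255 * 255 == 65025, well below INT_MAX, so the result is
   known non-negative.  */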
15684 case BIT_AND_EXPR:
15685 case MAX_EXPR:
15686 return (tree_expr_nonnegative_warnv_p (op0,
15687 strict_overflow_p)
15688 || tree_expr_nonnegative_warnv_p (op1,
15689 strict_overflow_p));
15691 case BIT_IOR_EXPR:
15692 case BIT_XOR_EXPR:
15693 case MIN_EXPR:
15694 case RDIV_EXPR:
15695 case TRUNC_DIV_EXPR:
15696 case CEIL_DIV_EXPR:
15697 case FLOOR_DIV_EXPR:
15698 case ROUND_DIV_EXPR:
15699 return (tree_expr_nonnegative_warnv_p (op0,
15700 strict_overflow_p)
15701 && tree_expr_nonnegative_warnv_p (op1,
15702 strict_overflow_p));
15704 case TRUNC_MOD_EXPR:
15705 case CEIL_MOD_EXPR:
15706 case FLOOR_MOD_EXPR:
15707 case ROUND_MOD_EXPR:
15708 return tree_expr_nonnegative_warnv_p (op0,
15709 strict_overflow_p);
15710 default:
15711 return tree_simple_nonnegative_warnv_p (code, type);
15714 /* We don't know the sign of the expression, so be conservative and return false. */
15715 return false;
15718 /* Return true if T is known to be non-negative. If the return
15719 value is based on the assumption that signed overflow is undefined,
15720 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15721 *STRICT_OVERFLOW_P. */
15723 bool
15724 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15726 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15727 return true;
15729 switch (TREE_CODE (t))
15731 case INTEGER_CST:
15732 return tree_int_cst_sgn (t) >= 0;
15734 case REAL_CST:
15735 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15737 case FIXED_CST:
15738 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15740 case COND_EXPR:
15741 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15742 strict_overflow_p)
15743 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15744 strict_overflow_p));
15745 default:
15746 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15747 TREE_TYPE (t));
15750 /* We don't know the sign of `t', so be conservative and return false. */
15750 return false;
15753 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is known to be non-negative. If the return
15754 value is based on the assumption that signed overflow is undefined,
15755 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15756 *STRICT_OVERFLOW_P. */
15758 bool
15759 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15760 tree arg0, tree arg1, bool *strict_overflow_p)
15762 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15763 switch (DECL_FUNCTION_CODE (fndecl))
15765 CASE_FLT_FN (BUILT_IN_ACOS):
15766 CASE_FLT_FN (BUILT_IN_ACOSH):
15767 CASE_FLT_FN (BUILT_IN_CABS):
15768 CASE_FLT_FN (BUILT_IN_COSH):
15769 CASE_FLT_FN (BUILT_IN_ERFC):
15770 CASE_FLT_FN (BUILT_IN_EXP):
15771 CASE_FLT_FN (BUILT_IN_EXP10):
15772 CASE_FLT_FN (BUILT_IN_EXP2):
15773 CASE_FLT_FN (BUILT_IN_FABS):
15774 CASE_FLT_FN (BUILT_IN_FDIM):
15775 CASE_FLT_FN (BUILT_IN_HYPOT):
15776 CASE_FLT_FN (BUILT_IN_POW10):
15777 CASE_INT_FN (BUILT_IN_FFS):
15778 CASE_INT_FN (BUILT_IN_PARITY):
15779 CASE_INT_FN (BUILT_IN_POPCOUNT):
15780 CASE_INT_FN (BUILT_IN_CLZ):
15781 CASE_INT_FN (BUILT_IN_CLRSB):
15782 case BUILT_IN_BSWAP32:
15783 case BUILT_IN_BSWAP64:
15784 /* Always true. */
15785 return true;
15787 CASE_FLT_FN (BUILT_IN_SQRT):
15788 /* sqrt(-0.0) is -0.0. */
15789 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15790 return true;
15791 return tree_expr_nonnegative_warnv_p (arg0,
15792 strict_overflow_p);
15794 CASE_FLT_FN (BUILT_IN_ASINH):
15795 CASE_FLT_FN (BUILT_IN_ATAN):
15796 CASE_FLT_FN (BUILT_IN_ATANH):
15797 CASE_FLT_FN (BUILT_IN_CBRT):
15798 CASE_FLT_FN (BUILT_IN_CEIL):
15799 CASE_FLT_FN (BUILT_IN_ERF):
15800 CASE_FLT_FN (BUILT_IN_EXPM1):
15801 CASE_FLT_FN (BUILT_IN_FLOOR):
15802 CASE_FLT_FN (BUILT_IN_FMOD):
15803 CASE_FLT_FN (BUILT_IN_FREXP):
15804 CASE_FLT_FN (BUILT_IN_ICEIL):
15805 CASE_FLT_FN (BUILT_IN_IFLOOR):
15806 CASE_FLT_FN (BUILT_IN_IRINT):
15807 CASE_FLT_FN (BUILT_IN_IROUND):
15808 CASE_FLT_FN (BUILT_IN_LCEIL):
15809 CASE_FLT_FN (BUILT_IN_LDEXP):
15810 CASE_FLT_FN (BUILT_IN_LFLOOR):
15811 CASE_FLT_FN (BUILT_IN_LLCEIL):
15812 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15813 CASE_FLT_FN (BUILT_IN_LLRINT):
15814 CASE_FLT_FN (BUILT_IN_LLROUND):
15815 CASE_FLT_FN (BUILT_IN_LRINT):
15816 CASE_FLT_FN (BUILT_IN_LROUND):
15817 CASE_FLT_FN (BUILT_IN_MODF):
15818 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15819 CASE_FLT_FN (BUILT_IN_RINT):
15820 CASE_FLT_FN (BUILT_IN_ROUND):
15821 CASE_FLT_FN (BUILT_IN_SCALB):
15822 CASE_FLT_FN (BUILT_IN_SCALBLN):
15823 CASE_FLT_FN (BUILT_IN_SCALBN):
15824 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15825 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15826 CASE_FLT_FN (BUILT_IN_SINH):
15827 CASE_FLT_FN (BUILT_IN_TANH):
15828 CASE_FLT_FN (BUILT_IN_TRUNC):
15829 /* True if the 1st argument is nonnegative. */
15830 return tree_expr_nonnegative_warnv_p (arg0,
15831 strict_overflow_p);
15833 CASE_FLT_FN (BUILT_IN_FMAX):
15834 /* True if either the 1st or the 2nd argument is nonnegative. */
15835 return (tree_expr_nonnegative_warnv_p (arg0,
15836 strict_overflow_p)
15837 || (tree_expr_nonnegative_warnv_p (arg1,
15838 strict_overflow_p)));
15840 CASE_FLT_FN (BUILT_IN_FMIN):
15841 /* True if the 1st AND 2nd arguments are nonnegative. */
15842 return (tree_expr_nonnegative_warnv_p (arg0,
15843 strict_overflow_p)
15844 && (tree_expr_nonnegative_warnv_p (arg1,
15845 strict_overflow_p)));
15847 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15848 /* True if the 2nd argument is nonnegative. */
15849 return tree_expr_nonnegative_warnv_p (arg1,
15850 strict_overflow_p);
15852 CASE_FLT_FN (BUILT_IN_POWI):
15853 /* True if the 1st argument is nonnegative or the second
15854 argument is an even integer. */
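/* Editor's sketch (illustration only): powi (x, 4) == (x*x) * (x*x),
   which is non-negative for any real x, so an even constant exponent
   suffices even when the sign of the base is unknown.  */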
15855 if (TREE_CODE (arg1) == INTEGER_CST
15856 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15857 return true;
15858 return tree_expr_nonnegative_warnv_p (arg0,
15859 strict_overflow_p);
15861 CASE_FLT_FN (BUILT_IN_POW):
15862 /* True if the 1st argument is nonnegative or the second
15863 argument is an even integer valued real. */
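/* Editor's sketch (illustration only): pow (x, 2.0) == x*x >= 0 for any
   x, whereas pow (x, 2.5) is undefined for negative x; the code below
   therefore round-trips the constant through an integer to check that it
   is an exactly representable even integer value.  */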
15864 if (TREE_CODE (arg1) == REAL_CST)
15866 REAL_VALUE_TYPE c;
15867 HOST_WIDE_INT n;
15869 c = TREE_REAL_CST (arg1);
15870 n = real_to_integer (&c);
15871 if ((n & 1) == 0)
15873 REAL_VALUE_TYPE cint;
15874 real_from_integer (&cint, VOIDmode, n,
15875 n < 0 ? -1 : 0, 0);
15876 if (real_identical (&c, &cint))
15877 return true;
15880 return tree_expr_nonnegative_warnv_p (arg0,
15881 strict_overflow_p);
15883 default:
15884 break;
15886 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15887 type);
15890 /* Return true if T is known to be non-negative. If the return
15891 value is based on the assumption that signed overflow is undefined,
15892 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15893 *STRICT_OVERFLOW_P. */
15895 static bool
15896 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15898 enum tree_code code = TREE_CODE (t);
15899 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15900 return true;
15902 switch (code)
15904 case TARGET_EXPR:
15906 tree temp = TARGET_EXPR_SLOT (t);
15907 t = TARGET_EXPR_INITIAL (t);
15909 /* If the initializer is non-void, then it's a normal expression
15910 that will be assigned to the slot. */
15911 if (!VOID_TYPE_P (t))
15912 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15914 /* Otherwise, the initializer sets the slot in some way. One common
15915 way is an assignment statement at the end of the initializer. */
15916 while (1)
15918 if (TREE_CODE (t) == BIND_EXPR)
15919 t = expr_last (BIND_EXPR_BODY (t));
15920 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15921 || TREE_CODE (t) == TRY_CATCH_EXPR)
15922 t = expr_last (TREE_OPERAND (t, 0));
15923 else if (TREE_CODE (t) == STATEMENT_LIST)
15924 t = expr_last (t);
15925 else
15926 break;
15928 if (TREE_CODE (t) == MODIFY_EXPR
15929 && TREE_OPERAND (t, 0) == temp)
15930 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15931 strict_overflow_p);
15933 return false;
15936 case CALL_EXPR:
15938 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15939 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15941 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15942 get_callee_fndecl (t),
15943 arg0,
15944 arg1,
15945 strict_overflow_p);
15947 case COMPOUND_EXPR:
15948 case MODIFY_EXPR:
15949 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15950 strict_overflow_p);
15951 case BIND_EXPR:
15952 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15953 strict_overflow_p);
15954 case SAVE_EXPR:
15955 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15956 strict_overflow_p);
15958 default:
15959 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15960 TREE_TYPE (t));
15964 /* We don't know the sign of `t', so be conservative and return false. */
15964 return false;
15967 /* Return true if T is known to be non-negative. If the return
15968 value is based on the assumption that signed overflow is undefined,
15969 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15970 *STRICT_OVERFLOW_P. */
15972 bool
15973 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15975 enum tree_code code;
15976 if (t == error_mark_node)
15977 return false;
15979 code = TREE_CODE (t);
15980 switch (TREE_CODE_CLASS (code))
15982 case tcc_binary:
15983 case tcc_comparison:
15984 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15985 TREE_TYPE (t),
15986 TREE_OPERAND (t, 0),
15987 TREE_OPERAND (t, 1),
15988 strict_overflow_p);
15990 case tcc_unary:
15991 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15992 TREE_TYPE (t),
15993 TREE_OPERAND (t, 0),
15994 strict_overflow_p);
15996 case tcc_constant:
15997 case tcc_declaration:
15998 case tcc_reference:
15999 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16001 default:
16002 break;
16005 switch (code)
16007 case TRUTH_AND_EXPR:
16008 case TRUTH_OR_EXPR:
16009 case TRUTH_XOR_EXPR:
16010 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16011 TREE_TYPE (t),
16012 TREE_OPERAND (t, 0),
16013 TREE_OPERAND (t, 1),
16014 strict_overflow_p);
16015 case TRUTH_NOT_EXPR:
16016 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16017 TREE_TYPE (t),
16018 TREE_OPERAND (t, 0),
16019 strict_overflow_p);
16021 case COND_EXPR:
16022 case CONSTRUCTOR:
16023 case OBJ_TYPE_REF:
16024 case ASSERT_EXPR:
16025 case ADDR_EXPR:
16026 case WITH_SIZE_EXPR:
16027 case SSA_NAME:
16028 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16030 default:
16031 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16035 /* Return true if `t' is known to be non-negative. Handle warnings
16036 about undefined signed overflow. */
16038 bool
16039 tree_expr_nonnegative_p (tree t)
16041 bool ret, strict_overflow_p;
16043 strict_overflow_p = false;
16044 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16045 if (strict_overflow_p)
16046 fold_overflow_warning (("assuming signed overflow does not occur when "
16047 "determining that expression is always "
16048 "non-negative"),
16049 WARN_STRICT_OVERFLOW_MISC);
16050 return ret;
16054 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16055 For floating point we further ensure that OP0 is not denormal.
16056 Similar logic is present in nonzero_address in rtlanal.c.
16058 If the return value is based on the assumption that signed overflow
16059 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16060 change *STRICT_OVERFLOW_P. */
16062 bool
16063 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16064 bool *strict_overflow_p)
16066 switch (code)
16068 case ABS_EXPR:
16069 return tree_expr_nonzero_warnv_p (op0,
16070 strict_overflow_p);
16072 case NOP_EXPR:
16074 tree inner_type = TREE_TYPE (op0);
16075 tree outer_type = type;
16077 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16078 && tree_expr_nonzero_warnv_p (op0,
16079 strict_overflow_p));
16081 break;
16083 case NON_LVALUE_EXPR:
16084 return tree_expr_nonzero_warnv_p (op0,
16085 strict_overflow_p);
16087 default:
16088 break;
16091 return false;
16094 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16095 For floating point we further ensure that the value is not denormal.
16096 Similar logic is present in nonzero_address in rtlanal.c.
16098 If the return value is based on the assumption that signed overflow
16099 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16100 change *STRICT_OVERFLOW_P. */
16102 bool
16103 tree_binary_nonzero_warnv_p (enum tree_code code,
16104 tree type,
16105 tree op0,
16106 tree op1, bool *strict_overflow_p)
16108 bool sub_strict_overflow_p;
16109 switch (code)
16111 case POINTER_PLUS_EXPR:
16112 case PLUS_EXPR:
16113 if (TYPE_OVERFLOW_UNDEFINED (type))
16115 /* In the presence of negative values it is hard
16116 to say anything definite. */
16117 sub_strict_overflow_p = false;
16118 if (!tree_expr_nonnegative_warnv_p (op0,
16119 &sub_strict_overflow_p)
16120 || !tree_expr_nonnegative_warnv_p (op1,
16121 &sub_strict_overflow_p))
16122 return false;
16123 /* One of the operands must be positive and the other non-negative. */
16124 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16125 overflows, on a twos-complement machine the sum of two
16126 nonnegative numbers can never be zero. */
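/* Editor's sketch (illustration only): in 8-bit twos-complement
   arithmetic two addends in [0, 127] sum to at most 254, so whenever at
   least one addend is nonzero the wrapped result lies in [1, 254] and is
   never zero.  */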
16127 return (tree_expr_nonzero_warnv_p (op0,
16128 strict_overflow_p)
16129 || tree_expr_nonzero_warnv_p (op1,
16130 strict_overflow_p));
16132 break;
16134 case MULT_EXPR:
16135 if (TYPE_OVERFLOW_UNDEFINED (type))
16137 if (tree_expr_nonzero_warnv_p (op0,
16138 strict_overflow_p)
16139 && tree_expr_nonzero_warnv_p (op1,
16140 strict_overflow_p))
16142 *strict_overflow_p = true;
16143 return true;
16146 break;
16148 case MIN_EXPR:
16149 sub_strict_overflow_p = false;
16150 if (tree_expr_nonzero_warnv_p (op0,
16151 &sub_strict_overflow_p)
16152 && tree_expr_nonzero_warnv_p (op1,
16153 &sub_strict_overflow_p))
16155 if (sub_strict_overflow_p)
16156 *strict_overflow_p = true;
16158 break;
16160 case MAX_EXPR:
16161 sub_strict_overflow_p = false;
16162 if (tree_expr_nonzero_warnv_p (op0,
16163 &sub_strict_overflow_p))
16165 if (sub_strict_overflow_p)
16166 *strict_overflow_p = true;
16168 /* When both operands are nonzero, then MAX must be too. */
16169 if (tree_expr_nonzero_warnv_p (op1,
16170 strict_overflow_p))
16171 return true;
16173 /* MAX where operand 0 is positive is positive. */
16174 return tree_expr_nonnegative_warnv_p (op0,
16175 strict_overflow_p);
16177 /* MAX where operand 1 is positive is positive. */
16178 else if (tree_expr_nonzero_warnv_p (op1,
16179 &sub_strict_overflow_p)
16180 && tree_expr_nonnegative_warnv_p (op1,
16181 &sub_strict_overflow_p))
16183 if (sub_strict_overflow_p)
16184 *strict_overflow_p = true;
16185 return true;
16187 break;
16189 case BIT_IOR_EXPR:
16190 return (tree_expr_nonzero_warnv_p (op1,
16191 strict_overflow_p)
16192 || tree_expr_nonzero_warnv_p (op0,
16193 strict_overflow_p));
16195 default:
16196 break;
16199 return false;
16202 /* Return true when T is an address and is known to be nonzero.
16203 For floating point we further ensure that T is not denormal.
16204 Similar logic is present in nonzero_address in rtlanal.c.
16206 If the return value is based on the assumption that signed overflow
16207 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16208 change *STRICT_OVERFLOW_P. */
16210 bool
16211 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16213 bool sub_strict_overflow_p;
16214 switch (TREE_CODE (t))
16216 case INTEGER_CST:
16217 return !integer_zerop (t);
16219 case ADDR_EXPR:
16221 tree base = TREE_OPERAND (t, 0);
16222 if (!DECL_P (base))
16223 base = get_base_address (base);
16225 if (!base)
16226 return false;
16228 /* Weak declarations may link to NULL. Other things may also be NULL,
16229 so protect with -fdelete-null-pointer-checks; the addresses of
16230 variables allocated on the stack, however, are nonzero even without it. */
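/* Editor's sketch (illustration only): given
   `extern int w __attribute__ ((weak));', the address `&w' may
   legitimately compare equal to NULL when the symbol is never defined,
   so it must not be folded to "known nonzero".  */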
16231 if (DECL_P (base)
16232 && (flag_delete_null_pointer_checks
16233 || (DECL_CONTEXT (base)
16234 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16235 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16236 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16238 /* Constants are never weak. */
16239 if (CONSTANT_CLASS_P (base))
16240 return true;
16242 return false;
16245 case COND_EXPR:
16246 sub_strict_overflow_p = false;
16247 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16248 &sub_strict_overflow_p)
16249 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16250 &sub_strict_overflow_p))
16252 if (sub_strict_overflow_p)
16253 *strict_overflow_p = true;
16254 return true;
16256 break;
16258 default:
16259 break;
16261 return false;
16264 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16265 attempt to fold the expression to a constant without modifying TYPE,
16266 OP0 or OP1.
16268 If the expression could be simplified to a constant, then return
16269 the constant. If the expression would not be simplified to a
16270 constant, then return NULL_TREE. */
16272 tree
16273 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16275 tree tem = fold_binary (code, type, op0, op1);
16276 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16279 /* Given the components of a unary expression CODE, TYPE and OP0,
16280 attempt to fold the expression to a constant without modifying
16281 TYPE or OP0.
16283 If the expression could be simplified to a constant, then return
16284 the constant. If the expression would not be simplified to a
16285 constant, then return NULL_TREE. */
16287 tree
16288 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16290 tree tem = fold_unary (code, type, op0);
16291 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16294 /* If EXP represents referencing an element in a constant string
16295 (either via pointer arithmetic or array indexing), return the
16296 tree representing the value accessed, otherwise return NULL. */
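/* Editor's sketch (illustration only): an access such as "abc"[1] folds
   to the character constant 'b' in the type of the access.  */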
16298 tree
16299 fold_read_from_constant_string (tree exp)
16301 if ((TREE_CODE (exp) == INDIRECT_REF
16302 || TREE_CODE (exp) == ARRAY_REF)
16303 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16305 tree exp1 = TREE_OPERAND (exp, 0);
16306 tree index;
16307 tree string;
16308 location_t loc = EXPR_LOCATION (exp);
16310 if (TREE_CODE (exp) == INDIRECT_REF)
16311 string = string_constant (exp1, &index);
16312 else
16314 tree low_bound = array_ref_low_bound (exp);
16315 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16317 /* Optimize the special-case of a zero lower bound.
16319 We convert the low_bound to sizetype to avoid some problems
16320 with constant folding. (E.g. suppose the lower bound is 1,
16321 and its mode is QI. Without the conversion, (ARRAY
16322 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16323 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16324 if (! integer_zerop (low_bound))
16325 index = size_diffop_loc (loc, index,
16326 fold_convert_loc (loc, sizetype, low_bound));
16328 string = exp1;
16331 if (string
16332 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16333 && TREE_CODE (string) == STRING_CST
16334 && TREE_CODE (index) == INTEGER_CST
16335 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16336 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16337 == MODE_INT)
16338 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16339 return build_int_cst_type (TREE_TYPE (exp),
16340 (TREE_STRING_POINTER (string)
16341 [TREE_INT_CST_LOW (index)]));
16343 return NULL;
16346 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16347 an integer constant, real, or fixed-point constant.
16349 TYPE is the type of the result. */
16351 static tree
16352 fold_negate_const (tree arg0, tree type)
16354 tree t = NULL_TREE;
16356 switch (TREE_CODE (arg0))
16358 case INTEGER_CST:
16360 double_int val = tree_to_double_int (arg0);
16361 bool overflow;
16362 val = val.neg_with_overflow (&overflow);
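/* Editor's note: twos-complement negation overflows only for the most
   negative representable value, e.g. negating INT_MIN.  */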
16363 t = force_fit_type_double (type, val, 1,
16364 (overflow | TREE_OVERFLOW (arg0))
16365 && !TYPE_UNSIGNED (type));
16366 break;
16369 case REAL_CST:
16370 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16371 break;
16373 case FIXED_CST:
16375 FIXED_VALUE_TYPE f;
16376 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16377 &(TREE_FIXED_CST (arg0)), NULL,
16378 TYPE_SATURATING (type));
16379 t = build_fixed (type, f);
16380 /* Propagate overflow flags. */
16381 if (overflow_p | TREE_OVERFLOW (arg0))
16382 TREE_OVERFLOW (t) = 1;
16383 break;
16386 default:
16387 gcc_unreachable ();
16390 return t;
16393 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16394 an integer constant or real constant.
16396 TYPE is the type of the result. */
16398 tree
16399 fold_abs_const (tree arg0, tree type)
16401 tree t = NULL_TREE;
16403 switch (TREE_CODE (arg0))
16405 case INTEGER_CST:
16407 double_int val = tree_to_double_int (arg0);
16409 /* If the value is unsigned or non-negative, then the absolute value
16410 is the same as the ordinary value. */
16411 if (TYPE_UNSIGNED (type)
16412 || !val.is_negative ())
16413 t = arg0;
16415 /* If the value is negative, then the absolute value is
16416 its negation. */
16417 else
16419 bool overflow;
16420 val = val.neg_with_overflow (&overflow);
16421 t = force_fit_type_double (type, val, -1,
16422 overflow | TREE_OVERFLOW (arg0));
16425 break;
16427 case REAL_CST:
16428 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16429 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16430 else
16431 t = arg0;
16432 break;
16434 default:
16435 gcc_unreachable ();
16438 return t;
16441 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16442 constant. TYPE is the type of the result. */
16444 static tree
16445 fold_not_const (const_tree arg0, tree type)
16447 double_int val;
16449 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16451 val = ~tree_to_double_int (arg0);
16452 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16455 /* Given CODE, a relational operator, the target type, TYPE and two
16456 constant operands OP0 and OP1, return the result of the
16457 relational operation. If the result is not a compile time
16458 constant, then return NULL_TREE. */
16460 static tree
16461 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16463 int result, invert;
16465 /* From here on, the only cases we handle are when the result is
16466 known to be a constant. */
16468 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16470 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16471 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16473 /* Handle the cases where either operand is a NaN. */
16474 if (real_isnan (c0) || real_isnan (c1))
16476 switch (code)
16478 case EQ_EXPR:
16479 case ORDERED_EXPR:
16480 result = 0;
16481 break;
16483 case NE_EXPR:
16484 case UNORDERED_EXPR:
16485 case UNLT_EXPR:
16486 case UNLE_EXPR:
16487 case UNGT_EXPR:
16488 case UNGE_EXPR:
16489 case UNEQ_EXPR:
16490 result = 1;
16491 break;
16493 case LT_EXPR:
16494 case LE_EXPR:
16495 case GT_EXPR:
16496 case GE_EXPR:
16497 case LTGT_EXPR:
16498 if (flag_trapping_math)
16499 return NULL_TREE;
16500 result = 0;
16501 break;
16503 default:
16504 gcc_unreachable ();
16507 return constant_boolean_node (result, type);
16510 return constant_boolean_node (real_compare (code, c0, c1), type);
16513 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16515 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16516 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16517 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16520 /* Handle equality/inequality of complex constants. */
16521 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16523 tree rcond = fold_relational_const (code, type,
16524 TREE_REALPART (op0),
16525 TREE_REALPART (op1));
16526 tree icond = fold_relational_const (code, type,
16527 TREE_IMAGPART (op0),
16528 TREE_IMAGPART (op1));
16529 if (code == EQ_EXPR)
16530 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16531 else if (code == NE_EXPR)
16532 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16533 else
16534 return NULL_TREE;
16537 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16539 unsigned count = VECTOR_CST_NELTS (op0);
16540 tree *elts = XALLOCAVEC (tree, count);
16541 gcc_assert (VECTOR_CST_NELTS (op1) == count
16542 && TYPE_VECTOR_SUBPARTS (type) == count);
16544 for (unsigned i = 0; i < count; i++)
16546 tree elem_type = TREE_TYPE (type);
16547 tree elem0 = VECTOR_CST_ELT (op0, i);
16548 tree elem1 = VECTOR_CST_ELT (op1, i);
16550 tree tem = fold_relational_const (code, elem_type,
16551 elem0, elem1);
16553 if (tem == NULL_TREE)
16554 return NULL_TREE;
16556 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16559 return build_vector (type, elts);
16562 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16564 To compute GT, swap the arguments and do LT.
16565 To compute GE, do LT and invert the result.
16566 To compute LE, swap the arguments, do LT and invert the result.
16567 To compute NE, do EQ and invert the result.
16569 Therefore, the code below must handle only EQ and LT. */
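/* Editor's sketch (illustration only): GE becomes ! (op0 < op1) and LE
   becomes ! (op1 < op0); the inversion is valid because the unordered
   NaN cases were already handled above.  */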
16571 if (code == LE_EXPR || code == GT_EXPR)
16573 tree tem = op0;
16574 op0 = op1;
16575 op1 = tem;
16576 code = swap_tree_comparison (code);
16579 /* Note that it is safe to invert for real values here because we
16580 have already handled the one case where it matters. */
16582 invert = 0;
16583 if (code == NE_EXPR || code == GE_EXPR)
16585 invert = 1;
16586 code = invert_tree_comparison (code, false);
16589 /* Compute a result for LT or EQ if the arguments permit;
16590 otherwise return NULL_TREE. */
16591 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16593 if (code == EQ_EXPR)
16594 result = tree_int_cst_equal (op0, op1);
16595 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16596 result = INT_CST_LT_UNSIGNED (op0, op1);
16597 else
16598 result = INT_CST_LT (op0, op1);
16600 else
16601 return NULL_TREE;
16603 if (invert)
16604 result ^= 1;
16605 return constant_boolean_node (result, type);
16608 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16609 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16610 itself. */
16612 tree
16613 fold_build_cleanup_point_expr (tree type, tree expr)
16615 /* If the expression does not have side effects then we don't have to wrap
16616 it with a cleanup point expression. */
16617 if (!TREE_SIDE_EFFECTS (expr))
16618 return expr;
16620 /* If the expression is a return, check whether the expression inside
16621 the return, or the right-hand side of the modify expression inside
16622 the return, has no side effects. If either has none, we don't need to
16623 wrap the expression in a cleanup point expression. Note we don't check
16624 the left-hand side of the modify because it should always be a return decl. */
16625 if (TREE_CODE (expr) == RETURN_EXPR)
16627 tree op = TREE_OPERAND (expr, 0);
16628 if (!op || !TREE_SIDE_EFFECTS (op))
16629 return expr;
16630 op = TREE_OPERAND (op, 1);
16631 if (!TREE_SIDE_EFFECTS (op))
16632 return expr;
16635 return build1 (CLEANUP_POINT_EXPR, type, expr);
16638 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16639 of an indirection through OP0, or NULL_TREE if no simplification is
16640 possible. */
16642 tree
16643 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16645 tree sub = op0;
16646 tree subtype;
16648 STRIP_NOPS (sub);
16649 subtype = TREE_TYPE (sub);
16650 if (!POINTER_TYPE_P (subtype))
16651 return NULL_TREE;
16653 if (TREE_CODE (sub) == ADDR_EXPR)
16655 tree op = TREE_OPERAND (sub, 0);
16656 tree optype = TREE_TYPE (op);
16657 /* *&CONST_DECL -> to the value of the const decl. */
16658 if (TREE_CODE (op) == CONST_DECL)
16659 return DECL_INITIAL (op);
16660 /* *&p => p; make sure to handle *&"str"[cst] here. */
16661 if (type == optype)
16663 tree fop = fold_read_from_constant_string (op);
16664 if (fop)
16665 return fop;
16666 else
16667 return op;
16669 /* *(foo *)&fooarray => fooarray[0] */
16670 else if (TREE_CODE (optype) == ARRAY_TYPE
16671 && type == TREE_TYPE (optype)
16672 && (!in_gimple_form
16673 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16675 tree type_domain = TYPE_DOMAIN (optype);
16676 tree min_val = size_zero_node;
16677 if (type_domain && TYPE_MIN_VALUE (type_domain))
16678 min_val = TYPE_MIN_VALUE (type_domain);
16679 if (in_gimple_form
16680 && TREE_CODE (min_val) != INTEGER_CST)
16681 return NULL_TREE;
16682 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16683 NULL_TREE, NULL_TREE);
16685 /* *(foo *)&complexfoo => __real__ complexfoo */
16686 else if (TREE_CODE (optype) == COMPLEX_TYPE
16687 && type == TREE_TYPE (optype))
16688 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16689 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16690 else if (TREE_CODE (optype) == VECTOR_TYPE
16691 && type == TREE_TYPE (optype))
16693 tree part_width = TYPE_SIZE (type);
16694 tree index = bitsize_int (0);
16695 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16699 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16700 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16702 tree op00 = TREE_OPERAND (sub, 0);
16703 tree op01 = TREE_OPERAND (sub, 1);
16705 STRIP_NOPS (op00);
16706 if (TREE_CODE (op00) == ADDR_EXPR)
16708 tree op00type;
16709 op00 = TREE_OPERAND (op00, 0);
16710 op00type = TREE_TYPE (op00);
16712 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
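/* Editor's sketch (illustration only): for a 4 x 32-bit vector v and
   `int *p = (int *) &v;', the access p[1] has byte offset 4, giving
   BIT_FIELD_REF <v, 32, 32> (a 32-bit field at bit position 32).  */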
16713 if (TREE_CODE (op00type) == VECTOR_TYPE
16714 && type == TREE_TYPE (op00type))
16716 HOST_WIDE_INT offset = tree_to_shwi (op01);
16717 tree part_width = TYPE_SIZE (type);
16718 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16719 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16720 tree index = bitsize_int (indexi);
16722 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16723 return fold_build3_loc (loc,
16724 BIT_FIELD_REF, type, op00,
16725 part_width, index);
16728 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16729 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16730 && type == TREE_TYPE (op00type))
16732 tree size = TYPE_SIZE_UNIT (type);
16733 if (tree_int_cst_equal (size, op01))
16734 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16736 /* ((foo *)&fooarray)[1] => fooarray[1] */
16737 else if (TREE_CODE (op00type) == ARRAY_TYPE
16738 && type == TREE_TYPE (op00type))
16740 tree type_domain = TYPE_DOMAIN (op00type);
16741 tree min_val = size_zero_node;
16742 if (type_domain && TYPE_MIN_VALUE (type_domain))
16743 min_val = TYPE_MIN_VALUE (type_domain);
16744 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16745 TYPE_SIZE_UNIT (type));
16746 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16747 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16748 NULL_TREE, NULL_TREE);
16753 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16754 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16755 && type == TREE_TYPE (TREE_TYPE (subtype))
16756 && (!in_gimple_form
16757 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16759 tree type_domain;
16760 tree min_val = size_zero_node;
16761 sub = build_fold_indirect_ref_loc (loc, sub);
16762 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16763 if (type_domain && TYPE_MIN_VALUE (type_domain))
16764 min_val = TYPE_MIN_VALUE (type_domain);
16765 if (in_gimple_form
16766 && TREE_CODE (min_val) != INTEGER_CST)
16767 return NULL_TREE;
16768 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16769 NULL_TREE);
16772 return NULL_TREE;
16775 /* Builds an expression for an indirection through T, simplifying some
16776 cases. */
16778 tree
16779 build_fold_indirect_ref_loc (location_t loc, tree t)
16781 tree type = TREE_TYPE (TREE_TYPE (t));
16782 tree sub = fold_indirect_ref_1 (loc, type, t);
16784 if (sub)
16785 return sub;
16787 return build1_loc (loc, INDIRECT_REF, type, t);
16790 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16792 tree
16793 fold_indirect_ref_loc (location_t loc, tree t)
16795 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16797 if (sub)
16798 return sub;
16799 else
16800 return t;
16803 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16804 whose result is ignored. The type of the returned tree need not be
16805 the same as the original expression. */
16807 tree
16808 fold_ignored_result (tree t)
16810 if (!TREE_SIDE_EFFECTS (t))
16811 return integer_zero_node;
16813 for (;;)
16814 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16816 case tcc_unary:
16817 t = TREE_OPERAND (t, 0);
16818 break;
16820 case tcc_binary:
16821 case tcc_comparison:
16822 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16823 t = TREE_OPERAND (t, 0);
16824 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16825 t = TREE_OPERAND (t, 1);
16826 else
16827 return t;
16828 break;
16830 case tcc_expression:
16831 switch (TREE_CODE (t))
16833 case COMPOUND_EXPR:
16834 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16835 return t;
16836 t = TREE_OPERAND (t, 0);
16837 break;
16839 case COND_EXPR:
16840 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16841 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16842 return t;
16843 t = TREE_OPERAND (t, 0);
16844 break;
16846 default:
16847 return t;
16849 break;
16851 default:
16852 return t;
16856 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16857 This can only be applied to objects of a sizetype. */
16859 tree
16860 round_up_loc (location_t loc, tree value, int divisor)
16862 tree div = NULL_TREE;
16864 gcc_assert (divisor > 0);
16865 if (divisor == 1)
16866 return value;
16868 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16869 have to do anything. Only do this check when VALUE is not a
16870 constant, because for a constant the check is more expensive than
16871 simply doing the rounding. */
16872 if (TREE_CODE (value) != INTEGER_CST)
16874 div = build_int_cst (TREE_TYPE (value), divisor);
16876 if (multiple_of_p (TREE_TYPE (value), value, div))
16877 return value;
16880 /* If divisor is a power of two, simplify this to bit manipulation. */
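/* Editor's sketch (illustration only): for value 13 and divisor 8, the
   constant path clears the low bits (13 & ~7 == 8) and then adds the
   divisor, while the expression path computes (13 + 7) & -8; both
   yield 16.  */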
16881 if (divisor == (divisor & -divisor))
16883 if (TREE_CODE (value) == INTEGER_CST)
16885 double_int val = tree_to_double_int (value);
16886 bool overflow_p;
16888 if ((val.low & (divisor - 1)) == 0)
16889 return value;
16891 overflow_p = TREE_OVERFLOW (value);
16892 val.low &= ~(divisor - 1);
16893 val.low += divisor;
16894 if (val.low == 0)
16896 val.high++;
16897 if (val.high == 0)
16898 overflow_p = true;
16901 return force_fit_type_double (TREE_TYPE (value), val,
16902 -1, overflow_p);
16904 else
16906 tree t;
16908 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16909 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16910 t = build_int_cst (TREE_TYPE (value), -divisor);
16911 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16914 else
16916 if (!div)
16917 div = build_int_cst (TREE_TYPE (value), divisor);
16918 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16919 value = size_binop_loc (loc, MULT_EXPR, value, div);
16922 return value;
16925 /* Likewise, but round down. */
16927 tree
16928 round_down_loc (location_t loc, tree value, int divisor)
16930 tree div = NULL_TREE;
16932 gcc_assert (divisor > 0);
16933 if (divisor == 1)
16934 return value;
16936 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16937 have to do anything. Only do this check when VALUE is not a
16938 constant, because for a constant the check is more expensive than
16939 simply doing the rounding. */
16940 if (TREE_CODE (value) != INTEGER_CST)
16942 div = build_int_cst (TREE_TYPE (value), divisor);
16944 if (multiple_of_p (TREE_TYPE (value), value, div))
16945 return value;
16948 /* If divisor is a power of two, simplify this to bit manipulation. */
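/* Editor's sketch (illustration only): round_down (13, 8) computes
   13 & -8 == 13 & ~7 == 8.  */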
16949 if (divisor == (divisor & -divisor))
16951 tree t;
16953 t = build_int_cst (TREE_TYPE (value), -divisor);
16954 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16956 else
16958 if (!div)
16959 div = build_int_cst (TREE_TYPE (value), divisor);
16960 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16961 value = size_binop_loc (loc, MULT_EXPR, value, div);
16964 return value;
16967 /* Returns the pointer to the base of the object addressed by EXP and
16968 extracts the information about the offset of the access, storing it
16969 in *PBITPOS and *POFFSET. */
16971 static tree
16972 split_address_to_core_and_offset (tree exp,
16973 HOST_WIDE_INT *pbitpos, tree *poffset)
16975 tree core;
16976 enum machine_mode mode;
16977 int unsignedp, volatilep;
16978 HOST_WIDE_INT bitsize;
16979 location_t loc = EXPR_LOCATION (exp);
16981 if (TREE_CODE (exp) == ADDR_EXPR)
16983 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16984 poffset, &mode, &unsignedp, &volatilep,
16985 false);
16986 core = build_fold_addr_expr_loc (loc, core);
16988 else
16990 core = exp;
16991 *pbitpos = 0;
16992 *poffset = NULL_TREE;
16995 return core;
16998 /* Returns true if addresses of E1 and E2 differ by a constant, false
16999 otherwise. If they do, E1 - E2 is stored in *DIFF. */
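/* Editor's sketch (illustration only): for `int a[16];', the addresses
   &a[5] and &a[2] share the core &a and differ only in their constant
   bit positions, so *DIFF is set to 3 * sizeof (int) == 12 bytes on a
   32-bit-int target.  */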
17001 bool
17002 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
17004 tree core1, core2;
17005 HOST_WIDE_INT bitpos1, bitpos2;
17006 tree toffset1, toffset2, tdiff, type;
17008 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17009 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17011 if (bitpos1 % BITS_PER_UNIT != 0
17012 || bitpos2 % BITS_PER_UNIT != 0
17013 || !operand_equal_p (core1, core2, 0))
17014 return false;
17016 if (toffset1 && toffset2)
17018 type = TREE_TYPE (toffset1);
17019 if (type != TREE_TYPE (toffset2))
17020 toffset2 = fold_convert (type, toffset2);
17022 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17023 if (!cst_and_fits_in_hwi (tdiff))
17024 return false;
17026 *diff = int_cst_value (tdiff);
17028 else if (toffset1 || toffset2)
17030 /* If only one of the offsets is non-constant, the difference cannot
17031 be a constant. */
17032 return false;
17034 else
17035 *diff = 0;
17037 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17038 return true;
17041 /* Simplify the floating point expression EXP when the sign of the
17042 result is not significant. Return NULL_TREE if no simplification
17043 is possible. */
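/* Editor's sketch (illustration only): in a context such as
   `fabs (-x * y)' the sign of the inner product is irrelevant, so
   NEGATE_EXPR and ABS_EXPR wrappers, and calls like copysign (x, s),
   can be stripped down to their magnitude-carrying operands.  */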
17045 tree
17046 fold_strip_sign_ops (tree exp)
17048 tree arg0, arg1;
17049 location_t loc = EXPR_LOCATION (exp);
17051 switch (TREE_CODE (exp))
17053 case ABS_EXPR:
17054 case NEGATE_EXPR:
17055 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17056 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17058 case MULT_EXPR:
17059 case RDIV_EXPR:
17060 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17061 return NULL_TREE;
17062 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17063 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17064 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17065 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17066 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17067 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17068 break;
17070 case COMPOUND_EXPR:
17071 arg0 = TREE_OPERAND (exp, 0);
17072 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17073 if (arg1)
17074 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17075 break;
17077 case COND_EXPR:
17078 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17079 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17080 if (arg0 || arg1)
17081 return fold_build3_loc (loc,
17082 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17083 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17084 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17085 break;
17087 case CALL_EXPR:
17089 const enum built_in_function fcode = builtin_mathfn_code (exp);
17090 switch (fcode)
17092 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17093 /* Strip copysign function call, return the 1st argument. */
17094 arg0 = CALL_EXPR_ARG (exp, 0);
17095 arg1 = CALL_EXPR_ARG (exp, 1);
17096 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17098 default:
17099 /* Strip sign ops from the argument of "odd" math functions. */
17100 if (negate_mathfn_p (fcode))
17102 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17103 if (arg0)
17104 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17106 break;
17109 break;
17111 default:
17112 break;
17114 return NULL_TREE;