/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
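
/* Illustrative sketch (not from the original sources): a middle-end
   caller typically composes these entry points as

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   which yields the sizetype constant 12, while fold itself takes an
   already-built expression tree and returns a simplified tree when
   one of the patterns below applies.  */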

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
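
/* Illustrative note (not from the original sources): the low three
   bits encode LT, EQ and GT respectively, and bit 3 encodes UNORD,
   so combining two comparisons of the same operands reduces to
   bitwise arithmetic on the codes.  For example:

     (a < b) || (a == b)
       => COMPCODE_LT | COMPCODE_EQ  ==  1 | 2  ==  3  ==  COMPCODE_LE
       => a <= b

     (a <= b) && (a >= b)
       => COMPCODE_LE & COMPCODE_GE  ==  3 & 6  ==  2  ==  COMPCODE_EQ
       => a == b  */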

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if a location can and needs to be set, unshare x first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
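
/* Illustrative note (not from the original sources): with ARG1 the
   INTEGER_CST 12 and ARG2 the INTEGER_CST 4,

     div_if_zero_remainder (EXACT_DIV_EXPR, arg1, arg2)

   returns the INTEGER_CST 3; with ARG1 equal to 13 it returns
   NULL_TREE because the remainder is nonzero, letting callers
   strength-reduce only exact divisions.  */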

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
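
/* Illustrative sketch (not from the original sources): the deferral
   API is used bracket-style around speculative folding, roughly as

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     if (res && TREE_CODE (res) == INTEGER_CST)
       fold_undefer_overflow_warnings (true, stmt, 0);   -- keep warning
     else
       fold_undefer_and_ignore_overflow_warnings ();     -- drop it

   so a -Wstrict-overflow diagnostic is emitted only when the folded
   result is actually used.  */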

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
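
/* Illustrative note (not from the original sources): for a signed
   32-bit type the only value this rejects is INT_MIN, whose bit
   pattern is exactly 1 << (prec - 1); negating it wraps back to
   itself in two's complement, so -INT_MIN overflows while every
   other value negates cleanly.  */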

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
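
/* Illustrative note (not from the original sources): fold_negate_expr
   is partial while negate_expr is total.  fold_negate_expr folds
   -(b - a) to a - b when sign-dependent rounding and signed zeros
   need not be honored, whereas for an opaque operand v negate_expr
   falls back to wrapping it as NEGATE_EXPR <v>.  */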

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
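
/* Illustrative note (not from the original sources): given IN = a + 3
   and CODE = PLUS_EXPR, split_tree stores 3 in *LITP, leaves *CONP
   and *MINUS_LITP null, and returns a as the variable part.  Given
   IN = x - 5 with CODE = PLUS_EXPR, the literal 5 lands in
   *MINUS_LITP instead, so a caller can re-associate (x - 5) + 8 into
   x + 3 with associate_trees below.  */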

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
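
/* Illustrative sketch (not from the original sources): for two
   INTEGER_CSTs the wrapper behaves like

     tree two   = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five  = int_const_binop (PLUS_EXPR, two, three);

   where five is the INTEGER_CST 5.  A signed result that wraps,
   such as INT_MAX + 1, is still produced, but force_fit_type_double
   marks it with TREE_OVERFLOW.  */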

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;

          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;

          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
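
/* Illustrative note (not from the original sources): callers normally
   reach size_int_kind through the size_int family of wrapper macros,
   one per size_type_kind, so size_int (16) builds the sizetype
   constant 16 while the bitsize variants build bitsizetype values.  */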

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
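
/* Illustrative sketch (not from the original sources): layout code
   combines byte sizes as, e.g.,

     tree total = size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (t1),
                              TYPE_SIZE_UNIT (t2));

   Both operands must share an equivalent integer type; mixing
   sizetype with bitsizetype operands trips the
   int_binop_types_match_p assertion above.  */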

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
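
/* Illustrative note (not from the original sources): because sizetype
   is unsigned, the difference is returned in the signed counterpart,
   so size_diffop_loc applied to the sizetype constants 4 and 10
   yields the ssizetype constant -6 instead of a huge wrapped
   unsigned value.  */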

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
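
/* Illustrative note (not from the original sources): under these
   rules converting the REAL_CST 1.0e30 to a 32-bit signed type
   saturates to 2147483647 and sets TREE_OVERFLOW on the result,
   while a NaN converts to 0, again with TREE_OVERFLOW set.  */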

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do that by adding 1 to temp when the fractional bits are
     not all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2074 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2075 gcc_unreachable ();
2077 fold_convert_exit:
2078 protected_set_expr_location_unshare (tem, loc);
2079 return tem;
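/* For example, fold_convert_loc folds (double) 3 directly to the
   REAL_CST 3.0 through fold_convert_const, and converts a scalar X
   to a complex type as COMPLEX_EXPR <(double) X, 0.0>, so no
   conversion node is left behind when the result can be computed
   outright. */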
2082 /* Return false if expr can be assumed not to be an lvalue, true
2083 otherwise. */
2085 static bool
2086 maybe_lvalue_p (const_tree x)
2088 /* We only need to wrap lvalue tree codes. */
2089 switch (TREE_CODE (x))
2091 case VAR_DECL:
2092 case PARM_DECL:
2093 case RESULT_DECL:
2094 case LABEL_DECL:
2095 case FUNCTION_DECL:
2096 case SSA_NAME:
2098 case COMPONENT_REF:
2099 case MEM_REF:
2100 case INDIRECT_REF:
2101 case ARRAY_REF:
2102 case ARRAY_RANGE_REF:
2103 case BIT_FIELD_REF:
2104 case OBJ_TYPE_REF:
2106 case REALPART_EXPR:
2107 case IMAGPART_EXPR:
2108 case PREINCREMENT_EXPR:
2109 case PREDECREMENT_EXPR:
2110 case SAVE_EXPR:
2111 case TRY_CATCH_EXPR:
2112 case WITH_CLEANUP_EXPR:
2113 case COMPOUND_EXPR:
2114 case MODIFY_EXPR:
2115 case TARGET_EXPR:
2116 case COND_EXPR:
2117 case BIND_EXPR:
2118 break;
2120 default:
2121 /* Assume the worst for front-end tree codes. */
2122 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2123 break;
2124 return false;
2127 return true;
2130 /* Return an expr equal to X but certainly not valid as an lvalue. */
2132 tree
2133 non_lvalue_loc (location_t loc, tree x)
2135 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2136 us. */
2137 if (in_gimple_form)
2138 return x;
2140 if (! maybe_lvalue_p (x))
2141 return x;
2142 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2145 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2146 Zero means allow extended lvalues. */
2148 int pedantic_lvalues;
2150 /* When pedantic, return an expr equal to X but certainly not valid as a
2151 pedantic lvalue. Otherwise, return X. */
2153 static tree
2154 pedantic_non_lvalue_loc (location_t loc, tree x)
2156 if (pedantic_lvalues)
2157 return non_lvalue_loc (loc, x);
2159 return protected_set_expr_location_unshare (x, loc);
2162 /* Given a tree comparison code, return the code that is the logical inverse.
2163 It is generally not safe to do this for floating-point comparisons, except
2164 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2165 ERROR_MARK in this case. */
2167 enum tree_code
2168 invert_tree_comparison (enum tree_code code, bool honor_nans)
2170 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2171 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2172 return ERROR_MARK;
2174 switch (code)
2176 case EQ_EXPR:
2177 return NE_EXPR;
2178 case NE_EXPR:
2179 return EQ_EXPR;
2180 case GT_EXPR:
2181 return honor_nans ? UNLE_EXPR : LE_EXPR;
2182 case GE_EXPR:
2183 return honor_nans ? UNLT_EXPR : LT_EXPR;
2184 case LT_EXPR:
2185 return honor_nans ? UNGE_EXPR : GE_EXPR;
2186 case LE_EXPR:
2187 return honor_nans ? UNGT_EXPR : GT_EXPR;
2188 case LTGT_EXPR:
2189 return UNEQ_EXPR;
2190 case UNEQ_EXPR:
2191 return LTGT_EXPR;
2192 case UNGT_EXPR:
2193 return LE_EXPR;
2194 case UNGE_EXPR:
2195 return LT_EXPR;
2196 case UNLT_EXPR:
2197 return GE_EXPR;
2198 case UNLE_EXPR:
2199 return GT_EXPR;
2200 case ORDERED_EXPR:
2201 return UNORDERED_EXPR;
2202 case UNORDERED_EXPR:
2203 return ORDERED_EXPR;
2204 default:
2205 gcc_unreachable ();
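/* For example, when NaNs are honored the logical inverse of a < b is
   "a >= b or unordered", i.e. UNGE_EXPR, because both a < b and
   a >= b are false when either operand is NaN; only when NaNs need
   not be honored can LT_EXPR be inverted to the plain GE_EXPR. */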
2209 /* Similar, but return the comparison that results if the operands are
2210 swapped. This is safe for floating-point. */
2212 enum tree_code
2213 swap_tree_comparison (enum tree_code code)
2215 switch (code)
2217 case EQ_EXPR:
2218 case NE_EXPR:
2219 case ORDERED_EXPR:
2220 case UNORDERED_EXPR:
2221 case LTGT_EXPR:
2222 case UNEQ_EXPR:
2223 return code;
2224 case GT_EXPR:
2225 return LT_EXPR;
2226 case GE_EXPR:
2227 return LE_EXPR;
2228 case LT_EXPR:
2229 return GT_EXPR;
2230 case LE_EXPR:
2231 return GE_EXPR;
2232 case UNGT_EXPR:
2233 return UNLT_EXPR;
2234 case UNGE_EXPR:
2235 return UNLE_EXPR;
2236 case UNLT_EXPR:
2237 return UNGT_EXPR;
2238 case UNLE_EXPR:
2239 return UNGE_EXPR;
2240 default:
2241 gcc_unreachable ();
2246 /* Convert a comparison tree code from an enum tree_code representation
2247 into a compcode bit-based encoding. This function is the inverse of
2248 compcode_to_comparison. */
2250 static enum comparison_code
2251 comparison_to_compcode (enum tree_code code)
2253 switch (code)
2255 case LT_EXPR:
2256 return COMPCODE_LT;
2257 case EQ_EXPR:
2258 return COMPCODE_EQ;
2259 case LE_EXPR:
2260 return COMPCODE_LE;
2261 case GT_EXPR:
2262 return COMPCODE_GT;
2263 case NE_EXPR:
2264 return COMPCODE_NE;
2265 case GE_EXPR:
2266 return COMPCODE_GE;
2267 case ORDERED_EXPR:
2268 return COMPCODE_ORD;
2269 case UNORDERED_EXPR:
2270 return COMPCODE_UNORD;
2271 case UNLT_EXPR:
2272 return COMPCODE_UNLT;
2273 case UNEQ_EXPR:
2274 return COMPCODE_UNEQ;
2275 case UNLE_EXPR:
2276 return COMPCODE_UNLE;
2277 case UNGT_EXPR:
2278 return COMPCODE_UNGT;
2279 case LTGT_EXPR:
2280 return COMPCODE_LTGT;
2281 case UNGE_EXPR:
2282 return COMPCODE_UNGE;
2283 default:
2284 gcc_unreachable ();
2288 /* Convert a compcode bit-based encoding of a comparison operator back
2289 to GCC's enum tree_code representation. This function is the
2290 inverse of comparison_to_compcode. */
2292 static enum tree_code
2293 compcode_to_comparison (enum comparison_code code)
2295 switch (code)
2297 case COMPCODE_LT:
2298 return LT_EXPR;
2299 case COMPCODE_EQ:
2300 return EQ_EXPR;
2301 case COMPCODE_LE:
2302 return LE_EXPR;
2303 case COMPCODE_GT:
2304 return GT_EXPR;
2305 case COMPCODE_NE:
2306 return NE_EXPR;
2307 case COMPCODE_GE:
2308 return GE_EXPR;
2309 case COMPCODE_ORD:
2310 return ORDERED_EXPR;
2311 case COMPCODE_UNORD:
2312 return UNORDERED_EXPR;
2313 case COMPCODE_UNLT:
2314 return UNLT_EXPR;
2315 case COMPCODE_UNEQ:
2316 return UNEQ_EXPR;
2317 case COMPCODE_UNLE:
2318 return UNLE_EXPR;
2319 case COMPCODE_UNGT:
2320 return UNGT_EXPR;
2321 case COMPCODE_LTGT:
2322 return LTGT_EXPR;
2323 case COMPCODE_UNGE:
2324 return UNGE_EXPR;
2325 default:
2326 gcc_unreachable ();
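/* The bit-based encoding turns the combination of two comparisons of
   the same operands into bitwise arithmetic.  For example,
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE, so
   (a < b) || (a == b) combines to (a <= b); likewise
   COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ, so
   (a <= b) && (a >= b) combines to (a == b). */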
2330 /* Return a tree for the comparison which is the combination of
2331 doing the AND or OR (depending on CODE) of the two operations LCODE
2332 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2333 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2334 if this makes the transformation invalid. */
2336 tree
2337 combine_comparisons (location_t loc,
2338 enum tree_code code, enum tree_code lcode,
2339 enum tree_code rcode, tree truth_type,
2340 tree ll_arg, tree lr_arg)
2342 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2343 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2344 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2345 int compcode;
2347 switch (code)
2349 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2350 compcode = lcompcode & rcompcode;
2351 break;
2353 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2354 compcode = lcompcode | rcompcode;
2355 break;
2357 default:
2358 return NULL_TREE;
2361 if (!honor_nans)
2363 /* Eliminate unordered comparisons, as well as LTGT and ORD
2364 which are not used unless the mode has NaNs. */
2365 compcode &= ~COMPCODE_UNORD;
2366 if (compcode == COMPCODE_LTGT)
2367 compcode = COMPCODE_NE;
2368 else if (compcode == COMPCODE_ORD)
2369 compcode = COMPCODE_TRUE;
2371 else if (flag_trapping_math)
2373 /* Check that the original operation and the optimized ones will trap
2374 under the same condition. */
2375 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2376 && (lcompcode != COMPCODE_EQ)
2377 && (lcompcode != COMPCODE_ORD);
2378 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2379 && (rcompcode != COMPCODE_EQ)
2380 && (rcompcode != COMPCODE_ORD);
2381 bool trap = (compcode & COMPCODE_UNORD) == 0
2382 && (compcode != COMPCODE_EQ)
2383 && (compcode != COMPCODE_ORD);
2385 /* In a short-circuited boolean expression the LHS might be
2386 such that the RHS, if evaluated, will never trap. For
2387 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2388 if neither x nor y is NaN. (This is a mixed blessing: for
2389 example, the expression above will never trap, hence
2390 optimizing it to x < y would be invalid). */
2391 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2392 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2393 rtrap = false;
2395 /* If the comparison was short-circuited, and only the RHS
2396 trapped, we may now generate a spurious trap. */
2397 if (rtrap && !ltrap
2398 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2399 return NULL_TREE;
2401 /* If we changed the conditions that cause a trap, we lose. */
2402 if ((ltrap || rtrap) != trap)
2403 return NULL_TREE;
2406 if (compcode == COMPCODE_TRUE)
2407 return constant_boolean_node (true, truth_type);
2408 else if (compcode == COMPCODE_FALSE)
2409 return constant_boolean_node (false, truth_type);
2410 else
2412 enum tree_code tcode;
2414 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2415 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
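/* For example, for floating-point operands (x < y) || (x > y) gives
   compcode COMPCODE_LT | COMPCODE_GT == COMPCODE_LTGT; when NaNs
   need not be honored this is further reduced to COMPCODE_NE above,
   folding the whole expression to x != y. */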
2419 /* Return nonzero if two operands (typically of the same tree node)
2420 are necessarily equal. If either argument has side-effects this
2421 function returns zero. FLAGS modifies behavior as follows:
2423 If OEP_ONLY_CONST is set, only return nonzero for constants.
2424 This function tests whether the operands are indistinguishable;
2425 it does not test whether they are equal using C's == operation.
2426 The distinction is important for IEEE floating point, because
2427 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2428 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2430 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2431 even though it may hold multiple values during a function.
2432 This is because a GCC tree node guarantees that nothing else is
2433 executed between the evaluation of its "operands" (which may often
2434 be evaluated in arbitrary order). Hence if the operands themselves
2435 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2436 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2437 unset means assuming isochronic (or instantaneous) tree equivalence.
2438 Unless comparing arbitrary expression trees, such as from different
2439 statements, this flag can usually be left unset.
2441 If OEP_PURE_SAME is set, then pure functions with identical arguments
2442 are considered the same. It is used when the caller has other ways
2443 to ensure that global memory is unchanged in between. */
2445 int
2446 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2448 /* If either is ERROR_MARK, they aren't equal. */
2449 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2450 || TREE_TYPE (arg0) == error_mark_node
2451 || TREE_TYPE (arg1) == error_mark_node)
2452 return 0;
2454 /* Similar, if either does not have a type (like a released SSA name),
2455 they aren't equal. */
2456 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2457 return 0;
2459 /* Check equality of integer constants before bailing out due to
2460 precision differences. */
2461 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2462 return tree_int_cst_equal (arg0, arg1);
2464 /* If both types don't have the same signedness, then we can't consider
2465 them equal. We must check this before the STRIP_NOPS calls
2466 because they may change the signedness of the arguments. As pointers
2467 strictly don't have a signedness, require either two pointers or
2468 two non-pointers as well. */
2469 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2470 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2471 return 0;
2473 /* We cannot consider pointers to different address space equal. */
2474 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2475 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2476 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2477 return 0;
2479 /* If both types don't have the same precision, then it is not safe
2480 to strip NOPs. */
2481 if (element_precision (TREE_TYPE (arg0))
2482 != element_precision (TREE_TYPE (arg1)))
2483 return 0;
2485 STRIP_NOPS (arg0);
2486 STRIP_NOPS (arg1);
2488 /* In case both args are comparisons but with different comparison
2489 code, try to swap the comparison operands of one arg to produce
2490 a match and compare that variant. */
2491 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2492 && COMPARISON_CLASS_P (arg0)
2493 && COMPARISON_CLASS_P (arg1))
2495 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2497 if (TREE_CODE (arg0) == swap_code)
2498 return operand_equal_p (TREE_OPERAND (arg0, 0),
2499 TREE_OPERAND (arg1, 1), flags)
2500 && operand_equal_p (TREE_OPERAND (arg0, 1),
2501 TREE_OPERAND (arg1, 0), flags);
2504 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2505 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2506 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2507 return 0;
2509 /* This is needed for conversions and for COMPONENT_REF.
2510 Might as well play it safe and always test this. */
2511 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2512 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2513 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2514 return 0;
2516 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2517 We don't care about side effects in that case because the SAVE_EXPR
2518 takes care of that for us. In all other cases, two expressions are
2519 equal if they have no side effects. If we have two identical
2520 expressions with side effects that should be treated the same due
2521 to the only side effects being identical SAVE_EXPR's, that will
2522 be detected in the recursive calls below.
2523 If we are taking an invariant address of two identical objects
2524 they are necessarily equal as well. */
2525 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2526 && (TREE_CODE (arg0) == SAVE_EXPR
2527 || (flags & OEP_CONSTANT_ADDRESS_OF)
2528 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2529 return 1;
2531 /* Next handle constant cases, those for which we can return 1 even
2532 if ONLY_CONST is set. */
2533 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2534 switch (TREE_CODE (arg0))
2536 case INTEGER_CST:
2537 return tree_int_cst_equal (arg0, arg1);
2539 case FIXED_CST:
2540 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2541 TREE_FIXED_CST (arg1));
2543 case REAL_CST:
2544 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2545 TREE_REAL_CST (arg1)))
2546 return 1;
2549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2551 /* If we do not distinguish between signed and unsigned zero,
2552 consider them equal. */
2553 if (real_zerop (arg0) && real_zerop (arg1))
2554 return 1;
2556 return 0;
2558 case VECTOR_CST:
2560 unsigned i;
2562 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2563 return 0;
2565 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2567 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2568 VECTOR_CST_ELT (arg1, i), flags))
2569 return 0;
2571 return 1;
2574 case COMPLEX_CST:
2575 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2576 flags)
2577 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2578 flags));
2580 case STRING_CST:
2581 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2582 && ! memcmp (TREE_STRING_POINTER (arg0),
2583 TREE_STRING_POINTER (arg1),
2584 TREE_STRING_LENGTH (arg0)));
2586 case ADDR_EXPR:
2587 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2588 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2589 ? OEP_CONSTANT_ADDRESS_OF : 0);
2590 default:
2591 break;
2594 if (flags & OEP_ONLY_CONST)
2595 return 0;
2597 /* Define macros to test an operand from arg0 and arg1 for equality and a
2598 variant that allows null and views null as being different from any
2599 non-null value. In the latter case, if either is null, then both
2600 must be; otherwise, do the normal comparison. */
2601 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2602 TREE_OPERAND (arg1, N), flags)
2604 #define OP_SAME_WITH_NULL(N) \
2605 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2606 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2608 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2610 case tcc_unary:
2611 /* Two conversions are equal only if signedness and modes match. */
2612 switch (TREE_CODE (arg0))
2614 CASE_CONVERT:
2615 case FIX_TRUNC_EXPR:
2616 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2617 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2618 return 0;
2619 break;
2620 default:
2621 break;
2624 return OP_SAME (0);
2627 case tcc_comparison:
2628 case tcc_binary:
2629 if (OP_SAME (0) && OP_SAME (1))
2630 return 1;
2632 /* For commutative ops, allow the other order. */
2633 return (commutative_tree_code (TREE_CODE (arg0))
2634 && operand_equal_p (TREE_OPERAND (arg0, 0),
2635 TREE_OPERAND (arg1, 1), flags)
2636 && operand_equal_p (TREE_OPERAND (arg0, 1),
2637 TREE_OPERAND (arg1, 0), flags));
2639 case tcc_reference:
2640 /* If either of the pointer (or reference) expressions we are
2641 dereferencing contains a side effect, these cannot be equal,
2642 but their addresses can be. */
2643 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2644 && (TREE_SIDE_EFFECTS (arg0)
2645 || TREE_SIDE_EFFECTS (arg1)))
2646 return 0;
2648 switch (TREE_CODE (arg0))
2650 case INDIRECT_REF:
2651 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2652 return OP_SAME (0);
2654 case REALPART_EXPR:
2655 case IMAGPART_EXPR:
2656 return OP_SAME (0);
2658 case TARGET_MEM_REF:
2659 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2660 /* Require equal extra operands and then fall through to MEM_REF
2661 handling of the two common operands. */
2662 if (!OP_SAME_WITH_NULL (2)
2663 || !OP_SAME_WITH_NULL (3)
2664 || !OP_SAME_WITH_NULL (4))
2665 return 0;
2666 /* Fallthru. */
2667 case MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal access sizes, and similar pointer types.
2670 We can have incomplete types for array references of
2671 variable-sized arrays from the Fortran frontend
2672 though. Also verify the types are compatible. */
2673 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2674 || (TYPE_SIZE (TREE_TYPE (arg0))
2675 && TYPE_SIZE (TREE_TYPE (arg1))
2676 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2677 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2678 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2679 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2680 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2681 && OP_SAME (0) && OP_SAME (1));
2683 case ARRAY_REF:
2684 case ARRAY_RANGE_REF:
2685 /* Operands 2 and 3 may be null.
2686 Compare the array index by value first if it is constant, as
2687 the indices may have different types but the same value here. */
2688 if (!OP_SAME (0))
2689 return 0;
2690 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2691 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2692 TREE_OPERAND (arg1, 1))
2693 || OP_SAME (1))
2694 && OP_SAME_WITH_NULL (2)
2695 && OP_SAME_WITH_NULL (3));
2697 case COMPONENT_REF:
2698 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2699 may be NULL when we're called to compare MEM_EXPRs. */
2700 if (!OP_SAME_WITH_NULL (0))
2701 return 0;
2702 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2703 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2705 case BIT_FIELD_REF:
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return OP_SAME (1) && OP_SAME (2);
2711 default:
2712 return 0;
2715 case tcc_expression:
2716 switch (TREE_CODE (arg0))
2718 case ADDR_EXPR:
2719 case TRUTH_NOT_EXPR:
2720 return OP_SAME (0);
2722 case TRUTH_ANDIF_EXPR:
2723 case TRUTH_ORIF_EXPR:
2724 return OP_SAME (0) && OP_SAME (1);
2726 case FMA_EXPR:
2727 case WIDEN_MULT_PLUS_EXPR:
2728 case WIDEN_MULT_MINUS_EXPR:
2729 if (!OP_SAME (2))
2730 return 0;
2731 /* The multiplication operands are commutative. */
2732 /* FALLTHRU */
2734 case TRUTH_AND_EXPR:
2735 case TRUTH_OR_EXPR:
2736 case TRUTH_XOR_EXPR:
2737 if (OP_SAME (0) && OP_SAME (1))
2738 return 1;
2740 /* Otherwise take into account this is a commutative operation. */
2741 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2742 TREE_OPERAND (arg1, 1), flags)
2743 && operand_equal_p (TREE_OPERAND (arg0, 1),
2744 TREE_OPERAND (arg1, 0), flags));
2746 case COND_EXPR:
2747 case VEC_COND_EXPR:
2748 case DOT_PROD_EXPR:
2749 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2751 default:
2752 return 0;
2755 case tcc_vl_exp:
2756 switch (TREE_CODE (arg0))
2758 case CALL_EXPR:
2759 /* If the CALL_EXPRs call different functions, then they
2760 clearly cannot be equal. */
2761 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2762 flags))
2763 return 0;
2766 unsigned int cef = call_expr_flags (arg0);
2767 if (flags & OEP_PURE_SAME)
2768 cef &= ECF_CONST | ECF_PURE;
2769 else
2770 cef &= ECF_CONST;
2771 if (!cef)
2772 return 0;
2775 /* Now see if all the arguments are the same. */
2777 const_call_expr_arg_iterator iter0, iter1;
2778 const_tree a0, a1;
2779 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2780 a1 = first_const_call_expr_arg (arg1, &iter1);
2781 a0 && a1;
2782 a0 = next_const_call_expr_arg (&iter0),
2783 a1 = next_const_call_expr_arg (&iter1))
2784 if (! operand_equal_p (a0, a1, flags))
2785 return 0;
2787 /* If we get here and both argument lists are exhausted
2788 then the CALL_EXPRs are equal. */
2789 return ! (a0 || a1);
2791 default:
2792 return 0;
2795 case tcc_declaration:
2796 /* Consider __builtin_sqrt equal to sqrt. */
2797 return (TREE_CODE (arg0) == FUNCTION_DECL
2798 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2799 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2800 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2802 default:
2803 return 0;
2806 #undef OP_SAME
2807 #undef OP_SAME_WITH_NULL
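/* For example, A + B and B + A compare equal through the commutative
   tcc_binary case above, while two identical calls f (x) compare
   equal only if f is const, or also if f is pure when OEP_PURE_SAME
   is set. */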
2810 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2811 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2813 When in doubt, return 0. */
2815 static int
2816 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2818 int unsignedp1, unsignedpo;
2819 tree primarg0, primarg1, primother;
2820 unsigned int correct_width;
2822 if (operand_equal_p (arg0, arg1, 0))
2823 return 1;
2825 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2826 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2827 return 0;
2829 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2830 and see if the inner values are the same. This removes any
2831 signedness comparison, which doesn't matter here. */
2832 primarg0 = arg0, primarg1 = arg1;
2833 STRIP_NOPS (primarg0);
2834 STRIP_NOPS (primarg1);
2835 if (operand_equal_p (primarg0, primarg1, 0))
2836 return 1;
2838 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2839 actual comparison operand, ARG0.
2841 First throw away any conversions to wider types
2842 already present in the operands. */
2844 primarg1 = get_narrower (arg1, &unsignedp1);
2845 primother = get_narrower (other, &unsignedpo);
2847 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2848 if (unsignedp1 == unsignedpo
2849 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2850 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2852 tree type = TREE_TYPE (arg0);
2854 /* Make sure shorter operand is extended the right way
2855 to match the longer operand. */
2856 primarg1 = fold_convert (signed_or_unsigned_type_for
2857 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2859 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2860 return 1;
2863 return 0;
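/* For example, shorten_compare may rewrite (int) C < 10, with C a
   char, into C < (char) 10.  Given the shortened operand C as ARG0,
   the original (int) C as ARG1 and (char) 10 as OTHER, this function
   still recognizes the two operands as equal. */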
2866 /* See if ARG is an expression that is either a comparison or is performing
2867 arithmetic on comparisons. The comparisons must only be comparing
2868 two different values, which will be stored in *CVAL1 and *CVAL2; if
2869 they are nonzero it means that some operands have already been found.
2870 No variables may be used anywhere else in the expression except in the
2871 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2872 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2874 If this is true, return 1. Otherwise, return zero. */
2876 static int
2877 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2879 enum tree_code code = TREE_CODE (arg);
2880 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2882 /* We can handle some of the tcc_expression cases here. */
2883 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2884 tclass = tcc_unary;
2885 else if (tclass == tcc_expression
2886 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2887 || code == COMPOUND_EXPR))
2888 tclass = tcc_binary;
2890 else if (tclass == tcc_expression && code == SAVE_EXPR
2891 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2893 /* If we've already found a CVAL1 or CVAL2, this expression is
2894 too complex to handle. */
2895 if (*cval1 || *cval2)
2896 return 0;
2898 tclass = tcc_unary;
2899 *save_p = 1;
2902 switch (tclass)
2904 case tcc_unary:
2905 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2907 case tcc_binary:
2908 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2909 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2910 cval1, cval2, save_p));
2912 case tcc_constant:
2913 return 1;
2915 case tcc_expression:
2916 if (code == COND_EXPR)
2917 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2918 cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2920 cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2922 cval1, cval2, save_p));
2923 return 0;
2925 case tcc_comparison:
2926 /* First see if we can handle the first operand, then the second. For
2927 the second operand, we know *CVAL1 can't be zero. It must be that
2928 one side of the comparison is each of the values; test for the
2929 case where this isn't true by failing if the two operands
2930 are the same. */
2932 if (operand_equal_p (TREE_OPERAND (arg, 0),
2933 TREE_OPERAND (arg, 1), 0))
2934 return 0;
2936 if (*cval1 == 0)
2937 *cval1 = TREE_OPERAND (arg, 0);
2938 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2940 else if (*cval2 == 0)
2941 *cval2 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2944 else
2945 return 0;
2947 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2949 else if (*cval2 == 0)
2950 *cval2 = TREE_OPERAND (arg, 1);
2951 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2953 else
2954 return 0;
2956 return 1;
2958 default:
2959 return 0;
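/* For example, (a < b) | (a == b) satisfies this predicate with
   *CVAL1 == a and *CVAL2 == b, whereas (a < b) | (a == c) does not,
   because its comparisons involve three distinct values. */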
2963 /* ARG is a tree that is known to contain just arithmetic operations and
2964 comparisons. Evaluate the operations in the tree substituting NEW0 for
2965 any occurrence of OLD0 as an operand of a comparison and likewise for
2966 NEW1 and OLD1. */
2968 static tree
2969 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2970 tree old1, tree new1)
2972 tree type = TREE_TYPE (arg);
2973 enum tree_code code = TREE_CODE (arg);
2974 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2976 /* We can handle some of the tcc_expression cases here. */
2977 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2978 tclass = tcc_unary;
2979 else if (tclass == tcc_expression
2980 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2981 tclass = tcc_binary;
2983 switch (tclass)
2985 case tcc_unary:
2986 return fold_build1_loc (loc, code, type,
2987 eval_subst (loc, TREE_OPERAND (arg, 0),
2988 old0, new0, old1, new1));
2990 case tcc_binary:
2991 return fold_build2_loc (loc, code, type,
2992 eval_subst (loc, TREE_OPERAND (arg, 0),
2993 old0, new0, old1, new1),
2994 eval_subst (loc, TREE_OPERAND (arg, 1),
2995 old0, new0, old1, new1));
2997 case tcc_expression:
2998 switch (code)
3000 case SAVE_EXPR:
3001 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3002 old1, new1);
3004 case COMPOUND_EXPR:
3005 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3006 old1, new1);
3008 case COND_EXPR:
3009 return fold_build3_loc (loc, code, type,
3010 eval_subst (loc, TREE_OPERAND (arg, 0),
3011 old0, new0, old1, new1),
3012 eval_subst (loc, TREE_OPERAND (arg, 1),
3013 old0, new0, old1, new1),
3014 eval_subst (loc, TREE_OPERAND (arg, 2),
3015 old0, new0, old1, new1));
3016 default:
3017 break;
3019 /* Fall through - ??? */
3021 case tcc_comparison:
3023 tree arg0 = TREE_OPERAND (arg, 0);
3024 tree arg1 = TREE_OPERAND (arg, 1);
3026 /* We need to check both for exact equality and tree equality. The
3027 former will be true if the operand has a side-effect. In that
3028 case, we know the operand occurred exactly once. */
3030 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3031 arg0 = new0;
3032 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3033 arg0 = new1;
3035 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3036 arg1 = new0;
3037 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3038 arg1 = new1;
3040 return fold_build2_loc (loc, code, type, arg0, arg1);
3043 default:
3044 return arg;
3048 /* Return a tree for the case when the result of an expression is RESULT
3049 converted to TYPE and OMITTED was previously an operand of the expression
3050 but is now not needed (e.g., we folded OMITTED * 0).
3052 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3053 the conversion of RESULT to TYPE. */
3055 tree
3056 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3058 tree t = fold_convert_loc (loc, type, result);
3060 /* If the resulting operand is an empty statement, just return the omitted
3061 statement cast to void. */
3062 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3063 return build1_loc (loc, NOP_EXPR, void_type_node,
3064 fold_ignored_result (omitted));
3066 if (TREE_SIDE_EFFECTS (omitted))
3067 return build2_loc (loc, COMPOUND_EXPR, type,
3068 fold_ignored_result (omitted), t);
3070 return non_lvalue_loc (loc, t);
3073 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3075 static tree
3076 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3077 tree omitted)
3079 tree t = fold_convert_loc (loc, type, result);
3081 /* If the resulting operand is an empty statement, just return the omitted
3082 statement cast to void. */
3083 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3084 return build1_loc (loc, NOP_EXPR, void_type_node,
3085 fold_ignored_result (omitted));
3087 if (TREE_SIDE_EFFECTS (omitted))
3088 return build2_loc (loc, COMPOUND_EXPR, type,
3089 fold_ignored_result (omitted), t);
3091 return pedantic_non_lvalue_loc (loc, t);
3094 /* Return a tree for the case when the result of an expression is RESULT
3095 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3096 of the expression but are now not needed.
3098 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3099 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3100 evaluated before OMITTED2. Otherwise, if neither has side effects,
3101 just do the conversion of RESULT to TYPE. */
3103 tree
3104 omit_two_operands_loc (location_t loc, tree type, tree result,
3105 tree omitted1, tree omitted2)
3107 tree t = fold_convert_loc (loc, type, result);
3109 if (TREE_SIDE_EFFECTS (omitted2))
3110 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3111 if (TREE_SIDE_EFFECTS (omitted1))
3112 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3114 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3118 /* Return a simplified tree node for the truth-negation of ARG. This
3119 never alters ARG itself. We assume that ARG is an operation that
3120 returns a truth value (0 or 1).
3122 FIXME: one would think we would fold the result, but it causes
3123 problems with the dominator optimizer. */
3125 static tree
3126 fold_truth_not_expr (location_t loc, tree arg)
3128 tree type = TREE_TYPE (arg);
3129 enum tree_code code = TREE_CODE (arg);
3130 location_t loc1, loc2;
3132 /* If this is a comparison, we can simply invert it, except for
3133 floating-point non-equality comparisons, in which case we just
3134 enclose a TRUTH_NOT_EXPR around what we have. */
3136 if (TREE_CODE_CLASS (code) == tcc_comparison)
3138 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3139 if (FLOAT_TYPE_P (op_type)
3140 && flag_trapping_math
3141 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3142 && code != NE_EXPR && code != EQ_EXPR)
3143 return NULL_TREE;
3145 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3146 if (code == ERROR_MARK)
3147 return NULL_TREE;
3149 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3150 TREE_OPERAND (arg, 1));
3153 switch (code)
3155 case INTEGER_CST:
3156 return constant_boolean_node (integer_zerop (arg), type);
3158 case TRUTH_AND_EXPR:
3159 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3160 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3161 return build2_loc (loc, TRUTH_OR_EXPR, type,
3162 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3163 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3165 case TRUTH_OR_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3168 return build2_loc (loc, TRUTH_AND_EXPR, type,
3169 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3170 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3172 case TRUTH_XOR_EXPR:
3173 /* Here we can invert either operand. We invert the first operand
3174 unless the second operand is a TRUTH_NOT_EXPR in which case our
3175 result is the XOR of the first operand with the inside of the
3176 negation of the second operand. */
3178 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3179 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3180 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3181 else
3182 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3183 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3184 TREE_OPERAND (arg, 1));
3186 case TRUTH_ANDIF_EXPR:
3187 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3188 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3189 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3190 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3191 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3193 case TRUTH_ORIF_EXPR:
3194 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3195 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3196 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3198 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3200 case TRUTH_NOT_EXPR:
3201 return TREE_OPERAND (arg, 0);
3203 case COND_EXPR:
3205 tree arg1 = TREE_OPERAND (arg, 1);
3206 tree arg2 = TREE_OPERAND (arg, 2);
3208 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3209 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3211 /* A COND_EXPR may have a throw as one operand, which
3212 then has void type. Just leave void operands
3213 as they are. */
3214 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3215 VOID_TYPE_P (TREE_TYPE (arg1))
3216 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3217 VOID_TYPE_P (TREE_TYPE (arg2))
3218 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3221 case COMPOUND_EXPR:
3222 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3223 return build2_loc (loc, COMPOUND_EXPR, type,
3224 TREE_OPERAND (arg, 0),
3225 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3227 case NON_LVALUE_EXPR:
3228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3229 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3231 CASE_CONVERT:
3232 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3233 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3235 /* ... fall through ... */
3237 case FLOAT_EXPR:
3238 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3239 return build1_loc (loc, TREE_CODE (arg), type,
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3242 case BIT_AND_EXPR:
3243 if (!integer_onep (TREE_OPERAND (arg, 1)))
3244 return NULL_TREE;
3245 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3247 case SAVE_EXPR:
3248 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3250 case CLEANUP_POINT_EXPR:
3251 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3252 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3253 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3255 default:
3256 return NULL_TREE;
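/* For example, the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are
   De Morgan's laws: !(a && b) becomes !a || !b, and !(a || b)
   becomes !a && !b, with each operand inverted recursively through
   invert_truthvalue_loc. */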
3260 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3261 assume that ARG is an operation that returns a truth value (0 or 1
3262 for scalars, 0 or -1 for vectors). Return the folded expression if
3263 folding is successful. Otherwise, return NULL_TREE. */
3265 static tree
3266 fold_invert_truthvalue (location_t loc, tree arg)
3268 tree type = TREE_TYPE (arg);
3269 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3270 ? BIT_NOT_EXPR
3271 : TRUTH_NOT_EXPR,
3272 type, arg);
3275 /* Return a simplified tree node for the truth-negation of ARG. This
3276 never alters ARG itself. We assume that ARG is an operation that
3277 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3279 tree
3280 invert_truthvalue_loc (location_t loc, tree arg)
3282 if (TREE_CODE (arg) == ERROR_MARK)
3283 return arg;
3285 tree type = TREE_TYPE (arg);
3286 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3287 ? BIT_NOT_EXPR
3288 : TRUTH_NOT_EXPR,
3289 type, arg);
3292 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3293 operands are another bit-wise operation with a common input. If so,
3294 distribute the bit operations to save an operation and possibly two if
3295 constants are involved. For example, convert
3296 (A | B) & (A | C) into A | (B & C)
3297 Further simplification will occur if B and C are constants.
3299 If this optimization cannot be done, 0 will be returned. */
3301 static tree
3302 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3303 tree arg0, tree arg1)
3305 tree common;
3306 tree left, right;
3308 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3309 || TREE_CODE (arg0) == code
3310 || (TREE_CODE (arg0) != BIT_AND_EXPR
3311 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3312 return 0;
3314 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3316 common = TREE_OPERAND (arg0, 0);
3317 left = TREE_OPERAND (arg0, 1);
3318 right = TREE_OPERAND (arg1, 1);
3320 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3322 common = TREE_OPERAND (arg0, 0);
3323 left = TREE_OPERAND (arg0, 1);
3324 right = TREE_OPERAND (arg1, 0);
3326 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3328 common = TREE_OPERAND (arg0, 1);
3329 left = TREE_OPERAND (arg0, 0);
3330 right = TREE_OPERAND (arg1, 1);
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3334 common = TREE_OPERAND (arg0, 1);
3335 left = TREE_OPERAND (arg0, 0);
3336 right = TREE_OPERAND (arg1, 0);
3338 else
3339 return 0;
3341 common = fold_convert_loc (loc, type, common);
3342 left = fold_convert_loc (loc, type, left);
3343 right = fold_convert_loc (loc, type, right);
3344 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3345 fold_build2_loc (loc, code, type, left, right));
3348 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3349 with code CODE. This optimization is unsafe. */
3350 static tree
3351 distribute_real_division (location_t loc, enum tree_code code, tree type,
3352 tree arg0, tree arg1)
3354 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3355 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3357 /* (A / C) +- (B / C) -> (A +- B) / C. */
3358 if (mul0 == mul1
3359 && operand_equal_p (TREE_OPERAND (arg0, 1),
3360 TREE_OPERAND (arg1, 1), 0))
3361 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3362 fold_build2_loc (loc, code, type,
3363 TREE_OPERAND (arg0, 0),
3364 TREE_OPERAND (arg1, 0)),
3365 TREE_OPERAND (arg0, 1));
3367 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3368 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3369 TREE_OPERAND (arg1, 0), 0)
3370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3371 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3373 REAL_VALUE_TYPE r0, r1;
3374 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3375 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3376 if (!mul0)
3377 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3378 if (!mul1)
3379 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3380 real_arithmetic (&r0, code, &r0, &r1);
3381 return fold_build2_loc (loc, MULT_EXPR, type,
3382 TREE_OPERAND (arg0, 0),
3383 build_real (type, r0));
3386 return NULL_TREE;
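/* For example, the second pattern above rewrites X / 2.0 + X / 4.0
   as X * (0.5 + 0.25), i.e. X * 0.75, trading two divisions for one
   multiplication.  The transformation is unsafe in general because
   the reciprocals and their sum need not be exactly representable. */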
3389 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3390 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3392 static tree
3393 make_bit_field_ref (location_t loc, tree inner, tree type,
3394 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3396 tree result, bftype;
3398 if (bitpos == 0)
3400 tree size = TYPE_SIZE (TREE_TYPE (inner));
3401 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3402 || POINTER_TYPE_P (TREE_TYPE (inner)))
3403 && host_integerp (size, 0)
3404 && tree_low_cst (size, 0) == bitsize)
3405 return fold_convert_loc (loc, type, inner);
3408 bftype = type;
3409 if (TYPE_PRECISION (bftype) != bitsize
3410 || TYPE_UNSIGNED (bftype) == !unsignedp)
3411 bftype = build_nonstandard_integer_type (bitsize, 0);
3413 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3414 size_int (bitsize), bitsize_int (bitpos));
3416 if (bftype != type)
3417 result = fold_convert_loc (loc, type, result);
3419 return result;
3422 /* Optimize a bit-field compare.
3424 There are two cases: First is a compare against a constant and the
3425 second is a comparison of two items where the fields are at the same
3426 bit position relative to the start of a chunk (byte, halfword, word)
3427 large enough to contain it. In these cases we can avoid the shift
3428 implicit in bitfield extractions.
3430 For constants, we emit a compare of the shifted constant with the
3431 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3432 compared. For two fields at the same position, we do the ANDs with the
3433 similar mask and compare the result of the ANDs.
3435 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3436 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3437 are the left and right operands of the comparison, respectively.
3439 If the optimization described above can be done, we return the resulting
3440 tree. Otherwise we return zero. */
3442 static tree
3443 optimize_bit_field_compare (location_t loc, enum tree_code code,
3444 tree compare_type, tree lhs, tree rhs)
3446 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3447 tree type = TREE_TYPE (lhs);
3448 tree signed_type, unsigned_type;
3449 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3450 enum machine_mode lmode, rmode, nmode;
3451 int lunsignedp, runsignedp;
3452 int lvolatilep = 0, rvolatilep = 0;
3453 tree linner, rinner = NULL_TREE;
3454 tree mask;
3455 tree offset;
3457 /* In the strict volatile bitfields case, doing code changes here may prevent
3458 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3459 if (flag_strict_volatile_bitfields > 0)
3460 return 0;
3462 /* Get all the information about the extractions being done. If the bit size
3463 is the same as the size of the underlying object, we aren't doing an
3464 extraction at all and so can do nothing. We also don't want to
3465 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3466 then will no longer be able to replace it. */
3467 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3468 &lunsignedp, &lvolatilep, false);
3469 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3470 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3471 return 0;
3473 if (!const_p)
3475 /* If this is not a constant, we can only do something if bit positions,
3476 sizes, and signedness are the same. */
3477 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3478 &runsignedp, &rvolatilep, false);
3480 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3481 || lunsignedp != runsignedp || offset != 0
3482 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3483 return 0;
3486 /* See if we can find a mode to refer to this field. We should be able to,
3487 but fail if we can't. */
3488 if (lvolatilep
3489 && GET_MODE_BITSIZE (lmode) > 0
3490 && flag_strict_volatile_bitfields > 0)
3491 nmode = lmode;
3492 else
3493 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3494 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3495 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3496 TYPE_ALIGN (TREE_TYPE (rinner))),
3497 word_mode, lvolatilep || rvolatilep);
3498 if (nmode == VOIDmode)
3499 return 0;
3501 /* Set signed and unsigned types of the precision of this mode for the
3502 shifts below. */
3503 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3504 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize = GET_MODE_BITSIZE (nmode);
3510 nbitpos = lbitpos & ~ (nbitsize - 1);
3511 lbitpos -= nbitpos;
3512 if (nbitsize == lbitsize)
3513 return 0;
3515 if (BYTES_BIG_ENDIAN)
3516 lbitpos = nbitsize - lbitsize - lbitpos;
3518 /* Make the mask to be used against the extracted field. */
3519 mask = build_int_cst_type (unsigned_type, -1);
3520 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3521 mask = const_binop (RSHIFT_EXPR, mask,
3522 size_int (nbitsize - lbitsize - lbitpos));
3524 if (! const_p)
3525 /* If not comparing with constant, just rework the comparison
3526 and return. */
3527 return fold_build2_loc (loc, code, compare_type,
3528 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3529 make_bit_field_ref (loc, linner,
3530 unsigned_type,
3531 nbitsize, nbitpos,
3532 1),
3533 mask),
3534 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3535 make_bit_field_ref (loc, rinner,
3536 unsigned_type,
3537 nbitsize, nbitpos,
3538 1),
3539 mask));
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3548 the sign bit. */
3550 if (lunsignedp)
3552 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3553 fold_convert_loc (loc,
3554 unsigned_type, rhs),
3555 size_int (lbitsize))))
3557 warning (0, "comparison is always %d due to width of bit-field",
3558 code == NE_EXPR);
3559 return constant_boolean_node (code == NE_EXPR, compare_type);
3562 else
3564 tree tem = const_binop (RSHIFT_EXPR,
3565 fold_convert_loc (loc, signed_type, rhs),
3566 size_int (lbitsize - 1));
3567 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3569 warning (0, "comparison is always %d due to width of bit-field",
3570 code == NE_EXPR);
3571 return constant_boolean_node (code == NE_EXPR, compare_type);
3575 /* Single-bit compares should always be against zero. */
3576 if (lbitsize == 1 && ! integer_zerop (rhs))
3578 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3579 rhs = build_int_cst (type, 0);
3582 /* Make a new bitfield reference, shift the constant over the
3583 appropriate number of bits and mask it with the computed mask
3584 (in case this was a signed field). If we changed it, make a new one. */
3585 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3586 if (lvolatilep)
3588 TREE_SIDE_EFFECTS (lhs) = 1;
3589 TREE_THIS_VOLATILE (lhs) = 1;
3592 rhs = const_binop (BIT_AND_EXPR,
3593 const_binop (LSHIFT_EXPR,
3594 fold_convert_loc (loc, unsigned_type, rhs),
3595 size_int (lbitpos)),
3596 mask);
3598 lhs = build2_loc (loc, code, compare_type,
3599 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3600 return lhs;
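/* For example, given struct S { unsigned a : 3; unsigned b : 5; },
   a test such as s.b == 7 can be compiled as a single load, mask and
   compare of the containing byte, e.g. (byte & 0xf8) == (7 << 3) on
   a little-endian target (the exact mask and shift depend on the
   ABI's bit-field layout), avoiding the shift a bit-field extraction
   would need. */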
3603 /* Subroutine for fold_truth_andor_1: decode a field reference.
3605 If EXP is a comparison reference, we return the innermost reference.
3607 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3608 set to the starting bit number.
3610 If the innermost field can be completely contained in a mode-sized
3611 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3613 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3614 otherwise it is not changed.
3616 *PUNSIGNEDP is set to the signedness of the field.
3618 *PMASK is set to the mask used. This is either contained in a
3619 BIT_AND_EXPR or derived from the width of the field.
3621 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3623 Return 0 if this is not a component reference or is one that we can't
3624 do anything with. */
3626 static tree
3627 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3628 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3629 int *punsignedp, int *pvolatilep,
3630 tree *pmask, tree *pand_mask)
3632 tree outer_type = 0;
3633 tree and_mask = 0;
3634 tree mask, inner, offset;
3635 tree unsigned_type;
3636 unsigned int precision;
3638 /* All the optimizations using this function assume integer fields.
3639 There are problems with FP fields since the type_for_size call
3640 below can fail for, e.g., XFmode. */
3641 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3642 return 0;
3644 /* We are interested in the bare arrangement of bits, so strip everything
3645 that doesn't affect the machine mode. However, record the type of the
3646 outermost expression if it may matter below. */
3647 if (CONVERT_EXPR_P (exp)
3648 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3649 outer_type = TREE_TYPE (exp);
3650 STRIP_NOPS (exp);
3652 if (TREE_CODE (exp) == BIT_AND_EXPR)
3654 and_mask = TREE_OPERAND (exp, 1);
3655 exp = TREE_OPERAND (exp, 0);
3656 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3657 if (TREE_CODE (and_mask) != INTEGER_CST)
3658 return 0;
3661 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3662 punsignedp, pvolatilep, false);
3663 if ((inner == exp && and_mask == 0)
3664 || *pbitsize < 0 || offset != 0
3665 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3666 return 0;
3668 /* If the number of bits in the reference is the same as the bitsize of
3669 the outer type, then the outer type gives the signedness. Otherwise
3670 (in case of a small bitfield) the signedness is unchanged. */
3671 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3672 *punsignedp = TYPE_UNSIGNED (outer_type);
3674 /* Compute the mask to access the bitfield. */
3675 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3676 precision = TYPE_PRECISION (unsigned_type);
3678 mask = build_int_cst_type (unsigned_type, -1);
3680 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3681 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3683 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3684 if (and_mask != 0)
3685 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3686 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3688 *pmask = mask;
3689 *pand_mask = and_mask;
3690 return inner;
3693 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3694 bit positions. */
3696 static int
3697 all_ones_mask_p (const_tree mask, int size)
3699 tree type = TREE_TYPE (mask);
3700 unsigned int precision = TYPE_PRECISION (type);
3701 tree tmask;
3703 tmask = build_int_cst_type (signed_type_for (type), -1);
3705 return
3706 tree_int_cst_equal (mask,
3707 const_binop (RSHIFT_EXPR,
3708 const_binop (LSHIFT_EXPR, tmask,
3709 size_int (precision - size)),
3710 size_int (precision - size)));
3713 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3714 represents the sign bit of EXP's type. If EXP represents a sign
3715 or zero extension, also test VAL against the unextended type.
3716 The return value is the (sub)expression whose sign bit is VAL,
3717 or NULL_TREE otherwise. */
3719 static tree
3720 sign_bit_p (tree exp, const_tree val)
3722 unsigned HOST_WIDE_INT mask_lo, lo;
3723 HOST_WIDE_INT mask_hi, hi;
3724 int width;
3725 tree t;
3727 /* Tree EXP must have an integral type. */
3728 t = TREE_TYPE (exp);
3729 if (! INTEGRAL_TYPE_P (t))
3730 return NULL_TREE;
3732 /* Tree VAL must be an integer constant. */
3733 if (TREE_CODE (val) != INTEGER_CST
3734 || TREE_OVERFLOW (val))
3735 return NULL_TREE;
3737 width = TYPE_PRECISION (t);
3738 if (width > HOST_BITS_PER_WIDE_INT)
3740 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3741 lo = 0;
3743 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3744 mask_lo = -1;
3746 else
3748 hi = 0;
3749 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3751 mask_hi = 0;
3752 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3755 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3756 treat VAL as if it were unsigned. */
3757 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3758 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3759 return exp;
3761 /* Handle extension from a narrower type. */
3762 if (TREE_CODE (exp) == NOP_EXPR
3763 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3764 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3766 return NULL_TREE;
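/* For example, with a 32-bit int type, sign_bit_p accepts
   VAL == 0x80000000 (1 << 31).  If EXP is (int) c for a signed char C,
   the recursive call on the operand also accepts VAL == 0x80, the sign
   bit of the unextended 8-bit type.  */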
3769 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3770 to be evaluated unconditionally. */
3772 static int
3773 simple_operand_p (const_tree exp)
3775 /* Strip any conversions that don't change the machine mode. */
3776 STRIP_NOPS (exp);
3778 return (CONSTANT_CLASS_P (exp)
3779 || TREE_CODE (exp) == SSA_NAME
3780 || (DECL_P (exp)
3781 && ! TREE_ADDRESSABLE (exp)
3782 && ! TREE_THIS_VOLATILE (exp)
3783 && ! DECL_NONLOCAL (exp)
3784 /* Don't regard global variables as simple. They may be
3785 allocated in ways unknown to the compiler (shared memory,
3786 #pragma weak, etc). */
3787 && ! TREE_PUBLIC (exp)
3788 && ! DECL_EXTERNAL (exp)
 3789 /* Weakrefs are not safe to read, since they can be NULL.
3790 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3791 have DECL_WEAK flag set. */
3792 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3793 /* Loading a static variable is unduly expensive, but global
3794 registers aren't expensive. */
3795 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3798 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3799 to be evaluated unconditionally.
 3800 In addition to simple_operand_p, we assume that comparisons, conversions,
3801 and logic-not operations are simple, if their operands are simple, too. */
3803 static bool
3804 simple_operand_p_2 (tree exp)
3806 enum tree_code code;
3808 if (TREE_SIDE_EFFECTS (exp)
3809 || tree_could_trap_p (exp))
3810 return false;
3812 while (CONVERT_EXPR_P (exp))
3813 exp = TREE_OPERAND (exp, 0);
3815 code = TREE_CODE (exp);
3817 if (TREE_CODE_CLASS (code) == tcc_comparison)
3818 return (simple_operand_p (TREE_OPERAND (exp, 0))
3819 && simple_operand_p (TREE_OPERAND (exp, 1)));
3821 if (code == TRUTH_NOT_EXPR)
3822 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3824 return simple_operand_p (exp);
3828 /* The following functions are subroutines to fold_range_test and allow it to
3829 try to change a logical combination of comparisons into a range test.
3831 For example, both
 3832 X == 2 || X == 3 || X == 4 || X == 5
 3833 and
 3834 X >= 2 && X <= 5
3835 are converted to
3836 (unsigned) (X - 2) <= 3
3838 We describe each set of comparisons as being either inside or outside
3839 a range, using a variable named like IN_P, and then describe the
3840 range with a lower and upper bound. If one of the bounds is omitted,
3841 it represents either the highest or lowest value of the type.
3843 In the comments below, we represent a range by two numbers in brackets
3844 preceded by a "+" to designate being inside that range, or a "-" to
3845 designate being outside that range, so the condition can be inverted by
3846 flipping the prefix. An omitted bound is represented by a "-". For
3847 example, "- [-, 10]" means being outside the range starting at the lowest
3848 possible value and ending at 10, in other words, being greater than 10.
3849 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3850 always false.
3852 We set up things so that the missing bounds are handled in a consistent
3853 manner so neither a missing bound nor "true" and "false" need to be
3854 handled using a special case. */
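/* A standalone sketch of the final shape of such a range test,
   assuming 32-bit ints (illustrative only; this helper is not part of
   the folder):

     static int
     in_range_2_to_5 (int x)
     {
       return (unsigned int) x - 2u <= 3u;
     }

   Values 2..5 map to 0..3, while anything outside the range wraps
   around to a large unsigned number, so one unsigned comparison
   replaces the whole chain of tests.  */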
3856 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3857 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3858 and UPPER1_P are nonzero if the respective argument is an upper bound
3859 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3860 must be specified for a comparison. ARG1 will be converted to ARG0's
3861 type if both are specified. */
3863 static tree
3864 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3865 tree arg1, int upper1_p)
3867 tree tem;
3868 int result;
3869 int sgn0, sgn1;
3871 /* If neither arg represents infinity, do the normal operation.
3872 Else, if not a comparison, return infinity. Else handle the special
3873 comparison rules. Note that most of the cases below won't occur, but
3874 are handled for consistency. */
3876 if (arg0 != 0 && arg1 != 0)
3878 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3879 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3880 STRIP_NOPS (tem);
3881 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3884 if (TREE_CODE_CLASS (code) != tcc_comparison)
3885 return 0;
3887 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
 3888 for neither. In real mathematics, we cannot assume open-ended ranges
 3889 are the same. But this is computer arithmetic, where numbers are finite.
 3890 We can therefore model any missing bound as a value Z beyond every
 3891 representable number (below them for a lower bound, above them for an
 3892 upper one), which permits us to treat unbounded ranges as equal. */
3893 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3894 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3895 switch (code)
3897 case EQ_EXPR:
3898 result = sgn0 == sgn1;
3899 break;
3900 case NE_EXPR:
3901 result = sgn0 != sgn1;
3902 break;
3903 case LT_EXPR:
3904 result = sgn0 < sgn1;
3905 break;
3906 case LE_EXPR:
3907 result = sgn0 <= sgn1;
3908 break;
3909 case GT_EXPR:
3910 result = sgn0 > sgn1;
3911 break;
3912 case GE_EXPR:
3913 result = sgn0 >= sgn1;
3914 break;
3915 default:
3916 gcc_unreachable ();
3919 return constant_boolean_node (result, type);
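/* For instance, comparing two omitted upper bounds (ARG0 == ARG1 == 0
   with UPPER0_P and UPPER1_P nonzero) gives sgn0 == sgn1 == 1, so
   EQ_EXPR yields true: both stand for the same +Z.  An omitted lower
   bound compares LT against an omitted upper bound for the same
   reason.  */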
3922 /* Helper routine for make_range. Perform one step for it, return
3923 new expression if the loop should continue or NULL_TREE if it should
3924 stop. */
3926 tree
3927 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3928 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3929 bool *strict_overflow_p)
3931 tree arg0_type = TREE_TYPE (arg0);
3932 tree n_low, n_high, low = *p_low, high = *p_high;
3933 int in_p = *p_in_p, n_in_p;
3935 switch (code)
3937 case TRUTH_NOT_EXPR:
3938 /* We can only do something if the range is testing for zero. */
3939 if (low == NULL_TREE || high == NULL_TREE
3940 || ! integer_zerop (low) || ! integer_zerop (high))
3941 return NULL_TREE;
3942 *p_in_p = ! in_p;
3943 return arg0;
3945 case EQ_EXPR: case NE_EXPR:
3946 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3947 /* We can only do something if the range is testing for zero
3948 and if the second operand is an integer constant. Note that
3949 saying something is "in" the range we make is done by
 3950 complementing IN_P, since IN_P is set in the initial case of
 3951 being not equal to zero; "out" means leaving it alone. */
3952 if (low == NULL_TREE || high == NULL_TREE
3953 || ! integer_zerop (low) || ! integer_zerop (high)
3954 || TREE_CODE (arg1) != INTEGER_CST)
3955 return NULL_TREE;
3957 switch (code)
3959 case NE_EXPR: /* - [c, c] */
3960 low = high = arg1;
3961 break;
3962 case EQ_EXPR: /* + [c, c] */
3963 in_p = ! in_p, low = high = arg1;
3964 break;
3965 case GT_EXPR: /* - [-, c] */
3966 low = 0, high = arg1;
3967 break;
3968 case GE_EXPR: /* + [c, -] */
3969 in_p = ! in_p, low = arg1, high = 0;
3970 break;
3971 case LT_EXPR: /* - [c, -] */
3972 low = arg1, high = 0;
3973 break;
3974 case LE_EXPR: /* + [-, c] */
3975 in_p = ! in_p, low = 0, high = arg1;
3976 break;
3977 default:
3978 gcc_unreachable ();
3981 /* If this is an unsigned comparison, we also know that EXP is
3982 greater than or equal to zero. We base the range tests we make
3983 on that fact, so we record it here so we can parse existing
3984 range tests. We test arg0_type since often the return type
3985 of, e.g. EQ_EXPR, is boolean. */
3986 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3988 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3989 in_p, low, high, 1,
3990 build_int_cst (arg0_type, 0),
3991 NULL_TREE))
3992 return NULL_TREE;
3994 in_p = n_in_p, low = n_low, high = n_high;
3996 /* If the high bound is missing, but we have a nonzero low
3997 bound, reverse the range so it goes from zero to the low bound
3998 minus 1. */
3999 if (high == 0 && low && ! integer_zerop (low))
4001 in_p = ! in_p;
4002 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4003 integer_one_node, 0);
4004 low = build_int_cst (arg0_type, 0);
4008 *p_low = low;
4009 *p_high = high;
4010 *p_in_p = in_p;
4011 return arg0;
4013 case NEGATE_EXPR:
4014 /* If flag_wrapv and ARG0_TYPE is signed, make sure
 4015 low and high are non-NULL, then normalize will do the right thing. */
4016 if (!TYPE_UNSIGNED (arg0_type)
4017 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4019 if (low == NULL_TREE)
4020 low = TYPE_MIN_VALUE (arg0_type);
4021 if (high == NULL_TREE)
4022 high = TYPE_MAX_VALUE (arg0_type);
4025 /* (-x) IN [a,b] -> x in [-b, -a] */
4026 n_low = range_binop (MINUS_EXPR, exp_type,
4027 build_int_cst (exp_type, 0),
4028 0, high, 1);
4029 n_high = range_binop (MINUS_EXPR, exp_type,
4030 build_int_cst (exp_type, 0),
4031 0, low, 0);
4032 if (n_high != 0 && TREE_OVERFLOW (n_high))
4033 return NULL_TREE;
4034 goto normalize;
4036 case BIT_NOT_EXPR:
4037 /* ~ X -> -X - 1 */
4038 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4039 build_int_cst (exp_type, 1));
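/* For example, in two's complement arithmetic ~5 == -6 == -5 - 1, so a
   range test on ~X becomes one on -X - 1 and is then handled by the
   NEGATE_EXPR and MINUS_EXPR cases.  */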
4041 case PLUS_EXPR:
4042 case MINUS_EXPR:
4043 if (TREE_CODE (arg1) != INTEGER_CST)
4044 return NULL_TREE;
4046 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4047 move a constant to the other side. */
4048 if (!TYPE_UNSIGNED (arg0_type)
4049 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4050 return NULL_TREE;
4052 /* If EXP is signed, any overflow in the computation is undefined,
4053 so we don't worry about it so long as our computations on
4054 the bounds don't overflow. For unsigned, overflow is defined
4055 and this is exactly the right thing. */
4056 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4057 arg0_type, low, 0, arg1, 0);
4058 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4059 arg0_type, high, 1, arg1, 0);
4060 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4061 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4062 return NULL_TREE;
4064 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4065 *strict_overflow_p = true;
4067 normalize:
4068 /* Check for an unsigned range which has wrapped around the maximum
4069 value thus making n_high < n_low, and normalize it. */
4070 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4072 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4073 integer_one_node, 0);
4074 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4075 integer_one_node, 0);
4077 /* If the range is of the form +/- [ x+1, x ], we won't
4078 be able to normalize it. But then, it represents the
4079 whole range or the empty set, so make it
4080 +/- [ -, - ]. */
4081 if (tree_int_cst_equal (n_low, low)
4082 && tree_int_cst_equal (n_high, high))
4083 low = high = 0;
4084 else
4085 in_p = ! in_p;
4087 else
4088 low = n_low, high = n_high;
4090 *p_low = low;
4091 *p_high = high;
4092 *p_in_p = in_p;
4093 return arg0;
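/* As an example of the normalization above: if the range for X + 6 in
   an 8-bit unsigned type is + [0, 10], moving the constant gives
   + [250, 4] for X, which wraps around.  Since 4 < 250, it is
   rewritten as the complementary range - [5, 249], which needs no
   wrap-around.  */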
4095 CASE_CONVERT:
4096 case NON_LVALUE_EXPR:
4097 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4098 return NULL_TREE;
4100 if (! INTEGRAL_TYPE_P (arg0_type)
4101 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4102 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4103 return NULL_TREE;
4105 n_low = low, n_high = high;
4107 if (n_low != 0)
4108 n_low = fold_convert_loc (loc, arg0_type, n_low);
4110 if (n_high != 0)
4111 n_high = fold_convert_loc (loc, arg0_type, n_high);
 4113 /* If we're converting arg0 from an unsigned type to exp,
4114 a signed type, we will be doing the comparison as unsigned.
4115 The tests above have already verified that LOW and HIGH
4116 are both positive.
4118 So we have to ensure that we will handle large unsigned
4119 values the same way that the current signed bounds treat
4120 negative values. */
4122 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4124 tree high_positive;
4125 tree equiv_type;
4126 /* For fixed-point modes, we need to pass the saturating flag
 4127 as the second parameter. */
4128 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4129 equiv_type
4130 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4131 TYPE_SATURATING (arg0_type));
4132 else
4133 equiv_type
4134 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4136 /* A range without an upper bound is, naturally, unbounded.
4137 Since convert would have cropped a very large value, use
4138 the max value for the destination type. */
4139 high_positive
4140 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4141 : TYPE_MAX_VALUE (arg0_type);
4143 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4144 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4145 fold_convert_loc (loc, arg0_type,
4146 high_positive),
4147 build_int_cst (arg0_type, 1));
4149 /* If the low bound is specified, "and" the range with the
4150 range for which the original unsigned value will be
4151 positive. */
4152 if (low != 0)
4154 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4155 1, fold_convert_loc (loc, arg0_type,
4156 integer_zero_node),
4157 high_positive))
4158 return NULL_TREE;
4160 in_p = (n_in_p == in_p);
4162 else
4164 /* Otherwise, "or" the range with the range of the input
4165 that will be interpreted as negative. */
4166 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4167 1, fold_convert_loc (loc, arg0_type,
4168 integer_zero_node),
4169 high_positive))
4170 return NULL_TREE;
4172 in_p = (in_p != n_in_p);
4176 *p_low = n_low;
4177 *p_high = n_high;
4178 *p_in_p = in_p;
4179 return arg0;
4181 default:
4182 return NULL_TREE;
4186 /* Given EXP, a logical expression, set the range it is testing into
4187 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4188 actually being tested. *PLOW and *PHIGH will be made of the same
4189 type as the returned expression. If EXP is not a comparison, we
4190 will most likely not be returning a useful value and range. Set
4191 *STRICT_OVERFLOW_P to true if the return value is only valid
4192 because signed overflow is undefined; otherwise, do not change
4193 *STRICT_OVERFLOW_P. */
4195 tree
4196 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4197 bool *strict_overflow_p)
4199 enum tree_code code;
4200 tree arg0, arg1 = NULL_TREE;
4201 tree exp_type, nexp;
4202 int in_p;
4203 tree low, high;
4204 location_t loc = EXPR_LOCATION (exp);
4206 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4207 and see if we can refine the range. Some of the cases below may not
4208 happen, but it doesn't seem worth worrying about this. We "continue"
4209 the outer loop when we've changed something; otherwise we "break"
4210 the switch, which will "break" the while. */
4212 in_p = 0;
4213 low = high = build_int_cst (TREE_TYPE (exp), 0);
4215 while (1)
4217 code = TREE_CODE (exp);
4218 exp_type = TREE_TYPE (exp);
4219 arg0 = NULL_TREE;
4221 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4223 if (TREE_OPERAND_LENGTH (exp) > 0)
4224 arg0 = TREE_OPERAND (exp, 0);
4225 if (TREE_CODE_CLASS (code) == tcc_binary
4226 || TREE_CODE_CLASS (code) == tcc_comparison
4227 || (TREE_CODE_CLASS (code) == tcc_expression
4228 && TREE_OPERAND_LENGTH (exp) > 1))
4229 arg1 = TREE_OPERAND (exp, 1);
4231 if (arg0 == NULL_TREE)
4232 break;
4234 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4235 &high, &in_p, strict_overflow_p);
4236 if (nexp == NULL_TREE)
4237 break;
4238 exp = nexp;
4241 /* If EXP is a constant, we can evaluate whether this is true or false. */
4242 if (TREE_CODE (exp) == INTEGER_CST)
4244 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4245 exp, 0, low, 0))
4246 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4247 exp, 1, high, 1)));
4248 low = high = 0;
4249 exp = 0;
4252 *pin_p = in_p, *plow = low, *phigh = high;
4253 return exp;
4256 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4257 type, TYPE, return an expression to test if EXP is in (or out of, depending
4258 on IN_P) the range. Return 0 if the test couldn't be created. */
4260 tree
4261 build_range_check (location_t loc, tree type, tree exp, int in_p,
4262 tree low, tree high)
4264 tree etype = TREE_TYPE (exp), value;
4266 #ifdef HAVE_canonicalize_funcptr_for_compare
4267 /* Disable this optimization for function pointer expressions
4268 on targets that require function pointer canonicalization. */
4269 if (HAVE_canonicalize_funcptr_for_compare
4270 && TREE_CODE (etype) == POINTER_TYPE
4271 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4272 return NULL_TREE;
4273 #endif
4275 if (! in_p)
4277 value = build_range_check (loc, type, exp, 1, low, high);
4278 if (value != 0)
4279 return invert_truthvalue_loc (loc, value);
4281 return 0;
4284 if (low == 0 && high == 0)
4285 return build_int_cst (type, 1);
4287 if (low == 0)
4288 return fold_build2_loc (loc, LE_EXPR, type, exp,
4289 fold_convert_loc (loc, etype, high));
4291 if (high == 0)
4292 return fold_build2_loc (loc, GE_EXPR, type, exp,
4293 fold_convert_loc (loc, etype, low));
4295 if (operand_equal_p (low, high, 0))
4296 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4297 fold_convert_loc (loc, etype, low));
4299 if (integer_zerop (low))
4301 if (! TYPE_UNSIGNED (etype))
4303 etype = unsigned_type_for (etype);
4304 high = fold_convert_loc (loc, etype, high);
4305 exp = fold_convert_loc (loc, etype, exp);
4307 return build_range_check (loc, type, exp, 1, 0, high);
4310 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4311 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4313 unsigned HOST_WIDE_INT lo;
4314 HOST_WIDE_INT hi;
4315 int prec;
4317 prec = TYPE_PRECISION (etype);
4318 if (prec <= HOST_BITS_PER_WIDE_INT)
4320 hi = 0;
4321 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4323 else
4325 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4326 lo = HOST_WIDE_INT_M1U;
4329 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4331 if (TYPE_UNSIGNED (etype))
4333 tree signed_etype = signed_type_for (etype);
4334 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4335 etype
4336 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4337 else
4338 etype = signed_etype;
4339 exp = fold_convert_loc (loc, etype, exp);
4341 return fold_build2_loc (loc, GT_EXPR, type, exp,
4342 build_int_cst (etype, 0));
4346 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
 4347 This requires wrap-around arithmetic for the type of the expression.
 4348 First make sure that arithmetic in this type is valid, then make sure
4349 that it wraps around. */
4350 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4351 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4352 TYPE_UNSIGNED (etype));
4354 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4356 tree utype, minv, maxv;
4358 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4359 for the type in question, as we rely on this here. */
4360 utype = unsigned_type_for (etype);
4361 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4362 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4363 integer_one_node, 1);
4364 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4366 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4367 minv, 1, maxv, 1)))
4368 etype = utype;
4369 else
4370 return 0;
4373 high = fold_convert_loc (loc, etype, high);
4374 low = fold_convert_loc (loc, etype, low);
4375 exp = fold_convert_loc (loc, etype, exp);
4377 value = const_binop (MINUS_EXPR, high, low);
4380 if (POINTER_TYPE_P (etype))
4382 if (value != 0 && !TREE_OVERFLOW (value))
4384 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4385 return build_range_check (loc, type,
4386 fold_build_pointer_plus_loc (loc, exp, low),
4387 1, build_int_cst (etype, 0), value);
4389 return 0;
4392 if (value != 0 && !TREE_OVERFLOW (value))
4393 return build_range_check (loc, type,
4394 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4395 1, build_int_cst (etype, 0), value);
4397 return 0;
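/* For example, a check of an unsigned char C against + [32, 127] ends
   up as the single comparison

     (unsigned char) (c - 32) <= 95

   where 95 is const_binop (MINUS_EXPR, high, low): subtracting LOW
   from EXP and from both bounds reduces the two-sided test to one
   comparison against a zero-based bound.  */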
4400 /* Return the predecessor of VAL in its type, handling the infinite case. */
4402 static tree
4403 range_predecessor (tree val)
4405 tree type = TREE_TYPE (val);
4407 if (INTEGRAL_TYPE_P (type)
4408 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4409 return 0;
4410 else
4411 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4414 /* Return the successor of VAL in its type, handling the infinite case. */
4416 static tree
4417 range_successor (tree val)
4419 tree type = TREE_TYPE (val);
4421 if (INTEGRAL_TYPE_P (type)
4422 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4423 return 0;
4424 else
4425 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4428 /* Given two ranges, see if we can merge them into one. Return 1 if we
4429 can, 0 if we can't. Set the output range into the specified parameters. */
4431 bool
4432 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4433 tree high0, int in1_p, tree low1, tree high1)
4435 int no_overlap;
4436 int subset;
4437 int temp;
4438 tree tem;
4439 int in_p;
4440 tree low, high;
4441 int lowequal = ((low0 == 0 && low1 == 0)
4442 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4443 low0, 0, low1, 0)));
4444 int highequal = ((high0 == 0 && high1 == 0)
4445 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4446 high0, 1, high1, 1)));
4448 /* Make range 0 be the range that starts first, or ends last if they
 4449 start at the same value. Swap them if that is not already the case. */
4450 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4451 low0, 0, low1, 0))
4452 || (lowequal
4453 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4454 high1, 1, high0, 1))))
4456 temp = in0_p, in0_p = in1_p, in1_p = temp;
4457 tem = low0, low0 = low1, low1 = tem;
4458 tem = high0, high0 = high1, high1 = tem;
4461 /* Now flag two cases, whether the ranges are disjoint or whether the
4462 second range is totally subsumed in the first. Note that the tests
4463 below are simplified by the ones above. */
4464 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4465 high0, 1, low1, 0));
4466 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4467 high1, 1, high0, 1));
4469 /* We now have four cases, depending on whether we are including or
4470 excluding the two ranges. */
4471 if (in0_p && in1_p)
4473 /* If they don't overlap, the result is false. If the second range
4474 is a subset it is the result. Otherwise, the range is from the start
4475 of the second to the end of the first. */
4476 if (no_overlap)
4477 in_p = 0, low = high = 0;
4478 else if (subset)
4479 in_p = 1, low = low1, high = high1;
4480 else
4481 in_p = 1, low = low1, high = high0;
4484 else if (in0_p && ! in1_p)
4486 /* If they don't overlap, the result is the first range. If they are
4487 equal, the result is false. If the second range is a subset of the
4488 first, and the ranges begin at the same place, we go from just after
4489 the end of the second range to the end of the first. If the second
4490 range is not a subset of the first, or if it is a subset and both
4491 ranges end at the same place, the range starts at the start of the
4492 first range and ends just before the second range.
4493 Otherwise, we can't describe this as a single range. */
4494 if (no_overlap)
4495 in_p = 1, low = low0, high = high0;
4496 else if (lowequal && highequal)
4497 in_p = 0, low = high = 0;
4498 else if (subset && lowequal)
4500 low = range_successor (high1);
4501 high = high0;
4502 in_p = 1;
4503 if (low == 0)
4505 /* We are in the weird situation where high0 > high1 but
4506 high1 has no successor. Punt. */
4507 return 0;
4510 else if (! subset || highequal)
4512 low = low0;
4513 high = range_predecessor (low1);
4514 in_p = 1;
4515 if (high == 0)
4517 /* low0 < low1 but low1 has no predecessor. Punt. */
4518 return 0;
4521 else
4522 return 0;
4525 else if (! in0_p && in1_p)
4527 /* If they don't overlap, the result is the second range. If the second
4528 is a subset of the first, the result is false. Otherwise,
4529 the range starts just after the first range and ends at the
4530 end of the second. */
4531 if (no_overlap)
4532 in_p = 1, low = low1, high = high1;
4533 else if (subset || highequal)
4534 in_p = 0, low = high = 0;
4535 else
4537 low = range_successor (high0);
4538 high = high1;
4539 in_p = 1;
4540 if (low == 0)
4542 /* high1 > high0 but high0 has no successor. Punt. */
4543 return 0;
4548 else
4550 /* The case where we are excluding both ranges. Here the complex case
4551 is if they don't overlap. In that case, the only time we have a
4552 range is if they are adjacent. If the second is a subset of the
4553 first, the result is the first. Otherwise, the range to exclude
4554 starts at the beginning of the first range and ends at the end of the
4555 second. */
4556 if (no_overlap)
4558 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4559 range_successor (high0),
4560 1, low1, 0)))
4561 in_p = 0, low = low0, high = high1;
4562 else
4564 /* Canonicalize - [min, x] into - [-, x]. */
4565 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4566 switch (TREE_CODE (TREE_TYPE (low0)))
4568 case ENUMERAL_TYPE:
4569 if (TYPE_PRECISION (TREE_TYPE (low0))
4570 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4571 break;
4572 /* FALLTHROUGH */
4573 case INTEGER_TYPE:
4574 if (tree_int_cst_equal (low0,
4575 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4576 low0 = 0;
4577 break;
4578 case POINTER_TYPE:
4579 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4580 && integer_zerop (low0))
4581 low0 = 0;
4582 break;
4583 default:
4584 break;
4587 /* Canonicalize - [x, max] into - [x, -]. */
4588 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4589 switch (TREE_CODE (TREE_TYPE (high1)))
4591 case ENUMERAL_TYPE:
4592 if (TYPE_PRECISION (TREE_TYPE (high1))
4593 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4594 break;
4595 /* FALLTHROUGH */
4596 case INTEGER_TYPE:
4597 if (tree_int_cst_equal (high1,
4598 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4599 high1 = 0;
4600 break;
4601 case POINTER_TYPE:
4602 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4603 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4604 high1, 1,
4605 integer_one_node, 1)))
4606 high1 = 0;
4607 break;
4608 default:
4609 break;
 4612 /* The ranges might also be adjacent between the maximum and
4613 minimum values of the given type. For
4614 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4615 return + [x + 1, y - 1]. */
4616 if (low0 == 0 && high1 == 0)
4618 low = range_successor (high0);
4619 high = range_predecessor (low1);
4620 if (low == 0 || high == 0)
4621 return 0;
4623 in_p = 1;
4625 else
4626 return 0;
4629 else if (subset)
4630 in_p = 0, low = low0, high = high0;
4631 else
4632 in_p = 0, low = low0, high = high1;
4635 *pin_p = in_p, *plow = low, *phigh = high;
4636 return 1;
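/* Two quick examples of the merging logic above: combining + [2, 10]
   with + [5, 20] (both included) yields the intersection + [5, 10];
   combining the exclusions - [0, 4] and - [10, 15] has no single-range
   representation, because the excluded ranges are disjoint but not
   adjacent, so merge_ranges returns 0 for it.  */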
4640 /* Subroutine of fold, looking inside expressions of the form
4641 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4642 of the COND_EXPR. This function is being used also to optimize
4643 A op B ? C : A, by reversing the comparison first.
4645 Return a folded expression whose code is not a COND_EXPR
4646 anymore, or NULL_TREE if no folding opportunity is found. */
4648 static tree
4649 fold_cond_expr_with_comparison (location_t loc, tree type,
4650 tree arg0, tree arg1, tree arg2)
4652 enum tree_code comp_code = TREE_CODE (arg0);
4653 tree arg00 = TREE_OPERAND (arg0, 0);
4654 tree arg01 = TREE_OPERAND (arg0, 1);
4655 tree arg1_type = TREE_TYPE (arg1);
4656 tree tem;
4658 STRIP_NOPS (arg1);
4659 STRIP_NOPS (arg2);
4661 /* If we have A op 0 ? A : -A, consider applying the following
4662 transformations:
4664 A == 0? A : -A same as -A
4665 A != 0? A : -A same as A
4666 A >= 0? A : -A same as abs (A)
4667 A > 0? A : -A same as abs (A)
4668 A <= 0? A : -A same as -abs (A)
4669 A < 0? A : -A same as -abs (A)
4671 None of these transformations work for modes with signed
4672 zeros. If A is +/-0, the first two transformations will
4673 change the sign of the result (from +0 to -0, or vice
4674 versa). The last four will fix the sign of the result,
4675 even though the original expressions could be positive or
4676 negative, depending on the sign of A.
4678 Note that all these transformations are correct if A is
4679 NaN, since the two alternatives (A and -A) are also NaNs. */
4680 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4681 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4682 ? real_zerop (arg01)
4683 : integer_zerop (arg01))
4684 && ((TREE_CODE (arg2) == NEGATE_EXPR
4685 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4686 /* In the case that A is of the form X-Y, '-A' (arg2) may
4687 have already been folded to Y-X, check for that. */
4688 || (TREE_CODE (arg1) == MINUS_EXPR
4689 && TREE_CODE (arg2) == MINUS_EXPR
4690 && operand_equal_p (TREE_OPERAND (arg1, 0),
4691 TREE_OPERAND (arg2, 1), 0)
4692 && operand_equal_p (TREE_OPERAND (arg1, 1),
4693 TREE_OPERAND (arg2, 0), 0))))
4694 switch (comp_code)
4696 case EQ_EXPR:
4697 case UNEQ_EXPR:
4698 tem = fold_convert_loc (loc, arg1_type, arg1);
4699 return pedantic_non_lvalue_loc (loc,
4700 fold_convert_loc (loc, type,
4701 negate_expr (tem)));
4702 case NE_EXPR:
4703 case LTGT_EXPR:
4704 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4705 case UNGE_EXPR:
4706 case UNGT_EXPR:
4707 if (flag_trapping_math)
4708 break;
4709 /* Fall through. */
4710 case GE_EXPR:
4711 case GT_EXPR:
4712 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4713 arg1 = fold_convert_loc (loc, signed_type_for
4714 (TREE_TYPE (arg1)), arg1);
4715 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4716 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4717 case UNLE_EXPR:
4718 case UNLT_EXPR:
4719 if (flag_trapping_math)
4720 break;
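/* Fall through. */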
4721 case LE_EXPR:
4722 case LT_EXPR:
4723 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4724 arg1 = fold_convert_loc (loc, signed_type_for
4725 (TREE_TYPE (arg1)), arg1);
4726 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4727 return negate_expr (fold_convert_loc (loc, type, tem));
4728 default:
4729 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4730 break;
4733 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4734 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4735 both transformations are correct when A is NaN: A != 0
4736 is then true, and A == 0 is false. */
4738 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4739 && integer_zerop (arg01) && integer_zerop (arg2))
4741 if (comp_code == NE_EXPR)
4742 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4743 else if (comp_code == EQ_EXPR)
4744 return build_zero_cst (type);
4747 /* Try some transformations of A op B ? A : B.
4749 A == B? A : B same as B
4750 A != B? A : B same as A
4751 A >= B? A : B same as max (A, B)
4752 A > B? A : B same as max (B, A)
4753 A <= B? A : B same as min (A, B)
4754 A < B? A : B same as min (B, A)
4756 As above, these transformations don't work in the presence
4757 of signed zeros. For example, if A and B are zeros of
4758 opposite sign, the first two transformations will change
4759 the sign of the result. In the last four, the original
4760 expressions give different results for (A=+0, B=-0) and
4761 (A=-0, B=+0), but the transformed expressions do not.
4763 The first two transformations are correct if either A or B
4764 is a NaN. In the first transformation, the condition will
4765 be false, and B will indeed be chosen. In the case of the
4766 second transformation, the condition A != B will be true,
4767 and A will be chosen.
4769 The conversions to max() and min() are not correct if B is
4770 a number and A is not. The conditions in the original
4771 expressions will be false, so all four give B. The min()
4772 and max() versions would give a NaN instead. */
4773 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4774 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4775 /* Avoid these transformations if the COND_EXPR may be used
4776 as an lvalue in the C++ front-end. PR c++/19199. */
4777 && (in_gimple_form
4778 || VECTOR_TYPE_P (type)
4779 || (strcmp (lang_hooks.name, "GNU C++") != 0
4780 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4781 || ! maybe_lvalue_p (arg1)
4782 || ! maybe_lvalue_p (arg2)))
4784 tree comp_op0 = arg00;
4785 tree comp_op1 = arg01;
4786 tree comp_type = TREE_TYPE (comp_op0);
4788 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4789 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4791 comp_type = type;
4792 comp_op0 = arg1;
4793 comp_op1 = arg2;
4796 switch (comp_code)
4798 case EQ_EXPR:
4799 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4800 case NE_EXPR:
4801 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4802 case LE_EXPR:
4803 case LT_EXPR:
4804 case UNLE_EXPR:
4805 case UNLT_EXPR:
4806 /* In C++ a ?: expression can be an lvalue, so put the
4807 operand which will be used if they are equal first
4808 so that we can convert this back to the
4809 corresponding COND_EXPR. */
4810 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4812 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4813 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4814 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4815 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4816 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4817 comp_op1, comp_op0);
4818 return pedantic_non_lvalue_loc (loc,
4819 fold_convert_loc (loc, type, tem));
4821 break;
4822 case GE_EXPR:
4823 case GT_EXPR:
4824 case UNGE_EXPR:
4825 case UNGT_EXPR:
4826 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4828 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4829 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4830 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4831 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4832 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4833 comp_op1, comp_op0);
4834 return pedantic_non_lvalue_loc (loc,
4835 fold_convert_loc (loc, type, tem));
4837 break;
4838 case UNEQ_EXPR:
4839 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 return pedantic_non_lvalue_loc (loc,
4841 fold_convert_loc (loc, type, arg2));
4842 break;
4843 case LTGT_EXPR:
4844 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4845 return pedantic_non_lvalue_loc (loc,
4846 fold_convert_loc (loc, type, arg1));
4847 break;
4848 default:
4849 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4850 break;
4854 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4855 we might still be able to simplify this. For example,
4856 if C1 is one less or one more than C2, this might have started
4857 out as a MIN or MAX and been transformed by this function.
4858 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4860 if (INTEGRAL_TYPE_P (type)
4861 && TREE_CODE (arg01) == INTEGER_CST
4862 && TREE_CODE (arg2) == INTEGER_CST)
4863 switch (comp_code)
4865 case EQ_EXPR:
4866 if (TREE_CODE (arg1) == INTEGER_CST)
4867 break;
4868 /* We can replace A with C1 in this case. */
4869 arg1 = fold_convert_loc (loc, type, arg01);
4870 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4872 case LT_EXPR:
4873 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4874 MIN_EXPR, to preserve the signedness of the comparison. */
4875 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4876 OEP_ONLY_CONST)
4877 && operand_equal_p (arg01,
4878 const_binop (PLUS_EXPR, arg2,
4879 build_int_cst (type, 1)),
4880 OEP_ONLY_CONST))
4882 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4883 fold_convert_loc (loc, TREE_TYPE (arg00),
4884 arg2));
4885 return pedantic_non_lvalue_loc (loc,
4886 fold_convert_loc (loc, type, tem));
4888 break;
4890 case LE_EXPR:
4891 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4892 as above. */
4893 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4894 OEP_ONLY_CONST)
4895 && operand_equal_p (arg01,
4896 const_binop (MINUS_EXPR, arg2,
4897 build_int_cst (type, 1)),
4898 OEP_ONLY_CONST))
4900 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4901 fold_convert_loc (loc, TREE_TYPE (arg00),
4902 arg2));
4903 return pedantic_non_lvalue_loc (loc,
4904 fold_convert_loc (loc, type, tem));
4906 break;
4908 case GT_EXPR:
4909 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4910 MAX_EXPR, to preserve the signedness of the comparison. */
4911 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4912 OEP_ONLY_CONST)
4913 && operand_equal_p (arg01,
4914 const_binop (MINUS_EXPR, arg2,
4915 build_int_cst (type, 1)),
4916 OEP_ONLY_CONST))
4918 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4919 fold_convert_loc (loc, TREE_TYPE (arg00),
4920 arg2));
4921 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4923 break;
4925 case GE_EXPR:
4926 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4927 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4928 OEP_ONLY_CONST)
4929 && operand_equal_p (arg01,
4930 const_binop (PLUS_EXPR, arg2,
4931 build_int_cst (type, 1)),
4932 OEP_ONLY_CONST))
4934 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4935 fold_convert_loc (loc, TREE_TYPE (arg00),
4936 arg2));
4937 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4939 break;
4940 case NE_EXPR:
4941 break;
4942 default:
4943 gcc_unreachable ();
4946 return NULL_TREE;
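/* As a concrete instance of the A op 0 ? A : -A rules above: for an
   integer-typed A, this function rewrites

     a >= 0 ? a : -a    into    ABS_EXPR <a>
     a <= 0 ? a : -a    into    -ABS_EXPR <a>

   while the same rewrites on floating types are suppressed when the
   mode honors signed zeros, or for the unordered variants when
   -ftrapping-math is in effect.  */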
4951 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4952 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4953 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4954 false) >= 2)
4955 #endif
4957 /* EXP is some logical combination of boolean tests. See if we can
4958 merge it into some range test. Return the new tree if so. */
4960 static tree
4961 fold_range_test (location_t loc, enum tree_code code, tree type,
4962 tree op0, tree op1)
4964 int or_op = (code == TRUTH_ORIF_EXPR
4965 || code == TRUTH_OR_EXPR);
4966 int in0_p, in1_p, in_p;
4967 tree low0, low1, low, high0, high1, high;
4968 bool strict_overflow_p = false;
4969 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4970 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4971 tree tem;
4972 const char * const warnmsg = G_("assuming signed overflow does not occur "
4973 "when simplifying range test");
4975 /* If this is an OR operation, invert both sides; we will invert
4976 again at the end. */
4977 if (or_op)
4978 in0_p = ! in0_p, in1_p = ! in1_p;
4980 /* If both expressions are the same, if we can merge the ranges, and we
4981 can build the range test, return it or it inverted. If one of the
4982 ranges is always true or always false, consider it to be the same
4983 expression as the other. */
4984 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4985 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4986 in1_p, low1, high1)
4987 && 0 != (tem = (build_range_check (loc, type,
4988 lhs != 0 ? lhs
4989 : rhs != 0 ? rhs : integer_zero_node,
4990 in_p, low, high))))
4992 if (strict_overflow_p)
4993 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4994 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4997 /* On machines where the branch cost is expensive, if this is a
4998 short-circuited branch and the underlying object on both sides
4999 is the same, make a non-short-circuit operation. */
5000 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5001 && lhs != 0 && rhs != 0
5002 && (code == TRUTH_ANDIF_EXPR
5003 || code == TRUTH_ORIF_EXPR)
5004 && operand_equal_p (lhs, rhs, 0))
5006 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5007 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5008 which cases we can't do this. */
5009 if (simple_operand_p (lhs))
5010 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5011 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5012 type, op0, op1);
5014 else if (!lang_hooks.decls.global_bindings_p ()
5015 && !CONTAINS_PLACEHOLDER_P (lhs))
5017 tree common = save_expr (lhs);
5019 if (0 != (lhs = build_range_check (loc, type, common,
5020 or_op ? ! in0_p : in0_p,
5021 low0, high0))
5022 && (0 != (rhs = build_range_check (loc, type, common,
5023 or_op ? ! in1_p : in1_p,
5024 low1, high1))))
5026 if (strict_overflow_p)
5027 fold_overflow_warning (warnmsg,
5028 WARN_STRICT_OVERFLOW_COMPARISON);
5029 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5030 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5031 type, lhs, rhs);
5036 return 0;
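/* For example, ch >= '0' && ch <= '9' is merged here into a single
   range test of the form (unsigned) (ch - '0') <= 9.  And when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and both make_range calls return
   the same simple operand, a TRUTH_ANDIF_EXPR is downgraded to
   TRUTH_AND_EXPR so that no branch is needed.  */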
5039 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5040 bit value. Arrange things so the extra bits will be set to zero if and
 5041 only if C is sign-extended to its full width. If MASK is nonzero,
5042 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5044 static tree
5045 unextend (tree c, int p, int unsignedp, tree mask)
5047 tree type = TREE_TYPE (c);
5048 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5049 tree temp;
5051 if (p == modesize || unsignedp)
5052 return c;
5054 /* We work by getting just the sign bit into the low-order bit, then
5055 into the high-order bit, then sign-extend. We then XOR that value
5056 with C. */
5057 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5058 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5060 /* We must use a signed type in order to get an arithmetic right shift.
5061 However, we must also avoid introducing accidental overflows, so that
5062 a subsequent call to integer_zerop will work. Hence we must
5063 do the type conversion here. At this point, the constant is either
5064 zero or one, and the conversion to a signed type can never overflow.
5065 We could get an overflow if this conversion is done anywhere else. */
5066 if (TYPE_UNSIGNED (type))
5067 temp = fold_convert (signed_type_for (type), temp);
5069 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5070 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5071 if (mask != 0)
5072 temp = const_binop (BIT_AND_EXPR, temp,
5073 fold_convert (TREE_TYPE (c), mask));
5074 /* If necessary, convert the type back to match the type of C. */
5075 if (TYPE_UNSIGNED (type))
5076 temp = fold_convert (type, temp);
5078 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
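/* A worked example: with P == 4 and an 8-bit mode, take C == 0xfa, the
   sign-extended form of the 4-bit field value 1010.  TEMP is the sign
   bit moved to the low-order bit (1), shifted to the high-order bit
   (0x80) and arithmetically shifted back down, giving 0xf0; then
   C ^ TEMP == 0x0a, so the extra bits are zero exactly because C was
   sign-extended.  Had C been 0x0a already, the XOR would instead set
   the high bits.  */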
5081 /* For an expression that has the form
 5082 (A && B) || ~B
 5083 or
 5084 (A || B) && ~B,
 5085 we can drop one of the inner expressions and simplify to
 5086 A || ~B
 5087 or
 5088 A && ~B
5089 LOC is the location of the resulting expression. OP is the inner
 5090 logical operation (the left-hand side in the examples above), while CMPOP
5091 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5092 removing a condition that guards another, as in
5093 (A != NULL && A->...) || A == NULL
5094 which we must not transform. If RHS_ONLY is true, only eliminate the
5095 right-most operand of the inner logical operation. */
5097 static tree
5098 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5099 bool rhs_only)
5101 tree type = TREE_TYPE (cmpop);
5102 enum tree_code code = TREE_CODE (cmpop);
5103 enum tree_code truthop_code = TREE_CODE (op);
5104 tree lhs = TREE_OPERAND (op, 0);
5105 tree rhs = TREE_OPERAND (op, 1);
5106 tree orig_lhs = lhs, orig_rhs = rhs;
5107 enum tree_code rhs_code = TREE_CODE (rhs);
5108 enum tree_code lhs_code = TREE_CODE (lhs);
5109 enum tree_code inv_code;
5111 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5112 return NULL_TREE;
5114 if (TREE_CODE_CLASS (code) != tcc_comparison)
5115 return NULL_TREE;
5117 if (rhs_code == truthop_code)
5119 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5120 if (newrhs != NULL_TREE)
5122 rhs = newrhs;
5123 rhs_code = TREE_CODE (rhs);
5126 if (lhs_code == truthop_code && !rhs_only)
5128 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5129 if (newlhs != NULL_TREE)
5131 lhs = newlhs;
5132 lhs_code = TREE_CODE (lhs);
5136 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5137 if (inv_code == rhs_code
5138 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5139 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5140 return lhs;
5141 if (!rhs_only && inv_code == lhs_code
5142 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5143 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5144 return rhs;
5145 if (rhs != orig_rhs || lhs != orig_lhs)
5146 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5147 lhs, rhs);
5148 return NULL_TREE;
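/* For example, with integer operands, OP == (x > 0 && y > 0) and
   CMPOP == y <= 0: the right arm y > 0 is the inverse of CMPOP, so

     (x > 0 && y > 0) || y <= 0

   simplifies to x > 0 || y <= 0.  With RHS_ONLY set, an inverted
   comparison in the left arm of OP is deliberately left alone so that
   a condition guarding another is never dropped.  */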
5151 /* Find ways of folding logical expressions of LHS and RHS:
5152 Try to merge two comparisons to the same innermost item.
5153 Look for range tests like "ch >= '0' && ch <= '9'".
5154 Look for combinations of simple terms on machines with expensive branches
5155 and evaluate the RHS unconditionally.
5157 For example, if we have p->a == 2 && p->b == 4 and we can make an
5158 object large enough to span both A and B, we can do this with a comparison
 5159 against the object ANDed with a mask.
5161 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5162 operations to do this with one comparison.
 5164 We check for both normal comparisons and the BIT_AND_EXPRs made by
 5165 this function and the one above.
5167 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5168 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5170 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5171 two operands.
5173 We return the simplified tree or 0 if no optimization is possible. */
5175 static tree
5176 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5177 tree lhs, tree rhs)
5179 /* If this is the "or" of two comparisons, we can do something if
5180 the comparisons are NE_EXPR. If this is the "and", we can do something
5181 if the comparisons are EQ_EXPR. I.e.,
5182 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5184 WANTED_CODE is this operation code. For single bit fields, we can
5185 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5186 comparison for one-bit fields. */
5188 enum tree_code wanted_code;
5189 enum tree_code lcode, rcode;
5190 tree ll_arg, lr_arg, rl_arg, rr_arg;
5191 tree ll_inner, lr_inner, rl_inner, rr_inner;
5192 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5193 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5194 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5195 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5196 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5197 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5198 enum machine_mode lnmode, rnmode;
5199 tree ll_mask, lr_mask, rl_mask, rr_mask;
5200 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5201 tree l_const, r_const;
5202 tree lntype, rntype, result;
5203 HOST_WIDE_INT first_bit, end_bit;
5204 int volatilep;
5206 /* Start by getting the comparison codes. Fail if anything is volatile.
5207 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5208 it were surrounded with a NE_EXPR. */
5210 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5211 return 0;
5213 lcode = TREE_CODE (lhs);
5214 rcode = TREE_CODE (rhs);
5216 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5218 lhs = build2 (NE_EXPR, truth_type, lhs,
5219 build_int_cst (TREE_TYPE (lhs), 0));
5220 lcode = NE_EXPR;
5223 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5225 rhs = build2 (NE_EXPR, truth_type, rhs,
5226 build_int_cst (TREE_TYPE (rhs), 0));
5227 rcode = NE_EXPR;
5230 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5231 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5232 return 0;
5234 ll_arg = TREE_OPERAND (lhs, 0);
5235 lr_arg = TREE_OPERAND (lhs, 1);
5236 rl_arg = TREE_OPERAND (rhs, 0);
5237 rr_arg = TREE_OPERAND (rhs, 1);
5239 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5240 if (simple_operand_p (ll_arg)
5241 && simple_operand_p (lr_arg))
5243 if (operand_equal_p (ll_arg, rl_arg, 0)
5244 && operand_equal_p (lr_arg, rr_arg, 0))
5246 result = combine_comparisons (loc, code, lcode, rcode,
5247 truth_type, ll_arg, lr_arg);
5248 if (result)
5249 return result;
5251 else if (operand_equal_p (ll_arg, rr_arg, 0)
5252 && operand_equal_p (lr_arg, rl_arg, 0))
5254 result = combine_comparisons (loc, code, lcode,
5255 swap_tree_comparison (rcode),
5256 truth_type, ll_arg, lr_arg);
5257 if (result)
5258 return result;
5262 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5263 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5265 /* If the RHS can be evaluated unconditionally and its operands are
5266 simple, it wins to evaluate the RHS unconditionally on machines
5267 with expensive branches. In this case, this isn't a comparison
5268 that can be merged. */
5270 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5271 false) >= 2
5272 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5273 && simple_operand_p (rl_arg)
5274 && simple_operand_p (rr_arg))
5276 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5277 if (code == TRUTH_OR_EXPR
5278 && lcode == NE_EXPR && integer_zerop (lr_arg)
5279 && rcode == NE_EXPR && integer_zerop (rr_arg)
5280 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5281 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5282 return build2_loc (loc, NE_EXPR, truth_type,
5283 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5284 ll_arg, rl_arg),
5285 build_int_cst (TREE_TYPE (ll_arg), 0));
5287 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5288 if (code == TRUTH_AND_EXPR
5289 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5290 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5291 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5292 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5293 return build2_loc (loc, EQ_EXPR, truth_type,
5294 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5295 ll_arg, rl_arg),
5296 build_int_cst (TREE_TYPE (ll_arg), 0));
5299 /* See if the comparisons can be merged. Then get all the parameters for
5300 each side. */
5302 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5303 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5304 return 0;
5306 volatilep = 0;
5307 ll_inner = decode_field_reference (loc, ll_arg,
5308 &ll_bitsize, &ll_bitpos, &ll_mode,
5309 &ll_unsignedp, &volatilep, &ll_mask,
5310 &ll_and_mask);
5311 lr_inner = decode_field_reference (loc, lr_arg,
5312 &lr_bitsize, &lr_bitpos, &lr_mode,
5313 &lr_unsignedp, &volatilep, &lr_mask,
5314 &lr_and_mask);
5315 rl_inner = decode_field_reference (loc, rl_arg,
5316 &rl_bitsize, &rl_bitpos, &rl_mode,
5317 &rl_unsignedp, &volatilep, &rl_mask,
5318 &rl_and_mask);
5319 rr_inner = decode_field_reference (loc, rr_arg,
5320 &rr_bitsize, &rr_bitpos, &rr_mode,
5321 &rr_unsignedp, &volatilep, &rr_mask,
5322 &rr_and_mask);
5324 /* It must be true that the inner operation on the lhs of each
5325 comparison must be the same if we are to be able to do anything.
5326 Then see if we have constants. If not, the same must be true for
5327 the rhs's. */
5328 if (volatilep || ll_inner == 0 || rl_inner == 0
5329 || ! operand_equal_p (ll_inner, rl_inner, 0))
5330 return 0;
5332 if (TREE_CODE (lr_arg) == INTEGER_CST
5333 && TREE_CODE (rr_arg) == INTEGER_CST)
5334 l_const = lr_arg, r_const = rr_arg;
5335 else if (lr_inner == 0 || rr_inner == 0
5336 || ! operand_equal_p (lr_inner, rr_inner, 0))
5337 return 0;
5338 else
5339 l_const = r_const = 0;
5341 /* If either comparison code is not correct for our logical operation,
5342 fail. However, we can convert a one-bit comparison against zero into
5343 the opposite comparison against that bit being set in the field. */
5345 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5346 if (lcode != wanted_code)
5348 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5350 /* Make the left operand unsigned, since we are only interested
5351 in the value of one bit. Otherwise we are doing the wrong
5352 thing below. */
5353 ll_unsignedp = 1;
5354 l_const = ll_mask;
5356 else
5357 return 0;
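/* For example, when the wanted code is NE_EXPR, a one-bit test such as
   (x & 8) == 0 is still usable: since a power-of-two masked field is
   either 0 or the mask itself, it is rewritten as the equivalent
   (x & 8) != 8.  */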
5360 /* This is analogous to the code for l_const above. */
5361 if (rcode != wanted_code)
5363 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5365 rl_unsignedp = 1;
5366 r_const = rl_mask;
5368 else
5369 return 0;
5372 /* See if we can find a mode that contains both fields being compared on
5373 the left. If we can't, fail. Otherwise, update all constants and masks
5374 to be relative to a field of that size. */
5375 first_bit = MIN (ll_bitpos, rl_bitpos);
5376 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5377 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5378 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5379 volatilep);
5380 if (lnmode == VOIDmode)
5381 return 0;
5383 lnbitsize = GET_MODE_BITSIZE (lnmode);
5384 lnbitpos = first_bit & ~ (lnbitsize - 1);
5385 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5386 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5388 if (BYTES_BIG_ENDIAN)
5390 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5391 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5394 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5395 size_int (xll_bitpos));
5396 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5397 size_int (xrl_bitpos));
5399 if (l_const)
5401 l_const = fold_convert_loc (loc, lntype, l_const);
5402 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5403 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5404 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5405 fold_build1_loc (loc, BIT_NOT_EXPR,
5406 lntype, ll_mask))))
5408 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5410 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5413 if (r_const)
5415 r_const = fold_convert_loc (loc, lntype, r_const);
5416 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5417 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5418 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5419 fold_build1_loc (loc, BIT_NOT_EXPR,
5420 lntype, rl_mask))))
5422 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5424 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5428 /* If the right sides are not constant, do the same for it. Also,
5429 disallow this optimization if a size or signedness mismatch occurs
5430 between the left and right sides. */
5431 if (l_const == 0)
5433 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5434 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5435 /* Make sure the two fields on the right
5436 correspond to the left without being swapped. */
5437 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5438 return 0;
5440 first_bit = MIN (lr_bitpos, rr_bitpos);
5441 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5442 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5443 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5444 volatilep);
5445 if (rnmode == VOIDmode)
5446 return 0;
5448 rnbitsize = GET_MODE_BITSIZE (rnmode);
5449 rnbitpos = first_bit & ~ (rnbitsize - 1);
5450 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5451 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5453 if (BYTES_BIG_ENDIAN)
5455 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5456 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5459 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5460 rntype, lr_mask),
5461 size_int (xlr_bitpos));
5462 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5463 rntype, rr_mask),
5464 size_int (xrr_bitpos));
5466 /* Make a mask that corresponds to both fields being compared.
5467 Do this for both items being compared. If the operands are the
5468 same size and the bits being compared are in the same position
5469 then we can do this by masking both and comparing the masked
5470 results. */
5471 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5472 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5473 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5475 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5476 ll_unsignedp || rl_unsignedp);
5477 if (! all_ones_mask_p (ll_mask, lnbitsize))
5478 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5480 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5481 lr_unsignedp || rr_unsignedp);
5482 if (! all_ones_mask_p (lr_mask, rnbitsize))
5483 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5485 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5488 /* There is still another way we can do something: If both pairs of
5489 fields being compared are adjacent, we may be able to make a wider
5490 field containing them both.
5492 Note that we still must mask the lhs/rhs expressions. Furthermore,
5493 the mask must be shifted to account for the shift done by
5494 make_bit_field_ref. */
5495 if ((ll_bitsize + ll_bitpos == rl_bitpos
5496 && lr_bitsize + lr_bitpos == rr_bitpos)
5497 || (ll_bitpos == rl_bitpos + rl_bitsize
5498 && lr_bitpos == rr_bitpos + rr_bitsize))
5500 tree type;
5502 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5503 ll_bitsize + rl_bitsize,
5504 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5505 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5506 lr_bitsize + rr_bitsize,
5507 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5509 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5510 size_int (MIN (xll_bitpos, xrl_bitpos)));
5511 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5512 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5514 /* Convert to the smaller type before masking out unwanted bits. */
5515 type = lntype;
5516 if (lntype != rntype)
5518 if (lnbitsize > rnbitsize)
5520 lhs = fold_convert_loc (loc, rntype, lhs);
5521 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5522 type = rntype;
5524 else if (lnbitsize < rnbitsize)
5526 rhs = fold_convert_loc (loc, lntype, rhs);
5527 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5528 type = lntype;
5532 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5533 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5535 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5536 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5538 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5541 return 0;
5544 /* Handle the case of comparisons with constants. If there is something in
5545 common between the masks, those bits of the constants must be the same.
5546 If not, the result is a known constant (see the warnings issued below).
5547 Test for this to avoid generating incorrect code. */
5548 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5549 if (! integer_zerop (result)
5550 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5551 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5553 if (wanted_code == NE_EXPR)
5555 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5556 return constant_boolean_node (true, truth_type);
5558 else
5560 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5561 return constant_boolean_node (false, truth_type);
5565 /* Construct the expression we will return. First get the component
5566 reference we will make. Unless the mask is all ones the width of
5567 that field, perform the mask operation. Then compare with the
5568 merged constant. */
5569 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5570 ll_unsignedp || rl_unsignedp);
5572 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5573 if (! all_ones_mask_p (ll_mask, lnbitsize))
5574 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5576 return build2_loc (loc, wanted_code, truth_type, result,
5577 const_binop (BIT_IOR_EXPR, l_const, r_const));
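/* As an illustrative sketch (struct and field names hypothetical): for
   adjacent bitfields in one word, "s.a == 1 && s.b == 2" can be merged by
   the code above into a single word load, one BIT_AND_EXPR with the
   combined mask, and one comparison against the combined constant. */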
5580 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5581 constant. */
5583 static tree
5584 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5585 tree op0, tree op1)
5587 tree arg0 = op0;
5588 enum tree_code op_code;
5589 tree comp_const;
5590 tree minmax_const;
5591 int consts_equal, consts_lt;
5592 tree inner;
5594 STRIP_SIGN_NOPS (arg0);
5596 op_code = TREE_CODE (arg0);
5597 minmax_const = TREE_OPERAND (arg0, 1);
5598 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5599 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5600 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5601 inner = TREE_OPERAND (arg0, 0);
5603 /* If something does not permit us to optimize, return NULL_TREE. */
5604 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5605 || TREE_CODE (comp_const) != INTEGER_CST
5606 || TREE_OVERFLOW (comp_const)
5607 || TREE_CODE (minmax_const) != INTEGER_CST
5608 || TREE_OVERFLOW (minmax_const))
5609 return NULL_TREE;
5611 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5612 and GT_EXPR, doing the rest with recursive calls using logical
5613 simplifications. */
5614 switch (code)
5616 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5618 tree tem
5619 = optimize_minmax_comparison (loc,
5620 invert_tree_comparison (code, false),
5621 type, op0, op1);
5622 if (tem)
5623 return invert_truthvalue_loc (loc, tem);
5624 return NULL_TREE;
5627 case GE_EXPR:
5628 return
5629 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5630 optimize_minmax_comparison
5631 (loc, EQ_EXPR, type, arg0, comp_const),
5632 optimize_minmax_comparison
5633 (loc, GT_EXPR, type, arg0, comp_const));
5635 case EQ_EXPR:
5636 if (op_code == MAX_EXPR && consts_equal)
5637 /* MAX (X, 0) == 0 -> X <= 0 */
5638 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5640 else if (op_code == MAX_EXPR && consts_lt)
5641 /* MAX (X, 0) == 5 -> X == 5 */
5642 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5644 else if (op_code == MAX_EXPR)
5645 /* MAX (X, 0) == -1 -> false */
5646 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5648 else if (consts_equal)
5649 /* MIN (X, 0) == 0 -> X >= 0 */
5650 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5652 else if (consts_lt)
5653 /* MIN (X, 0) == 5 -> false */
5654 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5656 else
5657 /* MIN (X, 0) == -1 -> X == -1 */
5658 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5660 case GT_EXPR:
5661 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5662 /* MAX (X, 0) > 0 -> X > 0
5663 MAX (X, 0) > 5 -> X > 5 */
5664 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5666 else if (op_code == MAX_EXPR)
5667 /* MAX (X, 0) > -1 -> true */
5668 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5670 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5671 /* MIN (X, 0) > 0 -> false
5672 MIN (X, 0) > 5 -> false */
5673 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5675 else
5676 /* MIN (X, 0) > -1 -> X > -1 */
5677 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5679 default:
5680 return NULL_TREE;
5684 /* T is an integer expression that is being multiplied or divided by, or
5685 taken modulo, a constant C (CODE says which operation and what kind of
5686 divide or modulus). See if we can eliminate that operation by folding it with
5687 other operations already in T. WIDE_TYPE, if non-null, is a type that
5688 should be used for the computation if wider than our type.
5690 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5691 (X * 2) + (Y * 4). We must, however, be assured that either the original
5692 expression would not overflow or that overflow is undefined for the type
5693 in the language in question.
5695 If we return a non-null expression, it is an equivalent form of the
5696 original computation, but need not be in the original type.
5698 We set *STRICT_OVERFLOW_P to true if the return value depends on
5699 signed overflow being undefined. Otherwise we do not change
5700 *STRICT_OVERFLOW_P. */
5702 static tree
5703 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5704 bool *strict_overflow_p)
5706 /* To avoid exponential search depth, refuse to allow recursion past
5707 three levels. Beyond that (1) it's highly unlikely that we'll find
5708 something interesting and (2) we've probably processed it before
5709 when we built the inner expression. */
5711 static int depth;
5712 tree ret;
5714 if (depth > 3)
5715 return NULL;
5717 depth++;
5718 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5719 depth--;
5721 return ret;
5724 static tree
5725 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5726 bool *strict_overflow_p)
5728 tree type = TREE_TYPE (t);
5729 enum tree_code tcode = TREE_CODE (t);
5730 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5731 > GET_MODE_SIZE (TYPE_MODE (type)))
5732 ? wide_type : type);
5733 tree t1, t2;
5734 int same_p = tcode == code;
5735 tree op0 = NULL_TREE, op1 = NULL_TREE;
5736 bool sub_strict_overflow_p;
5738 /* Don't deal with constants of zero here; they confuse the code below. */
5739 if (integer_zerop (c))
5740 return NULL_TREE;
5742 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5743 op0 = TREE_OPERAND (t, 0);
5745 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5746 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5748 /* Note that we need not handle conditional operations here since fold
5749 already handles those cases. So just do arithmetic here. */
5750 switch (tcode)
5752 case INTEGER_CST:
5753 /* For a constant, we can always simplify if we are a multiply
5754 or (for divide and modulus) if it is a multiple of our constant. */
5755 if (code == MULT_EXPR
5756 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5757 return const_binop (code, fold_convert (ctype, t),
5758 fold_convert (ctype, c));
5759 break;
5761 CASE_CONVERT: case NON_LVALUE_EXPR:
5762 /* If op0 is an expression ... */
5763 if ((COMPARISON_CLASS_P (op0)
5764 || UNARY_CLASS_P (op0)
5765 || BINARY_CLASS_P (op0)
5766 || VL_EXP_CLASS_P (op0)
5767 || EXPRESSION_CLASS_P (op0))
5768 /* ... and has wrapping overflow, and its type is smaller
5769 than ctype, then we cannot pass through as widening. */
5770 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5771 && (TYPE_PRECISION (ctype)
5772 > TYPE_PRECISION (TREE_TYPE (op0))))
5773 /* ... or this is a truncation (t is narrower than op0),
5774 then we cannot pass through this narrowing. */
5775 || (TYPE_PRECISION (type)
5776 < TYPE_PRECISION (TREE_TYPE (op0)))
5777 /* ... or signedness changes for division or modulus,
5778 then we cannot pass through this conversion. */
5779 || (code != MULT_EXPR
5780 && (TYPE_UNSIGNED (ctype)
5781 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5782 /* ... or the inner type has undefined overflow while the type we
5783 convert to does not, then we cannot do the operation in the inner type
5784 as that would introduce undefined overflow. */
5785 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5786 && !TYPE_OVERFLOW_UNDEFINED (type))))
5787 break;
5789 /* Pass the constant down and see if we can make a simplification. If
5790 we can, replace this expression with the inner simplification for
5791 possible later conversion to our or some other type. */
5792 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5793 && TREE_CODE (t2) == INTEGER_CST
5794 && !TREE_OVERFLOW (t2)
5795 && (0 != (t1 = extract_muldiv (op0, t2, code,
5796 code == MULT_EXPR
5797 ? ctype : NULL_TREE,
5798 strict_overflow_p))))
5799 return t1;
5800 break;
5802 case ABS_EXPR:
5803 /* If widening the type changes it from signed to unsigned, then we
5804 must avoid building ABS_EXPR itself as unsigned. */
5805 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5807 tree cstype = (*signed_type_for) (ctype);
5808 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5809 != 0)
5811 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5812 return fold_convert (ctype, t1);
5814 break;
5816 /* If the constant is negative, we cannot simplify this. */
5817 if (tree_int_cst_sgn (c) == -1)
5818 break;
5819 /* FALLTHROUGH */
5820 case NEGATE_EXPR:
5821 /* For division and modulus, type can't be unsigned, as e.g.
5822 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5823 For signed types, even with wrapping overflow, this is fine. */
5824 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5825 break;
5826 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5827 != 0)
5828 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5829 break;
5831 case MIN_EXPR: case MAX_EXPR:
5832 /* If widening the type changes the signedness, then we can't perform
5833 this optimization as that changes the result. */
5834 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5835 break;
5837 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5838 sub_strict_overflow_p = false;
5839 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5840 &sub_strict_overflow_p)) != 0
5841 && (t2 = extract_muldiv (op1, c, code, wide_type,
5842 &sub_strict_overflow_p)) != 0)
5844 if (tree_int_cst_sgn (c) < 0)
5845 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5846 if (sub_strict_overflow_p)
5847 *strict_overflow_p = true;
5848 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5849 fold_convert (ctype, t2));
5851 break;
5853 case LSHIFT_EXPR: case RSHIFT_EXPR:
5854 /* If the second operand is constant, this is a multiplication
5855 or floor division, by a power of two, so we can treat it that
5856 way unless the multiplier or divisor overflows. Signed
5857 left-shift overflow is implementation-defined rather than
5858 undefined in C90, so do not convert signed left shift into
5859 multiplication. */
5860 if (TREE_CODE (op1) == INTEGER_CST
5861 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5862 /* const_binop may not detect overflow correctly,
5863 so check for it explicitly here. */
5864 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5865 && TREE_INT_CST_HIGH (op1) == 0
5866 && 0 != (t1 = fold_convert (ctype,
5867 const_binop (LSHIFT_EXPR,
5868 size_one_node,
5869 op1)))
5870 && !TREE_OVERFLOW (t1))
5871 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5872 ? MULT_EXPR : FLOOR_DIV_EXPR,
5873 ctype,
5874 fold_convert (ctype, op0),
5875 t1),
5876 c, code, wide_type, strict_overflow_p);
5877 break;
5879 case PLUS_EXPR: case MINUS_EXPR:
5880 /* See if we can eliminate the operation on both sides. If we can, we
5881 can return a new PLUS or MINUS. If we can't, the only remaining
5882 cases where we can do anything are if the second operand is a
5883 constant. */
5884 sub_strict_overflow_p = false;
5885 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5886 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5887 if (t1 != 0 && t2 != 0
5888 && (code == MULT_EXPR
5889 /* If not multiplication, we can only do this if both operands
5890 are divisible by c. */
5891 || (multiple_of_p (ctype, op0, c)
5892 && multiple_of_p (ctype, op1, c))))
5894 if (sub_strict_overflow_p)
5895 *strict_overflow_p = true;
5896 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5897 fold_convert (ctype, t2));
5900 /* If this was a subtraction, negate OP1 and set it to be an addition.
5901 This simplifies the logic below. */
5902 if (tcode == MINUS_EXPR)
5904 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5905 /* If OP1 was not easily negatable, the constant may be OP0. */
5906 if (TREE_CODE (op0) == INTEGER_CST)
5908 tree tem = op0;
5909 op0 = op1;
5910 op1 = tem;
5911 tem = t1;
5912 t1 = t2;
5913 t2 = tem;
5917 if (TREE_CODE (op1) != INTEGER_CST)
5918 break;
5920 /* If either OP1 or C are negative, this optimization is not safe for
5921 some of the division and remainder types while for others we need
5922 to change the code. */
5923 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5925 if (code == CEIL_DIV_EXPR)
5926 code = FLOOR_DIV_EXPR;
5927 else if (code == FLOOR_DIV_EXPR)
5928 code = CEIL_DIV_EXPR;
5929 else if (code != MULT_EXPR
5930 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5931 break;
5934 /* If it's a multiply or a division/modulus operation of a multiple
5935 of our constant, do the operation and verify it doesn't overflow. */
5936 if (code == MULT_EXPR
5937 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5939 op1 = const_binop (code, fold_convert (ctype, op1),
5940 fold_convert (ctype, c));
5941 /* We allow the constant to overflow with wrapping semantics. */
5942 if (op1 == 0
5943 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5944 break;
5946 else
5947 break;
5949 /* If we have an unsigned type, we cannot widen the operation since it
5950 will change the result if the original computation overflowed. */
5951 if (TYPE_UNSIGNED (ctype) && ctype != type)
5952 break;
5954 /* If we were able to eliminate our operation from the first side,
5955 apply our operation to the second side and reform the PLUS. */
5956 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5959 /* The last case is if we are a multiply. In that case, we can
5960 apply the distributive law to commute the multiply and addition
5961 if the multiplication of the constants doesn't overflow
5962 and overflow is defined. With undefined overflow
5963 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5964 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5965 return fold_build2 (tcode, ctype,
5966 fold_build2 (code, ctype,
5967 fold_convert (ctype, op0),
5968 fold_convert (ctype, c)),
5969 op1);
5971 break;
5973 case MULT_EXPR:
5974 /* We have a special case here if we are doing something like
5975 (C * 8) % 4 since we know that's zero. */
5976 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5977 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5978 /* If the multiplication can overflow we cannot optimize this. */
5979 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5980 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5981 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5983 *strict_overflow_p = true;
5984 return omit_one_operand (type, integer_zero_node, op0);
5987 /* ... fall through ... */
5989 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5990 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5991 /* If we can extract our operation from the LHS, do so and return a
5992 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5993 do something only if the second operand is a constant. */
5994 if (same_p
5995 && (t1 = extract_muldiv (op0, c, code, wide_type,
5996 strict_overflow_p)) != 0)
5997 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5998 fold_convert (ctype, op1));
5999 else if (tcode == MULT_EXPR && code == MULT_EXPR
6000 && (t1 = extract_muldiv (op1, c, code, wide_type,
6001 strict_overflow_p)) != 0)
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype, t1));
6004 else if (TREE_CODE (op1) != INTEGER_CST)
6005 return 0;
6007 /* If these are the same operation types, we can associate them
6008 assuming no overflow. */
6009 if (tcode == code)
6011 double_int mul;
6012 bool overflow_p;
6013 unsigned prec = TYPE_PRECISION (ctype);
6014 bool uns = TYPE_UNSIGNED (ctype);
6015 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6016 double_int dic = tree_to_double_int (c).ext (prec, uns);
6017 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6018 overflow_p = ((!uns && overflow_p)
6019 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6020 if (!double_int_fits_to_tree_p (ctype, mul)
6021 && ((uns && tcode != MULT_EXPR) || !uns))
6022 overflow_p = 1;
6023 if (!overflow_p)
6024 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6025 double_int_to_tree (ctype, mul));
6028 /* If these operations "cancel" each other, we have the main
6029 optimizations of this pass, which occur when either constant is a
6030 multiple of the other, in which case we replace this with either an
6031 operation of CODE or TCODE.
6033 If we have an unsigned type, we cannot do this since it will change
6034 the result if the original computation overflowed. */
6035 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6036 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6037 || (tcode == MULT_EXPR
6038 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6039 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6040 && code != MULT_EXPR)))
6042 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6044 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6045 *strict_overflow_p = true;
6046 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6047 fold_convert (ctype,
6048 const_binop (TRUNC_DIV_EXPR,
6049 op1, c)));
6051 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6053 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6054 *strict_overflow_p = true;
6055 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6056 fold_convert (ctype,
6057 const_binop (TRUNC_DIV_EXPR,
6058 c, op1)));
6061 break;
6063 default:
6064 break;
6067 return 0;
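/* A worked example of the machinery above (a sketch, not a full trace):
   extract_muldiv on T = (X * 8) + 16 with C = 4 and CODE = TRUNC_DIV_EXPR
   descends the PLUS_EXPR, rewriting X * 8 as X * 2 and 16 as 4, giving
   (X * 2) + 4, as in the example in the header comment. */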
6070 /* Return a node which has the indicated constant VALUE (either 0 or
6071 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6072 and is of the indicated TYPE. */
6074 tree
6075 constant_boolean_node (bool value, tree type)
6077 if (type == integer_type_node)
6078 return value ? integer_one_node : integer_zero_node;
6079 else if (type == boolean_type_node)
6080 return value ? boolean_true_node : boolean_false_node;
6081 else if (TREE_CODE (type) == VECTOR_TYPE)
6082 return build_vector_from_val (type,
6083 build_int_cst (TREE_TYPE (type),
6084 value ? -1 : 0));
6085 else
6086 return fold_convert (type, value ? integer_one_node : integer_zero_node);
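/* For example, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while a vector TYPE yields a vector whose elements
   are all ones, the tree-level representation of a per-lane true result. */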
6090 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6091 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6092 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6093 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6094 COND is the first argument to CODE; otherwise (as in the example
6095 given here), it is the second argument. TYPE is the type of the
6096 original expression. Return NULL_TREE if no simplification is
6097 possible. */
6099 static tree
6100 fold_binary_op_with_conditional_arg (location_t loc,
6101 enum tree_code code,
6102 tree type, tree op0, tree op1,
6103 tree cond, tree arg, int cond_first_p)
6105 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6106 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6107 tree test, true_value, false_value;
6108 tree lhs = NULL_TREE;
6109 tree rhs = NULL_TREE;
6110 enum tree_code cond_code = COND_EXPR;
6112 if (TREE_CODE (cond) == COND_EXPR
6113 || TREE_CODE (cond) == VEC_COND_EXPR)
6115 test = TREE_OPERAND (cond, 0);
6116 true_value = TREE_OPERAND (cond, 1);
6117 false_value = TREE_OPERAND (cond, 2);
6118 /* If this operand is a throw expression (and thus has void type), it
6119 does not make sense to try to perform a logical or arithmetic
6120 operation involving it. */
6121 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6122 lhs = true_value;
6123 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6124 rhs = false_value;
6126 else
6128 tree testtype = TREE_TYPE (cond);
6129 test = cond;
6130 true_value = constant_boolean_node (true, testtype);
6131 false_value = constant_boolean_node (false, testtype);
6134 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6135 cond_code = VEC_COND_EXPR;
6137 /* This transformation is only worthwhile if we don't have to wrap ARG
6138 in a SAVE_EXPR and the operation can be simplified without recursing
6139 on at least one of the branches once it is pushed inside the COND_EXPR. */
6140 if (!TREE_CONSTANT (arg)
6141 && (TREE_SIDE_EFFECTS (arg)
6142 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6143 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6144 return NULL_TREE;
6146 arg = fold_convert_loc (loc, arg_type, arg);
6147 if (lhs == 0)
6149 true_value = fold_convert_loc (loc, cond_type, true_value);
6150 if (cond_first_p)
6151 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6152 else
6153 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6155 if (rhs == 0)
6157 false_value = fold_convert_loc (loc, cond_type, false_value);
6158 if (cond_first_p)
6159 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6160 else
6161 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6164 /* Check that we have simplified at least one of the branches. */
6165 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6166 return NULL_TREE;
6168 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
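/* For instance, with COND_FIRST_P set, "(x < y) + 3" is rewritten as
   "(x < y) ? (1 + 3) : (0 + 3)"; since ARG is constant, the profitability
   checks above are satisfied, and the arms fold to 4 and 3. */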
6172 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6174 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6175 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6176 ADDEND is the same as X.
6178 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6179 and finite. The problematic cases are when X is zero, and its mode
6180 has signed zeros. In the case of rounding towards -infinity,
6181 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6182 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6184 bool
6185 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6187 if (!real_zerop (addend))
6188 return false;
6190 /* Don't allow the fold with -fsignaling-nans. */
6191 if (HONOR_SNANS (TYPE_MODE (type)))
6192 return false;
6194 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6195 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6196 return true;
6198 /* In a vector or complex, we would need to check the sign of all zeros. */
6199 if (TREE_CODE (addend) != REAL_CST)
6200 return false;
6202 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6203 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6204 negate = !negate;
6206 /* The mode has signed zeros, and we have to honor their sign.
6207 In this situation, there is only one case we can return true for.
6208 X - 0 is the same as X unless rounding towards -infinity is
6209 supported. */
6210 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
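/* Concretely: "x - 0.0" folds to "x" unless sign-dependent rounding must
   be honored, while "x + 0.0" cannot fold once signed zeros matter,
   because (-0.0) + 0.0 is +0.0, not -0.0. */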
6213 /* Subroutine of fold() that checks comparisons of built-in math
6214 functions against real constants.
6216 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6217 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6218 is the type of the result and ARG0 and ARG1 are the operands of the
6219 comparison. ARG1 must be a TREE_REAL_CST.
6221 The function returns the constant folded tree if a simplification
6222 can be made, and NULL_TREE otherwise. */
6224 static tree
6225 fold_mathfn_compare (location_t loc,
6226 enum built_in_function fcode, enum tree_code code,
6227 tree type, tree arg0, tree arg1)
6229 REAL_VALUE_TYPE c;
6231 if (BUILTIN_SQRT_P (fcode))
6233 tree arg = CALL_EXPR_ARG (arg0, 0);
6234 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6236 c = TREE_REAL_CST (arg1);
6237 if (REAL_VALUE_NEGATIVE (c))
6239 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false if y is negative. */
6240 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6241 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6243 /* sqrt(x) != y is always true; so is sqrt(x) > y when we don't
6244 care about NaNs, i.e. negative values of x. */
6245 if (code == NE_EXPR || !HONOR_NANS (mode))
6246 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6248 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6249 return fold_build2_loc (loc, GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg), dconst0));
6252 else if (code == GT_EXPR || code == GE_EXPR)
6254 REAL_VALUE_TYPE c2;
6256 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6257 real_convert (&c2, mode, &c2);
6259 if (REAL_VALUE_ISINF (c2))
6261 /* sqrt(x) > y is x == +Inf, when y is very large. */
6262 if (HONOR_INFINITIES (mode))
6263 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6264 build_real (TREE_TYPE (arg), c2));
6266 /* sqrt(x) > y is always false, when y is very large
6267 and we don't care about infinities. */
6268 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6271 /* sqrt(x) > c is the same as x > c*c. */
6272 return fold_build2_loc (loc, code, type, arg,
6273 build_real (TREE_TYPE (arg), c2));
6275 else if (code == LT_EXPR || code == LE_EXPR)
6277 REAL_VALUE_TYPE c2;
6279 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6280 real_convert (&c2, mode, &c2);
6282 if (REAL_VALUE_ISINF (c2))
6284 /* sqrt(x) < y is always true, when y is a very large
6285 value and we don't care about NaNs or Infinities. */
6286 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6287 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6289 /* sqrt(x) < y is x != +Inf when y is very large and we
6290 don't care about NaNs. */
6291 if (! HONOR_NANS (mode))
6292 return fold_build2_loc (loc, NE_EXPR, type, arg,
6293 build_real (TREE_TYPE (arg), c2));
6295 /* sqrt(x) < y is x >= 0 when y is very large and we
6296 don't care about Infinities. */
6297 if (! HONOR_INFINITIES (mode))
6298 return fold_build2_loc (loc, GE_EXPR, type, arg,
6299 build_real (TREE_TYPE (arg), dconst0));
6301 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6302 arg = save_expr (arg);
6303 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6304 fold_build2_loc (loc, GE_EXPR, type, arg,
6305 build_real (TREE_TYPE (arg),
6306 dconst0)),
6307 fold_build2_loc (loc, NE_EXPR, type, arg,
6308 build_real (TREE_TYPE (arg),
6309 c2)));
6312 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6313 if (! HONOR_NANS (mode))
6314 return fold_build2_loc (loc, code, type, arg,
6315 build_real (TREE_TYPE (arg), c2));
6317 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6318 arg = save_expr (arg);
6319 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6320 fold_build2_loc (loc, GE_EXPR, type, arg,
6321 build_real (TREE_TYPE (arg),
6322 dconst0)),
6323 fold_build2_loc (loc, code, type, arg,
6324 build_real (TREE_TYPE (arg),
6325 c2)));
6329 return NULL_TREE;
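/* Two examples of the sqrt cases above: "sqrt(x) > 2.0" becomes "x > 4.0"
   (both sides are false for negative or NaN x, so this is safe), and for
   negative y with NaNs honored, "sqrt(x) > y" becomes the domain test
   "x >= 0.0". */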
6332 /* Subroutine of fold() that optimizes comparisons against Infinities,
6333 either +Inf or -Inf.
6335 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6336 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6337 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6339 The function returns the constant folded tree if a simplification
6340 can be made, and NULL_TREE otherwise. */
6342 static tree
6343 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6344 tree arg0, tree arg1)
6346 enum machine_mode mode;
6347 REAL_VALUE_TYPE max;
6348 tree temp;
6349 bool neg;
6351 mode = TYPE_MODE (TREE_TYPE (arg0));
6353 /* For negative infinity swap the sense of the comparison. */
6354 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6355 if (neg)
6356 code = swap_tree_comparison (code);
6358 switch (code)
6360 case GT_EXPR:
6361 /* x > +Inf is always false, if we ignore sNaNs. */
6362 if (HONOR_SNANS (mode))
6363 return NULL_TREE;
6364 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6366 case LE_EXPR:
6367 /* x <= +Inf is always true, if we don't care about NaNs. */
6368 if (! HONOR_NANS (mode))
6369 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6371 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6372 arg0 = save_expr (arg0);
6373 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6375 case EQ_EXPR:
6376 case GE_EXPR:
6377 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6378 real_maxval (&max, neg, mode);
6379 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6380 arg0, build_real (TREE_TYPE (arg0), max));
6382 case LT_EXPR:
6383 /* x < +Inf is always equal to x <= DBL_MAX. */
6384 real_maxval (&max, neg, mode);
6385 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6386 arg0, build_real (TREE_TYPE (arg0), max));
6388 case NE_EXPR:
6389 /* x != +Inf is always equal to !(x > DBL_MAX). */
6390 real_maxval (&max, neg, mode);
6391 if (! HONOR_NANS (mode))
6392 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6393 arg0, build_real (TREE_TYPE (arg0), max));
6395 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6396 arg0, build_real (TREE_TYPE (arg0), max));
6397 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6399 default:
6400 break;
6403 return NULL_TREE;
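/* For double, say: "x < +Inf" becomes "x <= DBL_MAX" and "x == +Inf"
   becomes "x > DBL_MAX"; the NEG path handles -Inf by swapping the
   comparison sense first. */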
6406 /* Subroutine of fold() that optimizes comparisons of a division by
6407 a nonzero integer constant against an integer constant, i.e.
6408 X/C1 op C2.
6410 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6411 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6412 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6414 The function returns the constant folded tree if a simplification
6415 can be made, and NULL_TREE otherwise. */
6417 static tree
6418 fold_div_compare (location_t loc,
6419 enum tree_code code, tree type, tree arg0, tree arg1)
6421 tree prod, tmp, hi, lo;
6422 tree arg00 = TREE_OPERAND (arg0, 0);
6423 tree arg01 = TREE_OPERAND (arg0, 1);
6424 double_int val;
6425 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6426 bool neg_overflow;
6427 bool overflow;
6429 /* We have to do this the hard way to detect unsigned overflow.
6430 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6431 val = TREE_INT_CST (arg01)
6432 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6433 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6434 neg_overflow = false;
6436 if (unsigned_p)
6438 tmp = int_const_binop (MINUS_EXPR, arg01,
6439 build_int_cst (TREE_TYPE (arg01), 1));
6440 lo = prod;
6442 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6443 val = TREE_INT_CST (prod)
6444 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6445 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6446 -1, overflow | TREE_OVERFLOW (prod));
6448 else if (tree_int_cst_sgn (arg01) >= 0)
6450 tmp = int_const_binop (MINUS_EXPR, arg01,
6451 build_int_cst (TREE_TYPE (arg01), 1));
6452 switch (tree_int_cst_sgn (arg1))
6454 case -1:
6455 neg_overflow = true;
6456 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6457 hi = prod;
6458 break;
6460 case 0:
6461 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6462 hi = tmp;
6463 break;
6465 case 1:
6466 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6467 lo = prod;
6468 break;
6470 default:
6471 gcc_unreachable ();
6474 else
6476 /* A negative divisor reverses the relational operators. */
6477 code = swap_tree_comparison (code);
6479 tmp = int_const_binop (PLUS_EXPR, arg01,
6480 build_int_cst (TREE_TYPE (arg01), 1));
6481 switch (tree_int_cst_sgn (arg1))
6483 case -1:
6484 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6485 lo = prod;
6486 break;
6488 case 0:
6489 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6490 lo = tmp;
6491 break;
6493 case 1:
6494 neg_overflow = true;
6495 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6496 hi = prod;
6497 break;
6499 default:
6500 gcc_unreachable ();
6504 switch (code)
6506 case EQ_EXPR:
6507 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6508 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6509 if (TREE_OVERFLOW (hi))
6510 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6511 if (TREE_OVERFLOW (lo))
6512 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6513 return build_range_check (loc, type, arg00, 1, lo, hi);
6515 case NE_EXPR:
6516 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6517 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6518 if (TREE_OVERFLOW (hi))
6519 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6520 if (TREE_OVERFLOW (lo))
6521 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6522 return build_range_check (loc, type, arg00, 0, lo, hi);
6524 case LT_EXPR:
6525 if (TREE_OVERFLOW (lo))
6527 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6528 return omit_one_operand_loc (loc, type, tmp, arg00);
6530 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6532 case LE_EXPR:
6533 if (TREE_OVERFLOW (hi))
6535 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6536 return omit_one_operand_loc (loc, type, tmp, arg00);
6538 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6540 case GT_EXPR:
6541 if (TREE_OVERFLOW (hi))
6543 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6544 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6548 case GE_EXPR:
6549 if (TREE_OVERFLOW (lo))
6551 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6552 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6556 default:
6557 break;
6560 return NULL_TREE;
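/* A worked instance of the range construction above: for unsigned X,
   "X / 4 == 2" gives prod == 8 and tmp == 3, hence lo == 8 and hi == 11,
   and the result is the range check 8 <= X && X <= 11 -- exactly the X
   for which truncating division by 4 yields 2. */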
6564 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6565 equality/inequality test, then return a simplified form of the test
6566 using a sign test. Otherwise return NULL_TREE. TYPE is the desired
6567 result type. */
6569 static tree
6570 fold_single_bit_test_into_sign_test (location_t loc,
6571 enum tree_code code, tree arg0, tree arg1,
6572 tree result_type)
6574 /* If this is testing a single bit, we can optimize the test. */
6575 if ((code == NE_EXPR || code == EQ_EXPR)
6576 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6577 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6579 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6580 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6581 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6583 if (arg00 != NULL_TREE
6584 /* This is only a win if casting to a signed type is cheap,
6585 i.e. when arg00's type is not a partial mode. */
6586 && TYPE_PRECISION (TREE_TYPE (arg00))
6587 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6589 tree stype = signed_type_for (TREE_TYPE (arg00));
6590 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6591 result_type,
6592 fold_convert_loc (loc, stype, arg00),
6593 build_int_cst (stype, 0));
6597 return NULL_TREE;
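/* For example, if A has a 32-bit unsigned type, "(A & 0x80000000) != 0"
   tests exactly the sign bit and is rewritten as "(int) A < 0"; the
   EQ_EXPR form becomes "(int) A >= 0". */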
6600 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6601 equality/inequality test, then return a simplified form of
6602 the test using shifts and logical operations. Otherwise return
6603 NULL. TYPE is the desired result type. */
6605 tree
6606 fold_single_bit_test (location_t loc, enum tree_code code,
6607 tree arg0, tree arg1, tree result_type)
6609 /* If this is testing a single bit, we can optimize the test. */
6610 if ((code == NE_EXPR || code == EQ_EXPR)
6611 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6612 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6614 tree inner = TREE_OPERAND (arg0, 0);
6615 tree type = TREE_TYPE (arg0);
6616 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6617 enum machine_mode operand_mode = TYPE_MODE (type);
6618 int ops_unsigned;
6619 tree signed_type, unsigned_type, intermediate_type;
6620 tree tem, one;
6622 /* First, see if we can fold the single bit test into a sign-bit
6623 test. */
6624 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6625 result_type);
6626 if (tem)
6627 return tem;
6629 /* Otherwise we have (A & C) != 0 where C is a single bit,
6630 convert that into ((A >> C2) & 1), where C2 = log2(C).
6631 Similarly for (A & C) == 0. */
6633 /* If INNER is a right shift of a constant and it plus BITNUM does
6634 not overflow, adjust BITNUM and INNER. */
6635 if (TREE_CODE (inner) == RSHIFT_EXPR
6636 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6637 && host_integerp (TREE_OPERAND (inner, 1), 1)
6638 && bitnum < TYPE_PRECISION (type)
6639 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6640 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6642 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6643 inner = TREE_OPERAND (inner, 0);
6646 /* If we are going to be able to omit the AND below, we must do our
6647 operations as unsigned. If we must use the AND, we have a choice.
6648 Normally unsigned is faster, but for some machines signed is. */
6649 #ifdef LOAD_EXTEND_OP
6650 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6651 && !flag_syntax_only) ? 0 : 1;
6652 #else
6653 ops_unsigned = 1;
6654 #endif
6656 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6657 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6658 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6659 inner = fold_convert_loc (loc, intermediate_type, inner);
6661 if (bitnum != 0)
6662 inner = build2 (RSHIFT_EXPR, intermediate_type,
6663 inner, size_int (bitnum));
6665 one = build_int_cst (intermediate_type, 1);
6667 if (code == EQ_EXPR)
6668 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6670 /* Put the AND last so it can combine with more things. */
6671 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6673 /* Make sure to return the proper type. */
6674 inner = fold_convert_loc (loc, result_type, inner);
6676 return inner;
6678 return NULL_TREE;
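/* For example, "(A & 8) != 0" becomes "((unsigned) A >> 3) & 1" (modulo
   the signedness choice made for INTERMEDIATE_TYPE above), while the
   EQ_EXPR form XORs with 1 before the final AND. */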
6681 /* Check whether we are allowed to reorder operands arg0 and arg1,
6682 such that the evaluation of arg1 occurs before arg0. */
6684 static bool
6685 reorder_operands_p (const_tree arg0, const_tree arg1)
6687 if (! flag_evaluation_order)
6688 return true;
6689 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6690 return true;
6691 return ! TREE_SIDE_EFFECTS (arg0)
6692 && ! TREE_SIDE_EFFECTS (arg1);
6695 /* Test whether it is preferable to swap two operands, ARG0 and
6696 ARG1, for example because ARG0 is an integer constant and ARG1
6697 isn't. If REORDER is true, only recommend swapping if we can
6698 evaluate the operands in reverse order. */
6700 bool
6701 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6703 STRIP_SIGN_NOPS (arg0);
6704 STRIP_SIGN_NOPS (arg1);
6706 if (TREE_CODE (arg1) == INTEGER_CST)
6707 return 0;
6708 if (TREE_CODE (arg0) == INTEGER_CST)
6709 return 1;
6711 if (TREE_CODE (arg1) == REAL_CST)
6712 return 0;
6713 if (TREE_CODE (arg0) == REAL_CST)
6714 return 1;
6716 if (TREE_CODE (arg1) == FIXED_CST)
6717 return 0;
6718 if (TREE_CODE (arg0) == FIXED_CST)
6719 return 1;
6721 if (TREE_CODE (arg1) == COMPLEX_CST)
6722 return 0;
6723 if (TREE_CODE (arg0) == COMPLEX_CST)
6724 return 1;
6726 if (TREE_CONSTANT (arg1))
6727 return 0;
6728 if (TREE_CONSTANT (arg0))
6729 return 1;
6731 if (optimize_function_for_size_p (cfun))
6732 return 0;
6734 if (reorder && flag_evaluation_order
6735 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6736 return 0;
6738 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6739 for commutative and comparison operators. Ensuring a canonical
6740 form allows the optimizers to find additional redundancies without
6741 having to explicitly check for both orderings. */
6742 if (TREE_CODE (arg0) == SSA_NAME
6743 && TREE_CODE (arg1) == SSA_NAME
6744 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6745 return 1;
6747 /* Put SSA_NAMEs last. */
6748 if (TREE_CODE (arg1) == SSA_NAME)
6749 return 0;
6750 if (TREE_CODE (arg0) == SSA_NAME)
6751 return 1;
6753 /* Put variables last. */
6754 if (DECL_P (arg1))
6755 return 0;
6756 if (DECL_P (arg0))
6757 return 1;
6759 return 0;
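/* The net effect is a canonical operand order for commutative operators:
   constants and other simple operands sink to the second position, so
   e.g. a caller folding "5 + x" is told to rewrite it as "x + 5". */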
6762 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6763 ARG0 is extended to a wider type. */
6765 static tree
6766 fold_widened_comparison (location_t loc, enum tree_code code,
6767 tree type, tree arg0, tree arg1)
6769 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6770 tree arg1_unw;
6771 tree shorter_type, outer_type;
6772 tree min, max;
6773 bool above, below;
6775 if (arg0_unw == arg0)
6776 return NULL_TREE;
6777 shorter_type = TREE_TYPE (arg0_unw);
6779 #ifdef HAVE_canonicalize_funcptr_for_compare
6780 /* Disable this optimization if we're casting a function pointer
6781 type on targets that require function pointer canonicalization. */
6782 if (HAVE_canonicalize_funcptr_for_compare
6783 && TREE_CODE (shorter_type) == POINTER_TYPE
6784 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6785 return NULL_TREE;
6786 #endif
6788 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6789 return NULL_TREE;
6791 arg1_unw = get_unwidened (arg1, NULL_TREE);
6793 /* If possible, express the comparison in the shorter mode. */
6794 if ((code == EQ_EXPR || code == NE_EXPR
6795 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6796 && (TREE_TYPE (arg1_unw) == shorter_type
6797 || ((TYPE_PRECISION (shorter_type)
6798 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6799 && (TYPE_UNSIGNED (shorter_type)
6800 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6801 || (TREE_CODE (arg1_unw) == INTEGER_CST
6802 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6803 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6804 && int_fits_type_p (arg1_unw, shorter_type))))
6805 return fold_build2_loc (loc, code, type, arg0_unw,
6806 fold_convert_loc (loc, shorter_type, arg1_unw));
6808 if (TREE_CODE (arg1_unw) != INTEGER_CST
6809 || TREE_CODE (shorter_type) != INTEGER_TYPE
6810 || !int_fits_type_p (arg1_unw, shorter_type))
6811 return NULL_TREE;
6813 /* If we are comparing with an integer that does not fit into the range
6814 of the shorter type, the result is known. */
6815 outer_type = TREE_TYPE (arg1_unw);
6816 min = lower_bound_in_type (outer_type, shorter_type);
6817 max = upper_bound_in_type (outer_type, shorter_type);
6819 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6820 max, arg1_unw));
6821 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6822 arg1_unw, min));
6824 switch (code)
6826 case EQ_EXPR:
6827 if (above || below)
6828 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6829 break;
6831 case NE_EXPR:
6832 if (above || below)
6833 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6834 break;
6836 case LT_EXPR:
6837 case LE_EXPR:
6838 if (above)
6839 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6840 else if (below)
6841 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
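/* Falling through here is harmless: if neither ABOVE nor BELOW is set,
   the GT_EXPR/GE_EXPR checks below do nothing either. */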
6843 case GT_EXPR:
6844 case GE_EXPR:
6845 if (above)
6846 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6847 else if (below)
6848 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6850 default:
6851 break;
6854 return NULL_TREE;
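/* For instance, if C has type unsigned char, "(int) C < 300" compares
   against a value above the shorter type's maximum of 255, so ABOVE is
   set and the comparison folds to the constant 1. */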
6857 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6858 ARG0 just the signedness is changed. */
6860 static tree
6861 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6862 tree arg0, tree arg1)
6864 tree arg0_inner;
6865 tree inner_type, outer_type;
6867 if (!CONVERT_EXPR_P (arg0))
6868 return NULL_TREE;
6870 outer_type = TREE_TYPE (arg0);
6871 arg0_inner = TREE_OPERAND (arg0, 0);
6872 inner_type = TREE_TYPE (arg0_inner);
6874 #ifdef HAVE_canonicalize_funcptr_for_compare
6875 /* Disable this optimization if we're casting a function pointer
6876 type on targets that require function pointer canonicalization. */
6877 if (HAVE_canonicalize_funcptr_for_compare
6878 && TREE_CODE (inner_type) == POINTER_TYPE
6879 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6880 return NULL_TREE;
6881 #endif
6883 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6884 return NULL_TREE;
6886 if (TREE_CODE (arg1) != INTEGER_CST
6887 && !(CONVERT_EXPR_P (arg1)
6888 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6889 return NULL_TREE;
6891 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6892 && code != NE_EXPR
6893 && code != EQ_EXPR)
6894 return NULL_TREE;
6896 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6897 return NULL_TREE;
6899 if (TREE_CODE (arg1) == INTEGER_CST)
6900 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6901 0, TREE_OVERFLOW (arg1));
6902 else
6903 arg1 = fold_convert_loc (loc, inner_type, arg1);
6905 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
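/* Example: for unsigned int U, "(int) U == 5" involves a sign-changing
   but precision-preserving conversion, so it folds to "U == 5U"; ordered
   comparisons are handled only when the signedness is unchanged. */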
6908 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6909 the step of the array. Reconstructs s and delta in the case of s *
6910 delta being an integer constant (and thus already folded). ADDR is
6911 the address. MULT is the multiplicative expression. If the
6912 function succeeds, the new address expression is returned.
6913 Otherwise NULL_TREE is returned. LOC is the location of the
6914 resulting expression. */
6916 static tree
6917 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6919 tree s, delta, step;
6920 tree ref = TREE_OPERAND (addr, 0), pref;
6921 tree ret, pos;
6922 tree itype;
6923 bool mdim = false;
6925 /* Strip the nops that might be added when converting op1 to sizetype. */
6926 STRIP_NOPS (op1);
6928 /* Canonicalize op1 into a possibly non-constant delta
6929 and an INTEGER_CST s. */
6930 if (TREE_CODE (op1) == MULT_EXPR)
6932 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6934 STRIP_NOPS (arg0);
6935 STRIP_NOPS (arg1);
6937 if (TREE_CODE (arg0) == INTEGER_CST)
6939 s = arg0;
6940 delta = arg1;
6942 else if (TREE_CODE (arg1) == INTEGER_CST)
6944 s = arg1;
6945 delta = arg0;
6947 else
6948 return NULL_TREE;
6950 else if (TREE_CODE (op1) == INTEGER_CST)
6952 delta = op1;
6953 s = NULL_TREE;
6955 else
6957 /* Treat op1 as delta * 1. */
6958 delta = op1;
6959 s = integer_one_node;
6962 /* Handle &x.array the same as we would handle &x.array[0]. */
6963 if (TREE_CODE (ref) == COMPONENT_REF
6964 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6966 tree domain;
6968 /* Remember if this was a multi-dimensional array. */
6969 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6970 mdim = true;
6972 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6973 if (! domain)
6974 goto cont;
6975 itype = TREE_TYPE (domain);
6977 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6978 if (TREE_CODE (step) != INTEGER_CST)
6979 goto cont;
6981 if (s)
6983 if (! tree_int_cst_equal (step, s))
6984 goto cont;
6986 else
6988 /* See if delta is a multiple of step. */
6989 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6990 if (! tmp)
6991 goto cont;
6992 delta = tmp;
6995 /* Only fold here if we can verify we do not overflow one
6996 dimension of a multi-dimensional array. */
6997 if (mdim)
6999 tree tmp;
7001 if (!TYPE_MIN_VALUE (domain)
7002 || !TYPE_MAX_VALUE (domain)
7003 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7004 goto cont;
7006 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7007 fold_convert_loc (loc, itype,
7008 TYPE_MIN_VALUE (domain)),
7009 fold_convert_loc (loc, itype, delta));
7010 if (TREE_CODE (tmp) != INTEGER_CST
7011 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7012 goto cont;
7015 /* We found a suitable component reference. */
7017 pref = TREE_OPERAND (addr, 0);
7018 ret = copy_node (pref);
7019 SET_EXPR_LOCATION (ret, loc);
7021 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7022 fold_build2_loc
7023 (loc, PLUS_EXPR, itype,
7024 fold_convert_loc (loc, itype,
7025 TYPE_MIN_VALUE
7026 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7027 fold_convert_loc (loc, itype, delta)),
7028 NULL_TREE, NULL_TREE);
7029 return build_fold_addr_expr_loc (loc, ret);
7032 cont:
7034 for (;; ref = TREE_OPERAND (ref, 0))
7036 if (TREE_CODE (ref) == ARRAY_REF)
7038 tree domain;
7040 /* Remember if this was a multi-dimensional array. */
7041 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7042 mdim = true;
7044 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7045 if (! domain)
7046 continue;
7047 itype = TREE_TYPE (domain);
7049 step = array_ref_element_size (ref);
7050 if (TREE_CODE (step) != INTEGER_CST)
7051 continue;
7053 if (s)
7055 if (! tree_int_cst_equal (step, s))
7056 continue;
7058 else
7060 /* See if delta is a multiple of step. */
7061 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7062 if (! tmp)
7063 continue;
7064 delta = tmp;
7067 /* Only fold here if we can verify we do not overflow one
7068 dimension of a multi-dimensional array. */
7069 if (mdim)
7071 tree tmp;
7073 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7074 || !TYPE_MAX_VALUE (domain)
7075 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7076 continue;
7078 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7079 fold_convert_loc (loc, itype,
7080 TREE_OPERAND (ref, 1)),
7081 fold_convert_loc (loc, itype, delta));
7082 if (!tmp
7083 || TREE_CODE (tmp) != INTEGER_CST
7084 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7085 continue;
7088 break;
7090 else
7091 mdim = false;
7093 if (!handled_component_p (ref))
7094 return NULL_TREE;
7097 /* We found a suitable array reference. So copy everything up to it,
7098 and replace the index. */
7100 pref = TREE_OPERAND (addr, 0);
7101 ret = copy_node (pref);
7102 SET_EXPR_LOCATION (ret, loc);
7103 pos = ret;
7105 while (pref != ref)
7107 pref = TREE_OPERAND (pref, 0);
7108 TREE_OPERAND (pos, 0) = copy_node (pref);
7109 pos = TREE_OPERAND (pos, 0);
7112 TREE_OPERAND (pos, 1)
7113 = fold_build2_loc (loc, PLUS_EXPR, itype,
7114 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7115 fold_convert_loc (loc, itype, delta));
7116 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
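/* A sketch of the intended effect: given "int a[10][10]", the address
   "&a[3][4] p+ 4 * 2" has step 4, the element size, so the ARRAY_REF
   index is rewritten and the result is "&a[3][6]" (after the
   multi-dimensional overflow check above). */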
7120 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7121 means A >= Y && A != MAX, but in this case we know that
7122 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7124 static tree
7125 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7127 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7129 if (TREE_CODE (bound) == LT_EXPR)
7130 a = TREE_OPERAND (bound, 0);
7131 else if (TREE_CODE (bound) == GT_EXPR)
7132 a = TREE_OPERAND (bound, 1);
7133 else
7134 return NULL_TREE;
7136 typea = TREE_TYPE (a);
7137 if (!INTEGRAL_TYPE_P (typea)
7138 && !POINTER_TYPE_P (typea))
7139 return NULL_TREE;
7141 if (TREE_CODE (ineq) == LT_EXPR)
7143 a1 = TREE_OPERAND (ineq, 1);
7144 y = TREE_OPERAND (ineq, 0);
7146 else if (TREE_CODE (ineq) == GT_EXPR)
7148 a1 = TREE_OPERAND (ineq, 0);
7149 y = TREE_OPERAND (ineq, 1);
7151 else
7152 return NULL_TREE;
7154 if (TREE_TYPE (a1) != typea)
7155 return NULL_TREE;
7157 if (POINTER_TYPE_P (typea))
7159 /* Convert the pointer types into integer before taking the difference. */
7160 tree ta = fold_convert_loc (loc, ssizetype, a);
7161 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7162 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7164 else
7165 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7167 if (!diff || !integer_onep (diff))
7168 return NULL_TREE;
7170 return fold_build2_loc (loc, GE_EXPR, type, a, y);
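/* E.g. with BOUND "a < x" and INEQ "a + 1 > y", DIFF is 1 and the result
   is "a >= y"; combined with the bound this is safe because a + 1 cannot
   wrap past x. */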
7173 /* Fold a sum or difference involving at least one multiplication.
7174 Returns the folded tree or NULL if no simplification could be made. */
7176 static tree
7177 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7178 tree arg0, tree arg1)
7180 tree arg00, arg01, arg10, arg11;
7181 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7183 /* (A * C) +- (B * C) -> (A+-B) * C.
7184 (A * C) +- A -> A * (C+-1).
7185 We are most concerned about the case where C is a constant,
7186 but other combinations show up during loop reduction. Since
7187 it is not difficult, try all four possibilities. */
7189 if (TREE_CODE (arg0) == MULT_EXPR)
7191 arg00 = TREE_OPERAND (arg0, 0);
7192 arg01 = TREE_OPERAND (arg0, 1);
7194 else if (TREE_CODE (arg0) == INTEGER_CST)
7196 arg00 = build_one_cst (type);
7197 arg01 = arg0;
7199 else
7201 /* We cannot generate constant 1 for fract. */
7202 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7203 return NULL_TREE;
7204 arg00 = arg0;
7205 arg01 = build_one_cst (type);
7207 if (TREE_CODE (arg1) == MULT_EXPR)
7209 arg10 = TREE_OPERAND (arg1, 0);
7210 arg11 = TREE_OPERAND (arg1, 1);
7212 else if (TREE_CODE (arg1) == INTEGER_CST)
7214 arg10 = build_one_cst (type);
7215 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7216 the purpose of this canonicalization. */
7217 if (TREE_INT_CST_HIGH (arg1) == -1
7218 && negate_expr_p (arg1)
7219 && code == PLUS_EXPR)
7221 arg11 = negate_expr (arg1);
7222 code = MINUS_EXPR;
7224 else
7225 arg11 = arg1;
7227 else
7229 /* We cannot generate constant 1 for fract. */
7230 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7231 return NULL_TREE;
7232 arg10 = arg1;
7233 arg11 = build_one_cst (type);
7235 same = NULL_TREE;
7237 if (operand_equal_p (arg01, arg11, 0))
7238 same = arg01, alt0 = arg00, alt1 = arg10;
7239 else if (operand_equal_p (arg00, arg10, 0))
7240 same = arg00, alt0 = arg01, alt1 = arg11;
7241 else if (operand_equal_p (arg00, arg11, 0))
7242 same = arg00, alt0 = arg01, alt1 = arg10;
7243 else if (operand_equal_p (arg01, arg10, 0))
7244 same = arg01, alt0 = arg00, alt1 = arg11;
7246 /* No identical multiplicands; see if we can find a common
7247 power-of-two factor in non-power-of-two multiplies. This
7248 can help in multi-dimensional array access. */
7249 else if (host_integerp (arg01, 0)
7250 && host_integerp (arg11, 0))
7252 HOST_WIDE_INT int01, int11, tmp;
7253 bool swap = false;
7254 tree maybe_same;
7255 int01 = TREE_INT_CST_LOW (arg01);
7256 int11 = TREE_INT_CST_LOW (arg11);
7258 /* Move min of absolute values to int11. */
7259 if (absu_hwi (int01) < absu_hwi (int11))
7261 tmp = int01, int01 = int11, int11 = tmp;
7262 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7263 maybe_same = arg01;
7264 swap = true;
7266 else
7267 maybe_same = arg11;
7269 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7270 /* The remainder should not be a constant, otherwise we
7271 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7272 increases the number of multiplications necessary.  */
7273 && TREE_CODE (arg10) != INTEGER_CST)
7275 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7276 build_int_cst (TREE_TYPE (arg00),
7277 int01 / int11));
7278 alt1 = arg10;
7279 same = maybe_same;
7280 if (swap)
7281 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7285 if (same)
7286 return fold_build2_loc (loc, MULT_EXPR, type,
7287 fold_build2_loc (loc, code, type,
7288 fold_convert_loc (loc, type, alt0),
7289 fold_convert_loc (loc, type, alt1)),
7290 fold_convert_loc (loc, type, same));
7292 return NULL_TREE;
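/* Two hypothetical instances of the folds above, assuming int operands:

     a * c + b * c  =>  (a + b) * c       // identical multiplicands
     i * 4 + j * 2  =>  (i * 2 + j) * 2   // common power-of-two factor

   The second form also shows why the check above rejects a constant
   remainder: folding i * 4 + 2 into (i * 2 + 1) * 2 would add a
   multiplication rather than remove one.  */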
7295 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7296 specified by EXPR into the buffer PTR of length LEN bytes.
7297 Return the number of bytes placed in the buffer, or zero
7298 upon failure. */
7300 static int
7301 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7303 tree type = TREE_TYPE (expr);
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 int byte, offset, word, words;
7306 unsigned char value;
7308 if (total_bytes > len)
7309 return 0;
7310 words = total_bytes / UNITS_PER_WORD;
7312 for (byte = 0; byte < total_bytes; byte++)
7314 int bitpos = byte * BITS_PER_UNIT;
7315 if (bitpos < HOST_BITS_PER_WIDE_INT)
7316 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7317 else
7318 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7319 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7321 if (total_bytes > UNITS_PER_WORD)
7323 word = byte / UNITS_PER_WORD;
7324 if (WORDS_BIG_ENDIAN)
7325 word = (words - 1) - word;
7326 offset = word * UNITS_PER_WORD;
7327 if (BYTES_BIG_ENDIAN)
7328 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7329 else
7330 offset += byte % UNITS_PER_WORD;
7332 else
7333 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7334 ptr[offset] = value;
7336 return total_bytes;
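/* A worked example of the indexing above, assuming a 4-byte constant
   0x01020304, a 64-bit HOST_WIDE_INT and UNITS_PER_WORD >= 4: the loop
   extracts the bytes 04 03 02 01 from TREE_INT_CST_LOW; on a
   little-endian target they land in the buffer in that order, while
   with BYTES_BIG_ENDIAN the offset mirroring stores 01 02 03 04.  */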
7340 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7341 specified by EXPR into the buffer PTR of length LEN bytes.
7342 Return the number of bytes placed in the buffer, or zero
7343 upon failure. */
7345 static int
7346 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7348 tree type = TREE_TYPE (expr);
7349 enum machine_mode mode = TYPE_MODE (type);
7350 int total_bytes = GET_MODE_SIZE (mode);
7351 FIXED_VALUE_TYPE value;
7352 tree i_value, i_type;
7354 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7355 return 0;
7357 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7359 if (NULL_TREE == i_type
7360 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7361 return 0;
7363 value = TREE_FIXED_CST (expr);
7364 i_value = double_int_to_tree (i_type, value.data);
7366 return native_encode_int (i_value, ptr, len);
7370 /* Subroutine of native_encode_expr. Encode the REAL_CST
7371 specified by EXPR into the buffer PTR of length LEN bytes.
7372 Return the number of bytes placed in the buffer, or zero
7373 upon failure. */
7375 static int
7376 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7378 tree type = TREE_TYPE (expr);
7379 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7380 int byte, offset, word, words, bitpos;
7381 unsigned char value;
7383 /* There are always 32 bits in each long, no matter the size of
7384 the host's long.  We handle floating point representations with
7385 up to 192 bits. */
7386 long tmp[6];
7388 if (total_bytes > len)
7389 return 0;
7390 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7392 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7394 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7395 bitpos += BITS_PER_UNIT)
7397 byte = (bitpos / BITS_PER_UNIT) & 3;
7398 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7400 if (UNITS_PER_WORD < 4)
7402 word = byte / UNITS_PER_WORD;
7403 if (WORDS_BIG_ENDIAN)
7404 word = (words - 1) - word;
7405 offset = word * UNITS_PER_WORD;
7406 if (BYTES_BIG_ENDIAN)
7407 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7408 else
7409 offset += byte % UNITS_PER_WORD;
7411 else
7412 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7413 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7415 return total_bytes;
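/* The real encoder works in 32-bit groups because real_to_target
   always deposits 32 significant bits per 'long' slot.  As a sketch,
   assuming a 4-byte float on a target with UNITS_PER_WORD < 4, the
   byte index is first reduced modulo 4 (the "& 3"), remapped for word
   and byte endianness within the group, and then the "& ~3" term puts
   the group back at its 4-byte-aligned position in the buffer.  */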
7418 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7419 specified by EXPR into the buffer PTR of length LEN bytes.
7420 Return the number of bytes placed in the buffer, or zero
7421 upon failure. */
7423 static int
7424 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7426 int rsize, isize;
7427 tree part;
7429 part = TREE_REALPART (expr);
7430 rsize = native_encode_expr (part, ptr, len);
7431 if (rsize == 0)
7432 return 0;
7433 part = TREE_IMAGPART (expr);
7434 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7435 if (isize != rsize)
7436 return 0;
7437 return rsize + isize;
7441 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7442 specified by EXPR into the buffer PTR of length LEN bytes.
7443 Return the number of bytes placed in the buffer, or zero
7444 upon failure. */
7446 static int
7447 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7449 unsigned i, count;
7450 int size, offset;
7451 tree itype, elem;
7453 offset = 0;
7454 count = VECTOR_CST_NELTS (expr);
7455 itype = TREE_TYPE (TREE_TYPE (expr));
7456 size = GET_MODE_SIZE (TYPE_MODE (itype));
7457 for (i = 0; i < count; i++)
7459 elem = VECTOR_CST_ELT (expr, i);
7460 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7461 return 0;
7462 offset += size;
7464 return offset;
7468 /* Subroutine of native_encode_expr. Encode the STRING_CST
7469 specified by EXPR into the buffer PTR of length LEN bytes.
7470 Return the number of bytes placed in the buffer, or zero
7471 upon failure. */
7473 static int
7474 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7476 tree type = TREE_TYPE (expr);
7477 HOST_WIDE_INT total_bytes;
7479 if (TREE_CODE (type) != ARRAY_TYPE
7480 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7481 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7482 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7483 return 0;
7484 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7485 if (total_bytes > len)
7486 return 0;
7487 if (TREE_STRING_LENGTH (expr) < total_bytes)
7489 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7490 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7491 total_bytes - TREE_STRING_LENGTH (expr));
7493 else
7494 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7495 return total_bytes;
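/* For example (hypothetically), encoding a 3-byte STRING_CST "ab"
   (two characters plus the terminating NUL) held in a char[4] object
   copies the three string bytes and zero-fills the fourth, matching
   the zero padding the array would have in target memory.  */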
7499 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7500 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7501 specified by EXPR into the buffer PTR of length LEN bytes.
7502 Return the number of bytes placed in the buffer, or zero upon failure.  */
7504 int
7505 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7507 switch (TREE_CODE (expr))
7509 case INTEGER_CST:
7510 return native_encode_int (expr, ptr, len);
7512 case REAL_CST:
7513 return native_encode_real (expr, ptr, len);
7515 case FIXED_CST:
7516 return native_encode_fixed (expr, ptr, len);
7518 case COMPLEX_CST:
7519 return native_encode_complex (expr, ptr, len);
7521 case VECTOR_CST:
7522 return native_encode_vector (expr, ptr, len);
7524 case STRING_CST:
7525 return native_encode_string (expr, ptr, len);
7527 default:
7528 return 0;
7533 /* Subroutine of native_interpret_expr. Interpret the contents of
7534 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7535 If the buffer cannot be interpreted, return NULL_TREE. */
7537 static tree
7538 native_interpret_int (tree type, const unsigned char *ptr, int len)
7540 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7541 double_int result;
7543 if (total_bytes > len
7544 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7545 return NULL_TREE;
7547 result = double_int::from_buffer (ptr, total_bytes);
7549 return double_int_to_tree (type, result);
7553 /* Subroutine of native_interpret_expr. Interpret the contents of
7554 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7555 If the buffer cannot be interpreted, return NULL_TREE. */
7557 static tree
7558 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7560 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7561 double_int result;
7562 FIXED_VALUE_TYPE fixed_value;
7564 if (total_bytes > len
7565 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7566 return NULL_TREE;
7568 result = double_int::from_buffer (ptr, total_bytes);
7569 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7571 return build_fixed (type, fixed_value);
7575 /* Subroutine of native_interpret_expr. Interpret the contents of
7576 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7577 If the buffer cannot be interpreted, return NULL_TREE. */
7579 static tree
7580 native_interpret_real (tree type, const unsigned char *ptr, int len)
7582 enum machine_mode mode = TYPE_MODE (type);
7583 int total_bytes = GET_MODE_SIZE (mode);
7584 int byte, offset, word, words, bitpos;
7585 unsigned char value;
7586 /* There are always 32 bits in each long, no matter the size of
7587 the host's long.  We handle floating point representations with
7588 up to 192 bits. */
7589 REAL_VALUE_TYPE r;
7590 long tmp[6];
7593 if (total_bytes > len || total_bytes > 24)
7594 return NULL_TREE;
7595 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7597 memset (tmp, 0, sizeof (tmp));
7598 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7599 bitpos += BITS_PER_UNIT)
7601 byte = (bitpos / BITS_PER_UNIT) & 3;
7602 if (UNITS_PER_WORD < 4)
7604 word = byte / UNITS_PER_WORD;
7605 if (WORDS_BIG_ENDIAN)
7606 word = (words - 1) - word;
7607 offset = word * UNITS_PER_WORD;
7608 if (BYTES_BIG_ENDIAN)
7609 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7610 else
7611 offset += byte % UNITS_PER_WORD;
7613 else
7614 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7615 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7617 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7620 real_from_target (&r, tmp, mode);
7621 return build_real (type, r);
7625 /* Subroutine of native_interpret_expr. Interpret the contents of
7626 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7627 If the buffer cannot be interpreted, return NULL_TREE. */
7629 static tree
7630 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7632 tree etype, rpart, ipart;
7633 int size;
7635 etype = TREE_TYPE (type);
7636 size = GET_MODE_SIZE (TYPE_MODE (etype));
7637 if (size * 2 > len)
7638 return NULL_TREE;
7639 rpart = native_interpret_expr (etype, ptr, size);
7640 if (!rpart)
7641 return NULL_TREE;
7642 ipart = native_interpret_expr (etype, ptr+size, size);
7643 if (!ipart)
7644 return NULL_TREE;
7645 return build_complex (type, rpart, ipart);
7649 /* Subroutine of native_interpret_expr. Interpret the contents of
7650 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7651 If the buffer cannot be interpreted, return NULL_TREE. */
7653 static tree
7654 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7656 tree etype, elem;
7657 int i, size, count;
7658 tree *elements;
7660 etype = TREE_TYPE (type);
7661 size = GET_MODE_SIZE (TYPE_MODE (etype));
7662 count = TYPE_VECTOR_SUBPARTS (type);
7663 if (size * count > len)
7664 return NULL_TREE;
7666 elements = XALLOCAVEC (tree, count);
7667 for (i = count - 1; i >= 0; i--)
7669 elem = native_interpret_expr (etype, ptr+(i*size), size);
7670 if (!elem)
7671 return NULL_TREE;
7672 elements[i] = elem;
7674 return build_vector (type, elements);
7678 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7679 the buffer PTR of length LEN as a constant of type TYPE. For
7680 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7681 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7682 return NULL_TREE. */
7684 tree
7685 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7687 switch (TREE_CODE (type))
7689 case INTEGER_TYPE:
7690 case ENUMERAL_TYPE:
7691 case BOOLEAN_TYPE:
7692 case POINTER_TYPE:
7693 case REFERENCE_TYPE:
7694 return native_interpret_int (type, ptr, len);
7696 case REAL_TYPE:
7697 return native_interpret_real (type, ptr, len);
7699 case FIXED_POINT_TYPE:
7700 return native_interpret_fixed (type, ptr, len);
7702 case COMPLEX_TYPE:
7703 return native_interpret_complex (type, ptr, len);
7705 case VECTOR_TYPE:
7706 return native_interpret_vector (type, ptr, len);
7708 default:
7709 return NULL_TREE;
7713 /* Returns true if we can interpret the contents of a native encoding
7714 as TYPE. */
7716 static bool
7717 can_native_interpret_type_p (tree type)
7719 switch (TREE_CODE (type))
7721 case INTEGER_TYPE:
7722 case ENUMERAL_TYPE:
7723 case BOOLEAN_TYPE:
7724 case POINTER_TYPE:
7725 case REFERENCE_TYPE:
7726 case FIXED_POINT_TYPE:
7727 case REAL_TYPE:
7728 case COMPLEX_TYPE:
7729 case VECTOR_TYPE:
7730 return true;
7731 default:
7732 return false;
7736 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7737 TYPE at compile-time. If we're unable to perform the conversion
7738 return NULL_TREE. */
7740 static tree
7741 fold_view_convert_expr (tree type, tree expr)
7743 /* We support up to 512-bit values (for V8DFmode). */
7744 unsigned char buffer[64];
7745 int len;
7747 /* Check that the host and target are sane. */
7748 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7749 return NULL_TREE;
7751 len = native_encode_expr (expr, buffer, sizeof (buffer));
7752 if (len == 0)
7753 return NULL_TREE;
7755 return native_interpret_expr (type, buffer, len);
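/* A sketch of the round trip above for a hypothetical bit-level
   reinterpretation, assuming 32-bit int and IEEE single precision:
   folding VIEW_CONVERT_EXPR<float>(0x3f800000) first serializes the
   INTEGER_CST into target byte order with native_encode_expr, then
   reads the four bytes back with native_interpret_expr, yielding the
   REAL_CST 1.0f.  */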
7758 /* Build an expression for the address of T. Folds away INDIRECT_REF
7759 to avoid confusing the gimplify process. */
7761 tree
7762 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7764 /* The size of the object is not relevant when talking about its address. */
7765 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7766 t = TREE_OPERAND (t, 0);
7768 if (TREE_CODE (t) == INDIRECT_REF)
7770 t = TREE_OPERAND (t, 0);
7772 if (TREE_TYPE (t) != ptrtype)
7773 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7775 else if (TREE_CODE (t) == MEM_REF
7776 && integer_zerop (TREE_OPERAND (t, 1)))
7777 return TREE_OPERAND (t, 0);
7778 else if (TREE_CODE (t) == MEM_REF
7779 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7780 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7781 TREE_OPERAND (t, 0),
7782 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7783 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7785 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7787 if (TREE_TYPE (t) != ptrtype)
7788 t = fold_convert_loc (loc, ptrtype, t);
7790 else
7791 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7793 return t;
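/* Hypothetical examples of the foldings above, written as trees:

     &*p                 =>  p
     &MEM_REF[p, 0]      =>  p
     &MEM_REF[cst, off]  =>  cst p+ off
     &VIEW_CONVERT(x)    =>  (ptrtype) &x

   Each avoids building an ADDR_EXPR that the gimplifier would only
   have to strip again.  */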
7796 /* Build an expression for the address of T. */
7798 tree
7799 build_fold_addr_expr_loc (location_t loc, tree t)
7801 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7803 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7806 static bool vec_cst_ctor_to_array (tree, tree *);
7808 /* Fold a unary expression of code CODE and type TYPE with operand
7809 OP0. Return the folded expression if folding is successful.
7810 Otherwise, return NULL_TREE. */
7812 tree
7813 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7815 tree tem;
7816 tree arg0;
7817 enum tree_code_class kind = TREE_CODE_CLASS (code);
7819 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7820 && TREE_CODE_LENGTH (code) == 1);
7822 arg0 = op0;
7823 if (arg0)
7825 if (CONVERT_EXPR_CODE_P (code)
7826 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7828 /* Don't use STRIP_NOPS, because signedness of argument type
7829 matters. */
7830 STRIP_SIGN_NOPS (arg0);
7832 else
7834 /* Strip any conversions that don't change the mode. This
7835 is safe for every expression, except for a comparison
7836 expression because its signedness is derived from its
7837 operands.
7839 Note that this is done as an internal manipulation within
7840 the constant folder, in order to find the simplest
7841 representation of the arguments so that their form can be
7842 studied. In any case, the appropriate type conversions
7843 should be put back in the tree that will get out of the
7844 constant folder. */
7845 STRIP_NOPS (arg0);
7849 if (TREE_CODE_CLASS (code) == tcc_unary)
7851 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7852 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7853 fold_build1_loc (loc, code, type,
7854 fold_convert_loc (loc, TREE_TYPE (op0),
7855 TREE_OPERAND (arg0, 1))));
7856 else if (TREE_CODE (arg0) == COND_EXPR)
7858 tree arg01 = TREE_OPERAND (arg0, 1);
7859 tree arg02 = TREE_OPERAND (arg0, 2);
7860 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7861 arg01 = fold_build1_loc (loc, code, type,
7862 fold_convert_loc (loc,
7863 TREE_TYPE (op0), arg01));
7864 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7865 arg02 = fold_build1_loc (loc, code, type,
7866 fold_convert_loc (loc,
7867 TREE_TYPE (op0), arg02));
7868 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7869 arg01, arg02);
7871 /* If this was a conversion, and all we did was to move it
7872 inside the COND_EXPR, bring it back out. But leave it if
7873 it is a conversion from integer to integer and the
7874 result precision is no wider than a word since such a
7875 conversion is cheap and may be optimized away by combine,
7876 while it couldn't if it were outside the COND_EXPR. Then return
7877 so we don't get into an infinite recursion loop taking the
7878 conversion out and then back in. */
7880 if ((CONVERT_EXPR_CODE_P (code)
7881 || code == NON_LVALUE_EXPR)
7882 && TREE_CODE (tem) == COND_EXPR
7883 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7884 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7885 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7886 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7887 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7888 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7889 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7890 && (INTEGRAL_TYPE_P
7891 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7892 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7893 || flag_syntax_only))
7894 tem = build1_loc (loc, code, type,
7895 build3 (COND_EXPR,
7896 TREE_TYPE (TREE_OPERAND
7897 (TREE_OPERAND (tem, 1), 0)),
7898 TREE_OPERAND (tem, 0),
7899 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7900 TREE_OPERAND (TREE_OPERAND (tem, 2),
7901 0)));
7902 return tem;
7906 switch (code)
7908 case PAREN_EXPR:
7909 /* Re-association barriers around constants and other re-association
7910 barriers can be removed. */
7911 if (CONSTANT_CLASS_P (op0)
7912 || TREE_CODE (op0) == PAREN_EXPR)
7913 return fold_convert_loc (loc, type, op0);
7914 return NULL_TREE;
7916 CASE_CONVERT:
7917 case FLOAT_EXPR:
7918 case FIX_TRUNC_EXPR:
7919 if (TREE_TYPE (op0) == type)
7920 return op0;
7922 if (COMPARISON_CLASS_P (op0))
7924 /* If we have (type) (a CMP b) and type is an integral type, return a
7925 new expression involving the new type. Canonicalize
7926 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7927 non-integral type.
7928 Do not fold the result, as that would not simplify further; also,
7929 folding again would result in infinite recursion. */
7930 if (TREE_CODE (type) == BOOLEAN_TYPE)
7931 return build2_loc (loc, TREE_CODE (op0), type,
7932 TREE_OPERAND (op0, 0),
7933 TREE_OPERAND (op0, 1));
7934 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7935 && TREE_CODE (type) != VECTOR_TYPE)
7936 return build3_loc (loc, COND_EXPR, type, op0,
7937 constant_boolean_node (true, type),
7938 constant_boolean_node (false, type));
7941 /* Handle cases of two conversions in a row. */
7942 if (CONVERT_EXPR_P (op0))
7944 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7945 tree inter_type = TREE_TYPE (op0);
7946 int inside_int = INTEGRAL_TYPE_P (inside_type);
7947 int inside_ptr = POINTER_TYPE_P (inside_type);
7948 int inside_float = FLOAT_TYPE_P (inside_type);
7949 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7950 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7951 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7952 int inter_int = INTEGRAL_TYPE_P (inter_type);
7953 int inter_ptr = POINTER_TYPE_P (inter_type);
7954 int inter_float = FLOAT_TYPE_P (inter_type);
7955 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7956 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7957 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7958 int final_int = INTEGRAL_TYPE_P (type);
7959 int final_ptr = POINTER_TYPE_P (type);
7960 int final_float = FLOAT_TYPE_P (type);
7961 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7962 unsigned int final_prec = TYPE_PRECISION (type);
7963 int final_unsignedp = TYPE_UNSIGNED (type);
7965 /* In addition to the cases of two conversions in a row
7966 handled below, if we are converting something to its own
7967 type via an object of identical or wider precision, neither
7968 conversion is needed. */
7969 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7970 && (((inter_int || inter_ptr) && final_int)
7971 || (inter_float && final_float))
7972 && inter_prec >= final_prec)
7973 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7975 /* Likewise, if the intermediate and initial types are either both
7976 float or both integer, we don't need the middle conversion if the
7977 former is wider than the latter and doesn't change the signedness
7978 (for integers). Avoid this if the final type is a pointer since
7979 then we sometimes need the middle conversion. Likewise if the
7980 final type has a precision not equal to the size of its mode. */
7981 if (((inter_int && inside_int)
7982 || (inter_float && inside_float)
7983 || (inter_vec && inside_vec))
7984 && inter_prec >= inside_prec
7985 && (inter_float || inter_vec
7986 || inter_unsignedp == inside_unsignedp)
7987 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7988 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7989 && ! final_ptr
7990 && (! final_vec || inter_prec == inside_prec))
7991 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7993 /* If we have a sign-extension of a zero-extended value, we can
7994 replace that by a single zero-extension. Likewise if the
7995 final conversion does not change precision we can drop the
7996 intermediate conversion. */
7997 if (inside_int && inter_int && final_int
7998 && ((inside_prec < inter_prec && inter_prec < final_prec
7999 && inside_unsignedp && !inter_unsignedp)
8000 || final_prec == inter_prec))
8001 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8003 /* Two conversions in a row are not needed unless:
8004 - some conversion is floating-point (overstrict for now), or
8005 - some conversion is a vector (overstrict for now), or
8006 - the intermediate type is narrower than both initial and
8007 final, or
8008 - the intermediate type and innermost type differ in signedness,
8009 and the outermost type is wider than the intermediate, or
8010 - the initial type is a pointer type and the precisions of the
8011 intermediate and final types differ, or
8012 - the final type is a pointer type and the precisions of the
8013 initial and intermediate types differ. */
8014 if (! inside_float && ! inter_float && ! final_float
8015 && ! inside_vec && ! inter_vec && ! final_vec
8016 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8017 && ! (inside_int && inter_int
8018 && inter_unsignedp != inside_unsignedp
8019 && inter_prec < final_prec)
8020 && ((inter_unsignedp && inter_prec > inside_prec)
8021 == (final_unsignedp && final_prec > inter_prec))
8022 && ! (inside_ptr && inter_prec != final_prec)
8023 && ! (final_ptr && inside_prec != inter_prec)
8024 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8025 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8026 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
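/* Hypothetical instances of the collapses above, assuming 8-bit char,
   32-bit int and 64-bit long:

     (int) (long) i                  =>  i, for i of type int
     (long) (int) (unsigned char) c  =>  (long) (unsigned char) c

   The second is the sign-extension-of-a-zero-extension rule: the
   intermediate signed widening cannot alter any bits, so a single
   zero-extension suffices.  */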
8029 /* Handle (T *)&A.B.C for A being of type T and B and C
8030 living at offset zero. This occurs frequently in
8031 C++ upcasting and then accessing the base. */
8032 if (TREE_CODE (op0) == ADDR_EXPR
8033 && POINTER_TYPE_P (type)
8034 && handled_component_p (TREE_OPERAND (op0, 0)))
8036 HOST_WIDE_INT bitsize, bitpos;
8037 tree offset;
8038 enum machine_mode mode;
8039 int unsignedp, volatilep;
8040 tree base = TREE_OPERAND (op0, 0);
8041 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8042 &mode, &unsignedp, &volatilep, false);
8043 /* If the reference was to a (constant) zero offset, we can use
8044 the address of the base if it has the same base type
8045 as the result type and the pointer type is unqualified. */
8046 if (! offset && bitpos == 0
8047 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8048 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8049 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8050 return fold_convert_loc (loc, type,
8051 build_fold_addr_expr_loc (loc, base));
8054 if (TREE_CODE (op0) == MODIFY_EXPR
8055 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8056 /* Detect assigning a bitfield. */
8057 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8058 && DECL_BIT_FIELD
8059 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8061 /* Don't leave an assignment inside a conversion
8062 unless assigning a bitfield. */
8063 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8064 /* First do the assignment, then return converted constant. */
8065 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8066 TREE_NO_WARNING (tem) = 1;
8067 TREE_USED (tem) = 1;
8068 return tem;
8071 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8072 constant (if x has a signed type, the sign bit cannot be set
8073 in c). This folds the extension into the BIT_AND_EXPR.
8074 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8075 very likely don't have maximal range for their precision and this
8076 transformation effectively doesn't preserve non-maximal ranges. */
8077 if (TREE_CODE (type) == INTEGER_TYPE
8078 && TREE_CODE (op0) == BIT_AND_EXPR
8079 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8081 tree and_expr = op0;
8082 tree and0 = TREE_OPERAND (and_expr, 0);
8083 tree and1 = TREE_OPERAND (and_expr, 1);
8084 int change = 0;
8086 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8087 || (TYPE_PRECISION (type)
8088 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8089 change = 1;
8090 else if (TYPE_PRECISION (TREE_TYPE (and1))
8091 <= HOST_BITS_PER_WIDE_INT
8092 && host_integerp (and1, 1))
8094 unsigned HOST_WIDE_INT cst;
8096 cst = tree_low_cst (and1, 1);
8097 cst &= HOST_WIDE_INT_M1U
8098 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8099 change = (cst == 0);
8100 #ifdef LOAD_EXTEND_OP
8101 if (change
8102 && !flag_syntax_only
8103 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8104 == ZERO_EXTEND))
8106 tree uns = unsigned_type_for (TREE_TYPE (and0));
8107 and0 = fold_convert_loc (loc, uns, and0);
8108 and1 = fold_convert_loc (loc, uns, and1);
8110 #endif
8112 if (change)
8114 tem = force_fit_type_double (type, tree_to_double_int (and1),
8115 0, TREE_OVERFLOW (and1));
8116 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8117 fold_convert_loc (loc, type, and0), tem);
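/* A hypothetical example, assuming 32-bit int and 64-bit long:

     (long) (x & 0xff)  =>  (long) x & 0xff, for signed int x

   valid because 0xff has no bit at or above the sign-bit position of
   int, so the AND already guarantees a non-negative value.  */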
8121 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8122 when one of the new casts will fold away. Conservatively we assume
8123 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8124 if (POINTER_TYPE_P (type)
8125 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8126 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8127 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8128 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8129 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8131 tree arg00 = TREE_OPERAND (arg0, 0);
8132 tree arg01 = TREE_OPERAND (arg0, 1);
8134 return fold_build_pointer_plus_loc
8135 (loc, fold_convert_loc (loc, type, arg00), arg01);
8138 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8139 of the same precision, and X has an integer type not narrower than
8140 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8141 if (INTEGRAL_TYPE_P (type)
8142 && TREE_CODE (op0) == BIT_NOT_EXPR
8143 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8144 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8145 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8147 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8148 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8149 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8150 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8151 fold_convert_loc (loc, type, tem));
8154 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8155 type of X and Y (integer types only). */
8156 if (INTEGRAL_TYPE_P (type)
8157 && TREE_CODE (op0) == MULT_EXPR
8158 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8159 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8161 /* Be careful not to introduce new overflows. */
8162 tree mult_type;
8163 if (TYPE_OVERFLOW_WRAPS (type))
8164 mult_type = type;
8165 else
8166 mult_type = unsigned_type_for (type);
8168 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8170 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8171 fold_convert_loc (loc, mult_type,
8172 TREE_OPERAND (op0, 0)),
8173 fold_convert_loc (loc, mult_type,
8174 TREE_OPERAND (op0, 1)));
8175 return fold_convert_loc (loc, type, tem);
8179 tem = fold_convert_const (code, type, op0);
8180 return tem ? tem : NULL_TREE;
8182 case ADDR_SPACE_CONVERT_EXPR:
8183 if (integer_zerop (arg0))
8184 return fold_convert_const (code, type, arg0);
8185 return NULL_TREE;
8187 case FIXED_CONVERT_EXPR:
8188 tem = fold_convert_const (code, type, arg0);
8189 return tem ? tem : NULL_TREE;
8191 case VIEW_CONVERT_EXPR:
8192 if (TREE_TYPE (op0) == type)
8193 return op0;
8194 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8195 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8196 type, TREE_OPERAND (op0, 0));
8197 if (TREE_CODE (op0) == MEM_REF)
8198 return fold_build2_loc (loc, MEM_REF, type,
8199 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8201 /* For integral conversions with the same precision or pointer
8202 conversions use a NOP_EXPR instead. */
8203 if ((INTEGRAL_TYPE_P (type)
8204 || POINTER_TYPE_P (type))
8205 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8206 || POINTER_TYPE_P (TREE_TYPE (op0)))
8207 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8208 return fold_convert_loc (loc, type, op0);
8210 /* Strip inner integral conversions that do not change the precision. */
8211 if (CONVERT_EXPR_P (op0)
8212 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8213 || POINTER_TYPE_P (TREE_TYPE (op0)))
8214 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8215 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8216 && (TYPE_PRECISION (TREE_TYPE (op0))
8217 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8218 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8219 type, TREE_OPERAND (op0, 0));
8221 return fold_view_convert_expr (type, op0);
8223 case NEGATE_EXPR:
8224 tem = fold_negate_expr (loc, arg0);
8225 if (tem)
8226 return fold_convert_loc (loc, type, tem);
8227 return NULL_TREE;
8229 case ABS_EXPR:
8230 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8231 return fold_abs_const (arg0, type);
8232 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8233 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8234 /* Convert fabs((double)float) into (double)fabsf(float). */
8235 else if (TREE_CODE (arg0) == NOP_EXPR
8236 && TREE_CODE (type) == REAL_TYPE)
8238 tree targ0 = strip_float_extensions (arg0);
8239 if (targ0 != arg0)
8240 return fold_convert_loc (loc, type,
8241 fold_build1_loc (loc, ABS_EXPR,
8242 TREE_TYPE (targ0),
8243 targ0));
8245 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8246 else if (TREE_CODE (arg0) == ABS_EXPR)
8247 return arg0;
8248 else if (tree_expr_nonnegative_p (arg0))
8249 return arg0;
8251 /* Strip sign ops from argument. */
8252 if (TREE_CODE (type) == REAL_TYPE)
8254 tem = fold_strip_sign_ops (arg0);
8255 if (tem)
8256 return fold_build1_loc (loc, ABS_EXPR, type,
8257 fold_convert_loc (loc, type, tem));
8259 return NULL_TREE;
8261 case CONJ_EXPR:
8262 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8263 return fold_convert_loc (loc, type, arg0);
8264 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8266 tree itype = TREE_TYPE (type);
8267 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8268 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8269 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8270 negate_expr (ipart));
8272 if (TREE_CODE (arg0) == COMPLEX_CST)
8274 tree itype = TREE_TYPE (type);
8275 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8276 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8277 return build_complex (type, rpart, negate_expr (ipart));
8279 if (TREE_CODE (arg0) == CONJ_EXPR)
8280 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8281 return NULL_TREE;
8283 case BIT_NOT_EXPR:
8284 if (TREE_CODE (arg0) == INTEGER_CST)
8285 return fold_not_const (arg0, type);
8286 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8287 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8288 /* Convert ~ (-A) to A - 1. */
8289 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8290 return fold_build2_loc (loc, MINUS_EXPR, type,
8291 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8292 build_int_cst (type, 1));
8293 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8294 else if (INTEGRAL_TYPE_P (type)
8295 && ((TREE_CODE (arg0) == MINUS_EXPR
8296 && integer_onep (TREE_OPERAND (arg0, 1)))
8297 || (TREE_CODE (arg0) == PLUS_EXPR
8298 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8299 return fold_build1_loc (loc, NEGATE_EXPR, type,
8300 fold_convert_loc (loc, type,
8301 TREE_OPERAND (arg0, 0)));
8302 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8303 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8304 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8305 fold_convert_loc (loc, type,
8306 TREE_OPERAND (arg0, 0)))))
8307 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8308 fold_convert_loc (loc, type,
8309 TREE_OPERAND (arg0, 1)));
8310 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8311 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8312 fold_convert_loc (loc, type,
8313 TREE_OPERAND (arg0, 1)))))
8314 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8315 fold_convert_loc (loc, type,
8316 TREE_OPERAND (arg0, 0)), tem);
8317 /* Perform BIT_NOT_EXPR on each element individually. */
8318 else if (TREE_CODE (arg0) == VECTOR_CST)
8320 tree *elements;
8321 tree elem;
8322 unsigned count = VECTOR_CST_NELTS (arg0), i;
8324 elements = XALLOCAVEC (tree, count);
8325 for (i = 0; i < count; i++)
8327 elem = VECTOR_CST_ELT (arg0, i);
8328 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8329 if (elem == NULL_TREE)
8330 break;
8331 elements[i] = elem;
8333 if (i == count)
8334 return build_vector (type, elements);
8336 else if (COMPARISON_CLASS_P (arg0)
8337 && (VECTOR_TYPE_P (type)
8338 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8340 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8341 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8342 HONOR_NANS (TYPE_MODE (op_type)));
8343 if (subcode != ERROR_MARK)
8344 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8345 TREE_OPERAND (arg0, 1));
8349 return NULL_TREE;
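/* Hypothetical examples of the BIT_NOT_EXPR folds above, for int
   operands; all follow from the two's complement identity
   ~x == -x - 1:

     ~(-a)      =>  a - 1
     ~(a - 1)   =>  -a
     ~(a ^ ~b)  =>  a ^ b    // ~~b simplifies, so the NOT moves inward
*/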
8351 case TRUTH_NOT_EXPR:
8352 /* Note that the operand of this must be an int
8353 and its values must be 0 or 1.
8354 ("true" is a fixed value perhaps depending on the language,
8355 but we don't handle values other than 1 correctly yet.) */
8356 tem = fold_truth_not_expr (loc, arg0);
8357 if (!tem)
8358 return NULL_TREE;
8359 return fold_convert_loc (loc, type, tem);
8361 case REALPART_EXPR:
8362 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8363 return fold_convert_loc (loc, type, arg0);
8364 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8365 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8366 TREE_OPERAND (arg0, 1));
8367 if (TREE_CODE (arg0) == COMPLEX_CST)
8368 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8369 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8371 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8372 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8373 fold_build1_loc (loc, REALPART_EXPR, itype,
8374 TREE_OPERAND (arg0, 0)),
8375 fold_build1_loc (loc, REALPART_EXPR, itype,
8376 TREE_OPERAND (arg0, 1)));
8377 return fold_convert_loc (loc, type, tem);
8379 if (TREE_CODE (arg0) == CONJ_EXPR)
8381 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8382 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8383 TREE_OPERAND (arg0, 0));
8384 return fold_convert_loc (loc, type, tem);
8386 if (TREE_CODE (arg0) == CALL_EXPR)
8388 tree fn = get_callee_fndecl (arg0);
8389 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8390 switch (DECL_FUNCTION_CODE (fn))
8392 CASE_FLT_FN (BUILT_IN_CEXPI):
8393 fn = mathfn_built_in (type, BUILT_IN_COS);
8394 if (fn)
8395 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8396 break;
8398 default:
8399 break;
8402 return NULL_TREE;
8404 case IMAGPART_EXPR:
8405 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8406 return build_zero_cst (type);
8407 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8408 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8409 TREE_OPERAND (arg0, 0));
8410 if (TREE_CODE (arg0) == COMPLEX_CST)
8411 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8412 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8414 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8415 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8416 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8417 TREE_OPERAND (arg0, 0)),
8418 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8419 TREE_OPERAND (arg0, 1)));
8420 return fold_convert_loc (loc, type, tem);
8422 if (TREE_CODE (arg0) == CONJ_EXPR)
8424 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8425 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8426 return fold_convert_loc (loc, type, negate_expr (tem));
8428 if (TREE_CODE (arg0) == CALL_EXPR)
8430 tree fn = get_callee_fndecl (arg0);
8431 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8432 switch (DECL_FUNCTION_CODE (fn))
8434 CASE_FLT_FN (BUILT_IN_CEXPI):
8435 fn = mathfn_built_in (type, BUILT_IN_SIN);
8436 if (fn)
8437 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8438 break;
8440 default:
8441 break;
8444 return NULL_TREE;
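/* A hypothetical illustration of the two built-in folds above: since
   __builtin_cexpi (x) computes cos (x) + i*sin (x),

     __real__ __builtin_cexpi (x)  =>  __builtin_cos (x)
     __imag__ __builtin_cexpi (x)  =>  __builtin_sin (x)

   so the unused half of the complex result can be discarded.  */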
8446 case INDIRECT_REF:
8447 /* Fold *&X to X if X is an lvalue. */
8448 if (TREE_CODE (op0) == ADDR_EXPR)
8450 tree op00 = TREE_OPERAND (op0, 0);
8451 if ((TREE_CODE (op00) == VAR_DECL
8452 || TREE_CODE (op00) == PARM_DECL
8453 || TREE_CODE (op00) == RESULT_DECL)
8454 && !TREE_READONLY (op00))
8455 return op00;
8457 return NULL_TREE;
8459 case VEC_UNPACK_LO_EXPR:
8460 case VEC_UNPACK_HI_EXPR:
8461 case VEC_UNPACK_FLOAT_LO_EXPR:
8462 case VEC_UNPACK_FLOAT_HI_EXPR:
8464 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8465 tree *elts;
8466 enum tree_code subcode;
8468 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8469 if (TREE_CODE (arg0) != VECTOR_CST)
8470 return NULL_TREE;
8472 elts = XALLOCAVEC (tree, nelts * 2);
8473 if (!vec_cst_ctor_to_array (arg0, elts))
8474 return NULL_TREE;
8476 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8477 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8478 elts += nelts;
8480 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8481 subcode = NOP_EXPR;
8482 else
8483 subcode = FLOAT_EXPR;
8485 for (i = 0; i < nelts; i++)
8487 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8488 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8489 return NULL_TREE;
8492 return build_vector (type, elts);
8495 case REDUC_MIN_EXPR:
8496 case REDUC_MAX_EXPR:
8497 case REDUC_PLUS_EXPR:
8499 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8500 tree *elts;
8501 enum tree_code subcode;
8503 if (TREE_CODE (op0) != VECTOR_CST)
8504 return NULL_TREE;
8506 elts = XALLOCAVEC (tree, nelts);
8507 if (!vec_cst_ctor_to_array (op0, elts))
8508 return NULL_TREE;
8510 switch (code)
8512 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8513 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8514 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8515 default: gcc_unreachable ();
8518 for (i = 1; i < nelts; i++)
8520 elts[0] = const_binop (subcode, elts[0], elts[i]);
8521 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8522 return NULL_TREE;
8523 elts[i] = build_zero_cst (TREE_TYPE (type));
8526 return build_vector (type, elts);
8529 default:
8530 return NULL_TREE;
8531 } /* switch (code) */
8535 /* If the operation was a conversion do _not_ mark a resulting constant
8536 with TREE_OVERFLOW if the original constant was not. These conversions
8537 have implementation defined behavior and retaining the TREE_OVERFLOW
8538 flag here would confuse later passes such as VRP. */
8539 tree
8540 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8541 tree type, tree op0)
8543 tree res = fold_unary_loc (loc, code, type, op0);
8544 if (res
8545 && TREE_CODE (res) == INTEGER_CST
8546 && TREE_CODE (op0) == INTEGER_CST
8547 && CONVERT_EXPR_CODE_P (code))
8548 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8550 return res;
8553 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8554 operands OP0 and OP1. LOC is the location of the resulting expression.
8555 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8556 Return the folded expression if folding is successful. Otherwise,
8557 return NULL_TREE. */
8558 static tree
8559 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8560 tree arg0, tree arg1, tree op0, tree op1)
8562 tree tem;
8564 /* We only do these simplifications if we are optimizing. */
8565 if (!optimize)
8566 return NULL_TREE;
8568 /* Check for things like (A || B) && (A || C). We can convert this
8569 to A || (B && C). Note that either operator can be any of the four
8570 truth and/or operations and the transformation will still be
8571 valid. Also note that we only care about order for the
8572 ANDIF and ORIF operators. If B contains side effects, this
8573 might change the truth-value of A. */
8574 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8575 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8576 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8577 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8578 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8579 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8581 tree a00 = TREE_OPERAND (arg0, 0);
8582 tree a01 = TREE_OPERAND (arg0, 1);
8583 tree a10 = TREE_OPERAND (arg1, 0);
8584 tree a11 = TREE_OPERAND (arg1, 1);
8585 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8586 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8587 && (code == TRUTH_AND_EXPR
8588 || code == TRUTH_OR_EXPR));
8590 if (operand_equal_p (a00, a10, 0))
8591 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8592 fold_build2_loc (loc, code, type, a01, a11));
8593 else if (commutative && operand_equal_p (a00, a11, 0))
8594 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8595 fold_build2_loc (loc, code, type, a01, a10));
8596 else if (commutative && operand_equal_p (a01, a10, 0))
8597 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8598 fold_build2_loc (loc, code, type, a00, a11));
8600 /* This case is tricky because we must either have commutative
8601 operators or else A10 must not have side-effects. */
8603 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8604 && operand_equal_p (a01, a11, 0))
8605 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8606 fold_build2_loc (loc, code, type, a00, a10),
8607 a01);
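/* A hypothetical instance of the factoring above, for operands
   without side effects:

     (a || b) && (a || c)  =>  a || (b && c)

   The side-effect check on the inner operands matters because with
   ANDIF/ORIF the rewrite changes evaluation order, and evaluating B
   must not be able to change the truth value of A.  */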
8610 /* See if we can build a range comparison. */
8611 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8612 return tem;
8614 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8615 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8617 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8618 if (tem)
8619 return fold_build2_loc (loc, code, type, tem, arg1);
8622 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8623 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8625 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8626 if (tem)
8627 return fold_build2_loc (loc, code, type, arg0, tem);
8630 /* Check for the possibility of merging component references. If our
8631 lhs is another similar operation, try to merge its rhs with our
8632 rhs. Then try to merge our lhs and rhs. */
8633 if (TREE_CODE (arg0) == code
8634 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8635 TREE_OPERAND (arg0, 1), arg1)))
8636 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8638 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8639 return tem;
8641 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8642 && (code == TRUTH_AND_EXPR
8643 || code == TRUTH_ANDIF_EXPR
8644 || code == TRUTH_OR_EXPR
8645 || code == TRUTH_ORIF_EXPR))
8647 enum tree_code ncode, icode;
8649 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8650 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8651 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8653 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8654 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8655 We don't want to pack more than two leaves into a non-IF AND/OR
8656 expression.
8657 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8658 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8659 If the inner right-hand side of the left-hand operand has
8660 side-effects, or isn't simple, then we can't add to it,
8661 as otherwise we might destroy the if-sequence. */
8662 if (TREE_CODE (arg0) == icode
8663 && simple_operand_p_2 (arg1)
8664 /* Needed for sequence points, to handle trapping and
8665 side-effects. */
8666 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8668 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8669 arg1);
8670 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8671 tem);
8673 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8674 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8675 else if (TREE_CODE (arg1) == icode
8676 && simple_operand_p_2 (arg0)
8677 /* Needed for sequence points, to handle trapping and
8678 side-effects. */
8679 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8681 tem = fold_build2_loc (loc, ncode, type,
8682 arg0, TREE_OPERAND (arg1, 0));
8683 return fold_build2_loc (loc, icode, type, tem,
8684 TREE_OPERAND (arg1, 1));
8686 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8687 into (A OR B).
8688 For sequence point consistency, we need to check for trapping
8689 and side-effects. */
8690 else if (code == icode && simple_operand_p_2 (arg0)
8691 && simple_operand_p_2 (arg1))
8692 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8695 return NULL_TREE;
8698 /* Fold a binary expression of code CODE and type TYPE with operands
8699 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8700 Return the folded expression if folding is successful. Otherwise,
8701 return NULL_TREE. */
8703 static tree
8704 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8706 enum tree_code compl_code;
8708 if (code == MIN_EXPR)
8709 compl_code = MAX_EXPR;
8710 else if (code == MAX_EXPR)
8711 compl_code = MIN_EXPR;
8712 else
8713 gcc_unreachable ();
8715 /* MIN (MAX (a, b), b) == b. */
8716 if (TREE_CODE (op0) == compl_code
8717 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8718 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8720 /* MIN (MAX (b, a), b) == b. */
8721 if (TREE_CODE (op0) == compl_code
8722 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8723 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8724 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8726 /* MIN (a, MAX (a, b)) == a. */
8727 if (TREE_CODE (op1) == compl_code
8728 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8729 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8730 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8732 /* MIN (a, MAX (b, a)) == a. */
8733 if (TREE_CODE (op1) == compl_code
8734 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8735 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8736 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8738 return NULL_TREE;
8741 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8742 by changing CODE to reduce the magnitude of constants involved in
8743 ARG0 of the comparison.
8744 Returns a canonicalized comparison tree if a simplification was
8745 possible, otherwise returns NULL_TREE.
8746 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8747 valid if signed overflow is undefined. */
8749 static tree
8750 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8751 tree arg0, tree arg1,
8752 bool *strict_overflow_p)
8754 enum tree_code code0 = TREE_CODE (arg0);
8755 tree t, cst0 = NULL_TREE;
8756 int sgn0;
8757 bool swap = false;
8759 /* Match A +- CST code arg1 and CST code arg1. We can change the
8760 first form only if overflow is undefined. */
8761 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8762 /* In principle pointers also have undefined overflow behavior,
8763 but that causes problems elsewhere. */
8764 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8765 && (code0 == MINUS_EXPR
8766 || code0 == PLUS_EXPR)
8767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8768 || code0 == INTEGER_CST))
8769 return NULL_TREE;
8771 /* Identify the constant in arg0 and its sign. */
8772 if (code0 == INTEGER_CST)
8773 cst0 = arg0;
8774 else
8775 cst0 = TREE_OPERAND (arg0, 1);
8776 sgn0 = tree_int_cst_sgn (cst0);
8778 /* Overflowed constants and zero will cause problems. */
8779 if (integer_zerop (cst0)
8780 || TREE_OVERFLOW (cst0))
8781 return NULL_TREE;
8783 /* See if we can reduce the magnitude of the constant in
8784 arg0 by changing the comparison code. */
8785 if (code0 == INTEGER_CST)
8787 /* CST <= arg1 -> CST-1 < arg1. */
8788 if (code == LE_EXPR && sgn0 == 1)
8789 code = LT_EXPR;
8790 /* -CST < arg1 -> -CST-1 <= arg1. */
8791 else if (code == LT_EXPR && sgn0 == -1)
8792 code = LE_EXPR;
8793 /* CST > arg1 -> CST-1 >= arg1. */
8794 else if (code == GT_EXPR && sgn0 == 1)
8795 code = GE_EXPR;
8796 /* -CST >= arg1 -> -CST-1 > arg1. */
8797 else if (code == GE_EXPR && sgn0 == -1)
8798 code = GT_EXPR;
8799 else
8800 return NULL_TREE;
8801 /* arg1 code' CST' might be more canonical. */
8802 swap = true;
8804 else
8806 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8807 if (code == LT_EXPR
8808 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8809 code = LE_EXPR;
8810 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8811 else if (code == GT_EXPR
8812 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8813 code = GE_EXPR;
8814 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8815 else if (code == LE_EXPR
8816 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8817 code = LT_EXPR;
8818 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8819 else if (code == GE_EXPR
8820 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8821 code = GT_EXPR;
8822 else
8823 return NULL_TREE;
8824 *strict_overflow_p = true;
8827 /* Now build the constant reduced in magnitude. But not if that
8828 would produce one outside of its type's range. */
8829 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8830 && ((sgn0 == 1
8831 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8832 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8833 || (sgn0 == -1
8834 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8835 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8836 /* We cannot swap the comparison here as that would cause us to
8837 endlessly recurse. */
8838 return NULL_TREE;
8840 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8841 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8842 if (code0 != INTEGER_CST)
8843 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8844 t = fold_convert (TREE_TYPE (arg1), t);
8846 /* If swapping might yield a more canonical form, do so. */
8847 if (swap)
8848 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8849 else
8850 return fold_build2_loc (loc, code, type, t, arg1);
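/* Two hypothetical canonicalizations produced above, for signed int
   operands whose overflow is undefined:

     x + 2 > y  =>  x + 1 >= y
     3 <= y     =>  y > 2        // constant reduced, then swapped

   Each application shaves one off the magnitude of the constant, so
   repeated folding converges on a canonical form.  */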
8853 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8854 overflow further. Try to decrease the magnitude of constants involved
8855 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8856 and put sole constants at the second argument position.
8857 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8859 static tree
8860 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8861 tree arg0, tree arg1)
8863 tree t;
8864 bool strict_overflow_p;
8865 const char * const warnmsg = G_("assuming signed overflow does not occur "
8866 "when reducing constant in comparison");
8868 /* Try canonicalization by simplifying arg0. */
8869 strict_overflow_p = false;
8870 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8871 &strict_overflow_p);
8872 if (t)
8874 if (strict_overflow_p)
8875 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8876 return t;
8879 /* Try canonicalization by simplifying arg1 using the swapped
8880 comparison. */
8881 code = swap_tree_comparison (code);
8882 strict_overflow_p = false;
8883 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8884 &strict_overflow_p);
8885 if (t && strict_overflow_p)
8886 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8887 return t;
8890 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8891 space. This is used to avoid issuing overflow warnings for
8892 expressions like &p->x, which cannot wrap. */
8894 static bool
8895 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8897 double_int di_offset, total;
8899 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8900 return true;
8902 if (bitpos < 0)
8903 return true;
8905 if (offset == NULL_TREE)
8906 di_offset = double_int_zero;
8907 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8908 return true;
8909 else
8910 di_offset = TREE_INT_CST (offset);
8912 bool overflow;
8913 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8914 total = di_offset.add_with_sign (units, true, &overflow);
8915 if (overflow)
8916 return true;
8918 if (total.high != 0)
8919 return true;
8921 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8922 if (size <= 0)
8923 return true;
8925 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8926 array. */
8927 if (TREE_CODE (base) == ADDR_EXPR)
8929 HOST_WIDE_INT base_size;
8931 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8932 if (base_size > 0 && size < base_size)
8933 size = base_size;
8936 return total.low > (unsigned HOST_WIDE_INT) size;
8939 /* Subroutine of fold_binary. This routine performs all of the
8940 transformations that are common to the equality/inequality
8941 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8942 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8943 fold_binary itself should go through fold_binary. Fold a comparison with
8944 tree code CODE and type TYPE with operands OP0 and OP1. Return
8945 the folded comparison or NULL_TREE. */
8947 static tree
8948 fold_comparison (location_t loc, enum tree_code code, tree type,
8949 tree op0, tree op1)
8951 tree arg0, arg1, tem;
8953 arg0 = op0;
8954 arg1 = op1;
8956 STRIP_SIGN_NOPS (arg0);
8957 STRIP_SIGN_NOPS (arg1);
8959 tem = fold_relational_const (code, type, arg0, arg1);
8960 if (tem != NULL_TREE)
8961 return tem;
8963 /* If one arg is a real or integer constant, put it last. */
8964 if (tree_swap_operands_p (arg0, arg1, true))
8965 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8967 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
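/* Example: X + 5 < 10 becomes X < 5, and X - 3 == 7 becomes X == 10. */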
8968 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8969 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8970 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8971 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8972 && (TREE_CODE (arg1) == INTEGER_CST
8973 && !TREE_OVERFLOW (arg1)))
8975 tree const1 = TREE_OPERAND (arg0, 1);
8976 tree const2 = arg1;
8977 tree variable = TREE_OPERAND (arg0, 0);
8978 tree lhs;
8979 int lhs_add;
8980 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8982 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8983 TREE_TYPE (arg1), const2, const1);
8985 /* If the constant operation overflowed, this can be
8986 simplified as a comparison against INT_MAX/INT_MIN. */
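/* Example: for int X, X + 10 < INT_MIN + 5 would require X + 10 to wrap
below INT_MIN, which is undefined, so the comparison folds to false. */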
8987 if (TREE_CODE (lhs) == INTEGER_CST
8988 && TREE_OVERFLOW (lhs))
8990 int const1_sgn = tree_int_cst_sgn (const1);
8991 enum tree_code code2 = code;
8993 /* Get the sign of the constant on the lhs if the
8994 operation were VARIABLE + CONST1. */
8995 if (TREE_CODE (arg0) == MINUS_EXPR)
8996 const1_sgn = -const1_sgn;
8998 /* The sign of the constant determines if we overflowed
8999 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9000 Canonicalize to the INT_MIN overflow by swapping the comparison
9001 if necessary. */
9002 if (const1_sgn == -1)
9003 code2 = swap_tree_comparison (code);
9005 /* We now can look at the canonicalized case
9006 VARIABLE + 1 CODE2 INT_MIN
9007 and decide on the result. */
9008 if (code2 == LT_EXPR
9009 || code2 == LE_EXPR
9010 || code2 == EQ_EXPR)
9011 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9012 else if (code2 == NE_EXPR
9013 || code2 == GE_EXPR
9014 || code2 == GT_EXPR)
9015 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9018 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9019 && (TREE_CODE (lhs) != INTEGER_CST
9020 || !TREE_OVERFLOW (lhs)))
9022 if (code != EQ_EXPR && code != NE_EXPR)
9023 fold_overflow_warning ("assuming signed overflow does not occur "
9024 "when changing X +- C1 cmp C2 to "
9025 "X cmp C1 +- C2",
9026 WARN_STRICT_OVERFLOW_COMPARISON);
9027 return fold_build2_loc (loc, code, type, variable, lhs);
9031 /* For comparisons of pointers we can decompose them into a compile-time
9032 comparison of the base objects and the offsets into the object.
9033 This requires at least one operand being an ADDR_EXPR or a
9034 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
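/* Example: &s.a != &s.b folds at compile time by comparing the byte
offsets of the fields a and b within s. */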
9035 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9036 && (TREE_CODE (arg0) == ADDR_EXPR
9037 || TREE_CODE (arg1) == ADDR_EXPR
9038 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9039 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9041 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9042 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9043 enum machine_mode mode;
9044 int volatilep, unsignedp;
9045 bool indirect_base0 = false, indirect_base1 = false;
9047 /* Get base and offset for the access. Strip ADDR_EXPR for
9048 get_inner_reference, but put it back by stripping INDIRECT_REF
9049 off the base object if possible. indirect_baseN will be true
9050 if baseN is not an address but refers to the object itself. */
9051 base0 = arg0;
9052 if (TREE_CODE (arg0) == ADDR_EXPR)
9054 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9055 &bitsize, &bitpos0, &offset0, &mode,
9056 &unsignedp, &volatilep, false);
9057 if (TREE_CODE (base0) == INDIRECT_REF)
9058 base0 = TREE_OPERAND (base0, 0);
9059 else
9060 indirect_base0 = true;
9062 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9064 base0 = TREE_OPERAND (arg0, 0);
9065 STRIP_SIGN_NOPS (base0);
9066 if (TREE_CODE (base0) == ADDR_EXPR)
9068 base0 = TREE_OPERAND (base0, 0);
9069 indirect_base0 = true;
9071 offset0 = TREE_OPERAND (arg0, 1);
9072 if (host_integerp (offset0, 0))
9074 HOST_WIDE_INT off = size_low_cst (offset0);
9075 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9076 * BITS_PER_UNIT)
9077 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9079 bitpos0 = off * BITS_PER_UNIT;
9080 offset0 = NULL_TREE;
9085 base1 = arg1;
9086 if (TREE_CODE (arg1) == ADDR_EXPR)
9088 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9089 &bitsize, &bitpos1, &offset1, &mode,
9090 &unsignedp, &volatilep, false);
9091 if (TREE_CODE (base1) == INDIRECT_REF)
9092 base1 = TREE_OPERAND (base1, 0);
9093 else
9094 indirect_base1 = true;
9096 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9098 base1 = TREE_OPERAND (arg1, 0);
9099 STRIP_SIGN_NOPS (base1);
9100 if (TREE_CODE (base1) == ADDR_EXPR)
9102 base1 = TREE_OPERAND (base1, 0);
9103 indirect_base1 = true;
9105 offset1 = TREE_OPERAND (arg1, 1);
9106 if (host_integerp (offset1, 0))
9108 HOST_WIDE_INT off = size_low_cst (offset1);
9109 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9110 * BITS_PER_UNIT)
9111 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9113 bitpos1 = off * BITS_PER_UNIT;
9114 offset1 = NULL_TREE;
9119 /* A local variable can never be pointed to by
9120 the default SSA name of an incoming parameter. */
9121 if ((TREE_CODE (arg0) == ADDR_EXPR
9122 && indirect_base0
9123 && TREE_CODE (base0) == VAR_DECL
9124 && auto_var_in_fn_p (base0, current_function_decl)
9125 && !indirect_base1
9126 && TREE_CODE (base1) == SSA_NAME
9127 && SSA_NAME_IS_DEFAULT_DEF (base1)
9128 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9129 || (TREE_CODE (arg1) == ADDR_EXPR
9130 && indirect_base1
9131 && TREE_CODE (base1) == VAR_DECL
9132 && auto_var_in_fn_p (base1, current_function_decl)
9133 && !indirect_base0
9134 && TREE_CODE (base0) == SSA_NAME
9135 && SSA_NAME_IS_DEFAULT_DEF (base0)
9136 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9138 if (code == NE_EXPR)
9139 return constant_boolean_node (1, type);
9140 else if (code == EQ_EXPR)
9141 return constant_boolean_node (0, type);
9143 /* If we have equivalent bases we might be able to simplify. */
9144 else if (indirect_base0 == indirect_base1
9145 && operand_equal_p (base0, base1, 0))
9147 /* We can fold this expression to a constant if the non-constant
9148 offset parts are equal. */
9149 if ((offset0 == offset1
9150 || (offset0 && offset1
9151 && operand_equal_p (offset0, offset1, 0)))
9152 && (code == EQ_EXPR
9153 || code == NE_EXPR
9154 || (indirect_base0 && DECL_P (base0))
9155 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9158 if (code != EQ_EXPR
9159 && code != NE_EXPR
9160 && bitpos0 != bitpos1
9161 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9162 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9163 fold_overflow_warning (("assuming pointer wraparound does not "
9164 "occur when comparing P +- C1 with "
9165 "P +- C2"),
9166 WARN_STRICT_OVERFLOW_CONDITIONAL);
9168 switch (code)
9170 case EQ_EXPR:
9171 return constant_boolean_node (bitpos0 == bitpos1, type);
9172 case NE_EXPR:
9173 return constant_boolean_node (bitpos0 != bitpos1, type);
9174 case LT_EXPR:
9175 return constant_boolean_node (bitpos0 < bitpos1, type);
9176 case LE_EXPR:
9177 return constant_boolean_node (bitpos0 <= bitpos1, type);
9178 case GE_EXPR:
9179 return constant_boolean_node (bitpos0 >= bitpos1, type);
9180 case GT_EXPR:
9181 return constant_boolean_node (bitpos0 > bitpos1, type);
9182 default:;
9185 /* We can simplify the comparison to a comparison of the variable
9186 offset parts if the constant offset parts are equal.
9187 Be careful to use signed sizetype here because otherwise we
9188 mess with array offsets in the wrong way. This is possible
9189 because pointer arithmetic is restricted to retain within an
9190 object and overflow on pointer differences is undefined as of
9191 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9192 else if (bitpos0 == bitpos1
9193 && ((code == EQ_EXPR || code == NE_EXPR)
9194 || (indirect_base0 && DECL_P (base0))
9195 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9197 /* By converting to signed sizetype we cover middle-end pointer
9198 arithmetic, which operates on unsigned pointer types of sizetype's
9199 precision, and ARRAY_REF offsets, which are properly sign or
9200 zero extended from their type in case it is narrower than
9201 sizetype. */
9202 if (offset0 == NULL_TREE)
9203 offset0 = build_int_cst (ssizetype, 0);
9204 else
9205 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9206 if (offset1 == NULL_TREE)
9207 offset1 = build_int_cst (ssizetype, 0);
9208 else
9209 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9211 if (code != EQ_EXPR
9212 && code != NE_EXPR
9213 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9214 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9215 fold_overflow_warning (("assuming pointer wraparound does not "
9216 "occur when comparing P +- C1 with "
9217 "P +- C2"),
9218 WARN_STRICT_OVERFLOW_COMPARISON);
9220 return fold_build2_loc (loc, code, type, offset0, offset1);
9223 /* For non-equal bases we can simplify if they are addresses
9224 of local binding decls or constants. */
9225 else if (indirect_base0 && indirect_base1
9226 /* We know that !operand_equal_p (base0, base1, 0)
9227 because the if condition was false. But make
9228 sure the two decls are not the same. */
9229 && base0 != base1
9230 && TREE_CODE (arg0) == ADDR_EXPR
9231 && TREE_CODE (arg1) == ADDR_EXPR
9232 && (((TREE_CODE (base0) == VAR_DECL
9233 || TREE_CODE (base0) == PARM_DECL)
9234 && (targetm.binds_local_p (base0)
9235 || CONSTANT_CLASS_P (base1)))
9236 || CONSTANT_CLASS_P (base0))
9237 && (((TREE_CODE (base1) == VAR_DECL
9238 || TREE_CODE (base1) == PARM_DECL)
9239 && (targetm.binds_local_p (base1)
9240 || CONSTANT_CLASS_P (base0)))
9241 || CONSTANT_CLASS_P (base1)))
9243 if (code == EQ_EXPR)
9244 return omit_two_operands_loc (loc, type, boolean_false_node,
9245 arg0, arg1);
9246 else if (code == NE_EXPR)
9247 return omit_two_operands_loc (loc, type, boolean_true_node,
9248 arg0, arg1);
9250 /* For equal offsets we can simplify to a comparison of the
9251 base addresses. */
9252 else if (bitpos0 == bitpos1
9253 && (indirect_base0
9254 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9255 && (indirect_base1
9256 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9257 && ((offset0 == offset1)
9258 || (offset0 && offset1
9259 && operand_equal_p (offset0, offset1, 0))))
9261 if (indirect_base0)
9262 base0 = build_fold_addr_expr_loc (loc, base0);
9263 if (indirect_base1)
9264 base1 = build_fold_addr_expr_loc (loc, base1);
9265 return fold_build2_loc (loc, code, type, base0, base1);
9269 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9270 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9271 the resulting offset is smaller in absolute value than the
9272 original one. */
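/* Example: X + 3 < Y + 5 becomes X < Y + 2, since the combined constant 2
is smaller in magnitude than 5. */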
9273 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9274 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9275 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9276 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9277 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9278 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9279 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9281 tree const1 = TREE_OPERAND (arg0, 1);
9282 tree const2 = TREE_OPERAND (arg1, 1);
9283 tree variable1 = TREE_OPERAND (arg0, 0);
9284 tree variable2 = TREE_OPERAND (arg1, 0);
9285 tree cst;
9286 const char * const warnmsg = G_("assuming signed overflow does not "
9287 "occur when combining constants around "
9288 "a comparison");
9290 /* Put the constant on the side where it doesn't overflow and is
9291 of lower absolute value than before. */
9292 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9293 ? MINUS_EXPR : PLUS_EXPR,
9294 const2, const1);
9295 if (!TREE_OVERFLOW (cst)
9296 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9298 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9299 return fold_build2_loc (loc, code, type,
9300 variable1,
9301 fold_build2_loc (loc,
9302 TREE_CODE (arg1), TREE_TYPE (arg1),
9303 variable2, cst));
9306 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9307 ? MINUS_EXPR : PLUS_EXPR,
9308 const1, const2);
9309 if (!TREE_OVERFLOW (cst)
9310 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9312 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9313 return fold_build2_loc (loc, code, type,
9314 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9315 variable1, cst),
9316 variable2);
9320 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9321 signed arithmetic case. That form is created by the compiler
9322 often enough for folding it to be of value. One example is in
9323 computing loop trip counts after Operator Strength Reduction. */
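/* Example: X * 4 > 0 becomes X > 0, while X * -4 > 0 becomes X < 0
because the negative multiplier swaps the sense. */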
9324 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9325 && TREE_CODE (arg0) == MULT_EXPR
9326 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9327 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9328 && integer_zerop (arg1))
9330 tree const1 = TREE_OPERAND (arg0, 1);
9331 tree const2 = arg1; /* zero */
9332 tree variable1 = TREE_OPERAND (arg0, 0);
9333 enum tree_code cmp_code = code;
9335 /* Handle unfolded multiplication by zero. */
9336 if (integer_zerop (const1))
9337 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9339 fold_overflow_warning (("assuming signed overflow does not occur when "
9340 "eliminating multiplication in comparison "
9341 "with zero"),
9342 WARN_STRICT_OVERFLOW_COMPARISON);
9344 /* If const1 is negative we swap the sense of the comparison. */
9345 if (tree_int_cst_sgn (const1) < 0)
9346 cmp_code = swap_tree_comparison (cmp_code);
9348 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9351 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9352 if (tem)
9353 return tem;
9355 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9357 tree targ0 = strip_float_extensions (arg0);
9358 tree targ1 = strip_float_extensions (arg1);
9359 tree newtype = TREE_TYPE (targ0);
9361 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9362 newtype = TREE_TYPE (targ1);
9364 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9365 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9366 return fold_build2_loc (loc, code, type,
9367 fold_convert_loc (loc, newtype, targ0),
9368 fold_convert_loc (loc, newtype, targ1));
9370 /* (-a) CMP (-b) -> b CMP a */
9371 if (TREE_CODE (arg0) == NEGATE_EXPR
9372 && TREE_CODE (arg1) == NEGATE_EXPR)
9373 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9374 TREE_OPERAND (arg0, 0));
9376 if (TREE_CODE (arg1) == REAL_CST)
9378 REAL_VALUE_TYPE cst;
9379 cst = TREE_REAL_CST (arg1);
9381 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9382 if (TREE_CODE (arg0) == NEGATE_EXPR)
9383 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9384 TREE_OPERAND (arg0, 0),
9385 build_real (TREE_TYPE (arg1),
9386 real_value_negate (&cst)));
9388 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9389 /* a CMP (-0) -> a CMP 0 */
9390 if (REAL_VALUE_MINUS_ZERO (cst))
9391 return fold_build2_loc (loc, code, type, arg0,
9392 build_real (TREE_TYPE (arg1), dconst0));
9394 /* x != NaN is always true, other ops are always false. */
9395 if (REAL_VALUE_ISNAN (cst)
9396 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9398 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9399 return omit_one_operand_loc (loc, type, tem, arg0);
9402 /* Fold comparisons against infinity. */
9403 if (REAL_VALUE_ISINF (cst)
9404 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9406 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9407 if (tem != NULL_TREE)
9408 return tem;
9412 /* If this is a comparison of a real constant with a PLUS_EXPR
9413 or a MINUS_EXPR of a real constant, we can convert it into a
9414 comparison with a revised real constant, provided that no overflow
9415 occurs and unsafe_math_optimizations are enabled.
9416 if (flag_unsafe_math_optimizations
9417 && TREE_CODE (arg1) == REAL_CST
9418 && (TREE_CODE (arg0) == PLUS_EXPR
9419 || TREE_CODE (arg0) == MINUS_EXPR)
9420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9421 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9422 ? MINUS_EXPR : PLUS_EXPR,
9423 arg1, TREE_OPERAND (arg0, 1)))
9424 && !TREE_OVERFLOW (tem))
9425 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9427 /* Likewise, we can simplify a comparison of a real constant with
9428 a MINUS_EXPR whose first operand is also a real constant, i.e.
9429 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9430 floating-point types only if -fassociative-math is set. */
9431 if (flag_associative_math
9432 && TREE_CODE (arg1) == REAL_CST
9433 && TREE_CODE (arg0) == MINUS_EXPR
9434 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9435 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9436 arg1))
9437 && !TREE_OVERFLOW (tem))
9438 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9439 TREE_OPERAND (arg0, 1), tem);
9441 /* Fold comparisons against built-in math functions. */
9442 if (TREE_CODE (arg1) == REAL_CST
9443 && flag_unsafe_math_optimizations
9444 && ! flag_errno_math)
9446 enum built_in_function fcode = builtin_mathfn_code (arg0);
9448 if (fcode != END_BUILTINS)
9450 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9451 if (tem != NULL_TREE)
9452 return tem;
9457 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9458 && CONVERT_EXPR_P (arg0))
9460 /* If we are widening one operand of an integer comparison,
9461 see if the other operand is similarly being widened. Perhaps we
9462 can do the comparison in the narrower type. */
9463 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9464 if (tem)
9465 return tem;
9467 /* Or if we are changing signedness. */
9468 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9469 if (tem)
9470 return tem;
9473 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9474 constant, we can simplify it. */
9475 if (TREE_CODE (arg1) == INTEGER_CST
9476 && (TREE_CODE (arg0) == MIN_EXPR
9477 || TREE_CODE (arg0) == MAX_EXPR)
9478 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9480 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9481 if (tem)
9482 return tem;
9485 /* Simplify comparison of something with itself. (For IEEE
9486 floating-point, we can only do some of these simplifications.) */
9487 if (operand_equal_p (arg0, arg1, 0))
9489 switch (code)
9491 case EQ_EXPR:
9492 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9493 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9494 return constant_boolean_node (1, type);
9495 break;
9497 case GE_EXPR:
9498 case LE_EXPR:
9499 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9500 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9501 return constant_boolean_node (1, type);
9502 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9504 case NE_EXPR:
9505 /* For NE, we can only do this simplification if the operands are
9506 integral or we don't honor IEEE floating-point NaNs. */
9507 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9508 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9509 break;
9510 /* ... fall through ... */
9511 case GT_EXPR:
9512 case LT_EXPR:
9513 return constant_boolean_node (0, type);
9514 default:
9515 gcc_unreachable ();
9519 /* If we are comparing an expression that just has comparisons
9520 of two integer values, arithmetic expressions of those comparisons,
9521 and constants, we can simplify it. There are only three cases
9522 to check: the two values can either be equal, the first can be
9523 greater, or the second can be greater. Fold the expression for
9524 those three values. Since each value must be 0 or 1, we have
9525 eight possibilities, each of which corresponds to the constant 0
9526 or 1 or one of the six possible comparisons.
9528 This handles common cases like (a > b) == 0 but also handles
9529 expressions like ((x > y) - (y > x)) > 0, which supposedly
9530 occur in macroized code. */
9532 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9534 tree cval1 = 0, cval2 = 0;
9535 int save_p = 0;
9537 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9538 /* Don't handle degenerate cases here; they should already
9539 have been handled anyway. */
9540 && cval1 != 0 && cval2 != 0
9541 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9542 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9543 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9544 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9545 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9546 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9547 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9549 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9550 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9552 /* We can't just pass T to eval_subst in case cval1 or cval2
9553 was the same as ARG1. */
9555 tree high_result
9556 = fold_build2_loc (loc, code, type,
9557 eval_subst (loc, arg0, cval1, maxval,
9558 cval2, minval),
9559 arg1);
9560 tree equal_result
9561 = fold_build2_loc (loc, code, type,
9562 eval_subst (loc, arg0, cval1, maxval,
9563 cval2, maxval),
9564 arg1);
9565 tree low_result
9566 = fold_build2_loc (loc, code, type,
9567 eval_subst (loc, arg0, cval1, minval,
9568 cval2, maxval),
9569 arg1);
9571 /* All three of these results should be 0 or 1. Confirm they are.
9572 Then use those values to select the proper code to use. */
9574 if (TREE_CODE (high_result) == INTEGER_CST
9575 && TREE_CODE (equal_result) == INTEGER_CST
9576 && TREE_CODE (low_result) == INTEGER_CST)
9578 /* Make a 3-bit mask with the high-order bit being the
9579 value for `>', the next for `=', and the low for `<'. */
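/* Example: low_result == 1, equal_result == 1 and high_result == 0 give
mask 3, which selects LE_EXPR below. */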
9580 switch ((integer_onep (high_result) * 4)
9581 + (integer_onep (equal_result) * 2)
9582 + integer_onep (low_result))
9584 case 0:
9585 /* Always false. */
9586 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9587 case 1:
9588 code = LT_EXPR;
9589 break;
9590 case 2:
9591 code = EQ_EXPR;
9592 break;
9593 case 3:
9594 code = LE_EXPR;
9595 break;
9596 case 4:
9597 code = GT_EXPR;
9598 break;
9599 case 5:
9600 code = NE_EXPR;
9601 break;
9602 case 6:
9603 code = GE_EXPR;
9604 break;
9605 case 7:
9606 /* Always true. */
9607 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9610 if (save_p)
9612 tem = save_expr (build2 (code, type, cval1, cval2));
9613 SET_EXPR_LOCATION (tem, loc);
9614 return tem;
9616 return fold_build2_loc (loc, code, type, cval1, cval2);
9621 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9622 into a single range test. */
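/* Example: for unsigned X, X / 4 == 2 holds exactly for
8 <= X && X <= 11, a single range test. */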
9623 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9624 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9625 && TREE_CODE (arg1) == INTEGER_CST
9626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9627 && !integer_zerop (TREE_OPERAND (arg0, 1))
9628 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9629 && !TREE_OVERFLOW (arg1))
9631 tem = fold_div_compare (loc, code, type, arg0, arg1);
9632 if (tem != NULL_TREE)
9633 return tem;
9636 /* Fold ~X op ~Y as Y op X. */
9637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9638 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9640 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9641 return fold_build2_loc (loc, code, type,
9642 fold_convert_loc (loc, cmp_type,
9643 TREE_OPERAND (arg1, 0)),
9644 TREE_OPERAND (arg0, 0));
9647 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
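/* Example: ~X < 5 becomes X > ~5, i.e. X > -6. */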
9648 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9649 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9651 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9652 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9653 TREE_OPERAND (arg0, 0),
9654 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9655 fold_convert_loc (loc, cmp_type, arg1)));
9658 return NULL_TREE;
9662 /* Subroutine of fold_binary. Optimize complex multiplications of the
9663 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9664 argument EXPR represents the expression "z" of type TYPE. */
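/* Example: for z = a + b*i this yields (a*a + b*b) + 0*i. */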
9666 static tree
9667 fold_mult_zconjz (location_t loc, tree type, tree expr)
9669 tree itype = TREE_TYPE (type);
9670 tree rpart, ipart, tem;
9672 if (TREE_CODE (expr) == COMPLEX_EXPR)
9674 rpart = TREE_OPERAND (expr, 0);
9675 ipart = TREE_OPERAND (expr, 1);
9677 else if (TREE_CODE (expr) == COMPLEX_CST)
9679 rpart = TREE_REALPART (expr);
9680 ipart = TREE_IMAGPART (expr);
9682 else
9684 expr = save_expr (expr);
9685 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9686 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9689 rpart = save_expr (rpart);
9690 ipart = save_expr (ipart);
9691 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9692 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9693 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9694 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9695 build_zero_cst (itype));
9699 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9700 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9701 guarantees that P and N have the same least significant log2(M) bits.
9702 N is not otherwise constrained. In particular, N is not normalized to
9703 0 <= N < M as is common. In general, the precise value of P is unknown.
9704 M is chosen as large as possible such that constant N can be determined.
9706 Returns M and sets *RESIDUE to N.
9708 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9709 account. This is not always possible due to PR 35705.
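Illustration: if EXPR is &buf + 4 and buf is 16-byte aligned, the result
is M == 16 with *RESIDUE == 4, i.e. P == 4 (mod 16).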
9712 static unsigned HOST_WIDE_INT
9713 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9714 bool allow_func_align)
9716 enum tree_code code;
9718 *residue = 0;
9720 code = TREE_CODE (expr);
9721 if (code == ADDR_EXPR)
9723 unsigned int bitalign;
9724 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9725 *residue /= BITS_PER_UNIT;
9726 return bitalign / BITS_PER_UNIT;
9728 else if (code == POINTER_PLUS_EXPR)
9730 tree op0, op1;
9731 unsigned HOST_WIDE_INT modulus;
9732 enum tree_code inner_code;
9734 op0 = TREE_OPERAND (expr, 0);
9735 STRIP_NOPS (op0);
9736 modulus = get_pointer_modulus_and_residue (op0, residue,
9737 allow_func_align);
9739 op1 = TREE_OPERAND (expr, 1);
9740 STRIP_NOPS (op1);
9741 inner_code = TREE_CODE (op1);
9742 if (inner_code == INTEGER_CST)
9744 *residue += TREE_INT_CST_LOW (op1);
9745 return modulus;
9747 else if (inner_code == MULT_EXPR)
9749 op1 = TREE_OPERAND (op1, 1);
9750 if (TREE_CODE (op1) == INTEGER_CST)
9752 unsigned HOST_WIDE_INT align;
9754 /* Compute the greatest power-of-2 divisor of op1. */
9755 align = TREE_INT_CST_LOW (op1);
9756 align &= -align;
9758 /* If align is non-zero and less than *modulus, replace
9759 *modulus with align. If align is 0, then either op1 is 0
9760 or the greatest power-of-2 divisor of op1 doesn't fit in an
9761 unsigned HOST_WIDE_INT. In either case, no additional
9762 constraint is imposed. */
9763 if (align)
9764 modulus = MIN (modulus, align);
9766 return modulus;
9771 /* If we get here, we were unable to determine anything useful about the
9772 expression. */
9773 return 1;
9776 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9777 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9779 static bool
9780 vec_cst_ctor_to_array (tree arg, tree *elts)
9782 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9784 if (TREE_CODE (arg) == VECTOR_CST)
9786 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9787 elts[i] = VECTOR_CST_ELT (arg, i);
9789 else if (TREE_CODE (arg) == CONSTRUCTOR)
9791 constructor_elt *elt;
9793 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9794 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9795 return false;
9796 else
9797 elts[i] = elt->value;
9799 else
9800 return false;
9801 for (; i < nelts; i++)
9802 elts[i]
9803 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9804 return true;
9807 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9808 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9809 NULL_TREE otherwise. */
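/* Example: with four-element vectors, SEL == {0, 4, 1, 5} interleaves
the low halves of ARG0 and ARG1. */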
9811 static tree
9812 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9814 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9815 tree *elts;
9816 bool need_ctor = false;
9818 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9819 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9820 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9821 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9822 return NULL_TREE;
9824 elts = XALLOCAVEC (tree, nelts * 3);
9825 if (!vec_cst_ctor_to_array (arg0, elts)
9826 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9827 return NULL_TREE;
9829 for (i = 0; i < nelts; i++)
9831 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9832 need_ctor = true;
9833 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9836 if (need_ctor)
9838 vec<constructor_elt, va_gc> *v;
9839 vec_alloc (v, nelts);
9840 for (i = 0; i < nelts; i++)
9841 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9842 return build_constructor (type, v);
9844 else
9845 return build_vector (type, &elts[2 * nelts]);
9848 /* Try to fold a pointer difference of type TYPE between two address
9849 expressions of array references AREF0 and AREF1 using location LOC. Return a
9850 simplified expression for the difference or NULL_TREE. */
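/* Example: &a[4] - &a[1] folds to (4 - 1) * sizeof (a[0]), converted
to TYPE. */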
9852 static tree
9853 fold_addr_of_array_ref_difference (location_t loc, tree type,
9854 tree aref0, tree aref1)
9856 tree base0 = TREE_OPERAND (aref0, 0);
9857 tree base1 = TREE_OPERAND (aref1, 0);
9858 tree base_offset = build_int_cst (type, 0);
9860 /* If the bases are array references as well, recurse. If the bases
9861 are pointer indirections compute the difference of the pointers.
9862 If the bases are equal, we are set. */
9863 if ((TREE_CODE (base0) == ARRAY_REF
9864 && TREE_CODE (base1) == ARRAY_REF
9865 && (base_offset
9866 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9867 || (INDIRECT_REF_P (base0)
9868 && INDIRECT_REF_P (base1)
9869 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9870 TREE_OPERAND (base0, 0),
9871 TREE_OPERAND (base1, 0))))
9872 || operand_equal_p (base0, base1, 0))
9874 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9875 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9876 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9877 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9878 return fold_build2_loc (loc, PLUS_EXPR, type,
9879 base_offset,
9880 fold_build2_loc (loc, MULT_EXPR, type,
9881 diff, esz));
9883 return NULL_TREE;
9886 /* If the real or vector real constant CST of type TYPE has an exact
9887 inverse, return it, else return NULL_TREE. */
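/* Example: 2.0 has the exact inverse 0.5, which is returned; 3.0 has no
exactly representable inverse, so NULL_TREE results. */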
9889 static tree
9890 exact_inverse (tree type, tree cst)
9892 REAL_VALUE_TYPE r;
9893 tree unit_type, *elts;
9894 enum machine_mode mode;
9895 unsigned vec_nelts, i;
9897 switch (TREE_CODE (cst))
9899 case REAL_CST:
9900 r = TREE_REAL_CST (cst);
9902 if (exact_real_inverse (TYPE_MODE (type), &r))
9903 return build_real (type, r);
9905 return NULL_TREE;
9907 case VECTOR_CST:
9908 vec_nelts = VECTOR_CST_NELTS (cst);
9909 elts = XALLOCAVEC (tree, vec_nelts);
9910 unit_type = TREE_TYPE (type);
9911 mode = TYPE_MODE (unit_type);
9913 for (i = 0; i < vec_nelts; i++)
9915 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9916 if (!exact_real_inverse (mode, &r))
9917 return NULL_TREE;
9918 elts[i] = build_real (unit_type, r);
9921 return build_vector (type, elts);
9923 default:
9924 return NULL_TREE;
9928 /* Fold a binary expression of code CODE and type TYPE with operands
9929 OP0 and OP1. LOC is the location of the resulting expression.
9930 Return the folded expression if folding is successful. Otherwise,
9931 return NULL_TREE. */
9933 tree
9934 fold_binary_loc (location_t loc,
9935 enum tree_code code, tree type, tree op0, tree op1)
9937 enum tree_code_class kind = TREE_CODE_CLASS (code);
9938 tree arg0, arg1, tem;
9939 tree t1 = NULL_TREE;
9940 bool strict_overflow_p;
9941 unsigned int prec;
9943 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9944 && TREE_CODE_LENGTH (code) == 2
9945 && op0 != NULL_TREE
9946 && op1 != NULL_TREE);
9948 arg0 = op0;
9949 arg1 = op1;
9951 /* Strip any conversions that don't change the mode. This is
9952 safe for every expression, except for a comparison expression
9953 because its signedness is derived from its operands. So, in
9954 the latter case, only strip conversions that don't change the
9955 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9956 preserved.
9958 Note that this is done as an internal manipulation within the
9959 constant folder, in order to find the simplest representation
9960 of the arguments so that their form can be studied. In any
9961 case, the appropriate type conversions should be put back in
9962 the tree that will get out of the constant folder. */
9964 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9966 STRIP_SIGN_NOPS (arg0);
9967 STRIP_SIGN_NOPS (arg1);
9969 else
9971 STRIP_NOPS (arg0);
9972 STRIP_NOPS (arg1);
9975 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9976 constant but we can't do arithmetic on them. */
9977 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9978 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9979 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9980 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9981 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9982 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9983 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9985 if (kind == tcc_binary)
9987 /* Make sure type and arg0 have the same saturating flag. */
9988 gcc_assert (TYPE_SATURATING (type)
9989 == TYPE_SATURATING (TREE_TYPE (arg0)));
9990 tem = const_binop (code, arg0, arg1);
9992 else if (kind == tcc_comparison)
9993 tem = fold_relational_const (code, type, arg0, arg1);
9994 else
9995 tem = NULL_TREE;
9997 if (tem != NULL_TREE)
9999 if (TREE_TYPE (tem) != type)
10000 tem = fold_convert_loc (loc, type, tem);
10001 return tem;
10005 /* If this is a commutative operation, and ARG0 is a constant, move it
10006 to ARG1 to reduce the number of tests below. */
10007 if (commutative_tree_code (code)
10008 && tree_swap_operands_p (arg0, arg1, true))
10009 return fold_build2_loc (loc, code, type, op1, op0);
10011 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10013 First check for cases where an arithmetic operation is applied to a
10014 compound, conditional, or comparison operation. Push the arithmetic
10015 operation inside the compound or conditional to see if any folding
10016 can then be done. Convert comparison to conditional for this purpose.
10017 This also optimizes non-constant cases that used to be done in
10018 expand_expr.
10020 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10021 one of the operands is a comparison and the other is a comparison, a
10022 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10023 code below would make the expression more complex. Change it to a
10024 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10025 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10027 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10028 || code == EQ_EXPR || code == NE_EXPR)
10029 && TREE_CODE (type) != VECTOR_TYPE
10030 && ((truth_value_p (TREE_CODE (arg0))
10031 && (truth_value_p (TREE_CODE (arg1))
10032 || (TREE_CODE (arg1) == BIT_AND_EXPR
10033 && integer_onep (TREE_OPERAND (arg1, 1)))))
10034 || (truth_value_p (TREE_CODE (arg1))
10035 && (truth_value_p (TREE_CODE (arg0))
10036 || (TREE_CODE (arg0) == BIT_AND_EXPR
10037 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10039 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10040 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10041 : TRUTH_XOR_EXPR,
10042 boolean_type_node,
10043 fold_convert_loc (loc, boolean_type_node, arg0),
10044 fold_convert_loc (loc, boolean_type_node, arg1));
10046 if (code == EQ_EXPR)
10047 tem = invert_truthvalue_loc (loc, tem);
10049 return fold_convert_loc (loc, type, tem);
10052 if (TREE_CODE_CLASS (code) == tcc_binary
10053 || TREE_CODE_CLASS (code) == tcc_comparison)
10055 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10057 tem = fold_build2_loc (loc, code, type,
10058 fold_convert_loc (loc, TREE_TYPE (op0),
10059 TREE_OPERAND (arg0, 1)), op1);
10060 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10061 tem);
10063 if (TREE_CODE (arg1) == COMPOUND_EXPR
10064 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10066 tem = fold_build2_loc (loc, code, type, op0,
10067 fold_convert_loc (loc, TREE_TYPE (op1),
10068 TREE_OPERAND (arg1, 1)));
10069 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10070 tem);
10073 if (TREE_CODE (arg0) == COND_EXPR
10074 || TREE_CODE (arg0) == VEC_COND_EXPR
10075 || COMPARISON_CLASS_P (arg0))
10077 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10078 arg0, arg1,
10079 /*cond_first_p=*/1);
10080 if (tem != NULL_TREE)
10081 return tem;
10084 if (TREE_CODE (arg1) == COND_EXPR
10085 || TREE_CODE (arg1) == VEC_COND_EXPR
10086 || COMPARISON_CLASS_P (arg1))
10088 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10089 arg1, arg0,
10090 /*cond_first_p=*/0);
10091 if (tem != NULL_TREE)
10092 return tem;
10096 switch (code)
10098 case MEM_REF:
10099 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10100 if (TREE_CODE (arg0) == ADDR_EXPR
10101 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10103 tree iref = TREE_OPERAND (arg0, 0);
10104 return fold_build2 (MEM_REF, type,
10105 TREE_OPERAND (iref, 0),
10106 int_const_binop (PLUS_EXPR, arg1,
10107 TREE_OPERAND (iref, 1)));
10110 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10111 if (TREE_CODE (arg0) == ADDR_EXPR
10112 && handled_component_p (TREE_OPERAND (arg0, 0)))
10114 tree base;
10115 HOST_WIDE_INT coffset;
10116 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10117 &coffset);
10118 if (!base)
10119 return NULL_TREE;
10120 return fold_build2 (MEM_REF, type,
10121 build_fold_addr_expr (base),
10122 int_const_binop (PLUS_EXPR, arg1,
10123 size_int (coffset)));
10126 return NULL_TREE;
10128 case POINTER_PLUS_EXPR:
10129 /* 0 +p index -> (type)index */
10130 if (integer_zerop (arg0))
10131 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10133 /* PTR +p 0 -> PTR */
10134 if (integer_zerop (arg1))
10135 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10137 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10138 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10139 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10140 return fold_convert_loc (loc, type,
10141 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10142 fold_convert_loc (loc, sizetype,
10143 arg1),
10144 fold_convert_loc (loc, sizetype,
10145 arg0)));
10147 /* (PTR +p B) +p A -> PTR +p (B + A) */
10148 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10150 tree inner;
10151 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10152 tree arg00 = TREE_OPERAND (arg0, 0);
10153 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10154 arg01, fold_convert_loc (loc, sizetype, arg1));
10155 return fold_convert_loc (loc, type,
10156 fold_build_pointer_plus_loc (loc,
10157 arg00, inner));
10160 /* PTR_CST +p CST -> CST1 */
10161 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10162 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10163 fold_convert_loc (loc, type, arg1));
10165 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10166 of the array. The loop optimizer sometimes produces this type of
10167 expression. */
10168 if (TREE_CODE (arg0) == ADDR_EXPR)
10170 tem = try_move_mult_to_index (loc, arg0,
10171 fold_convert_loc (loc,
10172 ssizetype, arg1));
10173 if (tem)
10174 return fold_convert_loc (loc, type, tem);
10177 return NULL_TREE;
10179 case PLUS_EXPR:
10180 /* A + (-B) -> A - B */
10181 if (TREE_CODE (arg1) == NEGATE_EXPR)
10182 return fold_build2_loc (loc, MINUS_EXPR, type,
10183 fold_convert_loc (loc, type, arg0),
10184 fold_convert_loc (loc, type,
10185 TREE_OPERAND (arg1, 0)));
10186 /* (-A) + B -> B - A */
10187 if (TREE_CODE (arg0) == NEGATE_EXPR
10188 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10189 return fold_build2_loc (loc, MINUS_EXPR, type,
10190 fold_convert_loc (loc, type, arg1),
10191 fold_convert_loc (loc, type,
10192 TREE_OPERAND (arg0, 0)));
10194 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10196 /* Convert ~A + 1 to -A. */
10197 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10198 && integer_onep (arg1))
10199 return fold_build1_loc (loc, NEGATE_EXPR, type,
10200 fold_convert_loc (loc, type,
10201 TREE_OPERAND (arg0, 0)));
10203 /* ~X + X is -1. */
10204 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10205 && !TYPE_OVERFLOW_TRAPS (type))
10207 tree tem = TREE_OPERAND (arg0, 0);
10209 STRIP_NOPS (tem);
10210 if (operand_equal_p (tem, arg1, 0))
10212 t1 = build_all_ones_cst (type);
10213 return omit_one_operand_loc (loc, type, t1, arg1);
10217 /* X + ~X is -1. */
10218 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10219 && !TYPE_OVERFLOW_TRAPS (type))
10221 tree tem = TREE_OPERAND (arg1, 0);
10223 STRIP_NOPS (tem);
10224 if (operand_equal_p (arg0, tem, 0))
10226 t1 = build_all_ones_cst (type);
10227 return omit_one_operand_loc (loc, type, t1, arg0);
10231 /* X + (X / CST) * -CST is X % CST. */
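/* Check with X == 7, CST == 4: 7 + (7 / 4) * -4 == 7 - 4 == 3 == 7 % 4. */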
10232 if (TREE_CODE (arg1) == MULT_EXPR
10233 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10234 && operand_equal_p (arg0,
10235 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10237 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10238 tree cst1 = TREE_OPERAND (arg1, 1);
10239 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10240 cst1, cst0);
10241 if (sum && integer_zerop (sum))
10242 return fold_convert_loc (loc, type,
10243 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10244 TREE_TYPE (arg0), arg0,
10245 cst0));
10249 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10250 one. Make sure the type is not saturating and has the signedness of
10251 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10252 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10253 if ((TREE_CODE (arg0) == MULT_EXPR
10254 || TREE_CODE (arg1) == MULT_EXPR)
10255 && !TYPE_SATURATING (type)
10256 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10257 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10258 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10260 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10261 if (tem)
10262 return tem;
10265 if (! FLOAT_TYPE_P (type))
10267 if (integer_zerop (arg1))
10268 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10270 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10271 with a constant, and the two constants have no bits in common,
10272 we should treat this as a BIT_IOR_EXPR since this may produce more
10273 simplifications. */
10274 if (TREE_CODE (arg0) == BIT_AND_EXPR
10275 && TREE_CODE (arg1) == BIT_AND_EXPR
10276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10277 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10278 && integer_zerop (const_binop (BIT_AND_EXPR,
10279 TREE_OPERAND (arg0, 1),
10280 TREE_OPERAND (arg1, 1))))
10282 code = BIT_IOR_EXPR;
10283 goto bit_ior;
10286 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10287 (plus (plus (mult) (mult)) (foo)) so that we can
10288 take advantage of the factoring cases below. */
10289 if (TYPE_OVERFLOW_WRAPS (type)
10290 && (((TREE_CODE (arg0) == PLUS_EXPR
10291 || TREE_CODE (arg0) == MINUS_EXPR)
10292 && TREE_CODE (arg1) == MULT_EXPR)
10293 || ((TREE_CODE (arg1) == PLUS_EXPR
10294 || TREE_CODE (arg1) == MINUS_EXPR)
10295 && TREE_CODE (arg0) == MULT_EXPR)))
10297 tree parg0, parg1, parg, marg;
10298 enum tree_code pcode;
10300 if (TREE_CODE (arg1) == MULT_EXPR)
10301 parg = arg0, marg = arg1;
10302 else
10303 parg = arg1, marg = arg0;
10304 pcode = TREE_CODE (parg);
10305 parg0 = TREE_OPERAND (parg, 0);
10306 parg1 = TREE_OPERAND (parg, 1);
10307 STRIP_NOPS (parg0);
10308 STRIP_NOPS (parg1);
10310 if (TREE_CODE (parg0) == MULT_EXPR
10311 && TREE_CODE (parg1) != MULT_EXPR)
10312 return fold_build2_loc (loc, pcode, type,
10313 fold_build2_loc (loc, PLUS_EXPR, type,
10314 fold_convert_loc (loc, type,
10315 parg0),
10316 fold_convert_loc (loc, type,
10317 marg)),
10318 fold_convert_loc (loc, type, parg1));
10319 if (TREE_CODE (parg0) != MULT_EXPR
10320 && TREE_CODE (parg1) == MULT_EXPR)
10321 return
10322 fold_build2_loc (loc, PLUS_EXPR, type,
10323 fold_convert_loc (loc, type, parg0),
10324 fold_build2_loc (loc, pcode, type,
10325 fold_convert_loc (loc, type, marg),
10326 fold_convert_loc (loc, type,
10327 parg1)));
10330 else
10332 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10333 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10334 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10336 /* Likewise if the operands are reversed. */
10337 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10338 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10340 /* Convert X + -C into X - C. */
10341 if (TREE_CODE (arg1) == REAL_CST
10342 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10344 tem = fold_negate_const (arg1, type);
10345 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10346 return fold_build2_loc (loc, MINUS_EXPR, type,
10347 fold_convert_loc (loc, type, arg0),
10348 fold_convert_loc (loc, type, tem));
10351 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10352 to __complex__ ( x, y ). This is not the same for SNaNs or
10353 if signed zeros are involved. */
10354 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10355 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10356 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10358 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10359 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10360 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10361 bool arg0rz = false, arg0iz = false;
10362 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10363 || (arg0i && (arg0iz = real_zerop (arg0i))))
10365 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10366 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10367 if (arg0rz && arg1i && real_zerop (arg1i))
10369 tree rp = arg1r ? arg1r
10370 : build1 (REALPART_EXPR, rtype, arg1);
10371 tree ip = arg0i ? arg0i
10372 : build1 (IMAGPART_EXPR, rtype, arg0);
10373 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10375 else if (arg0iz && arg1r && real_zerop (arg1r))
10377 tree rp = arg0r ? arg0r
10378 : build1 (REALPART_EXPR, rtype, arg0);
10379 tree ip = arg1i ? arg1i
10380 : build1 (IMAGPART_EXPR, rtype, arg1);
10381 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10386 if (flag_unsafe_math_optimizations
10387 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10388 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10389 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10390 return tem;
10392 /* Convert x+x into x*2.0. */
10393 if (operand_equal_p (arg0, arg1, 0)
10394 && SCALAR_FLOAT_TYPE_P (type))
10395 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10396 build_real (type, dconst2));
10398 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10399 We associate floats only if the user has specified
10400 -fassociative-math. */
10401 if (flag_associative_math
10402 && TREE_CODE (arg1) == PLUS_EXPR
10403 && TREE_CODE (arg0) != MULT_EXPR)
10405 tree tree10 = TREE_OPERAND (arg1, 0);
10406 tree tree11 = TREE_OPERAND (arg1, 1);
10407 if (TREE_CODE (tree11) == MULT_EXPR
10408 && TREE_CODE (tree10) == MULT_EXPR)
10410 tree tree0;
10411 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10412 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10415 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10416 We associate floats only if the user has specified
10417 -fassociative-math. */
10418 if (flag_associative_math
10419 && TREE_CODE (arg0) == PLUS_EXPR
10420 && TREE_CODE (arg1) != MULT_EXPR)
10422 tree tree00 = TREE_OPERAND (arg0, 0);
10423 tree tree01 = TREE_OPERAND (arg0, 1);
10424 if (TREE_CODE (tree01) == MULT_EXPR
10425 && TREE_CODE (tree00) == MULT_EXPR)
10427 tree tree0;
10428 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10429 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10434 bit_rotate:
10435 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10436 is a rotate of A by C1 bits. */
10437 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10438 is a rotate of A by B bits. */
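/* Example: for unsigned 32-bit A, (A << 3) + (A >> 29) is A rotated
left by 3 bits. */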
10440 enum tree_code code0, code1;
10441 tree rtype;
10442 code0 = TREE_CODE (arg0);
10443 code1 = TREE_CODE (arg1);
10444 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10445 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10446 && operand_equal_p (TREE_OPERAND (arg0, 0),
10447 TREE_OPERAND (arg1, 0), 0)
10448 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10449 TYPE_UNSIGNED (rtype))
10450 /* Only create rotates in complete modes. Other cases are not
10451 expanded properly. */
10452 && (element_precision (rtype)
10453 == element_precision (TYPE_MODE (rtype))))
10455 tree tree01, tree11;
10456 enum tree_code code01, code11;
10458 tree01 = TREE_OPERAND (arg0, 1);
10459 tree11 = TREE_OPERAND (arg1, 1);
10460 STRIP_NOPS (tree01);
10461 STRIP_NOPS (tree11);
10462 code01 = TREE_CODE (tree01);
10463 code11 = TREE_CODE (tree11);
10464 if (code01 == INTEGER_CST
10465 && code11 == INTEGER_CST
10466 && TREE_INT_CST_HIGH (tree01) == 0
10467 && TREE_INT_CST_HIGH (tree11) == 0
10468 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10469 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10471 tem = build2_loc (loc, LROTATE_EXPR,
10472 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10473 TREE_OPERAND (arg0, 0),
10474 code0 == LSHIFT_EXPR ? tree01 : tree11);
10475 return fold_convert_loc (loc, type, tem);
10477 else if (code11 == MINUS_EXPR)
10479 tree tree110, tree111;
10480 tree110 = TREE_OPERAND (tree11, 0);
10481 tree111 = TREE_OPERAND (tree11, 1);
10482 STRIP_NOPS (tree110);
10483 STRIP_NOPS (tree111);
10484 if (TREE_CODE (tree110) == INTEGER_CST
10485 && 0 == compare_tree_int (tree110,
10486 element_precision
10487 (TREE_TYPE (TREE_OPERAND
10488 (arg0, 0))))
10489 && operand_equal_p (tree01, tree111, 0))
10490 return
10491 fold_convert_loc (loc, type,
10492 build2 ((code0 == LSHIFT_EXPR
10493 ? LROTATE_EXPR
10494 : RROTATE_EXPR),
10495 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10496 TREE_OPERAND (arg0, 0), tree01));
10498 else if (code01 == MINUS_EXPR)
10500 tree tree010, tree011;
10501 tree010 = TREE_OPERAND (tree01, 0);
10502 tree011 = TREE_OPERAND (tree01, 1);
10503 STRIP_NOPS (tree010);
10504 STRIP_NOPS (tree011);
10505 if (TREE_CODE (tree010) == INTEGER_CST
10506 && 0 == compare_tree_int (tree010,
10507 element_precision
10508 (TREE_TYPE (TREE_OPERAND
10509 (arg0, 0))))
10510 && operand_equal_p (tree11, tree011, 0))
10511 return fold_convert_loc
10512 (loc, type,
10513 build2 ((code0 != LSHIFT_EXPR
10514 ? LROTATE_EXPR
10515 : RROTATE_EXPR),
10516 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10517 TREE_OPERAND (arg0, 0), tree11));
10522 associate:
10523 /* In most languages, we can't associate operations on floats through
10524 parentheses. Rather than remember where the parentheses were, we
10525 don't associate floats at all, unless the user has specified
10526 -fassociative-math.
10527 And we need to make sure the type is not saturating. */
10529 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10530 && !TYPE_SATURATING (type))
10532 tree var0, con0, lit0, minus_lit0;
10533 tree var1, con1, lit1, minus_lit1;
10534 tree atype = type;
10535 bool ok = true;
10537 /* Split both trees into variables, constants, and literals. Then
10538 associate each group together, the constants with literals,
10539 then the result with variables. This increases the chances of
10540 literals being recombined later and of generating relocatable
10541 expressions for the sum of a constant and literal. */
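/* Example: (X + 3) + (Y + 5) splits into variables X, Y and literals
3, 5; the literals combine to 8, giving (X + Y) + 8. */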
10542 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10543 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10544 code == MINUS_EXPR);
10546 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10547 if (code == MINUS_EXPR)
10548 code = PLUS_EXPR;
10550 /* With undefined overflow, prefer doing association in a type
10551 which wraps on overflow, if that is one of the operand types. */
10552 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10553 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10555 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10556 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10557 atype = TREE_TYPE (arg0);
10558 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10559 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10560 atype = TREE_TYPE (arg1);
10561 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10564 /* With undefined overflow we can only associate constants with one
10565 variable, and constants whose association doesn't overflow. */
10566 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10567 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10569 if (var0 && var1)
10571 tree tmp0 = var0;
10572 tree tmp1 = var1;
10574 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10575 tmp0 = TREE_OPERAND (tmp0, 0);
10576 if (CONVERT_EXPR_P (tmp0)
10577 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10578 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10579 <= TYPE_PRECISION (atype)))
10580 tmp0 = TREE_OPERAND (tmp0, 0);
10581 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10582 tmp1 = TREE_OPERAND (tmp1, 0);
10583 if (CONVERT_EXPR_P (tmp1)
10584 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10586 <= TYPE_PRECISION (atype)))
10587 tmp1 = TREE_OPERAND (tmp1, 0);
10588 /* The only case we can still associate with two variables
10589 is if they are the same, modulo negation and bit-pattern
10590 preserving conversions. */
10591 if (!operand_equal_p (tmp0, tmp1, 0))
10592 ok = false;
10596 /* Only do something if we found more than two objects. Otherwise,
10597 nothing has changed and we risk infinite recursion. */
10598 if (ok
10599 && (2 < ((var0 != 0) + (var1 != 0)
10600 + (con0 != 0) + (con1 != 0)
10601 + (lit0 != 0) + (lit1 != 0)
10602 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10604 bool any_overflows = false;
10605 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10606 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10607 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10608 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10609 var0 = associate_trees (loc, var0, var1, code, atype);
10610 con0 = associate_trees (loc, con0, con1, code, atype);
10611 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10612 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10613 code, atype);
10615 /* Preserve the MINUS_EXPR if the negative part of the literal is
10616 greater than the positive part. Otherwise, the multiplicative
10617 folding code (i.e. extract_muldiv) may be fooled in case
10618 unsigned constants are subtracted, as in the following
10619 example: ((X*2 + 4) - 8U)/2. */
10620 if (minus_lit0 && lit0)
10622 if (TREE_CODE (lit0) == INTEGER_CST
10623 && TREE_CODE (minus_lit0) == INTEGER_CST
10624 && tree_int_cst_lt (lit0, minus_lit0))
10626 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10627 MINUS_EXPR, atype);
10628 lit0 = 0;
10630 else
10632 lit0 = associate_trees (loc, lit0, minus_lit0,
10633 MINUS_EXPR, atype);
10634 minus_lit0 = 0;
10638 /* Don't introduce overflows through reassociation. */
10639 if (!any_overflows
10640 && ((lit0 && TREE_OVERFLOW (lit0))
10641 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10642 return NULL_TREE;
10644 if (minus_lit0)
10646 if (con0 == 0)
10647 return
10648 fold_convert_loc (loc, type,
10649 associate_trees (loc, var0, minus_lit0,
10650 MINUS_EXPR, atype));
10651 else
10653 con0 = associate_trees (loc, con0, minus_lit0,
10654 MINUS_EXPR, atype);
10655 return
10656 fold_convert_loc (loc, type,
10657 associate_trees (loc, var0, con0,
10658 PLUS_EXPR, atype));
10662 con0 = associate_trees (loc, con0, lit0, code, atype);
10663 return
10664 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10665 code, atype));
10669 return NULL_TREE;
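/* Illustrative sketch (hypothetical source-level example, not from the
   original file): for a wrapping type such as unsigned int, the
   reassociation above lets

     (x + 3u) + (y + 5u)   // var0 = x, var1 = y, lit0 = 3, lit1 = 5

   be folded to (x + y) + 8u; with undefined overflow the same fold is
   only attempted when it provably introduces no new overflow.  */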
10671 case MINUS_EXPR:
10672 /* Pointer simplifications for subtraction, simple reassociations. */
10673 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10675 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10676 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10677 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10679 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10680 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10681 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10682 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10683 return fold_build2_loc (loc, PLUS_EXPR, type,
10684 fold_build2_loc (loc, MINUS_EXPR, type,
10685 arg00, arg10),
10686 fold_build2_loc (loc, MINUS_EXPR, type,
10687 arg01, arg11));
10689 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10690 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10692 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10693 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10694 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10695 fold_convert_loc (loc, type, arg1));
10696 if (tmp)
10697 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10700 /* A - (-B) -> A + B */
10701 if (TREE_CODE (arg1) == NEGATE_EXPR)
10702 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10703 fold_convert_loc (loc, type,
10704 TREE_OPERAND (arg1, 0)));
10705 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10706 if (TREE_CODE (arg0) == NEGATE_EXPR
10707 && negate_expr_p (arg1)
10708 && reorder_operands_p (arg0, arg1))
10709 return fold_build2_loc (loc, MINUS_EXPR, type,
10710 fold_convert_loc (loc, type,
10711 negate_expr (arg1)),
10712 fold_convert_loc (loc, type,
10713 TREE_OPERAND (arg0, 0)));
10714 /* Convert -A - 1 to ~A. */
10715 if (TREE_CODE (type) != COMPLEX_TYPE
10716 && TREE_CODE (arg0) == NEGATE_EXPR
10717 && integer_onep (arg1)
10718 && !TYPE_OVERFLOW_TRAPS (type))
10719 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10720 fold_convert_loc (loc, type,
10721 TREE_OPERAND (arg0, 0)));
10723 /* Convert -1 - A to ~A. */
10724 if (TREE_CODE (type) != COMPLEX_TYPE
10725 && integer_all_onesp (arg0))
10726 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
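/* A small sanity check for the two folds above (hypothetical example):
   in two's complement ~a == -a - 1, so

     int a = 42;
     // -a - 1 == ~a  and  -1 - a == ~a   (each side is -43)
*/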
10729 /* X - (X / Y) * Y is X % Y. */
10730 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10731 && TREE_CODE (arg1) == MULT_EXPR
10732 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10733 && operand_equal_p (arg0,
10734 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10735 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10736 TREE_OPERAND (arg1, 1), 0))
10737 return
10738 fold_convert_loc (loc, type,
10739 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10740 arg0, TREE_OPERAND (arg1, 1)));
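/* Hedged illustration (hypothetical example): C's truncating division
   satisfies x == (x / y) * y + x % y for nonzero y, hence

     int x = 17, y = 5;
     // x - (x / y) * y == x % y   (both sides are 2)

   which is exactly the identity this fold relies on.  */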
10742 if (! FLOAT_TYPE_P (type))
10744 if (integer_zerop (arg0))
10745 return negate_expr (fold_convert_loc (loc, type, arg1));
10746 if (integer_zerop (arg1))
10747 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10749 /* Fold A - (A & B) into ~B & A. */
10750 if (!TREE_SIDE_EFFECTS (arg0)
10751 && TREE_CODE (arg1) == BIT_AND_EXPR)
10753 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10755 tree arg10 = fold_convert_loc (loc, type,
10756 TREE_OPERAND (arg1, 0));
10757 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10758 fold_build1_loc (loc, BIT_NOT_EXPR,
10759 type, arg10),
10760 fold_convert_loc (loc, type, arg0));
10762 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10764 tree arg11 = fold_convert_loc (loc,
10765 type, TREE_OPERAND (arg1, 1));
10766 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10767 fold_build1_loc (loc, BIT_NOT_EXPR,
10768 type, arg11),
10769 fold_convert_loc (loc, type, arg0));
10773 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10774 any power of 2 minus 1. */
10775 if (TREE_CODE (arg0) == BIT_AND_EXPR
10776 && TREE_CODE (arg1) == BIT_AND_EXPR
10777 && operand_equal_p (TREE_OPERAND (arg0, 0),
10778 TREE_OPERAND (arg1, 0), 0))
10780 tree mask0 = TREE_OPERAND (arg0, 1);
10781 tree mask1 = TREE_OPERAND (arg1, 1);
10782 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10784 if (operand_equal_p (tem, mask1, 0))
10786 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10787 TREE_OPERAND (arg0, 0), mask1);
10788 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
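/* Worked example for the fold above (hypothetical values), with the
   mask b equal to a power of 2 minus 1:

     unsigned a = 13, b = 3;
     // (a & ~b) - (a & b) == 12 - 1 == 11
     // (a ^ b) - b        == 14 - 3 == 11

   XOR with b inserts the absent low bits and clears the present ones,
   so subtracting b afterwards restores the same difference.  */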
10793 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10794 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10795 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10797 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10798 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10799 (-ARG1 + ARG0) reduces to -ARG1. */
10800 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10801 return negate_expr (fold_convert_loc (loc, type, arg1));
10803 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10804 __complex__ ( x, -y ). This is not the same for SNaNs or if
10805 signed zeros are involved. */
10806 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10807 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10808 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10810 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10811 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10812 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10813 bool arg0rz = false, arg0iz = false;
10814 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10815 || (arg0i && (arg0iz = real_zerop (arg0i))))
10817 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10818 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10819 if (arg0rz && arg1i && real_zerop (arg1i))
10821 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10822 arg1r ? arg1r
10823 : build1 (REALPART_EXPR, rtype, arg1));
10824 tree ip = arg0i ? arg0i
10825 : build1 (IMAGPART_EXPR, rtype, arg0);
10826 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10828 else if (arg0iz && arg1r && real_zerop (arg1r))
10830 tree rp = arg0r ? arg0r
10831 : build1 (REALPART_EXPR, rtype, arg0);
10832 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10833 arg1i ? arg1i
10834 : build1 (IMAGPART_EXPR, rtype, arg1));
10835 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10840 /* Fold &x - &x. This can happen from &x.foo - &x.
10841 This is unsafe for certain floats even in non-IEEE formats.
10842 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10843 Also note that operand_equal_p is always false if an operand
10844 is volatile. */
10846 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10847 && operand_equal_p (arg0, arg1, 0))
10848 return build_zero_cst (type);
10850 /* A - B -> A + (-B) if B is easily negatable. */
10851 if (negate_expr_p (arg1)
10852 && ((FLOAT_TYPE_P (type)
10853 /* Avoid this transformation if B is a positive REAL_CST. */
10854 && (TREE_CODE (arg1) != REAL_CST
10855 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10856 || INTEGRAL_TYPE_P (type)))
10857 return fold_build2_loc (loc, PLUS_EXPR, type,
10858 fold_convert_loc (loc, type, arg0),
10859 fold_convert_loc (loc, type,
10860 negate_expr (arg1)));
10862 /* Try folding difference of addresses. */
10864 HOST_WIDE_INT diff;
10866 if ((TREE_CODE (arg0) == ADDR_EXPR
10867 || TREE_CODE (arg1) == ADDR_EXPR)
10868 && ptr_difference_const (arg0, arg1, &diff))
10869 return build_int_cst_type (type, diff);
10872 /* Fold &a[i] - &a[j] to i-j. */
10873 if (TREE_CODE (arg0) == ADDR_EXPR
10874 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10875 && TREE_CODE (arg1) == ADDR_EXPR
10876 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10878 tree tem = fold_addr_of_array_ref_difference (loc, type,
10879 TREE_OPERAND (arg0, 0),
10880 TREE_OPERAND (arg1, 0));
10881 if (tem)
10882 return tem;
10885 if (FLOAT_TYPE_P (type)
10886 && flag_unsafe_math_optimizations
10887 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10888 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10889 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10890 return tem;
10892 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10893 one. Make sure the type is not saturating and has the signedness of
10894 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10895 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10896 if ((TREE_CODE (arg0) == MULT_EXPR
10897 || TREE_CODE (arg1) == MULT_EXPR)
10898 && !TYPE_SATURATING (type)
10899 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10900 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10901 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10903 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10904 if (tem)
10905 return tem;
10908 goto associate;
10910 case MULT_EXPR:
10911 /* (-A) * (-B) -> A * B */
10912 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10913 return fold_build2_loc (loc, MULT_EXPR, type,
10914 fold_convert_loc (loc, type,
10915 TREE_OPERAND (arg0, 0)),
10916 fold_convert_loc (loc, type,
10917 negate_expr (arg1)));
10918 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10919 return fold_build2_loc (loc, MULT_EXPR, type,
10920 fold_convert_loc (loc, type,
10921 negate_expr (arg0)),
10922 fold_convert_loc (loc, type,
10923 TREE_OPERAND (arg1, 0)));
10925 if (! FLOAT_TYPE_P (type))
10927 if (integer_zerop (arg1))
10928 return omit_one_operand_loc (loc, type, arg1, arg0);
10929 if (integer_onep (arg1))
10930 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10931 /* Transform x * -1 into -x. Make sure to do the negation
10932 on the original operand with conversions not stripped
10933 because we can only strip non-sign-changing conversions. */
10934 if (integer_minus_onep (arg1))
10935 return fold_convert_loc (loc, type, negate_expr (op0));
10936 /* Transform x * -C into -x * C if x is easily negatable. */
10937 if (TREE_CODE (arg1) == INTEGER_CST
10938 && tree_int_cst_sgn (arg1) == -1
10939 && negate_expr_p (arg0)
10940 && (tem = negate_expr (arg1)) != arg1
10941 && !TREE_OVERFLOW (tem))
10942 return fold_build2_loc (loc, MULT_EXPR, type,
10943 fold_convert_loc (loc, type,
10944 negate_expr (arg0)),
10945 tem);
10947 /* (a * (1 << b)) is (a << b) */
10948 if (TREE_CODE (arg1) == LSHIFT_EXPR
10949 && integer_onep (TREE_OPERAND (arg1, 0)))
10950 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10951 TREE_OPERAND (arg1, 1));
10952 if (TREE_CODE (arg0) == LSHIFT_EXPR
10953 && integer_onep (TREE_OPERAND (arg0, 0)))
10954 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10955 TREE_OPERAND (arg0, 1));
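/* Hypothetical example of the two shift folds above:

     int a = 5, b = 3;
     // a * (1 << b) == a << b   (both are 40)
*/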
10957 /* (A + A) * C -> A * 2 * C */
10958 if (TREE_CODE (arg0) == PLUS_EXPR
10959 && TREE_CODE (arg1) == INTEGER_CST
10960 && operand_equal_p (TREE_OPERAND (arg0, 0),
10961 TREE_OPERAND (arg0, 1), 0))
10962 return fold_build2_loc (loc, MULT_EXPR, type,
10963 omit_one_operand_loc (loc, type,
10964 TREE_OPERAND (arg0, 0),
10965 TREE_OPERAND (arg0, 1)),
10966 fold_build2_loc (loc, MULT_EXPR, type,
10967 build_int_cst (type, 2), arg1));
10969 strict_overflow_p = false;
10970 if (TREE_CODE (arg1) == INTEGER_CST
10971 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10972 &strict_overflow_p)))
10974 if (strict_overflow_p)
10975 fold_overflow_warning (("assuming signed overflow does not "
10976 "occur when simplifying "
10977 "multiplication"),
10978 WARN_STRICT_OVERFLOW_MISC);
10979 return fold_convert_loc (loc, type, tem);
10982 /* Optimize z * conj(z) for integer complex numbers. */
10983 if (TREE_CODE (arg0) == CONJ_EXPR
10984 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10985 return fold_mult_zconjz (loc, type, arg1);
10986 if (TREE_CODE (arg1) == CONJ_EXPR
10987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10988 return fold_mult_zconjz (loc, type, arg0);
10990 else
10992 /* Maybe fold x * 0 to 0. The expressions aren't the same
10993 when x is NaN, since x * 0 is also NaN. Nor are they the
10994 same in modes with signed zeros, since multiplying a
10995 negative value by 0 gives -0, not +0. */
10996 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10997 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10998 && real_zerop (arg1))
10999 return omit_one_operand_loc (loc, type, arg1, arg0);
11000 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11001 Likewise for complex arithmetic with signed zeros. */
11002 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11003 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11004 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11005 && real_onep (arg1))
11006 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11008 /* Transform x * -1.0 into -x. */
11009 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11010 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11011 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11012 && real_minus_onep (arg1))
11013 return fold_convert_loc (loc, type, negate_expr (arg0));
11015 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11016 the result for floating point types due to rounding, so it is applied
11017 only if -fassociative-math is specified. */
11018 if (flag_associative_math
11019 && TREE_CODE (arg0) == RDIV_EXPR
11020 && TREE_CODE (arg1) == REAL_CST
11021 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11023 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11024 arg1);
11025 if (tem)
11026 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11027 TREE_OPERAND (arg0, 1));
11030 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11031 if (operand_equal_p (arg0, arg1, 0))
11033 tree tem = fold_strip_sign_ops (arg0);
11034 if (tem != NULL_TREE)
11036 tem = fold_convert_loc (loc, type, tem);
11037 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11041 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11042 This is not the same for NaNs or if signed zeros are
11043 involved. */
11044 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11045 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11046 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11047 && TREE_CODE (arg1) == COMPLEX_CST
11048 && real_zerop (TREE_REALPART (arg1)))
11050 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11051 if (real_onep (TREE_IMAGPART (arg1)))
11052 return
11053 fold_build2_loc (loc, COMPLEX_EXPR, type,
11054 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11055 rtype, arg0)),
11056 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11057 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11058 return
11059 fold_build2_loc (loc, COMPLEX_EXPR, type,
11060 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11061 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11062 rtype, arg0)));
11065 /* Optimize z * conj(z) for floating point complex numbers.
11066 Guarded by flag_unsafe_math_optimizations as non-finite
11067 imaginary components don't produce scalar results. */
11068 if (flag_unsafe_math_optimizations
11069 && TREE_CODE (arg0) == CONJ_EXPR
11070 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11071 return fold_mult_zconjz (loc, type, arg1);
11072 if (flag_unsafe_math_optimizations
11073 && TREE_CODE (arg1) == CONJ_EXPR
11074 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11075 return fold_mult_zconjz (loc, type, arg0);
11077 if (flag_unsafe_math_optimizations)
11079 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11080 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11082 /* Optimizations of root(...)*root(...). */
11083 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11085 tree rootfn, arg;
11086 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11087 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11089 /* Optimize sqrt(x)*sqrt(x) as x. */
11090 if (BUILTIN_SQRT_P (fcode0)
11091 && operand_equal_p (arg00, arg10, 0)
11092 && ! HONOR_SNANS (TYPE_MODE (type)))
11093 return arg00;
11095 /* Optimize root(x)*root(y) as root(x*y). */
11096 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11097 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11098 return build_call_expr_loc (loc, rootfn, 1, arg);
11101 /* Optimize expN(x)*expN(y) as expN(x+y). */
11102 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11104 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11105 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11106 CALL_EXPR_ARG (arg0, 0),
11107 CALL_EXPR_ARG (arg1, 0));
11108 return build_call_expr_loc (loc, expfn, 1, arg);
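/* Illustrative note (hypothetical example): this relies on the math
   identity expN(x) * expN(y) == expN(x + y), which is why it is only
   done under flag_unsafe_math_optimizations; e.g.

     // exp (1.0) * exp (2.0) ~= exp (3.0)   up to rounding
*/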
11111 /* Optimizations of pow(...)*pow(...). */
11112 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11113 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11114 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11116 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11117 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11118 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11119 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11121 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11122 if (operand_equal_p (arg01, arg11, 0))
11124 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11125 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11126 arg00, arg10);
11127 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11130 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11131 if (operand_equal_p (arg00, arg10, 0))
11133 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11134 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11135 arg01, arg11);
11136 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11140 /* Optimize tan(x)*cos(x) as sin(x). */
11141 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11142 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11143 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11144 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11145 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11146 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11147 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11148 CALL_EXPR_ARG (arg1, 0), 0))
11150 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11152 if (sinfn != NULL_TREE)
11153 return build_call_expr_loc (loc, sinfn, 1,
11154 CALL_EXPR_ARG (arg0, 0));
11157 /* Optimize x*pow(x,c) as pow(x,c+1). */
11158 if (fcode1 == BUILT_IN_POW
11159 || fcode1 == BUILT_IN_POWF
11160 || fcode1 == BUILT_IN_POWL)
11162 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11163 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11164 if (TREE_CODE (arg11) == REAL_CST
11165 && !TREE_OVERFLOW (arg11)
11166 && operand_equal_p (arg0, arg10, 0))
11168 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11169 REAL_VALUE_TYPE c;
11170 tree arg;
11172 c = TREE_REAL_CST (arg11);
11173 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11174 arg = build_real (type, c);
11175 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11179 /* Optimize pow(x,c)*x as pow(x,c+1). */
11180 if (fcode0 == BUILT_IN_POW
11181 || fcode0 == BUILT_IN_POWF
11182 || fcode0 == BUILT_IN_POWL)
11184 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11185 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11186 if (TREE_CODE (arg01) == REAL_CST
11187 && !TREE_OVERFLOW (arg01)
11188 && operand_equal_p (arg1, arg00, 0))
11190 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11191 REAL_VALUE_TYPE c;
11192 tree arg;
11194 c = TREE_REAL_CST (arg01);
11195 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11196 arg = build_real (type, c);
11197 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11201 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11202 if (!in_gimple_form
11203 && optimize
11204 && operand_equal_p (arg0, arg1, 0))
11206 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11208 if (powfn)
11210 tree arg = build_real (type, dconst2);
11211 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11216 goto associate;
11218 case BIT_IOR_EXPR:
11219 bit_ior:
11220 if (integer_all_onesp (arg1))
11221 return omit_one_operand_loc (loc, type, arg1, arg0);
11222 if (integer_zerop (arg1))
11223 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11224 if (operand_equal_p (arg0, arg1, 0))
11225 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11227 /* ~X | X is -1. */
11228 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11229 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11231 t1 = build_zero_cst (type);
11232 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11233 return omit_one_operand_loc (loc, type, t1, arg1);
11236 /* X | ~X is -1. */
11237 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11238 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11240 t1 = build_zero_cst (type);
11241 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11242 return omit_one_operand_loc (loc, type, t1, arg0);
11245 /* Canonicalize (X & C1) | C2. */
11246 if (TREE_CODE (arg0) == BIT_AND_EXPR
11247 && TREE_CODE (arg1) == INTEGER_CST
11248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11250 double_int c1, c2, c3, msk;
11251 int width = TYPE_PRECISION (type), w;
11252 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11253 c2 = tree_to_double_int (arg1);
11255 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11256 if ((c1 & c2) == c1)
11257 return omit_one_operand_loc (loc, type, arg1,
11258 TREE_OPERAND (arg0, 0));
11260 msk = double_int::mask (width);
11262 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11263 if (msk.and_not (c1 | c2).is_zero ())
11264 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11265 TREE_OPERAND (arg0, 0), arg1);
11267 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11268 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11269 mode which allows further optimizations. */
11270 c1 &= msk;
11271 c2 &= msk;
11272 c3 = c1.and_not (c2);
11273 for (w = BITS_PER_UNIT;
11274 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11275 w <<= 1)
11277 unsigned HOST_WIDE_INT mask
11278 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11279 if (((c1.low | c2.low) & mask) == mask
11280 && (c1.low & ~mask) == 0 && c1.high == 0)
11282 c3 = double_int::from_uhwi (mask);
11283 break;
11286 if (c3 != c1)
11287 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11288 fold_build2_loc (loc, BIT_AND_EXPR, type,
11289 TREE_OPERAND (arg0, 0),
11290 double_int_to_tree (type,
11291 c3)),
11292 arg1);
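/* Hypothetical example of the first subcase above, where every bit of
   C1 is also set in C2, i.e. (C1 & C2) == C1:

     // (x & 0x0f) | 0xff == 0xff   for any x, so X drops out
*/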
11295 /* (X & Y) | Y is (X, Y). */
11296 if (TREE_CODE (arg0) == BIT_AND_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11298 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11299 /* (X & Y) | X is (Y, X). */
11300 if (TREE_CODE (arg0) == BIT_AND_EXPR
11301 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11302 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11303 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11304 /* X | (X & Y) is (Y, X). */
11305 if (TREE_CODE (arg1) == BIT_AND_EXPR
11306 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11307 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11308 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11309 /* X | (Y & X) is (Y, X). */
11310 if (TREE_CODE (arg1) == BIT_AND_EXPR
11311 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11312 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11313 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11315 /* (X & ~Y) | (~X & Y) is X ^ Y */
11316 if (TREE_CODE (arg0) == BIT_AND_EXPR
11317 && TREE_CODE (arg1) == BIT_AND_EXPR)
11319 tree a0, a1, l0, l1, n0, n1;
11321 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11322 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11324 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11325 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11327 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11328 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11330 if ((operand_equal_p (n0, a0, 0)
11331 && operand_equal_p (n1, a1, 0))
11332 || (operand_equal_p (n0, a1, 0)
11333 && operand_equal_p (n1, a0, 0)))
11334 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
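/* Sanity check (hypothetical values): this is the textbook definition
   of XOR as "set in one operand but not both",

     unsigned x = 0xf0, y = 0xcc;
     // (x & ~y) | (~x & y) == x ^ y   (both are 0x3c)
*/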
11337 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11338 if (t1 != NULL_TREE)
11339 return t1;
11341 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11343 This results in more efficient code for machines without a NAND
11344 instruction. Combine will canonicalize to the first form
11345 which will allow use of NAND instructions provided by the
11346 backend if they exist. */
11347 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11348 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11350 return
11351 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11352 build2 (BIT_AND_EXPR, type,
11353 fold_convert_loc (loc, type,
11354 TREE_OPERAND (arg0, 0)),
11355 fold_convert_loc (loc, type,
11356 TREE_OPERAND (arg1, 0))));
11359 /* See if this can be simplified into a rotate first. If that
11360 is unsuccessful continue in the association code. */
11361 goto bit_rotate;
11363 case BIT_XOR_EXPR:
11364 if (integer_zerop (arg1))
11365 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11366 if (integer_all_onesp (arg1))
11367 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11368 if (operand_equal_p (arg0, arg1, 0))
11369 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11371 /* ~X ^ X is -1. */
11372 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11373 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11375 t1 = build_zero_cst (type);
11376 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11377 return omit_one_operand_loc (loc, type, t1, arg1);
11380 /* X ^ ~X is -1. */
11381 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11382 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11384 t1 = build_zero_cst (type);
11385 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11386 return omit_one_operand_loc (loc, type, t1, arg0);
11389 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11390 with a constant, and the two constants have no bits in common,
11391 we should treat this as a BIT_IOR_EXPR since this may produce more
11392 simplifications. */
11393 if (TREE_CODE (arg0) == BIT_AND_EXPR
11394 && TREE_CODE (arg1) == BIT_AND_EXPR
11395 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11396 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11397 && integer_zerop (const_binop (BIT_AND_EXPR,
11398 TREE_OPERAND (arg0, 1),
11399 TREE_OPERAND (arg1, 1))))
11401 code = BIT_IOR_EXPR;
11402 goto bit_ior;
11405 /* (X | Y) ^ X -> Y & ~X. */
11406 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11407 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11409 tree t2 = TREE_OPERAND (arg0, 1);
11410 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11411 arg1);
11412 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11413 fold_convert_loc (loc, type, t2),
11414 fold_convert_loc (loc, type, t1));
11415 return t1;
11418 /* (Y | X) ^ X -> Y & ~X. */
11419 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11420 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11422 tree t2 = TREE_OPERAND (arg0, 0);
11423 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11424 arg1);
11425 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11426 fold_convert_loc (loc, type, t2),
11427 fold_convert_loc (loc, type, t1));
11428 return t1;
11431 /* X ^ (X | Y) -> Y & ~X. */
11432 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11433 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11435 tree t2 = TREE_OPERAND (arg1, 1);
11436 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11437 arg0);
11438 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11439 fold_convert_loc (loc, type, t2),
11440 fold_convert_loc (loc, type, t1));
11441 return t1;
11444 /* X ^ (Y | X) -> Y & ~X. */
11445 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11446 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11448 tree t2 = TREE_OPERAND (arg1, 0);
11449 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11450 arg0);
11451 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11452 fold_convert_loc (loc, type, t2),
11453 fold_convert_loc (loc, type, t1));
11454 return t1;
11457 /* Convert ~X ^ ~Y to X ^ Y. */
11458 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11459 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11460 return fold_build2_loc (loc, code, type,
11461 fold_convert_loc (loc, type,
11462 TREE_OPERAND (arg0, 0)),
11463 fold_convert_loc (loc, type,
11464 TREE_OPERAND (arg1, 0)));
11466 /* Convert ~X ^ C to X ^ ~C. */
11467 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11468 && TREE_CODE (arg1) == INTEGER_CST)
11469 return fold_build2_loc (loc, code, type,
11470 fold_convert_loc (loc, type,
11471 TREE_OPERAND (arg0, 0)),
11472 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
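/* Hypothetical example: XOR with all-ones is complement, so the NOT
   can migrate into the constant,

     int x = 5;
     // (~x) ^ 3 == x ^ ~3   (each equals ~(x ^ 3), i.e. -7)
*/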
11474 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11475 if (TREE_CODE (arg0) == BIT_AND_EXPR
11476 && integer_onep (TREE_OPERAND (arg0, 1))
11477 && integer_onep (arg1))
11478 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11479 build_zero_cst (TREE_TYPE (arg0)));
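/* Hypothetical example: flipping the low bit of the 0/1 value X & 1
   is the same as testing it for zero,

     int x = 6;
     // ((x & 1) ^ 1) == ((x & 1) == 0)   (both are 1; x is even)
*/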
11481 /* Fold (X & Y) ^ Y as ~X & Y. */
11482 if (TREE_CODE (arg0) == BIT_AND_EXPR
11483 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11485 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11486 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11487 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11488 fold_convert_loc (loc, type, arg1));
11490 /* Fold (X & Y) ^ X as ~Y & X. */
11491 if (TREE_CODE (arg0) == BIT_AND_EXPR
11492 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11493 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11495 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11496 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11497 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11498 fold_convert_loc (loc, type, arg1));
11500 /* Fold X ^ (X & Y) as X & ~Y. */
11501 if (TREE_CODE (arg1) == BIT_AND_EXPR
11502 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11504 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11505 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11506 fold_convert_loc (loc, type, arg0),
11507 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11509 /* Fold X ^ (Y & X) as ~Y & X. */
11510 if (TREE_CODE (arg1) == BIT_AND_EXPR
11511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11512 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11514 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11515 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11516 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11517 fold_convert_loc (loc, type, arg0));
11520 /* See if this can be simplified into a rotate first. If that
11521 is unsuccessful continue in the association code. */
11522 goto bit_rotate;
11524 case BIT_AND_EXPR:
11525 if (integer_all_onesp (arg1))
11526 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11527 if (integer_zerop (arg1))
11528 return omit_one_operand_loc (loc, type, arg1, arg0);
11529 if (operand_equal_p (arg0, arg1, 0))
11530 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11532 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11533 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11534 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11535 || (TREE_CODE (arg0) == EQ_EXPR
11536 && integer_zerop (TREE_OPERAND (arg0, 1))))
11537 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11538 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11540 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11541 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11542 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11543 || (TREE_CODE (arg1) == EQ_EXPR
11544 && integer_zerop (TREE_OPERAND (arg1, 1))))
11545 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11546 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11548 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11549 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11550 && TREE_CODE (arg1) == INTEGER_CST
11551 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11553 tree tmp1 = fold_convert_loc (loc, type, arg1);
11554 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11555 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11556 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11557 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11558 return
11559 fold_convert_loc (loc, type,
11560 fold_build2_loc (loc, BIT_IOR_EXPR,
11561 type, tmp2, tmp3));
11564 /* (X | Y) & Y is (X, Y). */
11565 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11566 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11567 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11568 /* (X | Y) & X is (Y, X). */
11569 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11570 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11571 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11572 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11573 /* X & (X | Y) is (Y, X). */
11574 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11575 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11576 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11577 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11578 /* X & (Y | X) is (Y, X). */
11579 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11580 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11581 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11582 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11584 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11585 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11586 && integer_onep (TREE_OPERAND (arg0, 1))
11587 && integer_onep (arg1))
11589 tree tem2;
11590 tem = TREE_OPERAND (arg0, 0);
11591 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11592 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11593 tem, tem2);
11594 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11595 build_zero_cst (TREE_TYPE (tem)));
11597 /* Fold ~X & 1 as (X & 1) == 0. */
11598 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11599 && integer_onep (arg1))
11601 tree tem2;
11602 tem = TREE_OPERAND (arg0, 0);
11603 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11604 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11605 tem, tem2);
11606 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11607 build_zero_cst (TREE_TYPE (tem)));
11609 /* Fold !X & 1 as X == 0. */
11610 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11611 && integer_onep (arg1))
11613 tem = TREE_OPERAND (arg0, 0);
11614 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11615 build_zero_cst (TREE_TYPE (tem)));
11618 /* Fold (X ^ Y) & Y as ~X & Y. */
11619 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11620 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11622 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11623 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11624 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11625 fold_convert_loc (loc, type, arg1));
11627 /* Fold (X ^ Y) & X as ~Y & X. */
11628 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11629 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11630 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11632 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11633 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11634 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11635 fold_convert_loc (loc, type, arg1));
11637 /* Fold X & (X ^ Y) as X & ~Y. */
11638 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11639 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11641 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11642 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11643 fold_convert_loc (loc, type, arg0),
11644 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11646 /* Fold X & (Y ^ X) as ~Y & X. */
11647 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11648 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11649 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11651 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11652 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11653 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11654 fold_convert_loc (loc, type, arg0));
11657 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11658 multiple of 1 << CST. */
11659 if (TREE_CODE (arg1) == INTEGER_CST)
11661 double_int cst1 = tree_to_double_int (arg1);
11662 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11663 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11664 if ((cst1 & ncst1) == ncst1
11665 && multiple_of_p (type, arg0,
11666 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11667 return fold_convert_loc (loc, type, arg0);
11670 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11671 bits from CST2. */
11672 if (TREE_CODE (arg1) == INTEGER_CST
11673 && TREE_CODE (arg0) == MULT_EXPR
11674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11676 int arg1tz
11677 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11678 if (arg1tz > 0)
11680 double_int arg1mask, masked;
11681 arg1mask = ~double_int::mask (arg1tz);
11682 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11683 TYPE_UNSIGNED (type));
11684 masked = arg1mask & tree_to_double_int (arg1);
11685 if (masked.is_zero ())
11686 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11687 arg0, arg1);
11688 else if (masked != tree_to_double_int (arg1))
11689 return fold_build2_loc (loc, code, type, op0,
11690 double_int_to_tree (type, masked));
11694 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11695 ((A & N) + B) & M -> (A + B) & M
11696 Similarly if (N & M) == 0,
11697 ((A | N) + B) & M -> (A + B) & M
11698 and for - instead of + (or unary - instead of +)
11699 and/or ^ instead of |.
11700 If B is constant and (B & M) == 0, fold into A & M. */
11701 if (host_integerp (arg1, 1))
11703 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11704 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11705 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11706 && (TREE_CODE (arg0) == PLUS_EXPR
11707 || TREE_CODE (arg0) == MINUS_EXPR
11708 || TREE_CODE (arg0) == NEGATE_EXPR)
11709 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11710 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11712 tree pmop[2];
11713 int which = 0;
11714 unsigned HOST_WIDE_INT cst0;
11716 /* Now we know that arg0 is (C + D), (C - D) or
11717 -C, and arg1 (M) equals (1LL << cst) - 1.
11718 Store C into PMOP[0] and D into PMOP[1]. */
11719 pmop[0] = TREE_OPERAND (arg0, 0);
11720 pmop[1] = NULL;
11721 if (TREE_CODE (arg0) != NEGATE_EXPR)
11723 pmop[1] = TREE_OPERAND (arg0, 1);
11724 which = 1;
11727 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11728 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11729 & cst1) != cst1)
11730 which = -1;
11732 for (; which >= 0; which--)
11733 switch (TREE_CODE (pmop[which]))
11735 case BIT_AND_EXPR:
11736 case BIT_IOR_EXPR:
11737 case BIT_XOR_EXPR:
11738 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11739 != INTEGER_CST)
11740 break;
11741 /* tree_low_cst is not used because we don't care about
11742 the upper bits. */
11743 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11744 cst0 &= cst1;
11745 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11747 if (cst0 != cst1)
11748 break;
11750 else if (cst0 != 0)
11751 break;
11752 /* If C or D is of the form (A & N) where
11753 (N & M) == M, or of the form (A | N) or
11754 (A ^ N) where (N & M) == 0, replace it with A. */
11755 pmop[which] = TREE_OPERAND (pmop[which], 0);
11756 break;
11757 case INTEGER_CST:
11758 /* If C or D is a N where (N & M) == 0, it can be
11759 omitted (assumed 0). */
11760 if ((TREE_CODE (arg0) == PLUS_EXPR
11761 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11762 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11763 pmop[which] = NULL;
11764 break;
11765 default:
11766 break;
11769 /* Only build anything new if we optimized one or both arguments
11770 above. */
11771 if (pmop[0] != TREE_OPERAND (arg0, 0)
11772 || (TREE_CODE (arg0) != NEGATE_EXPR
11773 && pmop[1] != TREE_OPERAND (arg0, 1)))
11775 tree utype = TREE_TYPE (arg0);
11776 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11778 /* Perform the operations in a type that has defined
11779 overflow behavior. */
11780 utype = unsigned_type_for (TREE_TYPE (arg0));
11781 if (pmop[0] != NULL)
11782 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11783 if (pmop[1] != NULL)
11784 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11787 if (TREE_CODE (arg0) == NEGATE_EXPR)
11788 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11789 else if (TREE_CODE (arg0) == PLUS_EXPR)
11791 if (pmop[0] != NULL && pmop[1] != NULL)
11792 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11793 pmop[0], pmop[1]);
11794 else if (pmop[0] != NULL)
11795 tem = pmop[0];
11796 else if (pmop[1] != NULL)
11797 tem = pmop[1];
11798 else
11799 return build_int_cst (type, 0);
11801 else if (pmop[0] == NULL)
11802 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11803 else
11804 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11805 pmop[0], pmop[1]);
11806 /* TEM is now the new binary +, - or unary - replacement. */
11807 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11808 fold_convert_loc (loc, utype, arg1));
11809 return fold_convert_loc (loc, type, tem);
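/* Hypothetical example with M == 7 (so cst == 3) and N == 8, where
   (N & M) == 0:

     unsigned a = 21, b = 9;
     // ((a | 8) + b) & 7 == (a + b) & 7   (a | 8 is congruent to a mod 8)

   so the inner "| 8" can be dropped before masking.  */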
11814 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11815 if (t1 != NULL_TREE)
11816 return t1;
11817 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11818 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11819 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11821 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11823 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11824 && (~TREE_INT_CST_LOW (arg1)
11825 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11826 return
11827 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11830 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11832 This results in more efficient code for machines without a NOR
11833 instruction. Combine will canonicalize to the first form
11834 which will allow use of NOR instructions provided by the
11835 backend if they exist. */
11836 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11837 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11839 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11840 build2 (BIT_IOR_EXPR, type,
11841 fold_convert_loc (loc, type,
11842 TREE_OPERAND (arg0, 0)),
11843 fold_convert_loc (loc, type,
11844 TREE_OPERAND (arg1, 0))));
11847 /* If arg0 is derived from the address of an object or function, we may
11848 be able to fold this expression using the object or function's
11849 alignment. */
11850 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11852 unsigned HOST_WIDE_INT modulus, residue;
11853 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11855 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11856 integer_onep (arg1));
11858 /* This works because modulus is a power of 2. If this weren't the
11859 case, we'd have to replace it by its greatest power-of-2
11860 divisor: modulus & -modulus. */
11861 if (low < modulus)
11862 return build_int_cst (type, residue & low);
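/* Hedged illustration (hypothetical example): if arg0 is the address
   of an object known to be 8-byte aligned, modulus == 8 and
   residue == 0, so an expression such as

     // ((uintptr_t) &obj) & 7   folds to the constant 0

   because all bits below the alignment are known in advance.  */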
11865 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11866 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11867 if the new mask might be further optimized. */
11868 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11869 || TREE_CODE (arg0) == RSHIFT_EXPR)
11870 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11871 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11872 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11873 < TYPE_PRECISION (TREE_TYPE (arg0))
11874 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11875 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11877 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11878 unsigned HOST_WIDE_INT mask
11879 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11880 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11881 tree shift_type = TREE_TYPE (arg0);
11883 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11884 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11885 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11886 && TYPE_PRECISION (TREE_TYPE (arg0))
11887 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11889 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11890 tree arg00 = TREE_OPERAND (arg0, 0);
11891 /* See if more bits can be proven as zero because of
11892 zero extension. */
11893 if (TREE_CODE (arg00) == NOP_EXPR
11894 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11896 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11897 if (TYPE_PRECISION (inner_type)
11898 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11899 && TYPE_PRECISION (inner_type) < prec)
11901 prec = TYPE_PRECISION (inner_type);
11902 /* See if we can shorten the right shift. */
11903 if (shiftc < prec)
11904 shift_type = inner_type;
11907 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11908 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11909 zerobits <<= prec - shiftc;
11910 /* For an arithmetic shift, if the sign bit could be set, zerobits
11911 can actually contain sign bits, so no transformation is
11912 possible unless MASK masks them all away. In that
11913 case the shift needs to be converted into a logical shift. */
11914 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11915 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11917 if ((mask & zerobits) == 0)
11918 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11919 else
11920 zerobits = 0;
11924 /* ((X << 16) & 0xff00) is (X, 0). */
11925 if ((mask & zerobits) == mask)
11926 return omit_one_operand_loc (loc, type,
11927 build_int_cst (type, 0), arg0);
11929 newmask = mask | zerobits;
11930 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11932 /* Only do the transformation if NEWMASK is some integer
11933 mode's mask. */
11934 for (prec = BITS_PER_UNIT;
11935 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11936 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11937 break;
11938 if (prec < HOST_BITS_PER_WIDE_INT
11939 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11941 tree newmaskt;
11943 if (shift_type != TREE_TYPE (arg0))
11945 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11946 fold_convert_loc (loc, shift_type,
11947 TREE_OPERAND (arg0, 0)),
11948 TREE_OPERAND (arg0, 1));
11949 tem = fold_convert_loc (loc, type, tem);
11951 else
11952 tem = op0;
11953 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11954 if (!tree_int_cst_equal (newmaskt, arg1))
11955 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11960 goto associate;
11962 case RDIV_EXPR:
11963 /* Don't touch a floating-point divide by zero unless the mode
11964 of the constant can represent infinity. */
11965 if (TREE_CODE (arg1) == REAL_CST
11966 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11967 && real_zerop (arg1))
11968 return NULL_TREE;
11970 /* Optimize A / A to 1.0 if we don't care about
11971 NaNs or Infinities. Skip the transformation
11972 for non-real operands. */
11973 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11974 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11975 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11976 && operand_equal_p (arg0, arg1, 0))
11978 tree r = build_real (TREE_TYPE (arg0), dconst1);
11980 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11983 /* The complex version of the above A / A optimization. */
11984 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11985 && operand_equal_p (arg0, arg1, 0))
11987 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11988 if (! HONOR_NANS (TYPE_MODE (elem_type))
11989 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11991 tree r = build_real (elem_type, dconst1);
11992 /* omit_two_operands will call fold_convert for us. */
11993 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11997 /* (-A) / (-B) -> A / B */
11998 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11999 return fold_build2_loc (loc, RDIV_EXPR, type,
12000 TREE_OPERAND (arg0, 0),
12001 negate_expr (arg1));
12002 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12003 return fold_build2_loc (loc, RDIV_EXPR, type,
12004 negate_expr (arg0),
12005 TREE_OPERAND (arg1, 0));
12007 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12008 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12009 && real_onep (arg1))
12010 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12012 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12013 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12014 && real_minus_onep (arg1))
12015 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12016 negate_expr (arg0)));
12018 /* If ARG1 is a constant, we can convert this to a multiply by the
12019 reciprocal. This does not have the same rounding properties,
12020 so only do this if -freciprocal-math. We can actually
12021 always safely do it if ARG1 is a power of two, but it's hard to
12022 tell if it is or not in a portable manner. */
12023 if (optimize
12024 && (TREE_CODE (arg1) == REAL_CST
12025 || (TREE_CODE (arg1) == COMPLEX_CST
12026 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12027 || (TREE_CODE (arg1) == VECTOR_CST
12028 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12030 if (flag_reciprocal_math
12031 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12032 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12033 /* Find the reciprocal if optimizing and the result is exact.
12034 TODO: Complex reciprocal not implemented. */
12035 if (TREE_CODE (arg1) != COMPLEX_CST)
12037 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12039 if (inverse)
12040 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
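/* Hypothetical example: a power of two has an exact reciprocal, so
   this case needs no -freciprocal-math:

     double x = 3.0;
     // x / 4.0 == x * 0.25   exactly, in every IEEE rounding mode
*/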
12043 /* Convert A/B/C to A/(B*C). */
12044 if (flag_reciprocal_math
12045 && TREE_CODE (arg0) == RDIV_EXPR)
12046 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12047 fold_build2_loc (loc, MULT_EXPR, type,
12048 TREE_OPERAND (arg0, 1), arg1));
12050 /* Convert A/(B/C) to (A/B)*C. */
12051 if (flag_reciprocal_math
12052 && TREE_CODE (arg1) == RDIV_EXPR)
12053 return fold_build2_loc (loc, MULT_EXPR, type,
12054 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12055 TREE_OPERAND (arg1, 0)),
12056 TREE_OPERAND (arg1, 1));
12058 /* Convert C1/(X*C2) into (C1/C2)/X. */
12059 if (flag_reciprocal_math
12060 && TREE_CODE (arg1) == MULT_EXPR
12061 && TREE_CODE (arg0) == REAL_CST
12062 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12064 tree tem = const_binop (RDIV_EXPR, arg0,
12065 TREE_OPERAND (arg1, 1));
12066 if (tem)
12067 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12068 TREE_OPERAND (arg1, 0));
12071 if (flag_unsafe_math_optimizations)
12073 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12074 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12076 /* Optimize sin(x)/cos(x) as tan(x). */
12077 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12078 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12079 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12080 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12081 CALL_EXPR_ARG (arg1, 0), 0))
12083 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12085 if (tanfn != NULL_TREE)
12086 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12089 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12090 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12091 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12092 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12093 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12094 CALL_EXPR_ARG (arg1, 0), 0))
12096 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12098 if (tanfn != NULL_TREE)
12100 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12101 CALL_EXPR_ARG (arg0, 0));
12102 return fold_build2_loc (loc, RDIV_EXPR, type,
12103 build_real (type, dconst1), tmp);
12107 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12108 NaNs or Infinities. */
12109 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12110 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12111 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12113 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12114 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12116 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12117 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12118 && operand_equal_p (arg00, arg01, 0))
12120 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12122 if (cosfn != NULL_TREE)
12123 return build_call_expr_loc (loc, cosfn, 1, arg00);
12127 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12128 NaNs or Infinities. */
12129 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12130 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12131 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12133 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12134 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12136 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12137 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12138 && operand_equal_p (arg00, arg01, 0))
12140 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12142 if (cosfn != NULL_TREE)
12144 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12145 return fold_build2_loc (loc, RDIV_EXPR, type,
12146 build_real (type, dconst1),
12147 tmp);
12152 /* Optimize pow(x,c)/x as pow(x,c-1). */
12153 if (fcode0 == BUILT_IN_POW
12154 || fcode0 == BUILT_IN_POWF
12155 || fcode0 == BUILT_IN_POWL)
12157 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12158 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12159 if (TREE_CODE (arg01) == REAL_CST
12160 && !TREE_OVERFLOW (arg01)
12161 && operand_equal_p (arg1, arg00, 0))
12163 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12164 REAL_VALUE_TYPE c;
12165 tree arg;
12167 c = TREE_REAL_CST (arg01);
12168 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12169 arg = build_real (type, c);
12170 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12174 /* Optimize a/root(b/c) into a*root(c/b). */
12175 if (BUILTIN_ROOT_P (fcode1))
12177 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12179 if (TREE_CODE (rootarg) == RDIV_EXPR)
12181 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12182 tree b = TREE_OPERAND (rootarg, 0);
12183 tree c = TREE_OPERAND (rootarg, 1);
12185 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12187 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12188 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12192 /* Optimize x/expN(y) into x*expN(-y). */
12193 if (BUILTIN_EXPONENT_P (fcode1))
12195 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12196 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12197 arg1 = build_call_expr_loc (loc,
12198 expfn, 1,
12199 fold_convert_loc (loc, type, arg));
12200 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12203 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12204 if (fcode1 == BUILT_IN_POW
12205 || fcode1 == BUILT_IN_POWF
12206 || fcode1 == BUILT_IN_POWL)
12208 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12209 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12210 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12211 tree neg11 = fold_convert_loc (loc, type,
12212 negate_expr (arg11));
12213 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12214 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12217 return NULL_TREE;
12219 case TRUNC_DIV_EXPR:
12220 /* Optimize (X & (-A)) / A where A is a power of 2,
12221 to X >> log2(A). */
12222 if (TREE_CODE (arg0) == BIT_AND_EXPR
12223 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12224 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12226 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12227 arg1, TREE_OPERAND (arg0, 1));
12228 if (sum && integer_zerop (sum)) {
12229 unsigned long pow2;
12231 if (TREE_INT_CST_LOW (arg1))
12232 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12233 else
12234 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12235 + HOST_BITS_PER_WIDE_INT;
12237 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12238 TREE_OPERAND (arg0, 0),
12239 build_int_cst (integer_type_node, pow2));
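/* Hypothetical example with A == 8: x & -8 rounds x down to a
   multiple of 8, so the division is exact and matches an arithmetic
   right shift,

     int x = -20;
     // (x & -8) / 8 == x >> 3   (both are -3)
*/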
12243 /* Fall through */
12245 case FLOOR_DIV_EXPR:
12246 /* Simplify A / (B << N) where A and B are positive and B is
12247 a power of 2, to A >> (N + log2(B)). */
12248 strict_overflow_p = false;
12249 if (TREE_CODE (arg1) == LSHIFT_EXPR
12250 && (TYPE_UNSIGNED (type)
12251 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12253 tree sval = TREE_OPERAND (arg1, 0);
12254 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12256 tree sh_cnt = TREE_OPERAND (arg1, 1);
12257 unsigned long pow2;
12259 if (TREE_INT_CST_LOW (sval))
12260 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12261 else
12262 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12263 + HOST_BITS_PER_WIDE_INT;
12265 if (strict_overflow_p)
12266 fold_overflow_warning (("assuming signed overflow does not "
12267 "occur when simplifying A / (B << N)"),
12268 WARN_STRICT_OVERFLOW_MISC);
12270 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12271 sh_cnt,
12272 build_int_cst (TREE_TYPE (sh_cnt),
12273 pow2));
12274 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12275 fold_convert_loc (loc, type, arg0), sh_cnt);
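/* For example, for unsigned x, x / (4u << n) folds to x >> (n + 2),
   since log2 (4) == 2.  */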
12279 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12280 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12281 if (INTEGRAL_TYPE_P (type)
12282 && TYPE_UNSIGNED (type)
12283 && code == FLOOR_DIV_EXPR)
12284 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12286 /* Fall through */
12288 case ROUND_DIV_EXPR:
12289 case CEIL_DIV_EXPR:
12290 case EXACT_DIV_EXPR:
12291 if (integer_onep (arg1))
12292 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12293 if (integer_zerop (arg1))
12294 return NULL_TREE;
12295 /* X / -1 is -X. */
12296 if (!TYPE_UNSIGNED (type)
12297 && TREE_CODE (arg1) == INTEGER_CST
12298 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12299 && TREE_INT_CST_HIGH (arg1) == -1)
12300 return fold_convert_loc (loc, type, negate_expr (arg0));
12302 /* Convert -A / -B to A / B when the type is signed and overflow is
12303 undefined. */
12304 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12305 && TREE_CODE (arg0) == NEGATE_EXPR
12306 && negate_expr_p (arg1))
12308 if (INTEGRAL_TYPE_P (type))
12309 fold_overflow_warning (("assuming signed overflow does not occur "
12310 "when distributing negation across "
12311 "division"),
12312 WARN_STRICT_OVERFLOW_MISC);
12313 return fold_build2_loc (loc, code, type,
12314 fold_convert_loc (loc, type,
12315 TREE_OPERAND (arg0, 0)),
12316 fold_convert_loc (loc, type,
12317 negate_expr (arg1)));
12319 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12320 && TREE_CODE (arg1) == NEGATE_EXPR
12321 && negate_expr_p (arg0))
12323 if (INTEGRAL_TYPE_P (type))
12324 fold_overflow_warning (("assuming signed overflow does not occur "
12325 "when distributing negation across "
12326 "division"),
12327 WARN_STRICT_OVERFLOW_MISC);
12328 return fold_build2_loc (loc, code, type,
12329 fold_convert_loc (loc, type,
12330 negate_expr (arg0)),
12331 fold_convert_loc (loc, type,
12332 TREE_OPERAND (arg1, 0)));
12335 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12336 operation, EXACT_DIV_EXPR.
12338 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12339 At one time others generated faster code, but it's not clear whether they
12340 still do after the last round of changes to the DIV code in expmed.c. */
12341 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12342 && multiple_of_p (type, arg0, arg1))
12343 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12345 strict_overflow_p = false;
12346 if (TREE_CODE (arg1) == INTEGER_CST
12347 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12348 &strict_overflow_p)))
12350 if (strict_overflow_p)
12351 fold_overflow_warning (("assuming signed overflow does not occur "
12352 "when simplifying division"),
12353 WARN_STRICT_OVERFLOW_MISC);
12354 return fold_convert_loc (loc, type, tem);
12357 return NULL_TREE;
12359 case CEIL_MOD_EXPR:
12360 case FLOOR_MOD_EXPR:
12361 case ROUND_MOD_EXPR:
12362 case TRUNC_MOD_EXPR:
12363 /* X % 1 is always zero, but be sure to preserve any side
12364 effects in X. */
12365 if (integer_onep (arg1))
12366 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12368 /* X % 0, return X % 0 unchanged so that we can get the
12369 proper warnings and errors. */
12370 if (integer_zerop (arg1))
12371 return NULL_TREE;
12373 /* 0 % X is always zero, but be sure to preserve any side
12374 effects in X. Place this after checking for X == 0. */
12375 if (integer_zerop (arg0))
12376 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12378 /* X % -1 is zero. */
12379 if (!TYPE_UNSIGNED (type)
12380 && TREE_CODE (arg1) == INTEGER_CST
12381 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12382 && TREE_INT_CST_HIGH (arg1) == -1)
12383 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12385 /* X % -C is the same as X % C. */
12386 if (code == TRUNC_MOD_EXPR
12387 && !TYPE_UNSIGNED (type)
12388 && TREE_CODE (arg1) == INTEGER_CST
12389 && !TREE_OVERFLOW (arg1)
12390 && TREE_INT_CST_HIGH (arg1) < 0
12391 && !TYPE_OVERFLOW_TRAPS (type)
12392 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12393 && !sign_bit_p (arg1, arg1))
12394 return fold_build2_loc (loc, code, type,
12395 fold_convert_loc (loc, type, arg0),
12396 fold_convert_loc (loc, type,
12397 negate_expr (arg1)));
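/* For example, with signed x, x % -16 folds to x % 16: a truncating
   modulus depends only on the magnitude of the divisor.  */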
12399 /* X % -Y is the same as X % Y. */
12400 if (code == TRUNC_MOD_EXPR
12401 && !TYPE_UNSIGNED (type)
12402 && TREE_CODE (arg1) == NEGATE_EXPR
12403 && !TYPE_OVERFLOW_TRAPS (type))
12404 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12405 fold_convert_loc (loc, type,
12406 TREE_OPERAND (arg1, 0)));
12408 strict_overflow_p = false;
12409 if (TREE_CODE (arg1) == INTEGER_CST
12410 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12411 &strict_overflow_p)))
12413 if (strict_overflow_p)
12414 fold_overflow_warning (("assuming signed overflow does not occur "
12415 "when simplifying modulus"),
12416 WARN_STRICT_OVERFLOW_MISC);
12417 return fold_convert_loc (loc, type, tem);
12420 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12421 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12422 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12423 && (TYPE_UNSIGNED (type)
12424 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12426 tree c = arg1;
12427 /* Also optimize A % (C << N) where C is a power of 2,
12428 to A & ((C << N) - 1). */
12429 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12430 c = TREE_OPERAND (arg1, 0);
12432 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12434 tree mask
12435 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12436 build_int_cst (TREE_TYPE (arg1), 1));
12437 if (strict_overflow_p)
12438 fold_overflow_warning (("assuming signed overflow does not "
12439 "occur when simplifying "
12440 "X % (power of two)"),
12441 WARN_STRICT_OVERFLOW_MISC);
12442 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12443 fold_convert_loc (loc, type, arg0),
12444 fold_convert_loc (loc, type, mask));
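/* For example, for unsigned x, x % 16 folds to x & 15, and
   x % (2u << n) folds to x & ((2u << n) - 1).  */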
12448 return NULL_TREE;
12450 case LROTATE_EXPR:
12451 case RROTATE_EXPR:
12452 if (integer_all_onesp (arg0))
12453 return omit_one_operand_loc (loc, type, arg0, arg1);
12454 goto shift;
12456 case RSHIFT_EXPR:
12457 /* Optimize -1 >> x for arithmetic right shifts. */
12458 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12459 && tree_expr_nonnegative_p (arg1))
12460 return omit_one_operand_loc (loc, type, arg0, arg1);
12461 /* ... fall through ... */
12463 case LSHIFT_EXPR:
12464 shift:
12465 if (integer_zerop (arg1))
12466 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12467 if (integer_zerop (arg0))
12468 return omit_one_operand_loc (loc, type, arg0, arg1);
12470 /* Prefer vector1 << scalar to vector1 << vector2
12471 if vector2 is uniform. */
12472 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12473 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12474 return fold_build2_loc (loc, code, type, op0, tem);
12476 /* Since a negative shift count is not well-defined,
12477 don't try to compute it in the compiler. */
12478 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12479 return NULL_TREE;
12481 prec = element_precision (type);
12483 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12484 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12485 && TREE_INT_CST_LOW (arg1) < prec
12486 && host_integerp (TREE_OPERAND (arg0, 1), true)
12487 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12489 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12490 + TREE_INT_CST_LOW (arg1));
12492 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12493 being well defined. */
12494 if (low >= prec)
12496 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12497 low = low % prec;
12498 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12499 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12500 TREE_OPERAND (arg0, 0));
12501 else
12502 low = prec - 1;
12505 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12506 build_int_cst (TREE_TYPE (arg1), low));
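/* For example, with 32-bit x, (x << 3) << 4 folds to x << 7.  If the
   combined count reaches the precision, the fold yields zero for left
   or unsigned shifts, clamps to prec - 1 for signed right shifts, and
   reduces modulo prec for rotates.  */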
12509 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12510 into x & ((unsigned)-1 >> c) for unsigned types. */
12511 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12512 || (TYPE_UNSIGNED (type)
12513 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12514 && host_integerp (arg1, false)
12515 && TREE_INT_CST_LOW (arg1) < prec
12516 && host_integerp (TREE_OPERAND (arg0, 1), false)
12517 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12519 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12520 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12521 tree lshift;
12522 tree arg00;
12524 if (low0 == low1)
12526 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12528 lshift = build_minus_one_cst (type);
12529 lshift = const_binop (code, lshift, arg1);
12531 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
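/* For example, (x >> 4) << 4 folds to x & (-1 << 4), and for unsigned
   x, (x << 4) >> 4 folds to x & ((unsigned) -1 >> 4).  */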
12535 /* Rewrite an LROTATE_EXPR by a constant into an
12536 RROTATE_EXPR by a new constant. */
12537 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12539 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12540 tem = const_binop (MINUS_EXPR, tem, arg1);
12541 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
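/* For example, rotating a 32-bit value left by 8 is rewritten as
   rotating it right by 24, canonicalizing on RROTATE_EXPR.  */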
12544 /* If we have a rotate of a bit operation with the rotate count and
12545 the second operand of the bit operation both constant,
12546 permute the two operations. */
12547 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12548 && (TREE_CODE (arg0) == BIT_AND_EXPR
12549 || TREE_CODE (arg0) == BIT_IOR_EXPR
12550 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12551 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12552 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12553 fold_build2_loc (loc, code, type,
12554 TREE_OPERAND (arg0, 0), arg1),
12555 fold_build2_loc (loc, code, type,
12556 TREE_OPERAND (arg0, 1), arg1));
12558 /* Two consecutive rotates adding up to the precision of the
12559 type can be ignored. */
12560 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12561 && TREE_CODE (arg0) == RROTATE_EXPR
12562 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12563 && TREE_INT_CST_HIGH (arg1) == 0
12564 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12565 && ((TREE_INT_CST_LOW (arg1)
12566 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12567 == prec))
12568 return TREE_OPERAND (arg0, 0);
12570 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12571 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12572 if the latter can be further optimized. */
12573 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12574 && TREE_CODE (arg0) == BIT_AND_EXPR
12575 && TREE_CODE (arg1) == INTEGER_CST
12576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12578 tree mask = fold_build2_loc (loc, code, type,
12579 fold_convert_loc (loc, type,
12580 TREE_OPERAND (arg0, 1)),
12581 arg1);
12582 tree shift = fold_build2_loc (loc, code, type,
12583 fold_convert_loc (loc, type,
12584 TREE_OPERAND (arg0, 0)),
12585 arg1);
12586 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12587 if (tem)
12588 return tem;
12591 return NULL_TREE;
12593 case MIN_EXPR:
12594 if (operand_equal_p (arg0, arg1, 0))
12595 return omit_one_operand_loc (loc, type, arg0, arg1);
12596 if (INTEGRAL_TYPE_P (type)
12597 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12598 return omit_one_operand_loc (loc, type, arg1, arg0);
12599 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12600 if (tem)
12601 return tem;
12602 goto associate;
12604 case MAX_EXPR:
12605 if (operand_equal_p (arg0, arg1, 0))
12606 return omit_one_operand_loc (loc, type, arg0, arg1);
12607 if (INTEGRAL_TYPE_P (type)
12608 && TYPE_MAX_VALUE (type)
12609 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12610 return omit_one_operand_loc (loc, type, arg1, arg0);
12611 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12612 if (tem)
12613 return tem;
12614 goto associate;
12616 case TRUTH_ANDIF_EXPR:
12617 /* Note that the operands of this must be ints
12618 and their values must be 0 or 1.
12619 ("true" is a fixed value perhaps depending on the language.) */
12620 /* If first arg is constant zero, return it. */
12621 if (integer_zerop (arg0))
12622 return fold_convert_loc (loc, type, arg0);
12623 case TRUTH_AND_EXPR:
12624 /* If either arg is constant true, drop it. */
12625 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12626 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12627 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12628 /* Preserve sequence points. */
12629 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12630 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12631 /* If second arg is constant zero, result is zero, but first arg
12632 must be evaluated. */
12633 if (integer_zerop (arg1))
12634 return omit_one_operand_loc (loc, type, arg1, arg0);
12635 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12636 case will be handled here. */
12637 if (integer_zerop (arg0))
12638 return omit_one_operand_loc (loc, type, arg0, arg1);
12640 /* !X && X is always false. */
12641 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12642 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12643 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12644 /* X && !X is always false. */
12645 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12646 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12647 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12649 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12650 means A >= Y && A != MAX, but in this case we know that
12651 A < X <= MAX. */
12653 if (!TREE_SIDE_EFFECTS (arg0)
12654 && !TREE_SIDE_EFFECTS (arg1))
12656 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12657 if (tem && !operand_equal_p (tem, arg0, 0))
12658 return fold_build2_loc (loc, code, type, tem, arg1);
12660 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12661 if (tem && !operand_equal_p (tem, arg1, 0))
12662 return fold_build2_loc (loc, code, type, arg0, tem);
12665 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12666 != NULL_TREE)
12667 return tem;
12669 return NULL_TREE;
12671 case TRUTH_ORIF_EXPR:
12672 /* Note that the operands of this must be ints
12673 and their values must be 0 or 1.
12674 ("true" is a fixed value perhaps depending on the language.) */
12675 /* If first arg is constant true, return it. */
12676 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12677 return fold_convert_loc (loc, type, arg0);
12678 case TRUTH_OR_EXPR:
12679 /* If either arg is constant zero, drop it. */
12680 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12681 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12682 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12683 /* Preserve sequence points. */
12684 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12685 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12686 /* If second arg is constant true, result is true, but we must
12687 evaluate first arg. */
12688 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12689 return omit_one_operand_loc (loc, type, arg1, arg0);
12690 /* Likewise for first arg, but note this only occurs here for
12691 TRUTH_OR_EXPR. */
12692 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12693 return omit_one_operand_loc (loc, type, arg0, arg1);
12695 /* !X || X is always true. */
12696 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12697 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12698 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12699 /* X || !X is always true. */
12700 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12701 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12702 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12704 /* (X && !Y) || (!X && Y) is X ^ Y */
12705 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12706 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12708 tree a0, a1, l0, l1, n0, n1;
12710 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12711 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12713 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12714 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12716 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12717 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12719 if ((operand_equal_p (n0, a0, 0)
12720 && operand_equal_p (n1, a1, 0))
12721 || (operand_equal_p (n0, a1, 0)
12722 && operand_equal_p (n1, a0, 0)))
12723 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
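/* For example, (x && !y) || (!x && y) folds to x ^ y: the two
   TRUTH_AND_EXPR arms are exact complements of each other.  */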
12726 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12727 != NULL_TREE)
12728 return tem;
12730 return NULL_TREE;
12732 case TRUTH_XOR_EXPR:
12733 /* If the second arg is constant zero, drop it. */
12734 if (integer_zerop (arg1))
12735 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12736 /* If the second arg is constant true, this is a logical inversion. */
12737 if (integer_onep (arg1))
12739 tem = invert_truthvalue_loc (loc, arg0);
12740 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12742 /* Identical arguments cancel to zero. */
12743 if (operand_equal_p (arg0, arg1, 0))
12744 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12746 /* !X ^ X is always true. */
12747 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12748 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12749 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12751 /* X ^ !X is always true. */
12752 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12753 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12754 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12756 return NULL_TREE;
12758 case EQ_EXPR:
12759 case NE_EXPR:
12760 STRIP_NOPS (arg0);
12761 STRIP_NOPS (arg1);
12763 tem = fold_comparison (loc, code, type, op0, op1);
12764 if (tem != NULL_TREE)
12765 return tem;
12767 /* bool_var != 0 becomes bool_var. */
12768 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12769 && code == NE_EXPR)
12770 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12772 /* bool_var == 1 becomes bool_var. */
12773 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12774 && code == EQ_EXPR)
12775 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12777 /* bool_var != 1 becomes !bool_var. */
12778 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12779 && code == NE_EXPR)
12780 return fold_convert_loc (loc, type,
12781 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12782 TREE_TYPE (arg0), arg0));
12784 /* bool_var == 0 becomes !bool_var. */
12785 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12786 && code == EQ_EXPR)
12787 return fold_convert_loc (loc, type,
12788 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12789 TREE_TYPE (arg0), arg0));
12791 /* !exp != 0 becomes !exp */
12792 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12793 && code == NE_EXPR)
12794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12796 /* If this is an equality comparison of the address of two non-weak,
12797 unaliased symbols neither of which are extern (since we do not
12798 have access to attributes for externs), then we know the result. */
12799 if (TREE_CODE (arg0) == ADDR_EXPR
12800 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12801 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12802 && ! lookup_attribute ("alias",
12803 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12804 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12805 && TREE_CODE (arg1) == ADDR_EXPR
12806 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12807 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12808 && ! lookup_attribute ("alias",
12809 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12810 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12812 /* We know that we're looking at the address of two
12813 non-weak, unaliased, static _DECL nodes.
12815 It is both wasteful and incorrect to call operand_equal_p
12816 to compare the two ADDR_EXPR nodes. It is wasteful in that
12817 all we need to do is test pointer equality for the arguments
12818 to the two ADDR_EXPR nodes. It is incorrect to use
12819 operand_equal_p as that function is NOT equivalent to a
12820 C equality test. It can in fact return false for two
12821 objects which would test as equal using the C equality
12822 operator. */
12823 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12824 return constant_boolean_node (equal
12825 ? code == EQ_EXPR : code != EQ_EXPR,
12826 type);
12829 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12830 a MINUS_EXPR of a constant, we can convert it into a comparison with
12831 a revised constant as long as no overflow occurs. */
12832 if (TREE_CODE (arg1) == INTEGER_CST
12833 && (TREE_CODE (arg0) == PLUS_EXPR
12834 || TREE_CODE (arg0) == MINUS_EXPR)
12835 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12836 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12837 ? MINUS_EXPR : PLUS_EXPR,
12838 fold_convert_loc (loc, TREE_TYPE (arg0),
12839 arg1),
12840 TREE_OPERAND (arg0, 1)))
12841 && !TREE_OVERFLOW (tem))
12842 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12844 /* Similarly for a NEGATE_EXPR. */
12845 if (TREE_CODE (arg0) == NEGATE_EXPR
12846 && TREE_CODE (arg1) == INTEGER_CST
12847 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12848 arg1)))
12849 && TREE_CODE (tem) == INTEGER_CST
12850 && !TREE_OVERFLOW (tem))
12851 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12853 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12854 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12855 && TREE_CODE (arg1) == INTEGER_CST
12856 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12857 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12858 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12859 fold_convert_loc (loc,
12860 TREE_TYPE (arg0),
12861 arg1),
12862 TREE_OPERAND (arg0, 1)));
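/* For example, (x ^ 4) == 6 folds to x == 2, since 4 ^ 6 == 2 and
   XOR by a constant is invertible.  */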
12864 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12865 if ((TREE_CODE (arg0) == PLUS_EXPR
12866 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12867 || TREE_CODE (arg0) == MINUS_EXPR)
12868 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12869 0)),
12870 arg1, 0)
12871 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12872 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12874 tree val = TREE_OPERAND (arg0, 1);
12875 return omit_two_operands_loc (loc, type,
12876 fold_build2_loc (loc, code, type,
12877 val,
12878 build_int_cst (TREE_TYPE (val),
12879 0)),
12880 TREE_OPERAND (arg0, 0), arg1);
12883 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12884 if (TREE_CODE (arg0) == MINUS_EXPR
12885 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12886 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12887 1)),
12888 arg1, 0)
12889 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12891 return omit_two_operands_loc (loc, type,
12892 code == NE_EXPR
12893 ? boolean_true_node : boolean_false_node,
12894 TREE_OPERAND (arg0, 1), arg1);
12897 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12898 for !=. Don't do this for ordered comparisons due to overflow. */
12899 if (TREE_CODE (arg0) == MINUS_EXPR
12900 && integer_zerop (arg1))
12901 return fold_build2_loc (loc, code, type,
12902 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12904 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12905 if (TREE_CODE (arg0) == ABS_EXPR
12906 && (integer_zerop (arg1) || real_zerop (arg1)))
12907 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12909 /* If this is an EQ or NE comparison with zero and ARG0 is
12910 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12911 two operations, but the latter can be done in one less insn
12912 on machines that have only two-operand insns or on which a
12913 constant cannot be the first operand. */
12914 if (TREE_CODE (arg0) == BIT_AND_EXPR
12915 && integer_zerop (arg1))
12917 tree arg00 = TREE_OPERAND (arg0, 0);
12918 tree arg01 = TREE_OPERAND (arg0, 1);
12919 if (TREE_CODE (arg00) == LSHIFT_EXPR
12920 && integer_onep (TREE_OPERAND (arg00, 0)))
12922 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12923 arg01, TREE_OPERAND (arg00, 1));
12924 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12925 build_int_cst (TREE_TYPE (arg0), 1));
12926 return fold_build2_loc (loc, code, type,
12927 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12928 arg1);
12930 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12931 && integer_onep (TREE_OPERAND (arg01, 0)))
12933 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12934 arg00, TREE_OPERAND (arg01, 1));
12935 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12936 build_int_cst (TREE_TYPE (arg0), 1));
12937 return fold_build2_loc (loc, code, type,
12938 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12939 arg1);
12943 /* If this is an NE or EQ comparison of zero against the result of a
12944 signed MOD operation whose second operand is a power of 2, make
12945 the MOD operation unsigned since it is simpler and equivalent. */
12946 if (integer_zerop (arg1)
12947 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12948 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12949 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12950 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12951 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12952 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12954 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12955 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12956 fold_convert_loc (loc, newtype,
12957 TREE_OPERAND (arg0, 0)),
12958 fold_convert_loc (loc, newtype,
12959 TREE_OPERAND (arg0, 1)));
12961 return fold_build2_loc (loc, code, type, newmod,
12962 fold_convert_loc (loc, newtype, arg1));
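/* For example, with signed x, x % 4 != 0 folds to
   (unsigned) x % 4 != 0: for a power-of-two modulus, the low bits
   alone decide the comparison against zero, regardless of sign.  */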
12965 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12966 C1 is a valid shift constant, and C2 is a power of two, i.e.
12967 a single bit. */
12968 if (TREE_CODE (arg0) == BIT_AND_EXPR
12969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12970 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12971 == INTEGER_CST
12972 && integer_pow2p (TREE_OPERAND (arg0, 1))
12973 && integer_zerop (arg1))
12975 tree itype = TREE_TYPE (arg0);
12976 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12977 prec = TYPE_PRECISION (itype);
12979 /* Check for a valid shift count. */
12980 if (TREE_INT_CST_HIGH (arg001) == 0
12981 && TREE_INT_CST_LOW (arg001) < prec)
12983 tree arg01 = TREE_OPERAND (arg0, 1);
12984 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12985 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12986 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12987 can be rewritten as (X & (C2 << C1)) != 0. */
12988 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12990 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12991 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12992 return fold_build2_loc (loc, code, type, tem,
12993 fold_convert_loc (loc, itype, arg1));
12995 /* Otherwise, for signed (arithmetic) shifts,
12996 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12997 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12998 else if (!TYPE_UNSIGNED (itype))
12999 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13000 arg000, build_int_cst (itype, 0));
13001 /* Otherwise, for unsigned (logical) shifts,
13002 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13003 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13004 else
13005 return omit_one_operand_loc (loc, type,
13006 code == EQ_EXPR ? integer_one_node
13007 : integer_zero_node,
13008 arg000);
13012 /* If we have (A & C) == C where C is a power of 2, convert this into
13013 (A & C) != 0. Similarly for NE_EXPR. */
13014 if (TREE_CODE (arg0) == BIT_AND_EXPR
13015 && integer_pow2p (TREE_OPERAND (arg0, 1))
13016 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13017 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13018 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13019 integer_zero_node));
13021 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13022 bit, then fold the expression into A < 0 or A >= 0. */
13023 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13024 if (tem)
13025 return tem;
13027 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13028 Similarly for NE_EXPR. */
13029 if (TREE_CODE (arg0) == BIT_AND_EXPR
13030 && TREE_CODE (arg1) == INTEGER_CST
13031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13033 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13034 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13035 TREE_OPERAND (arg0, 1));
13036 tree dandnotc
13037 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13038 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13039 notc);
13040 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13041 if (integer_nonzerop (dandnotc))
13042 return omit_one_operand_loc (loc, type, rslt, arg0);
13045 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13046 Similarly for NE_EXPR. */
13047 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13048 && TREE_CODE (arg1) == INTEGER_CST
13049 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13051 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13052 tree candnotd
13053 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13054 TREE_OPERAND (arg0, 1),
13055 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13056 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13057 if (integer_nonzerop (candnotd))
13058 return omit_one_operand_loc (loc, type, rslt, arg0);
13061 /* If this is a comparison of a field, we may be able to simplify it. */
13062 if ((TREE_CODE (arg0) == COMPONENT_REF
13063 || TREE_CODE (arg0) == BIT_FIELD_REF)
13064 /* Handle the constant case even without -O
13065 to make sure the warnings are given. */
13066 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13068 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13069 if (t1)
13070 return t1;
13073 /* Optimize comparisons of strlen vs zero to a compare of the
13074 first character of the string vs zero. To wit,
13075 strlen(ptr) == 0 => *ptr == 0
13076 strlen(ptr) != 0 => *ptr != 0
13077 Other cases should reduce to one of these two (or a constant)
13078 due to the return value of strlen being unsigned. */
13079 if (TREE_CODE (arg0) == CALL_EXPR
13080 && integer_zerop (arg1))
13082 tree fndecl = get_callee_fndecl (arg0);
13084 if (fndecl
13085 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13086 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13087 && call_expr_nargs (arg0) == 1
13088 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13090 tree iref = build_fold_indirect_ref_loc (loc,
13091 CALL_EXPR_ARG (arg0, 0));
13092 return fold_build2_loc (loc, code, type, iref,
13093 build_int_cst (TREE_TYPE (iref), 0));
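/* For example, strlen (p) == 0 folds to *p == 0, avoiding the call:
   the string is empty iff its first character is NUL.  */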
13097 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13098 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13099 if (TREE_CODE (arg0) == RSHIFT_EXPR
13100 && integer_zerop (arg1)
13101 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13103 tree arg00 = TREE_OPERAND (arg0, 0);
13104 tree arg01 = TREE_OPERAND (arg0, 1);
13105 tree itype = TREE_TYPE (arg00);
13106 if (TREE_INT_CST_HIGH (arg01) == 0
13107 && TREE_INT_CST_LOW (arg01)
13108 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13110 if (TYPE_UNSIGNED (itype))
13112 itype = signed_type_for (itype);
13113 arg00 = fold_convert_loc (loc, itype, arg00);
13115 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13116 type, arg00, build_zero_cst (itype));
13120 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13121 if (integer_zerop (arg1)
13122 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13123 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13124 TREE_OPERAND (arg0, 1));
13126 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13127 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13128 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13129 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13130 build_zero_cst (TREE_TYPE (arg0)));
13131 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13132 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13134 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13135 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13136 build_zero_cst (TREE_TYPE (arg0)));
13138 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13139 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13140 && TREE_CODE (arg1) == INTEGER_CST
13141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13142 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13143 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13144 TREE_OPERAND (arg0, 1), arg1));
13146 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13147 (X & C) == 0 when C is a single bit. */
13148 if (TREE_CODE (arg0) == BIT_AND_EXPR
13149 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13150 && integer_zerop (arg1)
13151 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13153 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13154 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13155 TREE_OPERAND (arg0, 1));
13156 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13157 type, tem,
13158 fold_convert_loc (loc, TREE_TYPE (arg0),
13159 arg1));
13162 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13163 constant C is a power of two, i.e. a single bit. */
13164 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13165 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13166 && integer_zerop (arg1)
13167 && integer_pow2p (TREE_OPERAND (arg0, 1))
13168 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13169 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13171 tree arg00 = TREE_OPERAND (arg0, 0);
13172 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13173 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13176 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13177 when C is a power of two, i.e. a single bit. */
13178 if (TREE_CODE (arg0) == BIT_AND_EXPR
13179 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13180 && integer_zerop (arg1)
13181 && integer_pow2p (TREE_OPERAND (arg0, 1))
13182 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13183 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13185 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13186 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13187 arg000, TREE_OPERAND (arg0, 1));
13188 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13189 tem, build_int_cst (TREE_TYPE (tem), 0));
13192 if (integer_zerop (arg1)
13193 && tree_expr_nonzero_p (arg0))
13195 tree res = constant_boolean_node (code == NE_EXPR, type);
13196 return omit_one_operand_loc (loc, type, res, arg0);
13199 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13200 if (TREE_CODE (arg0) == NEGATE_EXPR
13201 && TREE_CODE (arg1) == NEGATE_EXPR)
13202 return fold_build2_loc (loc, code, type,
13203 TREE_OPERAND (arg0, 0),
13204 fold_convert_loc (loc, TREE_TYPE (arg0),
13205 TREE_OPERAND (arg1, 0)));
13207 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13208 if (TREE_CODE (arg0) == BIT_AND_EXPR
13209 && TREE_CODE (arg1) == BIT_AND_EXPR)
13211 tree arg00 = TREE_OPERAND (arg0, 0);
13212 tree arg01 = TREE_OPERAND (arg0, 1);
13213 tree arg10 = TREE_OPERAND (arg1, 0);
13214 tree arg11 = TREE_OPERAND (arg1, 1);
13215 tree itype = TREE_TYPE (arg0);
13217 if (operand_equal_p (arg01, arg11, 0))
13218 return fold_build2_loc (loc, code, type,
13219 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13220 fold_build2_loc (loc,
13221 BIT_XOR_EXPR, itype,
13222 arg00, arg10),
13223 arg01),
13224 build_zero_cst (itype));
13226 if (operand_equal_p (arg01, arg10, 0))
13227 return fold_build2_loc (loc, code, type,
13228 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13229 fold_build2_loc (loc,
13230 BIT_XOR_EXPR, itype,
13231 arg00, arg11),
13232 arg01),
13233 build_zero_cst (itype));
13235 if (operand_equal_p (arg00, arg11, 0))
13236 return fold_build2_loc (loc, code, type,
13237 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13238 fold_build2_loc (loc,
13239 BIT_XOR_EXPR, itype,
13240 arg01, arg10),
13241 arg00),
13242 build_zero_cst (itype));
13244 if (operand_equal_p (arg00, arg10, 0))
13245 return fold_build2_loc (loc, code, type,
13246 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13247 fold_build2_loc (loc,
13248 BIT_XOR_EXPR, itype,
13249 arg01, arg11),
13250 arg00),
13251 build_zero_cst (itype));
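/* For example, (x & 7) == (y & 7) folds to ((x ^ y) & 7) == 0: the
   masked values are equal iff they agree on the masked bits.  */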
13254 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13255 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13257 tree arg00 = TREE_OPERAND (arg0, 0);
13258 tree arg01 = TREE_OPERAND (arg0, 1);
13259 tree arg10 = TREE_OPERAND (arg1, 0);
13260 tree arg11 = TREE_OPERAND (arg1, 1);
13261 tree itype = TREE_TYPE (arg0);
13263 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13264 operand_equal_p guarantees no side-effects so we don't need
13265 to use omit_one_operand on Z. */
13266 if (operand_equal_p (arg01, arg11, 0))
13267 return fold_build2_loc (loc, code, type, arg00,
13268 fold_convert_loc (loc, TREE_TYPE (arg00),
13269 arg10));
13270 if (operand_equal_p (arg01, arg10, 0))
13271 return fold_build2_loc (loc, code, type, arg00,
13272 fold_convert_loc (loc, TREE_TYPE (arg00),
13273 arg11));
13274 if (operand_equal_p (arg00, arg11, 0))
13275 return fold_build2_loc (loc, code, type, arg01,
13276 fold_convert_loc (loc, TREE_TYPE (arg01),
13277 arg10));
13278 if (operand_equal_p (arg00, arg10, 0))
13279 return fold_build2_loc (loc, code, type, arg01,
13280 fold_convert_loc (loc, TREE_TYPE (arg01),
13281 arg11));
13283 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13284 if (TREE_CODE (arg01) == INTEGER_CST
13285 && TREE_CODE (arg11) == INTEGER_CST)
13287 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13288 fold_convert_loc (loc, itype, arg11));
13289 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13290 return fold_build2_loc (loc, code, type, tem,
13291 fold_convert_loc (loc, itype, arg10));
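/* For example, (x ^ 3) == (y ^ 5) folds to (x ^ (3 ^ 5)) == y,
   i.e. (x ^ 6) == y, moving both constants to one side.  */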
13295 /* Attempt to simplify equality/inequality comparisons of complex
13296 values. Only lower the comparison if the result is known or
13297 can be simplified to a single scalar comparison. */
13298 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13299 || TREE_CODE (arg0) == COMPLEX_CST)
13300 && (TREE_CODE (arg1) == COMPLEX_EXPR
13301 || TREE_CODE (arg1) == COMPLEX_CST))
13303 tree real0, imag0, real1, imag1;
13304 tree rcond, icond;
13306 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13308 real0 = TREE_OPERAND (arg0, 0);
13309 imag0 = TREE_OPERAND (arg0, 1);
13311 else
13313 real0 = TREE_REALPART (arg0);
13314 imag0 = TREE_IMAGPART (arg0);
13317 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13319 real1 = TREE_OPERAND (arg1, 0);
13320 imag1 = TREE_OPERAND (arg1, 1);
13322 else
13324 real1 = TREE_REALPART (arg1);
13325 imag1 = TREE_IMAGPART (arg1);
13328 rcond = fold_binary_loc (loc, code, type, real0, real1);
13329 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13331 if (integer_zerop (rcond))
13333 if (code == EQ_EXPR)
13334 return omit_two_operands_loc (loc, type, boolean_false_node,
13335 imag0, imag1);
13336 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13338 else
13340 if (code == NE_EXPR)
13341 return omit_two_operands_loc (loc, type, boolean_true_node,
13342 imag0, imag1);
13343 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13347 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13348 if (icond && TREE_CODE (icond) == INTEGER_CST)
13350 if (integer_zerop (icond))
13352 if (code == EQ_EXPR)
13353 return omit_two_operands_loc (loc, type, boolean_false_node,
13354 real0, real1);
13355 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13357 else
13359 if (code == NE_EXPR)
13360 return omit_two_operands_loc (loc, type, boolean_true_node,
13361 real0, real1);
13362 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13367 return NULL_TREE;
13369 case LT_EXPR:
13370 case GT_EXPR:
13371 case LE_EXPR:
13372 case GE_EXPR:
13373 tem = fold_comparison (loc, code, type, op0, op1);
13374 if (tem != NULL_TREE)
13375 return tem;
13377 /* Transform comparisons of the form X +- C CMP X. */
13378 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13379 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13380 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13381 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13382 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13383 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13385 tree arg01 = TREE_OPERAND (arg0, 1);
13386 enum tree_code code0 = TREE_CODE (arg0);
13387 int is_positive;
13389 if (TREE_CODE (arg01) == REAL_CST)
13390 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13391 else
13392 is_positive = tree_int_cst_sgn (arg01);
13394 /* (X - c) > X becomes false. */
13395 if (code == GT_EXPR
13396 && ((code0 == MINUS_EXPR && is_positive >= 0)
13397 || (code0 == PLUS_EXPR && is_positive <= 0)))
13399 if (TREE_CODE (arg01) == INTEGER_CST
13400 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13401 fold_overflow_warning (("assuming signed overflow does not "
13402 "occur when assuming that (X - c) > X "
13403 "is always false"),
13404 WARN_STRICT_OVERFLOW_ALL);
13405 return constant_boolean_node (0, type);
13408 /* Likewise (X + c) < X becomes false. */
13409 if (code == LT_EXPR
13410 && ((code0 == PLUS_EXPR && is_positive >= 0)
13411 || (code0 == MINUS_EXPR && is_positive <= 0)))
13413 if (TREE_CODE (arg01) == INTEGER_CST
13414 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13415 fold_overflow_warning (("assuming signed overflow does not "
13416 "occur when assuming that "
13417 "(X + c) < X is always false"),
13418 WARN_STRICT_OVERFLOW_ALL);
13419 return constant_boolean_node (0, type);
13422 /* Convert (X - c) <= X to true. */
13423 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13424 && code == LE_EXPR
13425 && ((code0 == MINUS_EXPR && is_positive >= 0)
13426 || (code0 == PLUS_EXPR && is_positive <= 0)))
13428 if (TREE_CODE (arg01) == INTEGER_CST
13429 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13430 fold_overflow_warning (("assuming signed overflow does not "
13431 "occur when assuming that "
13432 "(X - c) <= X is always true"),
13433 WARN_STRICT_OVERFLOW_ALL);
13434 return constant_boolean_node (1, type);
13437 /* Convert (X + c) >= X to true. */
13438 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13439 && code == GE_EXPR
13440 && ((code0 == PLUS_EXPR && is_positive >= 0)
13441 || (code0 == MINUS_EXPR && is_positive <= 0)))
13443 if (TREE_CODE (arg01) == INTEGER_CST
13444 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13445 fold_overflow_warning (("assuming signed overflow does not "
13446 "occur when assuming that "
13447 "(X + c) >= X is always true"),
13448 WARN_STRICT_OVERFLOW_ALL);
13449 return constant_boolean_node (1, type);
13452 if (TREE_CODE (arg01) == INTEGER_CST)
13454 /* Convert X + c > X and X - c < X to true for integers. */
13455 if (code == GT_EXPR
13456 && ((code0 == PLUS_EXPR && is_positive > 0)
13457 || (code0 == MINUS_EXPR && is_positive < 0)))
13459 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13460 fold_overflow_warning (("assuming signed overflow does "
13461 "not occur when assuming that "
13462 "(X + c) > X is always true"),
13463 WARN_STRICT_OVERFLOW_ALL);
13464 return constant_boolean_node (1, type);
13467 if (code == LT_EXPR
13468 && ((code0 == MINUS_EXPR && is_positive > 0)
13469 || (code0 == PLUS_EXPR && is_positive < 0)))
13471 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13472 fold_overflow_warning (("assuming signed overflow does "
13473 "not occur when assuming that "
13474 "(X - c) < X is always true"),
13475 WARN_STRICT_OVERFLOW_ALL);
13476 return constant_boolean_node (1, type);
13479 /* Convert X + c <= X and X - c >= X to false for integers. */
13480 if (code == LE_EXPR
13481 && ((code0 == PLUS_EXPR && is_positive > 0)
13482 || (code0 == MINUS_EXPR && is_positive < 0)))
13484 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13485 fold_overflow_warning (("assuming signed overflow does "
13486 "not occur when assuming that "
13487 "(X + c) <= X is always false"),
13488 WARN_STRICT_OVERFLOW_ALL);
13489 return constant_boolean_node (0, type);
13492 if (code == GE_EXPR
13493 && ((code0 == MINUS_EXPR && is_positive > 0)
13494 || (code0 == PLUS_EXPR && is_positive < 0)))
13496 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13497 fold_overflow_warning (("assuming signed overflow does "
13498 "not occur when assuming that "
13499 "(X - c) >= X is always false"),
13500 WARN_STRICT_OVERFLOW_ALL);
13501 return constant_boolean_node (0, type);
13506 /* Comparisons with the highest or lowest possible integer of
13507 the specified precision will have known values. */
13509 tree arg1_type = TREE_TYPE (arg1);
13510 unsigned int width = TYPE_PRECISION (arg1_type);
13512 if (TREE_CODE (arg1) == INTEGER_CST
13513 && width <= HOST_BITS_PER_DOUBLE_INT
13514 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13516 HOST_WIDE_INT signed_max_hi;
13517 unsigned HOST_WIDE_INT signed_max_lo;
13518 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13520 if (width <= HOST_BITS_PER_WIDE_INT)
13522 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13523 - 1;
13524 signed_max_hi = 0;
13525 max_hi = 0;
13527 if (TYPE_UNSIGNED (arg1_type))
13529 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13530 min_lo = 0;
13531 min_hi = 0;
13533 else
13535 max_lo = signed_max_lo;
13536 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13537 min_hi = -1;
13540 else
13542 width -= HOST_BITS_PER_WIDE_INT;
13543 signed_max_lo = -1;
13544 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13545 - 1;
13546 max_lo = -1;
13547 min_lo = 0;
13549 if (TYPE_UNSIGNED (arg1_type))
13551 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13552 min_hi = 0;
13554 else
13556 max_hi = signed_max_hi;
13557 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13561 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13562 && TREE_INT_CST_LOW (arg1) == max_lo)
13563 switch (code)
13565 case GT_EXPR:
13566 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13568 case GE_EXPR:
13569 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13571 case LE_EXPR:
13572 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13574 case LT_EXPR:
13575 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13577 /* The GE_EXPR and LT_EXPR cases above are not normally
13578 reached because of previous transformations. */
13580 default:
13581 break;
13583 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13584 == max_hi
13585 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13586 switch (code)
13588 case GT_EXPR:
13589 arg1 = const_binop (PLUS_EXPR, arg1,
13590 build_int_cst (TREE_TYPE (arg1), 1));
13591 return fold_build2_loc (loc, EQ_EXPR, type,
13592 fold_convert_loc (loc,
13593 TREE_TYPE (arg1), arg0),
13594 arg1);
13595 case LE_EXPR:
13596 arg1 = const_binop (PLUS_EXPR, arg1,
13597 build_int_cst (TREE_TYPE (arg1), 1));
13598 return fold_build2_loc (loc, NE_EXPR, type,
13599 fold_convert_loc (loc, TREE_TYPE (arg1),
13600 arg0),
13601 arg1);
13602 default:
13603 break;
13605 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13606 == min_hi
13607 && TREE_INT_CST_LOW (arg1) == min_lo)
13608 switch (code)
13610 case LT_EXPR:
13611 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13613 case LE_EXPR:
13614 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13616 case GE_EXPR:
13617 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13619 case GT_EXPR:
13620 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13622 default:
13623 break;
13625 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13626 == min_hi
13627 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13628 switch (code)
13630 case GE_EXPR:
13631 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13632 return fold_build2_loc (loc, NE_EXPR, type,
13633 fold_convert_loc (loc,
13634 TREE_TYPE (arg1), arg0),
13635 arg1);
13636 case LT_EXPR:
13637 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13638 return fold_build2_loc (loc, EQ_EXPR, type,
13639 fold_convert_loc (loc, TREE_TYPE (arg1),
13640 arg0),
13641 arg1);
13642 default:
13643 break;
13646 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13647 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13648 && TYPE_UNSIGNED (arg1_type)
13649 /* We will flip the signedness of the comparison operator
13650 associated with the mode of arg1, so the sign bit is
13651 specified by this mode. Check that arg1 is the signed
13652 max associated with this sign bit. */
13653 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13654 /* signed_type does not work on pointer types. */
13655 && INTEGRAL_TYPE_P (arg1_type))
13657 /* The following case also applies to X < signed_max+1
13658 and X >= signed_max+1 because of previous transformations. */
13659 if (code == LE_EXPR || code == GT_EXPR)
13661 tree st;
13662 st = signed_type_for (TREE_TYPE (arg1));
13663 return fold_build2_loc (loc,
13664 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13665 type, fold_convert_loc (loc, st, arg0),
13666 build_int_cst (st, 0));
13672 /* If we are comparing an ABS_EXPR with a constant, we can
13673 convert all the cases into explicit comparisons, but they may
13674 well not be faster than doing the ABS and one comparison.
13675 But ABS (X) <= C is a range comparison, which becomes a subtraction
13676 and a comparison, and is probably faster. */
13677 if (code == LE_EXPR
13678 && TREE_CODE (arg1) == INTEGER_CST
13679 && TREE_CODE (arg0) == ABS_EXPR
13680 && ! TREE_SIDE_EFFECTS (arg0)
13681 && (0 != (tem = negate_expr (arg1)))
13682 && TREE_CODE (tem) == INTEGER_CST
13683 && !TREE_OVERFLOW (tem))
13684 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13685 build2 (GE_EXPR, type,
13686 TREE_OPERAND (arg0, 0), tem),
13687 build2 (LE_EXPR, type,
13688 TREE_OPERAND (arg0, 0), arg1));
13690 /* Convert ABS_EXPR<x> >= 0 to true. */
13691 strict_overflow_p = false;
13692 if (code == GE_EXPR
13693 && (integer_zerop (arg1)
13694 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13695 && real_zerop (arg1)))
13696 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13698 if (strict_overflow_p)
13699 fold_overflow_warning (("assuming signed overflow does not occur "
13700 "when simplifying comparison of "
13701 "absolute value and zero"),
13702 WARN_STRICT_OVERFLOW_CONDITIONAL);
13703 return omit_one_operand_loc (loc, type,
13704 constant_boolean_node (true, type),
13705 arg0);
13708 /* Convert ABS_EXPR<x> < 0 to false. */
13709 strict_overflow_p = false;
13710 if (code == LT_EXPR
13711 && (integer_zerop (arg1) || real_zerop (arg1))
13712 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13714 if (strict_overflow_p)
13715 fold_overflow_warning (("assuming signed overflow does not occur "
13716 "when simplifying comparison of "
13717 "absolute value and zero"),
13718 WARN_STRICT_OVERFLOW_CONDITIONAL);
13719 return omit_one_operand_loc (loc, type,
13720 constant_boolean_node (false, type),
13721 arg0);
13724 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13725 and similarly for >= into !=. */
13726 if ((code == LT_EXPR || code == GE_EXPR)
13727 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13728 && TREE_CODE (arg1) == LSHIFT_EXPR
13729 && integer_onep (TREE_OPERAND (arg1, 0)))
13730 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13731 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13732 TREE_OPERAND (arg1, 1)),
13733 build_zero_cst (TREE_TYPE (arg0)));
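/* For example, for unsigned x, x < (1 << y) folds to (x >> y) == 0,
   and x >= (1 << y) folds to (x >> y) != 0.  */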
13735 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13736 otherwise Y might be >= # of bits in X's type and thus e.g.
13737 (unsigned char) (1 << Y) for Y == 15 might be 0.
13738 If the cast is widening, then 1 << Y should have unsigned type,
13739 otherwise, if Y is the number of bits in the signed shift type minus 1,
13740 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
13741 Y == 31 might be 0xffffffff80000000. */
13742 if ((code == LT_EXPR || code == GE_EXPR)
13743 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13744 && CONVERT_EXPR_P (arg1)
13745 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13746 && (TYPE_PRECISION (TREE_TYPE (arg1))
13747 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13748 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13749 || (TYPE_PRECISION (TREE_TYPE (arg1))
13750 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13751 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13753 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13754 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13755 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13756 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13757 build_zero_cst (TREE_TYPE (arg0)));
13760 return NULL_TREE;
13762 case UNORDERED_EXPR:
13763 case ORDERED_EXPR:
13764 case UNLT_EXPR:
13765 case UNLE_EXPR:
13766 case UNGT_EXPR:
13767 case UNGE_EXPR:
13768 case UNEQ_EXPR:
13769 case LTGT_EXPR:
13770 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13772 t1 = fold_relational_const (code, type, arg0, arg1);
13773 if (t1 != NULL_TREE)
13774 return t1;
13777 /* If the first operand is NaN, the result is constant. */
13778 if (TREE_CODE (arg0) == REAL_CST
13779 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13780 && (code != LTGT_EXPR || ! flag_trapping_math))
13782 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13783 ? integer_zero_node
13784 : integer_one_node;
13785 return omit_one_operand_loc (loc, type, t1, arg1);
13788 /* If the second operand is NaN, the result is constant. */
13789 if (TREE_CODE (arg1) == REAL_CST
13790 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13791 && (code != LTGT_EXPR || ! flag_trapping_math))
13793 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13794 ? integer_zero_node
13795 : integer_one_node;
13796 return omit_one_operand_loc (loc, type, t1, arg0);
13799 /* Simplify unordered comparison of something with itself. */
13800 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13801 && operand_equal_p (arg0, arg1, 0))
13802 return constant_boolean_node (1, type);
13804 if (code == LTGT_EXPR
13805 && !flag_trapping_math
13806 && operand_equal_p (arg0, arg1, 0))
13807 return constant_boolean_node (0, type);
13809 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13811 tree targ0 = strip_float_extensions (arg0);
13812 tree targ1 = strip_float_extensions (arg1);
13813 tree newtype = TREE_TYPE (targ0);
13815 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13816 newtype = TREE_TYPE (targ1);
13818 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13819 return fold_build2_loc (loc, code, type,
13820 fold_convert_loc (loc, newtype, targ0),
13821 fold_convert_loc (loc, newtype, targ1));
13824 return NULL_TREE;
13826 case COMPOUND_EXPR:
13827 /* When pedantic, a compound expression can be neither an lvalue
13828 nor an integer constant expression. */
13829 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13830 return NULL_TREE;
13831 /* Don't let (0, 0) be a null pointer constant. */
13832 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13833 : fold_convert_loc (loc, type, arg1);
13834 return pedantic_non_lvalue_loc (loc, tem);
13836 case COMPLEX_EXPR:
13837 if ((TREE_CODE (arg0) == REAL_CST
13838 && TREE_CODE (arg1) == REAL_CST)
13839 || (TREE_CODE (arg0) == INTEGER_CST
13840 && TREE_CODE (arg1) == INTEGER_CST))
13841 return build_complex (type, arg0, arg1);
13842 if (TREE_CODE (arg0) == REALPART_EXPR
13843 && TREE_CODE (arg1) == IMAGPART_EXPR
13844 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13845 && operand_equal_p (TREE_OPERAND (arg0, 0),
13846 TREE_OPERAND (arg1, 0), 0))
13847 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13848 TREE_OPERAND (arg1, 0));
13849 return NULL_TREE;
13851 case ASSERT_EXPR:
13852 /* An ASSERT_EXPR should never be passed to fold_binary. */
13853 gcc_unreachable ();
13855 case VEC_PACK_TRUNC_EXPR:
13856 case VEC_PACK_FIX_TRUNC_EXPR:
13858 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13859 tree *elts;
13861 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13862 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13863 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13864 return NULL_TREE;
13866 elts = XALLOCAVEC (tree, nelts);
13867 if (!vec_cst_ctor_to_array (arg0, elts)
13868 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13869 return NULL_TREE;
13871 for (i = 0; i < nelts; i++)
13873 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13874 ? NOP_EXPR : FIX_TRUNC_EXPR,
13875 TREE_TYPE (type), elts[i]);
13876 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13877 return NULL_TREE;
13880 return build_vector (type, elts);
13883 case VEC_WIDEN_MULT_LO_EXPR:
13884 case VEC_WIDEN_MULT_HI_EXPR:
13885 case VEC_WIDEN_MULT_EVEN_EXPR:
13886 case VEC_WIDEN_MULT_ODD_EXPR:
13888 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13889 unsigned int out, ofs, scale;
13890 tree *elts;
13892 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13893 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13894 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13895 return NULL_TREE;
13897 elts = XALLOCAVEC (tree, nelts * 4);
13898 if (!vec_cst_ctor_to_array (arg0, elts)
13899 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13900 return NULL_TREE;
13902 if (code == VEC_WIDEN_MULT_LO_EXPR)
13903 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13904 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13905 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13906 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13907 scale = 1, ofs = 0;
13908 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13909 scale = 1, ofs = 1;
13911 for (out = 0; out < nelts; out++)
13913 unsigned int in1 = (out << scale) + ofs;
13914 unsigned int in2 = in1 + nelts * 2;
13915 tree t1, t2;
13917 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13918 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13920 if (t1 == NULL_TREE || t2 == NULL_TREE)
13921 return NULL_TREE;
13922 elts[out] = const_binop (MULT_EXPR, t1, t2);
13923 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13924 return NULL_TREE;
13927 return build_vector (type, elts);
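/* A minimal sketch (plain C, hypothetical; it uses two separate source
arrays rather than the single ELTS buffer above) of the lane selection
for the widening multiplies: output lane I reads input lane
(I << scale) + ofs, where LO/HI use scale 0 with an endian-dependent
offset and EVEN/ODD use scale 1 with ofs 0 or 1. */
#if 0
{
  enum { NELTS = 4 };			/* lanes in the output vector */
  int a[NELTS * 2], b[NELTS * 2], out[NELTS];
  unsigned scale = 1, ofs = 1;		/* VEC_WIDEN_MULT_ODD_EXPR */
  for (unsigned i = 0; i < NELTS; i++)
    out[i] = a[(i << scale) + ofs] * b[(i << scale) + ofs];
}
#endif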
13930 default:
13931 return NULL_TREE;
13932 } /* switch (code) */
13935 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13936 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13937 of GOTO_EXPR. */
13939 static tree
13940 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13942 switch (TREE_CODE (*tp))
13944 case LABEL_EXPR:
13945 return *tp;
13947 case GOTO_EXPR:
13948 *walk_subtrees = 0;
13950 /* ... fall through ... */
13952 default:
13953 return NULL_TREE;
13957 /* Return whether the sub-tree ST contains a label which is accessible from
13958 outside the sub-tree. */
13960 static bool
13961 contains_label_p (tree st)
13963 return
13964 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
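/* A hypothetical example (not part of this file) of the hazard the
check above guards against: folding the constant condition would
delete the arm that defines "lab", leaving the goto without a target,
so a dead COND_EXPR arm containing a reachable label must be kept. */
#if 0
void
demo_contains_label (int *p)
{
  if (0)
    {
    lab:
      *p = 1;
      return;
    }
  goto lab;
}
#endif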
13967 /* Fold a ternary expression of code CODE and type TYPE with operands
13968 OP0, OP1, and OP2. Return the folded expression if folding is
13969 successful. Otherwise, return NULL_TREE. */
13971 tree
13972 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13973 tree op0, tree op1, tree op2)
13975 tree tem;
13976 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13977 enum tree_code_class kind = TREE_CODE_CLASS (code);
13979 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13980 && TREE_CODE_LENGTH (code) == 3);
13982 /* Strip any conversions that don't change the mode. This is safe
13983 for every expression, except for a comparison expression because
13984 its signedness is derived from its operands. So, in the latter
13985 case, only strip conversions that don't change the signedness.
13987 Note that this is done as an internal manipulation within the
13988 constant folder, in order to find the simplest representation of
13989 the arguments so that their form can be studied. In any case,
13990 the appropriate type conversions should be put back in the tree
13991 that will get out of the constant folder. */
13992 if (op0)
13994 arg0 = op0;
13995 STRIP_NOPS (arg0);
13998 if (op1)
14000 arg1 = op1;
14001 STRIP_NOPS (arg1);
14004 if (op2)
14006 arg2 = op2;
14007 STRIP_NOPS (arg2);
14010 switch (code)
14012 case COMPONENT_REF:
14013 if (TREE_CODE (arg0) == CONSTRUCTOR
14014 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14016 unsigned HOST_WIDE_INT idx;
14017 tree field, value;
14018 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14019 if (field == arg1)
14020 return value;
14022 return NULL_TREE;
14024 case COND_EXPR:
14025 case VEC_COND_EXPR:
14026 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14027 so all simple results must be passed through pedantic_non_lvalue. */
14028 if (TREE_CODE (arg0) == INTEGER_CST)
14030 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14031 tem = integer_zerop (arg0) ? op2 : op1;
14032 /* Only optimize constant conditions when the selected branch
14033 has the same type as the COND_EXPR. This avoids optimizing
14034 away "c ? x : throw", where the throw has a void type.
14035 Avoid throwing away an operand that contains a label. */
14036 if ((!TREE_SIDE_EFFECTS (unused_op)
14037 || !contains_label_p (unused_op))
14038 && (! VOID_TYPE_P (TREE_TYPE (tem))
14039 || VOID_TYPE_P (type)))
14040 return pedantic_non_lvalue_loc (loc, tem);
14041 return NULL_TREE;
14043 else if (TREE_CODE (arg0) == VECTOR_CST)
14045 if (integer_all_onesp (arg0))
14046 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14047 if (integer_zerop (arg0))
14048 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14050 if ((TREE_CODE (arg1) == VECTOR_CST
14051 || TREE_CODE (arg1) == CONSTRUCTOR)
14052 && (TREE_CODE (arg2) == VECTOR_CST
14053 || TREE_CODE (arg2) == CONSTRUCTOR))
14055 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14056 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14057 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14058 for (i = 0; i < nelts; i++)
14060 tree val = VECTOR_CST_ELT (arg0, i);
14061 if (integer_all_onesp (val))
14062 sel[i] = i;
14063 else if (integer_zerop (val))
14064 sel[i] = nelts + i;
14065 else /* Currently unreachable. */
14066 return NULL_TREE;
14068 tree t = fold_vec_perm (type, arg1, arg2, sel);
14069 if (t != NULL_TREE)
14070 return t;
14074 if (operand_equal_p (arg1, op2, 0))
14075 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14077 /* If we have A op B ? A : C, we may be able to convert this to a
14078 simpler expression, depending on the operation and the values
14079 of B and C. Signed zeros prevent all of these transformations,
14080 for reasons given above each one.
14082 Also try swapping the arguments and inverting the conditional. */
14083 if (COMPARISON_CLASS_P (arg0)
14084 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14085 arg1, TREE_OPERAND (arg0, 1))
14086 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14088 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14089 if (tem)
14090 return tem;
14093 if (COMPARISON_CLASS_P (arg0)
14094 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14095 op2,
14096 TREE_OPERAND (arg0, 1))
14097 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14099 location_t loc0 = expr_location_or (arg0, loc);
14100 tem = fold_invert_truthvalue (loc0, arg0);
14101 if (tem && COMPARISON_CLASS_P (tem))
14103 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14104 if (tem)
14105 return tem;
14109 /* If the second operand is simpler than the third, swap them
14110 since that produces better jump optimization results. */
14111 if (truth_value_p (TREE_CODE (arg0))
14112 && tree_swap_operands_p (op1, op2, false))
14114 location_t loc0 = expr_location_or (arg0, loc);
14115 /* See if this can be inverted. If it can't, possibly because
14116 it was a floating-point inequality comparison, don't do
14117 anything. */
14118 tem = fold_invert_truthvalue (loc0, arg0);
14119 if (tem)
14120 return fold_build3_loc (loc, code, type, tem, op2, op1);
14123 /* Convert A ? 1 : 0 to simply A. */
14124 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14125 : (integer_onep (op1)
14126 && !VECTOR_TYPE_P (type)))
14127 && integer_zerop (op2)
14128 /* If we try to convert OP0 to our type, the
14129 call to fold will try to move the conversion inside
14130 a COND, which will recurse. In that case, the COND_EXPR
14131 is probably the best choice, so leave it alone. */
14132 && type == TREE_TYPE (arg0))
14133 return pedantic_non_lvalue_loc (loc, arg0);
14135 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14136 over COND_EXPR in cases such as floating point comparisons. */
14137 if (integer_zerop (op1)
14138 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14139 : (integer_onep (op2)
14140 && !VECTOR_TYPE_P (type)))
14141 && truth_value_p (TREE_CODE (arg0)))
14142 return pedantic_non_lvalue_loc (loc,
14143 fold_convert_loc (loc, type,
14144 invert_truthvalue_loc (loc,
14145 arg0)));
14147 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14148 if (TREE_CODE (arg0) == LT_EXPR
14149 && integer_zerop (TREE_OPERAND (arg0, 1))
14150 && integer_zerop (op2)
14151 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14153 /* sign_bit_p only checks ARG1 bits within A's precision.
14154 If <sign bit of A> has a wider type than A, bits outside
14155 of A's precision in <sign bit of A> need to be checked.
14156 If they are all 0, this optimization needs to be done
14157 in unsigned A's type; if they are all 1, in signed A's type;
14158 otherwise this can't be done. */
14159 if (TYPE_PRECISION (TREE_TYPE (tem))
14160 < TYPE_PRECISION (TREE_TYPE (arg1))
14161 && TYPE_PRECISION (TREE_TYPE (tem))
14162 < TYPE_PRECISION (type))
14164 unsigned HOST_WIDE_INT mask_lo;
14165 HOST_WIDE_INT mask_hi;
14166 int inner_width, outer_width;
14167 tree tem_type;
14169 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14170 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14171 if (outer_width > TYPE_PRECISION (type))
14172 outer_width = TYPE_PRECISION (type);
14174 if (outer_width > HOST_BITS_PER_WIDE_INT)
14176 mask_hi = (HOST_WIDE_INT_M1U
14177 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14178 mask_lo = -1;
14180 else
14182 mask_hi = 0;
14183 mask_lo = (HOST_WIDE_INT_M1U
14184 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14186 if (inner_width > HOST_BITS_PER_WIDE_INT)
14188 mask_hi &= ~(HOST_WIDE_INT_M1U
14189 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14190 mask_lo = 0;
14192 else
14193 mask_lo &= ~(HOST_WIDE_INT_M1U
14194 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14196 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14197 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14199 tem_type = signed_type_for (TREE_TYPE (tem));
14200 tem = fold_convert_loc (loc, tem_type, tem);
14202 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14203 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14205 tem_type = unsigned_type_for (TREE_TYPE (tem));
14206 tem = fold_convert_loc (loc, tem_type, tem);
14208 else
14209 tem = NULL;
14212 if (tem)
14213 return
14214 fold_convert_loc (loc, type,
14215 fold_build2_loc (loc, BIT_AND_EXPR,
14216 TREE_TYPE (tem), tem,
14217 fold_convert_loc (loc,
14218 TREE_TYPE (tem),
14219 arg1)));
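/* Worked example of the mask computation above, assuming a 64-bit
HOST_WIDE_INT host: for a 32-bit A (inner_width) tested against a
64-bit <sign bit of A> (outer_width), mask_hi/mask_lo select bits
32..63 of ARG1. If those bits are all 1, sign extension of A
reproduces them, so the AND is done in signed A's type; if all 0,
zero extension does, so unsigned A's type is used; otherwise neither
extension matches ARG1 and the fold is abandoned. */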
14222 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14223 already handled above. */
14224 if (TREE_CODE (arg0) == BIT_AND_EXPR
14225 && integer_onep (TREE_OPERAND (arg0, 1))
14226 && integer_zerop (op2)
14227 && integer_pow2p (arg1))
14229 tree tem = TREE_OPERAND (arg0, 0);
14230 STRIP_NOPS (tem);
14231 if (TREE_CODE (tem) == RSHIFT_EXPR
14232 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14233 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14234 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14235 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14236 TREE_OPERAND (tem, 0), arg1);
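/* A minimal sketch (plain C, hypothetical values) of the equivalence
used above, here with N == 3: testing bit 3 and then materializing
1 << 3 is the same as masking bit 3 directly. */
#if 0
{
  unsigned a = 0xdeadbeefu, n = 3;
  unsigned lhs = ((a >> n) & 1) ? (1u << n) : 0u;
  unsigned rhs = a & (1u << n);		/* always equals lhs */
}
#endif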
14239 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14240 is probably obsolete because the first operand should be a
14241 truth value (that's why we have the two cases above), but let's
14242 leave it in until we can confirm this for all front-ends. */
14243 if (integer_zerop (op2)
14244 && TREE_CODE (arg0) == NE_EXPR
14245 && integer_zerop (TREE_OPERAND (arg0, 1))
14246 && integer_pow2p (arg1)
14247 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14248 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14249 arg1, OEP_ONLY_CONST))
14250 return pedantic_non_lvalue_loc (loc,
14251 fold_convert_loc (loc, type,
14252 TREE_OPERAND (arg0, 0)));
14254 /* Disable the transformations below for vectors, since
14255 fold_binary_op_with_conditional_arg may undo them immediately,
14256 yielding an infinite loop. */
14257 if (code == VEC_COND_EXPR)
14258 return NULL_TREE;
14260 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14261 if (integer_zerop (op2)
14262 && truth_value_p (TREE_CODE (arg0))
14263 && truth_value_p (TREE_CODE (arg1))
14264 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14265 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14266 : TRUTH_ANDIF_EXPR,
14267 type, fold_convert_loc (loc, type, arg0), arg1);
14269 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14270 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14271 && truth_value_p (TREE_CODE (arg0))
14272 && truth_value_p (TREE_CODE (arg1))
14273 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14275 location_t loc0 = expr_location_or (arg0, loc);
14276 /* Only perform transformation if ARG0 is easily inverted. */
14277 tem = fold_invert_truthvalue (loc0, arg0);
14278 if (tem)
14279 return fold_build2_loc (loc, code == VEC_COND_EXPR
14280 ? BIT_IOR_EXPR
14281 : TRUTH_ORIF_EXPR,
14282 type, fold_convert_loc (loc, type, tem),
14283 arg1);
14286 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14287 if (integer_zerop (arg1)
14288 && truth_value_p (TREE_CODE (arg0))
14289 && truth_value_p (TREE_CODE (op2))
14290 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14292 location_t loc0 = expr_location_or (arg0, loc);
14293 /* Only perform transformation if ARG0 is easily inverted. */
14294 tem = fold_invert_truthvalue (loc0, arg0);
14295 if (tem)
14296 return fold_build2_loc (loc, code == VEC_COND_EXPR
14297 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14298 type, fold_convert_loc (loc, type, tem),
14299 op2);
14302 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14303 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14304 && truth_value_p (TREE_CODE (arg0))
14305 && truth_value_p (TREE_CODE (op2))
14306 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14307 return fold_build2_loc (loc, code == VEC_COND_EXPR
14308 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14309 type, fold_convert_loc (loc, type, arg0), op2);
14311 return NULL_TREE;
14313 case CALL_EXPR:
14314 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14315 of fold_ternary on them. */
14316 gcc_unreachable ();
14318 case BIT_FIELD_REF:
14319 if ((TREE_CODE (arg0) == VECTOR_CST
14320 || (TREE_CODE (arg0) == CONSTRUCTOR
14321 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14322 && (type == TREE_TYPE (TREE_TYPE (arg0))
14323 || (TREE_CODE (type) == VECTOR_TYPE
14324 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14326 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14327 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14328 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14329 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14331 if (n != 0
14332 && (idx % width) == 0
14333 && (n % width) == 0
14334 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14336 idx = idx / width;
14337 n = n / width;
14339 if (TREE_CODE (arg0) == VECTOR_CST)
14341 if (n == 1)
14342 return VECTOR_CST_ELT (arg0, idx);
14344 tree *vals = XALLOCAVEC (tree, n);
14345 for (unsigned i = 0; i < n; ++i)
14346 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14347 return build_vector (type, vals);
14350 /* Constructor elements can be subvectors. */
14351 unsigned HOST_WIDE_INT k = 1;
14352 if (CONSTRUCTOR_NELTS (arg0) != 0)
14354 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14355 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14356 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14359 /* We keep an exact subset of the constructor elements. */
14360 if ((idx % k) == 0 && (n % k) == 0)
14362 if (CONSTRUCTOR_NELTS (arg0) == 0)
14363 return build_constructor (type, NULL);
14364 idx /= k;
14365 n /= k;
14366 if (n == 1)
14368 if (idx < CONSTRUCTOR_NELTS (arg0))
14369 return CONSTRUCTOR_ELT (arg0, idx)->value;
14370 return build_zero_cst (type);
14373 vec<constructor_elt, va_gc> *vals;
14374 vec_alloc (vals, n);
14375 for (unsigned i = 0;
14376 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14377 ++i)
14378 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14379 CONSTRUCTOR_ELT
14380 (arg0, idx + i)->value);
14381 return build_constructor (type, vals);
14383 /* The bitfield references a single constructor element. */
14384 else if (idx + n <= (idx / k + 1) * k)
14386 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14387 return build_zero_cst (type);
14388 else if (n == k)
14389 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14390 else
14391 return fold_build3_loc (loc, code, type,
14392 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14393 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14398 /* A BIT_FIELD_REF that references the full argument can be stripped. */
14399 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14400 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14401 && integer_zerop (op2))
14402 return fold_convert_loc (loc, type, arg0);
14404 /* On constants we can use native encode/interpret to constant
14405 fold (nearly) all BIT_FIELD_REFs. */
14406 if (CONSTANT_CLASS_P (arg0)
14407 && can_native_interpret_type_p (type)
14408 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14409 /* This limitation should not be necessary, we just need to
14410 round this up to mode size. */
14411 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14412 /* Need bit-shifting of the buffer to relax the following. */
14413 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14415 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14416 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14417 unsigned HOST_WIDE_INT clen;
14418 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14419 /* ??? We cannot tell native_encode_expr to start at
14420 some random byte only. So limit ourselves to a reasonable
14421 amount of work. */
14422 if (clen <= 4096)
14424 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14425 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14426 if (len > 0
14427 && len * BITS_PER_UNIT >= bitpos + bitsize)
14429 tree v = native_interpret_expr (type,
14430 b + bitpos / BITS_PER_UNIT,
14431 bitsize / BITS_PER_UNIT);
14432 if (v)
14433 return v;
14438 return NULL_TREE;
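/* A minimal sketch (plain C, hypothetical values) of the
encode/interpret technique above: serialize the constant into a byte
buffer, then reinterpret the addressed byte range -- here byte 2 of a
32-bit constant as an 8-bit value, i.e. BIT_FIELD_REF <cst, 8, 16>
on a little-endian host. */
#if 0
{
  unsigned int cst = 0x11223344u;
  unsigned char buf[sizeof cst];
  __builtin_memcpy (buf, &cst, sizeof cst);
  unsigned char piece = buf[2];		/* 0x22 on little endian */
}
#endif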
14440 case FMA_EXPR:
14441 /* For integers we can decompose the FMA if possible. */
14442 if (TREE_CODE (arg0) == INTEGER_CST
14443 && TREE_CODE (arg1) == INTEGER_CST)
14444 return fold_build2_loc (loc, PLUS_EXPR, type,
14445 const_binop (MULT_EXPR, arg0, arg1), arg2);
14446 if (integer_zerop (arg2))
14447 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14449 return fold_fma (loc, type, arg0, arg1, arg2);
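/* For example, FMA <5, 7, 3> folds through const_binop to the
constant 5 * 7 + 3, and FMA <a, b, 0> degenerates to the plain
multiplication a * b; all other cases are left to fold_fma. */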
14451 case VEC_PERM_EXPR:
14452 if (TREE_CODE (arg2) == VECTOR_CST)
14454 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14455 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14456 tree t;
14457 bool need_mask_canon = false;
14458 bool all_in_vec0 = true;
14459 bool all_in_vec1 = true;
14460 bool maybe_identity = true;
14461 bool single_arg = (op0 == op1);
14462 bool changed = false;
14464 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14465 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14466 for (i = 0; i < nelts; i++)
14468 tree val = VECTOR_CST_ELT (arg2, i);
14469 if (TREE_CODE (val) != INTEGER_CST)
14470 return NULL_TREE;
14472 sel[i] = TREE_INT_CST_LOW (val) & mask;
14473 if (TREE_INT_CST_HIGH (val)
14474 || ((unsigned HOST_WIDE_INT)
14475 TREE_INT_CST_LOW (val) != sel[i]))
14476 need_mask_canon = true;
14478 if (sel[i] < nelts)
14479 all_in_vec1 = false;
14480 else
14481 all_in_vec0 = false;
14483 if ((sel[i] & (nelts-1)) != i)
14484 maybe_identity = false;
14487 if (maybe_identity)
14489 if (all_in_vec0)
14490 return op0;
14491 if (all_in_vec1)
14492 return op1;
14495 if (all_in_vec0)
14496 op1 = op0;
14497 else if (all_in_vec1)
14499 op0 = op1;
14500 for (i = 0; i < nelts; i++)
14501 sel[i] -= nelts;
14502 need_mask_canon = true;
14505 if ((TREE_CODE (op0) == VECTOR_CST
14506 || TREE_CODE (op0) == CONSTRUCTOR)
14507 && (TREE_CODE (op1) == VECTOR_CST
14508 || TREE_CODE (op1) == CONSTRUCTOR))
14510 t = fold_vec_perm (type, op0, op1, sel);
14511 if (t != NULL_TREE)
14512 return t;
14515 if (op0 == op1 && !single_arg)
14516 changed = true;
14518 if (need_mask_canon && arg2 == op2)
14520 tree *tsel = XALLOCAVEC (tree, nelts);
14521 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14522 for (i = 0; i < nelts; i++)
14523 tsel[i] = build_int_cst (eltype, sel[i]);
14524 op2 = build_vector (TREE_TYPE (arg2), tsel);
14525 changed = true;
14528 if (changed)
14529 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14531 return NULL_TREE;
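/* A minimal sketch (plain C, hypothetical mask) of the selector
canonicalization above for nelts == 4: each mask element is first
reduced to the valid range, and once every element is known to select
from the second vector, the operands are collapsed and the mask is
rebased onto vec0. */
#if 0
{
  unsigned char sel[4] = { 5, 7, 4, 6 };	/* all in vec1 */
  for (unsigned i = 0; i < 4; i++)
    sel[i] -= 4;			/* { 1, 3, 0, 2 } into vec0 */
}
#endif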
14533 default:
14534 return NULL_TREE;
14535 } /* switch (code) */
14538 /* Perform constant folding and related simplification of EXPR.
14539 The related simplifications include x*1 => x, x*0 => 0, etc.,
14540 and application of the associative law.
14541 NOP_EXPR conversions may be removed freely (as long as we
14542 are careful not to change the type of the overall expression).
14543 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14544 but we can constant-fold them if they have constant operands. */
14546 #ifdef ENABLE_FOLD_CHECKING
14547 # define fold(x) fold_1 (x)
14548 static tree fold_1 (tree);
14549 static
14550 #endif
14551 tree
14552 fold (tree expr)
14554 const tree t = expr;
14555 enum tree_code code = TREE_CODE (t);
14556 enum tree_code_class kind = TREE_CODE_CLASS (code);
14557 tree tem;
14558 location_t loc = EXPR_LOCATION (expr);
14560 /* Return right away if a constant. */
14561 if (kind == tcc_constant)
14562 return t;
14564 /* CALL_EXPR-like objects with variable numbers of operands are
14565 treated specially. */
14566 if (kind == tcc_vl_exp)
14568 if (code == CALL_EXPR)
14570 tem = fold_call_expr (loc, expr, false);
14571 return tem ? tem : expr;
14573 return expr;
14576 if (IS_EXPR_CODE_CLASS (kind))
14578 tree type = TREE_TYPE (t);
14579 tree op0, op1, op2;
14581 switch (TREE_CODE_LENGTH (code))
14583 case 1:
14584 op0 = TREE_OPERAND (t, 0);
14585 tem = fold_unary_loc (loc, code, type, op0);
14586 return tem ? tem : expr;
14587 case 2:
14588 op0 = TREE_OPERAND (t, 0);
14589 op1 = TREE_OPERAND (t, 1);
14590 tem = fold_binary_loc (loc, code, type, op0, op1);
14591 return tem ? tem : expr;
14592 case 3:
14593 op0 = TREE_OPERAND (t, 0);
14594 op1 = TREE_OPERAND (t, 1);
14595 op2 = TREE_OPERAND (t, 2);
14596 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14597 return tem ? tem : expr;
14598 default:
14599 break;
14603 switch (code)
14605 case ARRAY_REF:
14607 tree op0 = TREE_OPERAND (t, 0);
14608 tree op1 = TREE_OPERAND (t, 1);
14610 if (TREE_CODE (op1) == INTEGER_CST
14611 && TREE_CODE (op0) == CONSTRUCTOR
14612 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14614 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14615 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14616 unsigned HOST_WIDE_INT begin = 0;
14618 /* Find a matching index by means of a binary search. */
14619 while (begin != end)
14621 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14622 tree index = (*elts)[middle].index;
14624 if (TREE_CODE (index) == INTEGER_CST
14625 && tree_int_cst_lt (index, op1))
14626 begin = middle + 1;
14627 else if (TREE_CODE (index) == INTEGER_CST
14628 && tree_int_cst_lt (op1, index))
14629 end = middle;
14630 else if (TREE_CODE (index) == RANGE_EXPR
14631 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14632 begin = middle + 1;
14633 else if (TREE_CODE (index) == RANGE_EXPR
14634 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14635 end = middle;
14636 else
14637 return (*elts)[middle].value;
14641 return t;
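/* A minimal sketch (plain C over an int key array; not part of this
file) of the basic form of the search above: constructor indices are
sorted, and RANGE_EXPR entries widen a slot into an interval, so the
lookup is a standard O(log n) binary search. */
#if 0
static int
demo_find_index (const int *keys, int n, int key)
{
  int begin = 0, end = n;
  while (begin != end)
    {
      int middle = (begin + end) / 2;
      if (keys[middle] < key)
	begin = middle + 1;
      else if (key < keys[middle])
	end = middle;
      else
	return middle;
    }
  return -1;			/* no matching element */
}
#endif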
14644 /* Return a VECTOR_CST if possible. */
14645 case CONSTRUCTOR:
14647 tree type = TREE_TYPE (t);
14648 if (TREE_CODE (type) != VECTOR_TYPE)
14649 return t;
14651 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14652 unsigned HOST_WIDE_INT idx, pos = 0;
14653 tree value;
14655 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14657 if (!CONSTANT_CLASS_P (value))
14658 return t;
14659 if (TREE_CODE (value) == VECTOR_CST)
14661 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14662 vec[pos++] = VECTOR_CST_ELT (value, i);
14664 else
14665 vec[pos++] = value;
14667 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14668 vec[pos] = build_zero_cst (TREE_TYPE (type));
14670 return build_vector (type, vec);
14673 case CONST_DECL:
14674 return fold (DECL_INITIAL (t));
14676 default:
14677 return t;
14678 } /* switch (code) */
14681 #ifdef ENABLE_FOLD_CHECKING
14682 #undef fold
14684 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14685 hash_table <pointer_hash <tree_node> >);
14686 static void fold_check_failed (const_tree, const_tree);
14687 void print_fold_checksum (const_tree);
14689 /* When --enable-checking=fold, compute a digest of EXPR before
14690 and after the actual fold call, to verify that fold did not
14691 accidentally change the original EXPR. */
14693 tree
14694 fold (tree expr)
14696 tree ret;
14697 struct md5_ctx ctx;
14698 unsigned char checksum_before[16], checksum_after[16];
14699 hash_table <pointer_hash <tree_node> > ht;
14701 ht.create (32);
14702 md5_init_ctx (&ctx);
14703 fold_checksum_tree (expr, &ctx, ht);
14704 md5_finish_ctx (&ctx, checksum_before);
14705 ht.empty ();
14707 ret = fold_1 (expr);
14709 md5_init_ctx (&ctx);
14710 fold_checksum_tree (expr, &ctx, ht);
14711 md5_finish_ctx (&ctx, checksum_after);
14712 ht.dispose ();
14714 if (memcmp (checksum_before, checksum_after, 16))
14715 fold_check_failed (expr, ret);
14717 return ret;
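/* A minimal sketch (plain C; digest, transform, and struct widget are
hypothetical) of the verification pattern above: hash the argument
before and after the call under test and abort if a supposedly
read-only input was mutated. */
#if 0
static void
demo_check_no_mutation (struct widget *w)
{
  unsigned char before[16], after[16];
  digest (w, before);		/* md5-style digest, as above */
  transform (w);		/* the call under test */
  digest (w, after);
  if (memcmp (before, after, 16))
    abort ();			/* the input was modified */
}
#endif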
14720 void
14721 print_fold_checksum (const_tree expr)
14723 struct md5_ctx ctx;
14724 unsigned char checksum[16], cnt;
14725 hash_table <pointer_hash <tree_node> > ht;
14727 ht.create (32);
14728 md5_init_ctx (&ctx);
14729 fold_checksum_tree (expr, &ctx, ht);
14730 md5_finish_ctx (&ctx, checksum);
14731 ht.dispose ();
14732 for (cnt = 0; cnt < 16; ++cnt)
14733 fprintf (stderr, "%02x", checksum[cnt]);
14734 putc ('\n', stderr);
14737 static void
14738 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14740 internal_error ("fold check: original tree changed by fold");
14743 static void
14744 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14745 hash_table <pointer_hash <tree_node> > ht)
14747 tree_node **slot;
14748 enum tree_code code;
14749 union tree_node buf;
14750 int i, len;
14752 recursive_label:
14753 if (expr == NULL)
14754 return;
14755 slot = ht.find_slot (expr, INSERT);
14756 if (*slot != NULL)
14757 return;
14758 *slot = CONST_CAST_TREE (expr);
14759 code = TREE_CODE (expr);
14760 if (TREE_CODE_CLASS (code) == tcc_declaration
14761 && DECL_ASSEMBLER_NAME_SET_P (expr))
14763 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14764 memcpy ((char *) &buf, expr, tree_size (expr));
14765 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14766 expr = (tree) &buf;
14768 else if (TREE_CODE_CLASS (code) == tcc_type
14769 && (TYPE_POINTER_TO (expr)
14770 || TYPE_REFERENCE_TO (expr)
14771 || TYPE_CACHED_VALUES_P (expr)
14772 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14773 || TYPE_NEXT_VARIANT (expr)))
14775 /* Allow these fields to be modified. */
14776 tree tmp;
14777 memcpy ((char *) &buf, expr, tree_size (expr));
14778 expr = tmp = (tree) &buf;
14779 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14780 TYPE_POINTER_TO (tmp) = NULL;
14781 TYPE_REFERENCE_TO (tmp) = NULL;
14782 TYPE_NEXT_VARIANT (tmp) = NULL;
14783 if (TYPE_CACHED_VALUES_P (tmp))
14785 TYPE_CACHED_VALUES_P (tmp) = 0;
14786 TYPE_CACHED_VALUES (tmp) = NULL;
14789 md5_process_bytes (expr, tree_size (expr), ctx);
14790 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14791 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14792 if (TREE_CODE_CLASS (code) != tcc_type
14793 && TREE_CODE_CLASS (code) != tcc_declaration
14794 && code != TREE_LIST
14795 && code != SSA_NAME
14796 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14797 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14798 switch (TREE_CODE_CLASS (code))
14800 case tcc_constant:
14801 switch (code)
14803 case STRING_CST:
14804 md5_process_bytes (TREE_STRING_POINTER (expr),
14805 TREE_STRING_LENGTH (expr), ctx);
14806 break;
14807 case COMPLEX_CST:
14808 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14809 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14810 break;
14811 case VECTOR_CST:
14812 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14813 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14814 break;
14815 default:
14816 break;
14818 break;
14819 case tcc_exceptional:
14820 switch (code)
14822 case TREE_LIST:
14823 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14824 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14825 expr = TREE_CHAIN (expr);
14826 goto recursive_label;
14827 break;
14828 case TREE_VEC:
14829 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14830 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14831 break;
14832 default:
14833 break;
14835 break;
14836 case tcc_expression:
14837 case tcc_reference:
14838 case tcc_comparison:
14839 case tcc_unary:
14840 case tcc_binary:
14841 case tcc_statement:
14842 case tcc_vl_exp:
14843 len = TREE_OPERAND_LENGTH (expr);
14844 for (i = 0; i < len; ++i)
14845 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14846 break;
14847 case tcc_declaration:
14848 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14849 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14850 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14852 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14853 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14854 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14855 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14856 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14858 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14859 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14861 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14863 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14864 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14865 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14867 break;
14868 case tcc_type:
14869 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14870 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14871 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14872 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14873 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14874 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14875 if (INTEGRAL_TYPE_P (expr)
14876 || SCALAR_FLOAT_TYPE_P (expr))
14878 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14879 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14881 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14882 if (TREE_CODE (expr) == RECORD_TYPE
14883 || TREE_CODE (expr) == UNION_TYPE
14884 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14885 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14886 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14887 break;
14888 default:
14889 break;
14893 /* Helper function for outputting the checksum of a tree T. When
14894 debugging with gdb, you can "define mynext" to be "next" followed
14895 by "call debug_fold_checksum (op0)", then just trace down till the
14896 outputs differ. */
14898 DEBUG_FUNCTION void
14899 debug_fold_checksum (const_tree t)
14901 int i;
14902 unsigned char checksum[16];
14903 struct md5_ctx ctx;
14904 hash_table <pointer_hash <tree_node> > ht;
14905 ht.create (32);
14907 md5_init_ctx (&ctx);
14908 fold_checksum_tree (t, &ctx, ht);
14909 md5_finish_ctx (&ctx, checksum);
14910 ht.empty ();
14912 for (i = 0; i < 16; i++)
14913 fprintf (stderr, "%d ", checksum[i]);
14915 fprintf (stderr, "\n");
14918 #endif
14920 /* Fold a unary tree expression with code CODE of type TYPE with an
14921 operand OP0. LOC is the location of the resulting expression.
14922 Return a folded expression if successful. Otherwise, return a tree
14923 expression with code CODE of type TYPE with an operand OP0. */
14925 tree
14926 fold_build1_stat_loc (location_t loc,
14927 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14929 tree tem;
14930 #ifdef ENABLE_FOLD_CHECKING
14931 unsigned char checksum_before[16], checksum_after[16];
14932 struct md5_ctx ctx;
14933 hash_table <pointer_hash <tree_node> > ht;
14935 ht.create (32);
14936 md5_init_ctx (&ctx);
14937 fold_checksum_tree (op0, &ctx, ht);
14938 md5_finish_ctx (&ctx, checksum_before);
14939 ht.empty ();
14940 #endif
14942 tem = fold_unary_loc (loc, code, type, op0);
14943 if (!tem)
14944 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14946 #ifdef ENABLE_FOLD_CHECKING
14947 md5_init_ctx (&ctx);
14948 fold_checksum_tree (op0, &ctx, ht);
14949 md5_finish_ctx (&ctx, checksum_after);
14950 ht.dispose ();
14952 if (memcmp (checksum_before, checksum_after, 16))
14953 fold_check_failed (op0, tem);
14954 #endif
14955 return tem;
14958 /* Fold a binary tree expression with code CODE of type TYPE with
14959 operands OP0 and OP1. LOC is the location of the resulting
14960 expression. Return a folded expression if successful. Otherwise,
14961 return a tree expression with code CODE of type TYPE with operands
14962 OP0 and OP1. */
14964 tree
14965 fold_build2_stat_loc (location_t loc,
14966 enum tree_code code, tree type, tree op0, tree op1
14967 MEM_STAT_DECL)
14969 tree tem;
14970 #ifdef ENABLE_FOLD_CHECKING
14971 unsigned char checksum_before_op0[16],
14972 checksum_before_op1[16],
14973 checksum_after_op0[16],
14974 checksum_after_op1[16];
14975 struct md5_ctx ctx;
14976 hash_table <pointer_hash <tree_node> > ht;
14978 ht.create (32);
14979 md5_init_ctx (&ctx);
14980 fold_checksum_tree (op0, &ctx, ht);
14981 md5_finish_ctx (&ctx, checksum_before_op0);
14982 ht.empty ();
14984 md5_init_ctx (&ctx);
14985 fold_checksum_tree (op1, &ctx, ht);
14986 md5_finish_ctx (&ctx, checksum_before_op1);
14987 ht.empty ();
14988 #endif
14990 tem = fold_binary_loc (loc, code, type, op0, op1);
14991 if (!tem)
14992 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14994 #ifdef ENABLE_FOLD_CHECKING
14995 md5_init_ctx (&ctx);
14996 fold_checksum_tree (op0, &ctx, ht);
14997 md5_finish_ctx (&ctx, checksum_after_op0);
14998 ht.empty ();
15000 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15001 fold_check_failed (op0, tem);
15003 md5_init_ctx (&ctx);
15004 fold_checksum_tree (op1, &ctx, ht);
15005 md5_finish_ctx (&ctx, checksum_after_op1);
15006 ht.dispose ();
15008 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15009 fold_check_failed (op1, tem);
15010 #endif
15011 return tem;
15014 /* Fold a ternary tree expression with code CODE of type TYPE with
15015 operands OP0, OP1, and OP2. Return a folded expression if
15016 successful. Otherwise, return a tree expression with code CODE of
15017 type TYPE with operands OP0, OP1, and OP2. */
15019 tree
15020 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15021 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15023 tree tem;
15024 #ifdef ENABLE_FOLD_CHECKING
15025 unsigned char checksum_before_op0[16],
15026 checksum_before_op1[16],
15027 checksum_before_op2[16],
15028 checksum_after_op0[16],
15029 checksum_after_op1[16],
15030 checksum_after_op2[16];
15031 struct md5_ctx ctx;
15032 hash_table <pointer_hash <tree_node> > ht;
15034 ht.create (32);
15035 md5_init_ctx (&ctx);
15036 fold_checksum_tree (op0, &ctx, ht);
15037 md5_finish_ctx (&ctx, checksum_before_op0);
15038 ht.empty ();
15040 md5_init_ctx (&ctx);
15041 fold_checksum_tree (op1, &ctx, ht);
15042 md5_finish_ctx (&ctx, checksum_before_op1);
15043 ht.empty ();
15045 md5_init_ctx (&ctx);
15046 fold_checksum_tree (op2, &ctx, ht);
15047 md5_finish_ctx (&ctx, checksum_before_op2);
15048 ht.empty ();
15049 #endif
15051 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15052 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15053 if (!tem)
15054 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15056 #ifdef ENABLE_FOLD_CHECKING
15057 md5_init_ctx (&ctx);
15058 fold_checksum_tree (op0, &ctx, ht);
15059 md5_finish_ctx (&ctx, checksum_after_op0);
15060 ht.empty ();
15062 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15063 fold_check_failed (op0, tem);
15065 md5_init_ctx (&ctx);
15066 fold_checksum_tree (op1, &ctx, ht);
15067 md5_finish_ctx (&ctx, checksum_after_op1);
15068 ht.empty ();
15070 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15071 fold_check_failed (op1, tem);
15073 md5_init_ctx (&ctx);
15074 fold_checksum_tree (op2, &ctx, ht);
15075 md5_finish_ctx (&ctx, checksum_after_op2);
15076 ht.dispose ();
15078 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15079 fold_check_failed (op2, tem);
15080 #endif
15081 return tem;
15084 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
15085 arguments in ARGARRAY, and a null static chain.
15086 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15087 of type TYPE from the given operands as constructed by build_call_array. */
15089 tree
15090 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15091 int nargs, tree *argarray)
15093 tree tem;
15094 #ifdef ENABLE_FOLD_CHECKING
15095 unsigned char checksum_before_fn[16],
15096 checksum_before_arglist[16],
15097 checksum_after_fn[16],
15098 checksum_after_arglist[16];
15099 struct md5_ctx ctx;
15100 hash_table <pointer_hash <tree_node> > ht;
15101 int i;
15103 ht.create (32);
15104 md5_init_ctx (&ctx);
15105 fold_checksum_tree (fn, &ctx, ht);
15106 md5_finish_ctx (&ctx, checksum_before_fn);
15107 ht.empty ();
15109 md5_init_ctx (&ctx);
15110 for (i = 0; i < nargs; i++)
15111 fold_checksum_tree (argarray[i], &ctx, ht);
15112 md5_finish_ctx (&ctx, checksum_before_arglist);
15113 ht.empty ();
15114 #endif
15116 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15118 #ifdef ENABLE_FOLD_CHECKING
15119 md5_init_ctx (&ctx);
15120 fold_checksum_tree (fn, &ctx, ht);
15121 md5_finish_ctx (&ctx, checksum_after_fn);
15122 ht.empty ();
15124 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15125 fold_check_failed (fn, tem);
15127 md5_init_ctx (&ctx);
15128 for (i = 0; i < nargs; i++)
15129 fold_checksum_tree (argarray[i], &ctx, ht);
15130 md5_finish_ctx (&ctx, checksum_after_arglist);
15131 ht.dispose ();
15133 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15134 fold_check_failed (NULL_TREE, tem);
15135 #endif
15136 return tem;
15139 /* Perform constant folding and related simplification of initializer
15140 expression EXPR. These behave identically to "fold_buildN" but ignore
15141 potential run-time traps and exceptions that fold must preserve. */
15143 #define START_FOLD_INIT \
15144 int saved_signaling_nans = flag_signaling_nans;\
15145 int saved_trapping_math = flag_trapping_math;\
15146 int saved_rounding_math = flag_rounding_math;\
15147 int saved_trapv = flag_trapv;\
15148 int saved_folding_initializer = folding_initializer;\
15149 flag_signaling_nans = 0;\
15150 flag_trapping_math = 0;\
15151 flag_rounding_math = 0;\
15152 flag_trapv = 0;\
15153 folding_initializer = 1;
15155 #define END_FOLD_INIT \
15156 flag_signaling_nans = saved_signaling_nans;\
15157 flag_trapping_math = saved_trapping_math;\
15158 flag_rounding_math = saved_rounding_math;\
15159 flag_trapv = saved_trapv;\
15160 folding_initializer = saved_folding_initializer;
15162 tree
15163 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15164 tree type, tree op)
15166 tree result;
15167 START_FOLD_INIT;
15169 result = fold_build1_loc (loc, code, type, op);
15171 END_FOLD_INIT;
15172 return result;
15175 tree
15176 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15177 tree type, tree op0, tree op1)
15179 tree result;
15180 START_FOLD_INIT;
15182 result = fold_build2_loc (loc, code, type, op0, op1);
15184 END_FOLD_INIT;
15185 return result;
15188 tree
15189 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15190 tree type, tree op0, tree op1, tree op2)
15192 tree result;
15193 START_FOLD_INIT;
15195 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15197 END_FOLD_INIT;
15198 return result;
15201 tree
15202 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15203 int nargs, tree *argarray)
15205 tree result;
15206 START_FOLD_INIT;
15208 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15210 END_FOLD_INIT;
15211 return result;
15214 #undef START_FOLD_INIT
15215 #undef END_FOLD_INIT
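/* A minimal sketch (not part of this file) of what each
fold_buildN_initializer_loc call expands to: an ordinary
fold_buildN_loc call bracketed by the save/clear/restore of the
floating-point and -ftrapv flags, so that trap-preservation rules
cannot block folding inside a static initializer. */
#if 0
{
  int saved_trapping_math = flag_trapping_math;
  flag_trapping_math = 0;
  result = fold_build2_loc (loc, code, type, op0, op1);
  flag_trapping_math = saved_trapping_math;
}
#endif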
15217 /* Determine whether the first argument is a multiple of the second
15218 argument. Return 0 if it is not, or if we cannot easily determine it to be.
15220 An example of the sort of thing we care about (at this point; this routine
15221 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15222 fold cases do now) is discovering that
15224 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15226 is a multiple of
15228 SAVE_EXPR (J * 8)
15230 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15232 This code also handles discovering that
15234 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15236 is a multiple of 8 so we don't have to worry about dealing with a
15237 possible remainder.
15239 Note that we *look* inside a SAVE_EXPR only to determine how it was
15240 calculated; it is not safe for fold to do much of anything else with the
15241 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15242 at run time. For example, the latter example above *cannot* be implemented
15243 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15244 evaluation time of the original SAVE_EXPR is not necessarily the same at
15245 the time the new expression is evaluated. The only optimization of this
15246 sort that would be valid is changing
15248 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15250 divided by 8 to
15252 SAVE_EXPR (I) * SAVE_EXPR (J)
15254 (where the same SAVE_EXPR (J) is used in the original and the
15255 transformed version). */
15257 int
15258 multiple_of_p (tree type, const_tree top, const_tree bottom)
15260 if (operand_equal_p (top, bottom, 0))
15261 return 1;
15263 if (TREE_CODE (type) != INTEGER_TYPE)
15264 return 0;
15266 switch (TREE_CODE (top))
15268 case BIT_AND_EXPR:
15269 /* Bitwise and provides a power of two multiple. If the mask is
15270 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15271 if (!integer_pow2p (bottom))
15272 return 0;
15273 /* FALLTHRU */
15275 case MULT_EXPR:
15276 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15277 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15279 case PLUS_EXPR:
15280 case MINUS_EXPR:
15281 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15282 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15284 case LSHIFT_EXPR:
15285 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15287 tree op1, t1;
15289 op1 = TREE_OPERAND (top, 1);
15290 /* const_binop may not detect overflow correctly,
15291 so check for it explicitly here. */
15292 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15293 > TREE_INT_CST_LOW (op1)
15294 && TREE_INT_CST_HIGH (op1) == 0
15295 && 0 != (t1 = fold_convert (type,
15296 const_binop (LSHIFT_EXPR,
15297 size_one_node,
15298 op1)))
15299 && !TREE_OVERFLOW (t1))
15300 return multiple_of_p (type, t1, bottom);
15302 return 0;
15304 case NOP_EXPR:
15305 /* Can't handle conversions from non-integral or wider integral type. */
15306 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15307 || (TYPE_PRECISION (type)
15308 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15309 return 0;
15311 /* ... fall through ... */
15313 case SAVE_EXPR:
15314 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15316 case COND_EXPR:
15317 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15318 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15320 case INTEGER_CST:
15321 if (TREE_CODE (bottom) != INTEGER_CST
15322 || integer_zerop (bottom)
15323 || (TYPE_UNSIGNED (type)
15324 && (tree_int_cst_sgn (top) < 0
15325 || tree_int_cst_sgn (bottom) < 0)))
15326 return 0;
15327 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15328 top, bottom));
15330 default:
15331 return 0;
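/* For example, with TOP = SAVE_EXPR (I) * SAVE_EXPR (J * 8) and
BOTTOM = 8, the MULT_EXPR case succeeds because its second factor
looks through the SAVE_EXPR to a MULT_EXPR whose constant operand
satisfies 8 % 8 == 0, so the whole product is known to be a
multiple of 8. */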
15335 /* Return true if CODE or TYPE is known to be non-negative. */
15337 static bool
15338 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15340 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15341 && truth_value_p (code))
15342 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15343 have a signed:1 type (where the values are -1 and 0). */
15344 return true;
15345 return false;
15348 /* Return true if (CODE OP0) is known to be non-negative. If the return
15349 value is based on the assumption that signed overflow is undefined,
15350 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15351 *STRICT_OVERFLOW_P. */
15353 bool
15354 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15355 bool *strict_overflow_p)
15357 if (TYPE_UNSIGNED (type))
15358 return true;
15360 switch (code)
15362 case ABS_EXPR:
15363 /* We can't return 1 if flag_wrapv is set because
15364 ABS_EXPR<INT_MIN> = INT_MIN. */
15365 if (!INTEGRAL_TYPE_P (type))
15366 return true;
15367 if (TYPE_OVERFLOW_UNDEFINED (type))
15369 *strict_overflow_p = true;
15370 return true;
15372 break;
15374 case NON_LVALUE_EXPR:
15375 case FLOAT_EXPR:
15376 case FIX_TRUNC_EXPR:
15377 return tree_expr_nonnegative_warnv_p (op0,
15378 strict_overflow_p);
15380 case NOP_EXPR:
15382 tree inner_type = TREE_TYPE (op0);
15383 tree outer_type = type;
15385 if (TREE_CODE (outer_type) == REAL_TYPE)
15387 if (TREE_CODE (inner_type) == REAL_TYPE)
15388 return tree_expr_nonnegative_warnv_p (op0,
15389 strict_overflow_p);
15390 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15392 if (TYPE_UNSIGNED (inner_type))
15393 return true;
15394 return tree_expr_nonnegative_warnv_p (op0,
15395 strict_overflow_p);
15398 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15400 if (TREE_CODE (inner_type) == REAL_TYPE)
15401 return tree_expr_nonnegative_warnv_p (op0,
15402 strict_overflow_p);
15403 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15404 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15405 && TYPE_UNSIGNED (inner_type);
15408 break;
15410 default:
15411 return tree_simple_nonnegative_warnv_p (code, type);
15414 /* We don't know the sign of `t', so be conservative and return false. */
15415 return false;
15418 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15419 value is based on the assumption that signed overflow is undefined,
15420 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15421 *STRICT_OVERFLOW_P. */
15423 bool
15424 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15425 tree op1, bool *strict_overflow_p)
15427 if (TYPE_UNSIGNED (type))
15428 return true;
15430 switch (code)
15432 case POINTER_PLUS_EXPR:
15433 case PLUS_EXPR:
15434 if (FLOAT_TYPE_P (type))
15435 return (tree_expr_nonnegative_warnv_p (op0,
15436 strict_overflow_p)
15437 && tree_expr_nonnegative_warnv_p (op1,
15438 strict_overflow_p));
15440 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15441 both unsigned and at least 2 bits shorter than the result. */
15442 if (TREE_CODE (type) == INTEGER_TYPE
15443 && TREE_CODE (op0) == NOP_EXPR
15444 && TREE_CODE (op1) == NOP_EXPR)
15446 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15447 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15448 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15449 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15451 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15452 TYPE_PRECISION (inner2)) + 1;
15453 return prec < TYPE_PRECISION (type);
15456 break;
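/* Worked example of the precision test above: two 8-bit values zero
extended into a 32-bit int sum to at most 255 + 255 = 510 < 2^9, so
prec = max (8, 8) + 1 = 9 bits bound the magnitude, and 9 < 32 leaves
the sign bit clear, hence the sum is known non-negative. */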
15458 case MULT_EXPR:
15459 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15461 /* x * x is always non-negative for floating point x
15462 or when signed overflow is undefined. */
15463 if (operand_equal_p (op0, op1, 0)
15464 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15465 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15467 if (TYPE_OVERFLOW_UNDEFINED (type))
15468 *strict_overflow_p = true;
15469 return true;
15473 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15474 both unsigned and their combined width is less than the result's. */
15475 if (TREE_CODE (type) == INTEGER_TYPE
15476 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15477 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15479 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15480 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15481 : TREE_TYPE (op0);
15482 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15483 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15484 : TREE_TYPE (op1);
15486 bool unsigned0 = TYPE_UNSIGNED (inner0);
15487 bool unsigned1 = TYPE_UNSIGNED (inner1);
15489 if (TREE_CODE (op0) == INTEGER_CST)
15490 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15492 if (TREE_CODE (op1) == INTEGER_CST)
15493 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15495 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15496 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15498 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15499 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15500 : TYPE_PRECISION (inner0);
15502 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15503 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15504 : TYPE_PRECISION (inner1);
15506 return precision0 + precision1 < TYPE_PRECISION (type);
15509 return false;
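/* Worked example: two 8-bit values zero extended into a 32-bit int
multiply to at most 255 * 255 < 2^16, and precision0 + precision1
= 16 < 32 keeps the sign bit clear, so the product is known
non-negative. */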
15511 case BIT_AND_EXPR:
15512 case MAX_EXPR:
15513 return (tree_expr_nonnegative_warnv_p (op0,
15514 strict_overflow_p)
15515 || tree_expr_nonnegative_warnv_p (op1,
15516 strict_overflow_p));
15518 case BIT_IOR_EXPR:
15519 case BIT_XOR_EXPR:
15520 case MIN_EXPR:
15521 case RDIV_EXPR:
15522 case TRUNC_DIV_EXPR:
15523 case CEIL_DIV_EXPR:
15524 case FLOOR_DIV_EXPR:
15525 case ROUND_DIV_EXPR:
15526 return (tree_expr_nonnegative_warnv_p (op0,
15527 strict_overflow_p)
15528 && tree_expr_nonnegative_warnv_p (op1,
15529 strict_overflow_p));
15531 case TRUNC_MOD_EXPR:
15532 case CEIL_MOD_EXPR:
15533 case FLOOR_MOD_EXPR:
15534 case ROUND_MOD_EXPR:
15535 return tree_expr_nonnegative_warnv_p (op0,
15536 strict_overflow_p);
15537 default:
15538 return tree_simple_nonnegative_warnv_p (code, type);
15541 /* We don't know the sign of `t', so be conservative and return false. */
15542 return false;
15545 /* Return true if T is known to be non-negative. If the return
15546 value is based on the assumption that signed overflow is undefined,
15547 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15548 *STRICT_OVERFLOW_P. */
15550 bool
15551 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15553 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15554 return true;
15556 switch (TREE_CODE (t))
15558 case INTEGER_CST:
15559 return tree_int_cst_sgn (t) >= 0;
15561 case REAL_CST:
15562 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15564 case FIXED_CST:
15565 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15567 case COND_EXPR:
15568 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15569 strict_overflow_p)
15570 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15571 strict_overflow_p));
15572 default:
15573 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15574 TREE_TYPE (t));
15576 /* We don't know the sign of `t', so be conservative and return false. */
15577 return false;
15580 /* Return true if T is known to be non-negative. If the return
15581 value is based on the assumption that signed overflow is undefined,
15582 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15583 *STRICT_OVERFLOW_P. */
15585 bool
15586 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15587 tree arg0, tree arg1, bool *strict_overflow_p)
15589 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15590 switch (DECL_FUNCTION_CODE (fndecl))
15592 CASE_FLT_FN (BUILT_IN_ACOS):
15593 CASE_FLT_FN (BUILT_IN_ACOSH):
15594 CASE_FLT_FN (BUILT_IN_CABS):
15595 CASE_FLT_FN (BUILT_IN_COSH):
15596 CASE_FLT_FN (BUILT_IN_ERFC):
15597 CASE_FLT_FN (BUILT_IN_EXP):
15598 CASE_FLT_FN (BUILT_IN_EXP10):
15599 CASE_FLT_FN (BUILT_IN_EXP2):
15600 CASE_FLT_FN (BUILT_IN_FABS):
15601 CASE_FLT_FN (BUILT_IN_FDIM):
15602 CASE_FLT_FN (BUILT_IN_HYPOT):
15603 CASE_FLT_FN (BUILT_IN_POW10):
15604 CASE_INT_FN (BUILT_IN_FFS):
15605 CASE_INT_FN (BUILT_IN_PARITY):
15606 CASE_INT_FN (BUILT_IN_POPCOUNT):
15607 CASE_INT_FN (BUILT_IN_CLZ):
15608 CASE_INT_FN (BUILT_IN_CLRSB):
15609 case BUILT_IN_BSWAP32:
15610 case BUILT_IN_BSWAP64:
15611 /* Always true. */
15612 return true;
15614 CASE_FLT_FN (BUILT_IN_SQRT):
15615 /* sqrt(-0.0) is -0.0. */
15616 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15617 return true;
15618 return tree_expr_nonnegative_warnv_p (arg0,
15619 strict_overflow_p);
15621 CASE_FLT_FN (BUILT_IN_ASINH):
15622 CASE_FLT_FN (BUILT_IN_ATAN):
15623 CASE_FLT_FN (BUILT_IN_ATANH):
15624 CASE_FLT_FN (BUILT_IN_CBRT):
15625 CASE_FLT_FN (BUILT_IN_CEIL):
15626 CASE_FLT_FN (BUILT_IN_ERF):
15627 CASE_FLT_FN (BUILT_IN_EXPM1):
15628 CASE_FLT_FN (BUILT_IN_FLOOR):
15629 CASE_FLT_FN (BUILT_IN_FMOD):
15630 CASE_FLT_FN (BUILT_IN_FREXP):
15631 CASE_FLT_FN (BUILT_IN_ICEIL):
15632 CASE_FLT_FN (BUILT_IN_IFLOOR):
15633 CASE_FLT_FN (BUILT_IN_IRINT):
15634 CASE_FLT_FN (BUILT_IN_IROUND):
15635 CASE_FLT_FN (BUILT_IN_LCEIL):
15636 CASE_FLT_FN (BUILT_IN_LDEXP):
15637 CASE_FLT_FN (BUILT_IN_LFLOOR):
15638 CASE_FLT_FN (BUILT_IN_LLCEIL):
15639 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15640 CASE_FLT_FN (BUILT_IN_LLRINT):
15641 CASE_FLT_FN (BUILT_IN_LLROUND):
15642 CASE_FLT_FN (BUILT_IN_LRINT):
15643 CASE_FLT_FN (BUILT_IN_LROUND):
15644 CASE_FLT_FN (BUILT_IN_MODF):
15645 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15646 CASE_FLT_FN (BUILT_IN_RINT):
15647 CASE_FLT_FN (BUILT_IN_ROUND):
15648 CASE_FLT_FN (BUILT_IN_SCALB):
15649 CASE_FLT_FN (BUILT_IN_SCALBLN):
15650 CASE_FLT_FN (BUILT_IN_SCALBN):
15651 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15652 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15653 CASE_FLT_FN (BUILT_IN_SINH):
15654 CASE_FLT_FN (BUILT_IN_TANH):
15655 CASE_FLT_FN (BUILT_IN_TRUNC):
15656 /* True if the 1st argument is nonnegative. */
15657 return tree_expr_nonnegative_warnv_p (arg0,
15658 strict_overflow_p);
15660 CASE_FLT_FN (BUILT_IN_FMAX):
15661 /* True if the 1st OR 2nd argument is nonnegative. */
15662 return (tree_expr_nonnegative_warnv_p (arg0,
15663 strict_overflow_p)
15664 || (tree_expr_nonnegative_warnv_p (arg1,
15665 strict_overflow_p)));
15667 CASE_FLT_FN (BUILT_IN_FMIN):
15668 /* True if the 1st AND 2nd arguments are nonnegative. */
15669 return (tree_expr_nonnegative_warnv_p (arg0,
15670 strict_overflow_p)
15671 && (tree_expr_nonnegative_warnv_p (arg1,
15672 strict_overflow_p)));
15674 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15675 /* True if the 2nd argument is nonnegative. */
15676 return tree_expr_nonnegative_warnv_p (arg1,
15677 strict_overflow_p);
15679 CASE_FLT_FN (BUILT_IN_POWI):
15680 /* True if the 1st argument is nonnegative or the second
15681 argument is an even integer. */
15682 if (TREE_CODE (arg1) == INTEGER_CST
15683 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15684 return true;
15685 return tree_expr_nonnegative_warnv_p (arg0,
15686 strict_overflow_p);
15688 CASE_FLT_FN (BUILT_IN_POW):
15689 /* True if the 1st argument is nonnegative or the second
15690 argument is an even integer valued real. */
15691 if (TREE_CODE (arg1) == REAL_CST)
15693 REAL_VALUE_TYPE c;
15694 HOST_WIDE_INT n;
15696 c = TREE_REAL_CST (arg1);
15697 n = real_to_integer (&c);
15698 if ((n & 1) == 0)
15700 REAL_VALUE_TYPE cint;
15701 real_from_integer (&cint, VOIDmode, n,
15702 n < 0 ? -1 : 0, 0);
15703 if (real_identical (&c, &cint))
15704 return true;
15707 return tree_expr_nonnegative_warnv_p (arg0,
15708 strict_overflow_p);
15710 default:
15711 break;
15713 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15714 type);
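/* Usage sketch (illustrative only; the tree construction below is a
   hypothetical example, not part of this file):

     tree fndecl = builtin_decl_explicit (BUILT_IN_FABS);
     bool sov = false;
     bool nonneg = tree_call_nonnegative_warnv_p (double_type_node, fndecl,
						  arg, NULL_TREE, &sov);

   NONNEG is true via the CASE_FLT_FN (BUILT_IN_FABS) arm above,
   independently of ARG, since fabs never yields a negative value.  */
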
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

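/* Usage sketch (illustrative only):

     tree five = build_int_cst (integer_type_node, 5);

   tree_expr_nonnegative_p (five) returns true through the tcc_constant
   case of tree_expr_nonnegative_warnv_p, while for a plain signed
   variable it conservatively returns false.  */
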
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

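/* Usage sketch (illustrative only; DECL stands for some non-weak
   global VAR_DECL):

     tree addr = build_fold_addr_expr (decl);

   tree_expr_nonzero_p (addr) is true when
   flag_delete_null_pointer_checks is set, via the ADDR_EXPR case of
   tree_single_nonzero_warnv_p above.  */
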
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

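/* Usage sketch (illustrative only):

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree p = fold_binary_to_constant (MULT_EXPR, integer_type_node, a, b);

   P is the INTEGER_CST 42; with a non-constant operand, NULL_TREE is
   returned instead.  Likewise fold_unary_to_constant (NEGATE_EXPR,
   integer_type_node, a) yields the INTEGER_CST -6.  */
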
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     + (INDEX - (unsigned char)1)) becomes ((ARRAY + (-(unsigned char)1))
	     + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

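/* Usage sketch (illustrative only): for EXP representing "abc"[1],
   i.e. an ARRAY_REF whose base is the STRING_CST "abc" and whose
   index is the INTEGER_CST 1, the function returns
   build_int_cst_type (TREE_TYPE (exp), 'b').  */
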
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

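/* Worked example (illustrative only): for 32-bit int, negating the
   INTEGER_CST 0x80000000 (the type's minimum) yields a value that does
   not fit the type, so force_fit_type_double hands back the wrapped
   constant with TREE_OVERFLOW set on it.  */
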
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

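/* Usage sketch (illustrative only): if OP0 is a REAL_CST NaN and OP1
   any REAL_CST, then EQ_EXPR folds to the false node, NE_EXPR and the
   unordered comparisons fold to the true node, and LT_EXPR folds to
   the false node only when -fno-trapping-math is in effect (otherwise
   NULL_TREE is returned, since the comparison may trap).  */
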
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return has side effects, and likewise for the right hand side
     of the modify expression inside the return.  If either of them
     lacks side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left hand side
     of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }

	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

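/* Worked example (illustrative only): rounding the INTEGER_CST 10 up
   to a multiple of 8 takes the power-of-two path above:
   (10 & ~7) + 8 == 16.  For a non-constant VALUE the same result is
   computed as (VALUE + 7) & -8.  */
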
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

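/* Usage sketch (illustrative only): for E1 == &a[3] and E2 == &a[1],
   with A an array of 4-byte ints, both addresses split to the core A
   with bit positions 96 and 32, so *DIFF is set to (96 - 32) / 8 == 8
   and the function returns true.  */
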
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
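  /* Usage sketch (illustrative only): for EXP == -x * -y (a MULT_EXPR
     of two NEGATE_EXPRs), both negations strip to their operands and
     the result is x * y, provided the mode does not honor
     sign-dependent rounding.  */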