/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
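/* For illustration only (not part of the original source): a typical
   middle-end caller might constant-fold a sizetype addition as

     tree sum = size_binop (PLUS_EXPR, size_int (2), size_int (3));

   which yields an INTEGER_CST of value 5 with type `sizetype'.  */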
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
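/* For illustration only (not part of the original source): the encoding
   uses one bit each for "less", "equal", "greater" and "unordered", so
   combining two comparisons of the same operands becomes bitwise logic
   on their compcodes:

     a < b || a == b   ==>  COMPCODE_LT | COMPCODE_EQ  ==  COMPCODE_LE
     a <= b && a >= b  ==>  COMPCODE_LE & COMPCODE_GE  ==  COMPCODE_EQ

   and compcode_to_comparison maps the result back to a tree code.  */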
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
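/* For illustration only (not part of the original source): with
   hypothetical INTEGER_CST trees twelve, four and five of one type,

     div_if_zero_remainder (TRUNC_DIV_EXPR, twelve, four)  ==>  3
     div_if_zero_remainder (TRUNC_DIV_EXPR, twelve, five)  ==>  NULL_TREE

   since 12 % 5 != 0, the second call refuses to fold.  */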
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
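/* For illustration only (not part of the original source): a pass that
   folds speculatively brackets its calls like

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE && used_p, stmt, 0);

   so a deferred -Wstrict-overflow warning is only issued when the folded
   result is actually used (used_p and stmt are hypothetical here).  */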
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
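/* For illustration only (not part of the original source): in a signed
   8-bit type the most negative value is -128, and -(-128) = 128 does not
   fit, so the function returns false for that constant and true for
   -127.  The check works because only the minimum value of the type has
   the bit pattern 1 << (prec - 1).  */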
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
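/* For illustration only (not part of the original source): given a tree
   for -(a + 5) in a signed type with undefined overflow, fold_negate_expr
   rewrites it as (-5) - a via the PLUS_EXPR case; given -(~a) in an
   integral type it returns a + 1; for a tree it cannot improve it
   returns NULL_TREE and negate_expr below falls back to building an
   explicit NEGATE_EXPR.  */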
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
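/* For illustration only (not part of the original source): splitting the
   tree for "x - 5" with CODE == PLUS_EXPR and NEGATE_P false yields

     return value (variable part)  ==>  x
     *litp                         ==>  0
     *minus_litp                   ==>  5
     *conp                         ==>  0

   i.e. the decomposition x + (-5), ready to be recombined with other
   terms by associate_trees below.  */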
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
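/* For illustration only (not part of the original source): given 32-bit
   signed INTEGER_CST trees for INT_MAX and 1, int_const_binop (PLUS_EXPR,
   max, one) still produces the wrapped constant INT_MIN, but
   force_fit_type_double marks it with TREE_OVERFLOW so callers can see
   that signed overflow occurred; a division by a zero constant instead
   returns NULL_TREE and is left unfolded.  */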
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
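/* For illustration only (not part of the original source): const_binop
   dispatches on the kind of constant, so multiplying two REAL_CSTs built
   with build_real for 2.0 and 3.0 folds to the REAL_CST 6.0, while an
   unhandled combination (say, RDIV_EXPR on INTEGER_CSTs) simply yields
   NULL_TREE and the caller keeps the expression unfolded.  */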
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
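/* For illustration only (not part of the original source): offset
   arithmetic throughout the middle end is phrased as

     tree bytes = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (elt_type),
                              size_int (n));

   where size_binop is the location-less wrapper around size_binop_loc;
   both operands must already have equivalent integer types (elt_type
   and n are hypothetical).  */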
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
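/* For illustration only (not part of the original source): because
   sizetype is unsigned, computing 4 - 8 directly in it would wrap;
   instead

     size_diffop_loc (loc, size_int (4), size_int (8))

   subtracts the smaller constant from the larger, converts, and negates,
   yielding the ssizetype INTEGER_CST -4.  */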
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
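/* For illustration only (not part of the original source): converting the
   REAL_CST 3.75 to a 32-bit signed type truncates to 3; converting 1e30
   saturates to the type's maximum and converting a NaN yields 0, the
   latter two with TREE_OVERFLOW set on the result so callers can
   diagnose the out-of-range conversion.  */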
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to TEMP whenever the discarded fractional
     bits are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
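/* For illustration only (not part of the original source): the dispatch
   above is keyed first on the target type and then on the constant's
   kind, so

     fold_convert_const (FLOAT_EXPR, double_type_node, integer_one_node)

   lands in build_real_from_int_cst and produces the REAL_CST 1.0, while
   an unsupported pairing falls off the end and returns NULL_TREE.  */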
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1880 /* Convert expression ARG to type TYPE. Used by the middle-end for
1881 simple conversions in preference to calling the front-end's convert. */
1883 tree
1884 fold_convert_loc (location_t loc, tree type, tree arg)
1886 tree orig = TREE_TYPE (arg);
1887 tree tem;
1889 if (type == orig)
1890 return arg;
1892 if (TREE_CODE (arg) == ERROR_MARK
1893 || TREE_CODE (type) == ERROR_MARK
1894 || TREE_CODE (orig) == ERROR_MARK)
1895 return error_mark_node;
1897 switch (TREE_CODE (type))
1899 case POINTER_TYPE:
1900 case REFERENCE_TYPE:
1901 /* Handle conversions between pointers to different address spaces. */
1902 if (POINTER_TYPE_P (orig)
1903 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1904 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1905 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1906 /* fall through */
1908 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1909 case OFFSET_TYPE:
1910 if (TREE_CODE (arg) == INTEGER_CST)
1912 tem = fold_convert_const (NOP_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1914 return tem;
1916 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1917 || TREE_CODE (orig) == OFFSET_TYPE)
1918 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1919 if (TREE_CODE (orig) == COMPLEX_TYPE)
1920 return fold_convert_loc (loc, type,
1921 fold_build1_loc (loc, REALPART_EXPR,
1922 TREE_TYPE (orig), arg));
1923 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1924 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1925 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1927 case REAL_TYPE:
1928 if (TREE_CODE (arg) == INTEGER_CST)
1930 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1931 if (tem != NULL_TREE)
1932 return tem;
1934 else if (TREE_CODE (arg) == REAL_CST)
1936 tem = fold_convert_const (NOP_EXPR, type, arg);
1937 if (tem != NULL_TREE)
1938 return tem;
1940 else if (TREE_CODE (arg) == FIXED_CST)
1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 return tem;
1947 switch (TREE_CODE (orig))
1949 case INTEGER_TYPE:
1950 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1951 case POINTER_TYPE: case REFERENCE_TYPE:
1952 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1954 case REAL_TYPE:
1955 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1957 case FIXED_POINT_TYPE:
1958 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1960 case COMPLEX_TYPE:
1961 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1962 return fold_convert_loc (loc, type, tem);
1964 default:
1965 gcc_unreachable ();
1968 case FIXED_POINT_TYPE:
1969 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1970 || TREE_CODE (arg) == REAL_CST)
1972 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1973 if (tem != NULL_TREE)
1974 goto fold_convert_exit;
1977 switch (TREE_CODE (orig))
1979 case FIXED_POINT_TYPE:
1980 case INTEGER_TYPE:
1981 case ENUMERAL_TYPE:
1982 case BOOLEAN_TYPE:
1983 case REAL_TYPE:
1984 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1986 case COMPLEX_TYPE:
1987 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1988 return fold_convert_loc (loc, type, tem);
1990 default:
1991 gcc_unreachable ();
1994 case COMPLEX_TYPE:
1995 switch (TREE_CODE (orig))
1997 case INTEGER_TYPE:
1998 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1999 case POINTER_TYPE: case REFERENCE_TYPE:
2000 case REAL_TYPE:
2001 case FIXED_POINT_TYPE:
2002 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2003 fold_convert_loc (loc, TREE_TYPE (type), arg),
2004 fold_convert_loc (loc, TREE_TYPE (type),
2005 integer_zero_node));
2006 case COMPLEX_TYPE:
2008 tree rpart, ipart;
2010 if (TREE_CODE (arg) == COMPLEX_EXPR)
2012 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2013 TREE_OPERAND (arg, 0));
2014 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2015 TREE_OPERAND (arg, 1));
2016 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2019 arg = save_expr (arg);
2020 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2021 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2022 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2023 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2024 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2027 default:
2028 gcc_unreachable ();
2031 case VECTOR_TYPE:
2032 if (integer_zerop (arg))
2033 return build_zero_vector (type);
2034 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2035 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2036 || TREE_CODE (orig) == VECTOR_TYPE);
2037 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2039 case VOID_TYPE:
2040 tem = fold_ignored_result (arg);
2041 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2043 default:
2044 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2045 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2046 gcc_unreachable ();
2048 fold_convert_exit:
2049 protected_set_expr_location_unshare (tem, loc);
2050 return tem;
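/* Illustrative sketch (annotation, not part of the original file): for a
   constant argument fold_convert_loc folds on the spot instead of building
   a conversion node; e.g. converting an INTEGER_CST to a REAL_TYPE goes
   through the FLOAT_EXPR path above and yields a REAL_CST directly:

     tree five = build_int_cst (integer_type_node, 5);
     tree d = fold_convert_loc (loc, double_type_node, five);
     // d is the REAL_CST 5.0, not a FLOAT_EXPR tree; "loc" stands for
     // whatever location_t the caller has at hand.  */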
2053 /* Return false if expr can be assumed not to be an lvalue, true
2054 otherwise. */
2056 static bool
2057 maybe_lvalue_p (const_tree x)
2059 /* We only need to wrap lvalue tree codes. */
2060 switch (TREE_CODE (x))
2062 case VAR_DECL:
2063 case PARM_DECL:
2064 case RESULT_DECL:
2065 case LABEL_DECL:
2066 case FUNCTION_DECL:
2067 case SSA_NAME:
2069 case COMPONENT_REF:
2070 case MEM_REF:
2071 case INDIRECT_REF:
2072 case ARRAY_REF:
2073 case ARRAY_RANGE_REF:
2074 case BIT_FIELD_REF:
2075 case OBJ_TYPE_REF:
2077 case REALPART_EXPR:
2078 case IMAGPART_EXPR:
2079 case PREINCREMENT_EXPR:
2080 case PREDECREMENT_EXPR:
2081 case SAVE_EXPR:
2082 case TRY_CATCH_EXPR:
2083 case WITH_CLEANUP_EXPR:
2084 case COMPOUND_EXPR:
2085 case MODIFY_EXPR:
2086 case TARGET_EXPR:
2087 case COND_EXPR:
2088 case BIND_EXPR:
2089 break;
2091 default:
2092 /* Assume the worst for front-end tree codes. */
2093 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2094 break;
2095 return false;
2098 return true;
2101 /* Return an expr equal to X but certainly not valid as an lvalue. */
2103 tree
2104 non_lvalue_loc (location_t loc, tree x)
2106 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2107 us. */
2108 if (in_gimple_form)
2109 return x;
2111 if (! maybe_lvalue_p (x))
2112 return x;
2113 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2116 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2117 Zero means allow extended lvalues. */
2119 int pedantic_lvalues;
2121 /* When pedantic, return an expr equal to X but certainly not valid as a
2122 pedantic lvalue. Otherwise, return X. */
2124 static tree
2125 pedantic_non_lvalue_loc (location_t loc, tree x)
2127 if (pedantic_lvalues)
2128 return non_lvalue_loc (loc, x);
2130 return protected_set_expr_location_unshare (x, loc);
2133 /* Given a tree comparison code, return the code that is the logical inverse.
2134 It is generally not safe to do this for floating-point comparisons, except
2135 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2136 ERROR_MARK in this case. */
2138 enum tree_code
2139 invert_tree_comparison (enum tree_code code, bool honor_nans)
2141 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2142 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2143 return ERROR_MARK;
2145 switch (code)
2147 case EQ_EXPR:
2148 return NE_EXPR;
2149 case NE_EXPR:
2150 return EQ_EXPR;
2151 case GT_EXPR:
2152 return honor_nans ? UNLE_EXPR : LE_EXPR;
2153 case GE_EXPR:
2154 return honor_nans ? UNLT_EXPR : LT_EXPR;
2155 case LT_EXPR:
2156 return honor_nans ? UNGE_EXPR : GE_EXPR;
2157 case LE_EXPR:
2158 return honor_nans ? UNGT_EXPR : GT_EXPR;
2159 case LTGT_EXPR:
2160 return UNEQ_EXPR;
2161 case UNEQ_EXPR:
2162 return LTGT_EXPR;
2163 case UNGT_EXPR:
2164 return LE_EXPR;
2165 case UNGE_EXPR:
2166 return LT_EXPR;
2167 case UNLT_EXPR:
2168 return GE_EXPR;
2169 case UNLE_EXPR:
2170 return GT_EXPR;
2171 case ORDERED_EXPR:
2172 return UNORDERED_EXPR;
2173 case UNORDERED_EXPR:
2174 return ORDERED_EXPR;
2175 default:
2176 gcc_unreachable ();
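/* Example (annotation): when NaNs are honored, both a < b and a >= b are
   false for unordered operands, so the logical inverse of LT_EXPR is
   UNGE_EXPR rather than GE_EXPR, as above.  And when -ftrapping-math is in
   effect, only EQ/NE and ORDERED/UNORDERED invert safely, since the
   rewritten comparison could trap under different conditions; hence the
   ERROR_MARK bail-out.  */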
2180 /* Similar, but return the comparison that results if the operands are
2181 swapped. This is safe for floating-point. */
2183 enum tree_code
2184 swap_tree_comparison (enum tree_code code)
2186 switch (code)
2188 case EQ_EXPR:
2189 case NE_EXPR:
2190 case ORDERED_EXPR:
2191 case UNORDERED_EXPR:
2192 case LTGT_EXPR:
2193 case UNEQ_EXPR:
2194 return code;
2195 case GT_EXPR:
2196 return LT_EXPR;
2197 case GE_EXPR:
2198 return LE_EXPR;
2199 case LT_EXPR:
2200 return GT_EXPR;
2201 case LE_EXPR:
2202 return GE_EXPR;
2203 case UNGT_EXPR:
2204 return UNLT_EXPR;
2205 case UNGE_EXPR:
2206 return UNLE_EXPR;
2207 case UNLT_EXPR:
2208 return UNGT_EXPR;
2209 case UNLE_EXPR:
2210 return UNGE_EXPR;
2211 default:
2212 gcc_unreachable ();
2217 /* Convert a comparison tree code from an enum tree_code representation
2218 into a compcode bit-based encoding. This function is the inverse of
2219 compcode_to_comparison. */
2221 static enum comparison_code
2222 comparison_to_compcode (enum tree_code code)
2224 switch (code)
2226 case LT_EXPR:
2227 return COMPCODE_LT;
2228 case EQ_EXPR:
2229 return COMPCODE_EQ;
2230 case LE_EXPR:
2231 return COMPCODE_LE;
2232 case GT_EXPR:
2233 return COMPCODE_GT;
2234 case NE_EXPR:
2235 return COMPCODE_NE;
2236 case GE_EXPR:
2237 return COMPCODE_GE;
2238 case ORDERED_EXPR:
2239 return COMPCODE_ORD;
2240 case UNORDERED_EXPR:
2241 return COMPCODE_UNORD;
2242 case UNLT_EXPR:
2243 return COMPCODE_UNLT;
2244 case UNEQ_EXPR:
2245 return COMPCODE_UNEQ;
2246 case UNLE_EXPR:
2247 return COMPCODE_UNLE;
2248 case UNGT_EXPR:
2249 return COMPCODE_UNGT;
2250 case LTGT_EXPR:
2251 return COMPCODE_LTGT;
2252 case UNGE_EXPR:
2253 return COMPCODE_UNGE;
2254 default:
2255 gcc_unreachable ();
2259 /* Convert a compcode bit-based encoding of a comparison operator back
2260 to GCC's enum tree_code representation. This function is the
2261 inverse of comparison_to_compcode. */
2263 static enum tree_code
2264 compcode_to_comparison (enum comparison_code code)
2266 switch (code)
2268 case COMPCODE_LT:
2269 return LT_EXPR;
2270 case COMPCODE_EQ:
2271 return EQ_EXPR;
2272 case COMPCODE_LE:
2273 return LE_EXPR;
2274 case COMPCODE_GT:
2275 return GT_EXPR;
2276 case COMPCODE_NE:
2277 return NE_EXPR;
2278 case COMPCODE_GE:
2279 return GE_EXPR;
2280 case COMPCODE_ORD:
2281 return ORDERED_EXPR;
2282 case COMPCODE_UNORD:
2283 return UNORDERED_EXPR;
2284 case COMPCODE_UNLT:
2285 return UNLT_EXPR;
2286 case COMPCODE_UNEQ:
2287 return UNEQ_EXPR;
2288 case COMPCODE_UNLE:
2289 return UNLE_EXPR;
2290 case COMPCODE_UNGT:
2291 return UNGT_EXPR;
2292 case COMPCODE_LTGT:
2293 return LTGT_EXPR;
2294 case COMPCODE_UNGE:
2295 return UNGE_EXPR;
2296 default:
2297 gcc_unreachable ();
2301 /* Return a tree for the comparison which is the combination of
2302 doing the AND or OR (depending on CODE) of the two operations LCODE
2303 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2304 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2305 if this makes the transformation invalid. */
2307 tree
2308 combine_comparisons (location_t loc,
2309 enum tree_code code, enum tree_code lcode,
2310 enum tree_code rcode, tree truth_type,
2311 tree ll_arg, tree lr_arg)
2313 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2314 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2315 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2316 int compcode;
2318 switch (code)
2320 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2321 compcode = lcompcode & rcompcode;
2322 break;
2324 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2325 compcode = lcompcode | rcompcode;
2326 break;
2328 default:
2329 return NULL_TREE;
2332 if (!honor_nans)
2334 /* Eliminate unordered comparisons, as well as LTGT and ORD
2335 which are not used unless the mode has NaNs. */
2336 compcode &= ~COMPCODE_UNORD;
2337 if (compcode == COMPCODE_LTGT)
2338 compcode = COMPCODE_NE;
2339 else if (compcode == COMPCODE_ORD)
2340 compcode = COMPCODE_TRUE;
2342 else if (flag_trapping_math)
2344 /* Check that the original operation and the optimized ones will trap
2345 under the same condition. */
2346 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2347 && (lcompcode != COMPCODE_EQ)
2348 && (lcompcode != COMPCODE_ORD);
2349 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2350 && (rcompcode != COMPCODE_EQ)
2351 && (rcompcode != COMPCODE_ORD);
2352 bool trap = (compcode & COMPCODE_UNORD) == 0
2353 && (compcode != COMPCODE_EQ)
2354 && (compcode != COMPCODE_ORD);
2356 /* In a short-circuited boolean expression the LHS might be
2357 such that the RHS, if evaluated, will never trap. For
2358 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2359 if neither x nor y is NaN. (This is a mixed blessing: for
2360 example, the expression above will never trap, hence
2361 optimizing it to x < y would be invalid). */
2362 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2363 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2364 rtrap = false;
2366 /* If the comparison was short-circuited, and only the RHS
2367 trapped, we may now generate a spurious trap. */
2368 if (rtrap && !ltrap
2369 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2370 return NULL_TREE;
2372 /* If we changed the conditions that cause a trap, we lose. */
2373 if ((ltrap || rtrap) != trap)
2374 return NULL_TREE;
2377 if (compcode == COMPCODE_TRUE)
2378 return constant_boolean_node (true, truth_type);
2379 else if (compcode == COMPCODE_FALSE)
2380 return constant_boolean_node (false, truth_type);
2381 else
2383 enum tree_code tcode;
2385 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2386 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
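/* Worked example (annotation): the compcode encoding makes the bitwise OR
   of two codes the union of their relations and the AND their
   intersection, so for identical operands

     (a < b) || (a == b)  ->  COMPCODE_LT | COMPCODE_EQ  ==  COMPCODE_LE
                          ->  a <= b
     (a < b) && (a > b)   ->  COMPCODE_LT & COMPCODE_GT  ==  COMPCODE_FALSE
                          ->  constant false

   subject to the NaN and trapping checks above.  */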
2390 /* Return nonzero if two operands (typically of the same tree node)
2391 are necessarily equal. If either argument has side-effects this
2392 function returns zero. FLAGS modifies behavior as follows:
2394 If OEP_ONLY_CONST is set, only return nonzero for constants.
2395 This function tests whether the operands are indistinguishable;
2396 it does not test whether they are equal using C's == operation.
2397 The distinction is important for IEEE floating point, because
2398 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2399 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2401 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2402 even though it may hold multiple values during a function.
2403 This is because a GCC tree node guarantees that nothing else is
2404 executed between the evaluation of its "operands" (which may often
2405 be evaluated in arbitrary order). Hence if the operands themselves
2406 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2407 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2408 unset means assuming isochronic (or instantaneous) tree equivalence.
2409 Unless comparing arbitrary expression trees, such as from different
2410 statements, this flag can usually be left unset.
2412 If OEP_PURE_SAME is set, then pure functions with identical arguments
2413 are considered the same. It is used when the caller has other ways
2414 to ensure that global memory is unchanged in between. */
2416 int
2417 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2419 /* If either is ERROR_MARK, they aren't equal. */
2420 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2421 || TREE_TYPE (arg0) == error_mark_node
2422 || TREE_TYPE (arg1) == error_mark_node)
2423 return 0;
2425 /* Similar, if either does not have a type (like a released SSA name),
2426 they aren't equal. */
2427 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2428 return 0;
2430 /* Check equality of integer constants before bailing out due to
2431 precision differences. */
2432 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2433 return tree_int_cst_equal (arg0, arg1);
2435 /* If both types don't have the same signedness, then we can't consider
2436 them equal. We must check this before the STRIP_NOPS calls
2437 because they may change the signedness of the arguments. As pointers
2438 strictly don't have a signedness, require either two pointers or
2439 two non-pointers as well. */
2440 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2441 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2442 return 0;
2444 /* We cannot consider pointers to different address spaces equal. */
2445 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2446 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2447 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2448 return 0;
2450 /* If both types don't have the same precision, then it is not safe
2451 to strip NOPs. */
2452 if (element_precision (TREE_TYPE (arg0))
2453 != element_precision (TREE_TYPE (arg1)))
2454 return 0;
2456 STRIP_NOPS (arg0);
2457 STRIP_NOPS (arg1);
2459 /* In case both args are comparisons but with different comparison
2460 code, try to swap the comparison operands of one arg to produce
2461 a match and compare that variant. */
2462 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2463 && COMPARISON_CLASS_P (arg0)
2464 && COMPARISON_CLASS_P (arg1))
2466 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2468 if (TREE_CODE (arg0) == swap_code)
2469 return operand_equal_p (TREE_OPERAND (arg0, 0),
2470 TREE_OPERAND (arg1, 1), flags)
2471 && operand_equal_p (TREE_OPERAND (arg0, 1),
2472 TREE_OPERAND (arg1, 0), flags);
2475 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2476 /* This is needed for conversions and for COMPONENT_REF.
2477 Might as well play it safe and always test this. */
2478 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2479 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2480 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2481 return 0;
2483 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2484 We don't care about side effects in that case because the SAVE_EXPR
2485 takes care of that for us. In all other cases, two expressions are
2486 equal if they have no side effects. If we have two identical
2487 expressions with side effects that should be treated the same due
2488 to the only side effects being identical SAVE_EXPR's, that will
2489 be detected in the recursive calls below.
2490 If we are taking an invariant address of two identical objects
2491 they are necessarily equal as well. */
2492 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2493 && (TREE_CODE (arg0) == SAVE_EXPR
2494 || (flags & OEP_CONSTANT_ADDRESS_OF)
2495 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2496 return 1;
2498 /* Next handle constant cases, those for which we can return 1 even
2499 if ONLY_CONST is set. */
2500 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2501 switch (TREE_CODE (arg0))
2503 case INTEGER_CST:
2504 return tree_int_cst_equal (arg0, arg1);
2506 case FIXED_CST:
2507 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2508 TREE_FIXED_CST (arg1));
2510 case REAL_CST:
2511 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2512 TREE_REAL_CST (arg1)))
2513 return 1;
2516 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2518 /* If we do not distinguish between signed and unsigned zero,
2519 consider them equal. */
2520 if (real_zerop (arg0) && real_zerop (arg1))
2521 return 1;
2523 return 0;
2525 case VECTOR_CST:
2527 unsigned i;
2529 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2530 return 0;
2532 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2534 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2535 VECTOR_CST_ELT (arg1, i), flags))
2536 return 0;
2538 return 1;
2541 case COMPLEX_CST:
2542 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2543 flags)
2544 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2545 flags));
2547 case STRING_CST:
2548 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2549 && ! memcmp (TREE_STRING_POINTER (arg0),
2550 TREE_STRING_POINTER (arg1),
2551 TREE_STRING_LENGTH (arg0)));
2553 case ADDR_EXPR:
2554 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2555 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2556 ? OEP_CONSTANT_ADDRESS_OF : 0);
2557 default:
2558 break;
2561 if (flags & OEP_ONLY_CONST)
2562 return 0;
2564 /* Define macros to test an operand from arg0 and arg1 for equality and a
2565 variant that allows null and views null as being different from any
2566 non-null value. In the latter case, if either is null, they both
2567 must be; otherwise, do the normal comparison. */
2568 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2569 TREE_OPERAND (arg1, N), flags)
2571 #define OP_SAME_WITH_NULL(N) \
2572 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2573 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2575 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2577 case tcc_unary:
2578 /* Two conversions are equal only if signedness and modes match. */
2579 switch (TREE_CODE (arg0))
2581 CASE_CONVERT:
2582 case FIX_TRUNC_EXPR:
2583 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2584 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2585 return 0;
2586 break;
2587 default:
2588 break;
2591 return OP_SAME (0);
2594 case tcc_comparison:
2595 case tcc_binary:
2596 if (OP_SAME (0) && OP_SAME (1))
2597 return 1;
2599 /* For commutative ops, allow the other order. */
2600 return (commutative_tree_code (TREE_CODE (arg0))
2601 && operand_equal_p (TREE_OPERAND (arg0, 0),
2602 TREE_OPERAND (arg1, 1), flags)
2603 && operand_equal_p (TREE_OPERAND (arg0, 1),
2604 TREE_OPERAND (arg1, 0), flags));
2606 case tcc_reference:
2607 /* If either of the pointer (or reference) expressions we are
2608 dereferencing contain a side effect, these cannot be equal,
2609 but their addresses can be. */
2610 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2611 && (TREE_SIDE_EFFECTS (arg0)
2612 || TREE_SIDE_EFFECTS (arg1)))
2613 return 0;
2615 switch (TREE_CODE (arg0))
2617 case INDIRECT_REF:
2618 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2619 return OP_SAME (0);
2621 case REALPART_EXPR:
2622 case IMAGPART_EXPR:
2623 return OP_SAME (0);
2625 case TARGET_MEM_REF:
2626 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2627 /* Require equal extra operands and then fall through to MEM_REF
2628 handling of the two common operands. */
2629 if (!OP_SAME_WITH_NULL (2)
2630 || !OP_SAME_WITH_NULL (3)
2631 || !OP_SAME_WITH_NULL (4))
2632 return 0;
2633 /* Fallthru. */
2634 case MEM_REF:
2635 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2636 /* Require equal access sizes, and similar pointer types.
2637 We can have incomplete types for array references of
2638 variable-sized arrays from the Fortran frontend
2639 though. Also verify the types are compatible. */
2640 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2641 || (TYPE_SIZE (TREE_TYPE (arg0))
2642 && TYPE_SIZE (TREE_TYPE (arg1))
2643 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2644 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2645 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2646 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2647 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2648 && OP_SAME (0) && OP_SAME (1));
2650 case ARRAY_REF:
2651 case ARRAY_RANGE_REF:
2652 /* Operands 2 and 3 may be null.
2653 Compare the array index by value if it is constant first as we
2654 may have different types but same value here. */
2655 if (!OP_SAME (0))
2656 return 0;
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2659 TREE_OPERAND (arg1, 1))
2660 || OP_SAME (1))
2661 && OP_SAME_WITH_NULL (2)
2662 && OP_SAME_WITH_NULL (3));
2664 case COMPONENT_REF:
2665 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2666 may be NULL when we're called to compare MEM_EXPRs. */
2667 if (!OP_SAME_WITH_NULL (0))
2668 return 0;
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2672 case BIT_FIELD_REF:
2673 if (!OP_SAME (0))
2674 return 0;
2675 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2676 return OP_SAME (1) && OP_SAME (2);
2678 default:
2679 return 0;
2682 case tcc_expression:
2683 switch (TREE_CODE (arg0))
2685 case ADDR_EXPR:
2686 case TRUTH_NOT_EXPR:
2687 return OP_SAME (0);
2689 case TRUTH_ANDIF_EXPR:
2690 case TRUTH_ORIF_EXPR:
2691 return OP_SAME (0) && OP_SAME (1);
2693 case FMA_EXPR:
2694 case WIDEN_MULT_PLUS_EXPR:
2695 case WIDEN_MULT_MINUS_EXPR:
2696 if (!OP_SAME (2))
2697 return 0;
2698 /* The multiplication operands are commutative. */
2699 /* FALLTHRU */
2701 case TRUTH_AND_EXPR:
2702 case TRUTH_OR_EXPR:
2703 case TRUTH_XOR_EXPR:
2704 if (OP_SAME (0) && OP_SAME (1))
2705 return 1;
2707 /* Otherwise take into account this is a commutative operation. */
2708 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2709 TREE_OPERAND (arg1, 1), flags)
2710 && operand_equal_p (TREE_OPERAND (arg0, 1),
2711 TREE_OPERAND (arg1, 0), flags));
2713 case COND_EXPR:
2714 case VEC_COND_EXPR:
2715 case DOT_PROD_EXPR:
2716 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2718 default:
2719 return 0;
2722 case tcc_vl_exp:
2723 switch (TREE_CODE (arg0))
2725 case CALL_EXPR:
2726 /* If the CALL_EXPRs call different functions, then they
2727 clearly cannot be equal. */
2728 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2729 flags))
2730 return 0;
2733 unsigned int cef = call_expr_flags (arg0);
2734 if (flags & OEP_PURE_SAME)
2735 cef &= ECF_CONST | ECF_PURE;
2736 else
2737 cef &= ECF_CONST;
2738 if (!cef)
2739 return 0;
2742 /* Now see if all the arguments are the same. */
2744 const_call_expr_arg_iterator iter0, iter1;
2745 const_tree a0, a1;
2746 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2747 a1 = first_const_call_expr_arg (arg1, &iter1);
2748 a0 && a1;
2749 a0 = next_const_call_expr_arg (&iter0),
2750 a1 = next_const_call_expr_arg (&iter1))
2751 if (! operand_equal_p (a0, a1, flags))
2752 return 0;
2754 /* If we get here and both argument lists are exhausted
2755 then the CALL_EXPRs are equal. */
2756 return ! (a0 || a1);
2758 default:
2759 return 0;
2762 case tcc_declaration:
2763 /* Consider __builtin_sqrt equal to sqrt. */
2764 return (TREE_CODE (arg0) == FUNCTION_DECL
2765 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2766 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2767 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2769 default:
2770 return 0;
2773 #undef OP_SAME
2774 #undef OP_SAME_WITH_NULL
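/* Usage sketch (annotation, not part of the original file):
   operand_equal_p tests indistinguishability, not C equality, so with
   signed zeros honored a REAL_CST -0.0 does not match 0.0 even though
   -0.0 == 0.0 holds in C:

     tree pz = build_real (double_type_node, dconst0);
     tree nz = build_real (double_type_node, real_value_negate (&dconst0));
     // operand_equal_p (pz, nz, 0) yields 0 here: the values are
     // distinguishable even though they compare equal with ==.  */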
2777 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2778 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2780 When in doubt, return 0. */
2782 static int
2783 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2785 int unsignedp1, unsignedpo;
2786 tree primarg0, primarg1, primother;
2787 unsigned int correct_width;
2789 if (operand_equal_p (arg0, arg1, 0))
2790 return 1;
2792 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2793 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2794 return 0;
2796 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2797 and see if the inner values are the same. This removes any
2798 signedness comparison, which doesn't matter here. */
2799 primarg0 = arg0, primarg1 = arg1;
2800 STRIP_NOPS (primarg0);
2801 STRIP_NOPS (primarg1);
2802 if (operand_equal_p (primarg0, primarg1, 0))
2803 return 1;
2805 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2806 actual comparison operand, ARG0.
2808 First throw away any conversions to wider types
2809 already present in the operands. */
2811 primarg1 = get_narrower (arg1, &unsignedp1);
2812 primother = get_narrower (other, &unsignedpo);
2814 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2815 if (unsignedp1 == unsignedpo
2816 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2817 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2819 tree type = TREE_TYPE (arg0);
2821 /* Make sure shorter operand is extended the right way
2822 to match the longer operand. */
2823 primarg1 = fold_convert (signed_or_unsigned_type_for
2824 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2826 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2827 return 1;
2830 return 0;
2833 /* See if ARG is an expression that is either a comparison or is performing
2834 arithmetic on comparisons. The comparisons must only be comparing
2835 two different values, which will be stored in *CVAL1 and *CVAL2; if
2836 they are nonzero it means that some operands have already been found.
2837 No variables may be used anywhere else in the expression except in the
2838 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2839 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2841 If this is true, return 1. Otherwise, return zero. */
2843 static int
2844 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2846 enum tree_code code = TREE_CODE (arg);
2847 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2849 /* We can handle some of the tcc_expression cases here. */
2850 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2851 tclass = tcc_unary;
2852 else if (tclass == tcc_expression
2853 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2854 || code == COMPOUND_EXPR))
2855 tclass = tcc_binary;
2857 else if (tclass == tcc_expression && code == SAVE_EXPR
2858 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2860 /* If we've already found a CVAL1 or CVAL2, this expression is
2861 too complex to handle. */
2862 if (*cval1 || *cval2)
2863 return 0;
2865 tclass = tcc_unary;
2866 *save_p = 1;
2869 switch (tclass)
2871 case tcc_unary:
2872 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2874 case tcc_binary:
2875 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2876 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2877 cval1, cval2, save_p));
2879 case tcc_constant:
2880 return 1;
2882 case tcc_expression:
2883 if (code == COND_EXPR)
2884 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2885 cval1, cval2, save_p)
2886 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2887 cval1, cval2, save_p)
2888 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2889 cval1, cval2, save_p));
2890 return 0;
2892 case tcc_comparison:
2893 /* First see if we can handle the first operand, then the second. For
2894 the second operand, we know *CVAL1 can't be zero. It must be that
2895 one side of the comparison is each of the values; test for the
2896 case where this isn't true by failing if the two operands
2897 are the same. */
2899 if (operand_equal_p (TREE_OPERAND (arg, 0),
2900 TREE_OPERAND (arg, 1), 0))
2901 return 0;
2903 if (*cval1 == 0)
2904 *cval1 = TREE_OPERAND (arg, 0);
2905 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2906 ;
2907 else if (*cval2 == 0)
2908 *cval2 = TREE_OPERAND (arg, 0);
2909 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2910 ;
2911 else
2912 return 0;
2914 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2915 ;
2916 else if (*cval2 == 0)
2917 *cval2 = TREE_OPERAND (arg, 1);
2918 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2919 ;
2920 else
2921 return 0;
2923 return 1;
2925 default:
2926 return 0;
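/* Example (annotation): for ARG = (a < b) | (b == a), both comparisons
   mention only the two values a and b, so *CVAL1 and *CVAL2 end up as a
   and b and the function returns 1.  For (a < b) | (c == d) it returns 0,
   since that would need more than two distinct comparison operands.  */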
2930 /* ARG is a tree that is known to contain just arithmetic operations and
2931 comparisons. Evaluate the operations in the tree substituting NEW0 for
2932 any occurrence of OLD0 as an operand of a comparison and likewise for
2933 NEW1 and OLD1. */
2935 static tree
2936 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2937 tree old1, tree new1)
2939 tree type = TREE_TYPE (arg);
2940 enum tree_code code = TREE_CODE (arg);
2941 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2943 /* We can handle some of the tcc_expression cases here. */
2944 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2945 tclass = tcc_unary;
2946 else if (tclass == tcc_expression
2947 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2948 tclass = tcc_binary;
2950 switch (tclass)
2952 case tcc_unary:
2953 return fold_build1_loc (loc, code, type,
2954 eval_subst (loc, TREE_OPERAND (arg, 0),
2955 old0, new0, old1, new1));
2957 case tcc_binary:
2958 return fold_build2_loc (loc, code, type,
2959 eval_subst (loc, TREE_OPERAND (arg, 0),
2960 old0, new0, old1, new1),
2961 eval_subst (loc, TREE_OPERAND (arg, 1),
2962 old0, new0, old1, new1));
2964 case tcc_expression:
2965 switch (code)
2967 case SAVE_EXPR:
2968 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2969 old1, new1);
2971 case COMPOUND_EXPR:
2972 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2973 old1, new1);
2975 case COND_EXPR:
2976 return fold_build3_loc (loc, code, type,
2977 eval_subst (loc, TREE_OPERAND (arg, 0),
2978 old0, new0, old1, new1),
2979 eval_subst (loc, TREE_OPERAND (arg, 1),
2980 old0, new0, old1, new1),
2981 eval_subst (loc, TREE_OPERAND (arg, 2),
2982 old0, new0, old1, new1));
2983 default:
2984 break;
2986 /* Fall through - ??? */
2988 case tcc_comparison:
2990 tree arg0 = TREE_OPERAND (arg, 0);
2991 tree arg1 = TREE_OPERAND (arg, 1);
2993 /* We need to check both for exact equality and tree equality. The
2994 former will be true if the operand has a side-effect. In that
2995 case, we know the operand occurred exactly once. */
2997 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2998 arg0 = new0;
2999 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3000 arg0 = new1;
3002 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3003 arg1 = new0;
3004 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3005 arg1 = new1;
3007 return fold_build2_loc (loc, code, type, arg0, arg1);
3010 default:
3011 return arg;
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED was previously an operand of the expression
3017 but is now not needed (e.g., we folded OMITTED * 0).
3019 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3020 the conversion of RESULT to TYPE. */
3022 tree
3023 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3025 tree t = fold_convert_loc (loc, type, result);
3027 /* If the resulting operand is an empty statement, just return the omitted
3028 statement cast to void. */
3029 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3030 return build1_loc (loc, NOP_EXPR, void_type_node,
3031 fold_ignored_result (omitted));
3033 if (TREE_SIDE_EFFECTS (omitted))
3034 return build2_loc (loc, COMPOUND_EXPR, type,
3035 fold_ignored_result (omitted), t);
3037 return non_lvalue_loc (loc, t);
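/* Example (annotation): when folding "f () * 0" the call cannot simply be
   discarded, so omit_one_operand_loc keeps it alive in a COMPOUND_EXPR:

     omit_one_operand_loc (loc, integer_type_node,
			   integer_zero_node, call_to_f)
       ==>  f (), 0

   whereas a side-effect-free OMITTED is dropped and only the converted
   RESULT survives.  "call_to_f" stands for some CALL_EXPR tree.  */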
3040 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3042 static tree
3043 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3044 tree omitted)
3046 tree t = fold_convert_loc (loc, type, result);
3048 /* If the resulting operand is an empty statement, just return the omitted
3049 statement cast to void. */
3050 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3051 return build1_loc (loc, NOP_EXPR, void_type_node,
3052 fold_ignored_result (omitted));
3054 if (TREE_SIDE_EFFECTS (omitted))
3055 return build2_loc (loc, COMPOUND_EXPR, type,
3056 fold_ignored_result (omitted), t);
3058 return pedantic_non_lvalue_loc (loc, t);
3061 /* Return a tree for the case when the result of an expression is RESULT
3062 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3063 of the expression but are now not needed.
3065 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3066 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3067 evaluated before OMITTED2. Otherwise, if neither has side effects,
3068 just do the conversion of RESULT to TYPE. */
3070 tree
3071 omit_two_operands_loc (location_t loc, tree type, tree result,
3072 tree omitted1, tree omitted2)
3074 tree t = fold_convert_loc (loc, type, result);
3076 if (TREE_SIDE_EFFECTS (omitted2))
3077 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3078 if (TREE_SIDE_EFFECTS (omitted1))
3079 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3081 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3085 /* Return a simplified tree node for the truth-negation of ARG. This
3086 never alters ARG itself. We assume that ARG is an operation that
3087 returns a truth value (0 or 1).
3089 FIXME: one would think we would fold the result, but it causes
3090 problems with the dominator optimizer. */
3092 static tree
3093 fold_truth_not_expr (location_t loc, tree arg)
3095 tree type = TREE_TYPE (arg);
3096 enum tree_code code = TREE_CODE (arg);
3097 location_t loc1, loc2;
3099 /* If this is a comparison, we can simply invert it, except for
3100 floating-point non-equality comparisons, in which case we just
3101 enclose a TRUTH_NOT_EXPR around what we have. */
3103 if (TREE_CODE_CLASS (code) == tcc_comparison)
3105 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3106 if (FLOAT_TYPE_P (op_type)
3107 && flag_trapping_math
3108 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3109 && code != NE_EXPR && code != EQ_EXPR)
3110 return NULL_TREE;
3112 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3113 if (code == ERROR_MARK)
3114 return NULL_TREE;
3116 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3117 TREE_OPERAND (arg, 1));
3120 switch (code)
3122 case INTEGER_CST:
3123 return constant_boolean_node (integer_zerop (arg), type);
3125 case TRUTH_AND_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 return build2_loc (loc, TRUTH_OR_EXPR, type,
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3130 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3132 case TRUTH_OR_EXPR:
3133 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3134 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3135 return build2_loc (loc, TRUTH_AND_EXPR, type,
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3139 case TRUTH_XOR_EXPR:
3140 /* Here we can invert either operand. We invert the first operand
3141 unless the second operand is a TRUTH_NOT_EXPR in which case our
3142 result is the XOR of the first operand with the inside of the
3143 negation of the second operand. */
3145 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3146 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3147 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3148 else
3149 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3150 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3151 TREE_OPERAND (arg, 1));
3153 case TRUTH_ANDIF_EXPR:
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3155 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3156 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3158 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3160 case TRUTH_ORIF_EXPR:
3161 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3162 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3163 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3164 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3165 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3167 case TRUTH_NOT_EXPR:
3168 return TREE_OPERAND (arg, 0);
3170 case COND_EXPR:
3172 tree arg1 = TREE_OPERAND (arg, 1);
3173 tree arg2 = TREE_OPERAND (arg, 2);
3175 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3176 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3178 /* A COND_EXPR may have a throw as one operand, which
3179 then has void type. Just leave void operands
3180 as they are. */
3181 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3182 VOID_TYPE_P (TREE_TYPE (arg1))
3183 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3184 VOID_TYPE_P (TREE_TYPE (arg2))
3185 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3188 case COMPOUND_EXPR:
3189 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3190 return build2_loc (loc, COMPOUND_EXPR, type,
3191 TREE_OPERAND (arg, 0),
3192 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3194 case NON_LVALUE_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3198 CASE_CONVERT:
3199 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3200 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3202 /* ... fall through ... */
3204 case FLOAT_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 return build1_loc (loc, TREE_CODE (arg), type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3209 case BIT_AND_EXPR:
3210 if (!integer_onep (TREE_OPERAND (arg, 1)))
3211 return NULL_TREE;
3212 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3214 case SAVE_EXPR:
3215 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3217 case CLEANUP_POINT_EXPR:
3218 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3219 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3222 default:
3223 return NULL_TREE;
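/* Example (annotation): the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases are De
   Morgan's laws, so !(a && b) folds to !a || !b, while the comparison case
   flips the code directly, turning !(x < y) into x >= y for integral
   operands; the floating-point restrictions are handled through
   invert_tree_comparison as described above.  */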
3227 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3228 assume that ARG is an operation that returns a truth value (0 or 1
3229 for scalars, 0 or -1 for vectors). Return the folded expression if
3230 folding is successful. Otherwise, return NULL_TREE. */
3232 static tree
3233 fold_invert_truthvalue (location_t loc, tree arg)
3235 tree type = TREE_TYPE (arg);
3236 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3237 ? BIT_NOT_EXPR
3238 : TRUTH_NOT_EXPR,
3239 type, arg);
3242 /* Return a simplified tree node for the truth-negation of ARG. This
3243 never alters ARG itself. We assume that ARG is an operation that
3244 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3246 tree
3247 invert_truthvalue_loc (location_t loc, tree arg)
3249 if (TREE_CODE (arg) == ERROR_MARK)
3250 return arg;
3252 tree type = TREE_TYPE (arg);
3253 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3254 ? BIT_NOT_EXPR
3255 : TRUTH_NOT_EXPR,
3256 type, arg);
3259 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3260 operands are another bit-wise operation with a common input. If so,
3261 distribute the bit operations to save an operation and possibly two if
3262 constants are involved. For example, convert
3263 (A | B) & (A | C) into A | (B & C)
3264 Further simplification will occur if B and C are constants.
3266 If this optimization cannot be done, 0 will be returned. */
3268 static tree
3269 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3270 tree arg0, tree arg1)
3272 tree common;
3273 tree left, right;
3275 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3276 || TREE_CODE (arg0) == code
3277 || (TREE_CODE (arg0) != BIT_AND_EXPR
3278 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3279 return 0;
3281 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3283 common = TREE_OPERAND (arg0, 0);
3284 left = TREE_OPERAND (arg0, 1);
3285 right = TREE_OPERAND (arg1, 1);
3287 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3289 common = TREE_OPERAND (arg0, 0);
3290 left = TREE_OPERAND (arg0, 1);
3291 right = TREE_OPERAND (arg1, 0);
3293 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3295 common = TREE_OPERAND (arg0, 1);
3296 left = TREE_OPERAND (arg0, 0);
3297 right = TREE_OPERAND (arg1, 1);
3299 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3301 common = TREE_OPERAND (arg0, 1);
3302 left = TREE_OPERAND (arg0, 0);
3303 right = TREE_OPERAND (arg1, 0);
3305 else
3306 return 0;
3308 common = fold_convert_loc (loc, type, common);
3309 left = fold_convert_loc (loc, type, left);
3310 right = fold_convert_loc (loc, type, right);
3311 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3312 fold_build2_loc (loc, code, type, left, right));
3315 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3316 with code CODE. This optimization is unsafe. */
3317 static tree
3318 distribute_real_division (location_t loc, enum tree_code code, tree type,
3319 tree arg0, tree arg1)
3321 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3322 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3324 /* (A / C) +- (B / C) -> (A +- B) / C. */
3325 if (mul0 == mul1
3326 && operand_equal_p (TREE_OPERAND (arg0, 1),
3327 TREE_OPERAND (arg1, 1), 0))
3328 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3329 fold_build2_loc (loc, code, type,
3330 TREE_OPERAND (arg0, 0),
3331 TREE_OPERAND (arg1, 0)),
3332 TREE_OPERAND (arg0, 1));
3334 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3335 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3336 TREE_OPERAND (arg1, 0), 0)
3337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3338 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3340 REAL_VALUE_TYPE r0, r1;
3341 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3342 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3343 if (!mul0)
3344 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3345 if (!mul1)
3346 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3347 real_arithmetic (&r0, code, &r0, &r1);
3348 return fold_build2_loc (loc, MULT_EXPR, type,
3349 TREE_OPERAND (arg0, 0),
3350 build_real (type, r0));
3353 return NULL_TREE;
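/* Example (annotation): this turns x/3.0 + y/3.0 into (x + y) / 3.0 and
   x/3.0 + x/5.0 into x * (1/3.0 + 1/5.0).  Both rewrites can change
   rounding and thus the result, which is why the function is documented as
   unsafe; callers are expected to guard it, typically behind
   -funsafe-math-optimizations.  */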
3356 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3357 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3359 static tree
3360 make_bit_field_ref (location_t loc, tree inner, tree type,
3361 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3363 tree result, bftype;
3365 if (bitpos == 0)
3367 tree size = TYPE_SIZE (TREE_TYPE (inner));
3368 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3369 || POINTER_TYPE_P (TREE_TYPE (inner)))
3370 && host_integerp (size, 0)
3371 && tree_low_cst (size, 0) == bitsize)
3372 return fold_convert_loc (loc, type, inner);
3375 bftype = type;
3376 if (TYPE_PRECISION (bftype) != bitsize
3377 || TYPE_UNSIGNED (bftype) == !unsignedp)
3378 bftype = build_nonstandard_integer_type (bitsize, 0);
3380 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3381 size_int (bitsize), bitsize_int (bitpos));
3383 if (bftype != type)
3384 result = fold_convert_loc (loc, type, result);
3386 return result;
3389 /* Optimize a bit-field compare.
3391 There are two cases: First is a compare against a constant and the
3392 second is a comparison of two items where the fields are at the same
3393 bit position relative to the start of a chunk (byte, halfword, word)
3394 large enough to contain it. In these cases we can avoid the shift
3395 implicit in bitfield extractions.
3397 For constants, we emit a compare of the shifted constant with the
3398 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3399 compared. For two fields at the same position, we do the ANDs with the
3400 similar mask and compare the result of the ANDs.
3402 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3403 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3404 are the left and right operands of the comparison, respectively.
3406 If the optimization described above can be done, we return the resulting
3407 tree. Otherwise we return zero. */
3409 static tree
3410 optimize_bit_field_compare (location_t loc, enum tree_code code,
3411 tree compare_type, tree lhs, tree rhs)
3413 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3414 tree type = TREE_TYPE (lhs);
3415 tree signed_type, unsigned_type;
3416 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3417 enum machine_mode lmode, rmode, nmode;
3418 int lunsignedp, runsignedp;
3419 int lvolatilep = 0, rvolatilep = 0;
3420 tree linner, rinner = NULL_TREE;
3421 tree mask;
3422 tree offset;
3424 /* In the strict volatile bitfields case, doing code changes here may prevent
3425 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3426 if (flag_strict_volatile_bitfields > 0)
3427 return 0;
3429 /* Get all the information about the extractions being done. If the bit size
3430 is the same as the size of the underlying object, we aren't doing an
3431 extraction at all and so can do nothing. We also don't want to
3432 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3433 then will no longer be able to replace it. */
3434 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3435 &lunsignedp, &lvolatilep, false);
3436 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3437 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3438 return 0;
3440 if (!const_p)
3442 /* If this is not a constant, we can only do something if bit positions,
3443 sizes, and signedness are the same. */
3444 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3445 &runsignedp, &rvolatilep, false);
3447 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3448 || lunsignedp != runsignedp || offset != 0
3449 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3450 return 0;
3453 /* See if we can find a mode to refer to this field. We should be able to,
3454 but fail if we can't. */
3455 if (lvolatilep
3456 && GET_MODE_BITSIZE (lmode) > 0
3457 && flag_strict_volatile_bitfields > 0)
3458 nmode = lmode;
3459 else
3460 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3461 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3462 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3463 TYPE_ALIGN (TREE_TYPE (rinner))),
3464 word_mode, lvolatilep || rvolatilep);
3465 if (nmode == VOIDmode)
3466 return 0;
3468 /* Set signed and unsigned types of the precision of this mode for the
3469 shifts below. */
3470 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3471 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3473 /* Compute the bit position and size for the new reference and our offset
3474 within it. If the new reference is the same size as the original, we
3475 won't optimize anything, so return zero. */
3476 nbitsize = GET_MODE_BITSIZE (nmode);
3477 nbitpos = lbitpos & ~ (nbitsize - 1);
3478 lbitpos -= nbitpos;
3479 if (nbitsize == lbitsize)
3480 return 0;
3482 if (BYTES_BIG_ENDIAN)
3483 lbitpos = nbitsize - lbitsize - lbitpos;
3485 /* Make the mask to be used against the extracted field. */
3486 mask = build_int_cst_type (unsigned_type, -1);
3487 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3488 mask = const_binop (RSHIFT_EXPR, mask,
3489 size_int (nbitsize - lbitsize - lbitpos));
3491 if (! const_p)
3492 /* If not comparing with constant, just rework the comparison
3493 and return. */
3494 return fold_build2_loc (loc, code, compare_type,
3495 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3496 make_bit_field_ref (loc, linner,
3497 unsigned_type,
3498 nbitsize, nbitpos,
3499 1),
3500 mask),
3501 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3502 make_bit_field_ref (loc, rinner,
3503 unsigned_type,
3504 nbitsize, nbitpos,
3505 1),
3506 mask));
3508 /* Otherwise, we are handling the constant case. See if the constant is too
3509 big for the field. Warn and return a tree for 0 (false) if so. We do
3510 this not only for its own sake, but to avoid having to test for this
3511 error case below. If we didn't, we might generate wrong code.
3513 For unsigned fields, the constant shifted right by the field length should
3514 be all zero. For signed fields, the high-order bits should agree with
3515 the sign bit. */
3517 if (lunsignedp)
3519 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3520 fold_convert_loc (loc,
3521 unsigned_type, rhs),
3522 size_int (lbitsize))))
3524 warning (0, "comparison is always %d due to width of bit-field",
3525 code == NE_EXPR);
3526 return constant_boolean_node (code == NE_EXPR, compare_type);
3529 else
3531 tree tem = const_binop (RSHIFT_EXPR,
3532 fold_convert_loc (loc, signed_type, rhs),
3533 size_int (lbitsize - 1));
3534 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3536 warning (0, "comparison is always %d due to width of bit-field",
3537 code == NE_EXPR);
3538 return constant_boolean_node (code == NE_EXPR, compare_type);
3542 /* Single-bit compares should always be against zero. */
3543 if (lbitsize == 1 && ! integer_zerop (rhs))
3545 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3546 rhs = build_int_cst (type, 0);
3549 /* Make a new bitfield reference, shift the constant over the
3550 appropriate number of bits and mask it with the computed mask
3551 (in case this was a signed field). If we changed it, make a new one. */
3552 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3553 if (lvolatilep)
3555 TREE_SIDE_EFFECTS (lhs) = 1;
3556 TREE_THIS_VOLATILE (lhs) = 1;
3559 rhs = const_binop (BIT_AND_EXPR,
3560 const_binop (LSHIFT_EXPR,
3561 fold_convert_loc (loc, unsigned_type, rhs),
3562 size_int (lbitpos)),
3563 mask);
3565 lhs = build2_loc (loc, code, compare_type,
3566 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3567 return lhs;
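/* Worked example (annotation; little-endian, no volatility assumed): for a
   3-bit field at bit offset 2 inside a 32-bit word, nbitsize == 32 and
   lbitpos == 2, so the mask computed above is

     (~0U << (32 - 3)) >> (32 - 3 - 2)  ==  0x1c

   and a compare "field == C" becomes (word & 0x1c) == ((C << 2) & 0x1c),
   avoiding the shift an ordinary bitfield extraction would need.  */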
3570 /* Subroutine for fold_truth_andor_1: decode a field reference.
3572 If EXP is a comparison reference, we return the innermost reference.
3574 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3575 set to the starting bit number.
3577 If the innermost field can be completely contained in a mode-sized
3578 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3580 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3581 otherwise it is not changed.
3583 *PUNSIGNEDP is set to the signedness of the field.
3585 *PMASK is set to the mask used. This is either contained in a
3586 BIT_AND_EXPR or derived from the width of the field.
3588 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3590 Return 0 if this is not a component reference or is one that we can't
3591 do anything with. */
3593 static tree
3594 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3595 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3596 int *punsignedp, int *pvolatilep,
3597 tree *pmask, tree *pand_mask)
3599 tree outer_type = 0;
3600 tree and_mask = 0;
3601 tree mask, inner, offset;
3602 tree unsigned_type;
3603 unsigned int precision;
3605 /* All the optimizations using this function assume integer fields.
3606 There are problems with FP fields since the type_for_size call
3607 below can fail for, e.g., XFmode. */
3608 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3609 return 0;
3611 /* We are interested in the bare arrangement of bits, so strip everything
3612 that doesn't affect the machine mode. However, record the type of the
3613 outermost expression if it may matter below. */
3614 if (CONVERT_EXPR_P (exp)
3615 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3616 outer_type = TREE_TYPE (exp);
3617 STRIP_NOPS (exp);
3619 if (TREE_CODE (exp) == BIT_AND_EXPR)
3621 and_mask = TREE_OPERAND (exp, 1);
3622 exp = TREE_OPERAND (exp, 0);
3623 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3624 if (TREE_CODE (and_mask) != INTEGER_CST)
3625 return 0;
3628 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3629 punsignedp, pvolatilep, false);
3630 if ((inner == exp && and_mask == 0)
3631 || *pbitsize < 0 || offset != 0
3632 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3633 return 0;
3635 /* If the number of bits in the reference is the same as the bitsize of
3636 the outer type, then the outer type gives the signedness. Otherwise
3637 (in case of a small bitfield) the signedness is unchanged. */
3638 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3639 *punsignedp = TYPE_UNSIGNED (outer_type);
3641 /* Compute the mask to access the bitfield. */
3642 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3643 precision = TYPE_PRECISION (unsigned_type);
3645 mask = build_int_cst_type (unsigned_type, -1);
3647 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3648 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3650 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3651 if (and_mask != 0)
3652 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3653 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3655 *pmask = mask;
3656 *pand_mask = and_mask;
3657 return inner;
3660 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3661 bit positions. */
3663 static int
3664 all_ones_mask_p (const_tree mask, int size)
3666 tree type = TREE_TYPE (mask);
3667 unsigned int precision = TYPE_PRECISION (type);
3668 tree tmask;
3670 tmask = build_int_cst_type (signed_type_for (type), -1);
3672 return
3673 tree_int_cst_equal (mask,
3674 const_binop (RSHIFT_EXPR,
3675 const_binop (LSHIFT_EXPR, tmask,
3676 size_int (precision - size)),
3677 size_int (precision - size)));
3680 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3681 represents the sign bit of EXP's type. If EXP represents a sign
3682 or zero extension, also test VAL against the unextended type.
3683 The return value is the (sub)expression whose sign bit is VAL,
3684 or NULL_TREE otherwise. */
3686 static tree
3687 sign_bit_p (tree exp, const_tree val)
3689 unsigned HOST_WIDE_INT mask_lo, lo;
3690 HOST_WIDE_INT mask_hi, hi;
3691 int width;
3692 tree t;
3694 /* Tree EXP must have an integral type. */
3695 t = TREE_TYPE (exp);
3696 if (! INTEGRAL_TYPE_P (t))
3697 return NULL_TREE;
3699 /* Tree VAL must be an integer constant. */
3700 if (TREE_CODE (val) != INTEGER_CST
3701 || TREE_OVERFLOW (val))
3702 return NULL_TREE;
3704 width = TYPE_PRECISION (t);
3705 if (width > HOST_BITS_PER_WIDE_INT)
3707 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3708 lo = 0;
3710 mask_hi = ((unsigned HOST_WIDE_INT) -1
3711 >> (HOST_BITS_PER_DOUBLE_INT - width));
3712 mask_lo = -1;
3714 else
3716 hi = 0;
3717 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3719 mask_hi = 0;
3720 mask_lo = ((unsigned HOST_WIDE_INT) -1
3721 >> (HOST_BITS_PER_WIDE_INT - width));
3724 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3725 treat VAL as if it were unsigned. */
3726 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3727 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3728 return exp;
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3733 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3735 return NULL_TREE;
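/* Example (annotation): for a 16-bit integral EXP, sign_bit_p returns EXP
   when VAL is 0x8000.  If EXP is a NOP_EXPR extending an 8-bit operand,
   the recursion above also tests VAL against 0x80, the sign bit of the
   unextended type, and returns that narrower operand on a match.  */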
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3741 static int
3742 simple_operand_p (const_tree exp)
3744 /* Strip any conversions that don't change the machine mode. */
3745 STRIP_NOPS (exp);
3747 return (CONSTANT_CLASS_P (exp)
3748 || TREE_CODE (exp) == SSA_NAME
3749 || (DECL_P (exp)
3750 && ! TREE_ADDRESSABLE (exp)
3751 && ! TREE_THIS_VOLATILE (exp)
3752 && ! DECL_NONLOCAL (exp)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp)
3757 && ! DECL_EXTERNAL (exp)
3758 /* Loading a static variable is unduly expensive, but global
3759 registers aren't expensive. */
3760 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3763 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3764 to be evaluated unconditionally.
3765 In addition to simple_operand_p, we assume that comparisons, conversions,
3766 and logic-not operations are simple, if their operands are simple, too. */
3768 static bool
3769 simple_operand_p_2 (tree exp)
3771 enum tree_code code;
3773 if (TREE_SIDE_EFFECTS (exp)
3774 || tree_could_trap_p (exp))
3775 return false;
3777 while (CONVERT_EXPR_P (exp))
3778 exp = TREE_OPERAND (exp, 0);
3780 code = TREE_CODE (exp);
3782 if (TREE_CODE_CLASS (code) == tcc_comparison)
3783 return (simple_operand_p (TREE_OPERAND (exp, 0))
3784 && simple_operand_p (TREE_OPERAND (exp, 1)));
3786 if (code == TRUTH_NOT_EXPR)
3787 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3789 return simple_operand_p (exp);
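/* Illustrative examples, added for exposition and not part of the
   original source: with non-volatile, non-addressable locals A and B,
   simple_operand_p holds for A, B and constants, so simple_operand_p_2
   holds for A < B and for !(A == B).  It fails for a TREE_PUBLIC
   global (see simple_operand_p above) and for operands with side
   effects or that could trap, such as *P or an integer division.  */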
3793 /* The following functions are subroutines to fold_range_test and allow it to
3794 try to change a logical combination of comparisons into a range test.
3796 For example, both
3797 X == 2 || X == 3 || X == 4 || X == 5
3798 and
3799 X >= 2 && X <= 5
3800 are converted to
3801 (unsigned) (X - 2) <= 3
3803 We describe each set of comparisons as being either inside or outside
3804 a range, using a variable named like IN_P, and then describe the
3805 range with a lower and upper bound. If one of the bounds is omitted,
3806 it represents either the highest or lowest value of the type.
3808 In the comments below, we represent a range by two numbers in brackets
3809 preceded by a "+" to designate being inside that range, or a "-" to
3810 designate being outside that range, so the condition can be inverted by
3811 flipping the prefix. An omitted bound is represented by a "-". For
3812 example, "- [-, 10]" means being outside the range starting at the lowest
3813 possible value and ending at 10, in other words, being greater than 10.
3814 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3815 always false.
3817 We set up things so that the missing bounds are handled in a consistent
3818 manner so neither a missing bound nor "true" and "false" need to be
3819 handled using a special case. */
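/* Illustrative encodings, added for exposition and not part of the
   original source, using the notation above together with the rules in
   make_range_step below: X != 7 is "- [7, 7]", X > 10 is "- [-, 10]",
   and X <= 10 is "+ [-, 10]"; inverting a test simply flips the
   "+"/"-" prefix.  */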
3821 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3822 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3823 and UPPER1_P are nonzero if the respective argument is an upper bound
3824 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3825 must be specified for a comparison. ARG1 will be converted to ARG0's
3826 type if both are specified. */
3828 static tree
3829 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3830 tree arg1, int upper1_p)
3832 tree tem;
3833 int result;
3834 int sgn0, sgn1;
3836 /* If neither arg represents infinity, do the normal operation.
3837 Else, if not a comparison, return infinity. Else handle the special
3838 comparison rules. Note that most of the cases below won't occur, but
3839 are handled for consistency. */
3841 if (arg0 != 0 && arg1 != 0)
3843 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3844 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3845 STRIP_NOPS (tem);
3846 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3849 if (TREE_CODE_CLASS (code) != tcc_comparison)
3850 return 0;
3852 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3853 for neither. In real maths, we cannot assume open ended ranges are
3854 the same. But, this is computer arithmetic, where numbers are finite.
3855 We can therefore make the transformation of any unbounded range with
3856 the value Z, Z being greater than any representable number. This permits
3857 us to treat unbounded ranges as equal. */
3858 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3859 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3860 switch (code)
3862 case EQ_EXPR:
3863 result = sgn0 == sgn1;
3864 break;
3865 case NE_EXPR:
3866 result = sgn0 != sgn1;
3867 break;
3868 case LT_EXPR:
3869 result = sgn0 < sgn1;
3870 break;
3871 case LE_EXPR:
3872 result = sgn0 <= sgn1;
3873 break;
3874 case GT_EXPR:
3875 result = sgn0 > sgn1;
3876 break;
3877 case GE_EXPR:
3878 result = sgn0 >= sgn1;
3879 break;
3880 default:
3881 gcc_unreachable ();
3884 return constant_boolean_node (result, type);
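/* Illustrative example, added for exposition and not part of the
   original source (FIVE names a hypothetical INTEGER_CST):
   range_binop (GT_EXPR, integer_type_node, NULL_TREE, 1, FIVE, 0)
   asks whether a missing upper bound exceeds the constant 5.  Here
   sgn0 == 1 (missing upper bound) and sgn1 == 0 (present argument),
   so the result is the constant true node, consistent with treating
   the missing bound as the huge value Z described above.  */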
3887 /* Helper routine for make_range. Perform one step for it, return
3888 new expression if the loop should continue or NULL_TREE if it should
3889 stop. */
3891 tree
3892 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3893 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3894 bool *strict_overflow_p)
3896 tree arg0_type = TREE_TYPE (arg0);
3897 tree n_low, n_high, low = *p_low, high = *p_high;
3898 int in_p = *p_in_p, n_in_p;
3900 switch (code)
3902 case TRUTH_NOT_EXPR:
3903 /* We can only do something if the range is testing for zero. */
3904 if (low == NULL_TREE || high == NULL_TREE
3905 || ! integer_zerop (low) || ! integer_zerop (high))
3906 return NULL_TREE;
3907 *p_in_p = ! in_p;
3908 return arg0;
3910 case EQ_EXPR: case NE_EXPR:
3911 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3912 /* We can only do something if the range is testing for zero
3913 and if the second operand is an integer constant. Note that
3914 saying something is "in" the range we make is done by
3915 complementing IN_P since it will be set in the initial case of
3916 being not equal to zero; "out" is leaving it alone. */
3917 if (low == NULL_TREE || high == NULL_TREE
3918 || ! integer_zerop (low) || ! integer_zerop (high)
3919 || TREE_CODE (arg1) != INTEGER_CST)
3920 return NULL_TREE;
3922 switch (code)
3924 case NE_EXPR: /* - [c, c] */
3925 low = high = arg1;
3926 break;
3927 case EQ_EXPR: /* + [c, c] */
3928 in_p = ! in_p, low = high = arg1;
3929 break;
3930 case GT_EXPR: /* - [-, c] */
3931 low = 0, high = arg1;
3932 break;
3933 case GE_EXPR: /* + [c, -] */
3934 in_p = ! in_p, low = arg1, high = 0;
3935 break;
3936 case LT_EXPR: /* - [c, -] */
3937 low = arg1, high = 0;
3938 break;
3939 case LE_EXPR: /* + [-, c] */
3940 in_p = ! in_p, low = 0, high = arg1;
3941 break;
3942 default:
3943 gcc_unreachable ();
3946 /* If this is an unsigned comparison, we also know that EXP is
3947 greater than or equal to zero. We base the range tests we make
3948 on that fact, so we record it here so we can parse existing
3949 range tests. We test arg0_type since often the return type
3950 of, e.g. EQ_EXPR, is boolean. */
3951 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3953 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3954 in_p, low, high, 1,
3955 build_int_cst (arg0_type, 0),
3956 NULL_TREE))
3957 return NULL_TREE;
3959 in_p = n_in_p, low = n_low, high = n_high;
3961 /* If the high bound is missing, but we have a nonzero low
3962 bound, reverse the range so it goes from zero to the low bound
3963 minus 1. */
3964 if (high == 0 && low && ! integer_zerop (low))
3966 in_p = ! in_p;
3967 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3968 integer_one_node, 0);
3969 low = build_int_cst (arg0_type, 0);
3973 *p_low = low;
3974 *p_high = high;
3975 *p_in_p = in_p;
3976 return arg0;
3978 case NEGATE_EXPR:
3979 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3980 low and high are non-NULL, then normalize will DTRT. */
3981 if (!TYPE_UNSIGNED (arg0_type)
3982 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3984 if (low == NULL_TREE)
3985 low = TYPE_MIN_VALUE (arg0_type);
3986 if (high == NULL_TREE)
3987 high = TYPE_MAX_VALUE (arg0_type);
3990 /* (-x) IN [a,b] -> x in [-b, -a] */
3991 n_low = range_binop (MINUS_EXPR, exp_type,
3992 build_int_cst (exp_type, 0),
3993 0, high, 1);
3994 n_high = range_binop (MINUS_EXPR, exp_type,
3995 build_int_cst (exp_type, 0),
3996 0, low, 0);
3997 if (n_high != 0 && TREE_OVERFLOW (n_high))
3998 return NULL_TREE;
3999 goto normalize;
4001 case BIT_NOT_EXPR:
4002 /* ~ X -> -X - 1 */
4003 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4004 build_int_cst (exp_type, 1));
4006 case PLUS_EXPR:
4007 case MINUS_EXPR:
4008 if (TREE_CODE (arg1) != INTEGER_CST)
4009 return NULL_TREE;
4011 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4012 move a constant to the other side. */
4013 if (!TYPE_UNSIGNED (arg0_type)
4014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4015 return NULL_TREE;
4017 /* If EXP is signed, any overflow in the computation is undefined,
4018 so we don't worry about it so long as our computations on
4019 the bounds don't overflow. For unsigned, overflow is defined
4020 and this is exactly the right thing. */
4021 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4022 arg0_type, low, 0, arg1, 0);
4023 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4024 arg0_type, high, 1, arg1, 0);
4025 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4026 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4027 return NULL_TREE;
4029 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4030 *strict_overflow_p = true;
4032 normalize:
4033 /* Check for an unsigned range which has wrapped around the maximum
4034 value thus making n_high < n_low, and normalize it. */
4035 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4037 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4038 integer_one_node, 0);
4039 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4040 integer_one_node, 0);
4042 /* If the range is of the form +/- [ x+1, x ], we won't
4043 be able to normalize it. But then, it represents the
4044 whole range or the empty set, so make it
4045 +/- [ -, - ]. */
4046 if (tree_int_cst_equal (n_low, low)
4047 && tree_int_cst_equal (n_high, high))
4048 low = high = 0;
4049 else
4050 in_p = ! in_p;
4052 else
4053 low = n_low, high = n_high;
4055 *p_low = low;
4056 *p_high = high;
4057 *p_in_p = in_p;
4058 return arg0;
4060 CASE_CONVERT:
4061 case NON_LVALUE_EXPR:
4062 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4063 return NULL_TREE;
4065 if (! INTEGRAL_TYPE_P (arg0_type)
4066 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4067 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4068 return NULL_TREE;
4070 n_low = low, n_high = high;
4072 if (n_low != 0)
4073 n_low = fold_convert_loc (loc, arg0_type, n_low);
4075 if (n_high != 0)
4076 n_high = fold_convert_loc (loc, arg0_type, n_high);
4078 /* If we're converting arg0 from an unsigned type to exp,
4079 a signed type, we will be doing the comparison as unsigned.
4080 The tests above have already verified that LOW and HIGH
4081 are both positive.
4083 So we have to ensure that we will handle large unsigned
4084 values the same way that the current signed bounds treat
4085 negative values. */
4087 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4089 tree high_positive;
4090 tree equiv_type;
4091 /* For fixed-point modes, we need to pass the saturating flag
4092 as the 2nd parameter. */
4093 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4094 equiv_type
4095 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4096 TYPE_SATURATING (arg0_type));
4097 else
4098 equiv_type
4099 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4101 /* A range without an upper bound is, naturally, unbounded.
4102 Since convert would have cropped a very large value, use
4103 the max value for the destination type. */
4104 high_positive
4105 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4106 : TYPE_MAX_VALUE (arg0_type);
4108 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4109 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4110 fold_convert_loc (loc, arg0_type,
4111 high_positive),
4112 build_int_cst (arg0_type, 1));
4114 /* If the low bound is specified, "and" the range with the
4115 range for which the original unsigned value will be
4116 positive. */
4117 if (low != 0)
4119 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4120 1, fold_convert_loc (loc, arg0_type,
4121 integer_zero_node),
4122 high_positive))
4123 return NULL_TREE;
4125 in_p = (n_in_p == in_p);
4127 else
4129 /* Otherwise, "or" the range with the range of the input
4130 that will be interpreted as negative. */
4131 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4132 1, fold_convert_loc (loc, arg0_type,
4133 integer_zero_node),
4134 high_positive))
4135 return NULL_TREE;
4137 in_p = (in_p != n_in_p);
4141 *p_low = n_low;
4142 *p_high = n_high;
4143 *p_in_p = in_p;
4144 return arg0;
4146 default:
4147 return NULL_TREE;
4151 /* Given EXP, a logical expression, set the range it is testing into
4152 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4153 actually being tested. *PLOW and *PHIGH will be made of the same
4154 type as the returned expression. If EXP is not a comparison, we
4155 will most likely not be returning a useful value and range. Set
4156 *STRICT_OVERFLOW_P to true if the return value is only valid
4157 because signed overflow is undefined; otherwise, do not change
4158 *STRICT_OVERFLOW_P. */
4160 tree
4161 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4162 bool *strict_overflow_p)
4164 enum tree_code code;
4165 tree arg0, arg1 = NULL_TREE;
4166 tree exp_type, nexp;
4167 int in_p;
4168 tree low, high;
4169 location_t loc = EXPR_LOCATION (exp);
4171 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4172 and see if we can refine the range. Some of the cases below may not
4173 happen, but it doesn't seem worth worrying about this. We "continue"
4174 the outer loop when we've changed something; otherwise we "break"
4175 the switch, which will "break" the while. */
4177 in_p = 0;
4178 low = high = build_int_cst (TREE_TYPE (exp), 0);
4180 while (1)
4182 code = TREE_CODE (exp);
4183 exp_type = TREE_TYPE (exp);
4184 arg0 = NULL_TREE;
4186 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4188 if (TREE_OPERAND_LENGTH (exp) > 0)
4189 arg0 = TREE_OPERAND (exp, 0);
4190 if (TREE_CODE_CLASS (code) == tcc_binary
4191 || TREE_CODE_CLASS (code) == tcc_comparison
4192 || (TREE_CODE_CLASS (code) == tcc_expression
4193 && TREE_OPERAND_LENGTH (exp) > 1))
4194 arg1 = TREE_OPERAND (exp, 1);
4196 if (arg0 == NULL_TREE)
4197 break;
4199 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4200 &high, &in_p, strict_overflow_p);
4201 if (nexp == NULL_TREE)
4202 break;
4203 exp = nexp;
4206 /* If EXP is a constant, we can evaluate whether this is true or false. */
4207 if (TREE_CODE (exp) == INTEGER_CST)
4209 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4210 exp, 0, low, 0))
4211 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4212 exp, 1, high, 1)));
4213 low = high = 0;
4214 exp = 0;
4217 *pin_p = in_p, *plow = low, *phigh = high;
4218 return exp;
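/* Illustrative example, added for exposition and not part of the
   original source: for EXP = (X > 10), the first make_range_step call
   maps GT_EXPR to "- [-, 10]" and hands back X; the loop then stops
   at the bare variable, so make_range returns X with *PIN_P == 0,
   *PLOW == NULL_TREE (unbounded below) and *PHIGH == 10, matching the
   "- [-, 10]" example in the comment block before range_binop.  */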
4221 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4222 type, TYPE, return an expression to test if EXP is in (or out of, depending
4223 on IN_P) the range. Return 0 if the test couldn't be created. */
4225 tree
4226 build_range_check (location_t loc, tree type, tree exp, int in_p,
4227 tree low, tree high)
4229 tree etype = TREE_TYPE (exp), value;
4231 #ifdef HAVE_canonicalize_funcptr_for_compare
4232 /* Disable this optimization for function pointer expressions
4233 on targets that require function pointer canonicalization. */
4234 if (HAVE_canonicalize_funcptr_for_compare
4235 && TREE_CODE (etype) == POINTER_TYPE
4236 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4237 return NULL_TREE;
4238 #endif
4240 if (! in_p)
4242 value = build_range_check (loc, type, exp, 1, low, high);
4243 if (value != 0)
4244 return invert_truthvalue_loc (loc, value);
4246 return 0;
4249 if (low == 0 && high == 0)
4250 return build_int_cst (type, 1);
4252 if (low == 0)
4253 return fold_build2_loc (loc, LE_EXPR, type, exp,
4254 fold_convert_loc (loc, etype, high));
4256 if (high == 0)
4257 return fold_build2_loc (loc, GE_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, low));
4260 if (operand_equal_p (low, high, 0))
4261 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4262 fold_convert_loc (loc, etype, low));
4264 if (integer_zerop (low))
4266 if (! TYPE_UNSIGNED (etype))
4268 etype = unsigned_type_for (etype);
4269 high = fold_convert_loc (loc, etype, high);
4270 exp = fold_convert_loc (loc, etype, exp);
4272 return build_range_check (loc, type, exp, 1, 0, high);
4275 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4276 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4278 unsigned HOST_WIDE_INT lo;
4279 HOST_WIDE_INT hi;
4280 int prec;
4282 prec = TYPE_PRECISION (etype);
4283 if (prec <= HOST_BITS_PER_WIDE_INT)
4285 hi = 0;
4286 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4288 else
4290 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4291 lo = (unsigned HOST_WIDE_INT) -1;
4294 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4296 if (TYPE_UNSIGNED (etype))
4298 tree signed_etype = signed_type_for (etype);
4299 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4300 etype
4301 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4302 else
4303 etype = signed_etype;
4304 exp = fold_convert_loc (loc, etype, exp);
4306 return fold_build2_loc (loc, GT_EXPR, type, exp,
4307 build_int_cst (etype, 0));
4311 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4312 This requires wrap-around arithmetic for the type of the expression.
4313 First make sure that arithmetic in this type is valid, then make sure
4314 that it wraps around. */
4315 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4316 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4317 TYPE_UNSIGNED (etype));
4319 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4321 tree utype, minv, maxv;
4323 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4324 for the type in question, as we rely on this here. */
4325 utype = unsigned_type_for (etype);
4326 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4327 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4328 integer_one_node, 1);
4329 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4331 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4332 minv, 1, maxv, 1)))
4333 etype = utype;
4334 else
4335 return 0;
4338 high = fold_convert_loc (loc, etype, high);
4339 low = fold_convert_loc (loc, etype, low);
4340 exp = fold_convert_loc (loc, etype, exp);
4342 value = const_binop (MINUS_EXPR, high, low);
4345 if (POINTER_TYPE_P (etype))
4347 if (value != 0 && !TREE_OVERFLOW (value))
4349 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4350 return build_range_check (loc, type,
4351 fold_build_pointer_plus_loc (loc, exp, low),
4352 1, build_int_cst (etype, 0), value);
4354 return 0;
4357 if (value != 0 && !TREE_OVERFLOW (value))
4358 return build_range_check (loc, type,
4359 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4360 1, build_int_cst (etype, 0), value);
4362 return 0;
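/* Illustrative example, added for exposition and not part of the
   original source: for a signed int EXP with IN_P == 1, LOW == 2 and
   HIGH == 5, the code above moves to the corresponding unsigned type
   when int arithmetic does not wrap, computes HIGH - LOW == 3, and
   recurses on EXP - 2 against [0, 3]; the zero-low case then reduces
   to a single comparison, roughly (unsigned) (EXP - 2) <= 3, which is
   the "(c>=low) && (c<=high)" optimization described above.  */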
4365 /* Return the predecessor of VAL in its type, handling the infinite case. */
4367 static tree
4368 range_predecessor (tree val)
4370 tree type = TREE_TYPE (val);
4372 if (INTEGRAL_TYPE_P (type)
4373 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4374 return 0;
4375 else
4376 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4379 /* Return the successor of VAL in its type, handling the infinite case. */
4381 static tree
4382 range_successor (tree val)
4384 tree type = TREE_TYPE (val);
4386 if (INTEGRAL_TYPE_P (type)
4387 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4388 return 0;
4389 else
4390 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4393 /* Given two ranges, see if we can merge them into one. Return 1 if we
4394 can, 0 if we can't. Set the output range into the specified parameters. */
4396 bool
4397 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4398 tree high0, int in1_p, tree low1, tree high1)
4400 int no_overlap;
4401 int subset;
4402 int temp;
4403 tree tem;
4404 int in_p;
4405 tree low, high;
4406 int lowequal = ((low0 == 0 && low1 == 0)
4407 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4408 low0, 0, low1, 0)));
4409 int highequal = ((high0 == 0 && high1 == 0)
4410 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4411 high0, 1, high1, 1)));
4413 /* Make range 0 be the range that starts first, or ends last if they
4414 start at the same value. Swap them if it isn't. */
4415 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4416 low0, 0, low1, 0))
4417 || (lowequal
4418 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4419 high1, 1, high0, 1))))
4421 temp = in0_p, in0_p = in1_p, in1_p = temp;
4422 tem = low0, low0 = low1, low1 = tem;
4423 tem = high0, high0 = high1, high1 = tem;
4426 /* Now flag two cases, whether the ranges are disjoint or whether the
4427 second range is totally subsumed in the first. Note that the tests
4428 below are simplified by the ones above. */
4429 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4430 high0, 1, low1, 0));
4431 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4432 high1, 1, high0, 1));
4434 /* We now have four cases, depending on whether we are including or
4435 excluding the two ranges. */
4436 if (in0_p && in1_p)
4438 /* If they don't overlap, the result is false. If the second range
4439 is a subset it is the result. Otherwise, the range is from the start
4440 of the second to the end of the first. */
4441 if (no_overlap)
4442 in_p = 0, low = high = 0;
4443 else if (subset)
4444 in_p = 1, low = low1, high = high1;
4445 else
4446 in_p = 1, low = low1, high = high0;
4449 else if (in0_p && ! in1_p)
4451 /* If they don't overlap, the result is the first range. If they are
4452 equal, the result is false. If the second range is a subset of the
4453 first, and the ranges begin at the same place, we go from just after
4454 the end of the second range to the end of the first. If the second
4455 range is not a subset of the first, or if it is a subset and both
4456 ranges end at the same place, the range starts at the start of the
4457 first range and ends just before the second range.
4458 Otherwise, we can't describe this as a single range. */
4459 if (no_overlap)
4460 in_p = 1, low = low0, high = high0;
4461 else if (lowequal && highequal)
4462 in_p = 0, low = high = 0;
4463 else if (subset && lowequal)
4465 low = range_successor (high1);
4466 high = high0;
4467 in_p = 1;
4468 if (low == 0)
4470 /* We are in the weird situation where high0 > high1 but
4471 high1 has no successor. Punt. */
4472 return 0;
4475 else if (! subset || highequal)
4477 low = low0;
4478 high = range_predecessor (low1);
4479 in_p = 1;
4480 if (high == 0)
4482 /* low0 < low1 but low1 has no predecessor. Punt. */
4483 return 0;
4486 else
4487 return 0;
4490 else if (! in0_p && in1_p)
4492 /* If they don't overlap, the result is the second range. If the second
4493 is a subset of the first, the result is false. Otherwise,
4494 the range starts just after the first range and ends at the
4495 end of the second. */
4496 if (no_overlap)
4497 in_p = 1, low = low1, high = high1;
4498 else if (subset || highequal)
4499 in_p = 0, low = high = 0;
4500 else
4502 low = range_successor (high0);
4503 high = high1;
4504 in_p = 1;
4505 if (low == 0)
4507 /* high1 > high0 but high0 has no successor. Punt. */
4508 return 0;
4513 else
4515 /* The case where we are excluding both ranges. Here the complex case
4516 is if they don't overlap. In that case, the only time we have a
4517 range is if they are adjacent. If the second is a subset of the
4518 first, the result is the first. Otherwise, the range to exclude
4519 starts at the beginning of the first range and ends at the end of the
4520 second. */
4521 if (no_overlap)
4523 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4524 range_successor (high0),
4525 1, low1, 0)))
4526 in_p = 0, low = low0, high = high1;
4527 else
4529 /* Canonicalize - [min, x] into - [-, x]. */
4530 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4531 switch (TREE_CODE (TREE_TYPE (low0)))
4533 case ENUMERAL_TYPE:
4534 if (TYPE_PRECISION (TREE_TYPE (low0))
4535 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4536 break;
4537 /* FALLTHROUGH */
4538 case INTEGER_TYPE:
4539 if (tree_int_cst_equal (low0,
4540 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4541 low0 = 0;
4542 break;
4543 case POINTER_TYPE:
4544 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4545 && integer_zerop (low0))
4546 low0 = 0;
4547 break;
4548 default:
4549 break;
4552 /* Canonicalize - [x, max] into - [x, -]. */
4553 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4554 switch (TREE_CODE (TREE_TYPE (high1)))
4556 case ENUMERAL_TYPE:
4557 if (TYPE_PRECISION (TREE_TYPE (high1))
4558 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4559 break;
4560 /* FALLTHROUGH */
4561 case INTEGER_TYPE:
4562 if (tree_int_cst_equal (high1,
4563 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4564 high1 = 0;
4565 break;
4566 case POINTER_TYPE:
4567 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4568 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4569 high1, 1,
4570 integer_one_node, 1)))
4571 high1 = 0;
4572 break;
4573 default:
4574 break;
4577 /* The ranges might also be adjacent between the maximum and
4578 minimum values of the given type. For
4579 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4580 return + [x + 1, y - 1]. */
4581 if (low0 == 0 && high1 == 0)
4583 low = range_successor (high0);
4584 high = range_predecessor (low1);
4585 if (low == 0 || high == 0)
4586 return 0;
4588 in_p = 1;
4590 else
4591 return 0;
4594 else if (subset)
4595 in_p = 0, low = low0, high = high0;
4596 else
4597 in_p = 0, low = low0, high = high1;
4600 *pin_p = in_p, *plow = low, *phigh = high;
4601 return 1;
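/* Illustrative examples, added for exposition and not part of the
   original source: merging + [2, 5] with + [4, 10] (both IN_P set,
   the "and" case) overlaps without subsumption and yields + [4, 5];
   merging + [2, 5] with + [7, 10] sets NO_OVERLAP and yields the
   always-false range - [-, -], i.e. IN_P == 0 with both bounds 0.  */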
4605 /* Subroutine of fold, looking inside expressions of the form
4606 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4607 of the COND_EXPR. This function is being used also to optimize
4608 A op B ? C : A, by reversing the comparison first.
4610 Return a folded expression whose code is not a COND_EXPR
4611 anymore, or NULL_TREE if no folding opportunity is found. */
4613 static tree
4614 fold_cond_expr_with_comparison (location_t loc, tree type,
4615 tree arg0, tree arg1, tree arg2)
4617 enum tree_code comp_code = TREE_CODE (arg0);
4618 tree arg00 = TREE_OPERAND (arg0, 0);
4619 tree arg01 = TREE_OPERAND (arg0, 1);
4620 tree arg1_type = TREE_TYPE (arg1);
4621 tree tem;
4623 STRIP_NOPS (arg1);
4624 STRIP_NOPS (arg2);
4626 /* If we have A op 0 ? A : -A, consider applying the following
4627 transformations:
4629 A == 0? A : -A same as -A
4630 A != 0? A : -A same as A
4631 A >= 0? A : -A same as abs (A)
4632 A > 0? A : -A same as abs (A)
4633 A <= 0? A : -A same as -abs (A)
4634 A < 0? A : -A same as -abs (A)
4636 None of these transformations work for modes with signed
4637 zeros. If A is +/-0, the first two transformations will
4638 change the sign of the result (from +0 to -0, or vice
4639 versa). The last four will fix the sign of the result,
4640 even though the original expressions could be positive or
4641 negative, depending on the sign of A.
4643 Note that all these transformations are correct if A is
4644 NaN, since the two alternatives (A and -A) are also NaNs. */
4645 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4646 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4647 ? real_zerop (arg01)
4648 : integer_zerop (arg01))
4649 && ((TREE_CODE (arg2) == NEGATE_EXPR
4650 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4651 /* In the case that A is of the form X-Y, '-A' (arg2) may
4652 have already been folded to Y-X; check for that. */
4653 || (TREE_CODE (arg1) == MINUS_EXPR
4654 && TREE_CODE (arg2) == MINUS_EXPR
4655 && operand_equal_p (TREE_OPERAND (arg1, 0),
4656 TREE_OPERAND (arg2, 1), 0)
4657 && operand_equal_p (TREE_OPERAND (arg1, 1),
4658 TREE_OPERAND (arg2, 0), 0))))
4659 switch (comp_code)
4661 case EQ_EXPR:
4662 case UNEQ_EXPR:
4663 tem = fold_convert_loc (loc, arg1_type, arg1);
4664 return pedantic_non_lvalue_loc (loc,
4665 fold_convert_loc (loc, type,
4666 negate_expr (tem)));
4667 case NE_EXPR:
4668 case LTGT_EXPR:
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4670 case UNGE_EXPR:
4671 case UNGT_EXPR:
4672 if (flag_trapping_math)
4673 break;
4674 /* Fall through. */
4675 case GE_EXPR:
4676 case GT_EXPR:
4677 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4678 arg1 = fold_convert_loc (loc, signed_type_for
4679 (TREE_TYPE (arg1)), arg1);
4680 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4681 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4682 case UNLE_EXPR:
4683 case UNLT_EXPR:
4684 if (flag_trapping_math)
4685 break;
4686 case LE_EXPR:
4687 case LT_EXPR:
4688 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4689 arg1 = fold_convert_loc (loc, signed_type_for
4690 (TREE_TYPE (arg1)), arg1);
4691 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4692 return negate_expr (fold_convert_loc (loc, type, tem));
4693 default:
4694 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4695 break;
4698 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4699 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4700 both transformations are correct when A is NaN: A != 0
4701 is then true, and A == 0 is false. */
4703 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4704 && integer_zerop (arg01) && integer_zerop (arg2))
4706 if (comp_code == NE_EXPR)
4707 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4708 else if (comp_code == EQ_EXPR)
4709 return build_zero_cst (type);
4712 /* Try some transformations of A op B ? A : B.
4714 A == B? A : B same as B
4715 A != B? A : B same as A
4716 A >= B? A : B same as max (A, B)
4717 A > B? A : B same as max (B, A)
4718 A <= B? A : B same as min (A, B)
4719 A < B? A : B same as min (B, A)
4721 As above, these transformations don't work in the presence
4722 of signed zeros. For example, if A and B are zeros of
4723 opposite sign, the first two transformations will change
4724 the sign of the result. In the last four, the original
4725 expressions give different results for (A=+0, B=-0) and
4726 (A=-0, B=+0), but the transformed expressions do not.
4728 The first two transformations are correct if either A or B
4729 is a NaN. In the first transformation, the condition will
4730 be false, and B will indeed be chosen. In the case of the
4731 second transformation, the condition A != B will be true,
4732 and A will be chosen.
4734 The conversions to max() and min() are not correct if B is
4735 a number and A is not. The conditions in the original
4736 expressions will be false, so all four give B. The min()
4737 and max() versions would give a NaN instead. */
4738 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4739 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4740 /* Avoid these transformations if the COND_EXPR may be used
4741 as an lvalue in the C++ front-end. PR c++/19199. */
4742 && (in_gimple_form
4743 || VECTOR_TYPE_P (type)
4744 || (strcmp (lang_hooks.name, "GNU C++") != 0
4745 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4746 || ! maybe_lvalue_p (arg1)
4747 || ! maybe_lvalue_p (arg2)))
4749 tree comp_op0 = arg00;
4750 tree comp_op1 = arg01;
4751 tree comp_type = TREE_TYPE (comp_op0);
4753 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4754 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4756 comp_type = type;
4757 comp_op0 = arg1;
4758 comp_op1 = arg2;
4761 switch (comp_code)
4763 case EQ_EXPR:
4764 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4765 case NE_EXPR:
4766 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4767 case LE_EXPR:
4768 case LT_EXPR:
4769 case UNLE_EXPR:
4770 case UNLT_EXPR:
4771 /* In C++ a ?: expression can be an lvalue, so put the
4772 operand which will be used if they are equal first
4773 so that we can convert this back to the
4774 corresponding COND_EXPR. */
4775 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4777 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4778 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4779 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4780 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4781 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4782 comp_op1, comp_op0);
4783 return pedantic_non_lvalue_loc (loc,
4784 fold_convert_loc (loc, type, tem));
4786 break;
4787 case GE_EXPR:
4788 case GT_EXPR:
4789 case UNGE_EXPR:
4790 case UNGT_EXPR:
4791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4793 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4794 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4795 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4796 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4797 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4798 comp_op1, comp_op0);
4799 return pedantic_non_lvalue_loc (loc,
4800 fold_convert_loc (loc, type, tem));
4802 break;
4803 case UNEQ_EXPR:
4804 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4805 return pedantic_non_lvalue_loc (loc,
4806 fold_convert_loc (loc, type, arg2));
4807 break;
4808 case LTGT_EXPR:
4809 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4810 return pedantic_non_lvalue_loc (loc,
4811 fold_convert_loc (loc, type, arg1));
4812 break;
4813 default:
4814 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4815 break;
4819 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4820 we might still be able to simplify this. For example,
4821 if C1 is one less or one more than C2, this might have started
4822 out as a MIN or MAX and been transformed by this function.
4823 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4825 if (INTEGRAL_TYPE_P (type)
4826 && TREE_CODE (arg01) == INTEGER_CST
4827 && TREE_CODE (arg2) == INTEGER_CST)
4828 switch (comp_code)
4830 case EQ_EXPR:
4831 if (TREE_CODE (arg1) == INTEGER_CST)
4832 break;
4833 /* We can replace A with C1 in this case. */
4834 arg1 = fold_convert_loc (loc, type, arg01);
4835 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4837 case LT_EXPR:
4838 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4839 MIN_EXPR, to preserve the signedness of the comparison. */
4840 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4841 OEP_ONLY_CONST)
4842 && operand_equal_p (arg01,
4843 const_binop (PLUS_EXPR, arg2,
4844 build_int_cst (type, 1)),
4845 OEP_ONLY_CONST))
4847 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4848 fold_convert_loc (loc, TREE_TYPE (arg00),
4849 arg2));
4850 return pedantic_non_lvalue_loc (loc,
4851 fold_convert_loc (loc, type, tem));
4853 break;
4855 case LE_EXPR:
4856 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4857 as above. */
4858 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4859 OEP_ONLY_CONST)
4860 && operand_equal_p (arg01,
4861 const_binop (MINUS_EXPR, arg2,
4862 build_int_cst (type, 1)),
4863 OEP_ONLY_CONST))
4865 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4866 fold_convert_loc (loc, TREE_TYPE (arg00),
4867 arg2));
4868 return pedantic_non_lvalue_loc (loc,
4869 fold_convert_loc (loc, type, tem));
4871 break;
4873 case GT_EXPR:
4874 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4875 MAX_EXPR, to preserve the signedness of the comparison. */
4876 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4877 OEP_ONLY_CONST)
4878 && operand_equal_p (arg01,
4879 const_binop (MINUS_EXPR, arg2,
4880 build_int_cst (type, 1)),
4881 OEP_ONLY_CONST))
4883 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4884 fold_convert_loc (loc, TREE_TYPE (arg00),
4885 arg2));
4886 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4888 break;
4890 case GE_EXPR:
4891 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4892 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (PLUS_EXPR, arg2,
4896 build_int_cst (type, 1)),
4897 OEP_ONLY_CONST))
4899 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4900 fold_convert_loc (loc, TREE_TYPE (arg00),
4901 arg2));
4902 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4904 break;
4905 case NE_EXPR:
4906 break;
4907 default:
4908 gcc_unreachable ();
4911 return NULL_TREE;
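/* Illustrative examples, added for exposition and not part of the
   original source: when signed zeros need not be honored, X > 0 ? X : -X
   folds to ABS_EXPR <X> and X <= 0 ? X : -X to the negation of that,
   per the first table above; and X >= Y ? X : Y becomes MAX_EXPR <X, Y>
   when NaNs need not be honored and the lvalue restrictions above are
   satisfied.  */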
4916 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4917 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4918 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4919 false) >= 2)
4920 #endif
4922 /* EXP is some logical combination of boolean tests. See if we can
4923 merge it into some range test. Return the new tree if so. */
4925 static tree
4926 fold_range_test (location_t loc, enum tree_code code, tree type,
4927 tree op0, tree op1)
4929 int or_op = (code == TRUTH_ORIF_EXPR
4930 || code == TRUTH_OR_EXPR);
4931 int in0_p, in1_p, in_p;
4932 tree low0, low1, low, high0, high1, high;
4933 bool strict_overflow_p = false;
4934 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4935 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4936 tree tem;
4937 const char * const warnmsg = G_("assuming signed overflow does not occur "
4938 "when simplifying range test");
4940 /* If this is an OR operation, invert both sides; we will invert
4941 again at the end. */
4942 if (or_op)
4943 in0_p = ! in0_p, in1_p = ! in1_p;
4945 /* If both expressions are the same, if we can merge the ranges, and we
4946 can build the range test, return it or it inverted. If one of the
4947 ranges is always true or always false, consider it to be the same
4948 expression as the other. */
4949 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4950 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4951 in1_p, low1, high1)
4952 && 0 != (tem = (build_range_check (loc, type,
4953 lhs != 0 ? lhs
4954 : rhs != 0 ? rhs : integer_zero_node,
4955 in_p, low, high))))
4957 if (strict_overflow_p)
4958 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4959 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4962 /* On machines where the branch cost is expensive, if this is a
4963 short-circuited branch and the underlying object on both sides
4964 is the same, make a non-short-circuit operation. */
4965 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4966 && lhs != 0 && rhs != 0
4967 && (code == TRUTH_ANDIF_EXPR
4968 || code == TRUTH_ORIF_EXPR)
4969 && operand_equal_p (lhs, rhs, 0))
4971 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4972 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4973 which cases we can't do this. */
4974 if (simple_operand_p (lhs))
4975 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4977 type, op0, op1);
4979 else if (!lang_hooks.decls.global_bindings_p ()
4980 && !CONTAINS_PLACEHOLDER_P (lhs))
4982 tree common = save_expr (lhs);
4984 if (0 != (lhs = build_range_check (loc, type, common,
4985 or_op ? ! in0_p : in0_p,
4986 low0, high0))
4987 && (0 != (rhs = build_range_check (loc, type, common,
4988 or_op ? ! in1_p : in1_p,
4989 low1, high1))))
4991 if (strict_overflow_p)
4992 fold_overflow_warning (warnmsg,
4993 WARN_STRICT_OVERFLOW_COMPARISON);
4994 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4995 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4996 type, lhs, rhs);
5001 return 0;
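/* Illustrative example, added for exposition and not part of the
   original source: for CODE == TRUTH_ANDIF_EXPR with OP0 = (ch >= '0')
   and OP1 = (ch <= '9'), make_range yields + [48, -] and + [-, 57]
   over the same variable, merge_ranges combines them into + [48, 57],
   and build_range_check then emits a single test, roughly
   (unsigned) (ch - '0') <= 9.  */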
5004 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5005 P-bit value. Arrange things so the extra bits will be set to zero if and
5006 only if C is signed-extended to its full width. If MASK is nonzero,
5007 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5009 static tree
5010 unextend (tree c, int p, int unsignedp, tree mask)
5012 tree type = TREE_TYPE (c);
5013 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5014 tree temp;
5016 if (p == modesize || unsignedp)
5017 return c;
5019 /* We work by getting just the sign bit into the low-order bit, then
5020 into the high-order bit, then sign-extend. We then XOR that value
5021 with C. */
5022 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5023 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5025 /* We must use a signed type in order to get an arithmetic right shift.
5026 However, we must also avoid introducing accidental overflows, so that
5027 a subsequent call to integer_zerop will work. Hence we must
5028 do the type conversion here. At this point, the constant is either
5029 zero or one, and the conversion to a signed type can never overflow.
5030 We could get an overflow if this conversion is done anywhere else. */
5031 if (TYPE_UNSIGNED (type))
5032 temp = fold_convert (signed_type_for (type), temp);
5034 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5035 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5036 if (mask != 0)
5037 temp = const_binop (BIT_AND_EXPR, temp,
5038 fold_convert (TREE_TYPE (c), mask));
5039 /* If necessary, convert the type back to match the type of C. */
5040 if (TYPE_UNSIGNED (type))
5041 temp = fold_convert (type, temp);
5043 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
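/* Illustrative trace, added for exposition and not part of the
   original source, with P == 8, a 32-bit mode and MASK == 0: for
   C == 0xfffffff0 (-16 correctly sign-extended from 8 bits), TEMP
   becomes 0xffffff00 and the final XOR yields 0x000000f0, so the
   extra bits are zero; for C == 0x000000f0, whose 8-bit sign bit is
   set but which is not sign-extended, the XOR instead yields
   0xfffffff0, flagging the mismatch exactly as the comment above
   describes.  */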
5046 /* For an expression that has the form
5047 (A && B) || ~B
5048 or
5049 (A || B) && ~B,
5050 we can drop one of the inner expressions and simplify to
5051 A || ~B
5052 or
5053 A && ~B
5054 LOC is the location of the resulting expression. OP is the inner
5055 logical operation (the left-hand side in the examples above), while CMPOP
5056 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5057 removing a condition that guards another, as in
5058 (A != NULL && A->...) || A == NULL
5059 which we must not transform. If RHS_ONLY is true, only eliminate the
5060 right-most operand of the inner logical operation. */
5062 static tree
5063 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5064 bool rhs_only)
5066 tree type = TREE_TYPE (cmpop);
5067 enum tree_code code = TREE_CODE (cmpop);
5068 enum tree_code truthop_code = TREE_CODE (op);
5069 tree lhs = TREE_OPERAND (op, 0);
5070 tree rhs = TREE_OPERAND (op, 1);
5071 tree orig_lhs = lhs, orig_rhs = rhs;
5072 enum tree_code rhs_code = TREE_CODE (rhs);
5073 enum tree_code lhs_code = TREE_CODE (lhs);
5074 enum tree_code inv_code;
5076 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5077 return NULL_TREE;
5079 if (TREE_CODE_CLASS (code) != tcc_comparison)
5080 return NULL_TREE;
5082 if (rhs_code == truthop_code)
5084 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5085 if (newrhs != NULL_TREE)
5087 rhs = newrhs;
5088 rhs_code = TREE_CODE (rhs);
5091 if (lhs_code == truthop_code && !rhs_only)
5093 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5094 if (newlhs != NULL_TREE)
5096 lhs = newlhs;
5097 lhs_code = TREE_CODE (lhs);
5101 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5102 if (inv_code == rhs_code
5103 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return lhs;
5106 if (!rhs_only && inv_code == lhs_code
5107 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5108 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5109 return rhs;
5110 if (rhs != orig_rhs || lhs != orig_lhs)
5111 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5112 lhs, rhs);
5113 return NULL_TREE;
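/* Illustrative example, added for exposition and not part of the
   original source: for OP = (a > 0 && b < 5) and CMPOP = (b >= 5),
   the inverted comparison LT_EXPR matches OP's right arm, so that arm
   is dropped and the caller can fold the whole expression to
   a > 0 || b >= 5.  The RHS_ONLY restriction keeps guarded forms like
   (p != 0 && p->x) || p == 0 intact, as explained above.  */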
5116 /* Find ways of folding logical expressions of LHS and RHS:
5117 Try to merge two comparisons to the same innermost item.
5118 Look for range tests like "ch >= '0' && ch <= '9'".
5119 Look for combinations of simple terms on machines with expensive branches
5120 and evaluate the RHS unconditionally.
5122 For example, if we have p->a == 2 && p->b == 4 and we can make an
5123 object large enough to span both A and B, we can do this with a comparison
5124 against the object ANDed with the a mask.
5126 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5127 operations to do this with one comparison.
5129 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5130 function and the one above.
5132 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5133 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5135 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5136 two operands.
5138 We return the simplified tree or 0 if no optimization is possible. */
5140 static tree
5141 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5142 tree lhs, tree rhs)
5144 /* If this is the "or" of two comparisons, we can do something if
5145 the comparisons are NE_EXPR. If this is the "and", we can do something
5146 if the comparisons are EQ_EXPR. I.e.,
5147 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5149 WANTED_CODE is this operation code. For single bit fields, we can
5150 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5151 comparison for one-bit fields. */
5153 enum tree_code wanted_code;
5154 enum tree_code lcode, rcode;
5155 tree ll_arg, lr_arg, rl_arg, rr_arg;
5156 tree ll_inner, lr_inner, rl_inner, rr_inner;
5157 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5158 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5159 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5160 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5161 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5162 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5163 enum machine_mode lnmode, rnmode;
5164 tree ll_mask, lr_mask, rl_mask, rr_mask;
5165 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5166 tree l_const, r_const;
5167 tree lntype, rntype, result;
5168 HOST_WIDE_INT first_bit, end_bit;
5169 int volatilep;
5171 /* Start by getting the comparison codes. Fail if anything is volatile.
5172 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5173 it were surrounded with a NE_EXPR. */
5175 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5176 return 0;
5178 lcode = TREE_CODE (lhs);
5179 rcode = TREE_CODE (rhs);
5181 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5183 lhs = build2 (NE_EXPR, truth_type, lhs,
5184 build_int_cst (TREE_TYPE (lhs), 0));
5185 lcode = NE_EXPR;
5188 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5190 rhs = build2 (NE_EXPR, truth_type, rhs,
5191 build_int_cst (TREE_TYPE (rhs), 0));
5192 rcode = NE_EXPR;
5195 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5196 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5197 return 0;
5199 ll_arg = TREE_OPERAND (lhs, 0);
5200 lr_arg = TREE_OPERAND (lhs, 1);
5201 rl_arg = TREE_OPERAND (rhs, 0);
5202 rr_arg = TREE_OPERAND (rhs, 1);
5204 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5205 if (simple_operand_p (ll_arg)
5206 && simple_operand_p (lr_arg))
5208 if (operand_equal_p (ll_arg, rl_arg, 0)
5209 && operand_equal_p (lr_arg, rr_arg, 0))
5211 result = combine_comparisons (loc, code, lcode, rcode,
5212 truth_type, ll_arg, lr_arg);
5213 if (result)
5214 return result;
5216 else if (operand_equal_p (ll_arg, rr_arg, 0)
5217 && operand_equal_p (lr_arg, rl_arg, 0))
5219 result = combine_comparisons (loc, code, lcode,
5220 swap_tree_comparison (rcode),
5221 truth_type, ll_arg, lr_arg);
5222 if (result)
5223 return result;
5227 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5228 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5230 /* If the RHS can be evaluated unconditionally and its operands are
5231 simple, it wins to evaluate the RHS unconditionally on machines
5232 with expensive branches. In this case, this isn't a comparison
5233 that can be merged. */
5235 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5236 false) >= 2
5237 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5238 && simple_operand_p (rl_arg)
5239 && simple_operand_p (rr_arg))
5241 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5242 if (code == TRUTH_OR_EXPR
5243 && lcode == NE_EXPR && integer_zerop (lr_arg)
5244 && rcode == NE_EXPR && integer_zerop (rr_arg)
5245 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5246 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5247 return build2_loc (loc, NE_EXPR, truth_type,
5248 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5249 ll_arg, rl_arg),
5250 build_int_cst (TREE_TYPE (ll_arg), 0));
5252 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5253 if (code == TRUTH_AND_EXPR
5254 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5255 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5256 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5257 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5258 return build2_loc (loc, EQ_EXPR, truth_type,
5259 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5260 ll_arg, rl_arg),
5261 build_int_cst (TREE_TYPE (ll_arg), 0));
5264 /* See if the comparisons can be merged. Then get all the parameters for
5265 each side. */
5267 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5268 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5269 return 0;
5271 volatilep = 0;
5272 ll_inner = decode_field_reference (loc, ll_arg,
5273 &ll_bitsize, &ll_bitpos, &ll_mode,
5274 &ll_unsignedp, &volatilep, &ll_mask,
5275 &ll_and_mask);
5276 lr_inner = decode_field_reference (loc, lr_arg,
5277 &lr_bitsize, &lr_bitpos, &lr_mode,
5278 &lr_unsignedp, &volatilep, &lr_mask,
5279 &lr_and_mask);
5280 rl_inner = decode_field_reference (loc, rl_arg,
5281 &rl_bitsize, &rl_bitpos, &rl_mode,
5282 &rl_unsignedp, &volatilep, &rl_mask,
5283 &rl_and_mask);
5284 rr_inner = decode_field_reference (loc, rr_arg,
5285 &rr_bitsize, &rr_bitpos, &rr_mode,
5286 &rr_unsignedp, &volatilep, &rr_mask,
5287 &rr_and_mask);
5289 /* The inner operation on the lhs of each comparison must be the same
5290 if we are to be able to do anything.
5291 Then see if we have constants. If not, the same must be true for
5292 the rhs's. */
5293 if (volatilep || ll_inner == 0 || rl_inner == 0
5294 || ! operand_equal_p (ll_inner, rl_inner, 0))
5295 return 0;
5297 if (TREE_CODE (lr_arg) == INTEGER_CST
5298 && TREE_CODE (rr_arg) == INTEGER_CST)
5299 l_const = lr_arg, r_const = rr_arg;
5300 else if (lr_inner == 0 || rr_inner == 0
5301 || ! operand_equal_p (lr_inner, rr_inner, 0))
5302 return 0;
5303 else
5304 l_const = r_const = 0;
5306 /* If either comparison code is not correct for our logical operation,
5307 fail. However, we can convert a one-bit comparison against zero into
5308 the opposite comparison against that bit being set in the field. */
5310 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5311 if (lcode != wanted_code)
5313 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5315 /* Make the left operand unsigned, since we are only interested
5316 in the value of one bit. Otherwise we are doing the wrong
5317 thing below. */
5318 ll_unsignedp = 1;
5319 l_const = ll_mask;
5321 else
5322 return 0;
5325 /* This is analogous to the code for l_const above. */
5326 if (rcode != wanted_code)
5328 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5330 rl_unsignedp = 1;
5331 r_const = rl_mask;
5333 else
5334 return 0;
5337 /* See if we can find a mode that contains both fields being compared on
5338 the left. If we can't, fail. Otherwise, update all constants and masks
5339 to be relative to a field of that size. */
5340 first_bit = MIN (ll_bitpos, rl_bitpos);
5341 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5342 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5343 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5344 volatilep);
5345 if (lnmode == VOIDmode)
5346 return 0;
5348 lnbitsize = GET_MODE_BITSIZE (lnmode);
5349 lnbitpos = first_bit & ~ (lnbitsize - 1);
5350 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5351 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5353 if (BYTES_BIG_ENDIAN)
5355 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5356 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5359 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5360 size_int (xll_bitpos));
5361 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5362 size_int (xrl_bitpos));
5364 if (l_const)
5366 l_const = fold_convert_loc (loc, lntype, l_const);
5367 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5368 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5369 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5370 fold_build1_loc (loc, BIT_NOT_EXPR,
5371 lntype, ll_mask))))
5373 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5375 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5378 if (r_const)
5380 r_const = fold_convert_loc (loc, lntype, r_const);
5381 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5382 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5383 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5384 fold_build1_loc (loc, BIT_NOT_EXPR,
5385 lntype, rl_mask))))
5387 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5389 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5393 /* If the right sides are not constant, do the same for them. Also,
5394 disallow this optimization if a size or signedness mismatch occurs
5395 between the left and right sides. */
5396 if (l_const == 0)
5398 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5399 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5400 /* Make sure the two fields on the right
5401 correspond to the left without being swapped. */
5402 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5403 return 0;
5405 first_bit = MIN (lr_bitpos, rr_bitpos);
5406 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5407 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5408 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5409 volatilep);
5410 if (rnmode == VOIDmode)
5411 return 0;
5413 rnbitsize = GET_MODE_BITSIZE (rnmode);
5414 rnbitpos = first_bit & ~ (rnbitsize - 1);
5415 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5416 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5418 if (BYTES_BIG_ENDIAN)
5420 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5421 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5424 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5425 rntype, lr_mask),
5426 size_int (xlr_bitpos));
5427 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5428 rntype, rr_mask),
5429 size_int (xrr_bitpos));
5431 /* Make a mask that corresponds to both fields being compared.
5432 Do this for both items being compared. If the operands are the
5433 same size and the bits being compared are in the same position
5434 then we can do this by masking both and comparing the masked
5435 results. */
5436 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5437 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5438 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5440 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5441 ll_unsignedp || rl_unsignedp);
5442 if (! all_ones_mask_p (ll_mask, lnbitsize))
5443 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5445 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5446 lr_unsignedp || rr_unsignedp);
5447 if (! all_ones_mask_p (lr_mask, rnbitsize))
5448 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5450 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5453 /* There is still another way we can do something: If both pairs of
5454 fields being compared are adjacent, we may be able to make a wider
5455 field containing them both.
5457 Note that we still must mask the lhs/rhs expressions. Furthermore,
5458 the mask must be shifted to account for the shift done by
5459 make_bit_field_ref. */
5460 if ((ll_bitsize + ll_bitpos == rl_bitpos
5461 && lr_bitsize + lr_bitpos == rr_bitpos)
5462 || (ll_bitpos == rl_bitpos + rl_bitsize
5463 && lr_bitpos == rr_bitpos + rr_bitsize))
5465 tree type;
5467 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5468 ll_bitsize + rl_bitsize,
5469 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5470 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5471 lr_bitsize + rr_bitsize,
5472 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5474 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5475 size_int (MIN (xll_bitpos, xrl_bitpos)));
5476 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5477 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5479 /* Convert to the smaller type before masking out unwanted bits. */
5480 type = lntype;
5481 if (lntype != rntype)
5483 if (lnbitsize > rnbitsize)
5485 lhs = fold_convert_loc (loc, rntype, lhs);
5486 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5487 type = rntype;
5489 else if (lnbitsize < rnbitsize)
5491 rhs = fold_convert_loc (loc, lntype, rhs);
5492 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5493 type = lntype;
5497 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5498 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5500 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5501 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5503 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5506 return 0;
5509 /* Handle the case of comparisons with constants. If there is something in
5510 common between the masks, those bits of the constants must be the same.
5511 If not, the condition is always false. Test for this to avoid generating
5512 incorrect code below. */
5513 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5514 if (! integer_zerop (result)
5515 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5516 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5518 if (wanted_code == NE_EXPR)
5520 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5521 return constant_boolean_node (true, truth_type);
5523 else
5525 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5526 return constant_boolean_node (false, truth_type);
5530 /* Construct the expression we will return. First get the component
5531 reference we will make. Unless the mask is all ones for the width of
5532 that field, perform the mask operation. Then compare with the
5533 merged constant. */
5534 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5535 ll_unsignedp || rl_unsignedp);
5537 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5538 if (! all_ones_mask_p (ll_mask, lnbitsize))
5539 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5541 return build2_loc (loc, wanted_code, truth_type, result,
5542 const_binop (BIT_IOR_EXPR, l_const, r_const));
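#if 0
/* A minimal sketch, illustrative only (hypothetical struct and
   function, compiled out): the source-level effect of the merge
   above.  Both bitfields live in the same underlying word, so the
   two tests become one load, one AND with the combined mask, and
   one compare against the merged constant.  */
struct two_fields { unsigned a : 4; unsigned b : 4; };

static int
merged_compare_sketch (struct two_fields s)
{
  return s.a == 1 && s.b == 2;
}
#endif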
5545 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5546 constant. */
5548 static tree
5549 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5550 tree op0, tree op1)
5552 tree arg0 = op0;
5553 enum tree_code op_code;
5554 tree comp_const;
5555 tree minmax_const;
5556 int consts_equal, consts_lt;
5557 tree inner;
5559 STRIP_SIGN_NOPS (arg0);
5561 op_code = TREE_CODE (arg0);
5562 minmax_const = TREE_OPERAND (arg0, 1);
5563 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5564 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5565 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5566 inner = TREE_OPERAND (arg0, 0);
5568 /* If something does not permit us to optimize, return NULL_TREE. */
5569 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5570 || TREE_CODE (comp_const) != INTEGER_CST
5571 || TREE_OVERFLOW (comp_const)
5572 || TREE_CODE (minmax_const) != INTEGER_CST
5573 || TREE_OVERFLOW (minmax_const))
5574 return NULL_TREE;
5576 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5577 and GT_EXPR, doing the rest with recursive calls using logical
5578 simplifications. */
5579 switch (code)
5581 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5583 tree tem
5584 = optimize_minmax_comparison (loc,
5585 invert_tree_comparison (code, false),
5586 type, op0, op1);
5587 if (tem)
5588 return invert_truthvalue_loc (loc, tem);
5589 return NULL_TREE;
5592 case GE_EXPR:
5593 return
5594 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5595 optimize_minmax_comparison
5596 (loc, EQ_EXPR, type, arg0, comp_const),
5597 optimize_minmax_comparison
5598 (loc, GT_EXPR, type, arg0, comp_const));
5600 case EQ_EXPR:
5601 if (op_code == MAX_EXPR && consts_equal)
5602 /* MAX (X, 0) == 0 -> X <= 0 */
5603 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5605 else if (op_code == MAX_EXPR && consts_lt)
5606 /* MAX (X, 0) == 5 -> X == 5 */
5607 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5609 else if (op_code == MAX_EXPR)
5610 /* MAX (X, 0) == -1 -> false */
5611 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5613 else if (consts_equal)
5614 /* MIN (X, 0) == 0 -> X >= 0 */
5615 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5617 else if (consts_lt)
5618 /* MIN (X, 0) == 5 -> false */
5619 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5621 else
5622 /* MIN (X, 0) == -1 -> X == -1 */
5623 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5625 case GT_EXPR:
5626 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5627 /* MAX (X, 0) > 0 -> X > 0
5628 MAX (X, 0) > 5 -> X > 5 */
5629 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5631 else if (op_code == MAX_EXPR)
5632 /* MAX (X, 0) > -1 -> true */
5633 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5635 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5636 /* MIN (X, 0) > 0 -> false
5637 MIN (X, 0) > 5 -> false */
5638 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5640 else
5641 /* MIN (X, 0) > -1 -> X > -1 */
5642 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5644 default:
5645 return NULL_TREE;
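#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the MIN/MAX comparison rewrites above at the
   source level.  Each commented form is equivalent for all x.  */
static int
minmax_compare_sketch (int x)
{
  int a = MAX (x, 0) == 0;   /* folds to x <= 0 */
  int b = MAX (x, 0) > 5;    /* folds to x > 5 */
  int c = MIN (x, 0) > -1;   /* folds to x > -1 */
  return a + b + c;
}
#endif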
5649 /* T is an integer expression that is being multiplied or divided by, or
5650 taken modulo, a constant C (CODE says which operation and what kind of
5651 divide or modulus). See if we can eliminate that operation by folding it with
5652 other operations already in T. WIDE_TYPE, if non-null, is a type that
5653 should be used for the computation if wider than our type.
5655 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5656 (X * 2) + (Y * 4). We must, however, be assured that either the original
5657 expression would not overflow or that overflow is undefined for the type
5658 in the language in question.
5660 If we return a non-null expression, it is an equivalent form of the
5661 original computation, but need not be in the original type.
5663 We set *STRICT_OVERFLOW_P to true if the return value depends on
5664 signed overflow being undefined. Otherwise we do not change
5665 *STRICT_OVERFLOW_P. */
5667 static tree
5668 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5669 bool *strict_overflow_p)
5671 /* To avoid exponential search depth, refuse to allow recursion past
5672 three levels. Beyond that (1) it's highly unlikely that we'll find
5673 something interesting and (2) we've probably processed it before
5674 when we built the inner expression. */
5676 static int depth;
5677 tree ret;
5679 if (depth > 3)
5680 return NULL;
5682 depth++;
5683 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5684 depth--;
5686 return ret;
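#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the kind of rewrite extract_muldiv performs, per
   the comment above.  Valid when the inner multiplications cannot
   overflow or overflow is undefined.  */
static int
extract_muldiv_sketch (int x, int y)
{
  return (x * 8 + y * 16) / 4;   /* folds to x * 2 + y * 4 */
}
#endif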
5689 static tree
5690 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5691 bool *strict_overflow_p)
5693 tree type = TREE_TYPE (t);
5694 enum tree_code tcode = TREE_CODE (t);
5695 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5696 > GET_MODE_SIZE (TYPE_MODE (type)))
5697 ? wide_type : type);
5698 tree t1, t2;
5699 int same_p = tcode == code;
5700 tree op0 = NULL_TREE, op1 = NULL_TREE;
5701 bool sub_strict_overflow_p;
5703 /* Don't deal with constants of zero here; they confuse the code below. */
5704 if (integer_zerop (c))
5705 return NULL_TREE;
5707 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5708 op0 = TREE_OPERAND (t, 0);
5710 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5711 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5713 /* Note that we need not handle conditional operations here since fold
5714 already handles those cases. So just do arithmetic here. */
5715 switch (tcode)
5717 case INTEGER_CST:
5718 /* For a constant, we can always simplify if we are a multiply
5719 or (for divide and modulus) if it is a multiple of our constant. */
5720 if (code == MULT_EXPR
5721 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5722 return const_binop (code, fold_convert (ctype, t),
5723 fold_convert (ctype, c));
5724 break;
5726 CASE_CONVERT: case NON_LVALUE_EXPR:
5727 /* If op0 is an expression ... */
5728 if ((COMPARISON_CLASS_P (op0)
5729 || UNARY_CLASS_P (op0)
5730 || BINARY_CLASS_P (op0)
5731 || VL_EXP_CLASS_P (op0)
5732 || EXPRESSION_CLASS_P (op0))
5733 /* ... and has wrapping overflow, and its type is smaller
5734 than ctype, then we cannot pass through as widening. */
5735 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5736 && (TYPE_PRECISION (ctype)
5737 > TYPE_PRECISION (TREE_TYPE (op0))))
5738 /* ... or this is a truncation (t is narrower than op0),
5739 then we cannot pass through this narrowing. */
5740 || (TYPE_PRECISION (type)
5741 < TYPE_PRECISION (TREE_TYPE (op0)))
5742 /* ... or signedness changes for division or modulus,
5743 then we cannot pass through this conversion. */
5744 || (code != MULT_EXPR
5745 && (TYPE_UNSIGNED (ctype)
5746 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5747 /* ... or has undefined overflow while the type converted to
5748 has not, we cannot do the operation in the inner type
5749 as that would introduce undefined overflow. */
5750 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5751 && !TYPE_OVERFLOW_UNDEFINED (type))))
5752 break;
5754 /* Pass the constant down and see if we can make a simplification. If
5755 we can, replace this expression with the inner simplification for
5756 possible later conversion to our or some other type. */
5757 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5758 && TREE_CODE (t2) == INTEGER_CST
5759 && !TREE_OVERFLOW (t2)
5760 && (0 != (t1 = extract_muldiv (op0, t2, code,
5761 code == MULT_EXPR
5762 ? ctype : NULL_TREE,
5763 strict_overflow_p))))
5764 return t1;
5765 break;
5767 case ABS_EXPR:
5768 /* If widening the type changes it from signed to unsigned, then we
5769 must avoid building ABS_EXPR itself as unsigned. */
5770 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5772 tree cstype = (*signed_type_for) (ctype);
5773 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5774 != 0)
5776 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5777 return fold_convert (ctype, t1);
5779 break;
5781 /* If the constant is negative, we cannot simplify this. */
5782 if (tree_int_cst_sgn (c) == -1)
5783 break;
5784 /* FALLTHROUGH */
5785 case NEGATE_EXPR:
5786 /* For division and modulus, type can't be unsigned, as e.g.
5787 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5788 For signed types, even with wrapping overflow, this is fine. */
5789 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5790 break;
5791 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5792 != 0)
5793 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5794 break;
5796 case MIN_EXPR: case MAX_EXPR:
5797 /* If widening the type changes the signedness, then we can't perform
5798 this optimization as that changes the result. */
5799 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5800 break;
5802 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5803 sub_strict_overflow_p = false;
5804 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5805 &sub_strict_overflow_p)) != 0
5806 && (t2 = extract_muldiv (op1, c, code, wide_type,
5807 &sub_strict_overflow_p)) != 0)
5809 if (tree_int_cst_sgn (c) < 0)
5810 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5811 if (sub_strict_overflow_p)
5812 *strict_overflow_p = true;
5813 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5814 fold_convert (ctype, t2));
5816 break;
5818 case LSHIFT_EXPR: case RSHIFT_EXPR:
5819 /* If the second operand is constant, this is a multiplication
5820 or floor division by a power of two, so we can treat it that
5821 way unless the multiplier or divisor overflows. Signed
5822 left-shift overflow is implementation-defined rather than
5823 undefined in C90, so do not convert signed left shift into
5824 multiplication. */
5825 if (TREE_CODE (op1) == INTEGER_CST
5826 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5827 /* const_binop may not detect overflow correctly,
5828 so check for it explicitly here. */
5829 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5830 && TREE_INT_CST_HIGH (op1) == 0
5831 && 0 != (t1 = fold_convert (ctype,
5832 const_binop (LSHIFT_EXPR,
5833 size_one_node,
5834 op1)))
5835 && !TREE_OVERFLOW (t1))
5836 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5837 ? MULT_EXPR : FLOOR_DIV_EXPR,
5838 ctype,
5839 fold_convert (ctype, op0),
5840 t1),
5841 c, code, wide_type, strict_overflow_p);
5842 break;
5844 case PLUS_EXPR: case MINUS_EXPR:
5845 /* See if we can eliminate the operation on both sides. If we can, we
5846 can return a new PLUS or MINUS. If we can't, the only remaining
5847 cases where we can do anything are if the second operand is a
5848 constant. */
5849 sub_strict_overflow_p = false;
5850 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5851 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5852 if (t1 != 0 && t2 != 0
5853 && (code == MULT_EXPR
5854 /* If not multiplication, we can only do this if both operands
5855 are divisible by c. */
5856 || (multiple_of_p (ctype, op0, c)
5857 && multiple_of_p (ctype, op1, c))))
5859 if (sub_strict_overflow_p)
5860 *strict_overflow_p = true;
5861 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5862 fold_convert (ctype, t2));
5865 /* If this was a subtraction, negate OP1 and set it to be an addition.
5866 This simplifies the logic below. */
5867 if (tcode == MINUS_EXPR)
5869 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5870 /* If OP1 was not easily negatable, the constant may be OP0. */
5871 if (TREE_CODE (op0) == INTEGER_CST)
5873 tree tem = op0;
5874 op0 = op1;
5875 op1 = tem;
5876 tem = t1;
5877 t1 = t2;
5878 t2 = tem;
5882 if (TREE_CODE (op1) != INTEGER_CST)
5883 break;
5885 /* If either OP1 or C is negative, this optimization is not safe for
5886 some of the division and remainder types while for others we need
5887 to change the code. */
5888 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5890 if (code == CEIL_DIV_EXPR)
5891 code = FLOOR_DIV_EXPR;
5892 else if (code == FLOOR_DIV_EXPR)
5893 code = CEIL_DIV_EXPR;
5894 else if (code != MULT_EXPR
5895 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5896 break;
5899 /* If it's a multiply or a division/modulus operation of a multiple
5900 of our constant, do the operation and verify it doesn't overflow. */
5901 if (code == MULT_EXPR
5902 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5904 op1 = const_binop (code, fold_convert (ctype, op1),
5905 fold_convert (ctype, c));
5906 /* We allow the constant to overflow with wrapping semantics. */
5907 if (op1 == 0
5908 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5909 break;
5911 else
5912 break;
5914 /* If we have an unsigned type, we cannot widen the operation since it
5915 will change the result if the original computation overflowed. */
5916 if (TYPE_UNSIGNED (ctype) && ctype != type)
5917 break;
5919 /* If we were able to eliminate our operation from the first side,
5920 apply our operation to the second side and reform the PLUS. */
5921 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5924 /* The last case is if we are a multiply. In that case, we can
5925 apply the distributive law to commute the multiply and addition
5926 if the multiplication of the constants doesn't overflow
5927 and overflow is defined. With undefined overflow
5928 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5929 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5930 return fold_build2 (tcode, ctype,
5931 fold_build2 (code, ctype,
5932 fold_convert (ctype, op0),
5933 fold_convert (ctype, c)),
5934 op1);
5936 break;
5938 case MULT_EXPR:
5939 /* We have a special case here if we are doing something like
5940 (C * 8) % 4 since we know that's zero. */
5941 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5942 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5943 /* If the multiplication can overflow we cannot optimize this. */
5944 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5945 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5946 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5948 *strict_overflow_p = true;
5949 return omit_one_operand (type, integer_zero_node, op0);
5952 /* ... fall through ... */
5954 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5955 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5956 /* If we can extract our operation from the LHS, do so and return a
5957 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5958 do something only if the second operand is a constant. */
5959 if (same_p
5960 && (t1 = extract_muldiv (op0, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5963 fold_convert (ctype, op1));
5964 else if (tcode == MULT_EXPR && code == MULT_EXPR
5965 && (t1 = extract_muldiv (op1, c, code, wide_type,
5966 strict_overflow_p)) != 0)
5967 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5968 fold_convert (ctype, t1));
5969 else if (TREE_CODE (op1) != INTEGER_CST)
5970 return 0;
5972 /* If these are the same operation types, we can associate them
5973 assuming no overflow. */
5974 if (tcode == code)
5976 double_int mul;
5977 bool overflow_p;
5978 unsigned prec = TYPE_PRECISION (ctype);
5979 bool uns = TYPE_UNSIGNED (ctype);
5980 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5981 double_int dic = tree_to_double_int (c).ext (prec, uns);
5982 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5983 overflow_p = ((!uns && overflow_p)
5984 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5985 if (!double_int_fits_to_tree_p (ctype, mul)
5986 && ((uns && tcode != MULT_EXPR) || !uns))
5987 overflow_p = 1;
5988 if (!overflow_p)
5989 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5990 double_int_to_tree (ctype, mul));
5993 /* If these operations "cancel" each other, we have the main
5994 optimizations of this pass, which occur when either constant is a
5995 multiple of the other, in which case we replace this with either an
5996 operation of CODE or TCODE.
5998 If we have an unsigned type, we cannot do this since it will change
5999 the result if the original computation overflowed. */
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6001 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6002 || (tcode == MULT_EXPR
6003 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6004 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6005 && code != MULT_EXPR)))
6007 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6010 *strict_overflow_p = true;
6011 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype,
6013 const_binop (TRUNC_DIV_EXPR,
6014 op1, c)));
6016 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6018 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6019 *strict_overflow_p = true;
6020 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6021 fold_convert (ctype,
6022 const_binop (TRUNC_DIV_EXPR,
6023 c, op1)));
6026 break;
6028 default:
6029 break;
6032 return 0;
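#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): two rewrites from the case analysis above, again
   assuming the multiplications cannot overflow.  */
static int
muldiv_cancel_sketch (int x)
{
  int a = (x * 8) % 4;    /* known to be 0 */
  int b = (x * 12) / 4;   /* folds to x * 3 */
  return a + b;
}
#endif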
6035 /* Return a node which has the indicated constant VALUE (either 0 or
6036 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6037 and is of the indicated TYPE. */
6039 tree
6040 constant_boolean_node (bool value, tree type)
6042 if (type == integer_type_node)
6043 return value ? integer_one_node : integer_zero_node;
6044 else if (type == boolean_type_node)
6045 return value ? boolean_true_node : boolean_false_node;
6046 else if (TREE_CODE (type) == VECTOR_TYPE)
6047 return build_vector_from_val (type,
6048 build_int_cst (TREE_TYPE (type),
6049 value ? -1 : 0));
6050 else
6051 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6055 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6056 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6057 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6058 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6059 COND is the first argument to CODE; otherwise (as in the example
6060 given here), it is the second argument. TYPE is the type of the
6061 original expression. Return NULL_TREE if no simplification is
6062 possible. */
6064 static tree
6065 fold_binary_op_with_conditional_arg (location_t loc,
6066 enum tree_code code,
6067 tree type, tree op0, tree op1,
6068 tree cond, tree arg, int cond_first_p)
6070 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6071 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6072 tree test, true_value, false_value;
6073 tree lhs = NULL_TREE;
6074 tree rhs = NULL_TREE;
6075 enum tree_code cond_code = COND_EXPR;
6077 if (TREE_CODE (cond) == COND_EXPR
6078 || TREE_CODE (cond) == VEC_COND_EXPR)
6080 test = TREE_OPERAND (cond, 0);
6081 true_value = TREE_OPERAND (cond, 1);
6082 false_value = TREE_OPERAND (cond, 2);
6083 /* If this operand throws an exception, then it does not make
6084 sense to try to perform a logical or arithmetic operation
6085 involving it. */
6086 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6087 lhs = true_value;
6088 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6089 rhs = false_value;
6091 else
6093 tree testtype = TREE_TYPE (cond);
6094 test = cond;
6095 true_value = constant_boolean_node (true, testtype);
6096 false_value = constant_boolean_node (false, testtype);
6099 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6100 cond_code = VEC_COND_EXPR;
6102 /* This transformation is only worthwhile if we don't have to wrap ARG
6103 in a SAVE_EXPR and the operation can be simplified without recursing
6104 on at least one of the branches once it is pushed inside the COND_EXPR. */
6105 if (!TREE_CONSTANT (arg)
6106 && (TREE_SIDE_EFFECTS (arg)
6107 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6108 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6109 return NULL_TREE;
6111 arg = fold_convert_loc (loc, arg_type, arg);
6112 if (lhs == 0)
6114 true_value = fold_convert_loc (loc, cond_type, true_value);
6115 if (cond_first_p)
6116 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6117 else
6118 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6120 if (rhs == 0)
6122 false_value = fold_convert_loc (loc, cond_type, false_value);
6123 if (cond_first_p)
6124 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6125 else
6126 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6129 /* Check that we have simplified at least one of the branches. */
6130 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6131 return NULL_TREE;
6133 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
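#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the distribution performed above.  a + (x < y)
   becomes (x < y) ? (a + 1) : (a + 0), whose arms then fold.  */
static int
cond_arg_sketch (int a, int x, int y)
{
  return a + (x < y);
}
#endif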
6137 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6139 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6140 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6141 ADDEND is the same as X.
6143 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6144 and finite. The problematic cases are when X is zero, and its mode
6145 has signed zeros. In the case of rounding towards -infinity,
6146 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6147 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6149 bool
6150 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6152 if (!real_zerop (addend))
6153 return false;
6155 /* Don't allow the fold with -fsignaling-nans. */
6156 if (HONOR_SNANS (TYPE_MODE (type)))
6157 return false;
6159 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6160 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6161 return true;
6163 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6164 if (TREE_CODE (addend) == REAL_CST
6165 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6166 negate = !negate;
6168 /* The mode has signed zeros, and we have to honor their sign.
6169 In this situation, there is only one case we can return true for.
6170 X - 0 is the same as X unless rounding towards -infinity is
6171 supported. */
6172 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
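#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): why the sign of zero matters above.  With
   x == -0.0 and round-to-nearest, x + 0.0 evaluates to +0.0, so
   x + 0.0 may not be folded to x when signed zeros are honored,
   while x - 0.0 may (absent sign-dependent rounding).  */
static double
zero_addition_sketch (double x)
{
  return x + 0.0;
}
#endif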
6175 /* Subroutine of fold() that checks comparisons of built-in math
6176 functions against real constants.
6178 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6179 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6180 is the type of the result and ARG0 and ARG1 are the operands of the
6181 comparison. ARG1 must be a TREE_REAL_CST.
6183 The function returns the constant folded tree if a simplification
6184 can be made, and NULL_TREE otherwise. */
6186 static tree
6187 fold_mathfn_compare (location_t loc,
6188 enum built_in_function fcode, enum tree_code code,
6189 tree type, tree arg0, tree arg1)
6191 REAL_VALUE_TYPE c;
6193 if (BUILTIN_SQRT_P (fcode))
6195 tree arg = CALL_EXPR_ARG (arg0, 0);
6196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6198 c = TREE_REAL_CST (arg1);
6199 if (REAL_VALUE_NEGATIVE (c))
6201 /* sqrt(x) < y is always false, if y is negative. */
6202 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6203 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6205 /* sqrt(x) > y is always true, if y is negative and we
6206 don't care about NaNs, i.e. negative values of x. */
6207 if (code == NE_EXPR || !HONOR_NANS (mode))
6208 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6210 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6211 return fold_build2_loc (loc, GE_EXPR, type, arg,
6212 build_real (TREE_TYPE (arg), dconst0));
6214 else if (code == GT_EXPR || code == GE_EXPR)
6216 REAL_VALUE_TYPE c2;
6218 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6219 real_convert (&c2, mode, &c2);
6221 if (REAL_VALUE_ISINF (c2))
6223 /* sqrt(x) > y is x == +Inf, when y is very large. */
6224 if (HONOR_INFINITIES (mode))
6225 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), c2));
6228 /* sqrt(x) > y is always false, when y is very large
6229 and we don't care about infinities. */
6230 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6233 /* sqrt(x) > c is the same as x > c*c. */
6234 return fold_build2_loc (loc, code, type, arg,
6235 build_real (TREE_TYPE (arg), c2));
6237 else if (code == LT_EXPR || code == LE_EXPR)
6239 REAL_VALUE_TYPE c2;
6241 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6242 real_convert (&c2, mode, &c2);
6244 if (REAL_VALUE_ISINF (c2))
6246 /* sqrt(x) < y is always true, when y is a very large
6247 value and we don't care about NaNs or Infinities. */
6248 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6249 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6251 /* sqrt(x) < y is x != +Inf when y is very large and we
6252 don't care about NaNs. */
6253 if (! HONOR_NANS (mode))
6254 return fold_build2_loc (loc, NE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg), c2));
6257 /* sqrt(x) < y is x >= 0 when y is very large and we
6258 don't care about Infinities. */
6259 if (! HONOR_INFINITIES (mode))
6260 return fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg), dconst0));
6263 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6264 arg = save_expr (arg);
6265 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6266 fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg),
6268 dconst0)),
6269 fold_build2_loc (loc, NE_EXPR, type, arg,
6270 build_real (TREE_TYPE (arg),
6271 c2)));
6274 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6275 if (! HONOR_NANS (mode))
6276 return fold_build2_loc (loc, code, type, arg,
6277 build_real (TREE_TYPE (arg), c2));
6279 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6280 arg = save_expr (arg);
6281 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6282 fold_build2_loc (loc, GE_EXPR, type, arg,
6283 build_real (TREE_TYPE (arg),
6284 dconst0)),
6285 fold_build2_loc (loc, code, type, arg,
6286 build_real (TREE_TYPE (arg),
6287 c2)));
6291 return NULL_TREE;
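#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the sqrt fold above with a literal bound.  Since
   4.0 is exactly 2.0 * 2.0, sqrt (x) < 2.0 folds to
   x >= 0.0 && x < 4.0 (or just x < 4.0 if NaNs are ignored).  */
#include <math.h>

static int
sqrt_compare_sketch (double x)
{
  return sqrt (x) < 2.0;
}
#endif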
6294 /* Subroutine of fold() that optimizes comparisons against Infinities,
6295 either +Inf or -Inf.
6297 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6298 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6299 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6301 The function returns the constant folded tree if a simplification
6302 can be made, and NULL_TREE otherwise. */
6304 static tree
6305 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6306 tree arg0, tree arg1)
6308 enum machine_mode mode;
6309 REAL_VALUE_TYPE max;
6310 tree temp;
6311 bool neg;
6313 mode = TYPE_MODE (TREE_TYPE (arg0));
6315 /* For negative infinity swap the sense of the comparison. */
6316 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6317 if (neg)
6318 code = swap_tree_comparison (code);
6320 switch (code)
6322 case GT_EXPR:
6323 /* x > +Inf is always false, if we ignore sNaNs. */
6324 if (HONOR_SNANS (mode))
6325 return NULL_TREE;
6326 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6328 case LE_EXPR:
6329 /* x <= +Inf is always true, if we don't care about NaNs. */
6330 if (! HONOR_NANS (mode))
6331 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6333 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6334 arg0 = save_expr (arg0);
6335 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6337 case EQ_EXPR:
6338 case GE_EXPR:
6339 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6340 real_maxval (&max, neg, mode);
6341 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6344 case LT_EXPR:
6345 /* x < +Inf is always equal to x <= DBL_MAX. */
6346 real_maxval (&max, neg, mode);
6347 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6348 arg0, build_real (TREE_TYPE (arg0), max));
6350 case NE_EXPR:
6351 /* x != +Inf is always equal to !(x > DBL_MAX). */
6352 real_maxval (&max, neg, mode);
6353 if (! HONOR_NANS (mode))
6354 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6355 arg0, build_real (TREE_TYPE (arg0), max));
6357 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6358 arg0, build_real (TREE_TYPE (arg0), max));
6359 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6361 default:
6362 break;
6365 return NULL_TREE;
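#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the infinity folds above.  x < INFINITY becomes
   x <= DBL_MAX, and x >= INFINITY becomes x > DBL_MAX; both sides
   agree even for NaN inputs, where each is false.  */
#include <float.h>
#include <math.h>

static int
inf_compare_sketch (double x)
{
  return x < INFINITY;   /* folds to x <= DBL_MAX */
}
#endif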
6368 /* Subroutine of fold() that optimizes comparisons of a division by
6369 a nonzero integer constant against an integer constant, i.e.
6370 X/C1 op C2.
6372 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6373 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6374 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6376 The function returns the constant folded tree if a simplification
6377 can be made, and NULL_TREE otherwise. */
6379 static tree
6380 fold_div_compare (location_t loc,
6381 enum tree_code code, tree type, tree arg0, tree arg1)
6383 tree prod, tmp, hi, lo;
6384 tree arg00 = TREE_OPERAND (arg0, 0);
6385 tree arg01 = TREE_OPERAND (arg0, 1);
6386 double_int val;
6387 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6388 bool neg_overflow;
6389 bool overflow;
6391 /* We have to do this the hard way to detect unsigned overflow.
6392 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6393 val = TREE_INT_CST (arg01)
6394 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6395 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6396 neg_overflow = false;
6398 if (unsigned_p)
6400 tmp = int_const_binop (MINUS_EXPR, arg01,
6401 build_int_cst (TREE_TYPE (arg01), 1));
6402 lo = prod;
6404 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6405 val = TREE_INT_CST (prod)
6406 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6407 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6408 -1, overflow | TREE_OVERFLOW (prod));
6410 else if (tree_int_cst_sgn (arg01) >= 0)
6412 tmp = int_const_binop (MINUS_EXPR, arg01,
6413 build_int_cst (TREE_TYPE (arg01), 1));
6414 switch (tree_int_cst_sgn (arg1))
6416 case -1:
6417 neg_overflow = true;
6418 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6419 hi = prod;
6420 break;
6422 case 0:
6423 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6424 hi = tmp;
6425 break;
6427 case 1:
6428 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6429 lo = prod;
6430 break;
6432 default:
6433 gcc_unreachable ();
6436 else
6438 /* A negative divisor reverses the relational operators. */
6439 code = swap_tree_comparison (code);
6441 tmp = int_const_binop (PLUS_EXPR, arg01,
6442 build_int_cst (TREE_TYPE (arg01), 1));
6443 switch (tree_int_cst_sgn (arg1))
6445 case -1:
6446 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6447 lo = prod;
6448 break;
6450 case 0:
6451 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6452 lo = tmp;
6453 break;
6455 case 1:
6456 neg_overflow = true;
6457 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6458 hi = prod;
6459 break;
6461 default:
6462 gcc_unreachable ();
6466 switch (code)
6468 case EQ_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6475 return build_range_check (loc, type, arg00, 1, lo, hi);
6477 case NE_EXPR:
6478 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6479 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6480 if (TREE_OVERFLOW (hi))
6481 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6482 if (TREE_OVERFLOW (lo))
6483 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6484 return build_range_check (loc, type, arg00, 0, lo, hi);
6486 case LT_EXPR:
6487 if (TREE_OVERFLOW (lo))
6489 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6494 case LE_EXPR:
6495 if (TREE_OVERFLOW (hi))
6497 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6502 case GT_EXPR:
6503 if (TREE_OVERFLOW (hi))
6505 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6506 return omit_one_operand_loc (loc, type, tmp, arg00);
6508 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6510 case GE_EXPR:
6511 if (TREE_OVERFLOW (lo))
6513 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6514 return omit_one_operand_loc (loc, type, tmp, arg00);
6516 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6518 default:
6519 break;
6522 return NULL_TREE;
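#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the division fold above.  For unsigned x,
   x / 4 == 3 holds exactly when 12 <= x && x <= 15, which is the
   range check build_range_check produces.  */
static int
div_compare_sketch (unsigned int x)
{
  return x / 4 == 3;
}
#endif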
6526 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6527 equality/inequality test, then return a simplified form of the test
6528 using a sign testing. Otherwise return NULL. TYPE is the desired
6529 result type. */
6531 static tree
6532 fold_single_bit_test_into_sign_test (location_t loc,
6533 enum tree_code code, tree arg0, tree arg1,
6534 tree result_type)
6536 /* If this is testing a single bit, we can optimize the test. */
6537 if ((code == NE_EXPR || code == EQ_EXPR)
6538 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6539 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6541 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6542 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6543 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6545 if (arg00 != NULL_TREE
6546 /* This is only a win if casting to a signed type is cheap,
6547 i.e. when arg00's type is not a partial mode. */
6548 && TYPE_PRECISION (TREE_TYPE (arg00))
6549 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6551 tree stype = signed_type_for (TREE_TYPE (arg00));
6552 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6553 result_type,
6554 fold_convert_loc (loc, stype, arg00),
6555 build_int_cst (stype, 0));
6559 return NULL_TREE;
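#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out, assuming 32-bit int): the sign-test rewrite above.
   Testing the sign bit with AND becomes a signed comparison
   against zero.  */
static int
sign_test_sketch (unsigned int x)
{
  return (x & 0x80000000u) != 0;   /* folds to (int) x < 0 */
}
#endif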
6562 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6563 equality/inequality test, then return a simplified form of
6564 the test using shifts and logical operations. Otherwise return
6565 NULL. TYPE is the desired result type. */
6567 tree
6568 fold_single_bit_test (location_t loc, enum tree_code code,
6569 tree arg0, tree arg1, tree result_type)
6571 /* If this is testing a single bit, we can optimize the test. */
6572 if ((code == NE_EXPR || code == EQ_EXPR)
6573 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6574 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6576 tree inner = TREE_OPERAND (arg0, 0);
6577 tree type = TREE_TYPE (arg0);
6578 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6579 enum machine_mode operand_mode = TYPE_MODE (type);
6580 int ops_unsigned;
6581 tree signed_type, unsigned_type, intermediate_type;
6582 tree tem, one;
6584 /* First, see if we can fold the single bit test into a sign-bit
6585 test. */
6586 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6587 result_type);
6588 if (tem)
6589 return tem;
6591 /* Otherwise we have (A & C) != 0 where C is a single bit,
6592 convert that into ((A >> C2) & 1), where C2 = log2(C).
6593 Similarly for (A & C) == 0. */
6595 /* If INNER is a right shift of a constant and it plus BITNUM does
6596 not overflow, adjust BITNUM and INNER. */
6597 if (TREE_CODE (inner) == RSHIFT_EXPR
6598 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6599 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6600 && bitnum < TYPE_PRECISION (type)
6601 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6602 bitnum - TYPE_PRECISION (type)))
6604 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6605 inner = TREE_OPERAND (inner, 0);
6608 /* If we are going to be able to omit the AND below, we must do our
6609 operations as unsigned. If we must use the AND, we have a choice.
6610 Normally unsigned is faster, but for some machines signed is. */
6611 #ifdef LOAD_EXTEND_OP
6612 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6613 && !flag_syntax_only) ? 0 : 1;
6614 #else
6615 ops_unsigned = 1;
6616 #endif
6618 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6619 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6620 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6621 inner = fold_convert_loc (loc, intermediate_type, inner);
6623 if (bitnum != 0)
6624 inner = build2 (RSHIFT_EXPR, intermediate_type,
6625 inner, size_int (bitnum));
6627 one = build_int_cst (intermediate_type, 1);
6629 if (code == EQ_EXPR)
6630 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6632 /* Put the AND last so it can combine with more things. */
6633 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6635 /* Make sure to return the proper type. */
6636 inner = fold_convert_loc (loc, result_type, inner);
6638 return inner;
6640 return NULL_TREE;
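#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the shift form produced above for a bit other
   than the sign bit.  */
static unsigned int
single_bit_test_sketch (unsigned int x)
{
  return (x & 8) != 0;   /* folds to (x >> 3) & 1 */
}
#endif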
6643 /* Check whether we are allowed to reorder operands arg0 and arg1,
6644 such that the evaluation of arg1 occurs before arg0. */
6646 static bool
6647 reorder_operands_p (const_tree arg0, const_tree arg1)
6649 if (! flag_evaluation_order)
6650 return true;
6651 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6652 return true;
6653 return ! TREE_SIDE_EFFECTS (arg0)
6654 && ! TREE_SIDE_EFFECTS (arg1);
6657 /* Test whether it is preferable to swap two operands, ARG0 and
6658 ARG1, for example because ARG0 is an integer constant and ARG1
6659 isn't. If REORDER is true, only recommend swapping if we can
6660 evaluate the operands in reverse order. */
6662 bool
6663 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6665 STRIP_SIGN_NOPS (arg0);
6666 STRIP_SIGN_NOPS (arg1);
6668 if (TREE_CODE (arg1) == INTEGER_CST)
6669 return 0;
6670 if (TREE_CODE (arg0) == INTEGER_CST)
6671 return 1;
6673 if (TREE_CODE (arg1) == REAL_CST)
6674 return 0;
6675 if (TREE_CODE (arg0) == REAL_CST)
6676 return 1;
6678 if (TREE_CODE (arg1) == FIXED_CST)
6679 return 0;
6680 if (TREE_CODE (arg0) == FIXED_CST)
6681 return 1;
6683 if (TREE_CODE (arg1) == COMPLEX_CST)
6684 return 0;
6685 if (TREE_CODE (arg0) == COMPLEX_CST)
6686 return 1;
6688 if (TREE_CONSTANT (arg1))
6689 return 0;
6690 if (TREE_CONSTANT (arg0))
6691 return 1;
6693 if (optimize_function_for_size_p (cfun))
6694 return 0;
6696 if (reorder && flag_evaluation_order
6697 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6698 return 0;
6700 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6701 for commutative and comparison operators. Ensuring a canonical
6702 form allows the optimizers to find additional redundancies without
6703 having to explicitly check for both orderings. */
6704 if (TREE_CODE (arg0) == SSA_NAME
6705 && TREE_CODE (arg1) == SSA_NAME
6706 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6707 return 1;
6709 /* Put SSA_NAMEs last. */
6710 if (TREE_CODE (arg1) == SSA_NAME)
6711 return 0;
6712 if (TREE_CODE (arg0) == SSA_NAME)
6713 return 1;
6715 /* Put variables last. */
6716 if (DECL_P (arg1))
6717 return 0;
6718 if (DECL_P (arg0))
6719 return 1;
6721 return 0;
6724 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6725 ARG0 is extended to a wider type. */
6727 static tree
6728 fold_widened_comparison (location_t loc, enum tree_code code,
6729 tree type, tree arg0, tree arg1)
6731 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6732 tree arg1_unw;
6733 tree shorter_type, outer_type;
6734 tree min, max;
6735 bool above, below;
6737 if (arg0_unw == arg0)
6738 return NULL_TREE;
6739 shorter_type = TREE_TYPE (arg0_unw);
6741 #ifdef HAVE_canonicalize_funcptr_for_compare
6742 /* Disable this optimization if we're casting a function pointer
6743 type on targets that require function pointer canonicalization. */
6744 if (HAVE_canonicalize_funcptr_for_compare
6745 && TREE_CODE (shorter_type) == POINTER_TYPE
6746 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6747 return NULL_TREE;
6748 #endif
6750 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6751 return NULL_TREE;
6753 arg1_unw = get_unwidened (arg1, NULL_TREE);
6755 /* If possible, express the comparison in the shorter mode. */
6756 if ((code == EQ_EXPR || code == NE_EXPR
6757 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6758 && (TREE_TYPE (arg1_unw) == shorter_type
6759 || ((TYPE_PRECISION (shorter_type)
6760 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6761 && (TYPE_UNSIGNED (shorter_type)
6762 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6763 || (TREE_CODE (arg1_unw) == INTEGER_CST
6764 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6765 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6766 && int_fits_type_p (arg1_unw, shorter_type))))
6767 return fold_build2_loc (loc, code, type, arg0_unw,
6768 fold_convert_loc (loc, shorter_type, arg1_unw));
6770 if (TREE_CODE (arg1_unw) != INTEGER_CST
6771 || TREE_CODE (shorter_type) != INTEGER_TYPE
6772 || !int_fits_type_p (arg1_unw, shorter_type))
6773 return NULL_TREE;
6775 /* If we are comparing with an integer that does not fit into the range
6776 of the shorter type, the result is known. */
6777 outer_type = TREE_TYPE (arg1_unw);
6778 min = lower_bound_in_type (outer_type, shorter_type);
6779 max = upper_bound_in_type (outer_type, shorter_type);
6781 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6782 max, arg1_unw));
6783 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6784 arg1_unw, min));
6786 switch (code)
6788 case EQ_EXPR:
6789 if (above || below)
6790 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6791 break;
6793 case NE_EXPR:
6794 if (above || below)
6795 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6796 break;
6798 case LT_EXPR:
6799 case LE_EXPR:
6800 if (above)
6801 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6802 else if (below)
6803 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6805 case GT_EXPR:
6806 case GE_EXPR:
6807 if (above)
6808 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6809 else if (below)
6810 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6812 default:
6813 break;
6816 return NULL_TREE;
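#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): a comparison whose constant lies outside the
   narrow type's range, as handled above.  signed char spans
   [-128, 127], so the result is known without widening.  */
static int
widened_compare_sketch (signed char c)
{
  return (int) c == 500;   /* folds to 0 */
}
#endif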
6819 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6820 ARG0 just the signedness is changed. */
6822 static tree
6823 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6824 tree arg0, tree arg1)
6826 tree arg0_inner;
6827 tree inner_type, outer_type;
6829 if (!CONVERT_EXPR_P (arg0))
6830 return NULL_TREE;
6832 outer_type = TREE_TYPE (arg0);
6833 arg0_inner = TREE_OPERAND (arg0, 0);
6834 inner_type = TREE_TYPE (arg0_inner);
6836 #ifdef HAVE_canonicalize_funcptr_for_compare
6837 /* Disable this optimization if we're casting a function pointer
6838 type on targets that require function pointer canonicalization. */
6839 if (HAVE_canonicalize_funcptr_for_compare
6840 && TREE_CODE (inner_type) == POINTER_TYPE
6841 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6842 return NULL_TREE;
6843 #endif
6845 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6846 return NULL_TREE;
6848 if (TREE_CODE (arg1) != INTEGER_CST
6849 && !(CONVERT_EXPR_P (arg1)
6850 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6851 return NULL_TREE;
6853 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6854 && code != NE_EXPR
6855 && code != EQ_EXPR)
6856 return NULL_TREE;
6858 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6859 return NULL_TREE;
6861 if (TREE_CODE (arg1) == INTEGER_CST)
6862 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6863 0, TREE_OVERFLOW (arg1));
6864 else
6865 arg1 = fold_convert_loc (loc, inner_type, arg1);
6867 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6870 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6871 the step of the array. Reconstructs s and delta in the case of s *
6872 delta being an integer constant (and thus already folded). ADDR is
6873 the address. MULT is the multiplicative expression. If the
6874 function succeeds, the new address expression is returned.
6875 Otherwise NULL_TREE is returned. LOC is the location of the
6876 resulting expression. */
6878 static tree
6879 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6881 tree s, delta, step;
6882 tree ref = TREE_OPERAND (addr, 0), pref;
6883 tree ret, pos;
6884 tree itype;
6885 bool mdim = false;
6887 /* Strip the nops that might be added when converting op1 to sizetype. */
6888 STRIP_NOPS (op1);
6890 /* Canonicalize op1 into a possibly non-constant delta
6891 and an INTEGER_CST s. */
6892 if (TREE_CODE (op1) == MULT_EXPR)
6894 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6896 STRIP_NOPS (arg0);
6897 STRIP_NOPS (arg1);
6899 if (TREE_CODE (arg0) == INTEGER_CST)
6901 s = arg0;
6902 delta = arg1;
6904 else if (TREE_CODE (arg1) == INTEGER_CST)
6906 s = arg1;
6907 delta = arg0;
6909 else
6910 return NULL_TREE;
6912 else if (TREE_CODE (op1) == INTEGER_CST)
6914 delta = op1;
6915 s = NULL_TREE;
6917 else
6919 /* Act as if op1 were delta * 1. */
6920 delta = op1;
6921 s = integer_one_node;
6924 /* Handle &x.array the same as we would handle &x.array[0]. */
6925 if (TREE_CODE (ref) == COMPONENT_REF
6926 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6928 tree domain;
6930 /* Remember if this was a multi-dimensional array. */
6931 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6932 mdim = true;
6934 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6935 if (! domain)
6936 goto cont;
6937 itype = TREE_TYPE (domain);
6939 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6940 if (TREE_CODE (step) != INTEGER_CST)
6941 goto cont;
6943 if (s)
6945 if (! tree_int_cst_equal (step, s))
6946 goto cont;
6948 else
6950 /* Check whether delta is a multiple of step. */
6951 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6952 if (! tmp)
6953 goto cont;
6954 delta = tmp;
6957 /* Only fold here if we can verify we do not overflow one
6958 dimension of a multi-dimensional array. */
6959 if (mdim)
6961 tree tmp;
6963 if (!TYPE_MIN_VALUE (domain)
6964 || !TYPE_MAX_VALUE (domain)
6965 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6966 goto cont;
6968 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6969 fold_convert_loc (loc, itype,
6970 TYPE_MIN_VALUE (domain)),
6971 fold_convert_loc (loc, itype, delta));
6972 if (TREE_CODE (tmp) != INTEGER_CST
6973 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6974 goto cont;
6977 /* We found a suitable component reference. */
6979 pref = TREE_OPERAND (addr, 0);
6980 ret = copy_node (pref);
6981 SET_EXPR_LOCATION (ret, loc);
6983 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6984 fold_build2_loc
6985 (loc, PLUS_EXPR, itype,
6986 fold_convert_loc (loc, itype,
6987 TYPE_MIN_VALUE
6988 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6989 fold_convert_loc (loc, itype, delta)),
6990 NULL_TREE, NULL_TREE);
6991 return build_fold_addr_expr_loc (loc, ret);
6994 cont:
6996 for (;; ref = TREE_OPERAND (ref, 0))
6998 if (TREE_CODE (ref) == ARRAY_REF)
7000 tree domain;
7002 /* Remember if this was a multi-dimensional array. */
7003 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7004 mdim = true;
7006 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7007 if (! domain)
7008 continue;
7009 itype = TREE_TYPE (domain);
7011 step = array_ref_element_size (ref);
7012 if (TREE_CODE (step) != INTEGER_CST)
7013 continue;
7015 if (s)
7017 if (! tree_int_cst_equal (step, s))
7018 continue;
7020 else
7022 /* Check whether delta is a multiple of step. */
7023 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7024 if (! tmp)
7025 continue;
7026 delta = tmp;
7029 /* Only fold here if we can verify we do not overflow one
7030 dimension of a multi-dimensional array. */
7031 if (mdim)
7033 tree tmp;
7035 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7036 || !TYPE_MAX_VALUE (domain)
7037 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7038 continue;
7040 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7041 fold_convert_loc (loc, itype,
7042 TREE_OPERAND (ref, 1)),
7043 fold_convert_loc (loc, itype, delta));
7044 if (!tmp
7045 || TREE_CODE (tmp) != INTEGER_CST
7046 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7047 continue;
7050 break;
7052 else
7053 mdim = false;
7055 if (!handled_component_p (ref))
7056 return NULL_TREE;
7059 /* We found a suitable array reference. So copy everything up to it,
7060 and replace the index. */
7062 pref = TREE_OPERAND (addr, 0);
7063 ret = copy_node (pref);
7064 SET_EXPR_LOCATION (ret, loc);
7065 pos = ret;
7067 while (pref != ref)
7069 pref = TREE_OPERAND (pref, 0);
7070 TREE_OPERAND (pos, 0) = copy_node (pref);
7071 pos = TREE_OPERAND (pos, 0);
7074 TREE_OPERAND (pos, 1)
7075 = fold_build2_loc (loc, PLUS_EXPR, itype,
7076 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7077 fold_convert_loc (loc, itype, delta));
7078 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
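#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the address rewrite above.  The frontend lowers
   &a[i] + d to &a[i] p+ d * sizeof (int); since the scale matches
   the array step, it is rewritten back to &a[i + d].  */
static int *
move_mult_sketch (int a[], long i, long d)
{
  return &a[i] + d;
}
#endif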
7082 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7083 means A >= Y && A != MAX, but in this case we know that
7084 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7086 static tree
7087 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7089 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7091 if (TREE_CODE (bound) == LT_EXPR)
7092 a = TREE_OPERAND (bound, 0);
7093 else if (TREE_CODE (bound) == GT_EXPR)
7094 a = TREE_OPERAND (bound, 1);
7095 else
7096 return NULL_TREE;
7098 typea = TREE_TYPE (a);
7099 if (!INTEGRAL_TYPE_P (typea)
7100 && !POINTER_TYPE_P (typea))
7101 return NULL_TREE;
7103 if (TREE_CODE (ineq) == LT_EXPR)
7105 a1 = TREE_OPERAND (ineq, 1);
7106 y = TREE_OPERAND (ineq, 0);
7108 else if (TREE_CODE (ineq) == GT_EXPR)
7110 a1 = TREE_OPERAND (ineq, 0);
7111 y = TREE_OPERAND (ineq, 1);
7113 else
7114 return NULL_TREE;
7116 if (TREE_TYPE (a1) != typea)
7117 return NULL_TREE;
7119 if (POINTER_TYPE_P (typea))
7121 /* Convert the pointers to integers before taking the difference. */
7122 tree ta = fold_convert_loc (loc, ssizetype, a);
7123 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7124 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7126 else
7127 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7129 if (!diff || !integer_onep (diff))
7130 return NULL_TREE;
7132 return fold_build2_loc (loc, GE_EXPR, type, a, y);
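#if 0
/* A minimal sketch, illustrative only (hypothetical function,
   compiled out): the combined fold above.  Under a < x, a cannot
   be the maximum value, so a + 1 does not wrap and a + 1 > y may
   be replaced by a >= y.  */
static int
nonsharp_ineq_sketch (unsigned int a, unsigned int x, unsigned int y)
{
  return a < x && a + 1 > y;   /* folds to a < x && a >= y */
}
#endif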
7135 /* Fold a sum or difference of at least one multiplication.
7136 Returns the folded tree or NULL if no simplification could be made. */
7138 static tree
7139 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7140 tree arg0, tree arg1)
7142 tree arg00, arg01, arg10, arg11;
7143 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7145 /* (A * C) +- (B * C) -> (A+-B) * C.
7146 (A * C) +- A -> A * (C+-1).
7147 We are most concerned about the case where C is a constant,
7148 but other combinations show up during loop reduction. Since
7149 it is not difficult, try all four possibilities. */
7151 if (TREE_CODE (arg0) == MULT_EXPR)
7153 arg00 = TREE_OPERAND (arg0, 0);
7154 arg01 = TREE_OPERAND (arg0, 1);
7156 else if (TREE_CODE (arg0) == INTEGER_CST)
7158 arg00 = build_one_cst (type);
7159 arg01 = arg0;
7161 else
7163 /* We cannot generate constant 1 for fract. */
7164 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7165 return NULL_TREE;
7166 arg00 = arg0;
7167 arg01 = build_one_cst (type);
7169 if (TREE_CODE (arg1) == MULT_EXPR)
7171 arg10 = TREE_OPERAND (arg1, 0);
7172 arg11 = TREE_OPERAND (arg1, 1);
7174 else if (TREE_CODE (arg1) == INTEGER_CST)
7176 arg10 = build_one_cst (type);
7177 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7178 the purpose of this canonicalization. */
7179 if (TREE_INT_CST_HIGH (arg1) == -1
7180 && negate_expr_p (arg1)
7181 && code == PLUS_EXPR)
7183 arg11 = negate_expr (arg1);
7184 code = MINUS_EXPR;
7186 else
7187 arg11 = arg1;
7189 else
7191 /* We cannot generate constant 1 for fract. */
7192 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7193 return NULL_TREE;
7194 arg10 = arg1;
7195 arg11 = build_one_cst (type);
7197 same = NULL_TREE;
7199 if (operand_equal_p (arg01, arg11, 0))
7200 same = arg01, alt0 = arg00, alt1 = arg10;
7201 else if (operand_equal_p (arg00, arg10, 0))
7202 same = arg00, alt0 = arg01, alt1 = arg11;
7203 else if (operand_equal_p (arg00, arg11, 0))
7204 same = arg00, alt0 = arg01, alt1 = arg10;
7205 else if (operand_equal_p (arg01, arg10, 0))
7206 same = arg01, alt0 = arg00, alt1 = arg11;
7208 /* No identical multiplicands; see if we can find a common
7209 power-of-two factor in non-power-of-two multiplies. This
7210 can help in multi-dimensional array access. */
7211 else if (host_integerp (arg01, 0)
7212 && host_integerp (arg11, 0))
7214 HOST_WIDE_INT int01, int11, tmp;
7215 bool swap = false;
7216 tree maybe_same;
7217 int01 = TREE_INT_CST_LOW (arg01);
7218 int11 = TREE_INT_CST_LOW (arg11);
7220 /* Move min of absolute values to int11. */
7221 if (absu_hwi (int01) < absu_hwi (int11))
7223 tmp = int01, int01 = int11, int11 = tmp;
7224 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7225 maybe_same = arg01;
7226 swap = true;
7228 else
7229 maybe_same = arg11;
7231 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7232 /* The remainder should not be a constant, otherwise we
7233 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7234 increase the number of multiplications necessary. */
7235 && TREE_CODE (arg10) != INTEGER_CST)
7237 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7238 build_int_cst (TREE_TYPE (arg00),
7239 int01 / int11));
7240 alt1 = arg10;
7241 same = maybe_same;
7242 if (swap)
7243 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7247 if (same)
7248 return fold_build2_loc (loc, MULT_EXPR, type,
7249 fold_build2_loc (loc, code, type,
7250 fold_convert_loc (loc, type, alt0),
7251 fold_convert_loc (loc, type, alt1)),
7252 fold_convert_loc (loc, type, same));
7254 return NULL_TREE;
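/* A minimal usage sketch (illustrative only; I and J stand for any
   int-typed trees).  Handed i*4 + j*4, this routine produces
   (i + j) * 4, and the power-of-two path rewrites i*12 + j*4 as
   (i*3 + j) * 4:

     tree four = build_int_cst (integer_type_node, 4);
     tree t0 = build2 (MULT_EXPR, integer_type_node, i, four);
     tree t1 = build2 (MULT_EXPR, integer_type_node, j, four);
     tree sum = fold_plusminus_mult_expr (UNKNOWN_LOCATION, PLUS_EXPR,
                                          integer_type_node, t0, t1);

   SUM is then (i + j) * 4, saving one multiplication.  */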
7257 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7258 specified by EXPR into the buffer PTR of length LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero
7260 upon failure. */
7262 static int
7263 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7265 tree type = TREE_TYPE (expr);
7266 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7267 int byte, offset, word, words;
7268 unsigned char value;
7270 if (total_bytes > len)
7271 return 0;
7272 words = total_bytes / UNITS_PER_WORD;
7274 for (byte = 0; byte < total_bytes; byte++)
7276 int bitpos = byte * BITS_PER_UNIT;
7277 if (bitpos < HOST_BITS_PER_WIDE_INT)
7278 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7279 else
7280 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7281 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7283 if (total_bytes > UNITS_PER_WORD)
7285 word = byte / UNITS_PER_WORD;
7286 if (WORDS_BIG_ENDIAN)
7287 word = (words - 1) - word;
7288 offset = word * UNITS_PER_WORD;
7289 if (BYTES_BIG_ENDIAN)
7290 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7291 else
7292 offset += byte % UNITS_PER_WORD;
7294 else
7295 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7296 ptr[offset] = value;
7298 return total_bytes;
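/* For example, assuming 8-bit bytes and a 32-bit int, encoding the
   INTEGER_CST 0x01020304 yields the bytes { 0x04, 0x03, 0x02, 0x01 }
   on a little-endian target and { 0x01, 0x02, 0x03, 0x04 } on a
   big-endian one.  */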
7302 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7303 specified by EXPR into the buffer PTR of length LEN bytes.
7304 Return the number of bytes placed in the buffer, or zero
7305 upon failure. */
7307 static int
7308 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7310 tree type = TREE_TYPE (expr);
7311 enum machine_mode mode = TYPE_MODE (type);
7312 int total_bytes = GET_MODE_SIZE (mode);
7313 FIXED_VALUE_TYPE value;
7314 tree i_value, i_type;
7316 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7317 return 0;
7319 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7321 if (NULL_TREE == i_type
7322 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7323 return 0;
7325 value = TREE_FIXED_CST (expr);
7326 i_value = double_int_to_tree (i_type, value.data);
7328 return native_encode_int (i_value, ptr, len);
7332 /* Subroutine of native_encode_expr. Encode the REAL_CST
7333 specified by EXPR into the buffer PTR of length LEN bytes.
7334 Return the number of bytes placed in the buffer, or zero
7335 upon failure. */
7337 static int
7338 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7340 tree type = TREE_TYPE (expr);
7341 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7342 int byte, offset, word, words, bitpos;
7343 unsigned char value;
7345 /* There are always 32 bits in each long, no matter the size of
7346 the host's long. We handle floating point representations with
7347 up to 192 bits. */
7348 long tmp[6];
7350 if (total_bytes > len)
7351 return 0;
7352 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7354 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7356 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7357 bitpos += BITS_PER_UNIT)
7359 byte = (bitpos / BITS_PER_UNIT) & 3;
7360 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7362 if (UNITS_PER_WORD < 4)
7364 word = byte / UNITS_PER_WORD;
7365 if (WORDS_BIG_ENDIAN)
7366 word = (words - 1) - word;
7367 offset = word * UNITS_PER_WORD;
7368 if (BYTES_BIG_ENDIAN)
7369 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7370 else
7371 offset += byte % UNITS_PER_WORD;
7373 else
7374 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7375 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7377 return total_bytes;
7380 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7381 specified by EXPR into the buffer PTR of length LEN bytes.
7382 Return the number of bytes placed in the buffer, or zero
7383 upon failure. */
7385 static int
7386 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7388 int rsize, isize;
7389 tree part;
7391 part = TREE_REALPART (expr);
7392 rsize = native_encode_expr (part, ptr, len);
7393 if (rsize == 0)
7394 return 0;
7395 part = TREE_IMAGPART (expr);
7396 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7397 if (isize != rsize)
7398 return 0;
7399 return rsize + isize;
7403 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7404 specified by EXPR into the buffer PTR of length LEN bytes.
7405 Return the number of bytes placed in the buffer, or zero
7406 upon failure. */
7408 static int
7409 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7411 unsigned i, count;
7412 int size, offset;
7413 tree itype, elem;
7415 offset = 0;
7416 count = VECTOR_CST_NELTS (expr);
7417 itype = TREE_TYPE (TREE_TYPE (expr));
7418 size = GET_MODE_SIZE (TYPE_MODE (itype));
7419 for (i = 0; i < count; i++)
7421 elem = VECTOR_CST_ELT (expr, i);
7422 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7423 return 0;
7424 offset += size;
7426 return offset;
7430 /* Subroutine of native_encode_expr. Encode the STRING_CST
7431 specified by EXPR into the buffer PTR of length LEN bytes.
7432 Return the number of bytes placed in the buffer, or zero
7433 upon failure. */
7435 static int
7436 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7438 tree type = TREE_TYPE (expr);
7439 HOST_WIDE_INT total_bytes;
7441 if (TREE_CODE (type) != ARRAY_TYPE
7442 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7443 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7444 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7445 return 0;
7446 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7447 if (total_bytes > len)
7448 return 0;
7449 if (TREE_STRING_LENGTH (expr) < total_bytes)
7451 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7452 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7453 total_bytes - TREE_STRING_LENGTH (expr));
7455 else
7456 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7457 return total_bytes;
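/* For example, encoding a STRING_CST holding "ab" (three bytes
   including the terminating NUL) whose array type is char[5] copies
   the three string bytes and zero-fills the remaining two, returning
   5.  */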
7461 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7462 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7463 buffer PTR of length LEN bytes. Return the number of bytes
7464 placed in the buffer, or zero upon failure. */
7466 int
7467 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7469 switch (TREE_CODE (expr))
7471 case INTEGER_CST:
7472 return native_encode_int (expr, ptr, len);
7474 case REAL_CST:
7475 return native_encode_real (expr, ptr, len);
7477 case FIXED_CST:
7478 return native_encode_fixed (expr, ptr, len);
7480 case COMPLEX_CST:
7481 return native_encode_complex (expr, ptr, len);
7483 case VECTOR_CST:
7484 return native_encode_vector (expr, ptr, len);
7486 case STRING_CST:
7487 return native_encode_string (expr, ptr, len);
7489 default:
7490 return 0;
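/* A typical call pattern (a sketch; the caller picks the buffer size,
   as fold_view_convert_expr below does with 64 bytes):

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     if (len == 0)
       return NULL_TREE;

   LEN == 0 means the tree code is unsupported or the buffer is too
   small; on success BUF[0..LEN-1] holds CST in target byte order.  */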
7495 /* Subroutine of native_interpret_expr. Interpret the contents of
7496 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7497 If the buffer cannot be interpreted, return NULL_TREE. */
7499 static tree
7500 native_interpret_int (tree type, const unsigned char *ptr, int len)
7502 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7503 double_int result;
7505 if (total_bytes > len
7506 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7507 return NULL_TREE;
7509 result = double_int::from_buffer (ptr, total_bytes);
7511 return double_int_to_tree (type, result);
7515 /* Subroutine of native_interpret_expr. Interpret the contents of
7516 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7517 If the buffer cannot be interpreted, return NULL_TREE. */
7519 static tree
7520 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7522 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7523 double_int result;
7524 FIXED_VALUE_TYPE fixed_value;
7526 if (total_bytes > len
7527 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7528 return NULL_TREE;
7530 result = double_int::from_buffer (ptr, total_bytes);
7531 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7533 return build_fixed (type, fixed_value);
7537 /* Subroutine of native_interpret_expr. Interpret the contents of
7538 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7539 If the buffer cannot be interpreted, return NULL_TREE. */
7541 static tree
7542 native_interpret_real (tree type, const unsigned char *ptr, int len)
7544 enum machine_mode mode = TYPE_MODE (type);
7545 int total_bytes = GET_MODE_SIZE (mode);
7546 int byte, offset, word, words, bitpos;
7547 unsigned char value;
7548 /* There are always 32 bits in each long, no matter the size of
7549 the host's long. We handle floating point representations with
7550 up to 192 bits. */
7551 REAL_VALUE_TYPE r;
7552 long tmp[6];
7554 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7555 if (total_bytes > len || total_bytes > 24)
7556 return NULL_TREE;
7557 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7559 memset (tmp, 0, sizeof (tmp));
7560 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7561 bitpos += BITS_PER_UNIT)
7563 byte = (bitpos / BITS_PER_UNIT) & 3;
7564 if (UNITS_PER_WORD < 4)
7566 word = byte / UNITS_PER_WORD;
7567 if (WORDS_BIG_ENDIAN)
7568 word = (words - 1) - word;
7569 offset = word * UNITS_PER_WORD;
7570 if (BYTES_BIG_ENDIAN)
7571 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7572 else
7573 offset += byte % UNITS_PER_WORD;
7575 else
7576 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7577 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7579 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7582 real_from_target (&r, tmp, mode);
7583 return build_real (type, r);
7587 /* Subroutine of native_interpret_expr. Interpret the contents of
7588 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7589 If the buffer cannot be interpreted, return NULL_TREE. */
7591 static tree
7592 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7594 tree etype, rpart, ipart;
7595 int size;
7597 etype = TREE_TYPE (type);
7598 size = GET_MODE_SIZE (TYPE_MODE (etype));
7599 if (size * 2 > len)
7600 return NULL_TREE;
7601 rpart = native_interpret_expr (etype, ptr, size);
7602 if (!rpart)
7603 return NULL_TREE;
7604 ipart = native_interpret_expr (etype, ptr+size, size);
7605 if (!ipart)
7606 return NULL_TREE;
7607 return build_complex (type, rpart, ipart);
7611 /* Subroutine of native_interpret_expr. Interpret the contents of
7612 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7613 If the buffer cannot be interpreted, return NULL_TREE. */
7615 static tree
7616 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7618 tree etype, elem;
7619 int i, size, count;
7620 tree *elements;
7622 etype = TREE_TYPE (type);
7623 size = GET_MODE_SIZE (TYPE_MODE (etype));
7624 count = TYPE_VECTOR_SUBPARTS (type);
7625 if (size * count > len)
7626 return NULL_TREE;
7628 elements = XALLOCAVEC (tree, count);
7629 for (i = count - 1; i >= 0; i--)
7631 elem = native_interpret_expr (etype, ptr+(i*size), size);
7632 if (!elem)
7633 return NULL_TREE;
7634 elements[i] = elem;
7636 return build_vector (type, elements);
7640 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7641 the buffer PTR of length LEN as a constant of type TYPE. For
7642 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7643 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7644 return NULL_TREE. */
7646 tree
7647 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7649 switch (TREE_CODE (type))
7651 case INTEGER_TYPE:
7652 case ENUMERAL_TYPE:
7653 case BOOLEAN_TYPE:
7654 case POINTER_TYPE:
7655 case REFERENCE_TYPE:
7656 return native_interpret_int (type, ptr, len);
7658 case REAL_TYPE:
7659 return native_interpret_real (type, ptr, len);
7661 case FIXED_POINT_TYPE:
7662 return native_interpret_fixed (type, ptr, len);
7664 case COMPLEX_TYPE:
7665 return native_interpret_complex (type, ptr, len);
7667 case VECTOR_TYPE:
7668 return native_interpret_vector (type, ptr, len);
7670 default:
7671 return NULL_TREE;
7675 /* Returns true if we can interpret the contents of a native encoding
7676 as TYPE. */
7678 static bool
7679 can_native_interpret_type_p (tree type)
7681 switch (TREE_CODE (type))
7683 case INTEGER_TYPE:
7684 case ENUMERAL_TYPE:
7685 case BOOLEAN_TYPE:
7686 case POINTER_TYPE:
7687 case REFERENCE_TYPE:
7688 case FIXED_POINT_TYPE:
7689 case REAL_TYPE:
7690 case COMPLEX_TYPE:
7691 case VECTOR_TYPE:
7692 return true;
7693 default:
7694 return false;
7698 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7699 TYPE at compile-time. If we're unable to perform the conversion
7700 return NULL_TREE. */
7702 static tree
7703 fold_view_convert_expr (tree type, tree expr)
7705 /* We support up to 512-bit values (for V8DFmode). */
7706 unsigned char buffer[64];
7707 int len;
7709 /* Check that the host and target are sane. */
7710 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7711 return NULL_TREE;
7713 len = native_encode_expr (expr, buffer, sizeof (buffer));
7714 if (len == 0)
7715 return NULL_TREE;
7717 return native_interpret_expr (type, buffer, len);
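/* For instance, on a target whose SFmode is IEEE binary32, a
   VIEW_CONVERT_EXPR from float to a 32-bit integer type folds at
   compile time (a sketch, assuming such a target):

     REAL_VALUE_TYPE r;
     real_from_string (&r, "1.0");
     tree f = build_real (float_type_node, r);
     tree i = fold_view_convert_expr (integer_type_node, f);

   I is then the INTEGER_CST 0x3f800000.  */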
7720 /* Build an expression for the address of T. Folds away INDIRECT_REF
7721 to avoid confusing the gimplify process. */
7723 tree
7724 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7726 /* The size of the object is not relevant when talking about its address. */
7727 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7728 t = TREE_OPERAND (t, 0);
7730 if (TREE_CODE (t) == INDIRECT_REF)
7732 t = TREE_OPERAND (t, 0);
7734 if (TREE_TYPE (t) != ptrtype)
7735 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7737 else if (TREE_CODE (t) == MEM_REF
7738 && integer_zerop (TREE_OPERAND (t, 1)))
7739 return TREE_OPERAND (t, 0);
7740 else if (TREE_CODE (t) == MEM_REF
7741 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7742 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7743 TREE_OPERAND (t, 0),
7744 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7745 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7747 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7749 if (TREE_TYPE (t) != ptrtype)
7750 t = fold_convert_loc (loc, ptrtype, t);
7752 else
7753 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7755 return t;
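/* For example, building the address of *P simply hands back P (with a
   NOP_EXPR to PTRTYPE if the types differ), and the address of
   MEM[p, 0] folds to p, so the gimplifier never sees an ADDR_EXPR
   wrapped around a plain dereference.  */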
7758 /* Build an expression for the address of T. */
7760 tree
7761 build_fold_addr_expr_loc (location_t loc, tree t)
7763 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7765 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7768 static bool vec_cst_ctor_to_array (tree, tree *);
7770 /* Fold a unary expression of code CODE and type TYPE with operand
7771 OP0. Return the folded expression if folding is successful.
7772 Otherwise, return NULL_TREE. */
7774 tree
7775 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7777 tree tem;
7778 tree arg0;
7779 enum tree_code_class kind = TREE_CODE_CLASS (code);
7781 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7782 && TREE_CODE_LENGTH (code) == 1);
7784 arg0 = op0;
7785 if (arg0)
7787 if (CONVERT_EXPR_CODE_P (code)
7788 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7790 /* Don't use STRIP_NOPS, because signedness of argument type
7791 matters. */
7792 STRIP_SIGN_NOPS (arg0);
7794 else
7796 /* Strip any conversions that don't change the mode. This
7797 is safe for every expression, except for a comparison
7798 expression because its signedness is derived from its
7799 operands.
7801 Note that this is done as an internal manipulation within
7802 the constant folder, in order to find the simplest
7803 representation of the arguments so that their form can be
7804 studied. In any case, the appropriate type conversions
7805 should be put back in the tree that will get out of the
7806 constant folder. */
7807 STRIP_NOPS (arg0);
7811 if (TREE_CODE_CLASS (code) == tcc_unary)
7813 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7814 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7815 fold_build1_loc (loc, code, type,
7816 fold_convert_loc (loc, TREE_TYPE (op0),
7817 TREE_OPERAND (arg0, 1))));
7818 else if (TREE_CODE (arg0) == COND_EXPR)
7820 tree arg01 = TREE_OPERAND (arg0, 1);
7821 tree arg02 = TREE_OPERAND (arg0, 2);
7822 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7823 arg01 = fold_build1_loc (loc, code, type,
7824 fold_convert_loc (loc,
7825 TREE_TYPE (op0), arg01));
7826 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7827 arg02 = fold_build1_loc (loc, code, type,
7828 fold_convert_loc (loc,
7829 TREE_TYPE (op0), arg02));
7830 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7831 arg01, arg02);
7833 /* If this was a conversion, and all we did was to move it
7834 inside the COND_EXPR, bring it back out. But leave it if
7835 it is a conversion from integer to integer and the
7836 result precision is no wider than a word since such a
7837 conversion is cheap and may be optimized away by combine,
7838 while it couldn't if it were outside the COND_EXPR. Then return
7839 so we don't get into an infinite recursion loop taking the
7840 conversion out and then back in. */
7842 if ((CONVERT_EXPR_CODE_P (code)
7843 || code == NON_LVALUE_EXPR)
7844 && TREE_CODE (tem) == COND_EXPR
7845 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7846 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7847 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7848 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7849 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7850 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7851 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7852 && (INTEGRAL_TYPE_P
7853 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7854 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7855 || flag_syntax_only))
7856 tem = build1_loc (loc, code, type,
7857 build3 (COND_EXPR,
7858 TREE_TYPE (TREE_OPERAND
7859 (TREE_OPERAND (tem, 1), 0)),
7860 TREE_OPERAND (tem, 0),
7861 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7862 TREE_OPERAND (TREE_OPERAND (tem, 2),
7863 0)));
7864 return tem;
7868 switch (code)
7870 case PAREN_EXPR:
7871 /* Re-association barriers around constants and other re-association
7872 barriers can be removed. */
7873 if (CONSTANT_CLASS_P (op0)
7874 || TREE_CODE (op0) == PAREN_EXPR)
7875 return fold_convert_loc (loc, type, op0);
7876 return NULL_TREE;
7878 CASE_CONVERT:
7879 case FLOAT_EXPR:
7880 case FIX_TRUNC_EXPR:
7881 if (TREE_TYPE (op0) == type)
7882 return op0;
7884 if (COMPARISON_CLASS_P (op0))
7886 /* If we have (type) (a CMP b) and type is an integral type, return
7887 new expression involving the new type. Canonicalize
7888 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7889 non-integral type.
7890 Do not fold the result, as that would not simplify further;
7891 folding again results in recursion. */
7892 if (TREE_CODE (type) == BOOLEAN_TYPE)
7893 return build2_loc (loc, TREE_CODE (op0), type,
7894 TREE_OPERAND (op0, 0),
7895 TREE_OPERAND (op0, 1));
7896 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7897 && TREE_CODE (type) != VECTOR_TYPE)
7898 return build3_loc (loc, COND_EXPR, type, op0,
7899 constant_boolean_node (true, type),
7900 constant_boolean_node (false, type));
7903 /* Handle cases of two conversions in a row. */
7904 if (CONVERT_EXPR_P (op0))
7906 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7907 tree inter_type = TREE_TYPE (op0);
7908 int inside_int = INTEGRAL_TYPE_P (inside_type);
7909 int inside_ptr = POINTER_TYPE_P (inside_type);
7910 int inside_float = FLOAT_TYPE_P (inside_type);
7911 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7912 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7913 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7914 int inter_int = INTEGRAL_TYPE_P (inter_type);
7915 int inter_ptr = POINTER_TYPE_P (inter_type);
7916 int inter_float = FLOAT_TYPE_P (inter_type);
7917 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7918 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7919 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7920 int final_int = INTEGRAL_TYPE_P (type);
7921 int final_ptr = POINTER_TYPE_P (type);
7922 int final_float = FLOAT_TYPE_P (type);
7923 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7924 unsigned int final_prec = TYPE_PRECISION (type);
7925 int final_unsignedp = TYPE_UNSIGNED (type);
7927 /* Check for cases specific to UPC, involving pointer types. */
7928 if (final_ptr || inter_ptr || inside_ptr)
7930 int final_pts = final_ptr
7931 && upc_shared_type_p (TREE_TYPE (type));
7932 int inter_pts = inter_ptr
7933 && upc_shared_type_p (TREE_TYPE (inter_type));
7934 int inside_pts = inside_ptr
7935 && upc_shared_type_p (TREE_TYPE (inside_type));
7936 if (final_pts || inter_pts || inside_pts)
7938 if (!((final_pts && inter_pts)
7939 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7940 || ((inter_pts && inside_pts)
7941 && (TREE_TYPE (inter_type)
7942 == TREE_TYPE (inside_type))))
7943 return NULL;
7947 /* In addition to the cases of two conversions in a row
7948 handled below, if we are converting something to its own
7949 type via an object of identical or wider precision, neither
7950 conversion is needed. */
7951 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7952 && (((inter_int || inter_ptr) && final_int)
7953 || (inter_float && final_float))
7954 && inter_prec >= final_prec)
7955 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7957 /* Likewise, if the intermediate and initial types are either both
7958 float or both integer, we don't need the middle conversion if the
7959 former is wider than the latter and doesn't change the signedness
7960 (for integers). Avoid this if the final type is a pointer since
7961 then we sometimes need the middle conversion. Likewise if the
7962 final type has a precision not equal to the size of its mode. */
7963 if (((inter_int && inside_int)
7964 || (inter_float && inside_float)
7965 || (inter_vec && inside_vec))
7966 && inter_prec >= inside_prec
7967 && (inter_float || inter_vec
7968 || inter_unsignedp == inside_unsignedp)
7969 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7970 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7971 && ! final_ptr
7972 && (! final_vec || inter_prec == inside_prec))
7973 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7975 /* If we have a sign-extension of a zero-extended value, we can
7976 replace that by a single zero-extension. Likewise if the
7977 final conversion does not change precision we can drop the
7978 intermediate conversion. */
7979 if (inside_int && inter_int && final_int
7980 && ((inside_prec < inter_prec && inter_prec < final_prec
7981 && inside_unsignedp && !inter_unsignedp)
7982 || final_prec == inter_prec))
7983 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7985 /* Two conversions in a row are not needed unless:
7986 - some conversion is floating-point (overstrict for now), or
7987 - some conversion is a vector (overstrict for now), or
7988 - the intermediate type is narrower than both initial and
7989 final, or
7990 - the intermediate type and innermost type differ in signedness,
7991 and the outermost type is wider than the intermediate, or
7992 - the initial type is a pointer type and the precisions of the
7993 intermediate and final types differ, or
7994 - the final type is a pointer type and the precisions of the
7995 initial and intermediate types differ. */
7996 if (! inside_float && ! inter_float && ! final_float
7997 && ! inside_vec && ! inter_vec && ! final_vec
7998 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7999 && ! (inside_int && inter_int
8000 && inter_unsignedp != inside_unsignedp
8001 && inter_prec < final_prec)
8002 && ((inter_unsignedp && inter_prec > inside_prec)
8003 == (final_unsignedp && final_prec > inter_prec))
8004 && ! (inside_ptr && inter_prec != final_prec)
8005 && ! (final_ptr && inside_prec != inter_prec)
8006 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8007 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8008 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
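/* Two worked examples of the rules above, assuming 8-bit char, 32-bit
   int and 64-bit long: for an unsigned char C, (long)(int)C folds to
   (long)C, because zero-extending to int and then sign-extending to
   long is a single zero-extension; and for a short S, (int)(unsigned)S
   folds to (int)S, because the final conversion does not change the
   precision.  */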
8011 /* Handle (T *)&A.B.C for A being of type T and B and C
8012 living at offset zero. This occurs frequently in
8013 C++ upcasting and then accessing the base. */
8014 if (TREE_CODE (op0) == ADDR_EXPR
8015 && POINTER_TYPE_P (type)
8016 && handled_component_p (TREE_OPERAND (op0, 0)))
8018 HOST_WIDE_INT bitsize, bitpos;
8019 tree offset;
8020 enum machine_mode mode;
8021 int unsignedp, volatilep;
8022 tree base = TREE_OPERAND (op0, 0);
8023 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8024 &mode, &unsignedp, &volatilep, false);
8025 /* If the reference was to a (constant) zero offset, we can use
8026 the address of the base if it has the same base type
8027 as the result type and the pointer type is unqualified. */
8028 if (! offset && bitpos == 0
8029 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8030 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8031 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8032 return fold_convert_loc (loc, type,
8033 build_fold_addr_expr_loc (loc, base));
8036 if (TREE_CODE (op0) == MODIFY_EXPR
8037 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8038 /* Detect assigning a bitfield. */
8039 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8040 && DECL_BIT_FIELD
8041 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8043 /* Don't leave an assignment inside a conversion
8044 unless assigning a bitfield. */
8045 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8046 /* First do the assignment, then return converted constant. */
8047 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8048 TREE_NO_WARNING (tem) = 1;
8049 TREE_USED (tem) = 1;
8050 return tem;
8053 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8054 constant (if x has signed type, the sign bit cannot be set
8055 in c). This folds extension into the BIT_AND_EXPR.
8056 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8057 very likely don't have maximal range for their precision and this
8058 transformation effectively doesn't preserve non-maximal ranges. */
8059 if (TREE_CODE (type) == INTEGER_TYPE
8060 && TREE_CODE (op0) == BIT_AND_EXPR
8061 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8063 tree and_expr = op0;
8064 tree and0 = TREE_OPERAND (and_expr, 0);
8065 tree and1 = TREE_OPERAND (and_expr, 1);
8066 int change = 0;
8068 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8069 || (TYPE_PRECISION (type)
8070 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8071 change = 1;
8072 else if (TYPE_PRECISION (TREE_TYPE (and1))
8073 <= HOST_BITS_PER_WIDE_INT
8074 && host_integerp (and1, 1))
8076 unsigned HOST_WIDE_INT cst;
8078 cst = tree_low_cst (and1, 1);
8079 cst &= (HOST_WIDE_INT) -1
8080 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8081 change = (cst == 0);
8082 #ifdef LOAD_EXTEND_OP
8083 if (change
8084 && !flag_syntax_only
8085 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8086 == ZERO_EXTEND))
8088 tree uns = unsigned_type_for (TREE_TYPE (and0));
8089 and0 = fold_convert_loc (loc, uns, and0);
8090 and1 = fold_convert_loc (loc, uns, and1);
8092 #endif
8094 if (change)
8096 tem = force_fit_type_double (type, tree_to_double_int (and1),
8097 0, TREE_OVERFLOW (and1));
8098 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8099 fold_convert_loc (loc, type, and0), tem);
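/* For example, for a signed char X, (unsigned int)(X & 0x7f) becomes
   (unsigned int)X & 0x7f: the mask clears the sign bit, so extending
   first cannot smear a sign into the upper bits.  */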
8103 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8104 when one of the new casts will fold away. Conservatively we assume
8105 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8106 if (POINTER_TYPE_P (type)
8107 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8108 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8109 && !upc_shared_type_p (TREE_TYPE (type))
8110 && !upc_shared_type_p (TREE_TYPE (
8111 TREE_TYPE (TREE_OPERAND (arg0, 0))))
8112 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8113 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8114 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8116 tree arg00 = TREE_OPERAND (arg0, 0);
8117 tree arg01 = TREE_OPERAND (arg0, 1);
8119 return fold_build_pointer_plus_loc
8120 (loc, fold_convert_loc (loc, type, arg00), arg01);
8123 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8124 of the same precision, and X is an integer type not narrower than
8125 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8126 if (INTEGRAL_TYPE_P (type)
8127 && TREE_CODE (op0) == BIT_NOT_EXPR
8128 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8129 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8130 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8132 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8133 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8134 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8135 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8136 fold_convert_loc (loc, type, tem));
8139 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8140 type of X and Y (integer types only). */
8141 if (INTEGRAL_TYPE_P (type)
8142 && TREE_CODE (op0) == MULT_EXPR
8143 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8144 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8146 /* Be careful not to introduce new overflows. */
8147 tree mult_type;
8148 if (TYPE_OVERFLOW_WRAPS (type))
8149 mult_type = type;
8150 else
8151 mult_type = unsigned_type_for (type);
8153 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8155 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8156 fold_convert_loc (loc, mult_type,
8157 TREE_OPERAND (op0, 0)),
8158 fold_convert_loc (loc, mult_type,
8159 TREE_OPERAND (op0, 1)));
8160 return fold_convert_loc (loc, type, tem);
8164 tem = fold_convert_const (code, type, op0);
8165 return tem ? tem : NULL_TREE;
8167 case ADDR_SPACE_CONVERT_EXPR:
8168 if (integer_zerop (arg0))
8169 return fold_convert_const (code, type, arg0);
8170 return NULL_TREE;
8172 case FIXED_CONVERT_EXPR:
8173 tem = fold_convert_const (code, type, arg0);
8174 return tem ? tem : NULL_TREE;
8176 case VIEW_CONVERT_EXPR:
8177 if (TREE_TYPE (op0) == type)
8178 return op0;
8179 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8180 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8181 type, TREE_OPERAND (op0, 0));
8182 if (TREE_CODE (op0) == MEM_REF)
8183 return fold_build2_loc (loc, MEM_REF, type,
8184 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8186 /* For integral conversions with the same precision or pointer
8187 conversions use a NOP_EXPR instead. */
8188 if ((INTEGRAL_TYPE_P (type)
8189 || (POINTER_TYPE_P (type)
8190 && !upc_shared_type_p (TREE_TYPE (type))))
8191 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8192 || (POINTER_TYPE_P (TREE_TYPE (op0))
8193 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8194 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8195 return fold_convert_loc (loc, type, op0);
8197 /* Strip inner integral conversions that do not change the precision. */
8198 if (CONVERT_EXPR_P (op0)
8199 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8200 || (POINTER_TYPE_P (TREE_TYPE (op0))
8201 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8202 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8203 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8204 && !upc_shared_type_p (TREE_TYPE (
8205 TREE_TYPE (
8206 TREE_OPERAND (op0, 0))))))
8207 && (TYPE_PRECISION (TREE_TYPE (op0))
8208 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8209 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8210 type, TREE_OPERAND (op0, 0));
8212 return fold_view_convert_expr (type, op0);
8214 case NEGATE_EXPR:
8215 tem = fold_negate_expr (loc, arg0);
8216 if (tem)
8217 return fold_convert_loc (loc, type, tem);
8218 return NULL_TREE;
8220 case ABS_EXPR:
8221 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8222 return fold_abs_const (arg0, type);
8223 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8224 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8225 /* Convert fabs((double)float) into (double)fabsf(float). */
8226 else if (TREE_CODE (arg0) == NOP_EXPR
8227 && TREE_CODE (type) == REAL_TYPE)
8229 tree targ0 = strip_float_extensions (arg0);
8230 if (targ0 != arg0)
8231 return fold_convert_loc (loc, type,
8232 fold_build1_loc (loc, ABS_EXPR,
8233 TREE_TYPE (targ0),
8234 targ0));
8236 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8237 else if (TREE_CODE (arg0) == ABS_EXPR)
8238 return arg0;
8239 else if (tree_expr_nonnegative_p (arg0))
8240 return arg0;
8242 /* Strip sign ops from argument. */
8243 if (TREE_CODE (type) == REAL_TYPE)
8245 tem = fold_strip_sign_ops (arg0);
8246 if (tem)
8247 return fold_build1_loc (loc, ABS_EXPR, type,
8248 fold_convert_loc (loc, type, tem));
8250 return NULL_TREE;
8252 case CONJ_EXPR:
8253 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8254 return fold_convert_loc (loc, type, arg0);
8255 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8257 tree itype = TREE_TYPE (type);
8258 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8259 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8260 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8261 negate_expr (ipart));
8263 if (TREE_CODE (arg0) == COMPLEX_CST)
8265 tree itype = TREE_TYPE (type);
8266 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8267 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8268 return build_complex (type, rpart, negate_expr (ipart));
8270 if (TREE_CODE (arg0) == CONJ_EXPR)
8271 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8272 return NULL_TREE;
8274 case BIT_NOT_EXPR:
8275 if (TREE_CODE (arg0) == INTEGER_CST)
8276 return fold_not_const (arg0, type);
8277 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8278 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8279 /* Convert ~ (-A) to A - 1. */
8280 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8281 return fold_build2_loc (loc, MINUS_EXPR, type,
8282 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8283 build_int_cst (type, 1));
8284 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8285 else if (INTEGRAL_TYPE_P (type)
8286 && ((TREE_CODE (arg0) == MINUS_EXPR
8287 && integer_onep (TREE_OPERAND (arg0, 1)))
8288 || (TREE_CODE (arg0) == PLUS_EXPR
8289 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8290 return fold_build1_loc (loc, NEGATE_EXPR, type,
8291 fold_convert_loc (loc, type,
8292 TREE_OPERAND (arg0, 0)));
8293 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8294 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8295 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8296 fold_convert_loc (loc, type,
8297 TREE_OPERAND (arg0, 0)))))
8298 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8299 fold_convert_loc (loc, type,
8300 TREE_OPERAND (arg0, 1)));
8301 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8302 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8303 fold_convert_loc (loc, type,
8304 TREE_OPERAND (arg0, 1)))))
8305 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8306 fold_convert_loc (loc, type,
8307 TREE_OPERAND (arg0, 0)), tem);
8308 /* Perform BIT_NOT_EXPR on each element individually. */
8309 else if (TREE_CODE (arg0) == VECTOR_CST)
8311 tree *elements;
8312 tree elem;
8313 unsigned count = VECTOR_CST_NELTS (arg0), i;
8315 elements = XALLOCAVEC (tree, count);
8316 for (i = 0; i < count; i++)
8318 elem = VECTOR_CST_ELT (arg0, i);
8319 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8320 if (elem == NULL_TREE)
8321 break;
8322 elements[i] = elem;
8324 if (i == count)
8325 return build_vector (type, elements);
8327 else if (COMPARISON_CLASS_P (arg0)
8328 && (VECTOR_TYPE_P (type)
8329 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8331 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8332 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8333 HONOR_NANS (TYPE_MODE (op_type)));
8334 if (subcode != ERROR_MARK)
8335 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8336 TREE_OPERAND (arg0, 1));
8340 return NULL_TREE;
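/* Worked examples of the BIT_NOT_EXPR rules above, all instances of
   the two's-complement identity ~a == -a - 1: ~(-x) folds to x - 1,
   ~(x - 1) folds to -x, and ~(x ^ ~y) folds to x ^ y because ~~y
   simplifies.  */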
8342 case TRUTH_NOT_EXPR:
8343 /* Note that the operand of this must be an int
8344 and its value must be 0 or 1.
8345 ("true" is a fixed value perhaps depending on the language,
8346 but we don't handle values other than 1 correctly yet.) */
8347 tem = fold_truth_not_expr (loc, arg0);
8348 if (!tem)
8349 return NULL_TREE;
8350 return fold_convert_loc (loc, type, tem);
8352 case REALPART_EXPR:
8353 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8354 return fold_convert_loc (loc, type, arg0);
8355 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8356 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8357 TREE_OPERAND (arg0, 1));
8358 if (TREE_CODE (arg0) == COMPLEX_CST)
8359 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8360 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8362 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8363 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8364 fold_build1_loc (loc, REALPART_EXPR, itype,
8365 TREE_OPERAND (arg0, 0)),
8366 fold_build1_loc (loc, REALPART_EXPR, itype,
8367 TREE_OPERAND (arg0, 1)));
8368 return fold_convert_loc (loc, type, tem);
8370 if (TREE_CODE (arg0) == CONJ_EXPR)
8372 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8373 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8374 TREE_OPERAND (arg0, 0));
8375 return fold_convert_loc (loc, type, tem);
8377 if (TREE_CODE (arg0) == CALL_EXPR)
8379 tree fn = get_callee_fndecl (arg0);
8380 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8381 switch (DECL_FUNCTION_CODE (fn))
8383 CASE_FLT_FN (BUILT_IN_CEXPI):
8384 fn = mathfn_built_in (type, BUILT_IN_COS);
8385 if (fn)
8386 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8387 break;
8389 default:
8390 break;
8393 return NULL_TREE;
8395 case IMAGPART_EXPR:
8396 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8397 return build_zero_cst (type);
8398 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8399 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8400 TREE_OPERAND (arg0, 0));
8401 if (TREE_CODE (arg0) == COMPLEX_CST)
8402 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8403 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8405 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8406 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8407 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8408 TREE_OPERAND (arg0, 0)),
8409 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8410 TREE_OPERAND (arg0, 1)));
8411 return fold_convert_loc (loc, type, tem);
8413 if (TREE_CODE (arg0) == CONJ_EXPR)
8415 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8416 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8417 return fold_convert_loc (loc, type, negate_expr (tem));
8419 if (TREE_CODE (arg0) == CALL_EXPR)
8421 tree fn = get_callee_fndecl (arg0);
8422 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8423 switch (DECL_FUNCTION_CODE (fn))
8425 CASE_FLT_FN (BUILT_IN_CEXPI):
8426 fn = mathfn_built_in (type, BUILT_IN_SIN);
8427 if (fn)
8428 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8429 break;
8431 default:
8432 break;
8435 return NULL_TREE;
8437 case INDIRECT_REF:
8438 /* Fold *&X to X if X is an lvalue. */
8439 if (TREE_CODE (op0) == ADDR_EXPR)
8441 tree op00 = TREE_OPERAND (op0, 0);
8442 if ((TREE_CODE (op00) == VAR_DECL
8443 || TREE_CODE (op00) == PARM_DECL
8444 || TREE_CODE (op00) == RESULT_DECL)
8445 && !TREE_READONLY (op00))
8446 return op00;
8448 return NULL_TREE;
8450 case VEC_UNPACK_LO_EXPR:
8451 case VEC_UNPACK_HI_EXPR:
8452 case VEC_UNPACK_FLOAT_LO_EXPR:
8453 case VEC_UNPACK_FLOAT_HI_EXPR:
8455 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8456 tree *elts;
8457 enum tree_code subcode;
8459 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8460 if (TREE_CODE (arg0) != VECTOR_CST)
8461 return NULL_TREE;
8463 elts = XALLOCAVEC (tree, nelts * 2);
8464 if (!vec_cst_ctor_to_array (arg0, elts))
8465 return NULL_TREE;
8467 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8468 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8469 elts += nelts;
8471 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8472 subcode = NOP_EXPR;
8473 else
8474 subcode = FLOAT_EXPR;
8476 for (i = 0; i < nelts; i++)
8478 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8479 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8480 return NULL_TREE;
8483 return build_vector (type, elts);
8486 case REDUC_MIN_EXPR:
8487 case REDUC_MAX_EXPR:
8488 case REDUC_PLUS_EXPR:
8490 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8491 tree *elts;
8492 enum tree_code subcode;
8494 if (TREE_CODE (op0) != VECTOR_CST)
8495 return NULL_TREE;
8497 elts = XALLOCAVEC (tree, nelts);
8498 if (!vec_cst_ctor_to_array (op0, elts))
8499 return NULL_TREE;
8501 switch (code)
8503 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8504 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8505 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8506 default: gcc_unreachable ();
8509 for (i = 1; i < nelts; i++)
8511 elts[0] = const_binop (subcode, elts[0], elts[i]);
8512 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8513 return NULL_TREE;
8514 elts[i] = build_zero_cst (TREE_TYPE (type));
8517 return build_vector (type, elts);
8520 default:
8521 return NULL_TREE;
8522 } /* switch (code) */
8526 /* If the operation was a conversion, do _not_ mark a resulting constant
8527 with TREE_OVERFLOW if the original constant was not. These conversions
8528 have implementation defined behavior and retaining the TREE_OVERFLOW
8529 flag here would confuse later passes such as VRP. */
8530 tree
8531 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8532 tree type, tree op0)
8534 tree res = fold_unary_loc (loc, code, type, op0);
8535 if (res
8536 && TREE_CODE (res) == INTEGER_CST
8537 && TREE_CODE (op0) == INTEGER_CST
8538 && CONVERT_EXPR_CODE_P (code))
8539 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8541 return res;
8544 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8545 operands OP0 and OP1. LOC is the location of the resulting expression.
8546 ARG0 and ARG1 are the NOP-stripped (STRIP_NOPS) results of OP0 and OP1.
8547 Return the folded expression if folding is successful. Otherwise,
8548 return NULL_TREE. */
8549 static tree
8550 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8551 tree arg0, tree arg1, tree op0, tree op1)
8553 tree tem;
8555 /* We only do these simplifications if we are optimizing. */
8556 if (!optimize)
8557 return NULL_TREE;
8559 /* Check for things like (A || B) && (A || C). We can convert this
8560 to A || (B && C). Note that either operator can be any of the four
8561 truth and/or operations and the transformation will still be
8562 valid. Also note that we only care about order for the
8563 ANDIF and ORIF operators. If B contains side effects, this
8564 might change the truth-value of A. */
8565 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8566 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8567 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8568 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8569 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8570 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8572 tree a00 = TREE_OPERAND (arg0, 0);
8573 tree a01 = TREE_OPERAND (arg0, 1);
8574 tree a10 = TREE_OPERAND (arg1, 0);
8575 tree a11 = TREE_OPERAND (arg1, 1);
8576 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8577 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8578 && (code == TRUTH_AND_EXPR
8579 || code == TRUTH_OR_EXPR));
8581 if (operand_equal_p (a00, a10, 0))
8582 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8583 fold_build2_loc (loc, code, type, a01, a11));
8584 else if (commutative && operand_equal_p (a00, a11, 0))
8585 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8586 fold_build2_loc (loc, code, type, a01, a10));
8587 else if (commutative && operand_equal_p (a01, a10, 0))
8588 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8589 fold_build2_loc (loc, code, type, a00, a11));
8591 /* This case is tricky because we must either have commutative
8592 operators or else A10 must not have side-effects. */
8594 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8595 && operand_equal_p (a01, a11, 0))
8596 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8597 fold_build2_loc (loc, code, type, a00, a10),
8598 a01);
8601 /* See if we can build a range comparison. */
8602 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8603 return tem;
8605 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8606 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8608 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8609 if (tem)
8610 return fold_build2_loc (loc, code, type, tem, arg1);
8613 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8614 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8616 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8617 if (tem)
8618 return fold_build2_loc (loc, code, type, arg0, tem);
8621 /* Check for the possibility of merging component references. If our
8622 lhs is another similar operation, try to merge its rhs with our
8623 rhs. Then try to merge our lhs and rhs. */
8624 if (TREE_CODE (arg0) == code
8625 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8626 TREE_OPERAND (arg0, 1), arg1)))
8627 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8629 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8630 return tem;
8632 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8633 && (code == TRUTH_AND_EXPR
8634 || code == TRUTH_ANDIF_EXPR
8635 || code == TRUTH_OR_EXPR
8636 || code == TRUTH_ORIF_EXPR))
8638 enum tree_code ncode, icode;
8640 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8641 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8642 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8644 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8645 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8646 We don't want to pack more than two leaves into a non-IF AND/OR
8647 expression.
8648 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8649 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8650 If the inner right-hand side of the left-hand operand has
8651 side effects, or isn't simple, then we can't add to it,
8652 as otherwise we might destroy the if-sequence. */
8653 if (TREE_CODE (arg0) == icode
8654 && simple_operand_p_2 (arg1)
8655 /* Needed for sequence points, to handle trapping and
8656 side effects. */
8657 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8659 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8660 arg1);
8661 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8662 tem);
8664 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8665 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8666 else if (TREE_CODE (arg1) == icode
8667 && simple_operand_p_2 (arg0)
8668 /* Needed for sequence points, to handle trapping and
8669 side effects. */
8670 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8672 tem = fold_build2_loc (loc, ncode, type,
8673 arg0, TREE_OPERAND (arg1, 0));
8674 return fold_build2_loc (loc, icode, type, tem,
8675 TREE_OPERAND (arg1, 1));
8677 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8678 into (A OR B).
8679 For sequence point consistency, we need to check for trapping
8680 and side effects. */
8681 else if (code == icode && simple_operand_p_2 (arg0)
8682 && simple_operand_p_2 (arg1))
8683 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8686 return NULL_TREE;
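/* A worked example of the first transformation above: with
   side-effect-free A, B and C, (A || B) && (A || C) becomes
   A || (B && C), so A is tested only once.  */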
8689 /* Fold a binary expression of code CODE and type TYPE with operands
8690 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8691 Return the folded expression if folding is successful. Otherwise,
8692 return NULL_TREE. */
8694 static tree
8695 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8697 enum tree_code compl_code;
8699 if (code == MIN_EXPR)
8700 compl_code = MAX_EXPR;
8701 else if (code == MAX_EXPR)
8702 compl_code = MIN_EXPR;
8703 else
8704 gcc_unreachable ();
8706 /* MIN (MAX (a, b), b) == b. */
8707 if (TREE_CODE (op0) == compl_code
8708 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8709 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8711 /* MIN (MAX (b, a), b) == b. */
8712 if (TREE_CODE (op0) == compl_code
8713 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8714 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8715 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8717 /* MIN (a, MAX (a, b)) == a. */
8718 if (TREE_CODE (op1) == compl_code
8719 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8720 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8721 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8723 /* MIN (a, MAX (b, a)) == a. */
8724 if (TREE_CODE (op1) == compl_code
8725 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8726 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8727 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8729 return NULL_TREE;
8732 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8733 by changing CODE to reduce the magnitude of constants involved in
8734 ARG0 of the comparison.
8735 Returns a canonicalized comparison tree if a simplification was
8736 possible, otherwise returns NULL_TREE.
8737 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8738 valid if signed overflow is undefined. */
8740 static tree
8741 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8742 tree arg0, tree arg1,
8743 bool *strict_overflow_p)
8745 enum tree_code code0 = TREE_CODE (arg0);
8746 tree t, cst0 = NULL_TREE;
8747 int sgn0;
8748 bool swap = false;
8750 /* Match A +- CST code arg1 and CST code arg1. We can change the
8751 first form only if overflow is undefined. */
8752 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8753 /* In principle pointers also have undefined overflow behavior,
8754 but that causes problems elsewhere. */
8755 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8756 && (code0 == MINUS_EXPR
8757 || code0 == PLUS_EXPR)
8758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8759 || code0 == INTEGER_CST))
8760 return NULL_TREE;
8762 /* Identify the constant in arg0 and its sign. */
8763 if (code0 == INTEGER_CST)
8764 cst0 = arg0;
8765 else
8766 cst0 = TREE_OPERAND (arg0, 1);
8767 sgn0 = tree_int_cst_sgn (cst0);
8769 /* Overflowed constants and zero will cause problems. */
8770 if (integer_zerop (cst0)
8771 || TREE_OVERFLOW (cst0))
8772 return NULL_TREE;
8774 /* See if we can reduce the magnitude of the constant in
8775 arg0 by changing the comparison code. */
8776 if (code0 == INTEGER_CST)
8778 /* CST <= arg1 -> CST-1 < arg1. */
8779 if (code == LE_EXPR && sgn0 == 1)
8780 code = LT_EXPR;
8781 /* -CST < arg1 -> -CST-1 <= arg1. */
8782 else if (code == LT_EXPR && sgn0 == -1)
8783 code = LE_EXPR;
8784 /* CST > arg1 -> CST-1 >= arg1. */
8785 else if (code == GT_EXPR && sgn0 == 1)
8786 code = GE_EXPR;
8787 /* -CST >= arg1 -> -CST-1 > arg1. */
8788 else if (code == GE_EXPR && sgn0 == -1)
8789 code = GT_EXPR;
8790 else
8791 return NULL_TREE;
8792 /* arg1 code' CST' might be more canonical. */
8793 swap = true;
8795 else
8797 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8798 if (code == LT_EXPR
8799 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8800 code = LE_EXPR;
8801 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8802 else if (code == GT_EXPR
8803 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8804 code = GE_EXPR;
8805 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8806 else if (code == LE_EXPR
8807 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8808 code = LT_EXPR;
8809 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8810 else if (code == GE_EXPR
8811 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8812 code = GT_EXPR;
8813 else
8814 return NULL_TREE;
8815 *strict_overflow_p = true;
8818 /* Now build the constant reduced in magnitude. But not if that
8819 would produce one outside of its type's range. */
8820 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8821 && ((sgn0 == 1
8822 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8823 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8824 || (sgn0 == -1
8825 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8826 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8827 /* We cannot swap the comparison here as that would cause us to
8828 endlessly recurse. */
8829 return NULL_TREE;
8831 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8832 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8833 if (code0 != INTEGER_CST)
8834 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8835 t = fold_convert (TREE_TYPE (arg1), t);
8837 /* If swapping might yield a more canonical form, do so. */
8838 if (swap)
8839 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8840 else
8841 return fold_build2_loc (loc, code, type, t, arg1);
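/* For example, when signed overflow is undefined, x - 5 < y
   canonicalizes to x - 4 <= y, shrinking the constant's magnitude,
   and the sole-constant form 3 <= y becomes 2 < y and is then
   swapped into the more canonical y > 2.  */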
8844 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8845 overflow further. Try to decrease the magnitude of constants involved
8846 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8847 and put sole constants at the second argument position.
8848 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8850 static tree
8851 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8852 tree arg0, tree arg1)
8854 tree t;
8855 bool strict_overflow_p;
8856 const char * const warnmsg = G_("assuming signed overflow does not occur "
8857 "when reducing constant in comparison");
8859 /* Try canonicalization by simplifying arg0. */
8860 strict_overflow_p = false;
8861 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8862 &strict_overflow_p);
8863 if (t)
8865 if (strict_overflow_p)
8866 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8867 return t;
8870 /* Try canonicalization by simplifying arg1 using the swapped
8871 comparison. */
8872 code = swap_tree_comparison (code);
8873 strict_overflow_p = false;
8874 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8875 &strict_overflow_p);
8876 if (t && strict_overflow_p)
8877 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8878 return t;
8881 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8882 space. This is used to avoid issuing overflow warnings for
8883 expressions like &p->x which cannot wrap. */
8885 static bool
8886 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8888 double_int di_offset, total;
8890 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8891 return true;
8893 if (bitpos < 0)
8894 return true;
8896 if (offset == NULL_TREE)
8897 di_offset = double_int_zero;
8898 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8899 return true;
8900 else
8901 di_offset = TREE_INT_CST (offset);
8903 bool overflow;
8904 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8905 total = di_offset.add_with_sign (units, true, &overflow);
8906 if (overflow)
8907 return true;
8909 if (total.high != 0)
8910 return true;
8912 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8913 if (size <= 0)
8914 return true;
8916 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8917 array. */
8918 if (TREE_CODE (base) == ADDR_EXPR)
8920 HOST_WIDE_INT base_size;
8922 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8923 if (base_size > 0 && size < base_size)
8924 size = base_size;
8927 return total.low > (unsigned HOST_WIDE_INT) size;
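/* A worked example: with char (*p)[4], base P, a 2-byte offset and a
   BITPOS of 24 bits give total == 2 + 3 == 5, which exceeds the
   4-byte object size, so the address arithmetic is considered able
   to wrap and the caller will not suppress overflow warnings.  */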
8930 /* Subroutine of fold_binary. This routine performs all of the
8931 transformations that are common to the equality/inequality
8932 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8933 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8934 fold_binary should call fold_binary instead of this function. Fold a comparison with
8935 tree code CODE and type TYPE with operands OP0 and OP1. Return
8936 the folded comparison or NULL_TREE. */
8938 static tree
8939 fold_comparison (location_t loc, enum tree_code code, tree type,
8940 tree op0, tree op1)
8942 tree arg0, arg1, tem;
8944 arg0 = op0;
8945 arg1 = op1;
8947 STRIP_SIGN_NOPS (arg0);
8948 STRIP_SIGN_NOPS (arg1);
8950 tem = fold_relational_const (code, type, arg0, arg1);
8951 if (tem != NULL_TREE)
8952 return tem;
8954 /* If one arg is a real or integer constant, put it last. */
8955 if (tree_swap_operands_p (arg0, arg1, true))
8956 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8958 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8959 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8960 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8961 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8962 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8963 && (TREE_CODE (arg1) == INTEGER_CST
8964 && !TREE_OVERFLOW (arg1)))
8966 tree const1 = TREE_OPERAND (arg0, 1);
8967 tree const2 = arg1;
8968 tree variable = TREE_OPERAND (arg0, 0);
8969 tree lhs;
8970 int lhs_add;
8971 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8973 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8974 TREE_TYPE (arg1), const2, const1);
8976 /* If the constant operation overflowed, this can be
8977 simplified as a comparison against INT_MAX/INT_MIN. */
8978 if (TREE_CODE (lhs) == INTEGER_CST
8979 && TREE_OVERFLOW (lhs))
8981 int const1_sgn = tree_int_cst_sgn (const1);
8982 enum tree_code code2 = code;
8984 /* Get the sign of the constant on the lhs if the
8985 operation were VARIABLE + CONST1. */
8986 if (TREE_CODE (arg0) == MINUS_EXPR)
8987 const1_sgn = -const1_sgn;
8989 /* The sign of the constant determines if we overflowed
8990 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8991 Canonicalize to the INT_MIN overflow by swapping the comparison
8992 if necessary. */
8993 if (const1_sgn == -1)
8994 code2 = swap_tree_comparison (code);
8996 /* We can now look at the canonicalized case
8997 VARIABLE + 1 CODE2 INT_MIN
8998 and decide on the result. */
8999 if (code2 == LT_EXPR
9000 || code2 == LE_EXPR
9001 || code2 == EQ_EXPR)
9002 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9003 else if (code2 == NE_EXPR
9004 || code2 == GE_EXPR
9005 || code2 == GT_EXPR)
9006 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9009 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9010 && (TREE_CODE (lhs) != INTEGER_CST
9011 || !TREE_OVERFLOW (lhs)))
9013 if (code != EQ_EXPR && code != NE_EXPR)
9014 fold_overflow_warning ("assuming signed overflow does not occur "
9015 "when changing X +- C1 cmp C2 to "
9016 "X cmp C1 +- C2",
9017 WARN_STRICT_OVERFLOW_COMPARISON);
9018 return fold_build2_loc (loc, code, type, variable, lhs);
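/* Illustrative example (not part of GCC itself): with signed overflow
   undefined (-fstrict-overflow), the transformation above rewrites

     x + 10 < 20   into   x < 10

   and, when the combined constant overflows, folds the comparison
   outright: x + 1 < INT_MIN becomes constant false.  */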
9022 /* For comparisons of pointers we can decompose it to a compile time
9023 comparison of the base objects and the offsets into the object.
9024 This requires at least one operand being an ADDR_EXPR or a
9025 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9026 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9027 && (TREE_CODE (arg0) == ADDR_EXPR
9028 || TREE_CODE (arg1) == ADDR_EXPR
9029 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9030 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9032 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9033 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9034 enum machine_mode mode;
9035 int volatilep, unsignedp;
9036 bool indirect_base0 = false, indirect_base1 = false;
9038 /* Get base and offset for the access. Strip ADDR_EXPR for
9039 get_inner_reference, but put it back by stripping INDIRECT_REF
9040 off the base object if possible. indirect_baseN will be true
9041 if baseN is not an address but refers to the object itself. */
9042 base0 = arg0;
9043 if (TREE_CODE (arg0) == ADDR_EXPR)
9045 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9046 &bitsize, &bitpos0, &offset0, &mode,
9047 &unsignedp, &volatilep, false);
9048 if (TREE_CODE (base0) == INDIRECT_REF)
9049 base0 = TREE_OPERAND (base0, 0);
9050 else
9051 indirect_base0 = true;
9053 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9055 base0 = TREE_OPERAND (arg0, 0);
9056 STRIP_SIGN_NOPS (base0);
9057 if (TREE_CODE (base0) == ADDR_EXPR)
9059 base0 = TREE_OPERAND (base0, 0);
9060 indirect_base0 = true;
9062 offset0 = TREE_OPERAND (arg0, 1);
9063 if (host_integerp (offset0, 0))
9065 HOST_WIDE_INT off = size_low_cst (offset0);
9066 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9067 * BITS_PER_UNIT)
9068 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9070 bitpos0 = off * BITS_PER_UNIT;
9071 offset0 = NULL_TREE;
9076 base1 = arg1;
9077 if (TREE_CODE (arg1) == ADDR_EXPR)
9079 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9080 &bitsize, &bitpos1, &offset1, &mode,
9081 &unsignedp, &volatilep, false);
9082 if (TREE_CODE (base1) == INDIRECT_REF)
9083 base1 = TREE_OPERAND (base1, 0);
9084 else
9085 indirect_base1 = true;
9087 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9089 base1 = TREE_OPERAND (arg1, 0);
9090 STRIP_SIGN_NOPS (base1);
9091 if (TREE_CODE (base1) == ADDR_EXPR)
9093 base1 = TREE_OPERAND (base1, 0);
9094 indirect_base1 = true;
9096 offset1 = TREE_OPERAND (arg1, 1);
9097 if (host_integerp (offset1, 0))
9099 HOST_WIDE_INT off = size_low_cst (offset1);
9100 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9101 * BITS_PER_UNIT)
9102 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9104 bitpos1 = off * BITS_PER_UNIT;
9105 offset1 = NULL_TREE;
9110 /* A local variable can never be pointed to by
9111 the default SSA name of an incoming parameter. */
9112 if ((TREE_CODE (arg0) == ADDR_EXPR
9113 && indirect_base0
9114 && TREE_CODE (base0) == VAR_DECL
9115 && auto_var_in_fn_p (base0, current_function_decl)
9116 && !indirect_base1
9117 && TREE_CODE (base1) == SSA_NAME
9118 && SSA_NAME_IS_DEFAULT_DEF (base1)
9119 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9120 || (TREE_CODE (arg1) == ADDR_EXPR
9121 && indirect_base1
9122 && TREE_CODE (base1) == VAR_DECL
9123 && auto_var_in_fn_p (base1, current_function_decl)
9124 && !indirect_base0
9125 && TREE_CODE (base0) == SSA_NAME
9126 && SSA_NAME_IS_DEFAULT_DEF (base0)
9127 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9129 if (code == NE_EXPR)
9130 return constant_boolean_node (1, type);
9131 else if (code == EQ_EXPR)
9132 return constant_boolean_node (0, type);
9134 /* If we have equivalent bases we might be able to simplify. */
9135 else if (indirect_base0 == indirect_base1
9136 && operand_equal_p (base0, base1, 0))
9138 /* We can fold this expression to a constant if the non-constant
9139 offset parts are equal. */
9140 if ((offset0 == offset1
9141 || (offset0 && offset1
9142 && operand_equal_p (offset0, offset1, 0)))
9143 && (code == EQ_EXPR
9144 || code == NE_EXPR
9145 || (indirect_base0 && DECL_P (base0))
9146 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9149 if (code != EQ_EXPR
9150 && code != NE_EXPR
9151 && bitpos0 != bitpos1
9152 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9153 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9154 fold_overflow_warning (("assuming pointer wraparound does not "
9155 "occur when comparing P +- C1 with "
9156 "P +- C2"),
9157 WARN_STRICT_OVERFLOW_CONDITIONAL);
9159 switch (code)
9161 case EQ_EXPR:
9162 return constant_boolean_node (bitpos0 == bitpos1, type);
9163 case NE_EXPR:
9164 return constant_boolean_node (bitpos0 != bitpos1, type);
9165 case LT_EXPR:
9166 return constant_boolean_node (bitpos0 < bitpos1, type);
9167 case LE_EXPR:
9168 return constant_boolean_node (bitpos0 <= bitpos1, type);
9169 case GE_EXPR:
9170 return constant_boolean_node (bitpos0 >= bitpos1, type);
9171 case GT_EXPR:
9172 return constant_boolean_node (bitpos0 > bitpos1, type);
9173 default:;
9176 /* We can simplify the comparison to a comparison of the variable
9177 offset parts if the constant offset parts are equal.
9178 Be careful to use signed sizetype here because otherwise we
9179 mess with array offsets in the wrong way. This is possible
9180 because pointer arithmetic is required to remain within an
9181 object, and overflow on pointer differences is undefined per
9182 C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9183 else if (bitpos0 == bitpos1
9184 && ((code == EQ_EXPR || code == NE_EXPR)
9185 || (indirect_base0 && DECL_P (base0))
9186 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9188 /* By converting to signed sizetype we cover middle-end pointer
9189 arithmetic which operates on unsigned pointer types of size
9190 type size and ARRAY_REF offsets which are properly sign or
9191 zero extended from their type in case it is narrower than
9192 sizetype. */
9193 if (offset0 == NULL_TREE)
9194 offset0 = build_int_cst (ssizetype, 0);
9195 else
9196 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9197 if (offset1 == NULL_TREE)
9198 offset1 = build_int_cst (ssizetype, 0);
9199 else
9200 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9202 if (code != EQ_EXPR
9203 && code != NE_EXPR
9204 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9205 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9206 fold_overflow_warning (("assuming pointer wraparound does not "
9207 "occur when comparing P +- C1 with "
9208 "P +- C2"),
9209 WARN_STRICT_OVERFLOW_COMPARISON);
9211 return fold_build2_loc (loc, code, type, offset0, offset1);
9214 /* For non-equal bases we can simplify if they are addresses
9215 of local binding decls or constants. */
9216 else if (indirect_base0 && indirect_base1
9217 /* We know that !operand_equal_p (base0, base1, 0)
9218 because the if condition was false. But make
9219 sure two decls are not the same. */
9220 && base0 != base1
9221 && TREE_CODE (arg0) == ADDR_EXPR
9222 && TREE_CODE (arg1) == ADDR_EXPR
9223 && (((TREE_CODE (base0) == VAR_DECL
9224 || TREE_CODE (base0) == PARM_DECL)
9225 && (targetm.binds_local_p (base0)
9226 || CONSTANT_CLASS_P (base1)))
9227 || CONSTANT_CLASS_P (base0))
9228 && (((TREE_CODE (base1) == VAR_DECL
9229 || TREE_CODE (base1) == PARM_DECL)
9230 && (targetm.binds_local_p (base1)
9231 || CONSTANT_CLASS_P (base0)))
9232 || CONSTANT_CLASS_P (base1)))
9234 if (code == EQ_EXPR)
9235 return omit_two_operands_loc (loc, type, boolean_false_node,
9236 arg0, arg1);
9237 else if (code == NE_EXPR)
9238 return omit_two_operands_loc (loc, type, boolean_true_node,
9239 arg0, arg1);
9241 /* For equal offsets we can simplify to a comparison of the
9242 base addresses. */
9243 else if (bitpos0 == bitpos1
9244 && (indirect_base0
9245 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9246 && (indirect_base1
9247 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9248 && ((offset0 == offset1)
9249 || (offset0 && offset1
9250 && operand_equal_p (offset0, offset1, 0))))
9252 if (indirect_base0)
9253 base0 = build_fold_addr_expr_loc (loc, base0);
9254 if (indirect_base1)
9255 base1 = build_fold_addr_expr_loc (loc, base1);
9256 return fold_build2_loc (loc, code, type, base0, base1);
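/* Illustrative examples (not from the sources): given "int a[8];" on a
   target with 32-bit int, &a[1] == &a[3] decomposes into equal bases
   with bit positions 32 and 96, so it folds to false, and &a[1] < &a[3]
   folds to true.  Comparisons of the addresses of distinct local
   variables, such as &x == &y, fold to false outright.  */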
9260 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9261 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9262 the resulting offset is smaller in absolute value than the
9263 original one. */
9264 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9265 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9266 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9267 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9268 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9269 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9270 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9272 tree const1 = TREE_OPERAND (arg0, 1);
9273 tree const2 = TREE_OPERAND (arg1, 1);
9274 tree variable1 = TREE_OPERAND (arg0, 0);
9275 tree variable2 = TREE_OPERAND (arg1, 0);
9276 tree cst;
9277 const char * const warnmsg = G_("assuming signed overflow does not "
9278 "occur when combining constants around "
9279 "a comparison");
9281 /* Put the constant on the side where it doesn't overflow and is
9282 of lower absolute value than before. */
9283 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9284 ? MINUS_EXPR : PLUS_EXPR,
9285 const2, const1);
9286 if (!TREE_OVERFLOW (cst)
9287 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9289 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9290 return fold_build2_loc (loc, code, type,
9291 variable1,
9292 fold_build2_loc (loc,
9293 TREE_CODE (arg1), TREE_TYPE (arg1),
9294 variable2, cst));
9297 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9298 ? MINUS_EXPR : PLUS_EXPR,
9299 const1, const2);
9300 if (!TREE_OVERFLOW (cst)
9301 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9303 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9304 return fold_build2_loc (loc, code, type,
9305 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9306 variable1, cst),
9307 variable2);
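/* Illustrative example (assuming signed overflow is undefined): the
   constants are combined on the side where the result is smaller in
   magnitude, e.g.

     x + 1 < y + 9   becomes   x < y + 8.  */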
9311 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9312 signed arithmetic case. That form is created by the compiler
9313 often enough for folding it to be of value. One example is in
9314 computing loop trip counts after Operator Strength Reduction. */
9315 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9316 && TREE_CODE (arg0) == MULT_EXPR
9317 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9318 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9319 && integer_zerop (arg1))
9321 tree const1 = TREE_OPERAND (arg0, 1);
9322 tree const2 = arg1; /* zero */
9323 tree variable1 = TREE_OPERAND (arg0, 0);
9324 enum tree_code cmp_code = code;
9326 /* Handle unfolded multiplication by zero. */
9327 if (integer_zerop (const1))
9328 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9330 fold_overflow_warning (("assuming signed overflow does not occur when "
9331 "eliminating multiplication in comparison "
9332 "with zero"),
9333 WARN_STRICT_OVERFLOW_COMPARISON);
9335 /* If const1 is negative we swap the sense of the comparison. */
9336 if (tree_int_cst_sgn (const1) < 0)
9337 cmp_code = swap_tree_comparison (cmp_code);
9339 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
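/* Illustrative examples (not from the sources): with undefined signed
   overflow,

     x * 4 > 0    folds to   x > 0
     x * -2 > 0   folds to   x < 0

   where the second case swaps the comparison because the multiplier
   is negative.  */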
9342 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9343 if (tem)
9344 return tem;
9346 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9348 tree targ0 = strip_float_extensions (arg0);
9349 tree targ1 = strip_float_extensions (arg1);
9350 tree newtype = TREE_TYPE (targ0);
9352 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9353 newtype = TREE_TYPE (targ1);
9355 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9356 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9357 return fold_build2_loc (loc, code, type,
9358 fold_convert_loc (loc, newtype, targ0),
9359 fold_convert_loc (loc, newtype, targ1));
9361 /* (-a) CMP (-b) -> b CMP a */
9362 if (TREE_CODE (arg0) == NEGATE_EXPR
9363 && TREE_CODE (arg1) == NEGATE_EXPR)
9364 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9365 TREE_OPERAND (arg0, 0));
9367 if (TREE_CODE (arg1) == REAL_CST)
9369 REAL_VALUE_TYPE cst;
9370 cst = TREE_REAL_CST (arg1);
9372 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9373 if (TREE_CODE (arg0) == NEGATE_EXPR)
9374 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9375 TREE_OPERAND (arg0, 0),
9376 build_real (TREE_TYPE (arg1),
9377 real_value_negate (&cst)));
9379 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9380 /* a CMP (-0) -> a CMP 0 */
9381 if (REAL_VALUE_MINUS_ZERO (cst))
9382 return fold_build2_loc (loc, code, type, arg0,
9383 build_real (TREE_TYPE (arg1), dconst0));
9385 /* x != NaN is always true, other ops are always false. */
9386 if (REAL_VALUE_ISNAN (cst)
9387 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9389 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9390 return omit_one_operand_loc (loc, type, tem, arg0);
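/* For example (illustrative): if C is a quiet NaN constant, x != C
   folds to true, while x == C, x < C, x <= C and the other ordered
   comparisons fold to false, since no value compares ordered with a
   NaN.  */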
9393 /* Fold comparisons against infinity. */
9394 if (REAL_VALUE_ISINF (cst)
9395 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9397 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9398 if (tem != NULL_TREE)
9399 return tem;
9403 /* If this is a comparison of a real constant with a PLUS_EXPR
9404 or a MINUS_EXPR of a real constant, we can convert it into a
9405 comparison with a revised real constant as long as no overflow
9406 occurs when unsafe_math_optimizations are enabled. */
9407 if (flag_unsafe_math_optimizations
9408 && TREE_CODE (arg1) == REAL_CST
9409 && (TREE_CODE (arg0) == PLUS_EXPR
9410 || TREE_CODE (arg0) == MINUS_EXPR)
9411 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9412 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9413 ? MINUS_EXPR : PLUS_EXPR,
9414 arg1, TREE_OPERAND (arg0, 1)))
9415 && !TREE_OVERFLOW (tem))
9416 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9418 /* Likewise, we can simplify a comparison of a real constant with
9419 a MINUS_EXPR whose first operand is also a real constant, i.e.
9420 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9421 floating-point types only if -fassociative-math is set. */
9422 if (flag_associative_math
9423 && TREE_CODE (arg1) == REAL_CST
9424 && TREE_CODE (arg0) == MINUS_EXPR
9425 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9426 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9427 arg1))
9428 && !TREE_OVERFLOW (tem))
9429 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9430 TREE_OPERAND (arg0, 1), tem);
9432 /* Fold comparisons against built-in math functions. */
9433 if (TREE_CODE (arg1) == REAL_CST
9434 && flag_unsafe_math_optimizations
9435 && ! flag_errno_math)
9437 enum built_in_function fcode = builtin_mathfn_code (arg0);
9439 if (fcode != END_BUILTINS)
9441 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9442 if (tem != NULL_TREE)
9443 return tem;
9448 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9449 && CONVERT_EXPR_P (arg0))
9451 /* If we are widening one operand of an integer comparison,
9452 see if the other operand is similarly being widened. Perhaps we
9453 can do the comparison in the narrower type. */
9454 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9455 if (tem)
9456 return tem;
9458 /* Or if we are changing signedness. */
9459 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9460 if (tem)
9461 return tem;
9464 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9465 constant, we can simplify it. */
9466 if (TREE_CODE (arg1) == INTEGER_CST
9467 && (TREE_CODE (arg0) == MIN_EXPR
9468 || TREE_CODE (arg0) == MAX_EXPR)
9469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9471 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9472 if (tem)
9473 return tem;
9476 /* Simplify comparison of something with itself. (For IEEE
9477 floating-point, we can only do some of these simplifications.) */
9478 if (operand_equal_p (arg0, arg1, 0))
9480 switch (code)
9482 case EQ_EXPR:
9483 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9484 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9485 return constant_boolean_node (1, type);
9486 break;
9488 case GE_EXPR:
9489 case LE_EXPR:
9490 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9491 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9492 return constant_boolean_node (1, type);
9493 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9495 case NE_EXPR:
9496 /* For NE, we can only do this simplification if integer
9497 or we don't honor IEEE floating point NaNs. */
9498 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9499 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9500 break;
9501 /* ... fall through ... */
9502 case GT_EXPR:
9503 case LT_EXPR:
9504 return constant_boolean_node (0, type);
9505 default:
9506 gcc_unreachable ();
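/* Illustrative examples (not from the sources): for integral x,
   x == x, x <= x and x >= x all fold to true, and x < x folds to
   false.  For IEEE floating point with NaNs honored, x >= x instead
   folds to x == x, which is false exactly when x is a NaN.  */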
9510 /* If we are comparing an expression that just has comparisons
9511 of two integer values, arithmetic expressions of those comparisons,
9512 and constants, we can simplify it. There are only three cases
9513 to check: the two values can either be equal, the first can be
9514 greater, or the second can be greater. Fold the expression for
9515 those three values. Since each value must be 0 or 1, we have
9516 eight possibilities, each of which corresponds to the constant 0
9517 or 1 or one of the six possible comparisons.
9519 This handles common cases like (a > b) == 0 but also handles
9520 expressions like ((x > y) - (y > x)) > 0, which supposedly
9521 occur in macroized code. */
9523 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9525 tree cval1 = 0, cval2 = 0;
9526 int save_p = 0;
9528 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9529 /* Don't handle degenerate cases here; they should already
9530 have been handled anyway. */
9531 && cval1 != 0 && cval2 != 0
9532 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9533 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9534 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9535 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9536 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9537 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9538 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9540 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9541 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9543 /* We can't just pass T to eval_subst in case cval1 or cval2
9544 was the same as ARG1. */
9546 tree high_result
9547 = fold_build2_loc (loc, code, type,
9548 eval_subst (loc, arg0, cval1, maxval,
9549 cval2, minval),
9550 arg1);
9551 tree equal_result
9552 = fold_build2_loc (loc, code, type,
9553 eval_subst (loc, arg0, cval1, maxval,
9554 cval2, maxval),
9555 arg1);
9556 tree low_result
9557 = fold_build2_loc (loc, code, type,
9558 eval_subst (loc, arg0, cval1, minval,
9559 cval2, maxval),
9560 arg1);
9562 /* All three of these results should be 0 or 1. Confirm they are.
9563 Then use those values to select the proper code to use. */
9565 if (TREE_CODE (high_result) == INTEGER_CST
9566 && TREE_CODE (equal_result) == INTEGER_CST
9567 && TREE_CODE (low_result) == INTEGER_CST)
9569 /* Make a 3-bit mask with the high-order bit being the
9570 value for `>', the next for '=', and the low for '<'. */
9571 switch ((integer_onep (high_result) * 4)
9572 + (integer_onep (equal_result) * 2)
9573 + integer_onep (low_result))
9575 case 0:
9576 /* Always false. */
9577 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9578 case 1:
9579 code = LT_EXPR;
9580 break;
9581 case 2:
9582 code = EQ_EXPR;
9583 break;
9584 case 3:
9585 code = LE_EXPR;
9586 break;
9587 case 4:
9588 code = GT_EXPR;
9589 break;
9590 case 5:
9591 code = NE_EXPR;
9592 break;
9593 case 6:
9594 code = GE_EXPR;
9595 break;
9596 case 7:
9597 /* Always true. */
9598 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9601 if (save_p)
9603 tem = save_expr (build2 (code, type, cval1, cval2));
9604 SET_EXPR_LOCATION (tem, loc);
9605 return tem;
9607 return fold_build2_loc (loc, code, type, cval1, cval2);
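/* Illustrative example (not from the sources): for (a > b) == 0,
   substituting the three possible orderings of a and b yields the
   result vector {0, 1, 1} for greater/equal/less, i.e. mask 3, so the
   whole expression folds to a <= b.  */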
9612 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9613 into a single range test. */
9614 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9615 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9616 && TREE_CODE (arg1) == INTEGER_CST
9617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9618 && !integer_zerop (TREE_OPERAND (arg0, 1))
9619 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9620 && !TREE_OVERFLOW (arg1))
9622 tem = fold_div_compare (loc, code, type, arg0, arg1);
9623 if (tem != NULL_TREE)
9624 return tem;
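/* Illustrative sketch (not from the sources): for unsigned x,
   fold_div_compare can turn x / 4 == 2 into a range test equivalent
   to 8 <= x && x <= 11, since exactly those values divide to 2.  */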
9627 /* Fold ~X op ~Y as Y op X. */
9628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9629 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9631 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9632 return fold_build2_loc (loc, code, type,
9633 fold_convert_loc (loc, cmp_type,
9634 TREE_OPERAND (arg1, 0)),
9635 TREE_OPERAND (arg0, 0));
9638 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9639 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9640 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9642 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9643 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9644 TREE_OPERAND (arg0, 0),
9645 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9646 fold_convert_loc (loc, cmp_type, arg1)));
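/* Illustrative examples: ~x < ~y rewrites to y < x, and ~x < 5
   rewrites to x > ~5, i.e. x > -6, both following directly from
   ~x == -x - 1 on two's-complement integers.  */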
9649 return NULL_TREE;
9653 /* Subroutine of fold_binary. Optimize complex multiplications of the
9654 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9655 argument EXPR represents the expression "z" of type TYPE. */
9657 static tree
9658 fold_mult_zconjz (location_t loc, tree type, tree expr)
9660 tree itype = TREE_TYPE (type);
9661 tree rpart, ipart, tem;
9663 if (TREE_CODE (expr) == COMPLEX_EXPR)
9665 rpart = TREE_OPERAND (expr, 0);
9666 ipart = TREE_OPERAND (expr, 1);
9668 else if (TREE_CODE (expr) == COMPLEX_CST)
9670 rpart = TREE_REALPART (expr);
9671 ipart = TREE_IMAGPART (expr);
9673 else
9675 expr = save_expr (expr);
9676 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9677 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9680 rpart = save_expr (rpart);
9681 ipart = save_expr (ipart);
9682 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9683 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9684 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9685 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9686 build_zero_cst (itype));
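/* In other words, for z = a + b*i this computes
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is why the imaginary part of the result is simply zero.  */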
9690 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9691 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9692 guarantees that P and N have the same least significant log2(M) bits.
9693 N is not otherwise constrained. In particular, N is not normalized to
9694 0 <= N < M as is common. In general, the precise value of P is unknown.
9695 M is chosen as large as possible such that constant N can be determined.
9697 Returns M and sets *RESIDUE to N.
9699 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9700 account. This is not always possible due to PR 35705. */
9703 static unsigned HOST_WIDE_INT
9704 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9705 bool allow_func_align)
9707 enum tree_code code;
9709 *residue = 0;
9711 code = TREE_CODE (expr);
9712 if (code == ADDR_EXPR)
9714 unsigned int bitalign;
9715 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9716 *residue /= BITS_PER_UNIT;
9717 return bitalign / BITS_PER_UNIT;
9719 else if (code == POINTER_PLUS_EXPR)
9721 tree op0, op1;
9722 unsigned HOST_WIDE_INT modulus;
9723 enum tree_code inner_code;
9725 op0 = TREE_OPERAND (expr, 0);
9726 STRIP_NOPS (op0);
9727 modulus = get_pointer_modulus_and_residue (op0, residue,
9728 allow_func_align);
9730 op1 = TREE_OPERAND (expr, 1);
9731 STRIP_NOPS (op1);
9732 inner_code = TREE_CODE (op1);
9733 if (inner_code == INTEGER_CST)
9735 *residue += TREE_INT_CST_LOW (op1);
9736 return modulus;
9738 else if (inner_code == MULT_EXPR)
9740 op1 = TREE_OPERAND (op1, 1);
9741 if (TREE_CODE (op1) == INTEGER_CST)
9743 unsigned HOST_WIDE_INT align;
9745 /* Compute the greatest power-of-2 divisor of op1. */
9746 align = TREE_INT_CST_LOW (op1);
9747 align &= -align;
9749 /* If align is non-zero and less than modulus, replace
9750 modulus with align. If align is 0, then either op1 is 0
9751 or the greatest power-of-2 divisor of op1 doesn't fit in an
9752 unsigned HOST_WIDE_INT. In either case, no additional
9753 constraint is imposed. */
9754 if (align)
9755 modulus = MIN (modulus, align);
9757 return modulus;
9762 /* If we get here, we were unable to determine anything useful about the
9763 expression. */
9764 return 1;
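/* A worked example (illustrative, with assumed alignments): if EXPR is

     (&buf +p i * 4) +p 6

   where buf is known to be 8-byte aligned, the recursion yields
   modulus 8 for &buf, the MULT_EXPR clamps it to MIN (8, 4) == 4, and
   the constant 6 accumulates into the residue.  The result is M == 4
   and N == 6, i.e. the pointer value is congruent to 6 (hence 2)
   modulo 4.  */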
9767 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9768 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9770 static bool
9771 vec_cst_ctor_to_array (tree arg, tree *elts)
9773 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9775 if (TREE_CODE (arg) == VECTOR_CST)
9777 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9778 elts[i] = VECTOR_CST_ELT (arg, i);
9780 else if (TREE_CODE (arg) == CONSTRUCTOR)
9782 constructor_elt *elt;
9784 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9785 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9786 return false;
9787 else
9788 elts[i] = elt->value;
9790 else
9791 return false;
9792 for (; i < nelts; i++)
9793 elts[i]
9794 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9795 return true;
9798 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9799 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9800 NULL_TREE otherwise. */
9802 static tree
9803 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9805 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9806 tree *elts;
9807 bool need_ctor = false;
9809 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9810 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9811 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9812 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9813 return NULL_TREE;
9815 elts = XALLOCAVEC (tree, nelts * 3);
9816 if (!vec_cst_ctor_to_array (arg0, elts)
9817 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9818 return NULL_TREE;
9820 for (i = 0; i < nelts; i++)
9822 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9823 need_ctor = true;
9824 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9827 if (need_ctor)
9829 vec<constructor_elt, va_gc> *v;
9830 vec_alloc (v, nelts);
9831 for (i = 0; i < nelts; i++)
9832 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9833 return build_constructor (type, v);
9835 else
9836 return build_vector (type, &elts[2 * nelts]);
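/* Illustrative example (not from the sources): with
   ARG0 = {1, 2, 3, 4}, ARG1 = {5, 6, 7, 8} and SEL = {0, 4, 1, 5},
   the selector indexes the concatenation of ARG0 and ARG1, so the
   folded result is the vector {1, 5, 2, 6}.  */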
9839 /* Try to fold a pointer difference of type TYPE between two address
9840 expressions of array references AREF0 and AREF1 using location LOC. Return a
9841 simplified expression for the difference or NULL_TREE. */
9843 static tree
9844 fold_addr_of_array_ref_difference (location_t loc, tree type,
9845 tree aref0, tree aref1)
9847 tree base0 = TREE_OPERAND (aref0, 0);
9848 tree base1 = TREE_OPERAND (aref1, 0);
9849 tree base_offset = build_int_cst (type, 0);
9851 /* If the bases are array references as well, recurse. If the bases
9852 are pointer indirections, compute the difference of the pointers.
9853 If the bases are equal, we are set. */
9854 if ((TREE_CODE (base0) == ARRAY_REF
9855 && TREE_CODE (base1) == ARRAY_REF
9856 && (base_offset
9857 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9858 || (INDIRECT_REF_P (base0)
9859 && INDIRECT_REF_P (base1)
9860 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9861 TREE_OPERAND (base0, 0),
9862 TREE_OPERAND (base1, 0))))
9863 || operand_equal_p (base0, base1, 0))
9865 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9866 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9867 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9868 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9869 return fold_build2_loc (loc, PLUS_EXPR, type,
9870 base_offset,
9871 fold_build2_loc (loc, MULT_EXPR, type,
9872 diff, esz));
9874 return NULL_TREE;
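/* Illustrative example (not from the sources): for "int a[10];" the
   address difference &a[i] - &a[j] folds via this routine to the byte
   offset (i - j) * sizeof (a[0]); the division by the element size
   that C-level pointer subtraction implies happens elsewhere.  */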
9877 /* If the real or vector real constant CST of type TYPE has an exact
9878 inverse, return it, else return NULL_TREE. */
9880 static tree
9881 exact_inverse (tree type, tree cst)
9883 REAL_VALUE_TYPE r;
9884 tree unit_type, *elts;
9885 enum machine_mode mode;
9886 unsigned vec_nelts, i;
9888 switch (TREE_CODE (cst))
9890 case REAL_CST:
9891 r = TREE_REAL_CST (cst);
9893 if (exact_real_inverse (TYPE_MODE (type), &r))
9894 return build_real (type, r);
9896 return NULL_TREE;
9898 case VECTOR_CST:
9899 vec_nelts = VECTOR_CST_NELTS (cst);
9900 elts = XALLOCAVEC (tree, vec_nelts);
9901 unit_type = TREE_TYPE (type);
9902 mode = TYPE_MODE (unit_type);
9904 for (i = 0; i < vec_nelts; i++)
9906 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9907 if (!exact_real_inverse (mode, &r))
9908 return NULL_TREE;
9909 elts[i] = build_real (unit_type, r);
9912 return build_vector (type, elts);
9914 default:
9915 return NULL_TREE;
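/* For example, 0.25 has the exact inverse 4.0, so x / 0.25 can later
   be rewritten as x * 4.0, whereas 3.0 has no exactly representable
   inverse and NULL_TREE is returned.  */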
9919 /* Fold a binary expression of code CODE and type TYPE with operands
9920 OP0 and OP1. LOC is the location of the resulting expression.
9921 Return the folded expression if folding is successful. Otherwise,
9922 return NULL_TREE. */
9924 tree
9925 fold_binary_loc (location_t loc,
9926 enum tree_code code, tree type, tree op0, tree op1)
9928 enum tree_code_class kind = TREE_CODE_CLASS (code);
9929 tree arg0, arg1, tem;
9930 tree t1 = NULL_TREE;
9931 bool strict_overflow_p;
9932 unsigned int prec;
9934 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9935 && TREE_CODE_LENGTH (code) == 2
9936 && op0 != NULL_TREE
9937 && op1 != NULL_TREE);
9939 arg0 = op0;
9940 arg1 = op1;
9942 /* Strip any conversions that don't change the mode. This is
9943 safe for every expression, except for a comparison expression
9944 because its signedness is derived from its operands. So, in
9945 the latter case, only strip conversions that don't change the
9946 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9947 preserved.
9949 Note that this is done as an internal manipulation within the
9950 constant folder, in order to find the simplest representation
9951 of the arguments so that their form can be studied. In any
9952 case, the appropriate type conversions should be put back in
9953 the tree that will get out of the constant folder. */
9955 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9957 STRIP_SIGN_NOPS (arg0);
9958 STRIP_SIGN_NOPS (arg1);
9960 else
9962 STRIP_NOPS (arg0);
9963 STRIP_NOPS (arg1);
9966 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9967 constant but we can't do arithmetic on them. */
9968 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9969 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9970 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9971 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9972 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9973 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9974 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9976 if (kind == tcc_binary)
9978 /* Make sure type and arg0 have the same saturating flag. */
9979 gcc_assert (TYPE_SATURATING (type)
9980 == TYPE_SATURATING (TREE_TYPE (arg0)));
9981 tem = const_binop (code, arg0, arg1);
9983 else if (kind == tcc_comparison)
9984 tem = fold_relational_const (code, type, arg0, arg1);
9985 else
9986 tem = NULL_TREE;
9988 if (tem != NULL_TREE)
9990 if (TREE_TYPE (tem) != type)
9991 tem = fold_convert_loc (loc, type, tem);
9992 return tem;
9996 /* If this is a commutative operation, and ARG0 is a constant, move it
9997 to ARG1 to reduce the number of tests below. */
9998 if (commutative_tree_code (code)
9999 && tree_swap_operands_p (arg0, arg1, true))
10000 return fold_build2_loc (loc, code, type, op1, op0);
10002 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10004 First check for cases where an arithmetic operation is applied to a
10005 compound, conditional, or comparison operation. Push the arithmetic
10006 operation inside the compound or conditional to see if any folding
10007 can then be done. Convert comparison to conditional for this purpose.
10008 This also optimizes non-constant cases that used to be done in
10009 expand_expr.
10011 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10012 one of the operands is a comparison and the other is a comparison, a
10013 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10014 code below would make the expression more complex. Change it to a
10015 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10016 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10018 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10019 || code == EQ_EXPR || code == NE_EXPR)
10020 && TREE_CODE (type) != VECTOR_TYPE
10021 && ((truth_value_p (TREE_CODE (arg0))
10022 && (truth_value_p (TREE_CODE (arg1))
10023 || (TREE_CODE (arg1) == BIT_AND_EXPR
10024 && integer_onep (TREE_OPERAND (arg1, 1)))))
10025 || (truth_value_p (TREE_CODE (arg1))
10026 && (truth_value_p (TREE_CODE (arg0))
10027 || (TREE_CODE (arg0) == BIT_AND_EXPR
10028 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10030 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10031 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10032 : TRUTH_XOR_EXPR,
10033 boolean_type_node,
10034 fold_convert_loc (loc, boolean_type_node, arg0),
10035 fold_convert_loc (loc, boolean_type_node, arg1));
10037 if (code == EQ_EXPR)
10038 tem = invert_truthvalue_loc (loc, tem);
10040 return fold_convert_loc (loc, type, tem);
10043 if (TREE_CODE_CLASS (code) == tcc_binary
10044 || TREE_CODE_CLASS (code) == tcc_comparison)
10046 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10048 tem = fold_build2_loc (loc, code, type,
10049 fold_convert_loc (loc, TREE_TYPE (op0),
10050 TREE_OPERAND (arg0, 1)), op1);
10051 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10052 tem);
10054 if (TREE_CODE (arg1) == COMPOUND_EXPR
10055 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10057 tem = fold_build2_loc (loc, code, type, op0,
10058 fold_convert_loc (loc, TREE_TYPE (op1),
10059 TREE_OPERAND (arg1, 1)));
10060 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10061 tem);
10064 if (TREE_CODE (arg0) == COND_EXPR
10065 || TREE_CODE (arg0) == VEC_COND_EXPR
10066 || COMPARISON_CLASS_P (arg0))
10068 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10069 arg0, arg1,
10070 /*cond_first_p=*/1);
10071 if (tem != NULL_TREE)
10072 return tem;
10075 if (TREE_CODE (arg1) == COND_EXPR
10076 || TREE_CODE (arg1) == VEC_COND_EXPR
10077 || COMPARISON_CLASS_P (arg1))
10079 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10080 arg1, arg0,
10081 /*cond_first_p=*/0);
10082 if (tem != NULL_TREE)
10083 return tem;
10087 switch (code)
10089 case MEM_REF:
10090 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10091 if (TREE_CODE (arg0) == ADDR_EXPR
10092 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10094 tree iref = TREE_OPERAND (arg0, 0);
10095 return fold_build2 (MEM_REF, type,
10096 TREE_OPERAND (iref, 0),
10097 int_const_binop (PLUS_EXPR, arg1,
10098 TREE_OPERAND (iref, 1)));
10101 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10102 if (TREE_CODE (arg0) == ADDR_EXPR
10103 && handled_component_p (TREE_OPERAND (arg0, 0)))
10105 tree base;
10106 HOST_WIDE_INT coffset;
10107 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10108 &coffset);
10109 if (!base)
10110 return NULL_TREE;
10111 return fold_build2 (MEM_REF, type,
10112 build_fold_addr_expr (base),
10113 int_const_binop (PLUS_EXPR, arg1,
10114 size_int (coffset)));
10117 return NULL_TREE;
10119 case POINTER_PLUS_EXPR:
10120 /* 0 +p index -> (type)index */
10121 if (integer_zerop (arg0))
10122 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10124 /* PTR +p 0 -> PTR */
10125 if (integer_zerop (arg1))
10126 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10128 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10129 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10130 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10131 return fold_convert_loc (loc, type,
10132 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10133 fold_convert_loc (loc, sizetype,
10134 arg1),
10135 fold_convert_loc (loc, sizetype,
10136 arg0)));
10138 /* (PTR +p B) +p A -> PTR +p (B + A) */
10139 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10140 && !upc_shared_type_p (TREE_TYPE (type)))
10142 tree inner;
10143 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10144 tree arg00 = TREE_OPERAND (arg0, 0);
10145 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10146 arg01, fold_convert_loc (loc, sizetype, arg1));
10147 return fold_convert_loc (loc, type,
10148 fold_build_pointer_plus_loc (loc,
10149 arg00, inner));
10152 /* PTR_CST +p CST -> CST1 */
10153 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10154 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10155 fold_convert_loc (loc, type, arg1));
10157 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10158 of the array. The loop optimizer sometimes produces this type of
10159 expression. */
10160 if (TREE_CODE (arg0) == ADDR_EXPR)
10162 tem = try_move_mult_to_index (loc, arg0,
10163 fold_convert_loc (loc,
10164 ssizetype, arg1));
10165 if (tem)
10166 return fold_convert_loc (loc, type, tem);
10169 return NULL_TREE;
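/* Illustrative examples of the rules above, for ordinary
   (non-UPC-shared) pointers:

     p +p 0          folds to   p
     (p +p 4) +p 8   folds to   p +p 12

   the latter using the (PTR +p B) +p A -> PTR +p (B + A)
   reassociation.  */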
10171 case PLUS_EXPR:
10172 /* A + (-B) -> A - B */
10173 if (TREE_CODE (arg1) == NEGATE_EXPR)
10174 return fold_build2_loc (loc, MINUS_EXPR, type,
10175 fold_convert_loc (loc, type, arg0),
10176 fold_convert_loc (loc, type,
10177 TREE_OPERAND (arg1, 0)));
10178 /* (-A) + B -> B - A */
10179 if (TREE_CODE (arg0) == NEGATE_EXPR
10180 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10181 return fold_build2_loc (loc, MINUS_EXPR, type,
10182 fold_convert_loc (loc, type, arg1),
10183 fold_convert_loc (loc, type,
10184 TREE_OPERAND (arg0, 0)));
10186 /* Disable further optimizations involving UPC shared pointers,
10187 because integers are not interoperable with shared pointers. */
10188 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10189 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10190 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10191 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10192 return NULL_TREE;
10194 if (INTEGRAL_TYPE_P (type))
10196 /* Convert ~A + 1 to -A. */
10197 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10198 && integer_onep (arg1))
10199 return fold_build1_loc (loc, NEGATE_EXPR, type,
10200 fold_convert_loc (loc, type,
10201 TREE_OPERAND (arg0, 0)));
10203 /* ~X + X is -1. */
10204 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10205 && !TYPE_OVERFLOW_TRAPS (type))
10207 tree tem = TREE_OPERAND (arg0, 0);
10209 STRIP_NOPS (tem);
10210 if (operand_equal_p (tem, arg1, 0))
10212 t1 = build_minus_one_cst (type);
10213 return omit_one_operand_loc (loc, type, t1, arg1);
10217 /* X + ~X is -1. */
10218 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10219 && !TYPE_OVERFLOW_TRAPS (type))
10221 tree tem = TREE_OPERAND (arg1, 0);
10223 STRIP_NOPS (tem);
10224 if (operand_equal_p (arg0, tem, 0))
10226 t1 = build_minus_one_cst (type);
10227 return omit_one_operand_loc (loc, type, t1, arg0);
10231 /* X + (X / CST) * -CST is X % CST. */
10232 if (TREE_CODE (arg1) == MULT_EXPR
10233 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10234 && operand_equal_p (arg0,
10235 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10237 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10238 tree cst1 = TREE_OPERAND (arg1, 1);
10239 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10240 cst1, cst0);
10241 if (sum && integer_zerop (sum))
10242 return fold_convert_loc (loc, type,
10243 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10244 TREE_TYPE (arg0), arg0,
10245 cst0));
10249 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10250 one. Make sure the type is not saturating and has the signedness of
10251 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10252 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10253 if ((TREE_CODE (arg0) == MULT_EXPR
10254 || TREE_CODE (arg1) == MULT_EXPR)
10255 && !TYPE_SATURATING (type)
10256 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10257 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10258 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10260 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10261 if (tem)
10262 return tem;
10265 if (! FLOAT_TYPE_P (type))
10267 if (integer_zerop (arg1))
10268 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10270 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10271 with a constant, and the two constants have no bits in common,
10272 we should treat this as a BIT_IOR_EXPR since this may produce more
10273 simplifications. */
10274 if (TREE_CODE (arg0) == BIT_AND_EXPR
10275 && TREE_CODE (arg1) == BIT_AND_EXPR
10276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10277 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10278 && integer_zerop (const_binop (BIT_AND_EXPR,
10279 TREE_OPERAND (arg0, 1),
10280 TREE_OPERAND (arg1, 1))))
10282 code = BIT_IOR_EXPR;
10283 goto bit_ior;
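/* Illustrative example: in (x & 0xF0) + (y & 0x0F) the two masks
   share no bits, so no carries can occur and the addition is
   equivalent to (x & 0xF0) | (y & 0x0F).  */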
10286 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10287 (plus (plus (mult) (mult)) (foo)) so that we can
10288 take advantage of the factoring cases below. */
10289 if (TYPE_OVERFLOW_WRAPS (type)
10290 && (((TREE_CODE (arg0) == PLUS_EXPR
10291 || TREE_CODE (arg0) == MINUS_EXPR)
10292 && TREE_CODE (arg1) == MULT_EXPR)
10293 || ((TREE_CODE (arg1) == PLUS_EXPR
10294 || TREE_CODE (arg1) == MINUS_EXPR)
10295 && TREE_CODE (arg0) == MULT_EXPR)))
10297 tree parg0, parg1, parg, marg;
10298 enum tree_code pcode;
10300 if (TREE_CODE (arg1) == MULT_EXPR)
10301 parg = arg0, marg = arg1;
10302 else
10303 parg = arg1, marg = arg0;
10304 pcode = TREE_CODE (parg);
10305 parg0 = TREE_OPERAND (parg, 0);
10306 parg1 = TREE_OPERAND (parg, 1);
10307 STRIP_NOPS (parg0);
10308 STRIP_NOPS (parg1);
10310 if (TREE_CODE (parg0) == MULT_EXPR
10311 && TREE_CODE (parg1) != MULT_EXPR)
10312 return fold_build2_loc (loc, pcode, type,
10313 fold_build2_loc (loc, PLUS_EXPR, type,
10314 fold_convert_loc (loc, type,
10315 parg0),
10316 fold_convert_loc (loc, type,
10317 marg)),
10318 fold_convert_loc (loc, type, parg1));
10319 if (TREE_CODE (parg0) != MULT_EXPR
10320 && TREE_CODE (parg1) == MULT_EXPR)
10321 return
10322 fold_build2_loc (loc, PLUS_EXPR, type,
10323 fold_convert_loc (loc, type, parg0),
10324 fold_build2_loc (loc, pcode, type,
10325 fold_convert_loc (loc, type, marg),
10326 fold_convert_loc (loc, type,
10327 parg1)));
10330 else
10332 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10333 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10334 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10336 /* Likewise if the operands are reversed. */
10337 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10338 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10340 /* Convert X + -C into X - C. */
10341 if (TREE_CODE (arg1) == REAL_CST
10342 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10344 tem = fold_negate_const (arg1, type);
10345 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10346 return fold_build2_loc (loc, MINUS_EXPR, type,
10347 fold_convert_loc (loc, type, arg0),
10348 fold_convert_loc (loc, type, tem));
10351 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10352 to __complex__ ( x, y ). This is not the same for SNaNs or
10353 if signed zeros are involved. */
10354 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10355 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10356 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10358 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10359 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10360 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10361 bool arg0rz = false, arg0iz = false;
10362 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10363 || (arg0i && (arg0iz = real_zerop (arg0i))))
10365 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10366 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10367 if (arg0rz && arg1i && real_zerop (arg1i))
10369 tree rp = arg1r ? arg1r
10370 : build1 (REALPART_EXPR, rtype, arg1);
10371 tree ip = arg0i ? arg0i
10372 : build1 (IMAGPART_EXPR, rtype, arg0);
10373 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10375 else if (arg0iz && arg1r && real_zerop (arg1r))
10377 tree rp = arg0r ? arg0r
10378 : build1 (REALPART_EXPR, rtype, arg0);
10379 tree ip = arg1i ? arg1i
10380 : build1 (IMAGPART_EXPR, rtype, arg1);
10381 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10386 if (flag_unsafe_math_optimizations
10387 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10388 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10389 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10390 return tem;
10392 /* Convert x+x into x*2.0. */
10393 if (operand_equal_p (arg0, arg1, 0)
10394 && SCALAR_FLOAT_TYPE_P (type))
10395 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10396 build_real (type, dconst2));
10398 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10399 We associate floats only if the user has specified
10400 -fassociative-math. */
10401 if (flag_associative_math
10402 && TREE_CODE (arg1) == PLUS_EXPR
10403 && TREE_CODE (arg0) != MULT_EXPR)
10405 tree tree10 = TREE_OPERAND (arg1, 0);
10406 tree tree11 = TREE_OPERAND (arg1, 1);
10407 if (TREE_CODE (tree11) == MULT_EXPR
10408 && TREE_CODE (tree10) == MULT_EXPR)
10410 tree tree0;
10411 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10412 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10415 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10416 We associate floats only if the user has specified
10417 -fassociative-math. */
10418 if (flag_associative_math
10419 && TREE_CODE (arg0) == PLUS_EXPR
10420 && TREE_CODE (arg1) != MULT_EXPR)
10422 tree tree00 = TREE_OPERAND (arg0, 0);
10423 tree tree01 = TREE_OPERAND (arg0, 1);
10424 if (TREE_CODE (tree01) == MULT_EXPR
10425 && TREE_CODE (tree00) == MULT_EXPR)
10427 tree tree0;
10428 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10429 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10434 bit_rotate:
10435 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10436 is a rotate of A by C1 bits. */
10437 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10438 is a rotate of A by B bits. */
10440 enum tree_code code0, code1;
10441 tree rtype;
10442 code0 = TREE_CODE (arg0);
10443 code1 = TREE_CODE (arg1);
10444 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10445 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10446 && operand_equal_p (TREE_OPERAND (arg0, 0),
10447 TREE_OPERAND (arg1, 0), 0)
10448 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10449 TYPE_UNSIGNED (rtype))
10450 /* Only create rotates in complete modes. Other cases are not
10451 expanded properly. */
10452 && (element_precision (rtype)
10453 == element_precision (TYPE_MODE (rtype))))
10455 tree tree01, tree11;
10456 enum tree_code code01, code11;
10458 tree01 = TREE_OPERAND (arg0, 1);
10459 tree11 = TREE_OPERAND (arg1, 1);
10460 STRIP_NOPS (tree01);
10461 STRIP_NOPS (tree11);
10462 code01 = TREE_CODE (tree01);
10463 code11 = TREE_CODE (tree11);
10464 if (code01 == INTEGER_CST
10465 && code11 == INTEGER_CST
10466 && TREE_INT_CST_HIGH (tree01) == 0
10467 && TREE_INT_CST_HIGH (tree11) == 0
10468 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10469 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10471 tem = build2_loc (loc, LROTATE_EXPR,
10472 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10473 TREE_OPERAND (arg0, 0),
10474 code0 == LSHIFT_EXPR ? tree01 : tree11);
10475 return fold_convert_loc (loc, type, tem);
10477 else if (code11 == MINUS_EXPR)
10479 tree tree110, tree111;
10480 tree110 = TREE_OPERAND (tree11, 0);
10481 tree111 = TREE_OPERAND (tree11, 1);
10482 STRIP_NOPS (tree110);
10483 STRIP_NOPS (tree111);
10484 if (TREE_CODE (tree110) == INTEGER_CST
10485 && 0 == compare_tree_int (tree110,
10486 element_precision
10487 (TREE_TYPE (TREE_OPERAND
10488 (arg0, 0))))
10489 && operand_equal_p (tree01, tree111, 0))
10490 return
10491 fold_convert_loc (loc, type,
10492 build2 ((code0 == LSHIFT_EXPR
10493 ? LROTATE_EXPR
10494 : RROTATE_EXPR),
10495 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10496 TREE_OPERAND (arg0, 0), tree01));
10498 else if (code01 == MINUS_EXPR)
10500 tree tree010, tree011;
10501 tree010 = TREE_OPERAND (tree01, 0);
10502 tree011 = TREE_OPERAND (tree01, 1);
10503 STRIP_NOPS (tree010);
10504 STRIP_NOPS (tree011);
10505 if (TREE_CODE (tree010) == INTEGER_CST
10506 && 0 == compare_tree_int (tree010,
10507 element_precision
10508 (TREE_TYPE (TREE_OPERAND
10509 (arg0, 0))))
10510 && operand_equal_p (tree11, tree011, 0))
10511 return fold_convert_loc
10512 (loc, type,
10513 build2 ((code0 != LSHIFT_EXPR
10514 ? LROTATE_EXPR
10515 : RROTATE_EXPR),
10516 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10517 TREE_OPERAND (arg0, 0), tree11));
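/* Illustrative examples (assuming a 32-bit unsigned type): both

     (x << 3) + (x >> 29)
     (x << n) + (x >> (32 - n))

   match the patterns above and fold to a left-rotate of x.  */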
10522 associate:
10523 /* In most languages, can't associate operations on floats through
10524 parentheses. Rather than remember where the parentheses were, we
10525 don't associate floats at all, unless the user has specified
10526 -fassociative-math.
10527 And, we need to make sure type is not saturating. */
10529 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10530 && !TYPE_SATURATING (type))
10532 tree var0, con0, lit0, minus_lit0;
10533 tree var1, con1, lit1, minus_lit1;
10534 tree atype = type;
10535 bool ok = true;
10537 /* Split both trees into variables, constants, and literals. Then
10538 associate each group together, the constants with literals,
10539 then the result with variables. This increases the chances of
10540 literals being recombined later and of generating relocatable
10541 expressions for the sum of a constant and literal. */
10542 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10543 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10544 code == MINUS_EXPR);
10546 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10547 if (code == MINUS_EXPR)
10548 code = PLUS_EXPR;
10550 /* With undefined overflow prefer doing association in a type
10551 which wraps on overflow, if that is one of the operand types. */
10552 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10553 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10555 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10556 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10557 atype = TREE_TYPE (arg0);
10558 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10559 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10560 atype = TREE_TYPE (arg1);
10561 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10564 /* With undefined overflow we can only associate constants with one
10565 variable, and constants whose association doesn't overflow. */
10566 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10567 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10569 if (var0 && var1)
10571 tree tmp0 = var0;
10572 tree tmp1 = var1;
10574 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10575 tmp0 = TREE_OPERAND (tmp0, 0);
10576 if (CONVERT_EXPR_P (tmp0)
10577 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10578 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10579 <= TYPE_PRECISION (atype)))
10580 tmp0 = TREE_OPERAND (tmp0, 0);
10581 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10582 tmp1 = TREE_OPERAND (tmp1, 0);
10583 if (CONVERT_EXPR_P (tmp1)
10584 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10586 <= TYPE_PRECISION (atype)))
10587 tmp1 = TREE_OPERAND (tmp1, 0);
10588 /* The only case we can still associate with two variables
10589 is if they are the same, modulo negation and bit-pattern
10590 preserving conversions. */
10591 if (!operand_equal_p (tmp0, tmp1, 0))
10592 ok = false;
10596 /* Only do something if we found more than two objects. Otherwise,
10597 nothing has changed and we risk infinite recursion. */
10598 if (ok
10599 && (2 < ((var0 != 0) + (var1 != 0)
10600 + (con0 != 0) + (con1 != 0)
10601 + (lit0 != 0) + (lit1 != 0)
10602 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10604 bool any_overflows = false;
10605 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10606 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10607 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10608 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10609 var0 = associate_trees (loc, var0, var1, code, atype);
10610 con0 = associate_trees (loc, con0, con1, code, atype);
10611 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10612 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10613 code, atype);
10615 /* Preserve the MINUS_EXPR if the negative part of the literal is
10616 greater than the positive part. Otherwise, the multiplicative
10617 folding code (i.e. extract_muldiv) may be fooled in case
10618 unsigned constants are subtracted, as in the following
10619 example: ((X*2 + 4) - 8U)/2. */
10620 if (minus_lit0 && lit0)
10622 if (TREE_CODE (lit0) == INTEGER_CST
10623 && TREE_CODE (minus_lit0) == INTEGER_CST
10624 && tree_int_cst_lt (lit0, minus_lit0))
10626 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10627 MINUS_EXPR, atype);
10628 lit0 = 0;
10630 else
10632 lit0 = associate_trees (loc, lit0, minus_lit0,
10633 MINUS_EXPR, atype);
10634 minus_lit0 = 0;
10638 /* Don't introduce overflows through reassociation. */
10639 if (!any_overflows
10640 && ((lit0 && TREE_OVERFLOW (lit0))
10641 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10642 return NULL_TREE;
10644 if (minus_lit0)
10646 if (con0 == 0)
10647 return
10648 fold_convert_loc (loc, type,
10649 associate_trees (loc, var0, minus_lit0,
10650 MINUS_EXPR, atype));
10651 else
10653 con0 = associate_trees (loc, con0, minus_lit0,
10654 MINUS_EXPR, atype);
10655 return
10656 fold_convert_loc (loc, type,
10657 associate_trees (loc, var0, con0,
10658 PLUS_EXPR, atype));
10662 con0 = associate_trees (loc, con0, lit0, code, atype);
10663 return
10664 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10665 code, atype));
10669 return NULL_TREE;
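/* A worked example of the reassociation above (illustrative
   sketch only): for (X + 1) + (Y + 2), the two split_tree calls
   yield var0 = X, lit0 = 1 and var1 = Y, lit1 = 2.  Four
   objects were found, so the pieces are recombined as
   (X + Y) + 3.  */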
10671 case MINUS_EXPR:
10672 /* Pointer simplifications for subtraction, simple reassociations. */
10673 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10675 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10676 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10677 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10679 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10680 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10681 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10682 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10683 return fold_build2_loc (loc, PLUS_EXPR, type,
10684 fold_build2_loc (loc, MINUS_EXPR, type,
10685 arg00, arg10),
10686 fold_build2_loc (loc, MINUS_EXPR, type,
10687 arg01, arg11));
10689 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10690 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10692 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10693 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10694 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10695 fold_convert_loc (loc, type, arg1));
10696 if (tmp)
10697 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10700 /* A - (-B) -> A + B */
10701 if (TREE_CODE (arg1) == NEGATE_EXPR)
10702 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10703 fold_convert_loc (loc, type,
10704 TREE_OPERAND (arg1, 0)));
10706 /* Disable further optimizations involving UPC shared pointers,
10707 because integers are not interoperable with shared pointers.
10708 (The test below also detects pointer difference between
10709 shared pointers, which cannot be folded.) */
10711 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10712 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10713 return NULL_TREE;
10715 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10716 if (TREE_CODE (arg0) == NEGATE_EXPR
10717 && (FLOAT_TYPE_P (type)
10718 || INTEGRAL_TYPE_P (type))
10719 && negate_expr_p (arg1)
10720 && reorder_operands_p (arg0, arg1))
10721 return fold_build2_loc (loc, MINUS_EXPR, type,
10722 fold_convert_loc (loc, type,
10723 negate_expr (arg1)),
10724 fold_convert_loc (loc, type,
10725 TREE_OPERAND (arg0, 0)));
10726 /* Convert -A - 1 to ~A. */
10727 if (INTEGRAL_TYPE_P (type)
10728 && TREE_CODE (arg0) == NEGATE_EXPR
10729 && integer_onep (arg1)
10730 && !TYPE_OVERFLOW_TRAPS (type))
10731 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10732 fold_convert_loc (loc, type,
10733 TREE_OPERAND (arg0, 0)));
10735 /* Convert -1 - A to ~A. */
10736 if (INTEGRAL_TYPE_P (type)
10737 && integer_all_onesp (arg0))
10738 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
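/* Both folds above rest on the two's-complement identity
   -A == ~A + 1: subtracting 1 from -A gives ~A, and so does
   subtracting A from -1.  E.g. for A == 5, -5 - 1 == -6 == ~5.  */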
10741 /* X - (X / CST) * CST is X % CST. */
10742 if (INTEGRAL_TYPE_P (type)
10743 && TREE_CODE (arg1) == MULT_EXPR
10744 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10745 && operand_equal_p (arg0,
10746 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10747 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10748 TREE_OPERAND (arg1, 1), 0))
10749 return
10750 fold_convert_loc (loc, type,
10751 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10752 arg0, TREE_OPERAND (arg1, 1)));
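/* For example, with X == 23 and CST == 5: 23 - (23 / 5) * 5
   == 23 - 4 * 5 == 3 == 23 % 5, matching C's truncating
   division and remainder semantics.  */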
10754 if (! FLOAT_TYPE_P (type))
10756 if (integer_zerop (arg0))
10757 return negate_expr (fold_convert_loc (loc, type, arg1));
10758 if (integer_zerop (arg1))
10759 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10761 /* Fold A - (A & B) into ~B & A. */
10762 if (!TREE_SIDE_EFFECTS (arg0)
10763 && TREE_CODE (arg1) == BIT_AND_EXPR)
10765 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10767 tree arg10 = fold_convert_loc (loc, type,
10768 TREE_OPERAND (arg1, 0));
10769 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10770 fold_build1_loc (loc, BIT_NOT_EXPR,
10771 type, arg10),
10772 fold_convert_loc (loc, type, arg0));
10774 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10776 tree arg11 = fold_convert_loc (loc,
10777 type, TREE_OPERAND (arg1, 1));
10778 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10779 fold_build1_loc (loc, BIT_NOT_EXPR,
10780 type, arg11),
10781 fold_convert_loc (loc, type, arg0));
10785 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10786 any power of 2 minus 1. */
10787 if (TREE_CODE (arg0) == BIT_AND_EXPR
10788 && TREE_CODE (arg1) == BIT_AND_EXPR
10789 && operand_equal_p (TREE_OPERAND (arg0, 0),
10790 TREE_OPERAND (arg1, 0), 0))
10792 tree mask0 = TREE_OPERAND (arg0, 1);
10793 tree mask1 = TREE_OPERAND (arg1, 1);
10794 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10796 if (operand_equal_p (tem, mask1, 0))
10798 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10799 TREE_OPERAND (arg0, 0), mask1);
10800 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
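/* A numeric check of the fold above, with B == 7 (a power of 2
   minus 1) and A == 45: (45 & ~7) - (45 & 7) == 40 - 5 == 35,
   and likewise (45 ^ 7) - 7 == 42 - 7 == 35.  */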
10805 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10806 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10807 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10809 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10810 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10811 (-ARG1 + ARG0) reduces to -ARG1. */
10812 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10813 return negate_expr (fold_convert_loc (loc, type, arg1));
10815 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10816 __complex__ ( x, -y ). This is not the same for SNaNs or if
10817 signed zeros are involved. */
10818 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10819 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10820 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10822 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10823 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10824 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10825 bool arg0rz = false, arg0iz = false;
10826 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10827 || (arg0i && (arg0iz = real_zerop (arg0i))))
10829 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10830 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10831 if (arg0rz && arg1i && real_zerop (arg1i))
10833 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10834 arg1r ? arg1r
10835 : build1 (REALPART_EXPR, rtype, arg1));
10836 tree ip = arg0i ? arg0i
10837 : build1 (IMAGPART_EXPR, rtype, arg0);
10838 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10840 else if (arg0iz && arg1r && real_zerop (arg1r))
10842 tree rp = arg0r ? arg0r
10843 : build1 (REALPART_EXPR, rtype, arg0);
10844 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10845 arg1i ? arg1i
10846 : build1 (IMAGPART_EXPR, rtype, arg1));
10847 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10852 /* Fold &x - &x. This can happen from &x.foo - &x.
10853 This is unsafe for certain floats even in non-IEEE formats.
10854 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10855 Also note that operand_equal_p is always false if an operand
10856 is volatile. */
10858 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10859 && operand_equal_p (arg0, arg1, 0))
10860 return build_zero_cst (type);
10862 /* A - B -> A + (-B) if B is easily negatable. */
10863 if (negate_expr_p (arg1)
10864 && ((FLOAT_TYPE_P (type)
10865 /* Avoid this transformation if B is a positive REAL_CST. */
10866 && (TREE_CODE (arg1) != REAL_CST
10867 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10868 || INTEGRAL_TYPE_P (type)))
10869 return fold_build2_loc (loc, PLUS_EXPR, type,
10870 fold_convert_loc (loc, type, arg0),
10871 fold_convert_loc (loc, type,
10872 negate_expr (arg1)));
10874 /* Try folding difference of addresses. */
10876 HOST_WIDE_INT diff;
10878 if ((TREE_CODE (arg0) == ADDR_EXPR
10879 || TREE_CODE (arg1) == ADDR_EXPR)
10880 && ptr_difference_const (arg0, arg1, &diff))
10881 return build_int_cst_type (type, diff);
10884 /* Fold &a[i] - &a[j] to i-j. */
10885 if (TREE_CODE (arg0) == ADDR_EXPR
10886 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10887 && TREE_CODE (arg1) == ADDR_EXPR
10888 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10890 tree tem = fold_addr_of_array_ref_difference (loc, type,
10891 TREE_OPERAND (arg0, 0),
10892 TREE_OPERAND (arg1, 0));
10893 if (tem)
10894 return tem;
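/* E.g. given int a[10], &a[7] - &a[2] folds to the constant
   index difference 7 - 2 == 5.  */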
10897 if (FLOAT_TYPE_P (type)
10898 && flag_unsafe_math_optimizations
10899 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10900 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10901 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10902 return tem;
10904 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10905 one. Make sure the type is not saturating and has the signedness of
10906 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10907 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10908 if ((TREE_CODE (arg0) == MULT_EXPR
10909 || TREE_CODE (arg1) == MULT_EXPR)
10910 && !TYPE_SATURATING (type)
10911 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10912 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10913 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10915 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10916 if (tem)
10917 return tem;
10920 goto associate;
10922 case MULT_EXPR:
10923 /* (-A) * (-B) -> A * B */
10924 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10925 return fold_build2_loc (loc, MULT_EXPR, type,
10926 fold_convert_loc (loc, type,
10927 TREE_OPERAND (arg0, 0)),
10928 fold_convert_loc (loc, type,
10929 negate_expr (arg1)));
10930 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10931 return fold_build2_loc (loc, MULT_EXPR, type,
10932 fold_convert_loc (loc, type,
10933 negate_expr (arg0)),
10934 fold_convert_loc (loc, type,
10935 TREE_OPERAND (arg1, 0)));
10937 if (! FLOAT_TYPE_P (type))
10939 if (integer_zerop (arg1))
10940 return omit_one_operand_loc (loc, type, arg1, arg0);
10941 if (integer_onep (arg1))
10942 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10943 /* Transform x * -1 into -x. Make sure to do the negation
10944 on the original operand with conversions not stripped
10945 because we can only strip non-sign-changing conversions. */
10946 if (integer_minus_onep (arg1))
10947 return fold_convert_loc (loc, type, negate_expr (op0));
10948 /* Transform x * -C into -x * C if x is easily negatable. */
10949 if (TREE_CODE (arg1) == INTEGER_CST
10950 && tree_int_cst_sgn (arg1) == -1
10951 && negate_expr_p (arg0)
10952 && (tem = negate_expr (arg1)) != arg1
10953 && !TREE_OVERFLOW (tem))
10954 return fold_build2_loc (loc, MULT_EXPR, type,
10955 fold_convert_loc (loc, type,
10956 negate_expr (arg0)),
10957 tem);
10959 /* (a * (1 << b)) is (a << b) */
10960 if (TREE_CODE (arg1) == LSHIFT_EXPR
10961 && integer_onep (TREE_OPERAND (arg1, 0)))
10962 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10963 TREE_OPERAND (arg1, 1));
10964 if (TREE_CODE (arg0) == LSHIFT_EXPR
10965 && integer_onep (TREE_OPERAND (arg0, 0)))
10966 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10967 TREE_OPERAND (arg0, 1));
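/* E.g. a * (1 << 4) == a * 16 == a << 4; multiplication by a
   power of 2 built from a shift becomes a plain shift.  */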
10969 /* (A + A) * C -> A * 2 * C */
10970 if (TREE_CODE (arg0) == PLUS_EXPR
10971 && TREE_CODE (arg1) == INTEGER_CST
10972 && operand_equal_p (TREE_OPERAND (arg0, 0),
10973 TREE_OPERAND (arg0, 1), 0))
10974 return fold_build2_loc (loc, MULT_EXPR, type,
10975 omit_one_operand_loc (loc, type,
10976 TREE_OPERAND (arg0, 0),
10977 TREE_OPERAND (arg0, 1)),
10978 fold_build2_loc (loc, MULT_EXPR, type,
10979 build_int_cst (type, 2), arg1));
10981 strict_overflow_p = false;
10982 if (TREE_CODE (arg1) == INTEGER_CST
10983 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10984 &strict_overflow_p)))
10986 if (strict_overflow_p)
10987 fold_overflow_warning (("assuming signed overflow does not "
10988 "occur when simplifying "
10989 "multiplication"),
10990 WARN_STRICT_OVERFLOW_MISC);
10991 return fold_convert_loc (loc, type, tem);
10994 /* Optimize z * conj(z) for integer complex numbers. */
10995 if (TREE_CODE (arg0) == CONJ_EXPR
10996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10997 return fold_mult_zconjz (loc, type, arg1);
10998 if (TREE_CODE (arg1) == CONJ_EXPR
10999 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11000 return fold_mult_zconjz (loc, type, arg0);
11002 else
11004 /* Maybe fold x * 0 to 0. The expressions aren't the same
11005 when x is NaN, since x * 0 is also NaN. Nor are they the
11006 same in modes with signed zeros, since multiplying a
11007 negative value by 0 gives -0, not +0. */
11008 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11009 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11010 && real_zerop (arg1))
11011 return omit_one_operand_loc (loc, type, arg1, arg0);
11012 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11013 Likewise for complex arithmetic with signed zeros. */
11014 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11015 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11016 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11017 && real_onep (arg1))
11018 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11020 /* Transform x * -1.0 into -x. */
11021 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11022 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11023 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11024 && real_minus_onep (arg1))
11025 return fold_convert_loc (loc, type, negate_expr (arg0));
11027 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11028 the result for floating point types due to rounding, so it is applied
11029 only if -fassociative-math was specified. */
11030 if (flag_associative_math
11031 && TREE_CODE (arg0) == RDIV_EXPR
11032 && TREE_CODE (arg1) == REAL_CST
11033 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11035 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11036 arg1);
11037 if (tem)
11038 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11039 TREE_OPERAND (arg0, 1));
11042 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11043 if (operand_equal_p (arg0, arg1, 0))
11045 tree tem = fold_strip_sign_ops (arg0);
11046 if (tem != NULL_TREE)
11048 tem = fold_convert_loc (loc, type, tem);
11049 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11053 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11054 This is not the same for NaNs or if signed zeros are
11055 involved. */
11056 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11057 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11058 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11059 && TREE_CODE (arg1) == COMPLEX_CST
11060 && real_zerop (TREE_REALPART (arg1)))
11062 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11063 if (real_onep (TREE_IMAGPART (arg1)))
11064 return
11065 fold_build2_loc (loc, COMPLEX_EXPR, type,
11066 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11067 rtype, arg0)),
11068 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11069 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11070 return
11071 fold_build2_loc (loc, COMPLEX_EXPR, type,
11072 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11073 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11074 rtype, arg0)));
11077 /* Optimize z * conj(z) for floating point complex numbers.
11078 Guarded by flag_unsafe_math_optimizations as non-finite
11079 imaginary components don't produce scalar results. */
11080 if (flag_unsafe_math_optimizations
11081 && TREE_CODE (arg0) == CONJ_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11083 return fold_mult_zconjz (loc, type, arg1);
11084 if (flag_unsafe_math_optimizations
11085 && TREE_CODE (arg1) == CONJ_EXPR
11086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11087 return fold_mult_zconjz (loc, type, arg0);
11089 if (flag_unsafe_math_optimizations)
11091 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11092 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11094 /* Optimizations of root(...)*root(...). */
11095 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11097 tree rootfn, arg;
11098 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11099 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11101 /* Optimize sqrt(x)*sqrt(x) as x. */
11102 if (BUILTIN_SQRT_P (fcode0)
11103 && operand_equal_p (arg00, arg10, 0)
11104 && ! HONOR_SNANS (TYPE_MODE (type)))
11105 return arg00;
11107 /* Optimize root(x)*root(y) as root(x*y). */
11108 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11109 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11110 return build_call_expr_loc (loc, rootfn, 1, arg);
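/* Numerically, e.g. sqrt (2.0) * sqrt (8.0) == sqrt (16.0)
   == 4.0.  The combination is guarded by
   flag_unsafe_math_optimizations above because it changes
   rounding and the treatment of negative arguments and NaNs.  */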
11113 /* Optimize expN(x)*expN(y) as expN(x+y). */
11114 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11116 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11117 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11118 CALL_EXPR_ARG (arg0, 0),
11119 CALL_EXPR_ARG (arg1, 0));
11120 return build_call_expr_loc (loc, expfn, 1, arg);
11123 /* Optimizations of pow(...)*pow(...). */
11124 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11125 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11126 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11128 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11129 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11130 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11131 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11133 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11134 if (operand_equal_p (arg01, arg11, 0))
11136 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11137 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11138 arg00, arg10);
11139 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11142 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11143 if (operand_equal_p (arg00, arg10, 0))
11145 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11146 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11147 arg01, arg11);
11148 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11152 /* Optimize tan(x)*cos(x) as sin(x). */
11153 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11154 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11155 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11156 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11157 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11158 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11159 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11160 CALL_EXPR_ARG (arg1, 0), 0))
11162 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11164 if (sinfn != NULL_TREE)
11165 return build_call_expr_loc (loc, sinfn, 1,
11166 CALL_EXPR_ARG (arg0, 0));
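/* tan (x) * cos (x) == (sin (x) / cos (x)) * cos (x) == sin (x)
   wherever cos (x) != 0; at the poles of tan the original
   expression yields a NaN while sin (x) is +-1, which is why
   this also sits under flag_unsafe_math_optimizations.  */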
11169 /* Optimize x*pow(x,c) as pow(x,c+1). */
11170 if (fcode1 == BUILT_IN_POW
11171 || fcode1 == BUILT_IN_POWF
11172 || fcode1 == BUILT_IN_POWL)
11174 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11175 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11176 if (TREE_CODE (arg11) == REAL_CST
11177 && !TREE_OVERFLOW (arg11)
11178 && operand_equal_p (arg0, arg10, 0))
11180 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11181 REAL_VALUE_TYPE c;
11182 tree arg;
11184 c = TREE_REAL_CST (arg11);
11185 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11186 arg = build_real (type, c);
11187 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11191 /* Optimize pow(x,c)*x as pow(x,c+1). */
11192 if (fcode0 == BUILT_IN_POW
11193 || fcode0 == BUILT_IN_POWF
11194 || fcode0 == BUILT_IN_POWL)
11196 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11197 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11198 if (TREE_CODE (arg01) == REAL_CST
11199 && !TREE_OVERFLOW (arg01)
11200 && operand_equal_p (arg1, arg00, 0))
11202 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11203 REAL_VALUE_TYPE c;
11204 tree arg;
11206 c = TREE_REAL_CST (arg01);
11207 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11208 arg = build_real (type, c);
11209 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11213 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11214 if (!in_gimple_form
11215 && optimize
11216 && operand_equal_p (arg0, arg1, 0))
11218 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11220 if (powfn)
11222 tree arg = build_real (type, dconst2);
11223 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11228 goto associate;
11230 case BIT_IOR_EXPR:
11231 bit_ior:
11232 if (integer_all_onesp (arg1))
11233 return omit_one_operand_loc (loc, type, arg1, arg0);
11234 if (integer_zerop (arg1))
11235 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11236 if (operand_equal_p (arg0, arg1, 0))
11237 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11239 /* ~X | X is -1. */
11240 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11241 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11243 t1 = build_zero_cst (type);
11244 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11245 return omit_one_operand_loc (loc, type, t1, arg1);
11248 /* X | ~X is -1. */
11249 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11250 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11252 t1 = build_zero_cst (type);
11253 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11254 return omit_one_operand_loc (loc, type, t1, arg0);
11257 /* Canonicalize (X & C1) | C2. */
11258 if (TREE_CODE (arg0) == BIT_AND_EXPR
11259 && TREE_CODE (arg1) == INTEGER_CST
11260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11262 double_int c1, c2, c3, msk;
11263 int width = TYPE_PRECISION (type), w;
11264 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11265 c2 = tree_to_double_int (arg1);
11267 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11268 if ((c1 & c2) == c1)
11269 return omit_one_operand_loc (loc, type, arg1,
11270 TREE_OPERAND (arg0, 0));
11272 msk = double_int::mask (width);
11274 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11275 if (msk.and_not (c1 | c2).is_zero ())
11276 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11277 TREE_OPERAND (arg0, 0), arg1);
11279 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11280 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11281 mode which allows further optimizations. */
11282 c1 &= msk;
11283 c2 &= msk;
11284 c3 = c1.and_not (c2);
11285 for (w = BITS_PER_UNIT;
11286 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11287 w <<= 1)
11289 unsigned HOST_WIDE_INT mask
11290 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11291 if (((c1.low | c2.low) & mask) == mask
11292 && (c1.low & ~mask) == 0 && c1.high == 0)
11294 c3 = double_int::from_uhwi (mask);
11295 break;
11298 if (c3 != c1)
11299 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11300 fold_build2_loc (loc, BIT_AND_EXPR, type,
11301 TREE_OPERAND (arg0, 0),
11302 double_int_to_tree (type,
11303 c3)),
11304 arg1);
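/* For instance, (X & 0xF0) | 0x3C becomes (X & 0xC0) | 0x3C:
   C3 == C1 & ~C2 == 0xF0 & ~0x3C == 0xC0 drops the bits of C1
   that C2 already forces to 1, and no power-of-two mask is
   substituted since C1 | C2 == 0xFC is not of the form 2^N - 1.  */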
11307 /* (X & Y) | Y is (X, Y). */
11308 if (TREE_CODE (arg0) == BIT_AND_EXPR
11309 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11310 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11311 /* (X & Y) | X is (Y, X). */
11312 if (TREE_CODE (arg0) == BIT_AND_EXPR
11313 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11314 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11315 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11316 /* X | (X & Y) is (Y, X). */
11317 if (TREE_CODE (arg1) == BIT_AND_EXPR
11318 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11319 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11320 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11321 /* X | (Y & X) is (Y, X). */
11322 if (TREE_CODE (arg1) == BIT_AND_EXPR
11323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11324 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11325 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11327 /* (X & ~Y) | (~X & Y) is X ^ Y */
11328 if (TREE_CODE (arg0) == BIT_AND_EXPR
11329 && TREE_CODE (arg1) == BIT_AND_EXPR)
11331 tree a0, a1, l0, l1, n0, n1;
11333 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11334 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11336 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11337 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11339 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11340 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11342 if ((operand_equal_p (n0, a0, 0)
11343 && operand_equal_p (n1, a1, 0))
11344 || (operand_equal_p (n0, a1, 0)
11345 && operand_equal_p (n1, a0, 0)))
11346 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11349 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11350 if (t1 != NULL_TREE)
11351 return t1;
11353 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11355 This results in more efficient code for machines without a NAND
11356 instruction. Combine will canonicalize to the first form
11357 which will allow use of NAND instructions provided by the
11358 backend if they exist. */
11359 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11360 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11362 return
11363 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11364 build2 (BIT_AND_EXPR, type,
11365 fold_convert_loc (loc, type,
11366 TREE_OPERAND (arg0, 0)),
11367 fold_convert_loc (loc, type,
11368 TREE_OPERAND (arg1, 0))));
11371 /* See if this can be simplified into a rotate first. If that
11372 is unsuccessful, continue in the association code. */
11373 goto bit_rotate;
11375 case BIT_XOR_EXPR:
11376 if (integer_zerop (arg1))
11377 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11378 if (integer_all_onesp (arg1))
11379 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11380 if (operand_equal_p (arg0, arg1, 0))
11381 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11383 /* ~X ^ X is -1. */
11384 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11385 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11387 t1 = build_zero_cst (type);
11388 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11389 return omit_one_operand_loc (loc, type, t1, arg1);
11392 /* X ^ ~X is -1. */
11393 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11394 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11396 t1 = build_zero_cst (type);
11397 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11398 return omit_one_operand_loc (loc, type, t1, arg0);
11401 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11402 with a constant, and the two constants have no bits in common,
11403 we should treat this as a BIT_IOR_EXPR since this may produce more
11404 simplifications. */
11405 if (TREE_CODE (arg0) == BIT_AND_EXPR
11406 && TREE_CODE (arg1) == BIT_AND_EXPR
11407 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11408 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11409 && integer_zerop (const_binop (BIT_AND_EXPR,
11410 TREE_OPERAND (arg0, 1),
11411 TREE_OPERAND (arg1, 1))))
11413 code = BIT_IOR_EXPR;
11414 goto bit_ior;
11417 /* (X | Y) ^ X -> Y & ~X. */
11418 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11421 tree t2 = TREE_OPERAND (arg0, 1);
11422 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11423 arg1);
11424 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11425 fold_convert_loc (loc, type, t2),
11426 fold_convert_loc (loc, type, t1));
11427 return t1;
11430 /* (Y | X) ^ X -> Y & ~X. */
11431 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11432 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11434 tree t2 = TREE_OPERAND (arg0, 0);
11435 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11436 arg1);
11437 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11438 fold_convert_loc (loc, type, t2),
11439 fold_convert_loc (loc, type, t1));
11440 return t1;
11443 /* X ^ (X | Y) -> Y & ~X. */
11444 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11445 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11447 tree t2 = TREE_OPERAND (arg1, 1);
11448 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11449 arg0);
11450 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11451 fold_convert_loc (loc, type, t2),
11452 fold_convert_loc (loc, type, t1));
11453 return t1;
11456 /* X ^ (Y | X) -> Y & ~X. */
11457 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11460 tree t2 = TREE_OPERAND (arg1, 0);
11461 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11462 arg0);
11463 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11464 fold_convert_loc (loc, type, t2),
11465 fold_convert_loc (loc, type, t1));
11466 return t1;
11469 /* Convert ~X ^ ~Y to X ^ Y. */
11470 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11471 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11472 return fold_build2_loc (loc, code, type,
11473 fold_convert_loc (loc, type,
11474 TREE_OPERAND (arg0, 0)),
11475 fold_convert_loc (loc, type,
11476 TREE_OPERAND (arg1, 0)));
11478 /* Convert ~X ^ C to X ^ ~C. */
11479 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11480 && TREE_CODE (arg1) == INTEGER_CST)
11481 return fold_build2_loc (loc, code, type,
11482 fold_convert_loc (loc, type,
11483 TREE_OPERAND (arg0, 0)),
11484 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11486 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11487 if (TREE_CODE (arg0) == BIT_AND_EXPR
11488 && integer_onep (TREE_OPERAND (arg0, 1))
11489 && integer_onep (arg1))
11490 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11491 build_zero_cst (TREE_TYPE (arg0)));
11493 /* Fold (X & Y) ^ Y as ~X & Y. */
11494 if (TREE_CODE (arg0) == BIT_AND_EXPR
11495 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11497 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11498 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11499 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11500 fold_convert_loc (loc, type, arg1));
11502 /* Fold (X & Y) ^ X as ~Y & X. */
11503 if (TREE_CODE (arg0) == BIT_AND_EXPR
11504 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11505 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11507 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11508 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11509 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11510 fold_convert_loc (loc, type, arg1));
11512 /* Fold X ^ (X & Y) as X & ~Y. */
11513 if (TREE_CODE (arg1) == BIT_AND_EXPR
11514 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11516 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11517 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11518 fold_convert_loc (loc, type, arg0),
11519 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11521 /* Fold X ^ (Y & X) as ~Y & X. */
11522 if (TREE_CODE (arg1) == BIT_AND_EXPR
11523 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11524 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11526 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11527 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11528 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11529 fold_convert_loc (loc, type, arg0));
11532 /* See if this can be simplified into a rotate first. If that
11533 is unsuccessful, continue in the association code. */
11534 goto bit_rotate;
11536 case BIT_AND_EXPR:
11537 if (integer_all_onesp (arg1))
11538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11539 if (integer_zerop (arg1))
11540 return omit_one_operand_loc (loc, type, arg1, arg0);
11541 if (operand_equal_p (arg0, arg1, 0))
11542 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11544 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11545 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11546 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11547 || (TREE_CODE (arg0) == EQ_EXPR
11548 && integer_zerop (TREE_OPERAND (arg0, 1))))
11549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11550 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11552 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11553 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11554 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11555 || (TREE_CODE (arg1) == EQ_EXPR
11556 && integer_zerop (TREE_OPERAND (arg1, 1))))
11557 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11558 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11560 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11561 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11562 && TREE_CODE (arg1) == INTEGER_CST
11563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11565 tree tmp1 = fold_convert_loc (loc, type, arg1);
11566 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11567 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11568 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11569 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11570 return
11571 fold_convert_loc (loc, type,
11572 fold_build2_loc (loc, BIT_IOR_EXPR,
11573 type, tmp2, tmp3));
11576 /* (X | Y) & Y is (X, Y). */
11577 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11578 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11579 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11580 /* (X | Y) & X is (Y, X). */
11581 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11582 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11583 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11584 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11585 /* X & (X | Y) is (Y, X). */
11586 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11587 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11588 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11589 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11590 /* X & (Y | X) is (Y, X). */
11591 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11592 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11593 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11594 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11596 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11597 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11598 && integer_onep (TREE_OPERAND (arg0, 1))
11599 && integer_onep (arg1))
11601 tree tem2;
11602 tem = TREE_OPERAND (arg0, 0);
11603 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11604 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11605 tem, tem2);
11606 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11607 build_zero_cst (TREE_TYPE (tem)));
11609 /* Fold ~X & 1 as (X & 1) == 0. */
11610 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11611 && integer_onep (arg1))
11613 tree tem2;
11614 tem = TREE_OPERAND (arg0, 0);
11615 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11616 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11617 tem, tem2);
11618 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11619 build_zero_cst (TREE_TYPE (tem)));
11621 /* Fold !X & 1 as X == 0. */
11622 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11623 && integer_onep (arg1))
11625 tem = TREE_OPERAND (arg0, 0);
11626 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11627 build_zero_cst (TREE_TYPE (tem)));
11630 /* Fold (X ^ Y) & Y as ~X & Y. */
11631 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11632 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11634 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11635 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11636 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11637 fold_convert_loc (loc, type, arg1));
11639 /* Fold (X ^ Y) & X as ~Y & X. */
11640 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11641 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11642 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11644 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11645 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11646 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11647 fold_convert_loc (loc, type, arg1));
11649 /* Fold X & (X ^ Y) as X & ~Y. */
11650 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11651 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11653 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11654 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11655 fold_convert_loc (loc, type, arg0),
11656 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11658 /* Fold X & (Y ^ X) as ~Y & X. */
11659 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11660 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11661 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11663 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11664 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11665 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11666 fold_convert_loc (loc, type, arg0));
11669 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11670 multiple of 1 << CST. */
11671 if (TREE_CODE (arg1) == INTEGER_CST)
11673 double_int cst1 = tree_to_double_int (arg1);
11674 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11675 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11676 if ((cst1 & ncst1) == ncst1
11677 && multiple_of_p (type, arg0,
11678 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11679 return fold_convert_loc (loc, type, arg0);
11682 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11683 bits from CST2. */
11684 if (TREE_CODE (arg1) == INTEGER_CST
11685 && TREE_CODE (arg0) == MULT_EXPR
11686 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11688 int arg1tz
11689 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11690 if (arg1tz > 0)
11692 double_int arg1mask, masked;
11693 arg1mask = ~double_int::mask (arg1tz);
11694 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11695 TYPE_UNSIGNED (type));
11696 masked = arg1mask & tree_to_double_int (arg1);
11697 if (masked.is_zero ())
11698 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11699 arg0, arg1);
11700 else if (masked != tree_to_double_int (arg1))
11701 return fold_build2_loc (loc, code, type, op0,
11702 double_int_to_tree (type, masked));
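/* E.g. X * 4 always has its two low bits clear, so (X * 4) & 3
   folds to 0 and (X * 4) & 7 folds to (X * 4) & 4, dropping the
   mask bits that are known to be zero.  */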
11706 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11707 ((A & N) + B) & M -> (A + B) & M
11708 Similarly if (N & M) == 0,
11709 ((A | N) + B) & M -> (A + B) & M
11710 and for - instead of + (or unary - instead of +)
11711 and/or ^ instead of |.
11712 If B is constant and (B & M) == 0, fold into A & M. */
11713 if (host_integerp (arg1, 1))
11715 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11716 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11717 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11718 && (TREE_CODE (arg0) == PLUS_EXPR
11719 || TREE_CODE (arg0) == MINUS_EXPR
11720 || TREE_CODE (arg0) == NEGATE_EXPR)
11721 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11722 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11724 tree pmop[2];
11725 int which = 0;
11726 unsigned HOST_WIDE_INT cst0;
11728 /* Now we know that arg0 is (C + D) or (C - D) or
11729 -C and arg1 (M) is == (1LL << cst) - 1.
11730 Store C into PMOP[0] and D into PMOP[1]. */
11731 pmop[0] = TREE_OPERAND (arg0, 0);
11732 pmop[1] = NULL;
11733 if (TREE_CODE (arg0) != NEGATE_EXPR)
11735 pmop[1] = TREE_OPERAND (arg0, 1);
11736 which = 1;
11739 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11740 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11741 & cst1) != cst1)
11742 which = -1;
11744 for (; which >= 0; which--)
11745 switch (TREE_CODE (pmop[which]))
11747 case BIT_AND_EXPR:
11748 case BIT_IOR_EXPR:
11749 case BIT_XOR_EXPR:
11750 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11751 != INTEGER_CST)
11752 break;
11753 /* tree_low_cst not used, because we don't care about
11754 the upper bits. */
11755 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11756 cst0 &= cst1;
11757 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11759 if (cst0 != cst1)
11760 break;
11762 else if (cst0 != 0)
11763 break;
11764 /* If C or D is of the form (A & N) where
11765 (N & M) == M, or of the form (A | N) or
11766 (A ^ N) where (N & M) == 0, replace it with A. */
11767 pmop[which] = TREE_OPERAND (pmop[which], 0);
11768 break;
11769 case INTEGER_CST:
11770 /* If C or D is a N where (N & M) == 0, it can be
11771 omitted (assumed 0). */
11772 if ((TREE_CODE (arg0) == PLUS_EXPR
11773 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11774 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11775 pmop[which] = NULL;
11776 break;
11777 default:
11778 break;
11781 /* Only build anything new if we optimized one or both arguments
11782 above. */
11783 if (pmop[0] != TREE_OPERAND (arg0, 0)
11784 || (TREE_CODE (arg0) != NEGATE_EXPR
11785 && pmop[1] != TREE_OPERAND (arg0, 1)))
11787 tree utype = TREE_TYPE (arg0);
11788 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11790 /* Perform the operations in a type that has defined
11791 overflow behavior. */
11792 utype = unsigned_type_for (TREE_TYPE (arg0));
11793 if (pmop[0] != NULL)
11794 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11795 if (pmop[1] != NULL)
11796 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11799 if (TREE_CODE (arg0) == NEGATE_EXPR)
11800 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11801 else if (TREE_CODE (arg0) == PLUS_EXPR)
11803 if (pmop[0] != NULL && pmop[1] != NULL)
11804 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11805 pmop[0], pmop[1]);
11806 else if (pmop[0] != NULL)
11807 tem = pmop[0];
11808 else if (pmop[1] != NULL)
11809 tem = pmop[1];
11810 else
11811 return build_int_cst (type, 0);
11813 else if (pmop[0] == NULL)
11814 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11815 else
11816 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11817 pmop[0], pmop[1]);
11818 /* TEM is now the new binary +, - or unary - replacement. */
11819 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11820 fold_convert_loc (loc, utype, arg1));
11821 return fold_convert_loc (loc, type, tem);
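/* A concrete instance of the comment above, with M == 0xFF and
   N == 0x1FF: since (N & M) == M, the inner mask in
   ((A & 0x1FF) + B) & 0xFF is redundant and the expression
   folds to (A + B) & 0xFF, with the addition carried out in an
   unsigned type when needed so that wrap-around is defined.  */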
11826 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11827 if (t1 != NULL_TREE)
11828 return t1;
11829 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11830 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11831 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11833 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11835 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11836 && (~TREE_INT_CST_LOW (arg1)
11837 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11838 return
11839 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11842 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11844 This results in more efficient code for machines without a NOR
11845 instruction. Combine will canonicalize to the first form
11846 which will allow use of NOR instructions provided by the
11847 backend if they exist. */
11848 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11849 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11851 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11852 build2 (BIT_IOR_EXPR, type,
11853 fold_convert_loc (loc, type,
11854 TREE_OPERAND (arg0, 0)),
11855 fold_convert_loc (loc, type,
11856 TREE_OPERAND (arg1, 0))));
11859 /* If arg0 is derived from the address of an object or function, we may
11860 be able to fold this expression using the object or function's
11861 alignment. */
11862 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11864 unsigned HOST_WIDE_INT modulus, residue;
11865 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11867 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11868 integer_onep (arg1));
11870 /* This works because modulus is a power of 2. If this weren't the
11871 case, we'd have to replace it by its greatest power-of-2
11872 divisor: modulus & -modulus. */
11873 if (low < modulus)
11874 return build_int_cst (type, residue & low);
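/* E.g. if ARG0 is the address of an object known to be 8-byte
   aligned, then modulus == 8 and residue == 0, so a test such
   as (uintptr_t) &obj & 7 folds to the constant 0.  */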
11877 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11878 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11879 if the new mask might be further optimized. */
11880 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11881 || TREE_CODE (arg0) == RSHIFT_EXPR)
11882 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11883 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11884 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11885 < TYPE_PRECISION (TREE_TYPE (arg0))
11886 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11887 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11889 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11890 unsigned HOST_WIDE_INT mask
11891 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11892 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11893 tree shift_type = TREE_TYPE (arg0);
11895 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11896 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11897 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11898 && TYPE_PRECISION (TREE_TYPE (arg0))
11899 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11901 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11902 tree arg00 = TREE_OPERAND (arg0, 0);
11903 /* See if more bits can be proven as zero because of
11904 zero extension. */
11905 if (TREE_CODE (arg00) == NOP_EXPR
11906 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11908 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11909 if (TYPE_PRECISION (inner_type)
11910 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11911 && TYPE_PRECISION (inner_type) < prec)
11913 prec = TYPE_PRECISION (inner_type);
11914 /* See if we can shorten the right shift. */
11915 if (shiftc < prec)
11916 shift_type = inner_type;
11919 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11920 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11921 zerobits <<= prec - shiftc;
11922 /* For an arithmetic shift, if the sign bit could be set, zerobits
11923 can actually contain sign bits, so no transformation is
11924 possible, unless MASK masks them all away. In that
11925 case the shift needs to be converted into a logical shift. */
11926 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11927 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11929 if ((mask & zerobits) == 0)
11930 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11931 else
11932 zerobits = 0;
11936 /* ((X << 16) & 0xff00) is (X, 0). */
11937 if ((mask & zerobits) == mask)
11938 return omit_one_operand_loc (loc, type,
11939 build_int_cst (type, 0), arg0);
11941 newmask = mask | zerobits;
11942 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11944 /* Only do the transformation if NEWMASK is some integer
11945 mode's mask. */
11946 for (prec = BITS_PER_UNIT;
11947 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11948 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11949 break;
11950 if (prec < HOST_BITS_PER_WIDE_INT
11951 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11953 tree newmaskt;
11955 if (shift_type != TREE_TYPE (arg0))
11957 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11958 fold_convert_loc (loc, shift_type,
11959 TREE_OPERAND (arg0, 0)),
11960 TREE_OPERAND (arg0, 1));
11961 tem = fold_convert_loc (loc, type, tem);
11963 else
11964 tem = op0;
11965 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11966 if (!tree_int_cst_equal (newmaskt, arg1))
11967 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
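/* For a 32-bit unsigned X, (X >> 24) & 0xFF illustrates this:
   bits 8..31 of X >> 24 are known zero, so NEWMASK becomes the
   full 32-bit mode mask 0xFFFFFFFF and the expression is
   rebuilt as an AND with all ones, which the all-ones case
   above then removes, leaving plain X >> 24.  */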
11972 goto associate;
11974 case RDIV_EXPR:
11975 /* Don't touch a floating-point divide by zero unless the mode
11976 of the constant can represent infinity. */
11977 if (TREE_CODE (arg1) == REAL_CST
11978 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11979 && real_zerop (arg1))
11980 return NULL_TREE;
11982 /* Optimize A / A to 1.0 if we don't care about
11983 NaNs or Infinities. Skip the transformation
11984 for non-real operands. */
11985 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11986 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11987 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11988 && operand_equal_p (arg0, arg1, 0))
11990 tree r = build_real (TREE_TYPE (arg0), dconst1);
11992 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11995 /* The complex version of the above A / A optimization. */
11996 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11997 && operand_equal_p (arg0, arg1, 0))
11999 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12000 if (! HONOR_NANS (TYPE_MODE (elem_type))
12001 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12003 tree r = build_real (elem_type, dconst1);
12004 /* omit_two_operands will call fold_convert for us. */
12005 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12009 /* (-A) / (-B) -> A / B */
12010 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12011 return fold_build2_loc (loc, RDIV_EXPR, type,
12012 TREE_OPERAND (arg0, 0),
12013 negate_expr (arg1));
12014 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12015 return fold_build2_loc (loc, RDIV_EXPR, type,
12016 negate_expr (arg0),
12017 TREE_OPERAND (arg1, 0));
12019 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12020 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12021 && real_onep (arg1))
12022 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12024 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12025 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12026 && real_minus_onep (arg1))
12027 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12028 negate_expr (arg0)));
12030 /* If ARG1 is a constant, we can convert this to a multiply by the
12031 reciprocal. This does not have the same rounding properties,
12032 so only do this if -freciprocal-math. We can actually
12033 always safely do it if ARG1 is a power of two, but it's hard to
12034 tell if it is or not in a portable manner. */
12035 if (optimize
12036 && (TREE_CODE (arg1) == REAL_CST
12037 || (TREE_CODE (arg1) == COMPLEX_CST
12038 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12039 || (TREE_CODE (arg1) == VECTOR_CST
12040 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12042 if (flag_reciprocal_math
12043 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12044 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12045 /* Find the reciprocal if optimizing and the result is exact.
12046 TODO: Complex reciprocal not implemented. */
12047 if (TREE_CODE (arg1) != COMPLEX_CST)
12049 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12051 if (inverse)
12052 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
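/* E.g. X / 4.0 can always become X * 0.25, because 0.25 is the
   exact inverse of a power of two; X / 3.0 becomes
   X * (1.0 / 3.0) only under -freciprocal-math, since 1.0 / 3.0
   is inexact and the product may round differently from the
   quotient.  */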
12055 /* Convert A/B/C to A/(B*C). */
12056 if (flag_reciprocal_math
12057 && TREE_CODE (arg0) == RDIV_EXPR)
12058 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12059 fold_build2_loc (loc, MULT_EXPR, type,
12060 TREE_OPERAND (arg0, 1), arg1));
12062 /* Convert A/(B/C) to (A/B)*C. */
12063 if (flag_reciprocal_math
12064 && TREE_CODE (arg1) == RDIV_EXPR)
12065 return fold_build2_loc (loc, MULT_EXPR, type,
12066 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12067 TREE_OPERAND (arg1, 0)),
12068 TREE_OPERAND (arg1, 1));
12070 /* Convert C1/(X*C2) into (C1/C2)/X. */
12071 if (flag_reciprocal_math
12072 && TREE_CODE (arg1) == MULT_EXPR
12073 && TREE_CODE (arg0) == REAL_CST
12074 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12076 tree tem = const_binop (RDIV_EXPR, arg0,
12077 TREE_OPERAND (arg1, 1));
12078 if (tem)
12079 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12080 TREE_OPERAND (arg1, 0));
12083 if (flag_unsafe_math_optimizations)
12085 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12086 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12088 /* Optimize sin(x)/cos(x) as tan(x). */
12089 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12090 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12091 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12092 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12093 CALL_EXPR_ARG (arg1, 0), 0))
12095 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12097 if (tanfn != NULL_TREE)
12098 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12101 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12102 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12103 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12104 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12105 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12106 CALL_EXPR_ARG (arg1, 0), 0))
12108 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12110 if (tanfn != NULL_TREE)
12112 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12113 CALL_EXPR_ARG (arg0, 0));
12114 return fold_build2_loc (loc, RDIV_EXPR, type,
12115 build_real (type, dconst1), tmp);
12119 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12120 NaNs or Infinities. */
12121 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12122 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12123 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12125 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12126 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12128 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12129 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12130 && operand_equal_p (arg00, arg01, 0))
12132 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12134 if (cosfn != NULL_TREE)
12135 return build_call_expr_loc (loc, cosfn, 1, arg00);
12139 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12140 NaNs or Infinities. */
12141 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12142 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12143 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12145 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12146 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12148 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12149 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12150 && operand_equal_p (arg00, arg01, 0))
12152 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12154 if (cosfn != NULL_TREE)
12156 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12157 return fold_build2_loc (loc, RDIV_EXPR, type,
12158 build_real (type, dconst1),
12159 tmp);
12164 /* Optimize pow(x,c)/x as pow(x,c-1). */
12165 if (fcode0 == BUILT_IN_POW
12166 || fcode0 == BUILT_IN_POWF
12167 || fcode0 == BUILT_IN_POWL)
12169 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12170 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12171 if (TREE_CODE (arg01) == REAL_CST
12172 && !TREE_OVERFLOW (arg01)
12173 && operand_equal_p (arg1, arg00, 0))
12175 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12176 REAL_VALUE_TYPE c;
12177 tree arg;
12179 c = TREE_REAL_CST (arg01);
12180 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12181 arg = build_real (type, c);
12182 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12186 /* Optimize a/root(b/c) into a*root(c/b). */
12187 if (BUILTIN_ROOT_P (fcode1))
12189 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12191 if (TREE_CODE (rootarg) == RDIV_EXPR)
12193 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12194 tree b = TREE_OPERAND (rootarg, 0);
12195 tree c = TREE_OPERAND (rootarg, 1);
12197 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12199 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12200 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12204 /* Optimize x/expN(y) into x*expN(-y). */
12205 if (BUILTIN_EXPONENT_P (fcode1))
12207 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12208 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12209 arg1 = build_call_expr_loc (loc,
12210 expfn, 1,
12211 fold_convert_loc (loc, type, arg));
12212 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12215 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12216 if (fcode1 == BUILT_IN_POW
12217 || fcode1 == BUILT_IN_POWF
12218 || fcode1 == BUILT_IN_POWL)
12220 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12221 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12222 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12223 tree neg11 = fold_convert_loc (loc, type,
12224 negate_expr (arg11));
12225 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12226 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12229 return NULL_TREE;
12231 case TRUNC_DIV_EXPR:
12232 /* Optimize (X & (-A)) / A where A is a power of 2,
12233 to X >> log2(A). */
12234 if (TREE_CODE (arg0) == BIT_AND_EXPR
12235 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12236 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12238 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12239 arg1, TREE_OPERAND (arg0, 1));
12240 if (sum && integer_zerop (sum)) {
12241 unsigned long pow2;
12243 if (TREE_INT_CST_LOW (arg1))
12244 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12245 else
12246 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12247 + HOST_BITS_PER_WIDE_INT;
12249 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12250 TREE_OPERAND (arg0, 0),
12251 build_int_cst (integer_type_node, pow2));
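/* Editorial sketch, not part of fold-const.c: with A a positive power of
   two, X & -A is an exact multiple of A, so dividing it by A equals an
   arithmetic right shift by log2(A) even for negative X (assuming the
   usual arithmetic right shift for signed types, as GCC provides).  */
#if 0
#include <assert.h>

static void
check_and_neg_div_fold (void)
{
  int x = -1234;
  assert ((x & -8) / 8 == x >> 3);	/* log2 (8) == 3 */
}
#endif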
12255 /* Fall through */
12257 case FLOOR_DIV_EXPR:
12258 /* Simplify A / (B << N) where A and B are positive and B is
12259 a power of 2, to A >> (N + log2(B)). */
12260 strict_overflow_p = false;
12261 if (TREE_CODE (arg1) == LSHIFT_EXPR
12262 && (TYPE_UNSIGNED (type)
12263 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12265 tree sval = TREE_OPERAND (arg1, 0);
12266 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12268 tree sh_cnt = TREE_OPERAND (arg1, 1);
12269 unsigned long pow2;
12271 if (TREE_INT_CST_LOW (sval))
12272 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12273 else
12274 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12275 + HOST_BITS_PER_WIDE_INT;
12277 if (strict_overflow_p)
12278 fold_overflow_warning (("assuming signed overflow does not "
12279 "occur when simplifying A / (B << N)"),
12280 WARN_STRICT_OVERFLOW_MISC);
12282 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12283 sh_cnt,
12284 build_int_cst (TREE_TYPE (sh_cnt),
12285 pow2));
12286 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12287 fold_convert_loc (loc, type, arg0), sh_cnt);
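/* Editorial sketch, not part of fold-const.c: for nonnegative A and a
   power-of-two B, A / (B << N) == A >> (N + log2(B)), which is the
   shift-count sum built above.  */
#if 0
#include <assert.h>

static void
check_div_by_shifted_pow2 (void)
{
  unsigned a = 1000u, n = 2u;
  assert (a / (4u << n) == a >> (n + 2));	/* log2 (4) == 2 */
}
#endif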
12291 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12292 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12293 if (INTEGRAL_TYPE_P (type)
12294 && TYPE_UNSIGNED (type)
12295 && code == FLOOR_DIV_EXPR)
12296 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12298 /* Fall through */
12300 case ROUND_DIV_EXPR:
12301 case CEIL_DIV_EXPR:
12302 case EXACT_DIV_EXPR:
12303 if (integer_onep (arg1))
12304 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12305 if (integer_zerop (arg1))
12306 return NULL_TREE;
12307 /* X / -1 is -X. */
12308 if (!TYPE_UNSIGNED (type)
12309 && TREE_CODE (arg1) == INTEGER_CST
12310 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12311 && TREE_INT_CST_HIGH (arg1) == -1)
12312 return fold_convert_loc (loc, type, negate_expr (arg0));
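/* Editorial sketch, not part of fold-const.c: dividing by -1 is plain
   negation; the INT_MIN case is exactly the overflow that undefined
   signed overflow lets the fold ignore.  */
#if 0
#include <assert.h>

static void
check_div_by_minus_one (void)
{
  int x = 42;
  assert (x / -1 == -x);
}
#endif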
12314 /* Convert -A / -B to A / B when the type is signed and overflow is
12315 undefined. */
12316 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12317 && TREE_CODE (arg0) == NEGATE_EXPR
12318 && negate_expr_p (arg1))
12320 if (INTEGRAL_TYPE_P (type))
12321 fold_overflow_warning (("assuming signed overflow does not occur "
12322 "when distributing negation across "
12323 "division"),
12324 WARN_STRICT_OVERFLOW_MISC);
12325 return fold_build2_loc (loc, code, type,
12326 fold_convert_loc (loc, type,
12327 TREE_OPERAND (arg0, 0)),
12328 fold_convert_loc (loc, type,
12329 negate_expr (arg1)));
12331 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12332 && TREE_CODE (arg1) == NEGATE_EXPR
12333 && negate_expr_p (arg0))
12335 if (INTEGRAL_TYPE_P (type))
12336 fold_overflow_warning (("assuming signed overflow does not occur "
12337 "when distributing negation across "
12338 "division"),
12339 WARN_STRICT_OVERFLOW_MISC);
12340 return fold_build2_loc (loc, code, type,
12341 fold_convert_loc (loc, type,
12342 negate_expr (arg0)),
12343 fold_convert_loc (loc, type,
12344 TREE_OPERAND (arg1, 0)));
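/* Editorial sketch, not part of fold-const.c: the two rewrites above
   implement -A / -B == A / B, which holds for C's truncating division;
   for signed integers it assumes overflow is undefined and ignorable.  */
#if 0
#include <assert.h>

static void
check_negated_division (void)
{
  int a = 100, b = 7;
  assert (-a / -b == a / b);	/* both sides truncate to 14 */
}
#endif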
12347 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12348 operation, EXACT_DIV_EXPR.
12350 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12351 At one time others generated faster code, but it's not clear whether
12352 they do after the last round of changes to the DIV code in expmed.c. */
12353 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12354 && multiple_of_p (type, arg0, arg1))
12355 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12357 strict_overflow_p = false;
12358 if (TREE_CODE (arg1) == INTEGER_CST
12359 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12360 &strict_overflow_p)))
12362 if (strict_overflow_p)
12363 fold_overflow_warning (("assuming signed overflow does not occur "
12364 "when simplifying division"),
12365 WARN_STRICT_OVERFLOW_MISC);
12366 return fold_convert_loc (loc, type, tem);
12369 return NULL_TREE;
12371 case CEIL_MOD_EXPR:
12372 case FLOOR_MOD_EXPR:
12373 case ROUND_MOD_EXPR:
12374 case TRUNC_MOD_EXPR:
12375 /* X % 1 is always zero, but be sure to preserve any side
12376 effects in X. */
12377 if (integer_onep (arg1))
12378 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12380 /* For X % 0, return X % 0 unchanged so that we get the
12381 proper warnings and errors. */
12382 if (integer_zerop (arg1))
12383 return NULL_TREE;
12385 /* 0 % X is always zero, but be sure to preserve any side
12386 effects in X. Place this after checking for X == 0. */
12387 if (integer_zerop (arg0))
12388 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12390 /* X % -1 is zero. */
12391 if (!TYPE_UNSIGNED (type)
12392 && TREE_CODE (arg1) == INTEGER_CST
12393 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12394 && TREE_INT_CST_HIGH (arg1) == -1)
12395 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12397 /* X % -C is the same as X % C. */
12398 if (code == TRUNC_MOD_EXPR
12399 && !TYPE_UNSIGNED (type)
12400 && TREE_CODE (arg1) == INTEGER_CST
12401 && !TREE_OVERFLOW (arg1)
12402 && TREE_INT_CST_HIGH (arg1) < 0
12403 && !TYPE_OVERFLOW_TRAPS (type)
12404 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12405 && !sign_bit_p (arg1, arg1))
12406 return fold_build2_loc (loc, code, type,
12407 fold_convert_loc (loc, type, arg0),
12408 fold_convert_loc (loc, type,
12409 negate_expr (arg1)));
12411 /* X % -Y is the same as X % Y. */
12412 if (code == TRUNC_MOD_EXPR
12413 && !TYPE_UNSIGNED (type)
12414 && TREE_CODE (arg1) == NEGATE_EXPR
12415 && !TYPE_OVERFLOW_TRAPS (type))
12416 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12417 fold_convert_loc (loc, type,
12418 TREE_OPERAND (arg1, 0)));
12420 strict_overflow_p = false;
12421 if (TREE_CODE (arg1) == INTEGER_CST
12422 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12423 &strict_overflow_p)))
12425 if (strict_overflow_p)
12426 fold_overflow_warning (("assuming signed overflow does not occur "
12427 "when simplifying modulus"),
12428 WARN_STRICT_OVERFLOW_MISC);
12429 return fold_convert_loc (loc, type, tem);
12432 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12433 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12434 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12435 && (TYPE_UNSIGNED (type)
12436 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12438 tree c = arg1;
12439 /* Also optimize A % (C << N) where C is a power of 2,
12440 to A & ((C << N) - 1). */
12441 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12442 c = TREE_OPERAND (arg1, 0);
12444 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12446 tree mask
12447 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12448 build_int_cst (TREE_TYPE (arg1), 1));
12449 if (strict_overflow_p)
12450 fold_overflow_warning (("assuming signed overflow does not "
12451 "occur when simplifying "
12452 "X % (power of two)"),
12453 WARN_STRICT_OVERFLOW_MISC);
12454 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12455 fold_convert_loc (loc, type, arg0),
12456 fold_convert_loc (loc, type, mask));
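/* Editorial sketch, not part of fold-const.c: for nonnegative X and a
   power of two C, X % C == X & (C - 1), the mask form built above.  */
#if 0
#include <assert.h>

static void
check_mod_pow2_fold (void)
{
  unsigned x = 1234u;
  assert (x % 16u == (x & 15u));	/* both are 2 */
}
#endif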
12460 return NULL_TREE;
12462 case LROTATE_EXPR:
12463 case RROTATE_EXPR:
12464 if (integer_all_onesp (arg0))
12465 return omit_one_operand_loc (loc, type, arg0, arg1);
12466 goto shift;
12468 case RSHIFT_EXPR:
12469 /* Optimize -1 >> x for arithmetic right shifts. */
12470 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12471 && tree_expr_nonnegative_p (arg1))
12472 return omit_one_operand_loc (loc, type, arg0, arg1);
12473 /* ... fall through ... */
12475 case LSHIFT_EXPR:
12476 shift:
12477 if (integer_zerop (arg1))
12478 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12479 if (integer_zerop (arg0))
12480 return omit_one_operand_loc (loc, type, arg0, arg1);
12482 /* Prefer vector1 << scalar to vector1 << vector2
12483 if vector2 is uniform. */
12484 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12485 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12486 return fold_build2_loc (loc, code, type, op0, tem);
12488 /* Since a negative shift count is not well-defined,
12489 don't try to compute it in the compiler. */
12490 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12491 return NULL_TREE;
12493 prec = element_precision (type);
12495 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12496 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12497 && TREE_INT_CST_LOW (arg1) < prec
12498 && host_integerp (TREE_OPERAND (arg0, 1), true)
12499 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12501 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12502 + TREE_INT_CST_LOW (arg1));
12504 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12505 being well defined. */
12506 if (low >= prec)
12508 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12509 low = low % prec;
12510 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12511 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12512 TREE_OPERAND (arg0, 0));
12513 else
12514 low = prec - 1;
12517 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12518 build_int_cst (TREE_TYPE (arg1), low));
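/* Editorial sketch, not part of fold-const.c: stacked constant shifts
   combine by adding their counts, as long as the sum stays below the
   precision; the clamping above handles the overlong combinations.  */
#if 0
#include <assert.h>

static void
check_combined_shift_counts (void)
{
  unsigned x = 0x12345678u;
  assert (((x << 3) << 4) == (x << 7));
  assert (((x >> 5) >> 2) == (x >> 7));
}
#endif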
12521 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12522 into x & ((unsigned)-1 >> c) for unsigned types. */
12523 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12524 || (TYPE_UNSIGNED (type)
12525 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12526 && host_integerp (arg1, false)
12527 && TREE_INT_CST_LOW (arg1) < prec
12528 && host_integerp (TREE_OPERAND (arg0, 1), false)
12529 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12531 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12532 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12533 tree lshift;
12534 tree arg00;
12536 if (low0 == low1)
12538 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12540 lshift = build_minus_one_cst (type);
12541 lshift = const_binop (code, lshift, arg1);
12543 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
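/* Editorial sketch, not part of fold-const.c: shifting right then left
   by the same count only clears the low bits, so (x >> c) << c equals
   x & (-1 << c); the unsigned (x << c) >> c case clears the high bits
   analogously.  */
#if 0
#include <assert.h>

static void
check_shift_pair_as_mask (void)
{
  unsigned x = 0xdeadbeefu;
  assert (((x >> 4) << 4) == (x & (~0u << 4)));
  assert (((x << 4) >> 4) == (x & (~0u >> 4)));
}
#endif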
12547 /* Rewrite an LROTATE_EXPR by a constant into an
12548 RROTATE_EXPR by a new constant. */
12549 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12551 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12552 tem = const_binop (MINUS_EXPR, tem, arg1);
12553 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12556 /* If we have a rotate of a bit operation with the rotate count and
12557 the second operand of the bit operation both constant,
12558 permute the two operations. */
12559 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12560 && (TREE_CODE (arg0) == BIT_AND_EXPR
12561 || TREE_CODE (arg0) == BIT_IOR_EXPR
12562 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12564 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12565 fold_build2_loc (loc, code, type,
12566 TREE_OPERAND (arg0, 0), arg1),
12567 fold_build2_loc (loc, code, type,
12568 TREE_OPERAND (arg0, 1), arg1));
12570 /* Two consecutive rotates adding up to the precision of the
12571 type can be ignored. */
12572 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12573 && TREE_CODE (arg0) == RROTATE_EXPR
12574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12575 && TREE_INT_CST_HIGH (arg1) == 0
12576 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12577 && ((TREE_INT_CST_LOW (arg1)
12578 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12579 == prec))
12580 return TREE_OPERAND (arg0, 0);
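/* Editorial sketch, not part of fold-const.c: rotl and rotr below are
   local helpers (not GCC APIs) spelling out the two rotate folds above:
   a left rotate by c equals a right rotate by precision - c, and two
   right rotates whose counts sum to the precision cancel.  */
#if 0
#include <assert.h>
#include <limits.h>

static unsigned
rotl (unsigned x, unsigned c)
{
  return c ? (x << c) | (x >> (sizeof x * CHAR_BIT - c)) : x;
}

static unsigned
rotr (unsigned x, unsigned c)
{
  return c ? (x >> c) | (x << (sizeof x * CHAR_BIT - c)) : x;
}

static void
check_rotate_folds (void)
{
  unsigned x = 0x12345678u, prec = sizeof x * CHAR_BIT;
  assert (rotl (x, 5) == rotr (x, prec - 5));
  assert (rotr (rotr (x, 12), prec - 12) == x);
}
#endif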
12582 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12583 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12584 if the latter can be further optimized. */
12585 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12586 && TREE_CODE (arg0) == BIT_AND_EXPR
12587 && TREE_CODE (arg1) == INTEGER_CST
12588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12590 tree mask = fold_build2_loc (loc, code, type,
12591 fold_convert_loc (loc, type,
12592 TREE_OPERAND (arg0, 1)),
12593 arg1);
12594 tree shift = fold_build2_loc (loc, code, type,
12595 fold_convert_loc (loc, type,
12596 TREE_OPERAND (arg0, 0)),
12597 arg1);
12598 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12599 if (tem)
12600 return tem;
12603 return NULL_TREE;
12605 case MIN_EXPR:
12606 if (operand_equal_p (arg0, arg1, 0))
12607 return omit_one_operand_loc (loc, type, arg0, arg1);
12608 if (INTEGRAL_TYPE_P (type)
12609 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12610 return omit_one_operand_loc (loc, type, arg1, arg0);
12611 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12612 if (tem)
12613 return tem;
12614 goto associate;
12616 case MAX_EXPR:
12617 if (operand_equal_p (arg0, arg1, 0))
12618 return omit_one_operand_loc (loc, type, arg0, arg1);
12619 if (INTEGRAL_TYPE_P (type)
12620 && TYPE_MAX_VALUE (type)
12621 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12622 return omit_one_operand_loc (loc, type, arg1, arg0);
12623 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12624 if (tem)
12625 return tem;
12626 goto associate;
12628 case TRUTH_ANDIF_EXPR:
12629 /* Note that the operands of this must be ints
12630 and their values must be 0 or 1.
12631 ("true" is a fixed value perhaps depending on the language.) */
12632 /* If first arg is constant zero, return it. */
12633 if (integer_zerop (arg0))
12634 return fold_convert_loc (loc, type, arg0);
12635 case TRUTH_AND_EXPR:
12636 /* If either arg is constant true, drop it. */
12637 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12638 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12639 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12640 /* Preserve sequence points. */
12641 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12642 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12643 /* If second arg is constant zero, result is zero, but first arg
12644 must be evaluated. */
12645 if (integer_zerop (arg1))
12646 return omit_one_operand_loc (loc, type, arg1, arg0);
12647 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12648 case will be handled here. */
12649 if (integer_zerop (arg0))
12650 return omit_one_operand_loc (loc, type, arg0, arg1);
12652 /* !X && X is always false. */
12653 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12654 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12655 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12656 /* X && !X is always false. */
12657 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12659 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12661 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12662 means A >= Y && A != MAX, but in this case we know that
12663 A < X <= MAX. */
12665 if (!TREE_SIDE_EFFECTS (arg0)
12666 && !TREE_SIDE_EFFECTS (arg1))
12668 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12669 if (tem && !operand_equal_p (tem, arg0, 0))
12670 return fold_build2_loc (loc, code, type, tem, arg1);
12672 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12673 if (tem && !operand_equal_p (tem, arg1, 0))
12674 return fold_build2_loc (loc, code, type, arg0, tem);
12677 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12678 != NULL_TREE)
12679 return tem;
12681 return NULL_TREE;
12683 case TRUTH_ORIF_EXPR:
12684 /* Note that the operands of this must be ints
12685 and their values must be 0 or 1.
12686 ("true" is a fixed value perhaps depending on the language.) */
12687 /* If first arg is constant true, return it. */
12688 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12689 return fold_convert_loc (loc, type, arg0);
12690 case TRUTH_OR_EXPR:
12691 /* If either arg is constant zero, drop it. */
12692 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12693 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12694 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12695 /* Preserve sequence points. */
12696 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12697 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12698 /* If second arg is constant true, result is true, but we must
12699 evaluate first arg. */
12700 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12701 return omit_one_operand_loc (loc, type, arg1, arg0);
12702 /* Likewise for first arg, but note this only occurs here for
12703 TRUTH_OR_EXPR. */
12704 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12705 return omit_one_operand_loc (loc, type, arg0, arg1);
12707 /* !X || X is always true. */
12708 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12709 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12710 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12711 /* X || !X is always true. */
12712 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12713 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12714 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12716 /* (X && !Y) || (!X && Y) is X ^ Y */
12717 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12718 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12720 tree a0, a1, l0, l1, n0, n1;
12722 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12723 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12725 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12726 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12728 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12729 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12731 if ((operand_equal_p (n0, a0, 0)
12732 && operand_equal_p (n1, a1, 0))
12733 || (operand_equal_p (n0, a1, 0)
12734 && operand_equal_p (n1, a0, 0)))
12735 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
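/* Editorial sketch, not part of fold-const.c: exhaustively checking
   that, for boolean operands, (x && !y) || (!x && y) is exactly the
   x ^ y built above.  */
#if 0
#include <assert.h>

static void
check_andor_as_xor (void)
{
  for (int x = 0; x <= 1; x++)
    for (int y = 0; y <= 1; y++)
      assert (((x && !y) || (!x && y)) == (x ^ y));
}
#endif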
12738 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12739 != NULL_TREE)
12740 return tem;
12742 return NULL_TREE;
12744 case TRUTH_XOR_EXPR:
12745 /* If the second arg is constant zero, drop it. */
12746 if (integer_zerop (arg1))
12747 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12748 /* If the second arg is constant true, this is a logical inversion. */
12749 if (integer_onep (arg1))
12751 tem = invert_truthvalue_loc (loc, arg0);
12752 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12754 /* Identical arguments cancel to zero. */
12755 if (operand_equal_p (arg0, arg1, 0))
12756 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12758 /* !X ^ X is always true. */
12759 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12760 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12761 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12763 /* X ^ !X is always true. */
12764 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12765 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12766 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12768 return NULL_TREE;
12770 case EQ_EXPR:
12771 case NE_EXPR:
12772 STRIP_NOPS (arg0);
12773 STRIP_NOPS (arg1);
12775 tem = fold_comparison (loc, code, type, op0, op1);
12776 if (tem != NULL_TREE)
12777 return tem;
12779 /* bool_var != 0 becomes bool_var. */
12780 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12781 && code == NE_EXPR)
12782 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12784 /* bool_var == 1 becomes bool_var. */
12785 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12786 && code == EQ_EXPR)
12787 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12789 /* bool_var != 1 becomes !bool_var. */
12790 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12791 && code == NE_EXPR)
12792 return fold_convert_loc (loc, type,
12793 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12794 TREE_TYPE (arg0), arg0));
12796 /* bool_var == 0 becomes !bool_var. */
12797 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12798 && code == EQ_EXPR)
12799 return fold_convert_loc (loc, type,
12800 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12801 TREE_TYPE (arg0), arg0));
12803 /* !exp != 0 becomes !exp */
12804 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12805 && code == NE_EXPR)
12806 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12808 /* If this is an equality comparison of the address of two non-weak,
12809 unaliased symbols neither of which are extern (since we do not
12810 have access to attributes for externs), then we know the result. */
12811 if (TREE_CODE (arg0) == ADDR_EXPR
12812 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12813 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12814 && ! lookup_attribute ("alias",
12815 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12816 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12817 && TREE_CODE (arg1) == ADDR_EXPR
12818 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12819 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12820 && ! lookup_attribute ("alias",
12821 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12822 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12824 /* We know that we're looking at the address of two
12825 non-weak, unaliased, static _DECL nodes.
12827 It is both wasteful and incorrect to call operand_equal_p
12828 to compare the two ADDR_EXPR nodes. It is wasteful in that
12829 all we need to do is test pointer equality for the arguments
12830 to the two ADDR_EXPR nodes. It is incorrect to use
12831 operand_equal_p as that function is NOT equivalent to a
12832 C equality test. It can in fact return false for two
12833 objects which would test as equal using the C equality
12834 operator. */
12835 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12836 return constant_boolean_node (equal
12837 ? code == EQ_EXPR : code != EQ_EXPR,
12838 type);
12841 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12842 a MINUS_EXPR of a constant, we can convert it into a comparison with
12843 a revised constant as long as no overflow occurs. */
12844 if (TREE_CODE (arg1) == INTEGER_CST
12845 && (TREE_CODE (arg0) == PLUS_EXPR
12846 || TREE_CODE (arg0) == MINUS_EXPR)
12847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12848 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12849 ? MINUS_EXPR : PLUS_EXPR,
12850 fold_convert_loc (loc, TREE_TYPE (arg0),
12851 arg1),
12852 TREE_OPERAND (arg0, 1)))
12853 && !TREE_OVERFLOW (tem))
12854 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12856 /* Similarly for a NEGATE_EXPR. */
12857 if (TREE_CODE (arg0) == NEGATE_EXPR
12858 && TREE_CODE (arg1) == INTEGER_CST
12859 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12860 arg1)))
12861 && TREE_CODE (tem) == INTEGER_CST
12862 && !TREE_OVERFLOW (tem))
12863 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12865 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12866 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12867 && TREE_CODE (arg1) == INTEGER_CST
12868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12869 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12870 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12871 fold_convert_loc (loc,
12872 TREE_TYPE (arg0),
12873 arg1),
12874 TREE_OPERAND (arg0, 1)));
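/* Editorial sketch, not part of fold-const.c: moving the XOR constant
   to the other side of the comparison, (x ^ C1) == C2 holds exactly
   when x == (C1 ^ C2).  */
#if 0
#include <assert.h>

static void
check_xor_constant_compare (unsigned x)
{
  assert (((x ^ 0xf0u) == 0xffu) == (x == 0x0fu));	/* 0xf0 ^ 0xff == 0x0f */
}
#endif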
12876 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12877 if ((TREE_CODE (arg0) == PLUS_EXPR
12878 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12879 || TREE_CODE (arg0) == MINUS_EXPR)
12880 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12881 0)),
12882 arg1, 0)
12883 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12884 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12886 tree val = TREE_OPERAND (arg0, 1);
12887 return omit_two_operands_loc (loc, type,
12888 fold_build2_loc (loc, code, type,
12889 val,
12890 build_int_cst (TREE_TYPE (val),
12891 0)),
12892 TREE_OPERAND (arg0, 0), arg1);
12895 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12896 if (TREE_CODE (arg0) == MINUS_EXPR
12897 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12898 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12899 1)),
12900 arg1, 0)
12901 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12903 return omit_two_operands_loc (loc, type,
12904 code == NE_EXPR
12905 ? boolean_true_node : boolean_false_node,
12906 TREE_OPERAND (arg0, 1), arg1);
12909 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12910 for !=. Don't do this for ordered comparisons due to overflow. */
12911 if (TREE_CODE (arg0) == MINUS_EXPR
12912 && integer_zerop (arg1))
12913 return fold_build2_loc (loc, code, type,
12914 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12916 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12917 if (TREE_CODE (arg0) == ABS_EXPR
12918 && (integer_zerop (arg1) || real_zerop (arg1)))
12919 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12921 /* If this is an EQ or NE comparison with zero and ARG0 is
12922 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12923 two operations, but the latter can be done in one less insn
12924 on machines that have only two-operand insns or on which a
12925 constant cannot be the first operand. */
12926 if (TREE_CODE (arg0) == BIT_AND_EXPR
12927 && integer_zerop (arg1))
12929 tree arg00 = TREE_OPERAND (arg0, 0);
12930 tree arg01 = TREE_OPERAND (arg0, 1);
12931 if (TREE_CODE (arg00) == LSHIFT_EXPR
12932 && integer_onep (TREE_OPERAND (arg00, 0)))
12934 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12935 arg01, TREE_OPERAND (arg00, 1));
12936 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12937 build_int_cst (TREE_TYPE (arg0), 1));
12938 return fold_build2_loc (loc, code, type,
12939 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12940 arg1);
12942 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12943 && integer_onep (TREE_OPERAND (arg01, 0)))
12945 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12946 arg00, TREE_OPERAND (arg01, 1));
12947 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12948 build_int_cst (TREE_TYPE (arg0), 1));
12949 return fold_build2_loc (loc, code, type,
12950 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12951 arg1);
12955 /* If this is an NE or EQ comparison of zero against the result of a
12956 signed MOD operation whose second operand is a power of 2, make
12957 the MOD operation unsigned since it is simpler and equivalent. */
12958 if (integer_zerop (arg1)
12959 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12960 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12961 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12962 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12963 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12964 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12966 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12967 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12968 fold_convert_loc (loc, newtype,
12969 TREE_OPERAND (arg0, 0)),
12970 fold_convert_loc (loc, newtype,
12971 TREE_OPERAND (arg0, 1)));
12973 return fold_build2_loc (loc, code, type, newmod,
12974 fold_convert_loc (loc, newtype, arg1));
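/* Editorial sketch, not part of fold-const.c: a zero test of a signed
   remainder by a power of two gives the same answer as the unsigned
   remainder, because 2^width is itself a multiple of the modulus.  */
#if 0
#include <assert.h>

static void
check_signed_mod_pow2_test (void)
{
  for (int x = -32; x <= 32; x++)
    assert ((x % 8 == 0) == ((unsigned) x % 8u == 0u));
}
#endif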
12977 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12978 C1 is a valid shift constant, and C2 is a power of two, i.e.
12979 a single bit. */
12980 if (TREE_CODE (arg0) == BIT_AND_EXPR
12981 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12982 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12983 == INTEGER_CST
12984 && integer_pow2p (TREE_OPERAND (arg0, 1))
12985 && integer_zerop (arg1))
12987 tree itype = TREE_TYPE (arg0);
12988 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12989 prec = TYPE_PRECISION (itype);
12991 /* Check for a valid shift count. */
12992 if (TREE_INT_CST_HIGH (arg001) == 0
12993 && TREE_INT_CST_LOW (arg001) < prec)
12995 tree arg01 = TREE_OPERAND (arg0, 1);
12996 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12997 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12998 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12999 can be rewritten as (X & (C2 << C1)) != 0. */
13000 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13002 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13003 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13004 return fold_build2_loc (loc, code, type, tem,
13005 fold_convert_loc (loc, itype, arg1));
13007 /* Otherwise, for signed (arithmetic) shifts,
13008 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13009 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13010 else if (!TYPE_UNSIGNED (itype))
13011 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13012 arg000, build_int_cst (itype, 0));
13013 /* Otherwise, for unsigned (logical) shifts,
13014 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13015 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13016 else
13017 return omit_one_operand_loc (loc, type,
13018 code == EQ_EXPR ? integer_one_node
13019 : integer_zero_node,
13020 arg000);
13024 /* If we have (A & C) == C where C is a power of 2, convert this into
13025 (A & C) != 0. Similarly for NE_EXPR. */
13026 if (TREE_CODE (arg0) == BIT_AND_EXPR
13027 && integer_pow2p (TREE_OPERAND (arg0, 1))
13028 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13029 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13030 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13031 integer_zero_node));
13033 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13034 bit, then fold the expression into A < 0 or A >= 0. */
13035 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13036 if (tem)
13037 return tem;
13039 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13040 Similarly for NE_EXPR. */
13041 if (TREE_CODE (arg0) == BIT_AND_EXPR
13042 && TREE_CODE (arg1) == INTEGER_CST
13043 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13045 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13046 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13047 TREE_OPERAND (arg0, 1));
13048 tree dandnotc
13049 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13050 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13051 notc);
13052 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13053 if (integer_nonzerop (dandnotc))
13054 return omit_one_operand_loc (loc, type, rslt, arg0);
13057 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13058 Similarly for NE_EXPR. */
13059 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13060 && TREE_CODE (arg1) == INTEGER_CST
13061 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13063 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13064 tree candnotd
13065 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13066 TREE_OPERAND (arg0, 1),
13067 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13068 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13069 if (integer_nonzerop (candnotd))
13070 return omit_one_operand_loc (loc, type, rslt, arg0);
13073 /* If this is a comparison of a field, we may be able to simplify it. */
13074 if ((TREE_CODE (arg0) == COMPONENT_REF
13075 || TREE_CODE (arg0) == BIT_FIELD_REF)
13076 /* Handle the constant case even without -O
13077 to make sure the warnings are given. */
13078 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13080 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13081 if (t1)
13082 return t1;
13085 /* Optimize comparisons of strlen vs zero to a compare of the
13086 first character of the string vs zero. To wit,
13087 strlen(ptr) == 0 => *ptr == 0
13088 strlen(ptr) != 0 => *ptr != 0
13089 Other cases should reduce to one of these two (or a constant)
13090 due to the return value of strlen being unsigned. */
13091 if (TREE_CODE (arg0) == CALL_EXPR
13092 && integer_zerop (arg1))
13094 tree fndecl = get_callee_fndecl (arg0);
13096 if (fndecl
13097 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13098 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13099 && call_expr_nargs (arg0) == 1
13100 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13102 tree iref = build_fold_indirect_ref_loc (loc,
13103 CALL_EXPR_ARG (arg0, 0));
13104 return fold_build2_loc (loc, code, type, iref,
13105 build_int_cst (TREE_TYPE (iref), 0));
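/* Editorial sketch, not part of fold-const.c: strlen (p) == 0 holds
   exactly when the first character is the terminating NUL, so the
   comparison reduces to *p == 0 with no scan of the string.  */
#if 0
#include <assert.h>
#include <string.h>

static void
check_strlen_zero_fold (const char *p)
{
  assert ((strlen (p) == 0) == (*p == 0));
}
#endif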
13109 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13110 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13111 if (TREE_CODE (arg0) == RSHIFT_EXPR
13112 && integer_zerop (arg1)
13113 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13115 tree arg00 = TREE_OPERAND (arg0, 0);
13116 tree arg01 = TREE_OPERAND (arg0, 1);
13117 tree itype = TREE_TYPE (arg00);
13118 if (TREE_INT_CST_HIGH (arg01) == 0
13119 && TREE_INT_CST_LOW (arg01)
13120 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13122 if (TYPE_UNSIGNED (itype))
13124 itype = signed_type_for (itype);
13125 arg00 = fold_convert_loc (loc, itype, arg00);
13127 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13128 type, arg00, build_zero_cst (itype));
13132 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13133 if (integer_zerop (arg1)
13134 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13135 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13136 TREE_OPERAND (arg0, 1));
13138 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13139 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13140 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13141 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13142 build_zero_cst (TREE_TYPE (arg0)));
13143 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13144 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13145 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13146 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13147 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13148 build_zero_cst (TREE_TYPE (arg0)));
13150 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13151 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13152 && TREE_CODE (arg1) == INTEGER_CST
13153 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13154 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13155 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13156 TREE_OPERAND (arg0, 1), arg1));
13158 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13159 (X & C) == 0 when C is a single bit. */
13160 if (TREE_CODE (arg0) == BIT_AND_EXPR
13161 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13162 && integer_zerop (arg1)
13163 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13165 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13166 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13167 TREE_OPERAND (arg0, 1));
13168 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13169 type, tem,
13170 fold_convert_loc (loc, TREE_TYPE (arg0),
13171 arg1));
13174 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13175 constant C is a power of two, i.e. a single bit. */
13176 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13177 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13178 && integer_zerop (arg1)
13179 && integer_pow2p (TREE_OPERAND (arg0, 1))
13180 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13181 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13183 tree arg00 = TREE_OPERAND (arg0, 0);
13184 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13185 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13188 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13189 when C is a power of two, i.e. a single bit.
13190 if (TREE_CODE (arg0) == BIT_AND_EXPR
13191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13192 && integer_zerop (arg1)
13193 && integer_pow2p (TREE_OPERAND (arg0, 1))
13194 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13195 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13197 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13198 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13199 arg000, TREE_OPERAND (arg0, 1));
13200 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13201 tem, build_int_cst (TREE_TYPE (tem), 0));
13204 if (integer_zerop (arg1)
13205 && tree_expr_nonzero_p (arg0))
13207 tree res = constant_boolean_node (code==NE_EXPR, type);
13208 return omit_one_operand_loc (loc, type, res, arg0);
13211 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13212 if (TREE_CODE (arg0) == NEGATE_EXPR
13213 && TREE_CODE (arg1) == NEGATE_EXPR)
13214 return fold_build2_loc (loc, code, type,
13215 TREE_OPERAND (arg0, 0),
13216 fold_convert_loc (loc, TREE_TYPE (arg0),
13217 TREE_OPERAND (arg1, 0)));
13219 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13220 if (TREE_CODE (arg0) == BIT_AND_EXPR
13221 && TREE_CODE (arg1) == BIT_AND_EXPR)
13223 tree arg00 = TREE_OPERAND (arg0, 0);
13224 tree arg01 = TREE_OPERAND (arg0, 1);
13225 tree arg10 = TREE_OPERAND (arg1, 0);
13226 tree arg11 = TREE_OPERAND (arg1, 1);
13227 tree itype = TREE_TYPE (arg0);
13229 if (operand_equal_p (arg01, arg11, 0))
13230 return fold_build2_loc (loc, code, type,
13231 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13232 fold_build2_loc (loc,
13233 BIT_XOR_EXPR, itype,
13234 arg00, arg10),
13235 arg01),
13236 build_zero_cst (itype));
13238 if (operand_equal_p (arg01, arg10, 0))
13239 return fold_build2_loc (loc, code, type,
13240 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13241 fold_build2_loc (loc,
13242 BIT_XOR_EXPR, itype,
13243 arg00, arg11),
13244 arg01),
13245 build_zero_cst (itype));
13247 if (operand_equal_p (arg00, arg11, 0))
13248 return fold_build2_loc (loc, code, type,
13249 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13250 fold_build2_loc (loc,
13251 BIT_XOR_EXPR, itype,
13252 arg01, arg10),
13253 arg00),
13254 build_zero_cst (itype));
13256 if (operand_equal_p (arg00, arg10, 0))
13257 return fold_build2_loc (loc, code, type,
13258 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13259 fold_build2_loc (loc,
13260 BIT_XOR_EXPR, itype,
13261 arg01, arg11),
13262 arg00),
13263 build_zero_cst (itype));
13266 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13267 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13269 tree arg00 = TREE_OPERAND (arg0, 0);
13270 tree arg01 = TREE_OPERAND (arg0, 1);
13271 tree arg10 = TREE_OPERAND (arg1, 0);
13272 tree arg11 = TREE_OPERAND (arg1, 1);
13273 tree itype = TREE_TYPE (arg0);
13275 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13276 operand_equal_p guarantees no side-effects so we don't need
13277 to use omit_one_operand on Z. */
13278 if (operand_equal_p (arg01, arg11, 0))
13279 return fold_build2_loc (loc, code, type, arg00,
13280 fold_convert_loc (loc, TREE_TYPE (arg00),
13281 arg10));
13282 if (operand_equal_p (arg01, arg10, 0))
13283 return fold_build2_loc (loc, code, type, arg00,
13284 fold_convert_loc (loc, TREE_TYPE (arg00),
13285 arg11));
13286 if (operand_equal_p (arg00, arg11, 0))
13287 return fold_build2_loc (loc, code, type, arg01,
13288 fold_convert_loc (loc, TREE_TYPE (arg01),
13289 arg10));
13290 if (operand_equal_p (arg00, arg10, 0))
13291 return fold_build2_loc (loc, code, type, arg01,
13292 fold_convert_loc (loc, TREE_TYPE (arg01),
13293 arg11));
13295 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13296 if (TREE_CODE (arg01) == INTEGER_CST
13297 && TREE_CODE (arg11) == INTEGER_CST)
13299 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13300 fold_convert_loc (loc, itype, arg11));
13301 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13302 return fold_build2_loc (loc, code, type, tem,
13303 fold_convert_loc (loc, itype, arg10));
13307 /* Attempt to simplify equality/inequality comparisons of complex
13308 values. Only lower the comparison if the result is known or
13309 can be simplified to a single scalar comparison. */
13310 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13311 || TREE_CODE (arg0) == COMPLEX_CST)
13312 && (TREE_CODE (arg1) == COMPLEX_EXPR
13313 || TREE_CODE (arg1) == COMPLEX_CST))
13315 tree real0, imag0, real1, imag1;
13316 tree rcond, icond;
13318 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13320 real0 = TREE_OPERAND (arg0, 0);
13321 imag0 = TREE_OPERAND (arg0, 1);
13323 else
13325 real0 = TREE_REALPART (arg0);
13326 imag0 = TREE_IMAGPART (arg0);
13329 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13331 real1 = TREE_OPERAND (arg1, 0);
13332 imag1 = TREE_OPERAND (arg1, 1);
13334 else
13336 real1 = TREE_REALPART (arg1);
13337 imag1 = TREE_IMAGPART (arg1);
13340 rcond = fold_binary_loc (loc, code, type, real0, real1);
13341 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13343 if (integer_zerop (rcond))
13345 if (code == EQ_EXPR)
13346 return omit_two_operands_loc (loc, type, boolean_false_node,
13347 imag0, imag1);
13348 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13350 else
13352 if (code == NE_EXPR)
13353 return omit_two_operands_loc (loc, type, boolean_true_node,
13354 imag0, imag1);
13355 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13359 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13360 if (icond && TREE_CODE (icond) == INTEGER_CST)
13362 if (integer_zerop (icond))
13364 if (code == EQ_EXPR)
13365 return omit_two_operands_loc (loc, type, boolean_false_node,
13366 real0, real1);
13367 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13369 else
13371 if (code == NE_EXPR)
13372 return omit_two_operands_loc (loc, type, boolean_true_node,
13373 real0, real1);
13374 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13379 return NULL_TREE;
13381 case LT_EXPR:
13382 case GT_EXPR:
13383 case LE_EXPR:
13384 case GE_EXPR:
13385 tem = fold_comparison (loc, code, type, op0, op1);
13386 if (tem != NULL_TREE)
13387 return tem;
13389 /* Transform comparisons of the form X +- C CMP X. */
13390 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13391 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13392 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13393 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13394 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13395 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13397 tree arg01 = TREE_OPERAND (arg0, 1);
13398 enum tree_code code0 = TREE_CODE (arg0);
13399 int is_positive;
13401 if (TREE_CODE (arg01) == REAL_CST)
13402 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13403 else
13404 is_positive = tree_int_cst_sgn (arg01);
13406 /* (X - c) > X becomes false. */
13407 if (code == GT_EXPR
13408 && ((code0 == MINUS_EXPR && is_positive >= 0)
13409 || (code0 == PLUS_EXPR && is_positive <= 0)))
13411 if (TREE_CODE (arg01) == INTEGER_CST
13412 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13413 fold_overflow_warning (("assuming signed overflow does not "
13414 "occur when assuming that (X - c) > X "
13415 "is always false"),
13416 WARN_STRICT_OVERFLOW_ALL);
13417 return constant_boolean_node (0, type);
13420 /* Likewise (X + c) < X becomes false. */
13421 if (code == LT_EXPR
13422 && ((code0 == PLUS_EXPR && is_positive >= 0)
13423 || (code0 == MINUS_EXPR && is_positive <= 0)))
13425 if (TREE_CODE (arg01) == INTEGER_CST
13426 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13427 fold_overflow_warning (("assuming signed overflow does not "
13428 "occur when assuming that "
13429 "(X + c) < X is always false"),
13430 WARN_STRICT_OVERFLOW_ALL);
13431 return constant_boolean_node (0, type);
13434 /* Convert (X - c) <= X to true. */
13435 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13436 && code == LE_EXPR
13437 && ((code0 == MINUS_EXPR && is_positive >= 0)
13438 || (code0 == PLUS_EXPR && is_positive <= 0)))
13440 if (TREE_CODE (arg01) == INTEGER_CST
13441 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13442 fold_overflow_warning (("assuming signed overflow does not "
13443 "occur when assuming that "
13444 "(X - c) <= X is always true"),
13445 WARN_STRICT_OVERFLOW_ALL);
13446 return constant_boolean_node (1, type);
13449 /* Convert (X + c) >= X to true. */
13450 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13451 && code == GE_EXPR
13452 && ((code0 == PLUS_EXPR && is_positive >= 0)
13453 || (code0 == MINUS_EXPR && is_positive <= 0)))
13455 if (TREE_CODE (arg01) == INTEGER_CST
13456 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13457 fold_overflow_warning (("assuming signed overflow does not "
13458 "occur when assuming that "
13459 "(X + c) >= X is always true"),
13460 WARN_STRICT_OVERFLOW_ALL);
13461 return constant_boolean_node (1, type);
13464 if (TREE_CODE (arg01) == INTEGER_CST)
13466 /* Convert X + c > X and X - c < X to true for integers. */
13467 if (code == GT_EXPR
13468 && ((code0 == PLUS_EXPR && is_positive > 0)
13469 || (code0 == MINUS_EXPR && is_positive < 0)))
13471 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13472 fold_overflow_warning (("assuming signed overflow does "
13473 "not occur when assuming that "
13474 "(X + c) > X is always true"),
13475 WARN_STRICT_OVERFLOW_ALL);
13476 return constant_boolean_node (1, type);
13479 if (code == LT_EXPR
13480 && ((code0 == MINUS_EXPR && is_positive > 0)
13481 || (code0 == PLUS_EXPR && is_positive < 0)))
13483 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13484 fold_overflow_warning (("assuming signed overflow does "
13485 "not occur when assuming that "
13486 "(X - c) < X is always true"),
13487 WARN_STRICT_OVERFLOW_ALL);
13488 return constant_boolean_node (1, type);
13491 /* Convert X + c <= X and X - c >= X to false for integers. */
13492 if (code == LE_EXPR
13493 && ((code0 == PLUS_EXPR && is_positive > 0)
13494 || (code0 == MINUS_EXPR && is_positive < 0)))
13496 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13497 fold_overflow_warning (("assuming signed overflow does "
13498 "not occur when assuming that "
13499 "(X + c) <= X is always false"),
13500 WARN_STRICT_OVERFLOW_ALL);
13501 return constant_boolean_node (0, type);
13504 if (code == GE_EXPR
13505 && ((code0 == MINUS_EXPR && is_positive > 0)
13506 || (code0 == PLUS_EXPR && is_positive < 0)))
13508 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13509 fold_overflow_warning (("assuming signed overflow does "
13510 "not occur when assuming that "
13511 "(X - c) >= X is always false"),
13512 WARN_STRICT_OVERFLOW_ALL);
13513 return constant_boolean_node (0, type);
13518 /* Comparisons with the highest or lowest possible integer of
13519 the specified precision will have known values. */
13521 tree arg1_type = TREE_TYPE (arg1);
13522 unsigned int width = TYPE_PRECISION (arg1_type);
13524 if (TREE_CODE (arg1) == INTEGER_CST
13525 && width <= HOST_BITS_PER_DOUBLE_INT
13526 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13528 HOST_WIDE_INT signed_max_hi;
13529 unsigned HOST_WIDE_INT signed_max_lo;
13530 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13532 if (width <= HOST_BITS_PER_WIDE_INT)
13534 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13535 - 1;
13536 signed_max_hi = 0;
13537 max_hi = 0;
13539 if (TYPE_UNSIGNED (arg1_type))
13541 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13542 min_lo = 0;
13543 min_hi = 0;
13545 else
13547 max_lo = signed_max_lo;
13548 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13549 min_hi = -1;
13552 else
13554 width -= HOST_BITS_PER_WIDE_INT;
13555 signed_max_lo = -1;
13556 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13557 - 1;
13558 max_lo = -1;
13559 min_lo = 0;
13561 if (TYPE_UNSIGNED (arg1_type))
13563 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13564 min_hi = 0;
13566 else
13568 max_hi = signed_max_hi;
13569 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13573 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13574 && TREE_INT_CST_LOW (arg1) == max_lo)
13575 switch (code)
13577 case GT_EXPR:
13578 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13580 case GE_EXPR:
13581 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13583 case LE_EXPR:
13584 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13586 case LT_EXPR:
13587 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13589 /* The GE_EXPR and LT_EXPR cases above are not normally
13590 reached because of previous transformations. */
13592 default:
13593 break;
13595 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13596 == max_hi
13597 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13598 switch (code)
13600 case GT_EXPR:
13601 arg1 = const_binop (PLUS_EXPR, arg1,
13602 build_int_cst (TREE_TYPE (arg1), 1));
13603 return fold_build2_loc (loc, EQ_EXPR, type,
13604 fold_convert_loc (loc,
13605 TREE_TYPE (arg1), arg0),
13606 arg1);
13607 case LE_EXPR:
13608 arg1 = const_binop (PLUS_EXPR, arg1,
13609 build_int_cst (TREE_TYPE (arg1), 1));
13610 return fold_build2_loc (loc, NE_EXPR, type,
13611 fold_convert_loc (loc, TREE_TYPE (arg1),
13612 arg0),
13613 arg1);
13614 default:
13615 break;
13617 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13618 == min_hi
13619 && TREE_INT_CST_LOW (arg1) == min_lo)
13620 switch (code)
13622 case LT_EXPR:
13623 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13625 case LE_EXPR:
13626 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13628 case GE_EXPR:
13629 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13631 case GT_EXPR:
13632 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13634 default:
13635 break;
13637 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13638 == min_hi
13639 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13640 switch (code)
13642 case GE_EXPR:
13643 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13644 return fold_build2_loc (loc, NE_EXPR, type,
13645 fold_convert_loc (loc,
13646 TREE_TYPE (arg1), arg0),
13647 arg1);
13648 case LT_EXPR:
13649 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13650 return fold_build2_loc (loc, EQ_EXPR, type,
13651 fold_convert_loc (loc, TREE_TYPE (arg1),
13652 arg0),
13653 arg1);
13654 default:
13655 break;
13658 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13659 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13660 && TYPE_UNSIGNED (arg1_type)
13661 /* We will flip the signedness of the comparison operator
13662 associated with the mode of arg1, so the sign bit is
13663 specified by this mode. Check that arg1 is the signed
13664 max associated with this sign bit. */
13665 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13666 /* signed_type does not work on pointer types. */
13667 && INTEGRAL_TYPE_P (arg1_type))
13669 /* The following case also applies to X < signed_max+1
13670 and X >= signed_max+1 because of previous transformations. */
13671 if (code == LE_EXPR || code == GT_EXPR)
13673 tree st;
13674 st = signed_type_for (TREE_TYPE (arg1));
13675 return fold_build2_loc (loc,
13676 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13677 type, fold_convert_loc (loc, st, arg0),
13678 build_int_cst (st, 0));
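/* Editorial sketch, not part of fold-const.c: comparisons against the
   extreme values of a type collapse to constants or equalities, e.g.
   for unsigned char x, x > 255 is always false and x >= 255 is just
   x == 255 (the operands promote to int, so the literals are exact).  */
#if 0
#include <assert.h>

static void
check_extreme_value_compare (unsigned char x)
{
  assert (!(x > 255));
  assert ((x >= 255) == (x == 255));
}
#endif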
13684 /* If we are comparing an ABS_EXPR with a constant, we can
13685 convert all the cases into explicit comparisons, but they may
13686 well not be faster than doing the ABS and one comparison.
13687 But ABS (X) <= C is a range comparison, which becomes a subtraction
13688 and a comparison, and is probably faster. */
13689 if (code == LE_EXPR
13690 && TREE_CODE (arg1) == INTEGER_CST
13691 && TREE_CODE (arg0) == ABS_EXPR
13692 && ! TREE_SIDE_EFFECTS (arg0)
13693 && (0 != (tem = negate_expr (arg1)))
13694 && TREE_CODE (tem) == INTEGER_CST
13695 && !TREE_OVERFLOW (tem))
13696 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13697 build2 (GE_EXPR, type,
13698 TREE_OPERAND (arg0, 0), tem),
13699 build2 (LE_EXPR, type,
13700 TREE_OPERAND (arg0, 0), arg1));
13702 /* Convert ABS_EXPR<x> >= 0 to true. */
13703 strict_overflow_p = false;
13704 if (code == GE_EXPR
13705 && (integer_zerop (arg1)
13706 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13707 && real_zerop (arg1)))
13708 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13710 if (strict_overflow_p)
13711 fold_overflow_warning (("assuming signed overflow does not occur "
13712 "when simplifying comparison of "
13713 "absolute value and zero"),
13714 WARN_STRICT_OVERFLOW_CONDITIONAL);
13715 return omit_one_operand_loc (loc, type,
13716 constant_boolean_node (true, type),
13717 arg0);
13720 /* Convert ABS_EXPR<x> < 0 to false. */
13721 strict_overflow_p = false;
13722 if (code == LT_EXPR
13723 && (integer_zerop (arg1) || real_zerop (arg1))
13724 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13726 if (strict_overflow_p)
13727 fold_overflow_warning (("assuming signed overflow does not occur "
13728 "when simplifying comparison of "
13729 "absolute value and zero"),
13730 WARN_STRICT_OVERFLOW_CONDITIONAL);
13731 return omit_one_operand_loc (loc, type,
13732 constant_boolean_node (false, type),
13733 arg0);
13736 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13737 and similarly for >= into !=. */
13738 if ((code == LT_EXPR || code == GE_EXPR)
13739 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13740 && TREE_CODE (arg1) == LSHIFT_EXPR
13741 && integer_onep (TREE_OPERAND (arg1, 0)))
13742 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13743 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13744 TREE_OPERAND (arg1, 1)),
13745 build_zero_cst (TREE_TYPE (arg0)));
13747 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13748 otherwise Y might be >= # of bits in X's type and thus e.g.
13749 (unsigned char) (1 << Y) for Y == 15 might be 0.
13750 If the cast is widening, then 1 << Y should have unsigned type,
13751 otherwise if Y is number of bits in the signed shift type minus 1,
13752 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13753 Y == 31 might be 0xffffffff80000000. */
13754 if ((code == LT_EXPR || code == GE_EXPR)
13755 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13756 && CONVERT_EXPR_P (arg1)
13757 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13758 && (TYPE_PRECISION (TREE_TYPE (arg1))
13759 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13760 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13761 || (TYPE_PRECISION (TREE_TYPE (arg1))
13762 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13763 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13765 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13766 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13767 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13768 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13769 build_zero_cst (TREE_TYPE (arg0)));
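/* Editorial sketch, not part of fold-const.c: for unsigned x and a
   valid shift count y, x < (1 << y) says all bits from position y
   upward are clear, i.e. (x >> y) == 0; likewise x >= (1 << y)
   becomes (x >> y) != 0.  */
#if 0
#include <assert.h>

static void
check_less_than_pow2_fold (unsigned x, unsigned y)
{
  /* Assumes y is smaller than the bit width of unsigned.  */
  assert ((x < (1u << y)) == ((x >> y) == 0));
}
#endif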
13772 return NULL_TREE;
13774 case UNORDERED_EXPR:
13775 case ORDERED_EXPR:
13776 case UNLT_EXPR:
13777 case UNLE_EXPR:
13778 case UNGT_EXPR:
13779 case UNGE_EXPR:
13780 case UNEQ_EXPR:
13781 case LTGT_EXPR:
13782 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13784 t1 = fold_relational_const (code, type, arg0, arg1);
13785 if (t1 != NULL_TREE)
13786 return t1;
13789 /* If the first operand is NaN, the result is constant. */
13790 if (TREE_CODE (arg0) == REAL_CST
13791 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13792 && (code != LTGT_EXPR || ! flag_trapping_math))
13794 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13795 ? integer_zero_node
13796 : integer_one_node;
13797 return omit_one_operand_loc (loc, type, t1, arg1);
13800 /* If the second operand is NaN, the result is constant. */
13801 if (TREE_CODE (arg1) == REAL_CST
13802 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13803 && (code != LTGT_EXPR || ! flag_trapping_math))
13805 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13806 ? integer_zero_node
13807 : integer_one_node;
13808 return omit_one_operand_loc (loc, type, t1, arg0);
13811 /* Simplify unordered comparison of something with itself. */
13812 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13813 && operand_equal_p (arg0, arg1, 0))
13814 return constant_boolean_node (1, type);
13816 if (code == LTGT_EXPR
13817 && !flag_trapping_math
13818 && operand_equal_p (arg0, arg1, 0))
13819 return constant_boolean_node (0, type);
13821 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13823 tree targ0 = strip_float_extensions (arg0);
13824 tree targ1 = strip_float_extensions (arg1);
13825 tree newtype = TREE_TYPE (targ0);
13827 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13828 newtype = TREE_TYPE (targ1);
13830 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13831 return fold_build2_loc (loc, code, type,
13832 fold_convert_loc (loc, newtype, targ0),
13833 fold_convert_loc (loc, newtype, targ1));
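/* Illustrative sketch, for hypothetical float operands f1 and f2:

       (double) f1 CMP (double) f2    folds to    f1 CMP f2

   for the unordered codes handled in this case, since widening a
   float to double is exact and cannot change the comparison
   outcome.  */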
13836 return NULL_TREE;
13838 case COMPOUND_EXPR:
13839 /* When pedantic, a compound expression can be neither an lvalue
13840 nor an integer constant expression. */
13841 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13842 return NULL_TREE;
13843 /* Don't let (0, 0) be a null pointer constant. */
13844 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13845 : fold_convert_loc (loc, type, arg1);
13846 return pedantic_non_lvalue_loc (loc, tem);
13848 case COMPLEX_EXPR:
13849 if ((TREE_CODE (arg0) == REAL_CST
13850 && TREE_CODE (arg1) == REAL_CST)
13851 || (TREE_CODE (arg0) == INTEGER_CST
13852 && TREE_CODE (arg1) == INTEGER_CST))
13853 return build_complex (type, arg0, arg1);
13854 if (TREE_CODE (arg0) == REALPART_EXPR
13855 && TREE_CODE (arg1) == IMAGPART_EXPR
13856 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13857 && operand_equal_p (TREE_OPERAND (arg0, 0),
13858 TREE_OPERAND (arg1, 0), 0))
13859 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13860 TREE_OPERAND (arg1, 0));
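/* Illustrative sketch: for a hypothetical complex variable z whose
   type matches TYPE,

       COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>>

   folds back to z; omit_one_operand_loc preserves any side effects
   of the discarded IMAGPART_EXPR operand.  */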
13861 return NULL_TREE;
13863 case ASSERT_EXPR:
13864 /* An ASSERT_EXPR should never be passed to fold_binary. */
13865 gcc_unreachable ();
13867 case VEC_PACK_TRUNC_EXPR:
13868 case VEC_PACK_FIX_TRUNC_EXPR:
13870 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13871 tree *elts;
13873 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13874 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13875 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13876 return NULL_TREE;
13878 elts = XALLOCAVEC (tree, nelts);
13879 if (!vec_cst_ctor_to_array (arg0, elts)
13880 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13881 return NULL_TREE;
13883 for (i = 0; i < nelts; i++)
13885 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13886 ? NOP_EXPR : FIX_TRUNC_EXPR,
13887 TREE_TYPE (type), elts[i]);
13888 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13889 return NULL_TREE;
13892 return build_vector (type, elts);
13895 case VEC_WIDEN_MULT_LO_EXPR:
13896 case VEC_WIDEN_MULT_HI_EXPR:
13897 case VEC_WIDEN_MULT_EVEN_EXPR:
13898 case VEC_WIDEN_MULT_ODD_EXPR:
13900 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13901 unsigned int out, ofs, scale;
13902 tree *elts;
13904 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13905 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13906 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13907 return NULL_TREE;
13909 elts = XALLOCAVEC (tree, nelts * 4);
13910 if (!vec_cst_ctor_to_array (arg0, elts)
13911 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13912 return NULL_TREE;
13914 if (code == VEC_WIDEN_MULT_LO_EXPR)
13915 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13916 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13917 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13918 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13919 scale = 1, ofs = 0;
13920 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13921 scale = 1, ofs = 1;
13923 for (out = 0; out < nelts; out++)
13925 unsigned int in1 = (out << scale) + ofs;
13926 unsigned int in2 = in1 + nelts * 2;
13927 tree t1, t2;
13929 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13930 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13932 if (t1 == NULL_TREE || t2 == NULL_TREE)
13933 return NULL_TREE;
13934 elts[out] = const_binop (MULT_EXPR, t1, t2);
13935 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13936 return NULL_TREE;
13939 return build_vector (type, elts);
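/* Illustrative sketch of the lane selection above, for a
   hypothetical V4HI * V4HI -> V2SI widening multiply of constant
   vectors a and b:

       EVEN: out[0] = a[0] * b[0], out[1] = a[2] * b[2]
       ODD:  out[0] = a[1] * b[1], out[1] = a[3] * b[3]

   while LO and HI multiply a contiguous half of the input lanes,
   with the half chosen according to BYTES_BIG_ENDIAN.  */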
13942 default:
13943 return NULL_TREE;
13944 } /* switch (code) */
13947 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13948 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13949 of GOTO_EXPR. */
13951 static tree
13952 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13954 switch (TREE_CODE (*tp))
13956 case LABEL_EXPR:
13957 return *tp;
13959 case GOTO_EXPR:
13960 *walk_subtrees = 0;
13962 /* ... fall through ... */
13964 default:
13965 return NULL_TREE;
13969 /* Return whether the sub-tree ST contains a label which is accessible from
13970 outside the sub-tree. */
13972 static bool
13973 contains_label_p (tree st)
13975 return
13976 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13979 /* Fold a ternary expression of code CODE and type TYPE with operands
13980 OP0, OP1, and OP2. Return the folded expression if folding is
13981 successful. Otherwise, return NULL_TREE. */
13983 tree
13984 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13985 tree op0, tree op1, tree op2)
13987 tree tem;
13988 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13989 enum tree_code_class kind = TREE_CODE_CLASS (code);
13991 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13992 && TREE_CODE_LENGTH (code) == 3);
13994 /* Strip any conversions that don't change the mode. This is safe
13995 for every expression, except for a comparison expression because
13996 its signedness is derived from its operands. So, in the latter
13997 case, only strip conversions that don't change the signedness.
13999 Note that this is done as an internal manipulation within the
14000 constant folder, in order to find the simplest representation of
14001 the arguments so that their form can be studied. In any case,
14002 the appropriate type conversions should be put back in the tree
14003 that will get out of the constant folder. */
14004 if (op0)
14006 arg0 = op0;
14007 STRIP_NOPS (arg0);
14010 if (op1)
14012 arg1 = op1;
14013 STRIP_NOPS (arg1);
14016 if (op2)
14018 arg2 = op2;
14019 STRIP_NOPS (arg2);
14022 switch (code)
14024 case COMPONENT_REF:
14025 if (TREE_CODE (arg0) == CONSTRUCTOR
14026 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14028 unsigned HOST_WIDE_INT idx;
14029 tree field, value;
14030 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14031 if (field == arg1)
14032 return value;
14034 return NULL_TREE;
14036 case COND_EXPR:
14037 case VEC_COND_EXPR:
14038 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14039 so all simple results must be passed through pedantic_non_lvalue. */
14040 if (TREE_CODE (arg0) == INTEGER_CST)
14042 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14043 tem = integer_zerop (arg0) ? op2 : op1;
14044 /* Only optimize constant conditions when the selected branch
14045 has the same type as the COND_EXPR. This avoids optimizing
14046 away "c ? x : throw", where the throw has a void type.
14047 Avoid throwing away the operand that contains a label. */
14048 if ((!TREE_SIDE_EFFECTS (unused_op)
14049 || !contains_label_p (unused_op))
14050 && (! VOID_TYPE_P (TREE_TYPE (tem))
14051 || VOID_TYPE_P (type)))
14052 return pedantic_non_lvalue_loc (loc, tem);
14053 return NULL_TREE;
14055 else if (TREE_CODE (arg0) == VECTOR_CST)
14057 if (integer_all_onesp (arg0))
14058 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14059 if (integer_zerop (arg0))
14060 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14062 if ((TREE_CODE (arg1) == VECTOR_CST
14063 || TREE_CODE (arg1) == CONSTRUCTOR)
14064 && (TREE_CODE (arg2) == VECTOR_CST
14065 || TREE_CODE (arg2) == CONSTRUCTOR))
14067 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14068 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14069 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14070 for (i = 0; i < nelts; i++)
14072 tree val = VECTOR_CST_ELT (arg0, i);
14073 if (integer_all_onesp (val))
14074 sel[i] = i;
14075 else if (integer_zerop (val))
14076 sel[i] = nelts + i;
14077 else /* Currently unreachable. */
14078 return NULL_TREE;
14080 tree t = fold_vec_perm (type, arg1, arg2, sel);
14081 if (t != NULL_TREE)
14082 return t;
14086 if (operand_equal_p (arg1, op2, 0))
14087 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14089 /* If we have A op B ? A : C, we may be able to convert this to a
14090 simpler expression, depending on the operation and the values
14091 of B and C. Signed zeros prevent all of these transformations,
14092 for reasons given above each one.
14094 Also try swapping the arguments and inverting the conditional. */
14095 if (COMPARISON_CLASS_P (arg0)
14096 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14097 arg1, TREE_OPERAND (arg0, 1))
14098 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14100 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14101 if (tem)
14102 return tem;
14105 if (COMPARISON_CLASS_P (arg0)
14106 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14107 op2,
14108 TREE_OPERAND (arg0, 1))
14109 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14111 location_t loc0 = expr_location_or (arg0, loc);
14112 tem = fold_invert_truthvalue (loc0, arg0);
14113 if (tem && COMPARISON_CLASS_P (tem))
14115 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14116 if (tem)
14117 return tem;
14121 /* If the second operand is simpler than the third, swap them
14122 since that produces better jump optimization results. */
14123 if (truth_value_p (TREE_CODE (arg0))
14124 && tree_swap_operands_p (op1, op2, false))
14126 location_t loc0 = expr_location_or (arg0, loc);
14127 /* See if this can be inverted. If it can't, possibly because
14128 it was a floating-point inequality comparison, don't do
14129 anything. */
14130 tem = fold_invert_truthvalue (loc0, arg0);
14131 if (tem)
14132 return fold_build3_loc (loc, code, type, tem, op2, op1);
14135 /* Convert A ? 1 : 0 to simply A. */
14136 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14137 : (integer_onep (op1)
14138 && !VECTOR_TYPE_P (type)))
14139 && integer_zerop (op2)
14140 /* If we try to convert OP0 to our type, the
14141 call to fold will try to move the conversion inside
14142 a COND, which will recurse. In that case, the COND_EXPR
14143 is probably the best choice, so leave it alone. */
14144 && type == TREE_TYPE (arg0))
14145 return pedantic_non_lvalue_loc (loc, arg0);
14147 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14148 over COND_EXPR in cases such as floating point comparisons. */
14149 if (integer_zerop (op1)
14150 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14151 : (integer_onep (op2)
14152 && !VECTOR_TYPE_P (type)))
14153 && truth_value_p (TREE_CODE (arg0)))
14154 return pedantic_non_lvalue_loc (loc,
14155 fold_convert_loc (loc, type,
14156 invert_truthvalue_loc (loc,
14157 arg0)));
14159 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14160 if (TREE_CODE (arg0) == LT_EXPR
14161 && integer_zerop (TREE_OPERAND (arg0, 1))
14162 && integer_zerop (op2)
14163 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14165 /* sign_bit_p only checks ARG1 bits within A's precision.
14166 If <sign bit of A> has a wider type than A, bits outside
14167 of A's precision in <sign bit of A> need to be checked.
14168 If they are all 0, this optimization needs to be done
14169 in A's unsigned type; if they are all 1, in A's signed type;
14170 otherwise it can't be done. */
14171 if (TYPE_PRECISION (TREE_TYPE (tem))
14172 < TYPE_PRECISION (TREE_TYPE (arg1))
14173 && TYPE_PRECISION (TREE_TYPE (tem))
14174 < TYPE_PRECISION (type))
14176 unsigned HOST_WIDE_INT mask_lo;
14177 HOST_WIDE_INT mask_hi;
14178 int inner_width, outer_width;
14179 tree tem_type;
14181 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14182 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14183 if (outer_width > TYPE_PRECISION (type))
14184 outer_width = TYPE_PRECISION (type);
14186 if (outer_width > HOST_BITS_PER_WIDE_INT)
14188 mask_hi = ((unsigned HOST_WIDE_INT) -1
14189 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14190 mask_lo = -1;
14192 else
14194 mask_hi = 0;
14195 mask_lo = ((unsigned HOST_WIDE_INT) -1
14196 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14198 if (inner_width > HOST_BITS_PER_WIDE_INT)
14200 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14201 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14202 mask_lo = 0;
14204 else
14205 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14206 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14208 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14209 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14211 tem_type = signed_type_for (TREE_TYPE (tem));
14212 tem = fold_convert_loc (loc, tem_type, tem);
14214 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14215 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14217 tem_type = unsigned_type_for (TREE_TYPE (tem));
14218 tem = fold_convert_loc (loc, tem_type, tem);
14220 else
14221 tem = NULL;
14224 if (tem)
14225 return
14226 fold_convert_loc (loc, type,
14227 fold_build2_loc (loc, BIT_AND_EXPR,
14228 TREE_TYPE (tem), tem,
14229 fold_convert_loc (loc,
14230 TREE_TYPE (tem),
14231 arg1)));
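/* Illustrative sketch, for a hypothetical 32-bit int a:

       a < 0 ? (int) 0x80000000 : 0    folds to    a & (int) 0x80000000

   since the sign bit of a is set exactly when a < 0.  */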
14234 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14235 already handled above. */
14236 if (TREE_CODE (arg0) == BIT_AND_EXPR
14237 && integer_onep (TREE_OPERAND (arg0, 1))
14238 && integer_zerop (op2)
14239 && integer_pow2p (arg1))
14241 tree tem = TREE_OPERAND (arg0, 0);
14242 STRIP_NOPS (tem);
14243 if (TREE_CODE (tem) == RSHIFT_EXPR
14244 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14245 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14246 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14247 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14248 TREE_OPERAND (tem, 0), arg1);
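/* Illustrative sketch, for a hypothetical integer a and N == 3:

       ((a >> 3) & 1) ? 8 : 0    folds to    a & 8

   because 8 == 1 << 3, the selected constant is exactly the bit
   being tested.  */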
14251 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14252 is probably obsolete because the first operand should be a
14253 truth value (that's why we have the two cases above), but let's
14254 leave it in until we can confirm this for all front-ends. */
14255 if (integer_zerop (op2)
14256 && TREE_CODE (arg0) == NE_EXPR
14257 && integer_zerop (TREE_OPERAND (arg0, 1))
14258 && integer_pow2p (arg1)
14259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14260 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14261 arg1, OEP_ONLY_CONST))
14262 return pedantic_non_lvalue_loc (loc,
14263 fold_convert_loc (loc, type,
14264 TREE_OPERAND (arg0, 0)));
14266 /* Disable the transformations below for vectors, since
14267 fold_binary_op_with_conditional_arg may undo them immediately,
14268 yielding an infinite loop. */
14269 if (code == VEC_COND_EXPR)
14270 return NULL_TREE;
14272 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14273 if (integer_zerop (op2)
14274 && truth_value_p (TREE_CODE (arg0))
14275 && truth_value_p (TREE_CODE (arg1))
14276 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14277 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14278 : TRUTH_ANDIF_EXPR,
14279 type, fold_convert_loc (loc, type, arg0), arg1);
14281 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14282 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14283 && truth_value_p (TREE_CODE (arg0))
14284 && truth_value_p (TREE_CODE (arg1))
14285 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14287 location_t loc0 = expr_location_or (arg0, loc);
14288 /* Only perform transformation if ARG0 is easily inverted. */
14289 tem = fold_invert_truthvalue (loc0, arg0);
14290 if (tem)
14291 return fold_build2_loc (loc, code == VEC_COND_EXPR
14292 ? BIT_IOR_EXPR
14293 : TRUTH_ORIF_EXPR,
14294 type, fold_convert_loc (loc, type, tem),
14295 arg1);
14298 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14299 if (integer_zerop (arg1)
14300 && truth_value_p (TREE_CODE (arg0))
14301 && truth_value_p (TREE_CODE (op2))
14302 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14304 location_t loc0 = expr_location_or (arg0, loc);
14305 /* Only perform transformation if ARG0 is easily inverted. */
14306 tem = fold_invert_truthvalue (loc0, arg0);
14307 if (tem)
14308 return fold_build2_loc (loc, code == VEC_COND_EXPR
14309 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14310 type, fold_convert_loc (loc, type, tem),
14311 op2);
14314 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14315 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14316 && truth_value_p (TREE_CODE (arg0))
14317 && truth_value_p (TREE_CODE (op2))
14318 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14319 return fold_build2_loc (loc, code == VEC_COND_EXPR
14320 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14321 type, fold_convert_loc (loc, type, arg0), op2);
14323 return NULL_TREE;
14325 case CALL_EXPR:
14326 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14327 of fold_ternary on them. */
14328 gcc_unreachable ();
14330 case BIT_FIELD_REF:
14331 if ((TREE_CODE (arg0) == VECTOR_CST
14332 || (TREE_CODE (arg0) == CONSTRUCTOR
14333 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14334 && (type == TREE_TYPE (TREE_TYPE (arg0))
14335 || (TREE_CODE (type) == VECTOR_TYPE
14336 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14338 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14339 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14340 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14341 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14343 if (n != 0
14344 && (idx % width) == 0
14345 && (n % width) == 0
14346 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14348 idx = idx / width;
14349 n = n / width;
14351 if (TREE_CODE (arg0) == VECTOR_CST)
14353 if (n == 1)
14354 return VECTOR_CST_ELT (arg0, idx);
14356 tree *vals = XALLOCAVEC (tree, n);
14357 for (unsigned i = 0; i < n; ++i)
14358 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14359 return build_vector (type, vals);
14362 /* Constructor elements can be subvectors. */
14363 unsigned HOST_WIDE_INT k = 1;
14364 if (CONSTRUCTOR_NELTS (arg0) != 0)
14366 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14367 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14368 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14371 /* We keep an exact subset of the constructor elements. */
14372 if ((idx % k) == 0 && (n % k) == 0)
14374 if (CONSTRUCTOR_NELTS (arg0) == 0)
14375 return build_constructor (type, NULL);
14376 idx /= k;
14377 n /= k;
14378 if (n == 1)
14380 if (idx < CONSTRUCTOR_NELTS (arg0))
14381 return CONSTRUCTOR_ELT (arg0, idx)->value;
14382 return build_zero_cst (type);
14385 vec<constructor_elt, va_gc> *vals;
14386 vec_alloc (vals, n);
14387 for (unsigned i = 0;
14388 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14389 ++i)
14390 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14391 CONSTRUCTOR_ELT
14392 (arg0, idx + i)->value);
14393 return build_constructor (type, vals);
14395 /* The bitfield references a single constructor element. */
14396 else if (idx + n <= (idx / k + 1) * k)
14398 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14399 return build_zero_cst (type);
14400 else if (n == k)
14401 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14402 else
14403 return fold_build3_loc (loc, code, type,
14404 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14405 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14410 /* A bit-field-ref that references the full argument can be stripped. */
14411 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14412 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14413 && integer_zerop (op2))
14414 return fold_convert_loc (loc, type, arg0);
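/* Illustrative sketch: for a hypothetical 32-bit int a,

       BIT_FIELD_REF <a, 32, 0>

   covers all of a's bits and therefore folds to a plain conversion
   of a to TYPE.  */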
14416 /* On constants we can use native encode/interpret to constant
14417 fold (nearly) all BIT_FIELD_REFs. */
14418 if (CONSTANT_CLASS_P (arg0)
14419 && can_native_interpret_type_p (type)
14420 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14421 /* This limitation should not be necessary; we just need to
14422 round this up to the mode size. */
14423 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14424 /* Need bit-shifting of the buffer to relax the following. */
14425 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14427 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14428 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14429 unsigned HOST_WIDE_INT clen;
14430 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14431 /* ??? We cannot tell native_encode_expr to start at
14432 some random byte only. So limit ourselves to a reasonable
14433 amount of work. */
14434 if (clen <= 4096)
14436 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14437 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14438 if (len > 0
14439 && len * BITS_PER_UNIT >= bitpos + bitsize)
14441 tree v = native_interpret_expr (type,
14442 b + bitpos / BITS_PER_UNIT,
14443 bitsize / BITS_PER_UNIT);
14444 if (v)
14445 return v;
14450 return NULL_TREE;
14452 case FMA_EXPR:
14453 /* For integers we can decompose the FMA if possible. */
14454 if (TREE_CODE (arg0) == INTEGER_CST
14455 && TREE_CODE (arg1) == INTEGER_CST)
14456 return fold_build2_loc (loc, PLUS_EXPR, type,
14457 const_binop (MULT_EXPR, arg0, arg1), arg2);
14458 if (integer_zerop (arg2))
14459 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14461 return fold_fma (loc, type, arg0, arg1, arg2);
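/* Illustrative sketch: FMA_EXPR <2, 3, c> folds to 6 + c for a
   hypothetical integer c, and FMA_EXPR <a, b, 0> folds to a * b;
   everything else is left to fold_fma.  */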
14463 case VEC_PERM_EXPR:
14464 if (TREE_CODE (arg2) == VECTOR_CST)
14466 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14467 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14468 tree t;
14469 bool need_mask_canon = false;
14470 bool all_in_vec0 = true;
14471 bool all_in_vec1 = true;
14472 bool maybe_identity = true;
14473 bool single_arg = (op0 == op1);
14474 bool changed = false;
14476 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14477 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14478 for (i = 0; i < nelts; i++)
14480 tree val = VECTOR_CST_ELT (arg2, i);
14481 if (TREE_CODE (val) != INTEGER_CST)
14482 return NULL_TREE;
14484 sel[i] = TREE_INT_CST_LOW (val) & mask;
14485 if (TREE_INT_CST_HIGH (val)
14486 || ((unsigned HOST_WIDE_INT)
14487 TREE_INT_CST_LOW (val) != sel[i]))
14488 need_mask_canon = true;
14490 if (sel[i] < nelts)
14491 all_in_vec1 = false;
14492 else
14493 all_in_vec0 = false;
14495 if ((sel[i] & (nelts-1)) != i)
14496 maybe_identity = false;
14499 if (maybe_identity)
14501 if (all_in_vec0)
14502 return op0;
14503 if (all_in_vec1)
14504 return op1;
14507 if (all_in_vec0)
14508 op1 = op0;
14509 else if (all_in_vec1)
14511 op0 = op1;
14512 for (i = 0; i < nelts; i++)
14513 sel[i] -= nelts;
14514 need_mask_canon = true;
14517 if ((TREE_CODE (op0) == VECTOR_CST
14518 || TREE_CODE (op0) == CONSTRUCTOR)
14519 && (TREE_CODE (op1) == VECTOR_CST
14520 || TREE_CODE (op1) == CONSTRUCTOR))
14522 t = fold_vec_perm (type, op0, op1, sel);
14523 if (t != NULL_TREE)
14524 return t;
14527 if (op0 == op1 && !single_arg)
14528 changed = true;
14530 if (need_mask_canon && arg2 == op2)
14532 tree *tsel = XALLOCAVEC (tree, nelts);
14533 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14534 for (i = 0; i < nelts; i++)
14535 tsel[i] = build_int_cst (eltype, sel[i]);
14536 op2 = build_vector (TREE_TYPE (arg2), tsel);
14537 changed = true;
14540 if (changed)
14541 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14543 return NULL_TREE;
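/* Illustrative sketch, for hypothetical V4SI operands v and w:

       VEC_PERM_EXPR <v, w, {0, 1, 2, 3}>    folds to    v
       VEC_PERM_EXPR <v, w, {4, 5, 6, 7}>    folds to    w

   since each mask selects one whole input in lane order
   (maybe_identity with all_in_vec0 resp. all_in_vec1).  */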
14545 default:
14546 return NULL_TREE;
14547 } /* switch (code) */
14550 /* Perform constant folding and related simplification of EXPR.
14551 The related simplifications include x*1 => x, x*0 => 0, etc.,
14552 and application of the associative law.
14553 NOP_EXPR conversions may be removed freely (as long as we
14554 are careful not to change the type of the overall expression).
14555 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14556 but we can constant-fold them if they have constant operands. */
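/* Illustrative sketch: for a hypothetical tree t built as
   PLUS_EXPR <1, 2> over INTEGER_CSTs,

       tree folded = fold (t);

   yields the INTEGER_CST 3, whereas fold of a tree with no known
   simplification returns the input tree unchanged.  */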
14558 #ifdef ENABLE_FOLD_CHECKING
14559 # define fold(x) fold_1 (x)
14560 static tree fold_1 (tree);
14561 static
14562 #endif
14563 tree
14564 fold (tree expr)
14566 const tree t = expr;
14567 enum tree_code code = TREE_CODE (t);
14568 enum tree_code_class kind = TREE_CODE_CLASS (code);
14569 tree tem;
14570 location_t loc = EXPR_LOCATION (expr);
14572 /* Return right away if a constant. */
14573 if (kind == tcc_constant)
14574 return t;
14576 /* CALL_EXPR-like objects with variable numbers of operands are
14577 treated specially. */
14578 if (kind == tcc_vl_exp)
14580 if (code == CALL_EXPR)
14582 tem = fold_call_expr (loc, expr, false);
14583 return tem ? tem : expr;
14585 return expr;
14588 if (IS_EXPR_CODE_CLASS (kind))
14590 tree type = TREE_TYPE (t);
14591 tree op0, op1, op2;
14593 switch (TREE_CODE_LENGTH (code))
14595 case 1:
14596 op0 = TREE_OPERAND (t, 0);
14597 tem = fold_unary_loc (loc, code, type, op0);
14598 return tem ? tem : expr;
14599 case 2:
14600 op0 = TREE_OPERAND (t, 0);
14601 op1 = TREE_OPERAND (t, 1);
14602 tem = fold_binary_loc (loc, code, type, op0, op1);
14603 return tem ? tem : expr;
14604 case 3:
14605 op0 = TREE_OPERAND (t, 0);
14606 op1 = TREE_OPERAND (t, 1);
14607 op2 = TREE_OPERAND (t, 2);
14608 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14609 return tem ? tem : expr;
14610 default:
14611 break;
14615 switch (code)
14617 case ARRAY_REF:
14619 tree op0 = TREE_OPERAND (t, 0);
14620 tree op1 = TREE_OPERAND (t, 1);
14622 if (TREE_CODE (op1) == INTEGER_CST
14623 && TREE_CODE (op0) == CONSTRUCTOR
14624 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14626 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14627 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14628 unsigned HOST_WIDE_INT begin = 0;
14630 /* Find a matching index by means of a binary search. */
14631 while (begin != end)
14633 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14634 tree index = (*elts)[middle].index;
14636 if (TREE_CODE (index) == INTEGER_CST
14637 && tree_int_cst_lt (index, op1))
14638 begin = middle + 1;
14639 else if (TREE_CODE (index) == INTEGER_CST
14640 && tree_int_cst_lt (op1, index))
14641 end = middle;
14642 else if (TREE_CODE (index) == RANGE_EXPR
14643 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14644 begin = middle + 1;
14645 else if (TREE_CODE (index) == RANGE_EXPR
14646 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14647 end = middle;
14648 else
14649 return (*elts)[middle].value;
14653 return t;
14656 /* Return a VECTOR_CST if possible. */
14657 case CONSTRUCTOR:
14659 tree type = TREE_TYPE (t);
14660 if (TREE_CODE (type) != VECTOR_TYPE)
14661 return t;
14663 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14664 unsigned HOST_WIDE_INT idx, pos = 0;
14665 tree value;
14667 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14669 if (!CONSTANT_CLASS_P (value))
14670 return t;
14671 if (TREE_CODE (value) == VECTOR_CST)
14673 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14674 vec[pos++] = VECTOR_CST_ELT (value, i);
14676 else
14677 vec[pos++] = value;
14679 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14680 vec[pos] = build_zero_cst (TREE_TYPE (type));
14682 return build_vector (type, vec);
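/* Illustrative sketch: a CONSTRUCTOR {1, 2} of a hypothetical V4SI
   type folds to the VECTOR_CST {1, 2, 0, 0}; the loop above fills
   the trailing elements with zeros.  */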
14685 case CONST_DECL:
14686 return fold (DECL_INITIAL (t));
14688 default:
14689 return t;
14690 } /* switch (code) */
14693 #ifdef ENABLE_FOLD_CHECKING
14694 #undef fold
14696 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14697 hash_table <pointer_hash <tree_node> >);
14698 static void fold_check_failed (const_tree, const_tree);
14699 void print_fold_checksum (const_tree);
14701 /* When --enable-checking=fold, compute a digest of expr before
14702 and after the actual fold call to verify that fold did not
14703 accidentally change the original expr. */
14705 tree
14706 fold (tree expr)
14708 tree ret;
14709 struct md5_ctx ctx;
14710 unsigned char checksum_before[16], checksum_after[16];
14711 hash_table <pointer_hash <tree_node> > ht;
14713 ht.create (32);
14714 md5_init_ctx (&ctx);
14715 fold_checksum_tree (expr, &ctx, ht);
14716 md5_finish_ctx (&ctx, checksum_before);
14717 ht.empty ();
14719 ret = fold_1 (expr);
14721 md5_init_ctx (&ctx);
14722 fold_checksum_tree (expr, &ctx, ht);
14723 md5_finish_ctx (&ctx, checksum_after);
14724 ht.dispose ();
14726 if (memcmp (checksum_before, checksum_after, 16))
14727 fold_check_failed (expr, ret);
14729 return ret;
14732 void
14733 print_fold_checksum (const_tree expr)
14735 struct md5_ctx ctx;
14736 unsigned char checksum[16], cnt;
14737 hash_table <pointer_hash <tree_node> > ht;
14739 ht.create (32);
14740 md5_init_ctx (&ctx);
14741 fold_checksum_tree (expr, &ctx, ht);
14742 md5_finish_ctx (&ctx, checksum);
14743 ht.dispose ();
14744 for (cnt = 0; cnt < 16; ++cnt)
14745 fprintf (stderr, "%02x", checksum[cnt]);
14746 putc ('\n', stderr);
14749 static void
14750 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14752 internal_error ("fold check: original tree changed by fold");
14755 static void
14756 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14757 hash_table <pointer_hash <tree_node> > ht)
14759 tree_node **slot;
14760 enum tree_code code;
14761 union tree_node buf;
14762 int i, len;
14764 recursive_label:
14765 if (expr == NULL)
14766 return;
14767 slot = ht.find_slot (expr, INSERT);
14768 if (*slot != NULL)
14769 return;
14770 *slot = CONST_CAST_TREE (expr);
14771 code = TREE_CODE (expr);
14772 if (TREE_CODE_CLASS (code) == tcc_declaration
14773 && DECL_ASSEMBLER_NAME_SET_P (expr))
14775 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14776 memcpy ((char *) &buf, expr, tree_size (expr));
14777 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14778 expr = (tree) &buf;
14780 else if (TREE_CODE_CLASS (code) == tcc_type
14781 && (TYPE_POINTER_TO (expr)
14782 || TYPE_REFERENCE_TO (expr)
14783 || TYPE_CACHED_VALUES_P (expr)
14784 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14785 || TYPE_NEXT_VARIANT (expr)))
14787 /* Allow these fields to be modified. */
14788 tree tmp;
14789 memcpy ((char *) &buf, expr, tree_size (expr));
14790 expr = tmp = (tree) &buf;
14791 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14792 TYPE_POINTER_TO (tmp) = NULL;
14793 TYPE_REFERENCE_TO (tmp) = NULL;
14794 TYPE_NEXT_VARIANT (tmp) = NULL;
14795 if (TYPE_CACHED_VALUES_P (tmp))
14797 TYPE_CACHED_VALUES_P (tmp) = 0;
14798 TYPE_CACHED_VALUES (tmp) = NULL;
14801 md5_process_bytes (expr, tree_size (expr), ctx);
14802 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14803 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14804 if (TREE_CODE_CLASS (code) != tcc_type
14805 && TREE_CODE_CLASS (code) != tcc_declaration
14806 && code != TREE_LIST
14807 && code != SSA_NAME
14808 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14809 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14810 switch (TREE_CODE_CLASS (code))
14812 case tcc_constant:
14813 switch (code)
14815 case STRING_CST:
14816 md5_process_bytes (TREE_STRING_POINTER (expr),
14817 TREE_STRING_LENGTH (expr), ctx);
14818 break;
14819 case COMPLEX_CST:
14820 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14821 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14822 break;
14823 case VECTOR_CST:
14824 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14825 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14826 break;
14827 default:
14828 break;
14830 break;
14831 case tcc_exceptional:
14832 switch (code)
14834 case TREE_LIST:
14835 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14836 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14837 expr = TREE_CHAIN (expr);
14838 goto recursive_label;
14839 break;
14840 case TREE_VEC:
14841 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14842 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14843 break;
14844 default:
14845 break;
14847 break;
14848 case tcc_expression:
14849 case tcc_reference:
14850 case tcc_comparison:
14851 case tcc_unary:
14852 case tcc_binary:
14853 case tcc_statement:
14854 case tcc_vl_exp:
14855 len = TREE_OPERAND_LENGTH (expr);
14856 for (i = 0; i < len; ++i)
14857 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14858 break;
14859 case tcc_declaration:
14860 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14861 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14862 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14864 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14865 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14866 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14867 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14868 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14870 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14871 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14873 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14875 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14876 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14877 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14879 break;
14880 case tcc_type:
14881 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14882 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14883 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14884 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14885 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14886 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14887 if (INTEGRAL_TYPE_P (expr)
14888 || SCALAR_FLOAT_TYPE_P (expr))
14890 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14891 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14893 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14894 if (TREE_CODE (expr) == RECORD_TYPE
14895 || TREE_CODE (expr) == UNION_TYPE
14896 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14897 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14898 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14899 break;
14900 default:
14901 break;
14905 /* Helper function for outputting the checksum of a tree T. When
14906 debugging with gdb, you can "define mynext" to be "next" followed
14907 by "call debug_fold_checksum (op0)", then just trace down till the
14908 outputs differ. */
14910 DEBUG_FUNCTION void
14911 debug_fold_checksum (const_tree t)
14913 int i;
14914 unsigned char checksum[16];
14915 struct md5_ctx ctx;
14916 hash_table <pointer_hash <tree_node> > ht;
14917 ht.create (32);
14919 md5_init_ctx (&ctx);
14920 fold_checksum_tree (t, &ctx, ht);
14921 md5_finish_ctx (&ctx, checksum);
14922 ht.empty ();
14924 for (i = 0; i < 16; i++)
14925 fprintf (stderr, "%d ", checksum[i]);
14927 fprintf (stderr, "\n");
14930 #endif
14932 /* Fold a unary tree expression with code CODE of type TYPE with an
14933 operand OP0. LOC is the location of the resulting expression.
14934 Return a folded expression if successful. Otherwise, return a tree
14935 expression with code CODE of type TYPE with an operand OP0. */
14937 tree
14938 fold_build1_stat_loc (location_t loc,
14939 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14941 tree tem;
14942 #ifdef ENABLE_FOLD_CHECKING
14943 unsigned char checksum_before[16], checksum_after[16];
14944 struct md5_ctx ctx;
14945 hash_table <pointer_hash <tree_node> > ht;
14947 ht.create (32);
14948 md5_init_ctx (&ctx);
14949 fold_checksum_tree (op0, &ctx, ht);
14950 md5_finish_ctx (&ctx, checksum_before);
14951 ht.empty ();
14952 #endif
14954 tem = fold_unary_loc (loc, code, type, op0);
14955 if (!tem)
14956 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14958 #ifdef ENABLE_FOLD_CHECKING
14959 md5_init_ctx (&ctx);
14960 fold_checksum_tree (op0, &ctx, ht);
14961 md5_finish_ctx (&ctx, checksum_after);
14962 ht.dispose ();
14964 if (memcmp (checksum_before, checksum_after, 16))
14965 fold_check_failed (op0, tem);
14966 #endif
14967 return tem;
14970 /* Fold a binary tree expression with code CODE of type TYPE with
14971 operands OP0 and OP1. LOC is the location of the resulting
14972 expression. Return a folded expression if successful. Otherwise,
14973 return a tree expression with code CODE of type TYPE with operands
14974 OP0 and OP1. */
14976 tree
14977 fold_build2_stat_loc (location_t loc,
14978 enum tree_code code, tree type, tree op0, tree op1
14979 MEM_STAT_DECL)
14981 tree tem;
14982 #ifdef ENABLE_FOLD_CHECKING
14983 unsigned char checksum_before_op0[16],
14984 checksum_before_op1[16],
14985 checksum_after_op0[16],
14986 checksum_after_op1[16];
14987 struct md5_ctx ctx;
14988 hash_table <pointer_hash <tree_node> > ht;
14990 ht.create (32);
14991 md5_init_ctx (&ctx);
14992 fold_checksum_tree (op0, &ctx, ht);
14993 md5_finish_ctx (&ctx, checksum_before_op0);
14994 ht.empty ();
14996 md5_init_ctx (&ctx);
14997 fold_checksum_tree (op1, &ctx, ht);
14998 md5_finish_ctx (&ctx, checksum_before_op1);
14999 ht.empty ();
15000 #endif
15002 tem = fold_binary_loc (loc, code, type, op0, op1);
15003 if (!tem)
15004 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15006 #ifdef ENABLE_FOLD_CHECKING
15007 md5_init_ctx (&ctx);
15008 fold_checksum_tree (op0, &ctx, ht);
15009 md5_finish_ctx (&ctx, checksum_after_op0);
15010 ht.empty ();
15012 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15013 fold_check_failed (op0, tem);
15015 md5_init_ctx (&ctx);
15016 fold_checksum_tree (op1, &ctx, ht);
15017 md5_finish_ctx (&ctx, checksum_after_op1);
15018 ht.dispose ();
15020 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15021 fold_check_failed (op1, tem);
15022 #endif
15023 return tem;
15026 /* Fold a ternary tree expression with code CODE of type TYPE with
15027 operands OP0, OP1, and OP2. Return a folded expression if
15028 successful. Otherwise, return a tree expression with code CODE of
15029 type TYPE with operands OP0, OP1, and OP2. */
15031 tree
15032 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15033 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15035 tree tem;
15036 #ifdef ENABLE_FOLD_CHECKING
15037 unsigned char checksum_before_op0[16],
15038 checksum_before_op1[16],
15039 checksum_before_op2[16],
15040 checksum_after_op0[16],
15041 checksum_after_op1[16],
15042 checksum_after_op2[16];
15043 struct md5_ctx ctx;
15044 hash_table <pointer_hash <tree_node> > ht;
15046 ht.create (32);
15047 md5_init_ctx (&ctx);
15048 fold_checksum_tree (op0, &ctx, ht);
15049 md5_finish_ctx (&ctx, checksum_before_op0);
15050 ht.empty ();
15052 md5_init_ctx (&ctx);
15053 fold_checksum_tree (op1, &ctx, ht);
15054 md5_finish_ctx (&ctx, checksum_before_op1);
15055 ht.empty ();
15057 md5_init_ctx (&ctx);
15058 fold_checksum_tree (op2, &ctx, ht);
15059 md5_finish_ctx (&ctx, checksum_before_op2);
15060 ht.empty ();
15061 #endif
15063 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15064 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15065 if (!tem)
15066 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15068 #ifdef ENABLE_FOLD_CHECKING
15069 md5_init_ctx (&ctx);
15070 fold_checksum_tree (op0, &ctx, ht);
15071 md5_finish_ctx (&ctx, checksum_after_op0);
15072 ht.empty ();
15074 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15075 fold_check_failed (op0, tem);
15077 md5_init_ctx (&ctx);
15078 fold_checksum_tree (op1, &ctx, ht);
15079 md5_finish_ctx (&ctx, checksum_after_op1);
15080 ht.empty ();
15082 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15083 fold_check_failed (op1, tem);
15085 md5_init_ctx (&ctx);
15086 fold_checksum_tree (op2, &ctx, ht);
15087 md5_finish_ctx (&ctx, checksum_after_op2);
15088 ht.dispose ();
15090 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15091 fold_check_failed (op2, tem);
15092 #endif
15093 return tem;
15096 /* Fold a CALL_EXPR of type TYPE whose function is FN and whose NARGS
15097 arguments are in ARGARRAY, with a null static chain.
15098 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15099 of type TYPE from the given operands as constructed by build_call_array. */
15101 tree
15102 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15103 int nargs, tree *argarray)
15105 tree tem;
15106 #ifdef ENABLE_FOLD_CHECKING
15107 unsigned char checksum_before_fn[16],
15108 checksum_before_arglist[16],
15109 checksum_after_fn[16],
15110 checksum_after_arglist[16];
15111 struct md5_ctx ctx;
15112 hash_table <pointer_hash <tree_node> > ht;
15113 int i;
15115 ht.create (32);
15116 md5_init_ctx (&ctx);
15117 fold_checksum_tree (fn, &ctx, ht);
15118 md5_finish_ctx (&ctx, checksum_before_fn);
15119 ht.empty ();
15121 md5_init_ctx (&ctx);
15122 for (i = 0; i < nargs; i++)
15123 fold_checksum_tree (argarray[i], &ctx, ht);
15124 md5_finish_ctx (&ctx, checksum_before_arglist);
15125 ht.empty ();
15126 #endif
15128 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15130 #ifdef ENABLE_FOLD_CHECKING
15131 md5_init_ctx (&ctx);
15132 fold_checksum_tree (fn, &ctx, ht);
15133 md5_finish_ctx (&ctx, checksum_after_fn);
15134 ht.empty ();
15136 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15137 fold_check_failed (fn, tem);
15139 md5_init_ctx (&ctx);
15140 for (i = 0; i < nargs; i++)
15141 fold_checksum_tree (argarray[i], &ctx, ht);
15142 md5_finish_ctx (&ctx, checksum_after_arglist);
15143 ht.dispose ();
15145 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15146 fold_check_failed (NULL_TREE, tem);
15147 #endif
15148 return tem;
15151 /* Perform constant folding and related simplification of initializer
15152 expression EXPR. These behave identically to "fold_buildN" but ignore
15153 potential run-time traps and exceptions that fold must preserve. */
15155 #define START_FOLD_INIT \
15156 int saved_signaling_nans = flag_signaling_nans;\
15157 int saved_trapping_math = flag_trapping_math;\
15158 int saved_rounding_math = flag_rounding_math;\
15159 int saved_trapv = flag_trapv;\
15160 int saved_folding_initializer = folding_initializer;\
15161 flag_signaling_nans = 0;\
15162 flag_trapping_math = 0;\
15163 flag_rounding_math = 0;\
15164 flag_trapv = 0;\
15165 folding_initializer = 1;
15167 #define END_FOLD_INIT \
15168 flag_signaling_nans = saved_signaling_nans;\
15169 flag_trapping_math = saved_trapping_math;\
15170 flag_rounding_math = saved_rounding_math;\
15171 flag_trapv = saved_trapv;\
15172 folding_initializer = saved_folding_initializer;
15174 tree
15175 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15176 tree type, tree op)
15178 tree result;
15179 START_FOLD_INIT;
15181 result = fold_build1_loc (loc, code, type, op);
15183 END_FOLD_INIT;
15184 return result;
15187 tree
15188 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15189 tree type, tree op0, tree op1)
15191 tree result;
15192 START_FOLD_INIT;
15194 result = fold_build2_loc (loc, code, type, op0, op1);
15196 END_FOLD_INIT;
15197 return result;
15200 tree
15201 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15202 tree type, tree op0, tree op1, tree op2)
15204 tree result;
15205 START_FOLD_INIT;
15207 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15209 END_FOLD_INIT;
15210 return result;
15213 tree
15214 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15215 int nargs, tree *argarray)
15217 tree result;
15218 START_FOLD_INIT;
15220 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15222 END_FOLD_INIT;
15223 return result;
15226 #undef START_FOLD_INIT
15227 #undef END_FOLD_INIT
15229 /* Determine if the first argument is a multiple of the second argument.
15230 Return 0 if it is not, or if we cannot easily determine it to be.
15232 An example of the sort of thing we care about (at this point; this routine
15233 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15234 fold cases do now) is discovering that
15236 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15238 is a multiple of
15240 SAVE_EXPR (J * 8)
15242 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15244 This code also handles discovering that
15246 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15248 is a multiple of 8 so we don't have to worry about dealing with a
15249 possible remainder.
15251 Note that we *look* inside a SAVE_EXPR only to determine how it was
15252 calculated; it is not safe for fold to do much of anything else with the
15253 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15254 at run time. For example, the latter example above *cannot* be implemented
15255 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15256 evaluation time of the original SAVE_EXPR is not necessarily the same at
15257 the time the new expression is evaluated. The only optimization of this
15258 sort that would be valid is changing
15260 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15262 divided by 8 to
15264 SAVE_EXPR (I) * SAVE_EXPR (J)
15266 (where the same SAVE_EXPR (J) is used in the original and the
15267 transformed version). */
15269 int
15270 multiple_of_p (tree type, const_tree top, const_tree bottom)
15272 if (operand_equal_p (top, bottom, 0))
15273 return 1;
15275 if (TREE_CODE (type) != INTEGER_TYPE)
15276 return 0;
15278 switch (TREE_CODE (top))
15280 case BIT_AND_EXPR:
15281 /* Bitwise and provides a power of two multiple. If the mask is
15282 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15283 if (!integer_pow2p (bottom))
15284 return 0;
15285 /* FALLTHRU */
15287 case MULT_EXPR:
15288 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15289 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15291 case PLUS_EXPR:
15292 case MINUS_EXPR:
15293 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15294 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15296 case LSHIFT_EXPR:
15297 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15299 tree op1, t1;
15301 op1 = TREE_OPERAND (top, 1);
15302 /* const_binop may not detect overflow correctly,
15303 so check for it explicitly here. */
15304 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15305 > TREE_INT_CST_LOW (op1)
15306 && TREE_INT_CST_HIGH (op1) == 0
15307 && 0 != (t1 = fold_convert (type,
15308 const_binop (LSHIFT_EXPR,
15309 size_one_node,
15310 op1)))
15311 && !TREE_OVERFLOW (t1))
15312 return multiple_of_p (type, t1, bottom);
15314 return 0;
15316 case NOP_EXPR:
15317 /* Can't handle conversions from a non-integral or wider integral type. */
15318 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15319 || (TYPE_PRECISION (type)
15320 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15321 return 0;
15323 /* ... fall through ... */
15325 case SAVE_EXPR:
15326 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15328 case COND_EXPR:
15329 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15330 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15332 case INTEGER_CST:
15333 if (TREE_CODE (bottom) != INTEGER_CST
15334 || integer_zerop (bottom)
15335 || (TYPE_UNSIGNED (type)
15336 && (tree_int_cst_sgn (top) < 0
15337 || tree_int_cst_sgn (bottom) < 0)))
15338 return 0;
15339 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15340 top, bottom));
15342 default:
15343 return 0;
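/* Illustrative sketch: with a hypothetical sizetype variable j,

       multiple_of_p (sizetype, j * 8 + 16, size_int (8))

   returns 1, because both addends are multiples of 8 (via the
   MULT_EXPR and INTEGER_CST cases above), whereas a bottom of 3
   would return 0 since nothing is known about j.  */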
15347 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15349 static bool
15350 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15352 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15353 && truth_value_p (code))
15354 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15355 have a signed:1 type (where the values are -1 and 0). */
15356 return true;
15357 return false;
15360 /* Return true if (CODE OP0) is known to be non-negative. If the return
15361 value is based on the assumption that signed overflow is undefined,
15362 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15363 *STRICT_OVERFLOW_P. */
15365 bool
15366 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15367 bool *strict_overflow_p)
15369 if (TYPE_UNSIGNED (type))
15370 return true;
15372 switch (code)
15374 case ABS_EXPR:
15375 /* We can't return 1 if flag_wrapv is set because
15376 ABS_EXPR<INT_MIN> = INT_MIN. */
15377 if (!INTEGRAL_TYPE_P (type))
15378 return true;
15379 if (TYPE_OVERFLOW_UNDEFINED (type))
15381 *strict_overflow_p = true;
15382 return true;
15384 break;
15386 case NON_LVALUE_EXPR:
15387 case FLOAT_EXPR:
15388 case FIX_TRUNC_EXPR:
15389 return tree_expr_nonnegative_warnv_p (op0,
15390 strict_overflow_p);
15392 case NOP_EXPR:
15394 tree inner_type = TREE_TYPE (op0);
15395 tree outer_type = type;
15397 if (TREE_CODE (outer_type) == REAL_TYPE)
15399 if (TREE_CODE (inner_type) == REAL_TYPE)
15400 return tree_expr_nonnegative_warnv_p (op0,
15401 strict_overflow_p);
15402 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15404 if (TYPE_UNSIGNED (inner_type))
15405 return true;
15406 return tree_expr_nonnegative_warnv_p (op0,
15407 strict_overflow_p);
15410 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15412 if (TREE_CODE (inner_type) == REAL_TYPE)
15413 return tree_expr_nonnegative_warnv_p (op0,
15414 strict_overflow_p);
15415 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15416 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15417 && TYPE_UNSIGNED (inner_type);
15420 break;
15422 default:
15423 return tree_simple_nonnegative_warnv_p (code, type);
15426 /* We don't know the sign of the operand, so be conservative and return false. */
15427 return false;
15430 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15431 value is based on the assumption that signed overflow is undefined,
15432 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15433 *STRICT_OVERFLOW_P. */
15435 bool
15436 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15437 tree op1, bool *strict_overflow_p)
15439 if (TYPE_UNSIGNED (type))
15440 return true;
15442 switch (code)
15444 case POINTER_PLUS_EXPR:
15445 case PLUS_EXPR:
15446 if (FLOAT_TYPE_P (type))
15447 return (tree_expr_nonnegative_warnv_p (op0,
15448 strict_overflow_p)
15449 && tree_expr_nonnegative_warnv_p (op1,
15450 strict_overflow_p));
15452 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15453 both unsigned and at least 2 bits shorter than the result. */
15454 if (TREE_CODE (type) == INTEGER_TYPE
15455 && TREE_CODE (op0) == NOP_EXPR
15456 && TREE_CODE (op1) == NOP_EXPR)
15458 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15459 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15460 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15461 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15463 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15464 TYPE_PRECISION (inner2)) + 1;
15465 return prec < TYPE_PRECISION (type);
15468 break;
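/* Illustrative sketch: for hypothetical unsigned short operands a
   and b zero-extended to a 32-bit int,

       (int) a + (int) b

   is at most 2 * (2^16 - 1) < 2^17; prec is 17 < 32, so the sum is
   known non-negative.  */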
15470 case MULT_EXPR:
15471 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15473 /* x * x is always non-negative for floating point x
15474 or when signed overflow is undefined. */
15475 if (operand_equal_p (op0, op1, 0)
15476 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15477 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15479 if (TYPE_OVERFLOW_UNDEFINED (type))
15480 *strict_overflow_p = true;
15481 return true;
15485 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15486 both unsigned and the sum of their precisions is less than that of the result. */
15487 if (TREE_CODE (type) == INTEGER_TYPE
15488 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15489 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15491 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15492 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15493 : TREE_TYPE (op0);
15494 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15495 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15496 : TREE_TYPE (op1);
15498 bool unsigned0 = TYPE_UNSIGNED (inner0);
15499 bool unsigned1 = TYPE_UNSIGNED (inner1);
15501 if (TREE_CODE (op0) == INTEGER_CST)
15502 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15504 if (TREE_CODE (op1) == INTEGER_CST)
15505 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15507 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15508 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15510 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15511 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15512 : TYPE_PRECISION (inner0);
15514 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15515 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15516 : TYPE_PRECISION (inner1);
15518 return precision0 + precision1 < TYPE_PRECISION (type);
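/* Illustrative sketch: for hypothetical unsigned int operands
   widened to a 64-bit long long, precision0 + precision1 is
   32 + 32 == 64, not less than 64, so nothing is claimed; for
   unsigned short operands, 16 + 16 == 32 < 64 and the product is
   known non-negative.  */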
15521 return false;
15523 case BIT_AND_EXPR:
15524 case MAX_EXPR:
15525 return (tree_expr_nonnegative_warnv_p (op0,
15526 strict_overflow_p)
15527 || tree_expr_nonnegative_warnv_p (op1,
15528 strict_overflow_p));
15530 case BIT_IOR_EXPR:
15531 case BIT_XOR_EXPR:
15532 case MIN_EXPR:
15533 case RDIV_EXPR:
15534 case TRUNC_DIV_EXPR:
15535 case CEIL_DIV_EXPR:
15536 case FLOOR_DIV_EXPR:
15537 case ROUND_DIV_EXPR:
15538 return (tree_expr_nonnegative_warnv_p (op0,
15539 strict_overflow_p)
15540 && tree_expr_nonnegative_warnv_p (op1,
15541 strict_overflow_p));
15543 case TRUNC_MOD_EXPR:
15544 case CEIL_MOD_EXPR:
15545 case FLOOR_MOD_EXPR:
15546 case ROUND_MOD_EXPR:
15547 return tree_expr_nonnegative_warnv_p (op0,
15548 strict_overflow_p);
15549 default:
15550 return tree_simple_nonnegative_warnv_p (code, type);
15553 /* We don't know the sign of the operands, so be conservative and return false. */
15554 return false;
15557 /* Return true if T is known to be non-negative. If the return
15558 value is based on the assumption that signed overflow is undefined,
15559 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15560 *STRICT_OVERFLOW_P. */
15562 bool
15563 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15565 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15566 return true;
15568 switch (TREE_CODE (t))
15570 case INTEGER_CST:
15571 return tree_int_cst_sgn (t) >= 0;
15573 case REAL_CST:
15574 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15576 case FIXED_CST:
15577 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15579 case COND_EXPR:
15580 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15581 strict_overflow_p)
15582 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15583 strict_overflow_p));
15584 default:
15585 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15586 TREE_TYPE (t));
15588 /* We don't know the sign of `t', so be conservative and return false. */
15589 return false;
15592 /* Return true if T is known to be non-negative. If the return
15593 value is based on the assumption that signed overflow is undefined,
15594 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15595 *STRICT_OVERFLOW_P. */
15597 bool
15598 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15599 tree arg0, tree arg1, bool *strict_overflow_p)
15601 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15602 switch (DECL_FUNCTION_CODE (fndecl))
15604 CASE_FLT_FN (BUILT_IN_ACOS):
15605 CASE_FLT_FN (BUILT_IN_ACOSH):
15606 CASE_FLT_FN (BUILT_IN_CABS):
15607 CASE_FLT_FN (BUILT_IN_COSH):
15608 CASE_FLT_FN (BUILT_IN_ERFC):
15609 CASE_FLT_FN (BUILT_IN_EXP):
15610 CASE_FLT_FN (BUILT_IN_EXP10):
15611 CASE_FLT_FN (BUILT_IN_EXP2):
15612 CASE_FLT_FN (BUILT_IN_FABS):
15613 CASE_FLT_FN (BUILT_IN_FDIM):
15614 CASE_FLT_FN (BUILT_IN_HYPOT):
15615 CASE_FLT_FN (BUILT_IN_POW10):
15616 CASE_INT_FN (BUILT_IN_FFS):
15617 CASE_INT_FN (BUILT_IN_PARITY):
15618 CASE_INT_FN (BUILT_IN_POPCOUNT):
15619 case BUILT_IN_BSWAP32:
15620 case BUILT_IN_BSWAP64:
15621 /* Always true. */
15622 return true;
15624 CASE_FLT_FN (BUILT_IN_SQRT):
15625 /* sqrt(-0.0) is -0.0. */
15626 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15627 return true;
15628 return tree_expr_nonnegative_warnv_p (arg0,
15629 strict_overflow_p);
15631 CASE_FLT_FN (BUILT_IN_ASINH):
15632 CASE_FLT_FN (BUILT_IN_ATAN):
15633 CASE_FLT_FN (BUILT_IN_ATANH):
15634 CASE_FLT_FN (BUILT_IN_CBRT):
15635 CASE_FLT_FN (BUILT_IN_CEIL):
15636 CASE_FLT_FN (BUILT_IN_ERF):
15637 CASE_FLT_FN (BUILT_IN_EXPM1):
15638 CASE_FLT_FN (BUILT_IN_FLOOR):
15639 CASE_FLT_FN (BUILT_IN_FMOD):
15640 CASE_FLT_FN (BUILT_IN_FREXP):
15641 CASE_FLT_FN (BUILT_IN_ICEIL):
15642 CASE_FLT_FN (BUILT_IN_IFLOOR):
15643 CASE_FLT_FN (BUILT_IN_IRINT):
15644 CASE_FLT_FN (BUILT_IN_IROUND):
15645 CASE_FLT_FN (BUILT_IN_LCEIL):
15646 CASE_FLT_FN (BUILT_IN_LDEXP):
15647 CASE_FLT_FN (BUILT_IN_LFLOOR):
15648 CASE_FLT_FN (BUILT_IN_LLCEIL):
15649 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15650 CASE_FLT_FN (BUILT_IN_LLRINT):
15651 CASE_FLT_FN (BUILT_IN_LLROUND):
15652 CASE_FLT_FN (BUILT_IN_LRINT):
15653 CASE_FLT_FN (BUILT_IN_LROUND):
15654 CASE_FLT_FN (BUILT_IN_MODF):
15655 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15656 CASE_FLT_FN (BUILT_IN_RINT):
15657 CASE_FLT_FN (BUILT_IN_ROUND):
15658 CASE_FLT_FN (BUILT_IN_SCALB):
15659 CASE_FLT_FN (BUILT_IN_SCALBLN):
15660 CASE_FLT_FN (BUILT_IN_SCALBN):
15661 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15662 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15663 CASE_FLT_FN (BUILT_IN_SINH):
15664 CASE_FLT_FN (BUILT_IN_TANH):
15665 CASE_FLT_FN (BUILT_IN_TRUNC):
15666 /* True if the 1st argument is nonnegative. */
15667 return tree_expr_nonnegative_warnv_p (arg0,
15668 strict_overflow_p);
15670 CASE_FLT_FN (BUILT_IN_FMAX):
15671 /* True if either the 1st or the 2nd argument is nonnegative. */
15672 return (tree_expr_nonnegative_warnv_p (arg0,
15673 strict_overflow_p)
15674 || (tree_expr_nonnegative_warnv_p (arg1,
15675 strict_overflow_p)));
15677 CASE_FLT_FN (BUILT_IN_FMIN):
15678 /* True if both the 1st and the 2nd arguments are nonnegative. */
15679 return (tree_expr_nonnegative_warnv_p (arg0,
15680 strict_overflow_p)
15681 && (tree_expr_nonnegative_warnv_p (arg1,
15682 strict_overflow_p)));
15684 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15685 /* True if the 2nd argument is nonnegative. */
15686 return tree_expr_nonnegative_warnv_p (arg1,
15687 strict_overflow_p);
15689 CASE_FLT_FN (BUILT_IN_POWI):
15690 /* True if the 1st argument is nonnegative or the second
15691 argument is an even integer. */
15692 if (TREE_CODE (arg1) == INTEGER_CST
15693 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15694 return true;
15695 return tree_expr_nonnegative_warnv_p (arg0,
15696 strict_overflow_p);
15698 CASE_FLT_FN (BUILT_IN_POW):
15699 /* True if the 1st argument is nonnegative or the second
15700 argument is an even-integer-valued real. */
15701 if (TREE_CODE (arg1) == REAL_CST)
15703 REAL_VALUE_TYPE c;
15704 HOST_WIDE_INT n;
15706 c = TREE_REAL_CST (arg1);
15707 n = real_to_integer (&c);
15708 if ((n & 1) == 0)
15710 REAL_VALUE_TYPE cint;
15711 real_from_integer (&cint, VOIDmode, n,
15712 n < 0 ? -1 : 0, 0);
15713 if (real_identical (&c, &cint))
15714 return true;
15717 return tree_expr_nonnegative_warnv_p (arg0,
15718 strict_overflow_p);
15720 default:
15721 break;
15723 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15724 type);
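/* Editorial illustration (a sketch added to this listing, not part of
   the original file): for a call such as pow (x, 2.0), ARG1 is the
   REAL_CST 2.0; real_to_integer yields n == 2, the parity test
   (n & 1) == 0 passes, and real_identical confirms that 2.0 is exactly
   integral, so the call is known non-negative whatever the sign of X.
   For pow (x, 3.0) the parity test fails and the result is known
   non-negative only if X is.  */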
15727 /* Return true if T is known to be non-negative. If the return
15728 value is based on the assumption that signed overflow is undefined,
15729 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15730 *STRICT_OVERFLOW_P. */
15732 bool
15733 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15735 enum tree_code code = TREE_CODE (t);
15736 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15737 return true;
15739 switch (code)
15741 case TARGET_EXPR:
15743 tree temp = TARGET_EXPR_SLOT (t);
15744 t = TARGET_EXPR_INITIAL (t);
15746 /* If the initializer is non-void, then it's a normal expression
15747 that will be assigned to the slot. */
15748 if (!VOID_TYPE_P (t))
15749 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15751 /* Otherwise, the initializer sets the slot in some way. One common
15752 way is an assignment statement at the end of the initializer. */
15753 while (1)
15755 if (TREE_CODE (t) == BIND_EXPR)
15756 t = expr_last (BIND_EXPR_BODY (t));
15757 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15758 || TREE_CODE (t) == TRY_CATCH_EXPR)
15759 t = expr_last (TREE_OPERAND (t, 0));
15760 else if (TREE_CODE (t) == STATEMENT_LIST)
15761 t = expr_last (t);
15762 else
15763 break;
15765 if (TREE_CODE (t) == MODIFY_EXPR
15766 && TREE_OPERAND (t, 0) == temp)
15767 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15768 strict_overflow_p);
15770 return false;
15773 case CALL_EXPR:
15775 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15776 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15778 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15779 get_callee_fndecl (t),
15780 arg0,
15781 arg1,
15782 strict_overflow_p);
15784 case COMPOUND_EXPR:
15785 case MODIFY_EXPR:
15786 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15787 strict_overflow_p);
15788 case BIND_EXPR:
15789 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15790 strict_overflow_p);
15791 case SAVE_EXPR:
15792 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15793 strict_overflow_p);
15795 default:
15796 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15797 TREE_TYPE (t));
15800 /* We don't know the sign of `t', so be conservative and return false. */
15801 return false;
15804 /* Return true if T is known to be non-negative. If the return
15805 value is based on the assumption that signed overflow is undefined,
15806 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15807 *STRICT_OVERFLOW_P. */
15809 bool
15810 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15812 enum tree_code code;
15813 if (t == error_mark_node)
15814 return false;
15816 code = TREE_CODE (t);
15817 switch (TREE_CODE_CLASS (code))
15819 case tcc_binary:
15820 case tcc_comparison:
15821 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15822 TREE_TYPE (t),
15823 TREE_OPERAND (t, 0),
15824 TREE_OPERAND (t, 1),
15825 strict_overflow_p);
15827 case tcc_unary:
15828 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15829 TREE_TYPE (t),
15830 TREE_OPERAND (t, 0),
15831 strict_overflow_p);
15833 case tcc_constant:
15834 case tcc_declaration:
15835 case tcc_reference:
15836 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15838 default:
15839 break;
15842 switch (code)
15844 case TRUTH_AND_EXPR:
15845 case TRUTH_OR_EXPR:
15846 case TRUTH_XOR_EXPR:
15847 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15848 TREE_TYPE (t),
15849 TREE_OPERAND (t, 0),
15850 TREE_OPERAND (t, 1),
15851 strict_overflow_p);
15852 case TRUTH_NOT_EXPR:
15853 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15854 TREE_TYPE (t),
15855 TREE_OPERAND (t, 0),
15856 strict_overflow_p);
15858 case COND_EXPR:
15859 case CONSTRUCTOR:
15860 case OBJ_TYPE_REF:
15861 case ASSERT_EXPR:
15862 case ADDR_EXPR:
15863 case WITH_SIZE_EXPR:
15864 case SSA_NAME:
15865 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15867 default:
15868 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15872 /* Return true if `t' is known to be non-negative. Handle warnings
15873 about undefined signed overflow. */
15875 bool
15876 tree_expr_nonnegative_p (tree t)
15878 bool ret, strict_overflow_p;
15880 strict_overflow_p = false;
15881 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15882 if (strict_overflow_p)
15883 fold_overflow_warning (("assuming signed overflow does not occur when "
15884 "determining that expression is always "
15885 "non-negative"),
15886 WARN_STRICT_OVERFLOW_MISC);
15887 return ret;
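/* Editorial sketch (added to this listing, not part of the original
   file): the *_warnv_p / *_p pairing above is the pattern callers are
   expected to follow when they want the overflow warning emitted for
   them:

     bool strict_overflow_p = false;
     if (tree_expr_nonnegative_warnv_p (expr, &strict_overflow_p))
       {
         if (strict_overflow_p)
           fold_overflow_warning ("...", WARN_STRICT_OVERFLOW_MISC);
         ... EXPR is known non-negative here ...
       }

   tree_expr_nonnegative_p is exactly this wrapper with a fixed
   message.  */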
15891 /* Return true when the unary expression (CODE OP0) is known to be nonzero.
15892 For floating point we further ensure that the value is not denormal.
15893 Similar logic is present in nonzero_address_p in rtlanal.c.
15895 If the return value is based on the assumption that signed overflow
15896 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15897 change *STRICT_OVERFLOW_P. */
15899 bool
15900 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15901 bool *strict_overflow_p)
15903 switch (code)
15905 case ABS_EXPR:
15906 return tree_expr_nonzero_warnv_p (op0,
15907 strict_overflow_p);
15909 case NOP_EXPR:
15911 tree inner_type = TREE_TYPE (op0);
15912 tree outer_type = type;
15914 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15915 && tree_expr_nonzero_warnv_p (op0,
15916 strict_overflow_p));
15918 break;
15920 case NON_LVALUE_EXPR:
15921 return tree_expr_nonzero_warnv_p (op0,
15922 strict_overflow_p);
15924 default:
15925 break;
15928 return false;
15931 /* Return true when the binary expression (CODE OP0 OP1) is known to be nonzero.
15932 For floating point we further ensure that the value is not denormal.
15933 Similar logic is present in nonzero_address_p in rtlanal.c.
15935 If the return value is based on the assumption that signed overflow
15936 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15937 change *STRICT_OVERFLOW_P. */
15939 bool
15940 tree_binary_nonzero_warnv_p (enum tree_code code,
15941 tree type,
15942 tree op0,
15943 tree op1, bool *strict_overflow_p)
15945 bool sub_strict_overflow_p;
15946 switch (code)
15948 case POINTER_PLUS_EXPR:
15949 case PLUS_EXPR:
15950 if (TYPE_OVERFLOW_UNDEFINED (type))
15952 /* In the presence of negative values it is hard
15953 to say anything definite. */
15954 sub_strict_overflow_p = false;
15955 if (!tree_expr_nonnegative_warnv_p (op0,
15956 &sub_strict_overflow_p)
15957 || !tree_expr_nonnegative_warnv_p (op1,
15958 &sub_strict_overflow_p))
15959 return false;
15960 /* One of the operands must be positive and the other non-negative. */
15961 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15962 overflows, on a two's-complement machine the sum of two
15963 nonnegative numbers can never be zero. */
15964 return (tree_expr_nonzero_warnv_p (op0,
15965 strict_overflow_p)
15966 || tree_expr_nonzero_warnv_p (op1,
15967 strict_overflow_p));
15969 break;
15971 case MULT_EXPR:
15972 if (TYPE_OVERFLOW_UNDEFINED (type))
15974 if (tree_expr_nonzero_warnv_p (op0,
15975 strict_overflow_p)
15976 && tree_expr_nonzero_warnv_p (op1,
15977 strict_overflow_p))
15979 *strict_overflow_p = true;
15980 return true;
15983 break;
15985 case MIN_EXPR:
15986 sub_strict_overflow_p = false;
15987 if (tree_expr_nonzero_warnv_p (op0,
15988 &sub_strict_overflow_p)
15989 && tree_expr_nonzero_warnv_p (op1,
15990 &sub_strict_overflow_p))
15992 if (sub_strict_overflow_p)
15993 *strict_overflow_p = true;
15995 break;
15997 case MAX_EXPR:
15998 sub_strict_overflow_p = false;
15999 if (tree_expr_nonzero_warnv_p (op0,
16000 &sub_strict_overflow_p))
16002 if (sub_strict_overflow_p)
16003 *strict_overflow_p = true;
16005 /* When both operands are nonzero, MAX must be too. */
16006 if (tree_expr_nonzero_warnv_p (op1,
16007 strict_overflow_p))
16008 return true;
16010 /* MAX where operand 0 is positive is positive. */
16011 return tree_expr_nonnegative_warnv_p (op0,
16012 strict_overflow_p);
16014 /* MAX where operand 1 is positive is positive. */
16015 else if (tree_expr_nonzero_warnv_p (op1,
16016 &sub_strict_overflow_p)
16017 && tree_expr_nonnegative_warnv_p (op1,
16018 &sub_strict_overflow_p))
16020 if (sub_strict_overflow_p)
16021 *strict_overflow_p = true;
16022 return true;
16024 break;
16026 case BIT_IOR_EXPR:
16027 return (tree_expr_nonzero_warnv_p (op1,
16028 strict_overflow_p)
16029 || tree_expr_nonzero_warnv_p (op0,
16030 strict_overflow_p));
16032 default:
16033 break;
16036 return false;
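/* Editorial illustration (added to this listing, not part of the
   original file): for signed ints X and Y both known nonzero, X * Y
   is treated as nonzero above only because signed overflow is assumed
   undefined -- a wrapping multiplication could produce zero, e.g.
   65536 * 65536 == 2^32 == 0 in 32 bits -- which is why the MULT_EXPR
   case sets *STRICT_OVERFLOW_P unconditionally.  */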
16039 /* Return true when T is known to be nonzero.
16040 For floating point we further ensure that T is not denormal.
16041 Similar logic is present in nonzero_address_p in rtlanal.c.
16043 If the return value is based on the assumption that signed overflow
16044 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16045 change *STRICT_OVERFLOW_P. */
16047 bool
16048 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16050 bool sub_strict_overflow_p;
16051 switch (TREE_CODE (t))
16053 case INTEGER_CST:
16054 return !integer_zerop (t);
16056 case ADDR_EXPR:
16058 tree base = TREE_OPERAND (t, 0);
16059 if (!DECL_P (base))
16060 base = get_base_address (base);
16062 if (!base)
16063 return false;
16065 /* Weak declarations may link to NULL, and other declarations may
16066 also turn out to be NULL, so trust a declaration only under
16067 -fdelete-null-pointer-checks; stack-allocated variables, though, can never be NULL. */
16068 if (DECL_P (base)
16069 && (flag_delete_null_pointer_checks
16070 || (DECL_CONTEXT (base)
16071 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16072 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16073 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16075 /* Constants are never weak. */
16076 if (CONSTANT_CLASS_P (base))
16077 return true;
16079 return false;
16082 case COND_EXPR:
16083 sub_strict_overflow_p = false;
16084 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16085 &sub_strict_overflow_p)
16086 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16087 &sub_strict_overflow_p))
16089 if (sub_strict_overflow_p)
16090 *strict_overflow_p = true;
16091 return true;
16093 break;
16095 default:
16096 break;
16098 return false;
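/* Editorial illustration (added to this listing, not part of the
   original file): given

     extern int w __attribute__ ((weak));
     int s;

   the ADDR_EXPR case above may answer true for &s (a non-weak
   declaration, trusted under -fdelete-null-pointer-checks) but must
   answer false for &w, since an undefined weak symbol legitimately
   resolves to a null address.  */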
16101 /* Return true when T is known to be nonzero.
16102 For floating point we further ensure that T is not denormal.
16103 Similar logic is present in nonzero_address_p in rtlanal.c.
16105 If the return value is based on the assumption that signed overflow
16106 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16107 change *STRICT_OVERFLOW_P. */
16109 bool
16110 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16112 tree type = TREE_TYPE (t);
16113 enum tree_code code;
16115 /* Doing something useful for floating point would need more work. */
16116 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16117 return false;
16119 code = TREE_CODE (t);
16120 switch (TREE_CODE_CLASS (code))
16122 case tcc_unary:
16123 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16124 strict_overflow_p);
16125 case tcc_binary:
16126 case tcc_comparison:
16127 return tree_binary_nonzero_warnv_p (code, type,
16128 TREE_OPERAND (t, 0),
16129 TREE_OPERAND (t, 1),
16130 strict_overflow_p);
16131 case tcc_constant:
16132 case tcc_declaration:
16133 case tcc_reference:
16134 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16136 default:
16137 break;
16140 switch (code)
16142 case TRUTH_NOT_EXPR:
16143 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16144 strict_overflow_p);
16146 case TRUTH_AND_EXPR:
16147 case TRUTH_OR_EXPR:
16148 case TRUTH_XOR_EXPR:
16149 return tree_binary_nonzero_warnv_p (code, type,
16150 TREE_OPERAND (t, 0),
16151 TREE_OPERAND (t, 1),
16152 strict_overflow_p);
16154 case COND_EXPR:
16155 case CONSTRUCTOR:
16156 case OBJ_TYPE_REF:
16157 case ASSERT_EXPR:
16158 case ADDR_EXPR:
16159 case WITH_SIZE_EXPR:
16160 case SSA_NAME:
16161 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16163 case COMPOUND_EXPR:
16164 case MODIFY_EXPR:
16165 case BIND_EXPR:
16166 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16167 strict_overflow_p);
16169 case SAVE_EXPR:
16170 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16171 strict_overflow_p);
16173 case CALL_EXPR:
16174 return alloca_call_p (t);
16176 default:
16177 break;
16179 return false;
16182 /* Return true when T is known to be nonzero.
16183 Handle warnings about undefined signed overflow. */
16185 bool
16186 tree_expr_nonzero_p (tree t)
16188 bool ret, strict_overflow_p;
16190 strict_overflow_p = false;
16191 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16192 if (strict_overflow_p)
16193 fold_overflow_warning (("assuming signed overflow does not occur when "
16194 "determining that expression is always "
16195 "non-zero"),
16196 WARN_STRICT_OVERFLOW_MISC);
16197 return ret;
16200 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16201 attempt to fold the expression to a constant without modifying TYPE,
16202 OP0 or OP1.
16204 If the expression could be simplified to a constant, then return
16205 the constant. If the expression would not be simplified to a
16206 constant, then return NULL_TREE. */
16208 tree
16209 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16211 tree tem = fold_binary (code, type, op0, op1);
16212 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16215 /* Given the components of a unary expression CODE, TYPE and OP0,
16216 attempt to fold the expression to a constant without modifying
16217 TYPE or OP0.
16219 If the expression could be simplified to a constant, then return
16220 the constant. If the expression would not be simplified to a
16221 constant, then return NULL_TREE. */
16223 tree
16224 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16226 tree tem = fold_unary (code, type, op0);
16227 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
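/* Editorial sketch (added to this listing, not part of the original
   file): a typical use of the two helpers above:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          two, three);

   FIVE is the INTEGER_CST 5, whereas the same call with a
   non-constant operand returns NULL_TREE rather than building a
   PLUS_EXPR.  */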
16230 /* If EXP represents referencing an element in a constant string
16231 (either via pointer arithmetic or array indexing), return the
16232 tree representing the value accessed, otherwise return NULL. */
16234 tree
16235 fold_read_from_constant_string (tree exp)
16237 if ((TREE_CODE (exp) == INDIRECT_REF
16238 || TREE_CODE (exp) == ARRAY_REF)
16239 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16241 tree exp1 = TREE_OPERAND (exp, 0);
16242 tree index;
16243 tree string;
16244 location_t loc = EXPR_LOCATION (exp);
16246 if (TREE_CODE (exp) == INDIRECT_REF)
16247 string = string_constant (exp1, &index);
16248 else
16250 tree low_bound = array_ref_low_bound (exp);
16251 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16253 /* Optimize the special case of a zero lower bound.
16255 We convert the low_bound to sizetype to avoid some problems
16256 with constant folding. (E.g. suppose the lower bound is 1,
16257 and its mode is QI. Without the conversion, (ARRAY
16258 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16259 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16260 if (! integer_zerop (low_bound))
16261 index = size_diffop_loc (loc, index,
16262 fold_convert_loc (loc, sizetype, low_bound));
16264 string = exp1;
16267 if (string
16268 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16269 && TREE_CODE (string) == STRING_CST
16270 && TREE_CODE (index) == INTEGER_CST
16271 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16272 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16273 == MODE_INT)
16274 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16275 return build_int_cst_type (TREE_TYPE (exp),
16276 (TREE_STRING_POINTER (string)
16277 [TREE_INT_CST_LOW (index)]));
16279 return NULL;
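/* Editorial illustration (added to this listing, not part of the
   original file): for the access "abc"[1], STRING is the STRING_CST
   "abc" with TREE_STRING_LENGTH == 4 (the terminating NUL is
   counted), INDEX is the INTEGER_CST 1, and the element mode is a
   one-byte MODE_INT, so the checks above all pass and the result is
   the INTEGER_CST 'b'.  */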
16282 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16283 an integer constant, real, or fixed-point constant.
16285 TYPE is the type of the result. */
16287 static tree
16288 fold_negate_const (tree arg0, tree type)
16290 tree t = NULL_TREE;
16292 switch (TREE_CODE (arg0))
16294 case INTEGER_CST:
16296 double_int val = tree_to_double_int (arg0);
16297 bool overflow;
16298 val = val.neg_with_overflow (&overflow);
16299 t = force_fit_type_double (type, val, 1,
16300 (overflow | TREE_OVERFLOW (arg0))
16301 && !TYPE_UNSIGNED (type));
16302 break;
16305 case REAL_CST:
16306 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16307 break;
16309 case FIXED_CST:
16311 FIXED_VALUE_TYPE f;
16312 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16313 &(TREE_FIXED_CST (arg0)), NULL,
16314 TYPE_SATURATING (type));
16315 t = build_fixed (type, f);
16316 /* Propagate overflow flags. */
16317 if (overflow_p | TREE_OVERFLOW (arg0))
16318 TREE_OVERFLOW (t) = 1;
16319 break;
16322 default:
16323 gcc_unreachable ();
16326 return t;
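/* Editorial illustration (added to this listing, not part of the
   original file): negating the 32-bit INTEGER_CST INT_MIN
   (-2147483648) produces +2147483648, which no longer fits the signed
   type; force_fit_type_double therefore returns a constant that wraps
   back to INT_MIN with TREE_OVERFLOW set.  */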
16329 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16330 an integer constant or real constant.
16332 TYPE is the type of the result. */
16334 tree
16335 fold_abs_const (tree arg0, tree type)
16337 tree t = NULL_TREE;
16339 switch (TREE_CODE (arg0))
16341 case INTEGER_CST:
16343 double_int val = tree_to_double_int (arg0);
16345 /* If the value is unsigned or non-negative, then the absolute value
16346 is the same as the ordinary value. */
16347 if (TYPE_UNSIGNED (type)
16348 || !val.is_negative ())
16349 t = arg0;
16351 /* If the value is negative, then the absolute value is
16352 its negation. */
16353 else
16355 bool overflow;
16356 val = val.neg_with_overflow (&overflow);
16357 t = force_fit_type_double (type, val, -1,
16358 overflow | TREE_OVERFLOW (arg0));
16361 break;
16363 case REAL_CST:
16364 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16365 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16366 else
16367 t = arg0;
16368 break;
16370 default:
16371 gcc_unreachable ();
16374 return t;
16377 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16378 constant. TYPE is the type of the result. */
16380 static tree
16381 fold_not_const (const_tree arg0, tree type)
16383 double_int val;
16385 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16387 val = ~tree_to_double_int (arg0);
16388 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16391 /* Given CODE, a relational operator, the target type, TYPE and two
16392 constant operands OP0 and OP1, return the result of the
16393 relational operation. If the result is not a compile time
16394 constant, then return NULL_TREE. */
16396 static tree
16397 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16399 int result, invert;
16401 /* From here on, the only cases we handle are when the result is
16402 known to be a constant. */
16404 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16406 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16407 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16409 /* Handle the cases where either operand is a NaN. */
16410 if (real_isnan (c0) || real_isnan (c1))
16412 switch (code)
16414 case EQ_EXPR:
16415 case ORDERED_EXPR:
16416 result = 0;
16417 break;
16419 case NE_EXPR:
16420 case UNORDERED_EXPR:
16421 case UNLT_EXPR:
16422 case UNLE_EXPR:
16423 case UNGT_EXPR:
16424 case UNGE_EXPR:
16425 case UNEQ_EXPR:
16426 result = 1;
16427 break;
16429 case LT_EXPR:
16430 case LE_EXPR:
16431 case GT_EXPR:
16432 case GE_EXPR:
16433 case LTGT_EXPR:
16434 if (flag_trapping_math)
16435 return NULL_TREE;
16436 result = 0;
16437 break;
16439 default:
16440 gcc_unreachable ();
16443 return constant_boolean_node (result, type);
16446 return constant_boolean_node (real_compare (code, c0, c1), type);
16449 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16451 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16452 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16453 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16456 /* Handle equality/inequality of complex constants. */
16457 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16459 tree rcond = fold_relational_const (code, type,
16460 TREE_REALPART (op0),
16461 TREE_REALPART (op1));
16462 tree icond = fold_relational_const (code, type,
16463 TREE_IMAGPART (op0),
16464 TREE_IMAGPART (op1));
16465 if (code == EQ_EXPR)
16466 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16467 else if (code == NE_EXPR)
16468 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16469 else
16470 return NULL_TREE;
16473 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16475 unsigned count = VECTOR_CST_NELTS (op0);
16476 tree *elts = XALLOCAVEC (tree, count);
16477 gcc_assert (VECTOR_CST_NELTS (op1) == count
16478 && TYPE_VECTOR_SUBPARTS (type) == count);
16480 for (unsigned i = 0; i < count; i++)
16482 tree elem_type = TREE_TYPE (type);
16483 tree elem0 = VECTOR_CST_ELT (op0, i);
16484 tree elem1 = VECTOR_CST_ELT (op1, i);
16486 tree tem = fold_relational_const (code, elem_type,
16487 elem0, elem1);
16489 if (tem == NULL_TREE)
16490 return NULL_TREE;
16492 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16495 return build_vector (type, elts);
16498 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16500 To compute GT, swap the arguments and do LT.
16501 To compute GE, do LT and invert the result.
16502 To compute LE, swap the arguments, do LT and invert the result.
16503 To compute NE, do EQ and invert the result.
16505 Therefore, the code below must handle only EQ and LT. */
16507 if (code == LE_EXPR || code == GT_EXPR)
16509 tree tem = op0;
16510 op0 = op1;
16511 op1 = tem;
16512 code = swap_tree_comparison (code);
16515 /* Note that it is safe to invert for real values here because we
16516 have already handled the one case where it matters. */
16518 invert = 0;
16519 if (code == NE_EXPR || code == GE_EXPR)
16521 invert = 1;
16522 code = invert_tree_comparison (code, false);
16525 /* Compute a result for LT or EQ if args permit;
16526 otherwise return NULL_TREE. */
16527 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16529 if (code == EQ_EXPR)
16530 result = tree_int_cst_equal (op0, op1);
16531 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16532 result = INT_CST_LT_UNSIGNED (op0, op1);
16533 else
16534 result = INT_CST_LT (op0, op1);
16536 else
16537 return NULL_TREE;
16539 if (invert)
16540 result ^= 1;
16541 return constant_boolean_node (result, type);
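/* Editorial illustration (added to this listing, not part of the
   original file): evaluating the constant comparison 3 >= 2 above
   proceeds as follows: GE_EXPR is neither LE nor GT, so no operand
   swap; it does trigger the inversion step, setting INVERT and
   turning the code into LT_EXPR; INT_CST_LT (3, 2) yields 0, and the
   final inversion gives 1, i.e. boolean true.  */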
16544 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16545 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16546 itself. */
16548 tree
16549 fold_build_cleanup_point_expr (tree type, tree expr)
16551 /* If the expression does not have side effects then we don't have to wrap
16552 it with a cleanup point expression. */
16553 if (!TREE_SIDE_EFFECTS (expr))
16554 return expr;
16556 /* If the expression is a RETURN_EXPR, check whether the expression
16557 inside the return, or the right-hand side of the MODIFY_EXPR inside
16558 it, is free of side effects; if either one is, we need not wrap the
16559 expression in a cleanup point expression. Note we don't check the
16560 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
16561 if (TREE_CODE (expr) == RETURN_EXPR)
16563 tree op = TREE_OPERAND (expr, 0);
16564 if (!op || !TREE_SIDE_EFFECTS (op))
16565 return expr;
16566 op = TREE_OPERAND (op, 1);
16567 if (!TREE_SIDE_EFFECTS (op))
16568 return expr;
16571 return build1 (CLEANUP_POINT_EXPR, type, expr);
16574 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16575 of an indirection through OP0, or NULL_TREE if no simplification is
16576 possible. */
16578 tree
16579 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16581 tree sub = op0;
16582 tree subtype;
16584 STRIP_NOPS (sub);
16585 subtype = TREE_TYPE (sub);
16586 if (!POINTER_TYPE_P (subtype))
16587 return NULL_TREE;
16589 if (TREE_CODE (sub) == ADDR_EXPR)
16591 tree op = TREE_OPERAND (sub, 0);
16592 tree optype = TREE_TYPE (op);
16593 /* *&CONST_DECL -> to the value of the const decl. */
16594 if (TREE_CODE (op) == CONST_DECL)
16595 return DECL_INITIAL (op);
16596 /* *&p => p; make sure to handle *&"str"[cst] here. */
16597 if (type == optype)
16599 tree fop = fold_read_from_constant_string (op);
16600 if (fop)
16601 return fop;
16602 else
16603 return op;
16605 /* *(foo *)&fooarray => fooarray[0] */
16606 else if (TREE_CODE (optype) == ARRAY_TYPE
16607 && type == TREE_TYPE (optype)
16608 && (!in_gimple_form
16609 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16611 tree type_domain = TYPE_DOMAIN (optype);
16612 tree min_val = size_zero_node;
16613 if (type_domain && TYPE_MIN_VALUE (type_domain))
16614 min_val = TYPE_MIN_VALUE (type_domain);
16615 if (in_gimple_form
16616 && TREE_CODE (min_val) != INTEGER_CST)
16617 return NULL_TREE;
16618 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16619 NULL_TREE, NULL_TREE);
16621 /* *(foo *)&complexfoo => __real__ complexfoo */
16622 else if (TREE_CODE (optype) == COMPLEX_TYPE
16623 && type == TREE_TYPE (optype))
16624 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16625 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16626 else if (TREE_CODE (optype) == VECTOR_TYPE
16627 && type == TREE_TYPE (optype))
16629 tree part_width = TYPE_SIZE (type);
16630 tree index = bitsize_int (0);
16631 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16635 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16636 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16638 tree op00 = TREE_OPERAND (sub, 0);
16639 tree op01 = TREE_OPERAND (sub, 1);
16641 STRIP_NOPS (op00);
16642 if (TREE_CODE (op00) == ADDR_EXPR)
16644 tree op00type;
16645 op00 = TREE_OPERAND (op00, 0);
16646 op00type = TREE_TYPE (op00);
16648 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16649 if (TREE_CODE (op00type) == VECTOR_TYPE
16650 && type == TREE_TYPE (op00type))
16652 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16653 tree part_width = TYPE_SIZE (type);
16654 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
16655 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16656 tree index = bitsize_int (indexi);
16658 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16659 return fold_build3_loc (loc,
16660 BIT_FIELD_REF, type, op00,
16661 part_width, index);
16664 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16665 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16666 && type == TREE_TYPE (op00type))
16668 tree size = TYPE_SIZE_UNIT (type);
16669 if (tree_int_cst_equal (size, op01))
16670 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16672 /* ((foo *)&fooarray)[1] => fooarray[1] */
16673 else if (TREE_CODE (op00type) == ARRAY_TYPE
16674 && type == TREE_TYPE (op00type))
16676 tree type_domain = TYPE_DOMAIN (op00type);
16677 tree min_val = size_zero_node;
16678 if (type_domain && TYPE_MIN_VALUE (type_domain))
16679 min_val = TYPE_MIN_VALUE (type_domain);
16680 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16681 TYPE_SIZE_UNIT (type));
16682 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16683 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16684 NULL_TREE, NULL_TREE);
16689 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16690 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16691 && type == TREE_TYPE (TREE_TYPE (subtype))
16692 && (!in_gimple_form
16693 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16695 tree type_domain;
16696 tree min_val = size_zero_node;
16697 sub = build_fold_indirect_ref_loc (loc, sub);
16698 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16699 if (type_domain && TYPE_MIN_VALUE (type_domain))
16700 min_val = TYPE_MIN_VALUE (type_domain);
16701 if (in_gimple_form
16702 && TREE_CODE (min_val) != INTEGER_CST)
16703 return NULL_TREE;
16704 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16705 NULL_TREE);
16708 return NULL_TREE;
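/* Editorial illustration (added to this listing, not part of the
   original file): with

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;

   the access ((int *) &v)[1] reaches the POINTER_PLUS_EXPR case with
   OP01 == 4; PART_WIDTH is 32 bits, so PART_WIDTHI == 4 and
   INDEXI == 32, and since 4 / 4 == 1 falls within the four subparts
   the result is BIT_FIELD_REF <v, 32, 32>, i.e. the second vector
   element.  */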
16711 /* Builds an expression for an indirection through T, simplifying some
16712 cases. */
16714 tree
16715 build_fold_indirect_ref_loc (location_t loc, tree t)
16717 tree type = TREE_TYPE (TREE_TYPE (t));
16718 tree sub = fold_indirect_ref_1 (loc, type, t);
16720 if (sub)
16721 return sub;
16723 return build1_loc (loc, INDIRECT_REF, type, t);
16726 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16728 tree
16729 fold_indirect_ref_loc (location_t loc, tree t)
16731 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16733 if (sub)
16734 return sub;
16735 else
16736 return t;
16739 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16740 whose result is ignored. The type of the returned tree need not be
16741 the same as that of the original expression. */
16743 tree
16744 fold_ignored_result (tree t)
16746 if (!TREE_SIDE_EFFECTS (t))
16747 return integer_zero_node;
16749 for (;;)
16750 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16752 case tcc_unary:
16753 t = TREE_OPERAND (t, 0);
16754 break;
16756 case tcc_binary:
16757 case tcc_comparison:
16758 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16759 t = TREE_OPERAND (t, 0);
16760 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16761 t = TREE_OPERAND (t, 1);
16762 else
16763 return t;
16764 break;
16766 case tcc_expression:
16767 switch (TREE_CODE (t))
16769 case COMPOUND_EXPR:
16770 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16771 return t;
16772 t = TREE_OPERAND (t, 0);
16773 break;
16775 case COND_EXPR:
16776 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16777 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16778 return t;
16779 t = TREE_OPERAND (t, 0);
16780 break;
16782 default:
16783 return t;
16785 break;
16787 default:
16788 return t;
16792 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16793 This can only be applied to objects of a sizetype. */
16795 tree
16796 round_up_loc (location_t loc, tree value, int divisor)
16798 tree div = NULL_TREE;
16800 gcc_assert (divisor > 0);
16801 if (divisor == 1)
16802 return value;
16804 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16805 have to do anything. Only do this check when VALUE is not a
16806 constant, because for a constant the check is more expensive than
16807 simply doing the rounding. */
16808 if (TREE_CODE (value) != INTEGER_CST)
16810 div = build_int_cst (TREE_TYPE (value), divisor);
16812 if (multiple_of_p (TREE_TYPE (value), value, div))
16813 return value;
16816 /* If divisor is a power of two, simplify this to bit manipulation. */
16817 if (divisor == (divisor & -divisor))
16819 if (TREE_CODE (value) == INTEGER_CST)
16821 double_int val = tree_to_double_int (value);
16822 bool overflow_p;
16824 if ((val.low & (divisor - 1)) == 0)
16825 return value;
16827 overflow_p = TREE_OVERFLOW (value);
16828 val.low &= ~(divisor - 1);
16829 val.low += divisor;
16830 if (val.low == 0)
16832 val.high++;
16833 if (val.high == 0)
16834 overflow_p = true;
16837 return force_fit_type_double (TREE_TYPE (value), val,
16838 -1, overflow_p);
16840 else
16842 tree t;
16844 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16845 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16846 t = build_int_cst (TREE_TYPE (value), -divisor);
16847 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16850 else
16852 if (!div)
16853 div = build_int_cst (TREE_TYPE (value), divisor);
16854 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16855 value = size_binop_loc (loc, MULT_EXPR, value, div);
16858 return value;
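/* Editorial sketch (added to this listing, not part of the original
   file): for a power-of-two divisor the non-constant branch above
   builds the classic rounding idiom

     (value + divisor - 1) & -divisor

   where -divisor == ~(divisor - 1); e.g. rounding 13 up to a multiple
   of 8 computes (13 + 7) & -8 == 20 & ~7 == 16.  */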
16861 /* Likewise, but round down. */
16863 tree
16864 round_down_loc (location_t loc, tree value, int divisor)
16866 tree div = NULL_TREE;
16868 gcc_assert (divisor > 0);
16869 if (divisor == 1)
16870 return value;
16872 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16873 have to do anything. Only do this check when VALUE is not a
16874 constant, because for a constant the check is more expensive than
16875 simply doing the rounding. */
16876 if (TREE_CODE (value) != INTEGER_CST)
16878 div = build_int_cst (TREE_TYPE (value), divisor);
16880 if (multiple_of_p (TREE_TYPE (value), value, div))
16881 return value;
16884 /* If divisor is a power of two, simplify this to bit manipulation. */
16885 if (divisor == (divisor & -divisor))
16887 tree t;
16889 t = build_int_cst (TREE_TYPE (value), -divisor);
16890 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16892 else
16894 if (!div)
16895 div = build_int_cst (TREE_TYPE (value), divisor);
16896 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16897 value = size_binop_loc (loc, MULT_EXPR, value, div);
16900 return value;
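/* Editorial sketch (added to this listing, not part of the original
   file): the corresponding power-of-two round-down idiom above is
   simply value & -divisor, e.g. 13 & -8 == 8.  */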
16903 /* Returns the pointer to the base of the object addressed by EXP and
16904 extracts the information about the offset of the access, storing it
16905 in PBITPOS and POFFSET. */
16907 static tree
16908 split_address_to_core_and_offset (tree exp,
16909 HOST_WIDE_INT *pbitpos, tree *poffset)
16911 tree core;
16912 enum machine_mode mode;
16913 int unsignedp, volatilep;
16914 HOST_WIDE_INT bitsize;
16915 location_t loc = EXPR_LOCATION (exp);
16917 if (TREE_CODE (exp) == ADDR_EXPR)
16919 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16920 poffset, &mode, &unsignedp, &volatilep,
16921 false);
16922 core = build_fold_addr_expr_loc (loc, core);
16924 else
16926 core = exp;
16927 *pbitpos = 0;
16928 *poffset = NULL_TREE;
16931 return core;
16934 /* Returns true if addresses of E1 and E2 differ by a constant, false
16935 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16937 bool
16938 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16940 tree core1, core2;
16941 HOST_WIDE_INT bitpos1, bitpos2;
16942 tree toffset1, toffset2, tdiff, type;
16944 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16945 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16947 if (bitpos1 % BITS_PER_UNIT != 0
16948 || bitpos2 % BITS_PER_UNIT != 0
16949 || !operand_equal_p (core1, core2, 0))
16950 return false;
16952 if (toffset1 && toffset2)
16954 type = TREE_TYPE (toffset1);
16955 if (type != TREE_TYPE (toffset2))
16956 toffset2 = fold_convert (type, toffset2);
16958 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16959 if (!cst_and_fits_in_hwi (tdiff))
16960 return false;
16962 *diff = int_cst_value (tdiff);
16964 else if (toffset1 || toffset2)
16966 /* If only one of the offsets is non-constant, the difference cannot
16967 be a constant. */
16968 return false;
16970 else
16971 *diff = 0;
16973 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16974 return true;
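/* Editorial illustration (added to this listing, not part of the
   original file): for

     int a[10];

   the addresses &a[3] and &a[1] share the core &a and carry no
   variable offset; with 32-bit int, BITPOS1 - BITPOS2 == 96 - 32 ==
   64 bits, so *DIFF is set to 8 bytes and true is returned.  */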
16977 /* Simplify the floating point expression EXP when the sign of the
16978 result is not significant. Return NULL_TREE if no simplification
16979 is possible. */
16981 tree
16982 fold_strip_sign_ops (tree exp)
16984 tree arg0, arg1;
16985 location_t loc = EXPR_LOCATION (exp);
16987 switch (TREE_CODE (exp))
16989 case ABS_EXPR:
16990 case NEGATE_EXPR:
16991 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16992 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16994 case MULT_EXPR:
16995 case RDIV_EXPR:
16996 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16997 return NULL_TREE;
16998 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16999 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17000 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17001 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17002 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17003 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17004 break;
17006 case COMPOUND_EXPR:
17007 arg0 = TREE_OPERAND (exp, 0);
17008 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17009 if (arg1)
17010 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17011 break;
17013 case COND_EXPR:
17014 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17015 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17016 if (arg0 || arg1)
17017 return fold_build3_loc (loc,
17018 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17019 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17020 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17021 break;
17023 case CALL_EXPR:
17025 const enum built_in_function fcode = builtin_mathfn_code (exp);
17026 switch (fcode)
17028 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17029 /* Strip the copysign function call and return its 1st argument. */
17030 arg0 = CALL_EXPR_ARG (exp, 0);
17031 arg1 = CALL_EXPR_ARG (exp, 1);
17032 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17034 default:
17035 /* Strip sign ops from the argument of "odd" math functions. */
17036 if (negate_mathfn_p (fcode))
17038 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17039 if (arg0)
17040 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17042 break;
17045 break;
17047 default:
17048 break;
17050 return NULL_TREE;
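/* Editorial illustration (added to this listing, not part of the
   original file): when the sign of the result is insignificant,
   fold_strip_sign_ops performs rewrites such as

     -x * y          ==> x * y
     copysign (x, y) ==> x
     sin (-x)        ==> sin (x)   (sin is "odd", so only the sign
                                    of the result can change)

   and returns NULL_TREE when nothing can be stripped.  */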