/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
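
/* A reading aid (not in the original source): the low three bits of a
   comparison_code stand for LT, EQ and GT, and the fourth bit for
   UNORD.  So COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3),
   COMPCODE_GE == COMPCODE_EQ | COMPCODE_GT (2 | 4 == 6), and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
   (1 | 4 | 8 == 13).  ANDing or ORing two comparisons of the same
   operands then reduces to bitwise AND/OR of their codes.  */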
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
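
/* Illustration (not in the original source): for integer constant
   operands, div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4) yields the
   INTEGER_CST 3, whereas dividing 12 by 5 leaves remainder 2, so
   NULL_TREE is returned and no folding happens.  */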
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
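
/* A sketch of how a caller pairs these entry points (illustrative,
   not from the original source; the use of is_gimple_constant as the
   "result is used" test is an assumption):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = folded != NULL_TREE && is_gimple_constant (folded);
     fold_undefer_overflow_warnings (used, stmt, 0);

   Passing CODE == 0 tells fold_undefer_overflow_warnings to use the
   deferred warning level.  */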
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
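
/* Put differently (a reading aid, not in the original source): for a
   signed type the only value this rejects is the most negative one,
   e.g. for 32-bit int it returns false only for -2147483648, since
   -INT_MIN is not representable in int.  */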
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
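
/* Worked example (illustrative, not from the original source):
   splitting IN = a - 5 with CODE == PLUS_EXPR decomposes the
   MINUS_EXPR; the literal 5 was subtracted, so *minus_litp = 5,
   *litp and *conp stay null, and the variable part a is returned.
   Splitting IN = a + 5 sets *litp = 5 instead.  */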
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;

      /* Check for the case of INT_MIN % -1 and return
         overflow and result = 0.  The TImode case is handled properly
         in double-int.  */
      if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
          && !uns
          && op2.is_minus_one ()
          && op1.high == (HOST_WIDE_INT) -1
          && (HOST_WIDE_INT) op1.low
             == (((HOST_WIDE_INT) -1) << (TYPE_PRECISION (type) - 1)))
        {
          overflow = 1;
          res = double_int_zero;
        }
      else
        tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
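
/* Illustration (not from the original source): folding 7 / 2 reaches
   int_const_binop (TRUNC_DIV_EXPR, 7, 2) and yields the INTEGER_CST 3,
   while 7 % 0 hits the op2.is_zero () check and yields NULL_TREE, so
   the division by zero stays visible in the tree.  */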
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
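
/* Worked example (illustrative, not from the original source): with
   sizetype constants, size_diffop_loc (loc, 4, 7) takes the final
   branch, computes 7 - 4 == 3 in the unsigned type where it cannot
   overflow, converts to ssizetype, and subtracts from zero, giving
   -3.  */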
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
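  /* Concretely (a reading aid, not in the original source): converting
     1e30 to 32-bit int yields INT_MAX (2147483647), -1e30 yields
     INT_MIN, and a NaN yields 0, each with the overflow flag set on
     the result.  */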
  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
2064 case COMPLEX_TYPE:
2065 switch (TREE_CODE (orig))
2067 case INTEGER_TYPE:
2068 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2069 case POINTER_TYPE: case REFERENCE_TYPE:
2070 case REAL_TYPE:
2071 case FIXED_POINT_TYPE:
2072 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2073 fold_convert_loc (loc, TREE_TYPE (type), arg),
2074 fold_convert_loc (loc, TREE_TYPE (type),
2075 integer_zero_node));
2076 case COMPLEX_TYPE:
2078 tree rpart, ipart;
2080 if (TREE_CODE (arg) == COMPLEX_EXPR)
2082 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2083 TREE_OPERAND (arg, 0));
2084 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2085 TREE_OPERAND (arg, 1));
2086 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2089 arg = save_expr (arg);
2090 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2091 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2092 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2093 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2094 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2097 default:
2098 gcc_unreachable ();
2101 case VECTOR_TYPE:
2102 if (integer_zerop (arg))
2103 return build_zero_vector (type);
2104 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2105 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2106 || TREE_CODE (orig) == VECTOR_TYPE);
2107 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2109 case VOID_TYPE:
2110 tem = fold_ignored_result (arg);
2111 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2113 default:
2114 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2115 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2116 gcc_unreachable ();
2118 fold_convert_exit:
2119 protected_set_expr_location_unshare (tem, loc);
2120 return tem;
2123 /* Return false if expr can be assumed not to be an lvalue, true
2124 otherwise. */
2126 static bool
2127 maybe_lvalue_p (const_tree x)
2129 /* We only need to wrap lvalue tree codes. */
2130 switch (TREE_CODE (x))
2132 case VAR_DECL:
2133 case PARM_DECL:
2134 case RESULT_DECL:
2135 case LABEL_DECL:
2136 case FUNCTION_DECL:
2137 case SSA_NAME:
2139 case COMPONENT_REF:
2140 case MEM_REF:
2141 case INDIRECT_REF:
2142 case ARRAY_REF:
2143 case ARRAY_RANGE_REF:
2144 case BIT_FIELD_REF:
2145 case OBJ_TYPE_REF:
2147 case REALPART_EXPR:
2148 case IMAGPART_EXPR:
2149 case PREINCREMENT_EXPR:
2150 case PREDECREMENT_EXPR:
2151 case SAVE_EXPR:
2152 case TRY_CATCH_EXPR:
2153 case WITH_CLEANUP_EXPR:
2154 case COMPOUND_EXPR:
2155 case MODIFY_EXPR:
2156 case TARGET_EXPR:
2157 case COND_EXPR:
2158 case BIND_EXPR:
2159 break;
2161 default:
2162 /* Assume the worst for front-end tree codes. */
2163 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2164 break;
2165 return false;
2168 return true;
2171 /* Return an expr equal to X but certainly not valid as an lvalue. */
2173 tree
2174 non_lvalue_loc (location_t loc, tree x)
2176 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2177 us. */
2178 if (in_gimple_form)
2179 return x;
2181 if (! maybe_lvalue_p (x))
2182 return x;
2183 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2186 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2187 Zero means allow extended lvalues. */
2189 int pedantic_lvalues;
2191 /* When pedantic, return an expr equal to X but certainly not valid as a
2192 pedantic lvalue. Otherwise, return X. */
2194 static tree
2195 pedantic_non_lvalue_loc (location_t loc, tree x)
2197 if (pedantic_lvalues)
2198 return non_lvalue_loc (loc, x);
2200 return protected_set_expr_location_unshare (x, loc);
2203 /* Given a tree comparison code, return the code that is the logical inverse.
2204 It is generally not safe to do this for floating-point comparisons, except
2205 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2206 ERROR_MARK in this case. */
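/* For example, the inverse of a < b is a >= b (GE_EXPR) when NaNs cannot
   occur, but UNGE_EXPR when they can: if either operand is a NaN, both
   a < b and a >= b are false, so plain GE_EXPR would be the wrong
   inversion.  */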
2208 enum tree_code
2209 invert_tree_comparison (enum tree_code code, bool honor_nans)
2211 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2212 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2213 return ERROR_MARK;
2215 switch (code)
2217 case EQ_EXPR:
2218 return NE_EXPR;
2219 case NE_EXPR:
2220 return EQ_EXPR;
2221 case GT_EXPR:
2222 return honor_nans ? UNLE_EXPR : LE_EXPR;
2223 case GE_EXPR:
2224 return honor_nans ? UNLT_EXPR : LT_EXPR;
2225 case LT_EXPR:
2226 return honor_nans ? UNGE_EXPR : GE_EXPR;
2227 case LE_EXPR:
2228 return honor_nans ? UNGT_EXPR : GT_EXPR;
2229 case LTGT_EXPR:
2230 return UNEQ_EXPR;
2231 case UNEQ_EXPR:
2232 return LTGT_EXPR;
2233 case UNGT_EXPR:
2234 return LE_EXPR;
2235 case UNGE_EXPR:
2236 return LT_EXPR;
2237 case UNLT_EXPR:
2238 return GE_EXPR;
2239 case UNLE_EXPR:
2240 return GT_EXPR;
2241 case ORDERED_EXPR:
2242 return UNORDERED_EXPR;
2243 case UNORDERED_EXPR:
2244 return ORDERED_EXPR;
2245 default:
2246 gcc_unreachable ();
2250 /* Similar, but return the comparison that results if the operands are
2251 swapped. This is safe for floating-point. */
2253 enum tree_code
2254 swap_tree_comparison (enum tree_code code)
2256 switch (code)
2258 case EQ_EXPR:
2259 case NE_EXPR:
2260 case ORDERED_EXPR:
2261 case UNORDERED_EXPR:
2262 case LTGT_EXPR:
2263 case UNEQ_EXPR:
2264 return code;
2265 case GT_EXPR:
2266 return LT_EXPR;
2267 case GE_EXPR:
2268 return LE_EXPR;
2269 case LT_EXPR:
2270 return GT_EXPR;
2271 case LE_EXPR:
2272 return GE_EXPR;
2273 case UNGT_EXPR:
2274 return UNLT_EXPR;
2275 case UNGE_EXPR:
2276 return UNLE_EXPR;
2277 case UNLT_EXPR:
2278 return UNGT_EXPR;
2279 case UNLE_EXPR:
2280 return UNGE_EXPR;
2281 default:
2282 gcc_unreachable ();
2287 /* Convert a comparison tree code from an enum tree_code representation
2288 into a compcode bit-based encoding. This function is the inverse of
2289 compcode_to_comparison. */
2291 static enum comparison_code
2292 comparison_to_compcode (enum tree_code code)
2294 switch (code)
2296 case LT_EXPR:
2297 return COMPCODE_LT;
2298 case EQ_EXPR:
2299 return COMPCODE_EQ;
2300 case LE_EXPR:
2301 return COMPCODE_LE;
2302 case GT_EXPR:
2303 return COMPCODE_GT;
2304 case NE_EXPR:
2305 return COMPCODE_NE;
2306 case GE_EXPR:
2307 return COMPCODE_GE;
2308 case ORDERED_EXPR:
2309 return COMPCODE_ORD;
2310 case UNORDERED_EXPR:
2311 return COMPCODE_UNORD;
2312 case UNLT_EXPR:
2313 return COMPCODE_UNLT;
2314 case UNEQ_EXPR:
2315 return COMPCODE_UNEQ;
2316 case UNLE_EXPR:
2317 return COMPCODE_UNLE;
2318 case UNGT_EXPR:
2319 return COMPCODE_UNGT;
2320 case LTGT_EXPR:
2321 return COMPCODE_LTGT;
2322 case UNGE_EXPR:
2323 return COMPCODE_UNGE;
2324 default:
2325 gcc_unreachable ();
2329 /* Convert a compcode bit-based encoding of a comparison operator back
2330 to GCC's enum tree_code representation. This function is the
2331 inverse of comparison_to_compcode. */
2333 static enum tree_code
2334 compcode_to_comparison (enum comparison_code code)
2336 switch (code)
2338 case COMPCODE_LT:
2339 return LT_EXPR;
2340 case COMPCODE_EQ:
2341 return EQ_EXPR;
2342 case COMPCODE_LE:
2343 return LE_EXPR;
2344 case COMPCODE_GT:
2345 return GT_EXPR;
2346 case COMPCODE_NE:
2347 return NE_EXPR;
2348 case COMPCODE_GE:
2349 return GE_EXPR;
2350 case COMPCODE_ORD:
2351 return ORDERED_EXPR;
2352 case COMPCODE_UNORD:
2353 return UNORDERED_EXPR;
2354 case COMPCODE_UNLT:
2355 return UNLT_EXPR;
2356 case COMPCODE_UNEQ:
2357 return UNEQ_EXPR;
2358 case COMPCODE_UNLE:
2359 return UNLE_EXPR;
2360 case COMPCODE_UNGT:
2361 return UNGT_EXPR;
2362 case COMPCODE_LTGT:
2363 return LTGT_EXPR;
2364 case COMPCODE_UNGE:
2365 return UNGE_EXPR;
2366 default:
2367 gcc_unreachable ();
2371 /* Return a tree for the comparison which is the combination of
2372 doing the AND or OR (depending on CODE) of the two operations LCODE
2373 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2374 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2375 if this makes the transformation invalid. */
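/* For example (illustrative), on integer operands, combining
   (a <= b) && (a >= b) ANDs the two bit encodings into COMPCODE_EQ,
   so the pair folds to a == b; likewise (a < b) || (a == b) ORs into
   COMPCODE_LE and folds to a <= b.  */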
2377 tree
2378 combine_comparisons (location_t loc,
2379 enum tree_code code, enum tree_code lcode,
2380 enum tree_code rcode, tree truth_type,
2381 tree ll_arg, tree lr_arg)
2383 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2384 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2385 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2386 int compcode;
2388 switch (code)
2390 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2391 compcode = lcompcode & rcompcode;
2392 break;
2394 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2395 compcode = lcompcode | rcompcode;
2396 break;
2398 default:
2399 return NULL_TREE;
2402 if (!honor_nans)
2404 /* Eliminate unordered comparisons, as well as LTGT and ORD
2405 which are not used unless the mode has NaNs. */
2406 compcode &= ~COMPCODE_UNORD;
2407 if (compcode == COMPCODE_LTGT)
2408 compcode = COMPCODE_NE;
2409 else if (compcode == COMPCODE_ORD)
2410 compcode = COMPCODE_TRUE;
2412 else if (flag_trapping_math)
2414 /* Check that the original operation and the optimized ones will trap
2415 under the same condition. */
2416 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2417 && (lcompcode != COMPCODE_EQ)
2418 && (lcompcode != COMPCODE_ORD);
2419 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2420 && (rcompcode != COMPCODE_EQ)
2421 && (rcompcode != COMPCODE_ORD);
2422 bool trap = (compcode & COMPCODE_UNORD) == 0
2423 && (compcode != COMPCODE_EQ)
2424 && (compcode != COMPCODE_ORD);
2426 /* In a short-circuited boolean expression the LHS might be
2427 such that the RHS, if evaluated, will never trap. For
2428 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2429 if neither x nor y is NaN. (This is a mixed blessing: for
2430 example, the expression above will never trap, hence
2431 optimizing it to x < y would be invalid). */
2432 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2433 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2434 rtrap = false;
2436 /* If the comparison was short-circuited, and only the RHS
2437 trapped, we may now generate a spurious trap. */
2438 if (rtrap && !ltrap
2439 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2440 return NULL_TREE;
2442 /* If we changed the conditions that cause a trap, we lose. */
2443 if ((ltrap || rtrap) != trap)
2444 return NULL_TREE;
2447 if (compcode == COMPCODE_TRUE)
2448 return constant_boolean_node (true, truth_type);
2449 else if (compcode == COMPCODE_FALSE)
2450 return constant_boolean_node (false, truth_type);
2451 else
2453 enum tree_code tcode;
2455 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2456 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2460 /* Return nonzero if two operands (typically of the same tree node)
2461 are necessarily equal. If either argument has side-effects this
2462 function returns zero. FLAGS modifies behavior as follows:
2464 If OEP_ONLY_CONST is set, only return nonzero for constants.
2465 This function tests whether the operands are indistinguishable;
2466 it does not test whether they are equal using C's == operation.
2467 The distinction is important for IEEE floating point, because
2468 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2469 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2471 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2472 even though it may hold multiple values during a function.
2473 This is because a GCC tree node guarantees that nothing else is
2474 executed between the evaluation of its "operands" (which may often
2475 be evaluated in arbitrary order). Hence if the operands themselves
2476 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2477 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2478 unset means assuming isochronic (or instantaneous) tree equivalence.
2479 Unless comparing arbitrary expression trees, such as from different
2480 statements, this flag can usually be left unset.
2482 If OEP_PURE_SAME is set, then pure functions with identical arguments
2483 are considered the same. It is used when the caller has other ways
2484 to ensure that global memory is unchanged in between. */
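/* For example (illustrative), a + b and b + a compare equal (the
   tcc_binary case below retries commutative codes with the operands
   swapped), but two identical calls f (x) do not, unless the callee is
   ECF_CONST (or ECF_PURE with OEP_PURE_SAME), since an arbitrary call
   may return a different value on each evaluation.  */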
2486 int
2487 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2489 /* If either is ERROR_MARK, they aren't equal. */
2490 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2491 || TREE_TYPE (arg0) == error_mark_node
2492 || TREE_TYPE (arg1) == error_mark_node)
2493 return 0;
2495 /* Similar, if either does not have a type (like a released SSA name),
2496 they aren't equal. */
2497 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2498 return 0;
2500 /* Check equality of integer constants before bailing out due to
2501 precision differences. */
2502 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2503 return tree_int_cst_equal (arg0, arg1);
2505 /* If the two types don't have the same signedness, then we can't consider
2506 them equal. We must check this before the STRIP_NOPS calls
2507 because they may change the signedness of the arguments. As pointers
2508 strictly don't have a signedness, require either two pointers or
2509 two non-pointers as well. */
2510 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2511 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2512 return 0;
2515 /* We cannot consider pointers to different address spaces equal. */
2515 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2516 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2517 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2518 return 0;
2520 /* If the two types don't have the same precision, then it is not safe
2521 to strip NOPs. */
2522 if (element_precision (TREE_TYPE (arg0))
2523 != element_precision (TREE_TYPE (arg1)))
2524 return 0;
2526 STRIP_NOPS (arg0);
2527 STRIP_NOPS (arg1);
2529 /* In case both args are comparisons but with different comparison
2530 code, try to swap the comparison operands of one arg to produce
2531 a match and compare that variant. */
2532 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2533 && COMPARISON_CLASS_P (arg0)
2534 && COMPARISON_CLASS_P (arg1))
2536 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2538 if (TREE_CODE (arg0) == swap_code)
2539 return operand_equal_p (TREE_OPERAND (arg0, 0),
2540 TREE_OPERAND (arg1, 1), flags)
2541 && operand_equal_p (TREE_OPERAND (arg0, 1),
2542 TREE_OPERAND (arg1, 0), flags);
2545 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2546 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2547 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2548 return 0;
2550 /* This is needed for conversions and for COMPONENT_REF.
2551 Might as well play it safe and always test this. */
2552 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2553 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2554 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2555 return 0;
2557 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2558 We don't care about side effects in that case because the SAVE_EXPR
2559 takes care of that for us. In all other cases, two expressions are
2560 equal if they have no side effects. If we have two identical
2561 expressions with side effects that should be treated the same due
2562 to the only side effects being identical SAVE_EXPR's, that will
2563 be detected in the recursive calls below.
2564 If we are taking an invariant address of two identical objects
2565 they are necessarily equal as well. */
2566 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2567 && (TREE_CODE (arg0) == SAVE_EXPR
2568 || (flags & OEP_CONSTANT_ADDRESS_OF)
2569 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2570 return 1;
2572 /* Next handle constant cases, those for which we can return 1 even
2573 if ONLY_CONST is set. */
2574 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2575 switch (TREE_CODE (arg0))
2577 case INTEGER_CST:
2578 return tree_int_cst_equal (arg0, arg1);
2580 case FIXED_CST:
2581 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2582 TREE_FIXED_CST (arg1));
2584 case REAL_CST:
2585 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2586 TREE_REAL_CST (arg1)))
2587 return 1;
2590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2592 /* If we do not distinguish between signed and unsigned zero,
2593 consider them equal. */
2594 if (real_zerop (arg0) && real_zerop (arg1))
2595 return 1;
2597 return 0;
2599 case VECTOR_CST:
2601 unsigned i;
2603 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2604 return 0;
2606 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2608 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2609 VECTOR_CST_ELT (arg1, i), flags))
2610 return 0;
2612 return 1;
2615 case COMPLEX_CST:
2616 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2617 flags)
2618 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2619 flags));
2621 case STRING_CST:
2622 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2623 && ! memcmp (TREE_STRING_POINTER (arg0),
2624 TREE_STRING_POINTER (arg1),
2625 TREE_STRING_LENGTH (arg0)));
2627 case ADDR_EXPR:
2628 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2629 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2630 ? OEP_CONSTANT_ADDRESS_OF : 0);
2631 default:
2632 break;
2635 if (flags & OEP_ONLY_CONST)
2636 return 0;
2638 /* Define macros to test an operand from arg0 and arg1 for equality and a
2639 variant that allows null and views null as being different from any
2640 non-null value. In the latter case, if either is null, then both
2641 must be; otherwise, do the normal comparison. */
2642 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2643 TREE_OPERAND (arg1, N), flags)
2645 #define OP_SAME_WITH_NULL(N) \
2646 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2647 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2649 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2651 case tcc_unary:
2652 /* Two conversions are equal only if signedness and modes match. */
2653 switch (TREE_CODE (arg0))
2655 CASE_CONVERT:
2656 case FIX_TRUNC_EXPR:
2657 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2658 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2659 return 0;
2660 break;
2661 default:
2662 break;
2665 return OP_SAME (0);
2668 case tcc_comparison:
2669 case tcc_binary:
2670 if (OP_SAME (0) && OP_SAME (1))
2671 return 1;
2673 /* For commutative ops, allow the other order. */
2674 return (commutative_tree_code (TREE_CODE (arg0))
2675 && operand_equal_p (TREE_OPERAND (arg0, 0),
2676 TREE_OPERAND (arg1, 1), flags)
2677 && operand_equal_p (TREE_OPERAND (arg0, 1),
2678 TREE_OPERAND (arg1, 0), flags));
2680 case tcc_reference:
2681 /* If either of the pointer (or reference) expressions we are
2682 dereferencing contains a side effect, these cannot be equal,
2683 but their addresses can be. */
2684 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2685 && (TREE_SIDE_EFFECTS (arg0)
2686 || TREE_SIDE_EFFECTS (arg1)))
2687 return 0;
2689 switch (TREE_CODE (arg0))
2691 case INDIRECT_REF:
2692 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2693 return OP_SAME (0);
2695 case REALPART_EXPR:
2696 case IMAGPART_EXPR:
2697 return OP_SAME (0);
2699 case TARGET_MEM_REF:
2700 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2701 /* Require equal extra operands and then fall through to MEM_REF
2702 handling of the two common operands. */
2703 if (!OP_SAME_WITH_NULL (2)
2704 || !OP_SAME_WITH_NULL (3)
2705 || !OP_SAME_WITH_NULL (4))
2706 return 0;
2707 /* Fallthru. */
2708 case MEM_REF:
2709 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2710 /* Require equal access sizes, and similar pointer types.
2711 We can have incomplete types for array references of
2712 variable-sized arrays from the Fortran frontend
2713 though. Also verify the types are compatible. */
2714 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2715 || (TYPE_SIZE (TREE_TYPE (arg0))
2716 && TYPE_SIZE (TREE_TYPE (arg1))
2717 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2718 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2719 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2720 && alias_ptr_types_compatible_p
2721 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2722 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2723 && OP_SAME (0) && OP_SAME (1));
2725 case ARRAY_REF:
2726 case ARRAY_RANGE_REF:
2727 /* Operands 2 and 3 may be null.
2728 Compare the array index by value first if it is constant, as we
2729 may have different types but the same value here. */
2730 if (!OP_SAME (0))
2731 return 0;
2732 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2733 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2734 TREE_OPERAND (arg1, 1))
2735 || OP_SAME (1))
2736 && OP_SAME_WITH_NULL (2)
2737 && OP_SAME_WITH_NULL (3));
2739 case COMPONENT_REF:
2740 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2741 may be NULL when we're called to compare MEM_EXPRs. */
2742 if (!OP_SAME_WITH_NULL (0)
2743 || !OP_SAME (1))
2744 return 0;
2745 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2746 return OP_SAME_WITH_NULL (2);
2748 case BIT_FIELD_REF:
2749 if (!OP_SAME (0))
2750 return 0;
2751 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2752 return OP_SAME (1) && OP_SAME (2);
2754 default:
2755 return 0;
2758 case tcc_expression:
2759 switch (TREE_CODE (arg0))
2761 case ADDR_EXPR:
2762 case TRUTH_NOT_EXPR:
2763 return OP_SAME (0);
2765 case TRUTH_ANDIF_EXPR:
2766 case TRUTH_ORIF_EXPR:
2767 return OP_SAME (0) && OP_SAME (1);
2769 case FMA_EXPR:
2770 case WIDEN_MULT_PLUS_EXPR:
2771 case WIDEN_MULT_MINUS_EXPR:
2772 if (!OP_SAME (2))
2773 return 0;
2774 /* The multiplication operands are commutative. */
2775 /* FALLTHRU */
2777 case TRUTH_AND_EXPR:
2778 case TRUTH_OR_EXPR:
2779 case TRUTH_XOR_EXPR:
2780 if (OP_SAME (0) && OP_SAME (1))
2781 return 1;
2783 /* Otherwise take into account this is a commutative operation. */
2784 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2785 TREE_OPERAND (arg1, 1), flags)
2786 && operand_equal_p (TREE_OPERAND (arg0, 1),
2787 TREE_OPERAND (arg1, 0), flags));
2789 case COND_EXPR:
2790 case VEC_COND_EXPR:
2791 case DOT_PROD_EXPR:
2792 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2794 default:
2795 return 0;
2798 case tcc_vl_exp:
2799 switch (TREE_CODE (arg0))
2801 case CALL_EXPR:
2802 /* If the CALL_EXPRs call different functions, then they
2803 clearly cannot be equal. */
2804 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2805 flags))
2806 return 0;
2809 unsigned int cef = call_expr_flags (arg0);
2810 if (flags & OEP_PURE_SAME)
2811 cef &= ECF_CONST | ECF_PURE;
2812 else
2813 cef &= ECF_CONST;
2814 if (!cef)
2815 return 0;
2818 /* Now see if all the arguments are the same. */
2820 const_call_expr_arg_iterator iter0, iter1;
2821 const_tree a0, a1;
2822 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2823 a1 = first_const_call_expr_arg (arg1, &iter1);
2824 a0 && a1;
2825 a0 = next_const_call_expr_arg (&iter0),
2826 a1 = next_const_call_expr_arg (&iter1))
2827 if (! operand_equal_p (a0, a1, flags))
2828 return 0;
2830 /* If we get here and both argument lists are exhausted
2831 then the CALL_EXPRs are equal. */
2832 return ! (a0 || a1);
2834 default:
2835 return 0;
2838 case tcc_declaration:
2839 /* Consider __builtin_sqrt equal to sqrt. */
2840 return (TREE_CODE (arg0) == FUNCTION_DECL
2841 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2842 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2843 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2845 default:
2846 return 0;
2849 #undef OP_SAME
2850 #undef OP_SAME_WITH_NULL
2853 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2854 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2856 When in doubt, return 0. */
2858 static int
2859 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2861 int unsignedp1, unsignedpo;
2862 tree primarg0, primarg1, primother;
2863 unsigned int correct_width;
2865 if (operand_equal_p (arg0, arg1, 0))
2866 return 1;
2868 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2869 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2870 return 0;
2872 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2873 and see if the inner values are the same. This removes any
2874 signedness comparison, which doesn't matter here. */
2875 primarg0 = arg0, primarg1 = arg1;
2876 STRIP_NOPS (primarg0);
2877 STRIP_NOPS (primarg1);
2878 if (operand_equal_p (primarg0, primarg1, 0))
2879 return 1;
2881 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2882 actual comparison operand, ARG0.
2884 First throw away any conversions to wider types
2885 already present in the operands. */
2887 primarg1 = get_narrower (arg1, &unsignedp1);
2888 primother = get_narrower (other, &unsignedpo);
2890 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2891 if (unsignedp1 == unsignedpo
2892 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2893 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2895 tree type = TREE_TYPE (arg0);
2897 /* Make sure shorter operand is extended the right way
2898 to match the longer operand. */
2899 primarg1 = fold_convert (signed_or_unsigned_type_for
2900 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2902 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2903 return 1;
2906 return 0;
2909 /* See if ARG is an expression that is either a comparison or is performing
2910 arithmetic on comparisons. The comparisons must only be comparing
2911 two different values, which will be stored in *CVAL1 and *CVAL2; if
2912 they are nonzero it means that some operands have already been found.
2913 No variables may be used anywhere else in the expression except in the
2914 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2915 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2917 If this is true, return 1. Otherwise, return zero. */
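/* For example (illustrative), for ARG of the form (a < b) | (a == b)
   this returns 1 with a stored in *CVAL1 and b in *CVAL2, while
   (a < b) | (c == d) fails because it compares more than two distinct
   values.  */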
2919 static int
2920 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2922 enum tree_code code = TREE_CODE (arg);
2923 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2925 /* We can handle some of the tcc_expression cases here. */
2926 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2927 tclass = tcc_unary;
2928 else if (tclass == tcc_expression
2929 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2930 || code == COMPOUND_EXPR))
2931 tclass = tcc_binary;
2933 else if (tclass == tcc_expression && code == SAVE_EXPR
2934 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2936 /* If we've already found a CVAL1 or CVAL2, this expression is
2937 too complex to handle. */
2938 if (*cval1 || *cval2)
2939 return 0;
2941 tclass = tcc_unary;
2942 *save_p = 1;
2945 switch (tclass)
2947 case tcc_unary:
2948 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2950 case tcc_binary:
2951 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2952 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2953 cval1, cval2, save_p));
2955 case tcc_constant:
2956 return 1;
2958 case tcc_expression:
2959 if (code == COND_EXPR)
2960 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2961 cval1, cval2, save_p)
2962 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2963 cval1, cval2, save_p)
2964 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2965 cval1, cval2, save_p));
2966 return 0;
2968 case tcc_comparison:
2969 /* First see if we can handle the first operand, then the second. For
2970 the second operand, we know *CVAL1 can't be zero. It must be that
2971 one side of the comparison is each of the values; test for the
2972 case where this isn't true by failing if the two operands
2973 are the same. */
2975 if (operand_equal_p (TREE_OPERAND (arg, 0),
2976 TREE_OPERAND (arg, 1), 0))
2977 return 0;
2979 if (*cval1 == 0)
2980 *cval1 = TREE_OPERAND (arg, 0);
2981 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2982 ;
2983 else if (*cval2 == 0)
2984 *cval2 = TREE_OPERAND (arg, 0);
2985 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2986 ;
2987 else
2988 return 0;
2990 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2991 ;
2992 else if (*cval2 == 0)
2993 *cval2 = TREE_OPERAND (arg, 1);
2994 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2995 ;
2996 else
2997 return 0;
2999 return 1;
3001 default:
3002 return 0;
3006 /* ARG is a tree that is known to contain just arithmetic operations and
3007 comparisons. Evaluate the operations in the tree substituting NEW0 for
3008 any occurrence of OLD0 as an operand of a comparison and likewise for
3009 NEW1 and OLD1. */
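/* For example (illustrative), with OLD0 = a, NEW0 = 0, OLD1 = b and
   NEW1 = 1, the tree (a < b) | (a == b) becomes (0 < 1) | (0 == 1),
   which the caller can then fold to a constant to see how ARG behaves
   for that particular ordering of the two values.  */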
3011 static tree
3012 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3013 tree old1, tree new1)
3015 tree type = TREE_TYPE (arg);
3016 enum tree_code code = TREE_CODE (arg);
3017 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3019 /* We can handle some of the tcc_expression cases here. */
3020 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3021 tclass = tcc_unary;
3022 else if (tclass == tcc_expression
3023 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3024 tclass = tcc_binary;
3026 switch (tclass)
3028 case tcc_unary:
3029 return fold_build1_loc (loc, code, type,
3030 eval_subst (loc, TREE_OPERAND (arg, 0),
3031 old0, new0, old1, new1));
3033 case tcc_binary:
3034 return fold_build2_loc (loc, code, type,
3035 eval_subst (loc, TREE_OPERAND (arg, 0),
3036 old0, new0, old1, new1),
3037 eval_subst (loc, TREE_OPERAND (arg, 1),
3038 old0, new0, old1, new1));
3040 case tcc_expression:
3041 switch (code)
3043 case SAVE_EXPR:
3044 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3045 old1, new1);
3047 case COMPOUND_EXPR:
3048 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3049 old1, new1);
3051 case COND_EXPR:
3052 return fold_build3_loc (loc, code, type,
3053 eval_subst (loc, TREE_OPERAND (arg, 0),
3054 old0, new0, old1, new1),
3055 eval_subst (loc, TREE_OPERAND (arg, 1),
3056 old0, new0, old1, new1),
3057 eval_subst (loc, TREE_OPERAND (arg, 2),
3058 old0, new0, old1, new1));
3059 default:
3060 break;
3062 /* Fall through - ??? */
3064 case tcc_comparison:
3066 tree arg0 = TREE_OPERAND (arg, 0);
3067 tree arg1 = TREE_OPERAND (arg, 1);
3069 /* We need to check both for exact equality and tree equality. The
3070 former will be true if the operand has a side-effect. In that
3071 case, we know the operand occurred exactly once. */
3073 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3074 arg0 = new0;
3075 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3076 arg0 = new1;
3078 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3079 arg1 = new0;
3080 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3081 arg1 = new1;
3083 return fold_build2_loc (loc, code, type, arg0, arg1);
3086 default:
3087 return arg;
3091 /* Return a tree for the case when the result of an expression is RESULT
3092 converted to TYPE and OMITTED was previously an operand of the expression
3093 but is now not needed (e.g., we folded OMITTED * 0).
3095 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3096 the conversion of RESULT to TYPE. */
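/* For example (illustrative), when f (x) * 0 is folded to 0 and the
   call has side effects, the result is the COMPOUND_EXPR (f (x), 0)
   rather than a bare 0, so that f is still evaluated.  */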
3098 tree
3099 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3101 tree t = fold_convert_loc (loc, type, result);
3103 /* If the resulting operand is an empty statement, just return the omitted
3104 statement cast to void. */
3105 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3106 return build1_loc (loc, NOP_EXPR, void_type_node,
3107 fold_ignored_result (omitted));
3109 if (TREE_SIDE_EFFECTS (omitted))
3110 return build2_loc (loc, COMPOUND_EXPR, type,
3111 fold_ignored_result (omitted), t);
3113 return non_lvalue_loc (loc, t);
3116 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3118 static tree
3119 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3120 tree omitted)
3122 tree t = fold_convert_loc (loc, type, result);
3124 /* If the resulting operand is an empty statement, just return the omitted
3125 statement cast to void. */
3126 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3127 return build1_loc (loc, NOP_EXPR, void_type_node,
3128 fold_ignored_result (omitted));
3130 if (TREE_SIDE_EFFECTS (omitted))
3131 return build2_loc (loc, COMPOUND_EXPR, type,
3132 fold_ignored_result (omitted), t);
3134 return pedantic_non_lvalue_loc (loc, t);
3137 /* Return a tree for the case when the result of an expression is RESULT
3138 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3139 of the expression but are now not needed.
3141 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3142 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3143 evaluated before OMITTED2. Otherwise, if neither has side effects,
3144 just do the conversion of RESULT to TYPE. */
3146 tree
3147 omit_two_operands_loc (location_t loc, tree type, tree result,
3148 tree omitted1, tree omitted2)
3150 tree t = fold_convert_loc (loc, type, result);
3152 if (TREE_SIDE_EFFECTS (omitted2))
3153 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3154 if (TREE_SIDE_EFFECTS (omitted1))
3155 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3157 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3161 /* Return a simplified tree node for the truth-negation of ARG. This
3162 never alters ARG itself. We assume that ARG is an operation that
3163 returns a truth value (0 or 1).
3165 FIXME: one would think we would fold the result, but it causes
3166 problems with the dominator optimizer. */
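/* For example, !(a && b) becomes !a || !b via the TRUTH_ANDIF_EXPR case
   below, and !(a < b) becomes a >= b via invert_tree_comparison, unless
   a trapping floating-point comparison makes that inversion unsafe.  */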
3168 static tree
3169 fold_truth_not_expr (location_t loc, tree arg)
3171 tree type = TREE_TYPE (arg);
3172 enum tree_code code = TREE_CODE (arg);
3173 location_t loc1, loc2;
3175 /* If this is a comparison, we can simply invert it, except for
3176 floating-point non-equality comparisons, in which case we just
3177 enclose a TRUTH_NOT_EXPR around what we have. */
3179 if (TREE_CODE_CLASS (code) == tcc_comparison)
3181 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3182 if (FLOAT_TYPE_P (op_type)
3183 && flag_trapping_math
3184 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3185 && code != NE_EXPR && code != EQ_EXPR)
3186 return NULL_TREE;
3188 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3189 if (code == ERROR_MARK)
3190 return NULL_TREE;
3192 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3193 TREE_OPERAND (arg, 1));
3196 switch (code)
3198 case INTEGER_CST:
3199 return constant_boolean_node (integer_zerop (arg), type);
3201 case TRUTH_AND_EXPR:
3202 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3204 return build2_loc (loc, TRUTH_OR_EXPR, type,
3205 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3206 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3208 case TRUTH_OR_EXPR:
3209 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3210 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3211 return build2_loc (loc, TRUTH_AND_EXPR, type,
3212 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3213 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3215 case TRUTH_XOR_EXPR:
3216 /* Here we can invert either operand. We invert the first operand
3217 unless the second operand is a TRUTH_NOT_EXPR in which case our
3218 result is the XOR of the first operand with the inside of the
3219 negation of the second operand. */
3221 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3222 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3223 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3224 else
3225 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3226 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3227 TREE_OPERAND (arg, 1));
3229 case TRUTH_ANDIF_EXPR:
3230 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3231 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3232 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3233 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3234 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3236 case TRUTH_ORIF_EXPR:
3237 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3238 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3239 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3241 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3243 case TRUTH_NOT_EXPR:
3244 return TREE_OPERAND (arg, 0);
3246 case COND_EXPR:
3248 tree arg1 = TREE_OPERAND (arg, 1);
3249 tree arg2 = TREE_OPERAND (arg, 2);
3251 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3252 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3254 /* A COND_EXPR may have a throw as one operand, which
3255 then has void type. Just leave void operands
3256 as they are. */
3257 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3258 VOID_TYPE_P (TREE_TYPE (arg1))
3259 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3260 VOID_TYPE_P (TREE_TYPE (arg2))
3261 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3264 case COMPOUND_EXPR:
3265 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3266 return build2_loc (loc, COMPOUND_EXPR, type,
3267 TREE_OPERAND (arg, 0),
3268 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3270 case NON_LVALUE_EXPR:
3271 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3272 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3274 CASE_CONVERT:
3275 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3276 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3278 /* ... fall through ... */
3280 case FLOAT_EXPR:
3281 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3282 return build1_loc (loc, TREE_CODE (arg), type,
3283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3285 case BIT_AND_EXPR:
3286 if (!integer_onep (TREE_OPERAND (arg, 1)))
3287 return NULL_TREE;
3288 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3290 case SAVE_EXPR:
3291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3293 case CLEANUP_POINT_EXPR:
3294 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3295 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3296 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3298 default:
3299 return NULL_TREE;
3303 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3304 assume that ARG is an operation that returns a truth value (0 or 1
3305 for scalars, 0 or -1 for vectors). Return the folded expression if
3306 folding is successful. Otherwise, return NULL_TREE. */
3308 static tree
3309 fold_invert_truthvalue (location_t loc, tree arg)
3311 tree type = TREE_TYPE (arg);
3312 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3313 ? BIT_NOT_EXPR
3314 : TRUTH_NOT_EXPR,
3315 type, arg);
3318 /* Return a simplified tree node for the truth-negation of ARG. This
3319 never alters ARG itself. We assume that ARG is an operation that
3320 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3322 tree
3323 invert_truthvalue_loc (location_t loc, tree arg)
3325 if (TREE_CODE (arg) == ERROR_MARK)
3326 return arg;
3328 tree type = TREE_TYPE (arg);
3329 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3330 ? BIT_NOT_EXPR
3331 : TRUTH_NOT_EXPR,
3332 type, arg);
3335 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3336 operands are another bit-wise operation with a common input. If so,
3337 distribute the bit operations to save an operation and possibly two if
3338 constants are involved. For example, convert
3339 (A | B) & (A | C) into A | (B & C)
3340 Further simplification will occur if B and C are constants.
3342 If this optimization cannot be done, 0 will be returned. */
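/* For example (illustrative), (x | 4) & (x | 8) becomes x | (4 & 8);
   constant folding then reduces that to x | 0 and finally to x.  */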
3344 static tree
3345 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3346 tree arg0, tree arg1)
3348 tree common;
3349 tree left, right;
3351 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3352 || TREE_CODE (arg0) == code
3353 || (TREE_CODE (arg0) != BIT_AND_EXPR
3354 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3355 return 0;
3357 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3359 common = TREE_OPERAND (arg0, 0);
3360 left = TREE_OPERAND (arg0, 1);
3361 right = TREE_OPERAND (arg1, 1);
3363 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3365 common = TREE_OPERAND (arg0, 0);
3366 left = TREE_OPERAND (arg0, 1);
3367 right = TREE_OPERAND (arg1, 0);
3369 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3371 common = TREE_OPERAND (arg0, 1);
3372 left = TREE_OPERAND (arg0, 0);
3373 right = TREE_OPERAND (arg1, 1);
3375 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3377 common = TREE_OPERAND (arg0, 1);
3378 left = TREE_OPERAND (arg0, 0);
3379 right = TREE_OPERAND (arg1, 0);
3381 else
3382 return 0;
3384 common = fold_convert_loc (loc, type, common);
3385 left = fold_convert_loc (loc, type, left);
3386 right = fold_convert_loc (loc, type, right);
3387 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3388 fold_build2_loc (loc, code, type, left, right));
3391 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3392 with code CODE. This optimization is unsafe. */
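/* For example (illustrative), (a / c) + (b / c) becomes (a + b) / c,
   and (a / 2.0) + (a / 4.0) becomes a * 0.75. It is unsafe because the
   combined constant may round differently than the original divisions. */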
3393 static tree
3394 distribute_real_division (location_t loc, enum tree_code code, tree type,
3395 tree arg0, tree arg1)
3397 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3398 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3400 /* (A / C) +- (B / C) -> (A +- B) / C. */
3401 if (mul0 == mul1
3402 && operand_equal_p (TREE_OPERAND (arg0, 1),
3403 TREE_OPERAND (arg1, 1), 0))
3404 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3405 fold_build2_loc (loc, code, type,
3406 TREE_OPERAND (arg0, 0),
3407 TREE_OPERAND (arg1, 0)),
3408 TREE_OPERAND (arg0, 1));
3410 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3411 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3412 TREE_OPERAND (arg1, 0), 0)
3413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3414 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3416 REAL_VALUE_TYPE r0, r1;
3417 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3418 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3419 if (!mul0)
3420 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3421 if (!mul1)
3422 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3423 real_arithmetic (&r0, code, &r0, &r1);
3424 return fold_build2_loc (loc, MULT_EXPR, type,
3425 TREE_OPERAND (arg0, 0),
3426 build_real (type, r0));
3429 return NULL_TREE;
3432 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3433 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3435 static tree
3436 make_bit_field_ref (location_t loc, tree inner, tree type,
3437 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3439 tree result, bftype;
3441 if (bitpos == 0)
3443 tree size = TYPE_SIZE (TREE_TYPE (inner));
3444 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3445 || POINTER_TYPE_P (TREE_TYPE (inner)))
3446 && tree_fits_shwi_p (size)
3447 && tree_to_shwi (size) == bitsize)
3448 return fold_convert_loc (loc, type, inner);
3451 bftype = type;
3452 if (TYPE_PRECISION (bftype) != bitsize
3453 || TYPE_UNSIGNED (bftype) == !unsignedp)
3454 bftype = build_nonstandard_integer_type (bitsize, 0);
3456 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3457 size_int (bitsize), bitsize_int (bitpos));
3459 if (bftype != type)
3460 result = fold_convert_loc (loc, type, result);
3462 return result;
3465 /* Optimize a bit-field compare.
3467 There are two cases: First is a compare against a constant and the
3468 second is a comparison of two items where the fields are at the same
3469 bit position relative to the start of a chunk (byte, halfword, word)
3470 large enough to contain it. In these cases we can avoid the shift
3471 implicit in bitfield extractions.
3473 For constants, we emit a compare of the shifted constant with the
3474 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3475 compared. For two fields at the same position, we do the ANDs with the
3476 similar mask and compare the result of the ANDs.
3478 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3479 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3480 are the left and right operands of the comparison, respectively.
3482 If the optimization described above can be done, we return the resulting
3483 tree. Otherwise we return zero. */
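/* For example (illustrative), given struct s { unsigned a : 3;
   unsigned b : 5; }, the test s.b == 3 can load the containing byte
   once and compare (byte & mask) == ((3 << shift) & mask) for the
   appropriate shift and mask, so the field value is never actually
   shifted down to bit 0.  */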
3485 static tree
3486 optimize_bit_field_compare (location_t loc, enum tree_code code,
3487 tree compare_type, tree lhs, tree rhs)
3489 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3490 tree type = TREE_TYPE (lhs);
3491 tree signed_type, unsigned_type;
3492 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3493 enum machine_mode lmode, rmode, nmode;
3494 int lunsignedp, runsignedp;
3495 int lvolatilep = 0, rvolatilep = 0;
3496 tree linner, rinner = NULL_TREE;
3497 tree mask;
3498 tree offset;
3500 /* Get all the information about the extractions being done. If the bit size
3501 is the same as the size of the underlying object, we aren't doing an
3502 extraction at all and so can do nothing. We also don't want to
3503 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3504 then will no longer be able to replace it. */
3505 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3506 &lunsignedp, &lvolatilep, false);
3507 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3508 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3509 return 0;
3511 if (!const_p)
3513 /* If this is not a constant, we can only do something if bit positions,
3514 sizes, and signedness are the same. */
3515 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3516 &runsignedp, &rvolatilep, false);
3518 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3519 || lunsignedp != runsignedp || offset != 0
3520 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3521 return 0;
3524 /* See if we can find a mode to refer to this field. We should be able to,
3525 but fail if we can't. */
3526 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3527 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3528 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3529 TYPE_ALIGN (TREE_TYPE (rinner))),
3530 word_mode, false);
3531 if (nmode == VOIDmode)
3532 return 0;
3534 /* Set signed and unsigned types of the precision of this mode for the
3535 shifts below. */
3536 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3537 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3539 /* Compute the bit position and size for the new reference and our offset
3540 within it. If the new reference is the same size as the original, we
3541 won't optimize anything, so return zero. */
3542 nbitsize = GET_MODE_BITSIZE (nmode);
3543 nbitpos = lbitpos & ~ (nbitsize - 1);
3544 lbitpos -= nbitpos;
3545 if (nbitsize == lbitsize)
3546 return 0;
3548 if (BYTES_BIG_ENDIAN)
3549 lbitpos = nbitsize - lbitsize - lbitpos;
3551 /* Make the mask to be used against the extracted field. */
3552 mask = build_int_cst_type (unsigned_type, -1);
3553 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3554 mask = const_binop (RSHIFT_EXPR, mask,
3555 size_int (nbitsize - lbitsize - lbitpos));
3557 if (! const_p)
3558 /* If not comparing with constant, just rework the comparison
3559 and return. */
3560 return fold_build2_loc (loc, code, compare_type,
3561 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3562 make_bit_field_ref (loc, linner,
3563 unsigned_type,
3564 nbitsize, nbitpos,
3565 1),
3566 mask),
3567 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3568 make_bit_field_ref (loc, rinner,
3569 unsigned_type,
3570 nbitsize, nbitpos,
3571 1),
3572 mask));
3574 /* Otherwise, we are handling the constant case. See if the constant is too
3575 big for the field. Warn and return a tree for 0 (false) if so. We do
3576 this not only for its own sake, but to avoid having to test for this
3577 error case below. If we didn't, we might generate wrong code.
3579 For unsigned fields, the constant shifted right by the field length should
3580 be all zero. For signed fields, the high-order bits should agree with
3581 the sign bit. */
3583 if (lunsignedp)
3585 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3586 fold_convert_loc (loc,
3587 unsigned_type, rhs),
3588 size_int (lbitsize))))
3590 warning (0, "comparison is always %d due to width of bit-field",
3591 code == NE_EXPR);
3592 return constant_boolean_node (code == NE_EXPR, compare_type);
3595 else
3597 tree tem = const_binop (RSHIFT_EXPR,
3598 fold_convert_loc (loc, signed_type, rhs),
3599 size_int (lbitsize - 1));
3600 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3602 warning (0, "comparison is always %d due to width of bit-field",
3603 code == NE_EXPR);
3604 return constant_boolean_node (code == NE_EXPR, compare_type);
3608 /* Single-bit compares should always be against zero. */
3609 if (lbitsize == 1 && ! integer_zerop (rhs))
3611 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3612 rhs = build_int_cst (type, 0);
3615 /* Make a new bitfield reference, shift the constant over the
3616 appropriate number of bits and mask it with the computed mask
3617 (in case this was a signed field). If we changed it, make a new one. */
3618 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3620 rhs = const_binop (BIT_AND_EXPR,
3621 const_binop (LSHIFT_EXPR,
3622 fold_convert_loc (loc, unsigned_type, rhs),
3623 size_int (lbitpos)),
3624 mask);
3626 lhs = build2_loc (loc, code, compare_type,
3627 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3628 return lhs;
3631 /* Subroutine for fold_truth_andor_1: decode a field reference.
3633 If EXP is a comparison reference, we return the innermost reference.
3635 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3636 set to the starting bit number.
3638 If the innermost field can be completely contained in a mode-sized
3639 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3641 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3642 otherwise it is not changed.
3644 *PUNSIGNEDP is set to the signedness of the field.
3646 *PMASK is set to the mask used. This is either contained in a
3647 BIT_AND_EXPR or derived from the width of the field.
3649 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3651 Return 0 if this is not a component reference or is one that we can't
3652 do anything with. */
3654 static tree
3655 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3656 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3657 int *punsignedp, int *pvolatilep,
3658 tree *pmask, tree *pand_mask)
3660 tree outer_type = 0;
3661 tree and_mask = 0;
3662 tree mask, inner, offset;
3663 tree unsigned_type;
3664 unsigned int precision;
3666 /* All the optimizations using this function assume integer fields.
3667 There are problems with FP fields since the type_for_size call
3668 below can fail for, e.g., XFmode. */
3669 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3670 return 0;
3672 /* We are interested in the bare arrangement of bits, so strip everything
3673 that doesn't affect the machine mode. However, record the type of the
3674 outermost expression if it may matter below. */
3675 if (CONVERT_EXPR_P (exp)
3676 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3677 outer_type = TREE_TYPE (exp);
3678 STRIP_NOPS (exp);
3680 if (TREE_CODE (exp) == BIT_AND_EXPR)
3682 and_mask = TREE_OPERAND (exp, 1);
3683 exp = TREE_OPERAND (exp, 0);
3684 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3685 if (TREE_CODE (and_mask) != INTEGER_CST)
3686 return 0;
3689 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3690 punsignedp, pvolatilep, false);
3691 if ((inner == exp && and_mask == 0)
3692 || *pbitsize < 0 || offset != 0
3693 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3694 return 0;
3696 /* If the number of bits in the reference is the same as the bitsize of
3697 the outer type, then the outer type gives the signedness. Otherwise
3698 (in case of a small bitfield) the signedness is unchanged. */
3699 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3700 *punsignedp = TYPE_UNSIGNED (outer_type);
3702 /* Compute the mask to access the bitfield. */
3703 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3704 precision = TYPE_PRECISION (unsigned_type);
3706 mask = build_int_cst_type (unsigned_type, -1);
3708 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3709 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3711 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3712 if (and_mask != 0)
3713 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3714 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3716 *pmask = mask;
3717 *pand_mask = and_mask;
3718 return inner;
3721 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3722 bit positions. */
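/* For example, for SIZE == 8 the constant 0xff is such a mask, while
   0x7f (too few ones) and 0x1fe (not anchored at bit 0) are not.  */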
3724 static int
3725 all_ones_mask_p (const_tree mask, int size)
3727 tree type = TREE_TYPE (mask);
3728 unsigned int precision = TYPE_PRECISION (type);
3729 tree tmask;
3731 tmask = build_int_cst_type (signed_type_for (type), -1);
3733 return
3734 tree_int_cst_equal (mask,
3735 const_binop (RSHIFT_EXPR,
3736 const_binop (LSHIFT_EXPR, tmask,
3737 size_int (precision - size)),
3738 size_int (precision - size)));
3741 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3742 represents the sign bit of EXP's type. If EXP represents a sign
3743 or zero extension, also test VAL against the unextended type.
3744 The return value is the (sub)expression whose sign bit is VAL,
3745 or NULL_TREE otherwise. */
3747 static tree
3748 sign_bit_p (tree exp, const_tree val)
3750 unsigned HOST_WIDE_INT mask_lo, lo;
3751 HOST_WIDE_INT mask_hi, hi;
3752 int width;
3753 tree t;
3755 /* Tree EXP must have an integral type. */
3756 t = TREE_TYPE (exp);
3757 if (! INTEGRAL_TYPE_P (t))
3758 return NULL_TREE;
3760 /* Tree VAL must be an integer constant. */
3761 if (TREE_CODE (val) != INTEGER_CST
3762 || TREE_OVERFLOW (val))
3763 return NULL_TREE;
3765 width = TYPE_PRECISION (t);
3766 if (width > HOST_BITS_PER_WIDE_INT)
3768 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3769 lo = 0;
3771 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3772 mask_lo = -1;
3774 else
3776 hi = 0;
3777 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3779 mask_hi = 0;
3780 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3783 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3784 treat VAL as if it were unsigned. */
3785 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3786 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3787 return exp;
3789 /* Handle extension from a narrower type. */
3790 if (TREE_CODE (exp) == NOP_EXPR
3791 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3792 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3794 return NULL_TREE;
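/* Editor's sketch: for a width that fits in one word, the constant
   sign_bit_p looks for is simply 1 << (width - 1); the hi/lo pair
   above generalizes this to double-word precision.  Assumes
   1 <= width <= 64.  */
static unsigned long long
sign_bit_of_width (int width)
{
  return 1ULL << (width - 1);	/* e.g. width 8 => 0x80 */
}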
3797 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3798 to be evaluated unconditionally. */
3800 static int
3801 simple_operand_p (const_tree exp)
3803 /* Strip any conversions that don't change the machine mode. */
3804 STRIP_NOPS (exp);
3806 return (CONSTANT_CLASS_P (exp)
3807 || TREE_CODE (exp) == SSA_NAME
3808 || (DECL_P (exp)
3809 && ! TREE_ADDRESSABLE (exp)
3810 && ! TREE_THIS_VOLATILE (exp)
3811 && ! DECL_NONLOCAL (exp)
3812 /* Don't regard global variables as simple. They may be
3813 allocated in ways unknown to the compiler (shared memory,
3814 #pragma weak, etc). */
3815 && ! TREE_PUBLIC (exp)
3816 && ! DECL_EXTERNAL (exp)
3817 /* Weakrefs are not safe to be read, since they can be NULL.
3818 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3819 have DECL_WEAK flag set. */
3820 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3821 /* Loading a static variable is unduly expensive, but global
3822 registers aren't expensive. */
3823 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3826 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3827 to be evaluated unconditionally.
3828 In addition to simple_operand_p, we assume that comparisons, conversions,
3829 and logic-not operations are simple, if their operands are simple, too. */
3831 static bool
3832 simple_operand_p_2 (tree exp)
3834 enum tree_code code;
3836 if (TREE_SIDE_EFFECTS (exp)
3837 || tree_could_trap_p (exp))
3838 return false;
3840 while (CONVERT_EXPR_P (exp))
3841 exp = TREE_OPERAND (exp, 0);
3843 code = TREE_CODE (exp);
3845 if (TREE_CODE_CLASS (code) == tcc_comparison)
3846 return (simple_operand_p (TREE_OPERAND (exp, 0))
3847 && simple_operand_p (TREE_OPERAND (exp, 1)));
3849 if (code == TRUTH_NOT_EXPR)
3850 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3852 return simple_operand_p (exp);
3856 /* The following functions are subroutines to fold_range_test and allow it to
3857 try to change a logical combination of comparisons into a range test.
3859 For example, both
3860 X == 2 || X == 3 || X == 4 || X == 5
3861 and
3862 X >= 2 && X <= 5
3863 are converted to
3864 (unsigned) (X - 2) <= 3
3866 We describe each set of comparisons as being either inside or outside
3867 a range, using a variable named like IN_P, and then describe the
3868 range with a lower and upper bound. If one of the bounds is omitted,
3869 it represents either the highest or lowest value of the type.
3871 In the comments below, we represent a range by two numbers in brackets
3872 preceded by a "+" to designate being inside that range, or a "-" to
3873 designate being outside that range, so the condition can be inverted by
3874 flipping the prefix. An omitted bound is represented by a "-". For
3875 example, "- [-, 10]" means being outside the range starting at the lowest
3876 possible value and ending at 10, in other words, being greater than 10.
3877 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3878 always false.
3880 We set up things so that the missing bounds are handled in a consistent
3881 manner so neither a missing bound nor "true" and "false" need to be
3882 handled using a special case. */
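/* Worked example (editor's addition): the rewrite described above in
   plain C.  Casting before subtracting keeps the arithmetic fully
   defined; both forms agree for every int value of x.  */
static int
in_range_example (int x)
{
  /* x == 2 || x == 3 || x == 4 || x == 5, i.e. x in + [2, 5],
     becomes one subtract and one unsigned compare.  */
  return (unsigned int) x - 2u <= 3u;
}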
3884 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3885 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3886 and UPPER1_P are nonzero if the respective argument is an upper bound
3887 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3888 must be specified for a comparison. ARG1 will be converted to ARG0's
3889 type if both are specified. */
3891 static tree
3892 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3893 tree arg1, int upper1_p)
3895 tree tem;
3896 int result;
3897 int sgn0, sgn1;
3899 /* If neither arg represents infinity, do the normal operation.
3900 Else, if not a comparison, return infinity. Else handle the special
3901 comparison rules. Note that most of the cases below won't occur, but
3902 are handled for consistency. */
3904 if (arg0 != 0 && arg1 != 0)
3906 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3907 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3908 STRIP_NOPS (tem);
3909 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3912 if (TREE_CODE_CLASS (code) != tcc_comparison)
3913 return 0;
3915 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3916 for neither. In real maths, we cannot assume open ended ranges are
3917 the same. But, this is computer arithmetic, where numbers are finite.
3918 We can therefore make the transformation of any unbounded range with
3919 the value Z, Z being greater than any representable number. This permits
3920 us to treat unbounded ranges as equal. */
3921 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3922 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3923 switch (code)
3925 case EQ_EXPR:
3926 result = sgn0 == sgn1;
3927 break;
3928 case NE_EXPR:
3929 result = sgn0 != sgn1;
3930 break;
3931 case LT_EXPR:
3932 result = sgn0 < sgn1;
3933 break;
3934 case LE_EXPR:
3935 result = sgn0 <= sgn1;
3936 break;
3937 case GT_EXPR:
3938 result = sgn0 > sgn1;
3939 break;
3940 case GE_EXPR:
3941 result = sgn0 >= sgn1;
3942 break;
3943 default:
3944 gcc_unreachable ();
3947 return constant_boolean_node (result, type);
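/* Editor's sketch of the comparison above with the tree machinery
   stripped away: a missing bound is modelled as a null pointer and
   ranked by the same sign encoding, -1 for -infinity (absent lower
   bound), +1 for +infinity (absent upper bound), 0 for any finite
   value.  */
static int
bound_less_than (const int *a, int a_upper_p, const int *b, int b_upper_p)
{
  if (a && b)
    return *a < *b;	/* both bounds finite: ordinary compare */
  {
    int sgn_a = a ? 0 : (a_upper_p ? 1 : -1);
    int sgn_b = b ? 0 : (b_upper_p ? 1 : -1);
    return sgn_a < sgn_b;	/* -inf < finite < +inf */
  }
}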
3950 /* Helper routine for make_range. Perform one step for it, return
3951 new expression if the loop should continue or NULL_TREE if it should
3952 stop. */
3954 tree
3955 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3956 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3957 bool *strict_overflow_p)
3959 tree arg0_type = TREE_TYPE (arg0);
3960 tree n_low, n_high, low = *p_low, high = *p_high;
3961 int in_p = *p_in_p, n_in_p;
3963 switch (code)
3965 case TRUTH_NOT_EXPR:
3966 /* We can only do something if the range is testing for zero. */
3967 if (low == NULL_TREE || high == NULL_TREE
3968 || ! integer_zerop (low) || ! integer_zerop (high))
3969 return NULL_TREE;
3970 *p_in_p = ! in_p;
3971 return arg0;
3973 case EQ_EXPR: case NE_EXPR:
3974 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975 /* We can only do something if the range is testing for zero
3976 and if the second operand is an integer constant. Note that
3977 saying something is "in" the range we make is done by
3978 complementing IN_P since it will set in the initial case of
3979 being not equal to zero; "out" is leaving it alone. */
3980 if (low == NULL_TREE || high == NULL_TREE
3981 || ! integer_zerop (low) || ! integer_zerop (high)
3982 || TREE_CODE (arg1) != INTEGER_CST)
3983 return NULL_TREE;
3985 switch (code)
3987 case NE_EXPR: /* - [c, c] */
3988 low = high = arg1;
3989 break;
3990 case EQ_EXPR: /* + [c, c] */
3991 in_p = ! in_p, low = high = arg1;
3992 break;
3993 case GT_EXPR: /* - [-, c] */
3994 low = 0, high = arg1;
3995 break;
3996 case GE_EXPR: /* + [c, -] */
3997 in_p = ! in_p, low = arg1, high = 0;
3998 break;
3999 case LT_EXPR: /* - [c, -] */
4000 low = arg1, high = 0;
4001 break;
4002 case LE_EXPR: /* + [-, c] */
4003 in_p = ! in_p, low = 0, high = arg1;
4004 break;
4005 default:
4006 gcc_unreachable ();
4009 /* If this is an unsigned comparison, we also know that EXP is
4010 greater than or equal to zero. We base the range tests we make
4011 on that fact, so we record it here so we can parse existing
4012 range tests. We test arg0_type since often the return type
4013 of, e.g. EQ_EXPR, is boolean. */
4014 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4016 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017 in_p, low, high, 1,
4018 build_int_cst (arg0_type, 0),
4019 NULL_TREE))
4020 return NULL_TREE;
4022 in_p = n_in_p, low = n_low, high = n_high;
4024 /* If the high bound is missing, but we have a nonzero low
4025 bound, reverse the range so it goes from zero to the low bound
4026 minus 1. */
4027 if (high == 0 && low && ! integer_zerop (low))
4029 in_p = ! in_p;
4030 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031 integer_one_node, 0);
4032 low = build_int_cst (arg0_type, 0);
4036 *p_low = low;
4037 *p_high = high;
4038 *p_in_p = in_p;
4039 return arg0;
4041 case NEGATE_EXPR:
4042 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4043 low and high are non-NULL, then normalize will DTRT. */
4044 if (!TYPE_UNSIGNED (arg0_type)
4045 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4047 if (low == NULL_TREE)
4048 low = TYPE_MIN_VALUE (arg0_type);
4049 if (high == NULL_TREE)
4050 high = TYPE_MAX_VALUE (arg0_type);
4053 /* (-x) IN [a,b] -> x in [-b, -a] */
4054 n_low = range_binop (MINUS_EXPR, exp_type,
4055 build_int_cst (exp_type, 0),
4056 0, high, 1);
4057 n_high = range_binop (MINUS_EXPR, exp_type,
4058 build_int_cst (exp_type, 0),
4059 0, low, 0);
4060 if (n_high != 0 && TREE_OVERFLOW (n_high))
4061 return NULL_TREE;
4062 goto normalize;
4064 case BIT_NOT_EXPR:
4065 /* ~ X -> -X - 1 */
4066 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4067 build_int_cst (exp_type, 1));
4069 case PLUS_EXPR:
4070 case MINUS_EXPR:
4071 if (TREE_CODE (arg1) != INTEGER_CST)
4072 return NULL_TREE;
4074 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4075 move a constant to the other side. */
4076 if (!TYPE_UNSIGNED (arg0_type)
4077 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4078 return NULL_TREE;
4080 /* If EXP is signed, any overflow in the computation is undefined,
4081 so we don't worry about it so long as our computations on
4082 the bounds don't overflow. For unsigned, overflow is defined
4083 and this is exactly the right thing. */
4084 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4085 arg0_type, low, 0, arg1, 0);
4086 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4087 arg0_type, high, 1, arg1, 0);
4088 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4089 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4090 return NULL_TREE;
4092 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4093 *strict_overflow_p = true;
4095 normalize:
4096 /* Check for an unsigned range which has wrapped around the maximum
4097 value thus making n_high < n_low, and normalize it. */
4098 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4100 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4101 integer_one_node, 0);
4102 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4103 integer_one_node, 0);
4105 /* If the range is of the form +/- [ x+1, x ], we won't
4106 be able to normalize it. But then, it represents the
4107 whole range or the empty set, so make it
4108 +/- [ -, - ]. */
4109 if (tree_int_cst_equal (n_low, low)
4110 && tree_int_cst_equal (n_high, high))
4111 low = high = 0;
4112 else
4113 in_p = ! in_p;
4115 else
4116 low = n_low, high = n_high;
4118 *p_low = low;
4119 *p_high = high;
4120 *p_in_p = in_p;
4121 return arg0;
4123 CASE_CONVERT:
4124 case NON_LVALUE_EXPR:
4125 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4126 return NULL_TREE;
4128 if (! INTEGRAL_TYPE_P (arg0_type)
4129 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4130 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4131 return NULL_TREE;
4133 n_low = low, n_high = high;
4135 if (n_low != 0)
4136 n_low = fold_convert_loc (loc, arg0_type, n_low);
4138 if (n_high != 0)
4139 n_high = fold_convert_loc (loc, arg0_type, n_high);
4141 /* If we're converting arg0 from an unsigned type to exp's
4142 signed type, we will be doing the comparison as unsigned.
4143 The tests above have already verified that LOW and HIGH
4144 are both positive.
4146 So we have to ensure that we will handle large unsigned
4147 values the same way that the current signed bounds treat
4148 negative values. */
4150 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4152 tree high_positive;
4153 tree equiv_type;
4154 /* For fixed-point modes, we need to pass the saturating flag
4155 as the 2nd parameter. */
4156 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4157 equiv_type
4158 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4159 TYPE_SATURATING (arg0_type));
4160 else
4161 equiv_type
4162 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4164 /* A range without an upper bound is, naturally, unbounded.
4165 Since convert would have cropped a very large value, use
4166 the max value for the destination type. */
4167 high_positive
4168 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4169 : TYPE_MAX_VALUE (arg0_type);
4171 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4172 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4173 fold_convert_loc (loc, arg0_type,
4174 high_positive),
4175 build_int_cst (arg0_type, 1));
4177 /* If the low bound is specified, "and" the range with the
4178 range for which the original unsigned value will be
4179 positive. */
4180 if (low != 0)
4182 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4183 1, fold_convert_loc (loc, arg0_type,
4184 integer_zero_node),
4185 high_positive))
4186 return NULL_TREE;
4188 in_p = (n_in_p == in_p);
4190 else
4192 /* Otherwise, "or" the range with the range of the input
4193 that will be interpreted as negative. */
4194 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4195 1, fold_convert_loc (loc, arg0_type,
4196 integer_zero_node),
4197 high_positive))
4198 return NULL_TREE;
4200 in_p = (in_p != n_in_p);
4204 *p_low = n_low;
4205 *p_high = n_high;
4206 *p_in_p = in_p;
4207 return arg0;
4209 default:
4210 return NULL_TREE;
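/* Editor's check: the BIT_NOT_EXPR case in make_range_step above
   rests on the two's-complement identity ~x == -x - 1, verified here
   exhaustively for 8-bit values.  */
static int
bit_not_identity_holds (void)
{
  int x;
  for (x = -128; x <= 127; x++)
    if (~x != -x - 1)
      return 0;
  return 1;	/* 1 on any two's-complement target */
}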
4214 /* Given EXP, a logical expression, set the range it is testing into
4215 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4216 actually being tested. *PLOW and *PHIGH will be made of the same
4217 type as the returned expression. If EXP is not a comparison, we
4218 will most likely not be returning a useful value and range. Set
4219 *STRICT_OVERFLOW_P to true if the return value is only valid
4220 because signed overflow is undefined; otherwise, do not change
4221 *STRICT_OVERFLOW_P. */
4223 tree
4224 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4225 bool *strict_overflow_p)
4227 enum tree_code code;
4228 tree arg0, arg1 = NULL_TREE;
4229 tree exp_type, nexp;
4230 int in_p;
4231 tree low, high;
4232 location_t loc = EXPR_LOCATION (exp);
4234 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4235 and see if we can refine the range. Some of the cases below may not
4236 happen, but it doesn't seem worth worrying about this. We "continue"
4237 the outer loop when we've changed something; otherwise we "break"
4238 the switch, which will "break" the while. */
4240 in_p = 0;
4241 low = high = build_int_cst (TREE_TYPE (exp), 0);
4243 while (1)
4245 code = TREE_CODE (exp);
4246 exp_type = TREE_TYPE (exp);
4247 arg0 = NULL_TREE;
4249 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4251 if (TREE_OPERAND_LENGTH (exp) > 0)
4252 arg0 = TREE_OPERAND (exp, 0);
4253 if (TREE_CODE_CLASS (code) == tcc_binary
4254 || TREE_CODE_CLASS (code) == tcc_comparison
4255 || (TREE_CODE_CLASS (code) == tcc_expression
4256 && TREE_OPERAND_LENGTH (exp) > 1))
4257 arg1 = TREE_OPERAND (exp, 1);
4259 if (arg0 == NULL_TREE)
4260 break;
4262 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4263 &high, &in_p, strict_overflow_p);
4264 if (nexp == NULL_TREE)
4265 break;
4266 exp = nexp;
4269 /* If EXP is a constant, we can evaluate whether this is true or false. */
4270 if (TREE_CODE (exp) == INTEGER_CST)
4272 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4273 exp, 0, low, 0))
4274 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4275 exp, 1, high, 1)));
4276 low = high = 0;
4277 exp = 0;
4280 *pin_p = in_p, *plow = low, *phigh = high;
4281 return exp;
4284 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4285 type, TYPE, return an expression to test if EXP is in (or out of, depending
4286 on IN_P) the range. Return 0 if the test couldn't be created. */
4288 tree
4289 build_range_check (location_t loc, tree type, tree exp, int in_p,
4290 tree low, tree high)
4292 tree etype = TREE_TYPE (exp), value;
4294 #ifdef HAVE_canonicalize_funcptr_for_compare
4295 /* Disable this optimization for function pointer expressions
4296 on targets that require function pointer canonicalization. */
4297 if (HAVE_canonicalize_funcptr_for_compare
4298 && TREE_CODE (etype) == POINTER_TYPE
4299 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4300 return NULL_TREE;
4301 #endif
4303 if (! in_p)
4305 value = build_range_check (loc, type, exp, 1, low, high);
4306 if (value != 0)
4307 return invert_truthvalue_loc (loc, value);
4309 return 0;
4312 if (low == 0 && high == 0)
4313 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4315 if (low == 0)
4316 return fold_build2_loc (loc, LE_EXPR, type, exp,
4317 fold_convert_loc (loc, etype, high));
4319 if (high == 0)
4320 return fold_build2_loc (loc, GE_EXPR, type, exp,
4321 fold_convert_loc (loc, etype, low));
4323 if (operand_equal_p (low, high, 0))
4324 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4325 fold_convert_loc (loc, etype, low));
4327 if (integer_zerop (low))
4329 if (! TYPE_UNSIGNED (etype))
4331 etype = unsigned_type_for (etype);
4332 high = fold_convert_loc (loc, etype, high);
4333 exp = fold_convert_loc (loc, etype, exp);
4335 return build_range_check (loc, type, exp, 1, 0, high);
4338 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4339 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4341 unsigned HOST_WIDE_INT lo;
4342 HOST_WIDE_INT hi;
4343 int prec;
4345 prec = TYPE_PRECISION (etype);
4346 if (prec <= HOST_BITS_PER_WIDE_INT)
4348 hi = 0;
4349 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4351 else
4353 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4354 lo = HOST_WIDE_INT_M1U;
4357 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4359 if (TYPE_UNSIGNED (etype))
4361 tree signed_etype = signed_type_for (etype);
4362 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4363 etype
4364 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4365 else
4366 etype = signed_etype;
4367 exp = fold_convert_loc (loc, etype, exp);
4369 return fold_build2_loc (loc, GT_EXPR, type, exp,
4370 build_int_cst (etype, 0));
4374 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4375 This requires wrap-around arithmetic for the type of the expression.
4376 First make sure that arithmetic in this type is valid, then make sure
4377 that it wraps around. */
4378 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4379 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4380 TYPE_UNSIGNED (etype));
4382 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4384 tree utype, minv, maxv;
4386 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4387 for the type in question, as we rely on this here. */
4388 utype = unsigned_type_for (etype);
4389 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4390 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4391 integer_one_node, 1);
4392 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4394 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4395 minv, 1, maxv, 1)))
4396 etype = utype;
4397 else
4398 return 0;
4401 high = fold_convert_loc (loc, etype, high);
4402 low = fold_convert_loc (loc, etype, low);
4403 exp = fold_convert_loc (loc, etype, exp);
4405 value = const_binop (MINUS_EXPR, high, low);
4408 if (POINTER_TYPE_P (etype))
4410 if (value != 0 && !TREE_OVERFLOW (value))
4412 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4413 return build_range_check (loc, type,
4414 fold_build_pointer_plus_loc (loc, exp, low),
4415 1, build_int_cst (etype, 0), value);
4417 return 0;
4420 if (value != 0 && !TREE_OVERFLOW (value))
4421 return build_range_check (loc, type,
4422 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4423 1, build_int_cst (etype, 0), value);
4425 return 0;
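/* Editor's check of the special case above: (c >= 1 && c <= 127)
   agrees with (signed char) c > 0 for every 8-bit input.  Assumes an
   8-bit signed char with the usual two's-complement conversion.  */
static int
signed_char_trick_holds (void)
{
  unsigned int c;
  for (c = 0; c <= 255; c++)
    if ((c >= 1 && c <= 127) != ((signed char) c > 0))
      return 0;
  return 1;
}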
4428 /* Return the predecessor of VAL in its type, handling the infinite case. */
4430 static tree
4431 range_predecessor (tree val)
4433 tree type = TREE_TYPE (val);
4435 if (INTEGRAL_TYPE_P (type)
4436 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4437 return 0;
4438 else
4439 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4442 /* Return the successor of VAL in its type, handling the infinite case. */
4444 static tree
4445 range_successor (tree val)
4447 tree type = TREE_TYPE (val);
4449 if (INTEGRAL_TYPE_P (type)
4450 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4451 return 0;
4452 else
4453 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4456 /* Given two ranges, see if we can merge them into one. Return 1 if we
4457 can, 0 if we can't. Set the output range into the specified parameters. */
4459 bool
4460 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4461 tree high0, int in1_p, tree low1, tree high1)
4463 int no_overlap;
4464 int subset;
4465 int temp;
4466 tree tem;
4467 int in_p;
4468 tree low, high;
4469 int lowequal = ((low0 == 0 && low1 == 0)
4470 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4471 low0, 0, low1, 0)));
4472 int highequal = ((high0 == 0 && high1 == 0)
4473 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4474 high0, 1, high1, 1)));
4476 /* Make range 0 be the range that starts first, or ends last if they
4477 start at the same value. Swap them if it isn't. */
4478 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4479 low0, 0, low1, 0))
4480 || (lowequal
4481 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4482 high1, 1, high0, 1))))
4484 temp = in0_p, in0_p = in1_p, in1_p = temp;
4485 tem = low0, low0 = low1, low1 = tem;
4486 tem = high0, high0 = high1, high1 = tem;
4489 /* Now flag two cases, whether the ranges are disjoint or whether the
4490 second range is totally subsumed in the first. Note that the tests
4491 below are simplified by the ones above. */
4492 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4493 high0, 1, low1, 0));
4494 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4495 high1, 1, high0, 1));
4497 /* We now have four cases, depending on whether we are including or
4498 excluding the two ranges. */
4499 if (in0_p && in1_p)
4501 /* If they don't overlap, the result is false. If the second range
4502 is a subset it is the result. Otherwise, the range is from the start
4503 of the second to the end of the first. */
4504 if (no_overlap)
4505 in_p = 0, low = high = 0;
4506 else if (subset)
4507 in_p = 1, low = low1, high = high1;
4508 else
4509 in_p = 1, low = low1, high = high0;
4512 else if (in0_p && ! in1_p)
4514 /* If they don't overlap, the result is the first range. If they are
4515 equal, the result is false. If the second range is a subset of the
4516 first, and the ranges begin at the same place, we go from just after
4517 the end of the second range to the end of the first. If the second
4518 range is not a subset of the first, or if it is a subset and both
4519 ranges end at the same place, the range starts at the start of the
4520 first range and ends just before the second range.
4521 Otherwise, we can't describe this as a single range. */
4522 if (no_overlap)
4523 in_p = 1, low = low0, high = high0;
4524 else if (lowequal && highequal)
4525 in_p = 0, low = high = 0;
4526 else if (subset && lowequal)
4528 low = range_successor (high1);
4529 high = high0;
4530 in_p = 1;
4531 if (low == 0)
4533 /* We are in the weird situation where high0 > high1 but
4534 high1 has no successor. Punt. */
4535 return 0;
4538 else if (! subset || highequal)
4540 low = low0;
4541 high = range_predecessor (low1);
4542 in_p = 1;
4543 if (high == 0)
4545 /* low0 < low1 but low1 has no predecessor. Punt. */
4546 return 0;
4549 else
4550 return 0;
4553 else if (! in0_p && in1_p)
4555 /* If they don't overlap, the result is the second range. If the second
4556 is a subset of the first, the result is false. Otherwise,
4557 the range starts just after the first range and ends at the
4558 end of the second. */
4559 if (no_overlap)
4560 in_p = 1, low = low1, high = high1;
4561 else if (subset || highequal)
4562 in_p = 0, low = high = 0;
4563 else
4565 low = range_successor (high0);
4566 high = high1;
4567 in_p = 1;
4568 if (low == 0)
4570 /* high1 > high0 but high0 has no successor. Punt. */
4571 return 0;
4576 else
4578 /* The case where we are excluding both ranges. Here the complex case
4579 is if they don't overlap. In that case, the only time we have a
4580 range is if they are adjacent. If the second is a subset of the
4581 first, the result is the first. Otherwise, the range to exclude
4582 starts at the beginning of the first range and ends at the end of the
4583 second. */
4584 if (no_overlap)
4586 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4587 range_successor (high0),
4588 1, low1, 0)))
4589 in_p = 0, low = low0, high = high1;
4590 else
4592 /* Canonicalize - [min, x] into - [-, x]. */
4593 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4594 switch (TREE_CODE (TREE_TYPE (low0)))
4596 case ENUMERAL_TYPE:
4597 if (TYPE_PRECISION (TREE_TYPE (low0))
4598 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4599 break;
4600 /* FALLTHROUGH */
4601 case INTEGER_TYPE:
4602 if (tree_int_cst_equal (low0,
4603 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4604 low0 = 0;
4605 break;
4606 case POINTER_TYPE:
4607 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4608 && integer_zerop (low0))
4609 low0 = 0;
4610 break;
4611 default:
4612 break;
4615 /* Canonicalize - [x, max] into - [x, -]. */
4616 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4617 switch (TREE_CODE (TREE_TYPE (high1)))
4619 case ENUMERAL_TYPE:
4620 if (TYPE_PRECISION (TREE_TYPE (high1))
4621 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4622 break;
4623 /* FALLTHROUGH */
4624 case INTEGER_TYPE:
4625 if (tree_int_cst_equal (high1,
4626 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4627 high1 = 0;
4628 break;
4629 case POINTER_TYPE:
4630 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4631 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4632 high1, 1,
4633 integer_one_node, 1)))
4634 high1 = 0;
4635 break;
4636 default:
4637 break;
4640 /* The ranges might be also adjacent between the maximum and
4641 minimum values of the given type. For
4642 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4643 return + [x + 1, y - 1]. */
4644 if (low0 == 0 && high1 == 0)
4646 low = range_successor (high0);
4647 high = range_predecessor (low1);
4648 if (low == 0 || high == 0)
4649 return 0;
4651 in_p = 1;
4653 else
4654 return 0;
4657 else if (subset)
4658 in_p = 0, low = low0, high = high0;
4659 else
4660 in_p = 0, low = low0, high = high1;
4663 *pin_p = in_p, *plow = low, *phigh = high;
4664 return 1;
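/* Editor's sketch: the in0_p && in1_p case above for plain finite int
   bounds.  After swapping so that range 0 starts first, the
   intersection is empty when the ranges are disjoint and
   [lo1, min (hi0, hi1)] otherwise.  */
static int
intersect_ranges (int lo0, int hi0, int lo1, int hi1, int *lo, int *hi)
{
  int t;
  if (lo0 > lo1)	/* make range 0 the one that starts first */
    {
      t = lo0, lo0 = lo1, lo1 = t;
      t = hi0, hi0 = hi1, hi1 = t;
    }
  if (hi0 < lo1)
    return 0;		/* disjoint: the intersection is empty */
  *lo = lo1;
  *hi = hi0 < hi1 ? hi0 : hi1;
  return 1;
}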
4668 /* Subroutine of fold, looking inside expressions of the form
4669 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4670 of the COND_EXPR. This function is being used also to optimize
4671 A op B ? C : A, by reversing the comparison first.
4673 Return a folded expression whose code is not a COND_EXPR
4674 anymore, or NULL_TREE if no folding opportunity is found. */
4676 static tree
4677 fold_cond_expr_with_comparison (location_t loc, tree type,
4678 tree arg0, tree arg1, tree arg2)
4680 enum tree_code comp_code = TREE_CODE (arg0);
4681 tree arg00 = TREE_OPERAND (arg0, 0);
4682 tree arg01 = TREE_OPERAND (arg0, 1);
4683 tree arg1_type = TREE_TYPE (arg1);
4684 tree tem;
4686 STRIP_NOPS (arg1);
4687 STRIP_NOPS (arg2);
4689 /* If we have A op 0 ? A : -A, consider applying the following
4690 transformations:
4692 A == 0? A : -A same as -A
4693 A != 0? A : -A same as A
4694 A >= 0? A : -A same as abs (A)
4695 A > 0? A : -A same as abs (A)
4696 A <= 0? A : -A same as -abs (A)
4697 A < 0? A : -A same as -abs (A)
4699 None of these transformations work for modes with signed
4700 zeros. If A is +/-0, the first two transformations will
4701 change the sign of the result (from +0 to -0, or vice
4702 versa). The last four will fix the sign of the result,
4703 even though the original expressions could be positive or
4704 negative, depending on the sign of A.
4706 Note that all these transformations are correct if A is
4707 NaN, since the two alternatives (A and -A) are also NaNs. */
4708 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4709 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4710 ? real_zerop (arg01)
4711 : integer_zerop (arg01))
4712 && ((TREE_CODE (arg2) == NEGATE_EXPR
4713 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4714 /* In the case that A is of the form X-Y, '-A' (arg2) may
4715 have already been folded to Y-X, check for that. */
4716 || (TREE_CODE (arg1) == MINUS_EXPR
4717 && TREE_CODE (arg2) == MINUS_EXPR
4718 && operand_equal_p (TREE_OPERAND (arg1, 0),
4719 TREE_OPERAND (arg2, 1), 0)
4720 && operand_equal_p (TREE_OPERAND (arg1, 1),
4721 TREE_OPERAND (arg2, 0), 0))))
4722 switch (comp_code)
4724 case EQ_EXPR:
4725 case UNEQ_EXPR:
4726 tem = fold_convert_loc (loc, arg1_type, arg1);
4727 return pedantic_non_lvalue_loc (loc,
4728 fold_convert_loc (loc, type,
4729 negate_expr (tem)));
4730 case NE_EXPR:
4731 case LTGT_EXPR:
4732 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4733 case UNGE_EXPR:
4734 case UNGT_EXPR:
4735 if (flag_trapping_math)
4736 break;
4737 /* Fall through. */
4738 case GE_EXPR:
4739 case GT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert_loc (loc, signed_type_for
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4745 case UNLE_EXPR:
4746 case UNLT_EXPR:
4747 if (flag_trapping_math)
4748 break;
4749 case LE_EXPR:
4750 case LT_EXPR:
4751 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4752 arg1 = fold_convert_loc (loc, signed_type_for
4753 (TREE_TYPE (arg1)), arg1);
4754 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4755 return negate_expr (fold_convert_loc (loc, type, tem));
4756 default:
4757 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4758 break;
4761 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4762 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4763 both transformations are correct when A is NaN: A != 0
4764 is then true, and A == 0 is false. */
4766 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4767 && integer_zerop (arg01) && integer_zerop (arg2))
4769 if (comp_code == NE_EXPR)
4770 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4771 else if (comp_code == EQ_EXPR)
4772 return build_zero_cst (type);
4775 /* Try some transformations of A op B ? A : B.
4777 A == B? A : B same as B
4778 A != B? A : B same as A
4779 A >= B? A : B same as max (A, B)
4780 A > B? A : B same as max (B, A)
4781 A <= B? A : B same as min (A, B)
4782 A < B? A : B same as min (B, A)
4784 As above, these transformations don't work in the presence
4785 of signed zeros. For example, if A and B are zeros of
4786 opposite sign, the first two transformations will change
4787 the sign of the result. In the last four, the original
4788 expressions give different results for (A=+0, B=-0) and
4789 (A=-0, B=+0), but the transformed expressions do not.
4791 The first two transformations are correct if either A or B
4792 is a NaN. In the first transformation, the condition will
4793 be false, and B will indeed be chosen. In the case of the
4794 second transformation, the condition A != B will be true,
4795 and A will be chosen.
4797 The conversions to max() and min() are not correct if B is
4798 a number and A is not. The conditions in the original
4799 expressions will be false, so all four give B. The min()
4800 and max() versions would give a NaN instead. */
4801 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4802 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4803 /* Avoid these transformations if the COND_EXPR may be used
4804 as an lvalue in the C++ front-end. PR c++/19199. */
4805 && (in_gimple_form
4806 || VECTOR_TYPE_P (type)
4807 || (strcmp (lang_hooks.name, "GNU C++") != 0
4808 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4809 || ! maybe_lvalue_p (arg1)
4810 || ! maybe_lvalue_p (arg2)))
4812 tree comp_op0 = arg00;
4813 tree comp_op1 = arg01;
4814 tree comp_type = TREE_TYPE (comp_op0);
4816 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4817 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4819 comp_type = type;
4820 comp_op0 = arg1;
4821 comp_op1 = arg2;
4824 switch (comp_code)
4826 case EQ_EXPR:
4827 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4828 case NE_EXPR:
4829 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4830 case LE_EXPR:
4831 case LT_EXPR:
4832 case UNLE_EXPR:
4833 case UNLT_EXPR:
4834 /* In C++ a ?: expression can be an lvalue, so put the
4835 operand which will be used if they are equal first
4836 so that we can convert this back to the
4837 corresponding COND_EXPR. */
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4841 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4842 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4843 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4845 comp_op1, comp_op0);
4846 return pedantic_non_lvalue_loc (loc,
4847 fold_convert_loc (loc, type, tem));
4849 break;
4850 case GE_EXPR:
4851 case GT_EXPR:
4852 case UNGE_EXPR:
4853 case UNGT_EXPR:
4854 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4856 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4857 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4858 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4859 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4860 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4861 comp_op1, comp_op0);
4862 return pedantic_non_lvalue_loc (loc,
4863 fold_convert_loc (loc, type, tem));
4865 break;
4866 case UNEQ_EXPR:
4867 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4868 return pedantic_non_lvalue_loc (loc,
4869 fold_convert_loc (loc, type, arg2));
4870 break;
4871 case LTGT_EXPR:
4872 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4873 return pedantic_non_lvalue_loc (loc,
4874 fold_convert_loc (loc, type, arg1));
4875 break;
4876 default:
4877 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4878 break;
4882 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4883 we might still be able to simplify this. For example,
4884 if C1 is one less or one more than C2, this might have started
4885 out as a MIN or MAX and been transformed by this function.
4886 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4888 if (INTEGRAL_TYPE_P (type)
4889 && TREE_CODE (arg01) == INTEGER_CST
4890 && TREE_CODE (arg2) == INTEGER_CST)
4891 switch (comp_code)
4893 case EQ_EXPR:
4894 if (TREE_CODE (arg1) == INTEGER_CST)
4895 break;
4896 /* We can replace A with C1 in this case. */
4897 arg1 = fold_convert_loc (loc, type, arg01);
4898 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4900 case LT_EXPR:
4901 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4902 MIN_EXPR, to preserve the signedness of the comparison. */
4903 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4904 OEP_ONLY_CONST)
4905 && operand_equal_p (arg01,
4906 const_binop (PLUS_EXPR, arg2,
4907 build_int_cst (type, 1)),
4908 OEP_ONLY_CONST))
4910 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4911 fold_convert_loc (loc, TREE_TYPE (arg00),
4912 arg2));
4913 return pedantic_non_lvalue_loc (loc,
4914 fold_convert_loc (loc, type, tem));
4916 break;
4918 case LE_EXPR:
4919 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4920 as above. */
4921 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4922 OEP_ONLY_CONST)
4923 && operand_equal_p (arg01,
4924 const_binop (MINUS_EXPR, arg2,
4925 build_int_cst (type, 1)),
4926 OEP_ONLY_CONST))
4928 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4929 fold_convert_loc (loc, TREE_TYPE (arg00),
4930 arg2));
4931 return pedantic_non_lvalue_loc (loc,
4932 fold_convert_loc (loc, type, tem));
4934 break;
4936 case GT_EXPR:
4937 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4938 MAX_EXPR, to preserve the signedness of the comparison. */
4939 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4940 OEP_ONLY_CONST)
4941 && operand_equal_p (arg01,
4942 const_binop (MINUS_EXPR, arg2,
4943 build_int_cst (type, 1)),
4944 OEP_ONLY_CONST))
4946 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4947 fold_convert_loc (loc, TREE_TYPE (arg00),
4948 arg2));
4949 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4951 break;
4953 case GE_EXPR:
4954 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4955 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4956 OEP_ONLY_CONST)
4957 && operand_equal_p (arg01,
4958 const_binop (PLUS_EXPR, arg2,
4959 build_int_cst (type, 1)),
4960 OEP_ONLY_CONST))
4962 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4963 fold_convert_loc (loc, TREE_TYPE (arg00),
4964 arg2));
4965 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4967 break;
4968 case NE_EXPR:
4969 break;
4970 default:
4971 gcc_unreachable ();
4974 return NULL_TREE;
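/* Editor's note: the A >= 0 ? A : -A => abs (A) rewrite above, in
   plain C for integers.  For floats it is fenced off when signed
   zeros are honored: with A == -0.0 the condition is true and the
   conditional yields -0.0, whereas fabs would yield +0.0.  */
static int
abs_via_cond (int a)
{
  return a >= 0 ? a : -a;	/* what abs (a) computes (a > INT_MIN) */
}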
4979 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4980 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4981 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4982 false) >= 2)
4983 #endif
4985 /* EXP is some logical combination of boolean tests. See if we can
4986 merge it into some range test. Return the new tree if so. */
4988 static tree
4989 fold_range_test (location_t loc, enum tree_code code, tree type,
4990 tree op0, tree op1)
4992 int or_op = (code == TRUTH_ORIF_EXPR
4993 || code == TRUTH_OR_EXPR);
4994 int in0_p, in1_p, in_p;
4995 tree low0, low1, low, high0, high1, high;
4996 bool strict_overflow_p = false;
4997 tree tem, lhs, rhs;
4998 const char * const warnmsg = G_("assuming signed overflow does not occur "
4999 "when simplifying range test");
5001 if (!INTEGRAL_TYPE_P (type))
5002 return 0;
5004 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5005 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5007 /* If this is an OR operation, invert both sides; we will invert
5008 again at the end. */
5009 if (or_op)
5010 in0_p = ! in0_p, in1_p = ! in1_p;
5012 /* If both expressions are the same, if we can merge the ranges, and we
5013 can build the range test, return it or it inverted. If one of the
5014 ranges is always true or always false, consider it to be the same
5015 expression as the other. */
5016 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5017 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5018 in1_p, low1, high1)
5019 && 0 != (tem = (build_range_check (loc, type,
5020 lhs != 0 ? lhs
5021 : rhs != 0 ? rhs : integer_zero_node,
5022 in_p, low, high))))
5024 if (strict_overflow_p)
5025 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5026 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5029 /* On machines where the branch cost is expensive, if this is a
5030 short-circuited branch and the underlying object on both sides
5031 is the same, make a non-short-circuit operation. */
5032 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5033 && lhs != 0 && rhs != 0
5034 && (code == TRUTH_ANDIF_EXPR
5035 || code == TRUTH_ORIF_EXPR)
5036 && operand_equal_p (lhs, rhs, 0))
5038 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5039 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5040 which cases we can't do this. */
5041 if (simple_operand_p (lhs))
5042 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5043 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5044 type, op0, op1);
5046 else if (!lang_hooks.decls.global_bindings_p ()
5047 && !CONTAINS_PLACEHOLDER_P (lhs))
5049 tree common = save_expr (lhs);
5051 if (0 != (lhs = build_range_check (loc, type, common,
5052 or_op ? ! in0_p : in0_p,
5053 low0, high0))
5054 && (0 != (rhs = build_range_check (loc, type, common,
5055 or_op ? ! in1_p : in1_p,
5056 low1, high1))))
5058 if (strict_overflow_p)
5059 fold_overflow_warning (warnmsg,
5060 WARN_STRICT_OVERFLOW_COMPARISON);
5061 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5062 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5063 type, lhs, rhs);
5068 return 0;
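/* Editor's sketch of the non-short-circuit rewrite above: when both
   operands are simple (no side effects, cannot trap), the
   short-circuit && can be evaluated as a bitwise AND of the two
   0/1-valued comparisons, trading a branch for an AND.  */
static int
branchless_digit_test (int ch)
{
  /* ch >= '0' && ch <= '9', with both halves always evaluated.  */
  return (ch >= '0') & (ch <= '9');
}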
5071 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5072 bit value. Arrange things so the extra bits will be set to zero if and
5073 only if C is sign-extended to its full width. If MASK is nonzero,
5074 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5076 static tree
5077 unextend (tree c, int p, int unsignedp, tree mask)
5079 tree type = TREE_TYPE (c);
5080 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5081 tree temp;
5083 if (p == modesize || unsignedp)
5084 return c;
5086 /* We work by getting just the sign bit into the low-order bit, then
5087 into the high-order bit, then sign-extend. We then XOR that value
5088 with C. */
5089 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5090 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5092 /* We must use a signed type in order to get an arithmetic right shift.
5093 However, we must also avoid introducing accidental overflows, so that
5094 a subsequent call to integer_zerop will work. Hence we must
5095 do the type conversion here. At this point, the constant is either
5096 zero or one, and the conversion to a signed type can never overflow.
5097 We could get an overflow if this conversion is done anywhere else. */
5098 if (TYPE_UNSIGNED (type))
5099 temp = fold_convert (signed_type_for (type), temp);
5101 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5102 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5103 if (mask != 0)
5104 temp = const_binop (BIT_AND_EXPR, temp,
5105 fold_convert (TREE_TYPE (c), mask));
5106 /* If necessary, convert the type back to match the type of C. */
5107 if (TYPE_UNSIGNED (type))
5108 temp = fold_convert (type, temp);
5110 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
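/* Editor's sketch: the sign-bit juggling above is a relative of the
   classic XOR trick for sign-extending a P-bit field, shown here on a
   32-bit unsigned word.  Assumes 1 <= p <= 32 and two's complement.  */
static int
sign_extend_from (unsigned int v, int p)
{
  unsigned int m = 1u << (p - 1);	/* sign bit of the field */
  if (p < 32)
    v &= (1u << p) - 1;			/* keep only the low P bits */
  return (int) ((v ^ m) - m);		/* copies bit P-1 upward */
}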
5113 /* For an expression that has the form
5114 (A && B) || ~B
5115 or
5116 (A || B) && ~B,
5117 we can drop one of the inner expressions and simplify to
5118 A || ~B
5119 or
5120 A && ~B
5121 LOC is the location of the resulting expression. OP is the inner
5122 logical operation; the left-hand side in the examples above, while CMPOP
5123 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5124 removing a condition that guards another, as in
5125 (A != NULL && A->...) || A == NULL
5126 which we must not transform. If RHS_ONLY is true, only eliminate the
5127 right-most operand of the inner logical operation. */
5129 static tree
5130 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5131 bool rhs_only)
5133 tree type = TREE_TYPE (cmpop);
5134 enum tree_code code = TREE_CODE (cmpop);
5135 enum tree_code truthop_code = TREE_CODE (op);
5136 tree lhs = TREE_OPERAND (op, 0);
5137 tree rhs = TREE_OPERAND (op, 1);
5138 tree orig_lhs = lhs, orig_rhs = rhs;
5139 enum tree_code rhs_code = TREE_CODE (rhs);
5140 enum tree_code lhs_code = TREE_CODE (lhs);
5141 enum tree_code inv_code;
5143 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5144 return NULL_TREE;
5146 if (TREE_CODE_CLASS (code) != tcc_comparison)
5147 return NULL_TREE;
5149 if (rhs_code == truthop_code)
5151 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5152 if (newrhs != NULL_TREE)
5154 rhs = newrhs;
5155 rhs_code = TREE_CODE (rhs);
5158 if (lhs_code == truthop_code && !rhs_only)
5160 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5161 if (newlhs != NULL_TREE)
5163 lhs = newlhs;
5164 lhs_code = TREE_CODE (lhs);
5168 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5169 if (inv_code == rhs_code
5170 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5171 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5172 return lhs;
5173 if (!rhs_only && inv_code == lhs_code
5174 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5175 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5176 return rhs;
5177 if (rhs != orig_rhs || lhs != orig_lhs)
5178 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5179 lhs, rhs);
5180 return NULL_TREE;
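/* Editor's check: exhaustive truth table for the simplification
   described above, (A && B) || ~B == A || ~B and its dual
   (A || B) && ~B == A && ~B; over 0/1 values the logical ~ is
   written ! below.  */
static int
opposite_arm_identities_hold (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	if (((a && b) || !b) != (a || !b))
	  return 0;
	if (((a || b) && !b) != (a && !b))
	  return 0;
      }
  return 1;
}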
5183 /* Find ways of folding logical expressions of LHS and RHS:
5184 Try to merge two comparisons to the same innermost item.
5185 Look for range tests like "ch >= '0' && ch <= '9'".
5186 Look for combinations of simple terms on machines with expensive branches
5187 and evaluate the RHS unconditionally.
5189 For example, if we have p->a == 2 && p->b == 4 and we can make an
5190 object large enough to span both A and B, we can do this with a comparison
5191 against the object ANDed with a mask.
5193 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5194 operations to do this with one comparison.
5196 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5197 function and the one above.
5199 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5200 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5202 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5203 two operands.
5205 We return the simplified tree or 0 if no optimization is possible. */
5207 static tree
5208 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5209 tree lhs, tree rhs)
5211 /* If this is the "or" of two comparisons, we can do something if
5212 the comparisons are NE_EXPR. If this is the "and", we can do something
5213 if the comparisons are EQ_EXPR. I.e.,
5214 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5216 WANTED_CODE is this operation code. For single bit fields, we can
5217 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5218 comparison for one-bit fields. */
5220 enum tree_code wanted_code;
5221 enum tree_code lcode, rcode;
5222 tree ll_arg, lr_arg, rl_arg, rr_arg;
5223 tree ll_inner, lr_inner, rl_inner, rr_inner;
5224 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5225 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5226 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5227 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5228 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5229 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5230 enum machine_mode lnmode, rnmode;
5231 tree ll_mask, lr_mask, rl_mask, rr_mask;
5232 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5233 tree l_const, r_const;
5234 tree lntype, rntype, result;
5235 HOST_WIDE_INT first_bit, end_bit;
5236 int volatilep;
5238 /* Start by getting the comparison codes. Fail if anything is volatile.
5239 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5240 it were surrounded with a NE_EXPR. */
5242 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5243 return 0;
5245 lcode = TREE_CODE (lhs);
5246 rcode = TREE_CODE (rhs);
5248 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5250 lhs = build2 (NE_EXPR, truth_type, lhs,
5251 build_int_cst (TREE_TYPE (lhs), 0));
5252 lcode = NE_EXPR;
5255 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5257 rhs = build2 (NE_EXPR, truth_type, rhs,
5258 build_int_cst (TREE_TYPE (rhs), 0));
5259 rcode = NE_EXPR;
5262 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5263 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5264 return 0;
5266 ll_arg = TREE_OPERAND (lhs, 0);
5267 lr_arg = TREE_OPERAND (lhs, 1);
5268 rl_arg = TREE_OPERAND (rhs, 0);
5269 rr_arg = TREE_OPERAND (rhs, 1);
5271 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5272 if (simple_operand_p (ll_arg)
5273 && simple_operand_p (lr_arg))
5275 if (operand_equal_p (ll_arg, rl_arg, 0)
5276 && operand_equal_p (lr_arg, rr_arg, 0))
5278 result = combine_comparisons (loc, code, lcode, rcode,
5279 truth_type, ll_arg, lr_arg);
5280 if (result)
5281 return result;
5283 else if (operand_equal_p (ll_arg, rr_arg, 0)
5284 && operand_equal_p (lr_arg, rl_arg, 0))
5286 result = combine_comparisons (loc, code, lcode,
5287 swap_tree_comparison (rcode),
5288 truth_type, ll_arg, lr_arg);
5289 if (result)
5290 return result;
5294 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5295 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5297 /* If the RHS can be evaluated unconditionally and its operands are
5298 simple, it wins to evaluate the RHS unconditionally on machines
5299 with expensive branches. In this case, this isn't a comparison
5300 that can be merged. */
5302 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5303 false) >= 2
5304 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5305 && simple_operand_p (rl_arg)
5306 && simple_operand_p (rr_arg))
5308 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5309 if (code == TRUTH_OR_EXPR
5310 && lcode == NE_EXPR && integer_zerop (lr_arg)
5311 && rcode == NE_EXPR && integer_zerop (rr_arg)
5312 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5313 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5314 return build2_loc (loc, NE_EXPR, truth_type,
5315 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5316 ll_arg, rl_arg),
5317 build_int_cst (TREE_TYPE (ll_arg), 0));
5319 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5320 if (code == TRUTH_AND_EXPR
5321 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5322 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5323 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5324 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5325 return build2_loc (loc, EQ_EXPR, truth_type,
5326 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5327 ll_arg, rl_arg),
5328 build_int_cst (TREE_TYPE (ll_arg), 0));
5331 /* See if the comparisons can be merged. Then get all the parameters for
5332 each side. */
5334 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5335 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5336 return 0;
5338 volatilep = 0;
5339 ll_inner = decode_field_reference (loc, ll_arg,
5340 &ll_bitsize, &ll_bitpos, &ll_mode,
5341 &ll_unsignedp, &volatilep, &ll_mask,
5342 &ll_and_mask);
5343 lr_inner = decode_field_reference (loc, lr_arg,
5344 &lr_bitsize, &lr_bitpos, &lr_mode,
5345 &lr_unsignedp, &volatilep, &lr_mask,
5346 &lr_and_mask);
5347 rl_inner = decode_field_reference (loc, rl_arg,
5348 &rl_bitsize, &rl_bitpos, &rl_mode,
5349 &rl_unsignedp, &volatilep, &rl_mask,
5350 &rl_and_mask);
5351 rr_inner = decode_field_reference (loc, rr_arg,
5352 &rr_bitsize, &rr_bitpos, &rr_mode,
5353 &rr_unsignedp, &volatilep, &rr_mask,
5354 &rr_and_mask);
5356 /* The inner operation on the lhs of each comparison must be the same
5357 if we are to be able to do anything.
5358 Then see if we have constants. If not, the same must be true for
5359 the rhs's. */
5360 if (volatilep || ll_inner == 0 || rl_inner == 0
5361 || ! operand_equal_p (ll_inner, rl_inner, 0))
5362 return 0;
5364 if (TREE_CODE (lr_arg) == INTEGER_CST
5365 && TREE_CODE (rr_arg) == INTEGER_CST)
5366 l_const = lr_arg, r_const = rr_arg;
5367 else if (lr_inner == 0 || rr_inner == 0
5368 || ! operand_equal_p (lr_inner, rr_inner, 0))
5369 return 0;
5370 else
5371 l_const = r_const = 0;
5373 /* If either comparison code is not correct for our logical operation,
5374 fail. However, we can convert a one-bit comparison against zero into
5375 the opposite comparison against that bit being set in the field. */
5377 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5378 if (lcode != wanted_code)
5380 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5382 /* Make the left operand unsigned, since we are only interested
5383 in the value of one bit. Otherwise we are doing the wrong
5384 thing below. */
5385 ll_unsignedp = 1;
5386 l_const = ll_mask;
5388 else
5389 return 0;
5392 /* This is analogous to the code for l_const above. */
5393 if (rcode != wanted_code)
5395 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5397 rl_unsignedp = 1;
5398 r_const = rl_mask;
5400 else
5401 return 0;
5404 /* See if we can find a mode that contains both fields being compared on
5405 the left. If we can't, fail. Otherwise, update all constants and masks
5406 to be relative to a field of that size. */
5407 first_bit = MIN (ll_bitpos, rl_bitpos);
5408 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5409 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5410 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5411 volatilep);
5412 if (lnmode == VOIDmode)
5413 return 0;
5415 lnbitsize = GET_MODE_BITSIZE (lnmode);
5416 lnbitpos = first_bit & ~ (lnbitsize - 1);
5417 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5418 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5420 if (BYTES_BIG_ENDIAN)
5422 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5423 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5426 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5427 size_int (xll_bitpos));
5428 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5429 size_int (xrl_bitpos));
5431 if (l_const)
5433 l_const = fold_convert_loc (loc, lntype, l_const);
5434 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5435 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5436 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5437 fold_build1_loc (loc, BIT_NOT_EXPR,
5438 lntype, ll_mask))))
5440 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5442 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5445 if (r_const)
5447 r_const = fold_convert_loc (loc, lntype, r_const);
5448 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5449 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5450 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5451 fold_build1_loc (loc, BIT_NOT_EXPR,
5452 lntype, rl_mask))))
5454 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5456 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5460 /* If the right sides are not constant, do the same for it. Also,
5461 disallow this optimization if a size or signedness mismatch occurs
5462 between the left and right sides. */
5463 if (l_const == 0)
5465 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5466 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5467 /* Make sure the two fields on the right
5468 correspond to the left without being swapped. */
5469 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5470 return 0;
5472 first_bit = MIN (lr_bitpos, rr_bitpos);
5473 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5474 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5475 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5476 volatilep);
5477 if (rnmode == VOIDmode)
5478 return 0;
5480 rnbitsize = GET_MODE_BITSIZE (rnmode);
5481 rnbitpos = first_bit & ~ (rnbitsize - 1);
5482 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5483 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5485 if (BYTES_BIG_ENDIAN)
5487 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5488 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5491 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5492 rntype, lr_mask),
5493 size_int (xlr_bitpos));
5494 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5495 rntype, rr_mask),
5496 size_int (xrr_bitpos));
5498 /* Make a mask that corresponds to both fields being compared.
5499 Do this for both items being compared. If the operands are the
5500 same size and the bits being compared are in the same position
5501 then we can do this by masking both and comparing the masked
5502 results. */
5503 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5504 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5505 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5507 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5508 ll_unsignedp || rl_unsignedp);
5509 if (! all_ones_mask_p (ll_mask, lnbitsize))
5510 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5512 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5513 lr_unsignedp || rr_unsignedp);
5514 if (! all_ones_mask_p (lr_mask, rnbitsize))
5515 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5517 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5520 /* There is still another way we can do something: If both pairs of
5521 fields being compared are adjacent, we may be able to make a wider
5522 field containing them both.
5524 Note that we still must mask the lhs/rhs expressions. Furthermore,
5525 the mask must be shifted to account for the shift done by
5526 make_bit_field_ref. */
5527 if ((ll_bitsize + ll_bitpos == rl_bitpos
5528 && lr_bitsize + lr_bitpos == rr_bitpos)
5529 || (ll_bitpos == rl_bitpos + rl_bitsize
5530 && lr_bitpos == rr_bitpos + rr_bitsize))
5532 tree type;
5534 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5535 ll_bitsize + rl_bitsize,
5536 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5537 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5538 lr_bitsize + rr_bitsize,
5539 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5541 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5542 size_int (MIN (xll_bitpos, xrl_bitpos)));
5543 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5544 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5546 /* Convert to the smaller type before masking out unwanted bits. */
5547 type = lntype;
5548 if (lntype != rntype)
5550 if (lnbitsize > rnbitsize)
5552 lhs = fold_convert_loc (loc, rntype, lhs);
5553 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5554 type = rntype;
5556 else if (lnbitsize < rnbitsize)
5558 rhs = fold_convert_loc (loc, lntype, rhs);
5559 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5560 type = lntype;
5564 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5565 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5567 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5568 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5570 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5573 return 0;
5576 /* Handle the case of comparisons with constants. If there is something in
5577 common between the masks, those bits of the constants must be the same.
5578 If not, the condition is always false. Test for this to avoid generating
5579 incorrect code below. */
5580 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5581 if (! integer_zerop (result)
5582 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5583 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5585 if (wanted_code == NE_EXPR)
5587 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5588 return constant_boolean_node (true, truth_type);
5590 else
5592 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5593 return constant_boolean_node (false, truth_type);
5597 /* Construct the expression we will return. First get the component
5598 reference we will make. Unless the mask is all ones the width of
5599 that field, perform the mask operation. Then compare with the
5600 merged constant. */
5601 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5602 ll_unsignedp || rl_unsignedp);
5604 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5605 if (! all_ones_mask_p (ll_mask, lnbitsize))
5606 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5608 return build2_loc (loc, wanted_code, truth_type, result,
5609 const_binop (BIT_IOR_EXPR, l_const, r_const));
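/* A sketch, in source terms, of the merge performed above; the struct,
   the field widths, and the merged constant are hypothetical and depend
   on the target's bit-field layout.  Two adjacent bit-field tests
   collapse into one masked compare of the containing byte.  */
static int
truth_andor_merge_example (unsigned char word)
{
  /* With a little-endian layout of { unsigned a : 4; unsigned b : 4; },
     "a == 3 && b == 5" on the byte WORD becomes a single compare.  */
  return (word & 0xff) == 0x53;
}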
5612 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5613 constant. */
5615 static tree
5616 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5617 tree op0, tree op1)
5619 tree arg0 = op0;
5620 enum tree_code op_code;
5621 tree comp_const;
5622 tree minmax_const;
5623 int consts_equal, consts_lt;
5624 tree inner;
5626 STRIP_SIGN_NOPS (arg0);
5628 op_code = TREE_CODE (arg0);
5629 minmax_const = TREE_OPERAND (arg0, 1);
5630 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5631 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5632 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5633 inner = TREE_OPERAND (arg0, 0);
5635 /* If something does not permit us to optimize, return the original tree. */
5636 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5637 || TREE_CODE (comp_const) != INTEGER_CST
5638 || TREE_OVERFLOW (comp_const)
5639 || TREE_CODE (minmax_const) != INTEGER_CST
5640 || TREE_OVERFLOW (minmax_const))
5641 return NULL_TREE;
5643 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5644 and GT_EXPR, doing the rest with recursive calls using logical
5645 simplifications. */
5646 switch (code)
5648 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5650 tree tem
5651 = optimize_minmax_comparison (loc,
5652 invert_tree_comparison (code, false),
5653 type, op0, op1);
5654 if (tem)
5655 return invert_truthvalue_loc (loc, tem);
5656 return NULL_TREE;
5659 case GE_EXPR:
5660 return
5661 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5662 optimize_minmax_comparison
5663 (loc, EQ_EXPR, type, arg0, comp_const),
5664 optimize_minmax_comparison
5665 (loc, GT_EXPR, type, arg0, comp_const));
5667 case EQ_EXPR:
5668 if (op_code == MAX_EXPR && consts_equal)
5669 /* MAX (X, 0) == 0 -> X <= 0 */
5670 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5672 else if (op_code == MAX_EXPR && consts_lt)
5673 /* MAX (X, 0) == 5 -> X == 5 */
5674 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5676 else if (op_code == MAX_EXPR)
5677 /* MAX (X, 0) == -1 -> false */
5678 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5680 else if (consts_equal)
5681 /* MIN (X, 0) == 0 -> X >= 0 */
5682 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5684 else if (consts_lt)
5685 /* MIN (X, 0) == 5 -> false */
5686 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5688 else
5689 /* MIN (X, 0) == -1 -> X == -1 */
5690 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5692 case GT_EXPR:
5693 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5694 /* MAX (X, 0) > 0 -> X > 0
5695 MAX (X, 0) > 5 -> X > 5 */
5696 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5698 else if (op_code == MAX_EXPR)
5699 /* MAX (X, 0) > -1 -> true */
5700 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5702 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5703 /* MIN (X, 0) > 0 -> false
5704 MIN (X, 0) > 5 -> false */
5705 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5707 else
5708 /* MIN (X, 0) > -1 -> X > -1 */
5709 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5711 default:
5712 return NULL_TREE;
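/* A sketch of the MAX case handled above, in plain C with hypothetical
   names: both expressions agree for every X, which is what justifies
   rewriting the comparison without evaluating the MAX.  */
static int
minmax_compare_example (int x)
{
  int lhs = (x > 0 ? x : 0) == 0;   /* MAX (X, 0) == 0 */
  int rhs = x <= 0;                 /* folded form */
  return lhs == rhs;                /* always 1 */
}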
5716 /* T is an integer expression that is being multiplied, divided, or taken a
5717 modulus (CODE says which and what kind of divide or modulus) by a
5718 constant C. See if we can eliminate that operation by folding it with
5719 other operations already in T. WIDE_TYPE, if non-null, is a type that
5720 should be used for the computation if wider than our type.
5722 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5723 (X * 2) + (Y * 4). We must, however, be assured that either the original
5724 expression would not overflow or that overflow is undefined for the type
5725 in the language in question.
5727 If we return a non-null expression, it is an equivalent form of the
5728 original computation, but need not be in the original type.
5730 We set *STRICT_OVERFLOW_P to true if the return value depends on
5731 signed overflow being undefined. Otherwise we do not change
5732 *STRICT_OVERFLOW_P. */
5734 static tree
5735 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5736 bool *strict_overflow_p)
5738 /* To avoid exponential search depth, refuse to allow recursion past
5739 three levels. Beyond that (1) it's highly unlikely that we'll find
5740 something interesting and (2) we've probably processed it before
5741 when we built the inner expression. */
5743 static int depth;
5744 tree ret;
5746 if (depth > 3)
5747 return NULL;
5749 depth++;
5750 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5751 depth--;
5753 return ret;
5756 static tree
5757 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5758 bool *strict_overflow_p)
5760 tree type = TREE_TYPE (t);
5761 enum tree_code tcode = TREE_CODE (t);
5762 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5763 > GET_MODE_SIZE (TYPE_MODE (type)))
5764 ? wide_type : type);
5765 tree t1, t2;
5766 int same_p = tcode == code;
5767 tree op0 = NULL_TREE, op1 = NULL_TREE;
5768 bool sub_strict_overflow_p;
5770 /* Don't deal with constants of zero here; they confuse the code below. */
5771 if (integer_zerop (c))
5772 return NULL_TREE;
5774 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5775 op0 = TREE_OPERAND (t, 0);
5777 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5778 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5780 /* Note that we need not handle conditional operations here since fold
5781 already handles those cases. So just do arithmetic here. */
5782 switch (tcode)
5784 case INTEGER_CST:
5785 /* For a constant, we can always simplify if we are a multiply
5786 or (for divide and modulus) if it is a multiple of our constant. */
5787 if (code == MULT_EXPR
5788 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5789 return const_binop (code, fold_convert (ctype, t),
5790 fold_convert (ctype, c));
5791 break;
5793 CASE_CONVERT: case NON_LVALUE_EXPR:
5794 /* If op0 is an expression ... */
5795 if ((COMPARISON_CLASS_P (op0)
5796 || UNARY_CLASS_P (op0)
5797 || BINARY_CLASS_P (op0)
5798 || VL_EXP_CLASS_P (op0)
5799 || EXPRESSION_CLASS_P (op0))
5800 /* ... and has wrapping overflow, and its type is smaller
5801 than ctype, then we cannot pass through as widening. */
5802 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5803 && (TYPE_PRECISION (ctype)
5804 > TYPE_PRECISION (TREE_TYPE (op0))))
5805 /* ... or this is a truncation (t is narrower than op0),
5806 then we cannot pass through this narrowing. */
5807 || (TYPE_PRECISION (type)
5808 < TYPE_PRECISION (TREE_TYPE (op0)))
5809 /* ... or signedness changes for division or modulus,
5810 then we cannot pass through this conversion. */
5811 || (code != MULT_EXPR
5812 && (TYPE_UNSIGNED (ctype)
5813 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5814 /* ... or has undefined overflow while the converted to
5815 type has not, we cannot do the operation in the inner type
5816 as that would introduce undefined overflow. */
5817 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5818 && !TYPE_OVERFLOW_UNDEFINED (type))))
5819 break;
5821 /* Pass the constant down and see if we can make a simplification. If
5822 we can, replace this expression with the inner simplification for
5823 possible later conversion to our or some other type. */
5824 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5825 && TREE_CODE (t2) == INTEGER_CST
5826 && !TREE_OVERFLOW (t2)
5827 && (0 != (t1 = extract_muldiv (op0, t2, code,
5828 code == MULT_EXPR
5829 ? ctype : NULL_TREE,
5830 strict_overflow_p))))
5831 return t1;
5832 break;
5834 case ABS_EXPR:
5835 /* If widening the type changes it from signed to unsigned, then we
5836 must avoid building ABS_EXPR itself as unsigned. */
5837 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5839 tree cstype = (*signed_type_for) (ctype);
5840 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5841 != 0)
5843 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5844 return fold_convert (ctype, t1);
5846 break;
5848 /* If the constant is negative, we cannot simplify this. */
5849 if (tree_int_cst_sgn (c) == -1)
5850 break;
5851 /* FALLTHROUGH */
5852 case NEGATE_EXPR:
5853 /* For division and modulus, type can't be unsigned, as e.g.
5854 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5855 For signed types, even with wrapping overflow, this is fine. */
5856 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5857 break;
5858 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5859 != 0)
5860 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5861 break;
5863 case MIN_EXPR: case MAX_EXPR:
5864 /* If widening the type changes the signedness, then we can't perform
5865 this optimization as that changes the result. */
5866 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5867 break;
5869 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5870 sub_strict_overflow_p = false;
5871 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5872 &sub_strict_overflow_p)) != 0
5873 && (t2 = extract_muldiv (op1, c, code, wide_type,
5874 &sub_strict_overflow_p)) != 0)
5876 if (tree_int_cst_sgn (c) < 0)
5877 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5878 if (sub_strict_overflow_p)
5879 *strict_overflow_p = true;
5880 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5881 fold_convert (ctype, t2));
5883 break;
5885 case LSHIFT_EXPR: case RSHIFT_EXPR:
5886 /* If the second operand is constant, this is a multiplication
5887 or floor division, by a power of two, so we can treat it that
5888 way unless the multiplier or divisor overflows. Signed
5889 left-shift overflow is implementation-defined rather than
5890 undefined in C90, so do not convert signed left shift into
5891 multiplication. */
5892 if (TREE_CODE (op1) == INTEGER_CST
5893 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5894 /* const_binop may not detect overflow correctly,
5895 so check for it explicitly here. */
5896 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5897 && TREE_INT_CST_HIGH (op1) == 0
5898 && 0 != (t1 = fold_convert (ctype,
5899 const_binop (LSHIFT_EXPR,
5900 size_one_node,
5901 op1)))
5902 && !TREE_OVERFLOW (t1))
5903 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5904 ? MULT_EXPR : FLOOR_DIV_EXPR,
5905 ctype,
5906 fold_convert (ctype, op0),
5907 t1),
5908 c, code, wide_type, strict_overflow_p);
5909 break;
5911 case PLUS_EXPR: case MINUS_EXPR:
5912 /* See if we can eliminate the operation on both sides. If we can, we
5913 can return a new PLUS or MINUS. If we can't, the only remaining
5914 cases where we can do anything are if the second operand is a
5915 constant. */
5916 sub_strict_overflow_p = false;
5917 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5918 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5919 if (t1 != 0 && t2 != 0
5920 && (code == MULT_EXPR
5921 /* If not multiplication, we can only do this if both operands
5922 are divisible by c. */
5923 || (multiple_of_p (ctype, op0, c)
5924 && multiple_of_p (ctype, op1, c))))
5926 if (sub_strict_overflow_p)
5927 *strict_overflow_p = true;
5928 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5929 fold_convert (ctype, t2));
5932 /* If this was a subtraction, negate OP1 and set it to be an addition.
5933 This simplifies the logic below. */
5934 if (tcode == MINUS_EXPR)
5936 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5937 /* If OP1 was not easily negatable, the constant may be OP0. */
5938 if (TREE_CODE (op0) == INTEGER_CST)
5940 tree tem = op0;
5941 op0 = op1;
5942 op1 = tem;
5943 tem = t1;
5944 t1 = t2;
5945 t2 = tem;
5949 if (TREE_CODE (op1) != INTEGER_CST)
5950 break;
5952 /* If either OP1 or C are negative, this optimization is not safe for
5953 some of the division and remainder types while for others we need
5954 to change the code. */
5955 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5957 if (code == CEIL_DIV_EXPR)
5958 code = FLOOR_DIV_EXPR;
5959 else if (code == FLOOR_DIV_EXPR)
5960 code = CEIL_DIV_EXPR;
5961 else if (code != MULT_EXPR
5962 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5963 break;
5966 /* If it's a multiply or a division/modulus operation of a multiple
5967 of our constant, do the operation and verify it doesn't overflow. */
5968 if (code == MULT_EXPR
5969 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5971 op1 = const_binop (code, fold_convert (ctype, op1),
5972 fold_convert (ctype, c));
5973 /* We allow the constant to overflow with wrapping semantics. */
5974 if (op1 == 0
5975 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5976 break;
5978 else
5979 break;
5981 /* If we have an unsigned type, we cannot widen the operation since it
5982 will change the result if the original computation overflowed. */
5983 if (TYPE_UNSIGNED (ctype) && ctype != type)
5984 break;
5986 /* If we were able to eliminate our operation from the first side,
5987 apply our operation to the second side and reform the PLUS. */
5988 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5989 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5991 /* The last case is if we are a multiply. In that case, we can
5992 apply the distributive law to commute the multiply and addition
5993 if the multiplication of the constants doesn't overflow
5994 and overflow is defined. With undefined overflow
5995 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5996 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5997 return fold_build2 (tcode, ctype,
5998 fold_build2 (code, ctype,
5999 fold_convert (ctype, op0),
6000 fold_convert (ctype, c)),
6001 op1);
6003 break;
6005 case MULT_EXPR:
6006 /* We have a special case here if we are doing something like
6007 (C * 8) % 4 since we know that's zero. */
6008 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6009 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6010 /* If the multiplication can overflow we cannot optimize this. */
6011 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6012 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6013 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6015 *strict_overflow_p = true;
6016 return omit_one_operand (type, integer_zero_node, op0);
6019 /* ... fall through ... */
6021 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6022 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6023 /* If we can extract our operation from the LHS, do so and return a
6024 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6025 do something only if the second operand is a constant. */
6026 if (same_p
6027 && (t1 = extract_muldiv (op0, c, code, wide_type,
6028 strict_overflow_p)) != 0)
6029 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6030 fold_convert (ctype, op1));
6031 else if (tcode == MULT_EXPR && code == MULT_EXPR
6032 && (t1 = extract_muldiv (op1, c, code, wide_type,
6033 strict_overflow_p)) != 0)
6034 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6035 fold_convert (ctype, t1));
6036 else if (TREE_CODE (op1) != INTEGER_CST)
6037 return 0;
6039 /* If these are the same operation types, we can associate them
6040 assuming no overflow. */
6041 if (tcode == code)
6043 double_int mul;
6044 bool overflow_p;
6045 unsigned prec = TYPE_PRECISION (ctype);
6046 bool uns = TYPE_UNSIGNED (ctype);
6047 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6048 double_int dic = tree_to_double_int (c).ext (prec, uns);
6049 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6050 overflow_p = ((!uns && overflow_p)
6051 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6052 if (!double_int_fits_to_tree_p (ctype, mul)
6053 && ((uns && tcode != MULT_EXPR) || !uns))
6054 overflow_p = 1;
6055 if (!overflow_p)
6056 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6057 double_int_to_tree (ctype, mul));
6060 /* If these operations "cancel" each other, we have the main
6061 optimizations of this pass, which occur when either constant is a
6062 multiple of the other, in which case we replace this with either an
6063 operation of CODE or TCODE.
6065 If we have an unsigned type, we cannot do this since it will change
6066 the result if the original computation overflowed. */
6067 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6068 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6069 || (tcode == MULT_EXPR
6070 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6071 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6072 && code != MULT_EXPR)))
6074 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6076 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6077 *strict_overflow_p = true;
6078 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6079 fold_convert (ctype,
6080 const_binop (TRUNC_DIV_EXPR,
6081 op1, c)));
6083 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6085 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6086 *strict_overflow_p = true;
6087 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6088 fold_convert (ctype,
6089 const_binop (TRUNC_DIV_EXPR,
6090 c, op1)));
6093 break;
6095 default:
6096 break;
6099 return 0;
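/* A sketch of the cancellation case above with concrete constants,
   assuming signed overflow is undefined so the rewrite is valid:
   12 is a multiple of 4, so the division folds into the multiplier.  */
static int
extract_muldiv_example (int x)
{
  return (x * 12) / 4;   /* simplifies to x * 3 */
}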
6102 /* Return a node which has the indicated constant VALUE (either 0 or
6103 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6104 and is of the indicated TYPE. */
6106 tree
6107 constant_boolean_node (bool value, tree type)
6109 if (type == integer_type_node)
6110 return value ? integer_one_node : integer_zero_node;
6111 else if (type == boolean_type_node)
6112 return value ? boolean_true_node : boolean_false_node;
6113 else if (TREE_CODE (type) == VECTOR_TYPE)
6114 return build_vector_from_val (type,
6115 build_int_cst (TREE_TYPE (type),
6116 value ? -1 : 0));
6117 else
6118 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6122 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6123 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6124 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6125 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6126 COND is the first argument to CODE; otherwise (as in the example
6127 given here), it is the second argument. TYPE is the type of the
6128 original expression. Return NULL_TREE if no simplification is
6129 possible. */
6131 static tree
6132 fold_binary_op_with_conditional_arg (location_t loc,
6133 enum tree_code code,
6134 tree type, tree op0, tree op1,
6135 tree cond, tree arg, int cond_first_p)
6137 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6138 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6139 tree test, true_value, false_value;
6140 tree lhs = NULL_TREE;
6141 tree rhs = NULL_TREE;
6142 enum tree_code cond_code = COND_EXPR;
6144 if (TREE_CODE (cond) == COND_EXPR
6145 || TREE_CODE (cond) == VEC_COND_EXPR)
6147 test = TREE_OPERAND (cond, 0);
6148 true_value = TREE_OPERAND (cond, 1);
6149 false_value = TREE_OPERAND (cond, 2);
6150 /* If this operand throws an exception, then it does not make
6151 sense to try to perform a logical or arithmetic operation
6152 involving it. */
6153 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6154 lhs = true_value;
6155 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6156 rhs = false_value;
6158 else
6160 tree testtype = TREE_TYPE (cond);
6161 test = cond;
6162 true_value = constant_boolean_node (true, testtype);
6163 false_value = constant_boolean_node (false, testtype);
6166 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6167 cond_code = VEC_COND_EXPR;
6169 /* This transformation is only worthwhile if we don't have to wrap ARG
6170 in a SAVE_EXPR and the operation can be simplified without recursing
6171 on at least one of the branches once it's pushed inside the COND_EXPR. */
6172 if (!TREE_CONSTANT (arg)
6173 && (TREE_SIDE_EFFECTS (arg)
6174 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6175 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6176 return NULL_TREE;
6178 arg = fold_convert_loc (loc, arg_type, arg);
6179 if (lhs == 0)
6181 true_value = fold_convert_loc (loc, cond_type, true_value);
6182 if (cond_first_p)
6183 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6184 else
6185 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6187 if (rhs == 0)
6189 false_value = fold_convert_loc (loc, cond_type, false_value);
6190 if (cond_first_p)
6191 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6192 else
6193 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6196 /* Check that we have simplified at least one of the branches. */
6197 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6198 return NULL_TREE;
6200 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6204 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6206 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6207 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6208 ADDEND is the same as X.
6210 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6211 and finite. The problematic cases are when X is zero, and its mode
6212 has signed zeros. In the case of rounding towards -infinity,
6213 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6214 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6216 bool
6217 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6219 if (!real_zerop (addend))
6220 return false;
6222 /* Don't allow the fold with -fsignaling-nans. */
6223 if (HONOR_SNANS (TYPE_MODE (type)))
6224 return false;
6226 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6227 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6228 return true;
6230 /* In a vector or complex, we would need to check the sign of all zeros. */
6231 if (TREE_CODE (addend) != REAL_CST)
6232 return false;
6234 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6235 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6236 negate = !negate;
6238 /* The mode has signed zeros, and we have to honor their sign.
6239 In this situation, there is only one case we can return true for.
6240 X - 0 is the same as X unless rounding towards -infinity is
6241 supported. */
6242 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
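/* A sketch of why the fold must honor signed zeros: under the default
   round-to-nearest mode, -0.0 + 0.0 is +0.0, so folding X + 0.0 to X
   would change the sign of a zero result.  */
static double
real_zero_addition_example (void)
{
  double x = -0.0;
  return x + 0.0;   /* +0.0, not the -0.0 stored in X */
}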
6245 /* Subroutine of fold() that checks comparisons of built-in math
6246 functions against real constants.
6248 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6249 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6250 is the type of the result and ARG0 and ARG1 are the operands of the
6251 comparison. ARG1 must be a TREE_REAL_CST.
6253 The function returns the constant folded tree if a simplification
6254 can be made, and NULL_TREE otherwise. */
6256 static tree
6257 fold_mathfn_compare (location_t loc,
6258 enum built_in_function fcode, enum tree_code code,
6259 tree type, tree arg0, tree arg1)
6261 REAL_VALUE_TYPE c;
6263 if (BUILTIN_SQRT_P (fcode))
6265 tree arg = CALL_EXPR_ARG (arg0, 0);
6266 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6268 c = TREE_REAL_CST (arg1);
6269 if (REAL_VALUE_NEGATIVE (c))
6271 /* sqrt(x) < y is always false, if y is negative. */
6272 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6273 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6275 /* sqrt(x) > y is always true, if y is negative and we
6276 don't care about NaNs, i.e. negative values of x. */
6277 if (code == NE_EXPR || !HONOR_NANS (mode))
6278 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6280 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6281 return fold_build2_loc (loc, GE_EXPR, type, arg,
6282 build_real (TREE_TYPE (arg), dconst0));
6284 else if (code == GT_EXPR || code == GE_EXPR)
6286 REAL_VALUE_TYPE c2;
6288 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6289 real_convert (&c2, mode, &c2);
6291 if (REAL_VALUE_ISINF (c2))
6293 /* sqrt(x) > y is x == +Inf, when y is very large. */
6294 if (HONOR_INFINITIES (mode))
6295 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6296 build_real (TREE_TYPE (arg), c2));
6298 /* sqrt(x) > y is always false, when y is very large
6299 and we don't care about infinities. */
6300 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6303 /* sqrt(x) > c is the same as x > c*c. */
6304 return fold_build2_loc (loc, code, type, arg,
6305 build_real (TREE_TYPE (arg), c2));
6307 else if (code == LT_EXPR || code == LE_EXPR)
6309 REAL_VALUE_TYPE c2;
6311 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6312 real_convert (&c2, mode, &c2);
6314 if (REAL_VALUE_ISINF (c2))
6316 /* sqrt(x) < y is always true, when y is a very large
6317 value and we don't care about NaNs or Infinities. */
6318 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6319 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6321 /* sqrt(x) < y is x != +Inf when y is very large and we
6322 don't care about NaNs. */
6323 if (! HONOR_NANS (mode))
6324 return fold_build2_loc (loc, NE_EXPR, type, arg,
6325 build_real (TREE_TYPE (arg), c2));
6327 /* sqrt(x) < y is x >= 0 when y is very large and we
6328 don't care about Infinities. */
6329 if (! HONOR_INFINITIES (mode))
6330 return fold_build2_loc (loc, GE_EXPR, type, arg,
6331 build_real (TREE_TYPE (arg), dconst0));
6333 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6334 arg = save_expr (arg);
6335 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6336 fold_build2_loc (loc, GE_EXPR, type, arg,
6337 build_real (TREE_TYPE (arg),
6338 dconst0)),
6339 fold_build2_loc (loc, NE_EXPR, type, arg,
6340 build_real (TREE_TYPE (arg),
6341 c2)));
6344 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6345 if (! HONOR_NANS (mode))
6346 return fold_build2_loc (loc, code, type, arg,
6347 build_real (TREE_TYPE (arg), c2));
6349 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6350 arg = save_expr (arg);
6351 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6352 fold_build2_loc (loc, GE_EXPR, type, arg,
6353 build_real (TREE_TYPE (arg),
6354 dconst0)),
6355 fold_build2_loc (loc, code, type, arg,
6356 build_real (TREE_TYPE (arg),
6357 c2)));
6361 return NULL_TREE;
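/* A sketch of the basic sqrt fold above, using GCC's __builtin_sqrt:
   for finite nonnegative X, sqrt is monotone, so the comparison can
   square the constant instead of computing the square root.  */
static int
mathfn_compare_example (double x)
{
  return (__builtin_sqrt (x) > 3.0) == (x > 9.0);   /* 1 when x >= 0 */
}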
6364 /* Subroutine of fold() that optimizes comparisons against Infinities,
6365 either +Inf or -Inf.
6367 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6368 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6369 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6371 The function returns the constant folded tree if a simplification
6372 can be made, and NULL_TREE otherwise. */
6374 static tree
6375 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6376 tree arg0, tree arg1)
6378 enum machine_mode mode;
6379 REAL_VALUE_TYPE max;
6380 tree temp;
6381 bool neg;
6383 mode = TYPE_MODE (TREE_TYPE (arg0));
6385 /* For negative infinity swap the sense of the comparison. */
6386 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6387 if (neg)
6388 code = swap_tree_comparison (code);
6390 switch (code)
6392 case GT_EXPR:
6393 /* x > +Inf is always false, if we ignore sNaNs. */
6394 if (HONOR_SNANS (mode))
6395 return NULL_TREE;
6396 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6398 case LE_EXPR:
6399 /* x <= +Inf is always true, if we don't care about NaNs. */
6400 if (! HONOR_NANS (mode))
6401 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6403 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6404 arg0 = save_expr (arg0);
6405 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6407 case EQ_EXPR:
6408 case GE_EXPR:
6409 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6410 real_maxval (&max, neg, mode);
6411 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
6414 case LT_EXPR:
6415 /* x < +Inf is always equal to x <= DBL_MAX. */
6416 real_maxval (&max, neg, mode);
6417 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6418 arg0, build_real (TREE_TYPE (arg0), max));
6420 case NE_EXPR:
6421 /* x != +Inf is always equal to !(x > DBL_MAX). */
6422 real_maxval (&max, neg, mode);
6423 if (! HONOR_NANS (mode))
6424 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6425 arg0, build_real (TREE_TYPE (arg0), max));
6427 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6428 arg0, build_real (TREE_TYPE (arg0), max));
6429 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6431 default:
6432 break;
6435 return NULL_TREE;
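/* A sketch of the LT case above, with GCC's built-ins standing in for
   the constants: x < +Inf holds exactly when x <= DBL_MAX, aside from
   NaNs, for which both tests are false anyway.  */
static int
inf_compare_example (double x)
{
  return (x < __builtin_inf ()) == (x <= __DBL_MAX__);   /* always 1 */
}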
6438 /* Subroutine of fold() that optimizes comparisons of a division by
6439 a nonzero integer constant against an integer constant, i.e.
6440 X/C1 op C2.
6442 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6443 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6444 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6446 The function returns the constant folded tree if a simplification
6447 can be made, and NULL_TREE otherwise. */
6449 static tree
6450 fold_div_compare (location_t loc,
6451 enum tree_code code, tree type, tree arg0, tree arg1)
6453 tree prod, tmp, hi, lo;
6454 tree arg00 = TREE_OPERAND (arg0, 0);
6455 tree arg01 = TREE_OPERAND (arg0, 1);
6456 double_int val;
6457 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6458 bool neg_overflow;
6459 bool overflow;
6461 /* We have to do this the hard way to detect unsigned overflow.
6462 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6463 val = TREE_INT_CST (arg01)
6464 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6465 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6466 neg_overflow = false;
6468 if (unsigned_p)
6470 tmp = int_const_binop (MINUS_EXPR, arg01,
6471 build_int_cst (TREE_TYPE (arg01), 1));
6472 lo = prod;
6474 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6475 val = TREE_INT_CST (prod)
6476 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6477 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6478 -1, overflow | TREE_OVERFLOW (prod));
6480 else if (tree_int_cst_sgn (arg01) >= 0)
6482 tmp = int_const_binop (MINUS_EXPR, arg01,
6483 build_int_cst (TREE_TYPE (arg01), 1));
6484 switch (tree_int_cst_sgn (arg1))
6486 case -1:
6487 neg_overflow = true;
6488 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6489 hi = prod;
6490 break;
6492 case 0:
6493 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6494 hi = tmp;
6495 break;
6497 case 1:
6498 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6499 lo = prod;
6500 break;
6502 default:
6503 gcc_unreachable ();
6506 else
6508 /* A negative divisor reverses the relational operators. */
6509 code = swap_tree_comparison (code);
6511 tmp = int_const_binop (PLUS_EXPR, arg01,
6512 build_int_cst (TREE_TYPE (arg01), 1));
6513 switch (tree_int_cst_sgn (arg1))
6515 case -1:
6516 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6517 lo = prod;
6518 break;
6520 case 0:
6521 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6522 lo = tmp;
6523 break;
6525 case 1:
6526 neg_overflow = true;
6527 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6528 hi = prod;
6529 break;
6531 default:
6532 gcc_unreachable ();
6536 switch (code)
6538 case EQ_EXPR:
6539 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6540 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6541 if (TREE_OVERFLOW (hi))
6542 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6543 if (TREE_OVERFLOW (lo))
6544 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6545 return build_range_check (loc, type, arg00, 1, lo, hi);
6547 case NE_EXPR:
6548 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6549 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6550 if (TREE_OVERFLOW (hi))
6551 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6552 if (TREE_OVERFLOW (lo))
6553 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6554 return build_range_check (loc, type, arg00, 0, lo, hi);
6556 case LT_EXPR:
6557 if (TREE_OVERFLOW (lo))
6559 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6560 return omit_one_operand_loc (loc, type, tmp, arg00);
6562 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6564 case LE_EXPR:
6565 if (TREE_OVERFLOW (hi))
6567 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6568 return omit_one_operand_loc (loc, type, tmp, arg00);
6570 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6572 case GT_EXPR:
6573 if (TREE_OVERFLOW (hi))
6575 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6576 return omit_one_operand_loc (loc, type, tmp, arg00);
6578 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6580 case GE_EXPR:
6581 if (TREE_OVERFLOW (lo))
6583 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6584 return omit_one_operand_loc (loc, type, tmp, arg00);
6586 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6588 default:
6589 break;
6592 return NULL_TREE;
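/* A sketch of the EQ case above with concrete numbers: for unsigned X,
   X / 4 == 5 holds exactly for X in [20, 23], and the wrapping subtract
   is the single unsigned range compare that build_range_check emits.  */
static int
div_compare_example (unsigned int x)
{
  return (x / 4 == 5) == (x - 20 <= 3);   /* always 1 */
}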
6596 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6597 equality/inequality test, then return a simplified form of the test
6598 using a sign test. Otherwise return NULL. TYPE is the desired
6599 result type. */
6601 static tree
6602 fold_single_bit_test_into_sign_test (location_t loc,
6603 enum tree_code code, tree arg0, tree arg1,
6604 tree result_type)
6606 /* If this is testing a single bit, we can optimize the test. */
6607 if ((code == NE_EXPR || code == EQ_EXPR)
6608 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6609 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6611 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6612 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6613 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6615 if (arg00 != NULL_TREE
6616 /* This is only a win if casting to a signed type is cheap,
6617 i.e. when arg00's type is not a partial mode. */
6618 && TYPE_PRECISION (TREE_TYPE (arg00))
6619 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6621 tree stype = signed_type_for (TREE_TYPE (arg00));
6622 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6623 result_type,
6624 fold_convert_loc (loc, stype, arg00),
6625 build_int_cst (stype, 0));
6629 return NULL_TREE;
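/* A sketch of the sign-bit case above, assuming a 32-bit int: when the
   tested bit is the sign bit, the masked test is just a signed compare
   against zero.  */
static int
single_bit_sign_test_example (int x)
{
  return ((x & 0x80000000u) != 0) == (x < 0);   /* always 1 */
}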
6632 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6633 equality/inequality test, then return a simplified form of
6634 the test using shifts and logical operations. Otherwise return
6635 NULL. TYPE is the desired result type. */
6637 tree
6638 fold_single_bit_test (location_t loc, enum tree_code code,
6639 tree arg0, tree arg1, tree result_type)
6641 /* If this is testing a single bit, we can optimize the test. */
6642 if ((code == NE_EXPR || code == EQ_EXPR)
6643 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6644 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6646 tree inner = TREE_OPERAND (arg0, 0);
6647 tree type = TREE_TYPE (arg0);
6648 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6649 enum machine_mode operand_mode = TYPE_MODE (type);
6650 int ops_unsigned;
6651 tree signed_type, unsigned_type, intermediate_type;
6652 tree tem, one;
6654 /* First, see if we can fold the single bit test into a sign-bit
6655 test. */
6656 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6657 result_type);
6658 if (tem)
6659 return tem;
6661 /* Otherwise we have (A & C) != 0 where C is a single bit,
6662 convert that into ((A >> C2) & 1), where C2 = log2(C).
6663 Similarly for (A & C) == 0. */
6665 /* If INNER is a right shift of a constant and it plus BITNUM does
6666 not overflow, adjust BITNUM and INNER. */
6667 if (TREE_CODE (inner) == RSHIFT_EXPR
6668 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6669 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6670 && bitnum < TYPE_PRECISION (type)
6671 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6672 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6674 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6675 inner = TREE_OPERAND (inner, 0);
6678 /* If we are going to be able to omit the AND below, we must do our
6679 operations as unsigned. If we must use the AND, we have a choice.
6680 Normally unsigned is faster, but for some machines signed is. */
6681 #ifdef LOAD_EXTEND_OP
6682 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6683 && !flag_syntax_only) ? 0 : 1;
6684 #else
6685 ops_unsigned = 1;
6686 #endif
6688 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6689 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6690 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6691 inner = fold_convert_loc (loc, intermediate_type, inner);
6693 if (bitnum != 0)
6694 inner = build2 (RSHIFT_EXPR, intermediate_type,
6695 inner, size_int (bitnum));
6697 one = build_int_cst (intermediate_type, 1);
6699 if (code == EQ_EXPR)
6700 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6702 /* Put the AND last so it can combine with more things. */
6703 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6705 /* Make sure to return the proper type. */
6706 inner = fold_convert_loc (loc, result_type, inner);
6708 return inner;
6710 return NULL_TREE;
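/* A sketch of the shift form above: testing bit 3 of X becomes a right
   shift by 3 followed by masking the low bit.  */
static int
single_bit_test_example (unsigned int x)
{
  return ((x & 8) != 0) == ((x >> 3) & 1);   /* always 1 */
}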
6713 /* Check whether we are allowed to reorder operands arg0 and arg1,
6714 such that the evaluation of arg1 occurs before arg0. */
6716 static bool
6717 reorder_operands_p (const_tree arg0, const_tree arg1)
6719 if (! flag_evaluation_order)
6720 return true;
6721 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6722 return true;
6723 return ! TREE_SIDE_EFFECTS (arg0)
6724 && ! TREE_SIDE_EFFECTS (arg1);
6727 /* Test whether it is preferable to swap two operands, ARG0 and
6728 ARG1, for example because ARG0 is an integer constant and ARG1
6729 isn't. If REORDER is true, only recommend swapping if we can
6730 evaluate the operands in reverse order. */
6732 bool
6733 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6735 STRIP_SIGN_NOPS (arg0);
6736 STRIP_SIGN_NOPS (arg1);
6738 if (TREE_CODE (arg1) == INTEGER_CST)
6739 return 0;
6740 if (TREE_CODE (arg0) == INTEGER_CST)
6741 return 1;
6743 if (TREE_CODE (arg1) == REAL_CST)
6744 return 0;
6745 if (TREE_CODE (arg0) == REAL_CST)
6746 return 1;
6748 if (TREE_CODE (arg1) == FIXED_CST)
6749 return 0;
6750 if (TREE_CODE (arg0) == FIXED_CST)
6751 return 1;
6753 if (TREE_CODE (arg1) == COMPLEX_CST)
6754 return 0;
6755 if (TREE_CODE (arg0) == COMPLEX_CST)
6756 return 1;
6758 if (TREE_CONSTANT (arg1))
6759 return 0;
6760 if (TREE_CONSTANT (arg0))
6761 return 1;
6763 if (optimize_function_for_size_p (cfun))
6764 return 0;
6766 if (reorder && flag_evaluation_order
6767 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6768 return 0;
6770 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6771 for commutative and comparison operators. Ensuring a canonical
6772 form allows the optimizers to find additional redundancies without
6773 having to explicitly check for both orderings. */
6774 if (TREE_CODE (arg0) == SSA_NAME
6775 && TREE_CODE (arg1) == SSA_NAME
6776 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6777 return 1;
6779 /* Put SSA_NAMEs last. */
6780 if (TREE_CODE (arg1) == SSA_NAME)
6781 return 0;
6782 if (TREE_CODE (arg0) == SSA_NAME)
6783 return 1;
6785 /* Put variables last. */
6786 if (DECL_P (arg1))
6787 return 0;
6788 if (DECL_P (arg0))
6789 return 1;
6791 return 0;
6794 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6795 ARG0 is extended to a wider type. */
6797 static tree
6798 fold_widened_comparison (location_t loc, enum tree_code code,
6799 tree type, tree arg0, tree arg1)
6801 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6802 tree arg1_unw;
6803 tree shorter_type, outer_type;
6804 tree min, max;
6805 bool above, below;
6807 if (arg0_unw == arg0)
6808 return NULL_TREE;
6809 shorter_type = TREE_TYPE (arg0_unw);
6811 #ifdef HAVE_canonicalize_funcptr_for_compare
6812 /* Disable this optimization if we're casting a function pointer
6813 type on targets that require function pointer canonicalization. */
6814 if (HAVE_canonicalize_funcptr_for_compare
6815 && TREE_CODE (shorter_type) == POINTER_TYPE
6816 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6817 return NULL_TREE;
6818 #endif
6820 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6821 return NULL_TREE;
6823 arg1_unw = get_unwidened (arg1, NULL_TREE);
6825 /* If possible, express the comparison in the shorter mode. */
6826 if ((code == EQ_EXPR || code == NE_EXPR
6827 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6828 && (TREE_TYPE (arg1_unw) == shorter_type
6829 || ((TYPE_PRECISION (shorter_type)
6830 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6831 && (TYPE_UNSIGNED (shorter_type)
6832 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6833 || (TREE_CODE (arg1_unw) == INTEGER_CST
6834 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6835 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6836 && int_fits_type_p (arg1_unw, shorter_type))))
6837 return fold_build2_loc (loc, code, type, arg0_unw,
6838 fold_convert_loc (loc, shorter_type, arg1_unw));
6840 if (TREE_CODE (arg1_unw) != INTEGER_CST
6841 || TREE_CODE (shorter_type) != INTEGER_TYPE
6842 || !int_fits_type_p (arg1_unw, shorter_type))
6843 return NULL_TREE;
6845 /* If we are comparing with an integer that does not fit into the range
6846 of the shorter type, the result is known. */
6847 outer_type = TREE_TYPE (arg1_unw);
6848 min = lower_bound_in_type (outer_type, shorter_type);
6849 max = upper_bound_in_type (outer_type, shorter_type);
6851 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6852 max, arg1_unw));
6853 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6854 arg1_unw, min));
6856 switch (code)
6858 case EQ_EXPR:
6859 if (above || below)
6860 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6861 break;
6863 case NE_EXPR:
6864 if (above || below)
6865 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6866 break;
6868 case LT_EXPR:
6869 case LE_EXPR:
6870 if (above)
6871 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6872 else if (below)
6873 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6875 case GT_EXPR:
6876 case GE_EXPR:
6877 if (above)
6878 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6879 else if (below)
6880 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6882 default:
6883 break;
6886 return NULL_TREE;
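/* A sketch of the known-result case above: an unsigned char widened to
   int is at most 255, so a comparison against 300 folds to constant
   false.  */
static int
widened_compare_example (unsigned char c)
{
  return (int) c == 300;   /* folds to 0 */
}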
6889 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6890 ARG0 just the signedness is changed. */
6892 static tree
6893 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6894 tree arg0, tree arg1)
6896 tree arg0_inner;
6897 tree inner_type, outer_type;
6899 if (!CONVERT_EXPR_P (arg0))
6900 return NULL_TREE;
6902 outer_type = TREE_TYPE (arg0);
6903 arg0_inner = TREE_OPERAND (arg0, 0);
6904 inner_type = TREE_TYPE (arg0_inner);
6906 #ifdef HAVE_canonicalize_funcptr_for_compare
6907 /* Disable this optimization if we're casting a function pointer
6908 type on targets that require function pointer canonicalization. */
6909 if (HAVE_canonicalize_funcptr_for_compare
6910 && TREE_CODE (inner_type) == POINTER_TYPE
6911 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6912 return NULL_TREE;
6913 #endif
6915 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6916 return NULL_TREE;
6918 if (TREE_CODE (arg1) != INTEGER_CST
6919 && !(CONVERT_EXPR_P (arg1)
6920 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6921 return NULL_TREE;
6923 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6924 && code != NE_EXPR
6925 && code != EQ_EXPR)
6926 return NULL_TREE;
6928 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6929 return NULL_TREE;
6931 if (TREE_CODE (arg1) == INTEGER_CST)
6932 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6933 0, TREE_OVERFLOW (arg1));
6934 else
6935 arg1 = fold_convert_loc (loc, inner_type, arg1);
6937 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
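/* A sketch of the fold above: equality only looks at the bit pattern,
   so a cast between signed and unsigned of the same precision is
   stripped and the constant is retyped instead.  */
static int
sign_changed_compare_example (int x)
{
  return (unsigned int) x == 5u;   /* folds to x == 5 */
}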
6940 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6941 the step of the array. Reconstructs s and delta in the case of s *
6942 delta being an integer constant (and thus already folded). ADDR is
6943 the address. MULT is the multiplicative expression. If the
6944 function succeeds, the new address expression is returned.
6945 Otherwise NULL_TREE is returned. LOC is the location of the
6946 resulting expression. */
6948 static tree
6949 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6951 tree s, delta, step;
6952 tree ref = TREE_OPERAND (addr, 0), pref;
6953 tree ret, pos;
6954 tree itype;
6955 bool mdim = false;
6957 /* Strip the nops that might be added when converting op1 to sizetype. */
6958 STRIP_NOPS (op1);
6960 /* Canonicalize op1 into a possibly non-constant delta
6961 and an INTEGER_CST s. */
6962 if (TREE_CODE (op1) == MULT_EXPR)
6964 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6966 STRIP_NOPS (arg0);
6967 STRIP_NOPS (arg1);
6969 if (TREE_CODE (arg0) == INTEGER_CST)
6971 s = arg0;
6972 delta = arg1;
6974 else if (TREE_CODE (arg1) == INTEGER_CST)
6976 s = arg1;
6977 delta = arg0;
6979 else
6980 return NULL_TREE;
6982 else if (TREE_CODE (op1) == INTEGER_CST)
6984 delta = op1;
6985 s = NULL_TREE;
6987 else
6989 /* Treat op1 as delta * 1. */
6990 delta = op1;
6991 s = integer_one_node;
6994 /* Handle &x.array the same as we would handle &x.array[0]. */
6995 if (TREE_CODE (ref) == COMPONENT_REF
6996 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6998 tree domain;
7000 /* Remember if this was a multi-dimensional array. */
7001 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7002 mdim = true;
7004 domain = TYPE_DOMAIN (TREE_TYPE (ref));
7005 if (! domain)
7006 goto cont;
7007 itype = TREE_TYPE (domain);
7009 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
7010 if (TREE_CODE (step) != INTEGER_CST)
7011 goto cont;
7013 if (s)
7015 if (! tree_int_cst_equal (step, s))
7016 goto cont;
7018 else
7020 /* Check whether delta is a multiple of step. */
7021 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7022 if (! tmp)
7023 goto cont;
7024 delta = tmp;
7027 /* Only fold here if we can verify we do not overflow one
7028 dimension of a multi-dimensional array. */
7029 if (mdim)
7031 tree tmp;
7033 if (!TYPE_MIN_VALUE (domain)
7034 || !TYPE_MAX_VALUE (domain)
7035 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7036 goto cont;
7038 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7039 fold_convert_loc (loc, itype,
7040 TYPE_MIN_VALUE (domain)),
7041 fold_convert_loc (loc, itype, delta));
7042 if (TREE_CODE (tmp) != INTEGER_CST
7043 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7044 goto cont;
7047 /* We found a suitable component reference. */
7049 pref = TREE_OPERAND (addr, 0);
7050 ret = copy_node (pref);
7051 SET_EXPR_LOCATION (ret, loc);
7053 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7054 fold_build2_loc
7055 (loc, PLUS_EXPR, itype,
7056 fold_convert_loc (loc, itype,
7057 TYPE_MIN_VALUE
7058 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7059 fold_convert_loc (loc, itype, delta)),
7060 NULL_TREE, NULL_TREE);
7061 return build_fold_addr_expr_loc (loc, ret);
7064 cont:
7066 for (;; ref = TREE_OPERAND (ref, 0))
7068 if (TREE_CODE (ref) == ARRAY_REF)
7070 tree domain;
7072 /* Remember if this was a multi-dimensional array. */
7073 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7074 mdim = true;
7076 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7077 if (! domain)
7078 continue;
7079 itype = TREE_TYPE (domain);
7081 step = array_ref_element_size (ref);
7082 if (TREE_CODE (step) != INTEGER_CST)
7083 continue;
7085 if (s)
7087 if (! tree_int_cst_equal (step, s))
7088 continue;
7090 else
7092 /* Check whether delta is a multiple of step. */
7093 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7094 if (! tmp)
7095 continue;
7096 delta = tmp;
7099 /* Only fold here if we can verify we do not overflow one
7100 dimension of a multi-dimensional array. */
7101 if (mdim)
7103 tree tmp;
7105 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7106 || !TYPE_MAX_VALUE (domain)
7107 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7108 continue;
7110 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7111 fold_convert_loc (loc, itype,
7112 TREE_OPERAND (ref, 1)),
7113 fold_convert_loc (loc, itype, delta));
7114 if (!tmp
7115 || TREE_CODE (tmp) != INTEGER_CST
7116 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7117 continue;
7120 break;
7122 else
7123 mdim = false;
7125 if (!handled_component_p (ref))
7126 return NULL_TREE;
7129 /* We found the suitable array reference. So copy everything up to it,
7130 and replace the index. */
7132 pref = TREE_OPERAND (addr, 0);
7133 ret = copy_node (pref);
7134 SET_EXPR_LOCATION (ret, loc);
7135 pos = ret;
7137 while (pref != ref)
7139 pref = TREE_OPERAND (pref, 0);
7140 TREE_OPERAND (pos, 0) = copy_node (pref);
7141 pos = TREE_OPERAND (pos, 0);
7144 TREE_OPERAND (pos, 1)
7145 = fold_build2_loc (loc, PLUS_EXPR, itype,
7146 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7147 fold_convert_loc (loc, itype, delta));
7148 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
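/* A sketch of the address fold above with a hypothetical flat array,
   the pointer-plus written as byte arithmetic: when the scale S matches
   the element size, the multiplication moves into the index.  */
static int *
move_mult_example (int *a, long i, long delta)
{
  /* &a[i] p+ delta * sizeof (int) becomes &a[i + delta].  */
  return (int *) ((char *) &a[i] + delta * (long) sizeof (int));
}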
7152 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7153 means A >= Y && A != MAX, but in this case we know that
7154 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7156 static tree
7157 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7159 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7161 if (TREE_CODE (bound) == LT_EXPR)
7162 a = TREE_OPERAND (bound, 0);
7163 else if (TREE_CODE (bound) == GT_EXPR)
7164 a = TREE_OPERAND (bound, 1);
7165 else
7166 return NULL_TREE;
7168 typea = TREE_TYPE (a);
7169 if (!INTEGRAL_TYPE_P (typea)
7170 && !POINTER_TYPE_P (typea))
7171 return NULL_TREE;
7173 if (TREE_CODE (ineq) == LT_EXPR)
7175 a1 = TREE_OPERAND (ineq, 1);
7176 y = TREE_OPERAND (ineq, 0);
7178 else if (TREE_CODE (ineq) == GT_EXPR)
7180 a1 = TREE_OPERAND (ineq, 0);
7181 y = TREE_OPERAND (ineq, 1);
7183 else
7184 return NULL_TREE;
7186 if (TREE_TYPE (a1) != typea)
7187 return NULL_TREE;
7189 if (POINTER_TYPE_P (typea))
7191 /* Convert the pointer types into integer before taking the difference. */
7192 tree ta = fold_convert_loc (loc, ssizetype, a);
7193 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7194 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7196 else
7197 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7199 if (!diff || !integer_onep (diff))
7200 return NULL_TREE;
7202 return fold_build2_loc (loc, GE_EXPR, type, a, y);
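/* A sketch of the bound reasoning above: A < X pins A strictly below
   the type's maximum, so A + 1 cannot wrap and A + 1 > Y is exactly
   A >= Y.  The short-circuit keeps A + 1 unevaluated otherwise.  */
static int
nonsharp_ineq_example (int a, int x, int y)
{
  return (a < x && a + 1 > y) == (a < x && a >= y);   /* always 1 */
}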
7205 /* Fold a sum or difference where at least one operand is a multiplication.
7206 Returns the folded tree or NULL if no simplification could be made. */
7208 static tree
7209 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7210 tree arg0, tree arg1)
7212 tree arg00, arg01, arg10, arg11;
7213 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7215 /* (A * C) +- (B * C) -> (A+-B) * C.
7216 (A * C) +- A -> A * (C+-1).
7217 We are most concerned about the case where C is a constant,
7218 but other combinations show up during loop reduction. Since
7219 it is not difficult, try all four possibilities. */
7221 if (TREE_CODE (arg0) == MULT_EXPR)
7223 arg00 = TREE_OPERAND (arg0, 0);
7224 arg01 = TREE_OPERAND (arg0, 1);
7226 else if (TREE_CODE (arg0) == INTEGER_CST)
7228 arg00 = build_one_cst (type);
7229 arg01 = arg0;
7231 else
7233 /* We cannot generate constant 1 for fract. */
7234 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7235 return NULL_TREE;
7236 arg00 = arg0;
7237 arg01 = build_one_cst (type);
7239 if (TREE_CODE (arg1) == MULT_EXPR)
7241 arg10 = TREE_OPERAND (arg1, 0);
7242 arg11 = TREE_OPERAND (arg1, 1);
7244 else if (TREE_CODE (arg1) == INTEGER_CST)
7246 arg10 = build_one_cst (type);
7247 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7248 the purpose of this canonicalization. */
7249 if (TREE_INT_CST_HIGH (arg1) == -1
7250 && negate_expr_p (arg1)
7251 && code == PLUS_EXPR)
7253 arg11 = negate_expr (arg1);
7254 code = MINUS_EXPR;
7256 else
7257 arg11 = arg1;
7259 else
7261 /* We cannot generate constant 1 for fract. */
7262 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7263 return NULL_TREE;
7264 arg10 = arg1;
7265 arg11 = build_one_cst (type);
7267 same = NULL_TREE;
7269 if (operand_equal_p (arg01, arg11, 0))
7270 same = arg01, alt0 = arg00, alt1 = arg10;
7271 else if (operand_equal_p (arg00, arg10, 0))
7272 same = arg00, alt0 = arg01, alt1 = arg11;
7273 else if (operand_equal_p (arg00, arg11, 0))
7274 same = arg00, alt0 = arg01, alt1 = arg10;
7275 else if (operand_equal_p (arg01, arg10, 0))
7276 same = arg01, alt0 = arg00, alt1 = arg11;
7278 /* No identical multiplicands; see if we can find a common
7279 power-of-two factor in non-power-of-two multiplies. This
7280 can help in multi-dimensional array access. */
7281 else if (tree_fits_shwi_p (arg01)
7282 && tree_fits_shwi_p (arg11))
7284 HOST_WIDE_INT int01, int11, tmp;
7285 bool swap = false;
7286 tree maybe_same;
7287 int01 = tree_to_shwi (arg01);
7288 int11 = tree_to_shwi (arg11);
7290 /* Move min of absolute values to int11. */
7291 if (absu_hwi (int01) < absu_hwi (int11))
7293 tmp = int01, int01 = int11, int11 = tmp;
7294 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7295 maybe_same = arg01;
7296 swap = true;
7298 else
7299 maybe_same = arg11;
7301 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7302 /* The remainder should not be a constant, otherwise we
7303 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7304 increase the number of multiplications necessary. */
7305 && TREE_CODE (arg10) != INTEGER_CST)
7307 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7308 build_int_cst (TREE_TYPE (arg00),
7309 int01 / int11));
7310 alt1 = arg10;
7311 same = maybe_same;
7312 if (swap)
7313 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7317 if (same)
7318 return fold_build2_loc (loc, MULT_EXPR, type,
7319 fold_build2_loc (loc, code, type,
7320 fold_convert_loc (loc, type, alt0),
7321 fold_convert_loc (loc, type, alt1)),
7322 fold_convert_loc (loc, type, same));
7324 return NULL_TREE;
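/* Illustrative sketch, not part of GCC: the factorizations performed
   above, checked by brute force on small ints.  The last line is the
   common power-of-two-factor case, i * 4 + j * 2 -> (i * 2 + j) * 2.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -5; a <= 5; a++)
    for (int b = -5; b <= 5; b++)
      for (int c = -5; c <= 5; c++)
        {
          assert (a * c + b * c == (a + b) * c);     /* (A*C) + (B*C) -> (A+B) * C */
          assert (a * c + a == a * (c + 1));         /* (A*C) + A     -> A * (C+1) */
          assert (a * 4 + b * 2 == (a * 2 + b) * 2); /* power-of-two factor */
        }
  return 0;
}
#endif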
7327 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7328 specified by EXPR into the buffer PTR of length LEN bytes.
7329 Return the number of bytes placed in the buffer, or zero
7330 upon failure. */
7332 static int
7333 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7335 tree type = TREE_TYPE (expr);
7336 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7337 int byte, offset, word, words;
7338 unsigned char value;
7340 if (total_bytes > len)
7341 return 0;
7342 words = total_bytes / UNITS_PER_WORD;
7344 for (byte = 0; byte < total_bytes; byte++)
7346 int bitpos = byte * BITS_PER_UNIT;
7347 if (bitpos < HOST_BITS_PER_WIDE_INT)
7348 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7349 else
7350 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7351 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7353 if (total_bytes > UNITS_PER_WORD)
7355 word = byte / UNITS_PER_WORD;
7356 if (WORDS_BIG_ENDIAN)
7357 word = (words - 1) - word;
7358 offset = word * UNITS_PER_WORD;
7359 if (BYTES_BIG_ENDIAN)
7360 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7361 else
7362 offset += byte % UNITS_PER_WORD;
7364 else
7365 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7366 ptr[offset] = value;
7368 return total_bytes;
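/* Illustrative sketch, not part of GCC: the same byte/word swizzle as
   native_encode_int above, specialized to a 64-bit value on an
   8-bit-byte host.  WORD_BYTES, BYTES_BE and WORDS_BE are stand-ins
   for the target's UNITS_PER_WORD, BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN; with both endian flags set, the output is fully
   big-endian: 01 02 03 04 05 06 07 08.  */
#if 0
#include <stdint.h>
#include <stdio.h>

#define WORD_BYTES 4
#define BYTES_BE 1
#define WORDS_BE 1

static void
encode_u64 (uint64_t v, unsigned char out[8])
{
  int words = 8 / WORD_BYTES;
  for (int byte = 0; byte < 8; byte++)
    {
      unsigned char value = (unsigned char) (v >> (byte * 8));
      int word = byte / WORD_BYTES;
      if (WORDS_BE)
        word = (words - 1) - word;
      int offset = word * WORD_BYTES;
      if (BYTES_BE)
        offset += (WORD_BYTES - 1) - (byte % WORD_BYTES);
      else
        offset += byte % WORD_BYTES;
      out[offset] = value;
    }
}

int
main (void)
{
  unsigned char buf[8];
  encode_u64 (0x0102030405060708ULL, buf);
  for (int i = 0; i < 8; i++)
    printf ("%02x ", buf[i]);
  printf ("\n");
  return 0;
}
#endif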
7372 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7373 specified by EXPR into the buffer PTR of length LEN bytes.
7374 Return the number of bytes placed in the buffer, or zero
7375 upon failure. */
7377 static int
7378 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7380 tree type = TREE_TYPE (expr);
7381 enum machine_mode mode = TYPE_MODE (type);
7382 int total_bytes = GET_MODE_SIZE (mode);
7383 FIXED_VALUE_TYPE value;
7384 tree i_value, i_type;
7386 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7387 return 0;
7389 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7391 if (NULL_TREE == i_type
7392 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7393 return 0;
7395 value = TREE_FIXED_CST (expr);
7396 i_value = double_int_to_tree (i_type, value.data);
7398 return native_encode_int (i_value, ptr, len);
7402 /* Subroutine of native_encode_expr. Encode the REAL_CST
7403 specified by EXPR into the buffer PTR of length LEN bytes.
7404 Return the number of bytes placed in the buffer, or zero
7405 upon failure. */
7407 static int
7408 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7410 tree type = TREE_TYPE (expr);
7411 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7412 int byte, offset, word, words, bitpos;
7413 unsigned char value;
7415 /* There are always 32 bits in each long, no matter the size of
7416 the host's long. We handle floating point representations with
7417 up to 192 bits. */
7418 long tmp[6];
7420 if (total_bytes > len)
7421 return 0;
7422 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7424 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7426 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7427 bitpos += BITS_PER_UNIT)
7429 byte = (bitpos / BITS_PER_UNIT) & 3;
7430 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7432 if (UNITS_PER_WORD < 4)
7434 word = byte / UNITS_PER_WORD;
7435 if (WORDS_BIG_ENDIAN)
7436 word = (words - 1) - word;
7437 offset = word * UNITS_PER_WORD;
7438 if (BYTES_BIG_ENDIAN)
7439 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7440 else
7441 offset += byte % UNITS_PER_WORD;
7443 else
7444 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7445 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7447 return total_bytes;
7450 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7451 specified by EXPR into the buffer PTR of length LEN bytes.
7452 Return the number of bytes placed in the buffer, or zero
7453 upon failure. */
7455 static int
7456 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7458 int rsize, isize;
7459 tree part;
7461 part = TREE_REALPART (expr);
7462 rsize = native_encode_expr (part, ptr, len);
7463 if (rsize == 0)
7464 return 0;
7465 part = TREE_IMAGPART (expr);
7466 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7467 if (isize != rsize)
7468 return 0;
7469 return rsize + isize;
7473 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7474 specified by EXPR into the buffer PTR of length LEN bytes.
7475 Return the number of bytes placed in the buffer, or zero
7476 upon failure. */
7478 static int
7479 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7481 unsigned i, count;
7482 int size, offset;
7483 tree itype, elem;
7485 offset = 0;
7486 count = VECTOR_CST_NELTS (expr);
7487 itype = TREE_TYPE (TREE_TYPE (expr));
7488 size = GET_MODE_SIZE (TYPE_MODE (itype));
7489 for (i = 0; i < count; i++)
7491 elem = VECTOR_CST_ELT (expr, i);
7492 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7493 return 0;
7494 offset += size;
7496 return offset;
7500 /* Subroutine of native_encode_expr. Encode the STRING_CST
7501 specified by EXPR into the buffer PTR of length LEN bytes.
7502 Return the number of bytes placed in the buffer, or zero
7503 upon failure. */
7505 static int
7506 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7508 tree type = TREE_TYPE (expr);
7509 HOST_WIDE_INT total_bytes;
7511 if (TREE_CODE (type) != ARRAY_TYPE
7512 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7513 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7514 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7515 return 0;
7516 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7517 if (total_bytes > len)
7518 return 0;
7519 if (TREE_STRING_LENGTH (expr) < total_bytes)
7521 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7522 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7523 total_bytes - TREE_STRING_LENGTH (expr));
7525 else
7526 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7527 return total_bytes;
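/* Illustrative sketch, not part of GCC: the zero-padding branch above.
   Encoding a short STRING_CST into a wider array type fills the tail
   with zeros, exactly the memcpy/memset pair.  */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  unsigned char buf[5];
  const char str[] = "hi";        /* string length 3, counting the NUL */
  memcpy (buf, str, sizeof (str));
  memset (buf + sizeof (str), 0, sizeof (buf) - sizeof (str));
  assert (buf[0] == 'h' && buf[1] == 'i');
  assert (buf[2] == 0 && buf[3] == 0 && buf[4] == 0);
  return 0;
}
#endif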
7531 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7532 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7533 buffer PTR of length LEN bytes. Return the number of bytes
7534 placed in the buffer, or zero upon failure. */
7536 int
7537 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7539 switch (TREE_CODE (expr))
7541 case INTEGER_CST:
7542 return native_encode_int (expr, ptr, len);
7544 case REAL_CST:
7545 return native_encode_real (expr, ptr, len);
7547 case FIXED_CST:
7548 return native_encode_fixed (expr, ptr, len);
7550 case COMPLEX_CST:
7551 return native_encode_complex (expr, ptr, len);
7553 case VECTOR_CST:
7554 return native_encode_vector (expr, ptr, len);
7556 case STRING_CST:
7557 return native_encode_string (expr, ptr, len);
7559 default:
7560 return 0;
7565 /* Subroutine of native_interpret_expr. Interpret the contents of
7566 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7567 If the buffer cannot be interpreted, return NULL_TREE. */
7569 static tree
7570 native_interpret_int (tree type, const unsigned char *ptr, int len)
7572 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7573 double_int result;
7575 if (total_bytes > len
7576 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7577 return NULL_TREE;
7579 result = double_int::from_buffer (ptr, total_bytes);
7581 return double_int_to_tree (type, result);
7585 /* Subroutine of native_interpret_expr. Interpret the contents of
7586 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7587 If the buffer cannot be interpreted, return NULL_TREE. */
7589 static tree
7590 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7592 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7593 double_int result;
7594 FIXED_VALUE_TYPE fixed_value;
7596 if (total_bytes > len
7597 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7598 return NULL_TREE;
7600 result = double_int::from_buffer (ptr, total_bytes);
7601 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7603 return build_fixed (type, fixed_value);
7607 /* Subroutine of native_interpret_expr. Interpret the contents of
7608 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7609 If the buffer cannot be interpreted, return NULL_TREE. */
7611 static tree
7612 native_interpret_real (tree type, const unsigned char *ptr, int len)
7614 enum machine_mode mode = TYPE_MODE (type);
7615 int total_bytes = GET_MODE_SIZE (mode);
7616 int byte, offset, word, words, bitpos;
7617 unsigned char value;
7618 /* There are always 32 bits in each long, no matter the size of
7619 the host's long. We handle floating point representations with
7620 up to 192 bits. */
7621 REAL_VALUE_TYPE r;
7622 long tmp[6];
7624 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7625 if (total_bytes > len || total_bytes > 24)
7626 return NULL_TREE;
7627 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7629 memset (tmp, 0, sizeof (tmp));
7630 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7631 bitpos += BITS_PER_UNIT)
7633 byte = (bitpos / BITS_PER_UNIT) & 3;
7634 if (UNITS_PER_WORD < 4)
7636 word = byte / UNITS_PER_WORD;
7637 if (WORDS_BIG_ENDIAN)
7638 word = (words - 1) - word;
7639 offset = word * UNITS_PER_WORD;
7640 if (BYTES_BIG_ENDIAN)
7641 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7642 else
7643 offset += byte % UNITS_PER_WORD;
7645 else
7646 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7647 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7649 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7652 real_from_target (&r, tmp, mode);
7653 return build_real (type, r);
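/* Illustrative sketch, not part of GCC: in the trivial case where the
   host and target share one floating-point layout, the encode and
   interpret routines above amount to a byte-level round trip.  */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  double orig = 3.141592653589793, back;
  unsigned char buf[sizeof (double)];
  memcpy (buf, &orig, sizeof (buf));  /* cf. native_encode_real */
  memcpy (&back, buf, sizeof (buf));  /* cf. native_interpret_real */
  assert (back == orig);
  return 0;
}
#endif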
7657 /* Subroutine of native_interpret_expr. Interpret the contents of
7658 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7659 If the buffer cannot be interpreted, return NULL_TREE. */
7661 static tree
7662 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7664 tree etype, rpart, ipart;
7665 int size;
7667 etype = TREE_TYPE (type);
7668 size = GET_MODE_SIZE (TYPE_MODE (etype));
7669 if (size * 2 > len)
7670 return NULL_TREE;
7671 rpart = native_interpret_expr (etype, ptr, size);
7672 if (!rpart)
7673 return NULL_TREE;
7674 ipart = native_interpret_expr (etype, ptr+size, size);
7675 if (!ipart)
7676 return NULL_TREE;
7677 return build_complex (type, rpart, ipart);
7681 /* Subroutine of native_interpret_expr. Interpret the contents of
7682 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7683 If the buffer cannot be interpreted, return NULL_TREE. */
7685 static tree
7686 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7688 tree etype, elem;
7689 int i, size, count;
7690 tree *elements;
7692 etype = TREE_TYPE (type);
7693 size = GET_MODE_SIZE (TYPE_MODE (etype));
7694 count = TYPE_VECTOR_SUBPARTS (type);
7695 if (size * count > len)
7696 return NULL_TREE;
7698 elements = XALLOCAVEC (tree, count);
7699 for (i = count - 1; i >= 0; i--)
7701 elem = native_interpret_expr (etype, ptr+(i*size), size);
7702 if (!elem)
7703 return NULL_TREE;
7704 elements[i] = elem;
7706 return build_vector (type, elements);
7710 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7711 the buffer PTR of length LEN as a constant of type TYPE. For
7712 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7713 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7714 return NULL_TREE. */
7716 tree
7717 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7719 switch (TREE_CODE (type))
7721 case INTEGER_TYPE:
7722 case ENUMERAL_TYPE:
7723 case BOOLEAN_TYPE:
7724 case POINTER_TYPE:
7725 case REFERENCE_TYPE:
7726 return native_interpret_int (type, ptr, len);
7728 case REAL_TYPE:
7729 return native_interpret_real (type, ptr, len);
7731 case FIXED_POINT_TYPE:
7732 return native_interpret_fixed (type, ptr, len);
7734 case COMPLEX_TYPE:
7735 return native_interpret_complex (type, ptr, len);
7737 case VECTOR_TYPE:
7738 return native_interpret_vector (type, ptr, len);
7740 default:
7741 return NULL_TREE;
7745 /* Returns true if we can interpret the contents of a native encoding
7746 as TYPE. */
7748 static bool
7749 can_native_interpret_type_p (tree type)
7751 switch (TREE_CODE (type))
7753 case INTEGER_TYPE:
7754 case ENUMERAL_TYPE:
7755 case BOOLEAN_TYPE:
7756 case POINTER_TYPE:
7757 case REFERENCE_TYPE:
7758 case FIXED_POINT_TYPE:
7759 case REAL_TYPE:
7760 case COMPLEX_TYPE:
7761 case VECTOR_TYPE:
7762 return true;
7763 default:
7764 return false;
7768 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7769 TYPE at compile-time. If we're unable to perform the conversion
7770 return NULL_TREE. */
7772 static tree
7773 fold_view_convert_expr (tree type, tree expr)
7775 /* We support up to 512-bit values (for V8DFmode). */
7776 unsigned char buffer[64];
7777 int len;
7779 /* Check that the host and target are sane. */
7780 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7781 return NULL_TREE;
7783 len = native_encode_expr (expr, buffer, sizeof (buffer));
7784 if (len == 0)
7785 return NULL_TREE;
7787 return native_interpret_expr (type, buffer, len);
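/* Illustrative sketch, not part of GCC: VIEW_CONVERT_EXPR reinterprets
   a value's bytes in another type, so fold_view_convert_expr computes
   at compile time what memcpy-style type punning does at run time; on
   an IEEE-754 target the bits of 1.0f read back as 0x3f800000.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t u;
  memcpy (&u, &f, sizeof (u));
  assert (u == 0x3f800000u);
  return 0;
}
#endif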
7790 /* Build an expression for the address of T. Folds away INDIRECT_REF
7791 to avoid confusing the gimplify process. */
7793 tree
7794 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7796 /* The size of the object is not relevant when talking about its address. */
7797 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7798 t = TREE_OPERAND (t, 0);
7800 if (TREE_CODE (t) == INDIRECT_REF)
7802 t = TREE_OPERAND (t, 0);
7804 if (TREE_TYPE (t) != ptrtype)
7805 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7807 else if (TREE_CODE (t) == MEM_REF
7808 && integer_zerop (TREE_OPERAND (t, 1)))
7809 return TREE_OPERAND (t, 0);
7810 else if (TREE_CODE (t) == MEM_REF
7811 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7812 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7813 TREE_OPERAND (t, 0),
7814 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7815 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7817 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7819 if (TREE_TYPE (t) != ptrtype)
7820 t = fold_convert_loc (loc, ptrtype, t);
7822 else
7823 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7825 return t;
7828 /* Build an expression for the address of T. */
7830 tree
7831 build_fold_addr_expr_loc (location_t loc, tree t)
7833 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7835 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7838 static bool vec_cst_ctor_to_array (tree, tree *);
7840 /* Fold a unary expression of code CODE and type TYPE with operand
7841 OP0. Return the folded expression if folding is successful.
7842 Otherwise, return NULL_TREE. */
7844 tree
7845 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7847 tree tem;
7848 tree arg0;
7849 enum tree_code_class kind = TREE_CODE_CLASS (code);
7851 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7852 && TREE_CODE_LENGTH (code) == 1);
7854 arg0 = op0;
7855 if (arg0)
7857 if (CONVERT_EXPR_CODE_P (code)
7858 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7860 /* Don't use STRIP_NOPS, because signedness of argument type
7861 matters. */
7862 STRIP_SIGN_NOPS (arg0);
7864 else
7866 /* Strip any conversions that don't change the mode. This
7867 is safe for every expression, except for a comparison
7868 expression because its signedness is derived from its
7869 operands.
7871 Note that this is done as an internal manipulation within
7872 the constant folder, in order to find the simplest
7873 representation of the arguments so that their form can be
7874 studied. In any case, the appropriate type conversions
7875 should be put back in the tree that will get out of the
7876 constant folder. */
7877 STRIP_NOPS (arg0);
7881 if (TREE_CODE_CLASS (code) == tcc_unary)
7883 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7884 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7885 fold_build1_loc (loc, code, type,
7886 fold_convert_loc (loc, TREE_TYPE (op0),
7887 TREE_OPERAND (arg0, 1))));
7888 else if (TREE_CODE (arg0) == COND_EXPR)
7890 tree arg01 = TREE_OPERAND (arg0, 1);
7891 tree arg02 = TREE_OPERAND (arg0, 2);
7892 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7893 arg01 = fold_build1_loc (loc, code, type,
7894 fold_convert_loc (loc,
7895 TREE_TYPE (op0), arg01));
7896 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7897 arg02 = fold_build1_loc (loc, code, type,
7898 fold_convert_loc (loc,
7899 TREE_TYPE (op0), arg02));
7900 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7901 arg01, arg02);
7903 /* If this was a conversion, and all we did was to move it
7904 inside the COND_EXPR, bring it back out. But leave it if
7905 it is a conversion from integer to integer and the
7906 result precision is no wider than a word since such a
7907 conversion is cheap and may be optimized away by combine,
7908 while it couldn't if it were outside the COND_EXPR. Then return
7909 so we don't get into an infinite recursion loop taking the
7910 conversion out and then back in. */
7912 if ((CONVERT_EXPR_CODE_P (code)
7913 || code == NON_LVALUE_EXPR)
7914 && TREE_CODE (tem) == COND_EXPR
7915 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7916 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7917 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7918 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7919 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7920 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7921 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7922 && (INTEGRAL_TYPE_P
7923 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7924 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7925 || flag_syntax_only))
7926 tem = build1_loc (loc, code, type,
7927 build3 (COND_EXPR,
7928 TREE_TYPE (TREE_OPERAND
7929 (TREE_OPERAND (tem, 1), 0)),
7930 TREE_OPERAND (tem, 0),
7931 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7932 TREE_OPERAND (TREE_OPERAND (tem, 2),
7933 0)));
7934 return tem;
7938 switch (code)
7940 case PAREN_EXPR:
7941 /* Re-association barriers around constants and other re-association
7942 barriers can be removed. */
7943 if (CONSTANT_CLASS_P (op0)
7944 || TREE_CODE (op0) == PAREN_EXPR)
7945 return fold_convert_loc (loc, type, op0);
7946 return NULL_TREE;
7948 CASE_CONVERT:
7949 case FLOAT_EXPR:
7950 case FIX_TRUNC_EXPR:
7951 if (TREE_TYPE (op0) == type)
7952 return op0;
7954 if (COMPARISON_CLASS_P (op0))
7956 /* If we have (type) (a CMP b) and type is an integral type, return
7957 new expression involving the new type. Canonicalize
7958 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7959 non-integral type.
7960 Do not fold the result as that would not simplify further, also
7961 folding again results in recursions. */
7962 if (TREE_CODE (type) == BOOLEAN_TYPE)
7963 return build2_loc (loc, TREE_CODE (op0), type,
7964 TREE_OPERAND (op0, 0),
7965 TREE_OPERAND (op0, 1));
7966 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7967 && TREE_CODE (type) != VECTOR_TYPE)
7968 return build3_loc (loc, COND_EXPR, type, op0,
7969 constant_boolean_node (true, type),
7970 constant_boolean_node (false, type));
7973 /* Handle cases of two conversions in a row. */
7974 if (CONVERT_EXPR_P (op0))
7976 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7977 tree inter_type = TREE_TYPE (op0);
7978 int inside_int = INTEGRAL_TYPE_P (inside_type);
7979 int inside_ptr = POINTER_TYPE_P (inside_type);
7980 int inside_float = FLOAT_TYPE_P (inside_type);
7981 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7982 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7983 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7984 int inter_int = INTEGRAL_TYPE_P (inter_type);
7985 int inter_ptr = POINTER_TYPE_P (inter_type);
7986 int inter_float = FLOAT_TYPE_P (inter_type);
7987 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7988 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7989 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7990 int final_int = INTEGRAL_TYPE_P (type);
7991 int final_ptr = POINTER_TYPE_P (type);
7992 int final_float = FLOAT_TYPE_P (type);
7993 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7994 unsigned int final_prec = TYPE_PRECISION (type);
7995 int final_unsignedp = TYPE_UNSIGNED (type);
7997 /* Check for cases specific to UPC, involving pointer types. */
7998 if (final_ptr || inter_ptr || inside_ptr)
8000 int final_pts = final_ptr
8001 && upc_shared_type_p (TREE_TYPE (type));
8002 int inter_pts = inter_ptr
8003 && upc_shared_type_p (TREE_TYPE (inter_type));
8004 int inside_pts = inside_ptr
8005 && upc_shared_type_p (TREE_TYPE (inside_type));
8006 if (final_pts || inter_pts || inside_pts)
8008 if (!((final_pts && inter_pts)
8009 && TREE_TYPE (type) == TREE_TYPE (inter_type))
8010 || ((inter_pts && inside_pts)
8011 && (TREE_TYPE (inter_type)
8012 == TREE_TYPE (inside_type))))
8013 return NULL;
8017 /* In addition to the cases of two conversions in a row
8018 handled below, if we are converting something to its own
8019 type via an object of identical or wider precision, neither
8020 conversion is needed. */
8021 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8022 && (((inter_int || inter_ptr) && final_int)
8023 || (inter_float && final_float))
8024 && inter_prec >= final_prec)
8025 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8027 /* Likewise, if the intermediate and initial types are either both
8028 float or both integer, we don't need the middle conversion if the
8029 former is wider than the latter and doesn't change the signedness
8030 (for integers). Avoid this if the final type is a pointer since
8031 then we sometimes need the middle conversion. Likewise if the
8032 final type has a precision not equal to the size of its mode. */
8033 if (((inter_int && inside_int)
8034 || (inter_float && inside_float)
8035 || (inter_vec && inside_vec))
8036 && inter_prec >= inside_prec
8037 && (inter_float || inter_vec
8038 || inter_unsignedp == inside_unsignedp)
8039 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8040 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8041 && ! final_ptr
8042 && (! final_vec || inter_prec == inside_prec))
8043 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8045 /* If we have a sign-extension of a zero-extended value, we can
8046 replace that by a single zero-extension. Likewise if the
8047 final conversion does not change precision we can drop the
8048 intermediate conversion. */
8049 if (inside_int && inter_int && final_int
8050 && ((inside_prec < inter_prec && inter_prec < final_prec
8051 && inside_unsignedp && !inter_unsignedp)
8052 || final_prec == inter_prec))
8053 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8055 /* Two conversions in a row are not needed unless:
8056 - some conversion is floating-point (overstrict for now), or
8057 - some conversion is a vector (overstrict for now), or
8058 - the intermediate type is narrower than both initial and
8059 final, or
8060 - the intermediate type and innermost type differ in signedness,
8061 and the outermost type is wider than the intermediate, or
8062 - the initial type is a pointer type and the precisions of the
8063 intermediate and final types differ, or
8064 - the final type is a pointer type and the precisions of the
8065 initial and intermediate types differ. */
8066 if (! inside_float && ! inter_float && ! final_float
8067 && ! inside_vec && ! inter_vec && ! final_vec
8068 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8069 && ! (inside_int && inter_int
8070 && inter_unsignedp != inside_unsignedp
8071 && inter_prec < final_prec)
8072 && ((inter_unsignedp && inter_prec > inside_prec)
8073 == (final_unsignedp && final_prec > inter_prec))
8074 && ! (inside_ptr && inter_prec != final_prec)
8075 && ! (final_ptr && inside_prec != inter_prec)
8076 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8077 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8078 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
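      /* Concrete instances of the rules above (editorial annotation,
         not from GCC; assumes 8-bit char, 16-bit short, 32-bit int and
         64-bit long):
           (int)(long)x, x an int: converting back to its own type via
             a wider object, so the whole chain folds to x;
           (int)(short)u, u an unsigned char: folds to (int)u, a single
             zero-extension;
           (short)(int)s, s a short: the intermediate widening is
             dropped, (short)(int)s -> (short)s -> s.  */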
8081 /* Handle (T *)&A.B.C for A being of type T and B and C
8082 living at offset zero. This occurs frequently in
8083 C++ upcasting and then accessing the base. */
8084 if (TREE_CODE (op0) == ADDR_EXPR
8085 && POINTER_TYPE_P (type)
8086 && handled_component_p (TREE_OPERAND (op0, 0)))
8088 HOST_WIDE_INT bitsize, bitpos;
8089 tree offset;
8090 enum machine_mode mode;
8091 int unsignedp, volatilep;
8092 tree base = TREE_OPERAND (op0, 0);
8093 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8094 &mode, &unsignedp, &volatilep, false);
8095 /* If the reference was to a (constant) zero offset, we can use
8096 the address of the base if it has the same base type
8097 as the result type and the pointer type is unqualified. */
8098 if (! offset && bitpos == 0
8099 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8100 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8101 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8102 return fold_convert_loc (loc, type,
8103 build_fold_addr_expr_loc (loc, base));
8106 if (TREE_CODE (op0) == MODIFY_EXPR
8107 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8108 /* Detect assigning a bitfield. */
8109 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8110 && DECL_BIT_FIELD
8111 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8113 /* Don't leave an assignment inside a conversion
8114 unless assigning a bitfield. */
8115 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8116 /* First do the assignment, then return converted constant. */
8117 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8118 TREE_NO_WARNING (tem) = 1;
8119 TREE_USED (tem) = 1;
8120 return tem;
8123 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8124 constant (if x has signed type, the sign bit cannot be set
8125 in c). This folds extension into the BIT_AND_EXPR.
8126 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8127 very likely don't have maximal range for their precision and this
8128 transformation effectively doesn't preserve non-maximal ranges. */
8129 if (TREE_CODE (type) == INTEGER_TYPE
8130 && TREE_CODE (op0) == BIT_AND_EXPR
8131 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8133 tree and_expr = op0;
8134 tree and0 = TREE_OPERAND (and_expr, 0);
8135 tree and1 = TREE_OPERAND (and_expr, 1);
8136 int change = 0;
8138 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8139 || (TYPE_PRECISION (type)
8140 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8141 change = 1;
8142 else if (TYPE_PRECISION (TREE_TYPE (and1))
8143 <= HOST_BITS_PER_WIDE_INT
8144 && tree_fits_uhwi_p (and1))
8146 unsigned HOST_WIDE_INT cst;
8148 cst = tree_to_uhwi (and1);
8149 cst &= HOST_WIDE_INT_M1U
8150 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8151 change = (cst == 0);
8152 #ifdef LOAD_EXTEND_OP
8153 if (change
8154 && !flag_syntax_only
8155 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8156 == ZERO_EXTEND))
8158 tree uns = unsigned_type_for (TREE_TYPE (and0));
8159 and0 = fold_convert_loc (loc, uns, and0);
8160 and1 = fold_convert_loc (loc, uns, and1);
8162 #endif
8164 if (change)
8166 tem = force_fit_type_double (type, tree_to_double_int (and1),
8167 0, TREE_OVERFLOW (and1));
8168 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8169 fold_convert_loc (loc, type, and0), tem);
8173 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8174 when one of the new casts will fold away. Conservatively we assume
8175 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8176 if (POINTER_TYPE_P (type)
8177 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8178 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8179 && !upc_shared_type_p (TREE_TYPE (type))
8180 && !upc_shared_type_p (TREE_TYPE (
8181 TREE_TYPE (TREE_OPERAND (arg0, 0))))
8182 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8183 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8184 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8186 tree arg00 = TREE_OPERAND (arg0, 0);
8187 tree arg01 = TREE_OPERAND (arg0, 1);
8189 return fold_build_pointer_plus_loc
8190 (loc, fold_convert_loc (loc, type, arg00), arg01);
8193 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8194 of the same precision, and X is an integer type not narrower than
8195 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8196 if (INTEGRAL_TYPE_P (type)
8197 && TREE_CODE (op0) == BIT_NOT_EXPR
8198 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8199 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8200 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8202 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8203 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8204 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8205 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8206 fold_convert_loc (loc, type, tem));
8209 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8210 type of X and Y (integer types only). */
8211 if (INTEGRAL_TYPE_P (type)
8212 && TREE_CODE (op0) == MULT_EXPR
8213 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8214 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8216 /* Be careful not to introduce new overflows. */
8217 tree mult_type;
8218 if (TYPE_OVERFLOW_WRAPS (type))
8219 mult_type = type;
8220 else
8221 mult_type = unsigned_type_for (type);
8223 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8225 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8226 fold_convert_loc (loc, mult_type,
8227 TREE_OPERAND (op0, 0)),
8228 fold_convert_loc (loc, mult_type,
8229 TREE_OPERAND (op0, 1)));
8230 return fold_convert_loc (loc, type, tem);
8234 tem = fold_convert_const (code, type, op0);
8235 return tem ? tem : NULL_TREE;
8237 case ADDR_SPACE_CONVERT_EXPR:
8238 if (integer_zerop (arg0))
8239 return fold_convert_const (code, type, arg0);
8240 return NULL_TREE;
8242 case FIXED_CONVERT_EXPR:
8243 tem = fold_convert_const (code, type, arg0);
8244 return tem ? tem : NULL_TREE;
8246 case VIEW_CONVERT_EXPR:
8247 if (TREE_TYPE (op0) == type)
8248 return op0;
8249 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8250 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8251 type, TREE_OPERAND (op0, 0));
8252 if (TREE_CODE (op0) == MEM_REF)
8253 return fold_build2_loc (loc, MEM_REF, type,
8254 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8256 /* For integral conversions with the same precision or pointer
8257 conversions use a NOP_EXPR instead. */
8258 if ((INTEGRAL_TYPE_P (type)
8259 || (POINTER_TYPE_P (type)
8260 && !upc_shared_type_p (TREE_TYPE (type))))
8261 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8262 || (POINTER_TYPE_P (TREE_TYPE (op0))
8263 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8264 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8265 return fold_convert_loc (loc, type, op0);
8267 /* Strip inner integral conversions that do not change the precision. */
8268 if (CONVERT_EXPR_P (op0)
8269 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8270 || (POINTER_TYPE_P (TREE_TYPE (op0))
8271 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8272 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8273 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8274 && !upc_shared_type_p (TREE_TYPE (
8275 TREE_TYPE (
8276 TREE_OPERAND (op0, 0))))))
8277 && (TYPE_PRECISION (TREE_TYPE (op0))
8278 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8279 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8280 type, TREE_OPERAND (op0, 0));
8282 return fold_view_convert_expr (type, op0);
8284 case NEGATE_EXPR:
8285 tem = fold_negate_expr (loc, arg0);
8286 if (tem)
8287 return fold_convert_loc (loc, type, tem);
8288 return NULL_TREE;
8290 case ABS_EXPR:
8291 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8292 return fold_abs_const (arg0, type);
8293 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8294 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8295 /* Convert fabs((double)float) into (double)fabsf(float). */
8296 else if (TREE_CODE (arg0) == NOP_EXPR
8297 && TREE_CODE (type) == REAL_TYPE)
8299 tree targ0 = strip_float_extensions (arg0);
8300 if (targ0 != arg0)
8301 return fold_convert_loc (loc, type,
8302 fold_build1_loc (loc, ABS_EXPR,
8303 TREE_TYPE (targ0),
8304 targ0));
8306 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8307 else if (TREE_CODE (arg0) == ABS_EXPR)
8308 return arg0;
8309 else if (tree_expr_nonnegative_p (arg0))
8310 return arg0;
8312 /* Strip sign ops from argument. */
8313 if (TREE_CODE (type) == REAL_TYPE)
8315 tem = fold_strip_sign_ops (arg0);
8316 if (tem)
8317 return fold_build1_loc (loc, ABS_EXPR, type,
8318 fold_convert_loc (loc, type, tem));
8320 return NULL_TREE;
8322 case CONJ_EXPR:
8323 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8324 return fold_convert_loc (loc, type, arg0);
8325 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8327 tree itype = TREE_TYPE (type);
8328 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8329 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8330 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8331 negate_expr (ipart));
8333 if (TREE_CODE (arg0) == COMPLEX_CST)
8335 tree itype = TREE_TYPE (type);
8336 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8337 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8338 return build_complex (type, rpart, negate_expr (ipart));
8340 if (TREE_CODE (arg0) == CONJ_EXPR)
8341 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8342 return NULL_TREE;
8344 case BIT_NOT_EXPR:
8345 if (TREE_CODE (arg0) == INTEGER_CST)
8346 return fold_not_const (arg0, type);
8347 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8348 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8349 /* Convert ~ (-A) to A - 1. */
8350 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8351 return fold_build2_loc (loc, MINUS_EXPR, type,
8352 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8353 build_int_cst (type, 1));
8354 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8355 else if (INTEGRAL_TYPE_P (type)
8356 && ((TREE_CODE (arg0) == MINUS_EXPR
8357 && integer_onep (TREE_OPERAND (arg0, 1)))
8358 || (TREE_CODE (arg0) == PLUS_EXPR
8359 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8360 return fold_build1_loc (loc, NEGATE_EXPR, type,
8361 fold_convert_loc (loc, type,
8362 TREE_OPERAND (arg0, 0)));
8363 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8364 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8365 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8366 fold_convert_loc (loc, type,
8367 TREE_OPERAND (arg0, 0)))))
8368 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8369 fold_convert_loc (loc, type,
8370 TREE_OPERAND (arg0, 1)));
8371 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8372 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8373 fold_convert_loc (loc, type,
8374 TREE_OPERAND (arg0, 1)))))
8375 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8376 fold_convert_loc (loc, type,
8377 TREE_OPERAND (arg0, 0)), tem);
8378 /* Perform BIT_NOT_EXPR on each element individually. */
8379 else if (TREE_CODE (arg0) == VECTOR_CST)
8381 tree *elements;
8382 tree elem;
8383 unsigned count = VECTOR_CST_NELTS (arg0), i;
8385 elements = XALLOCAVEC (tree, count);
8386 for (i = 0; i < count; i++)
8388 elem = VECTOR_CST_ELT (arg0, i);
8389 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8390 if (elem == NULL_TREE)
8391 break;
8392 elements[i] = elem;
8394 if (i == count)
8395 return build_vector (type, elements);
8397 else if (COMPARISON_CLASS_P (arg0)
8398 && (VECTOR_TYPE_P (type)
8399 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8401 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8402 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8403 HONOR_NANS (TYPE_MODE (op_type)));
8404 if (subcode != ERROR_MARK)
8405 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8406 TREE_OPERAND (arg0, 1));
8410 return NULL_TREE;
8412 case TRUTH_NOT_EXPR:
8413 /* Note that the operand of this must be an int
8414 and its values must be 0 or 1.
8415 ("true" is a fixed value perhaps depending on the language,
8416 but we don't handle values other than 1 correctly yet.) */
8417 tem = fold_truth_not_expr (loc, arg0);
8418 if (!tem)
8419 return NULL_TREE;
8420 return fold_convert_loc (loc, type, tem);
8422 case REALPART_EXPR:
8423 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8424 return fold_convert_loc (loc, type, arg0);
8425 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8426 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8427 TREE_OPERAND (arg0, 1));
8428 if (TREE_CODE (arg0) == COMPLEX_CST)
8429 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8430 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8432 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8433 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8434 fold_build1_loc (loc, REALPART_EXPR, itype,
8435 TREE_OPERAND (arg0, 0)),
8436 fold_build1_loc (loc, REALPART_EXPR, itype,
8437 TREE_OPERAND (arg0, 1)));
8438 return fold_convert_loc (loc, type, tem);
8440 if (TREE_CODE (arg0) == CONJ_EXPR)
8442 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8443 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8444 TREE_OPERAND (arg0, 0));
8445 return fold_convert_loc (loc, type, tem);
8447 if (TREE_CODE (arg0) == CALL_EXPR)
8449 tree fn = get_callee_fndecl (arg0);
8450 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8451 switch (DECL_FUNCTION_CODE (fn))
8453 CASE_FLT_FN (BUILT_IN_CEXPI):
8454 fn = mathfn_built_in (type, BUILT_IN_COS);
8455 if (fn)
8456 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8457 break;
8459 default:
8460 break;
8463 return NULL_TREE;
8465 case IMAGPART_EXPR:
8466 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8467 return build_zero_cst (type);
8468 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8469 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8470 TREE_OPERAND (arg0, 0));
8471 if (TREE_CODE (arg0) == COMPLEX_CST)
8472 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8473 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8475 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8476 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8477 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8478 TREE_OPERAND (arg0, 0)),
8479 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8480 TREE_OPERAND (arg0, 1)));
8481 return fold_convert_loc (loc, type, tem);
8483 if (TREE_CODE (arg0) == CONJ_EXPR)
8485 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8486 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8487 return fold_convert_loc (loc, type, negate_expr (tem));
8489 if (TREE_CODE (arg0) == CALL_EXPR)
8491 tree fn = get_callee_fndecl (arg0);
8492 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8493 switch (DECL_FUNCTION_CODE (fn))
8495 CASE_FLT_FN (BUILT_IN_CEXPI):
8496 fn = mathfn_built_in (type, BUILT_IN_SIN);
8497 if (fn)
8498 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8499 break;
8501 default:
8502 break;
8505 return NULL_TREE;
8507 case INDIRECT_REF:
8508 /* Fold *&X to X if X is an lvalue. */
8509 if (TREE_CODE (op0) == ADDR_EXPR)
8511 tree op00 = TREE_OPERAND (op0, 0);
8512 if ((TREE_CODE (op00) == VAR_DECL
8513 || TREE_CODE (op00) == PARM_DECL
8514 || TREE_CODE (op00) == RESULT_DECL)
8515 && !TREE_READONLY (op00))
8516 return op00;
8518 return NULL_TREE;
8520 case VEC_UNPACK_LO_EXPR:
8521 case VEC_UNPACK_HI_EXPR:
8522 case VEC_UNPACK_FLOAT_LO_EXPR:
8523 case VEC_UNPACK_FLOAT_HI_EXPR:
8525 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8526 tree *elts;
8527 enum tree_code subcode;
8529 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8530 if (TREE_CODE (arg0) != VECTOR_CST)
8531 return NULL_TREE;
8533 elts = XALLOCAVEC (tree, nelts * 2);
8534 if (!vec_cst_ctor_to_array (arg0, elts))
8535 return NULL_TREE;
8537 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8538 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8539 elts += nelts;
8541 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8542 subcode = NOP_EXPR;
8543 else
8544 subcode = FLOAT_EXPR;
8546 for (i = 0; i < nelts; i++)
8548 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8549 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8550 return NULL_TREE;
8553 return build_vector (type, elts);
8556 case REDUC_MIN_EXPR:
8557 case REDUC_MAX_EXPR:
8558 case REDUC_PLUS_EXPR:
8560 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8561 tree *elts;
8562 enum tree_code subcode;
8564 if (TREE_CODE (op0) != VECTOR_CST)
8565 return NULL_TREE;
8567 elts = XALLOCAVEC (tree, nelts);
8568 if (!vec_cst_ctor_to_array (op0, elts))
8569 return NULL_TREE;
8571 switch (code)
8573 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8574 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8575 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8576 default: gcc_unreachable ();
8579 for (i = 1; i < nelts; i++)
8581 elts[0] = const_binop (subcode, elts[0], elts[i]);
8582 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8583 return NULL_TREE;
8584 elts[i] = build_zero_cst (TREE_TYPE (type));
8587 return build_vector (type, elts);
8590 default:
8591 return NULL_TREE;
8592 } /* switch (code) */
8596 /* If the operation was a conversion, do _not_ mark a resulting constant
8597 with TREE_OVERFLOW if the original constant was not. These conversions
8598 have implementation defined behavior and retaining the TREE_OVERFLOW
8599 flag here would confuse later passes such as VRP. */
8600 tree
8601 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8602 tree type, tree op0)
8604 tree res = fold_unary_loc (loc, code, type, op0);
8605 if (res
8606 && TREE_CODE (res) == INTEGER_CST
8607 && TREE_CODE (op0) == INTEGER_CST
8608 && CONVERT_EXPR_CODE_P (code))
8609 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8611 return res;
8614 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8615 operands OP0 and OP1. LOC is the location of the resulting expression.
8616 ARG0 and ARG1 are the NOP-stripped (via STRIP_NOPS) results of OP0 and OP1.
8617 Return the folded expression if folding is successful. Otherwise,
8618 return NULL_TREE. */
8619 static tree
8620 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8621 tree arg0, tree arg1, tree op0, tree op1)
8623 tree tem;
8625 /* We only do these simplifications if we are optimizing. */
8626 if (!optimize)
8627 return NULL_TREE;
8629 /* Check for things like (A || B) && (A || C). We can convert this
8630 to A || (B && C). Note that either operator can be any of the four
8631 truth and/or operations and the transformation will still be
8632 valid. Also note that we only care about order for the
8633 ANDIF and ORIF operators. If B contains side effects, this
8634 might change the truth-value of A. */
8635 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8636 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8637 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8638 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8639 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8640 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8642 tree a00 = TREE_OPERAND (arg0, 0);
8643 tree a01 = TREE_OPERAND (arg0, 1);
8644 tree a10 = TREE_OPERAND (arg1, 0);
8645 tree a11 = TREE_OPERAND (arg1, 1);
8646 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8647 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8648 && (code == TRUTH_AND_EXPR
8649 || code == TRUTH_OR_EXPR));
8651 if (operand_equal_p (a00, a10, 0))
8652 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8653 fold_build2_loc (loc, code, type, a01, a11));
8654 else if (commutative && operand_equal_p (a00, a11, 0))
8655 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8656 fold_build2_loc (loc, code, type, a01, a10));
8657 else if (commutative && operand_equal_p (a01, a10, 0))
8658 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8659 fold_build2_loc (loc, code, type, a00, a11));
8661 /* This case is tricky because we must either have commutative
8662 operators or else A10 must not have side-effects. */
8664 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8665 && operand_equal_p (a01, a11, 0))
8666 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8667 fold_build2_loc (loc, code, type, a00, a10),
8668 a01);
8671 /* See if we can build a range comparison. */
8672 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8673 return tem;
8675 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8676 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8678 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8679 if (tem)
8680 return fold_build2_loc (loc, code, type, tem, arg1);
8683 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8684 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8686 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8687 if (tem)
8688 return fold_build2_loc (loc, code, type, arg0, tem);
8691 /* Check for the possibility of merging component references. If our
8692 lhs is another similar operation, try to merge its rhs with our
8693 rhs. Then try to merge our lhs and rhs. */
8694 if (TREE_CODE (arg0) == code
8695 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8696 TREE_OPERAND (arg0, 1), arg1)))
8697 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8699 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8700 return tem;
8702 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8703 && (code == TRUTH_AND_EXPR
8704 || code == TRUTH_ANDIF_EXPR
8705 || code == TRUTH_OR_EXPR
8706 || code == TRUTH_ORIF_EXPR))
8708 enum tree_code ncode, icode;
8710 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8711 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8712 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8714 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8715 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8716 We don't want to pack more than two leaves into a non-IF AND/OR
8717 expression.
8718 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8719 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8720 If the inner right-hand side of the left-hand operand has
8721 side-effects, or isn't simple, then we can't add to it,
8722 as otherwise we might destroy the if-sequence. */
8723 if (TREE_CODE (arg0) == icode
8724 && simple_operand_p_2 (arg1)
8725 /* Needed for sequence points to handle trapping and
8726 side-effects. */
8727 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8729 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8730 arg1);
8731 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8732 tem);
8734 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8735 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8736 else if (TREE_CODE (arg1) == icode
8737 && simple_operand_p_2 (arg0)
8738 /* Needed for sequence points to handle trapping and
8739 side-effects. */
8740 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8742 tem = fold_build2_loc (loc, ncode, type,
8743 arg0, TREE_OPERAND (arg1, 0));
8744 return fold_build2_loc (loc, icode, type, tem,
8745 TREE_OPERAND (arg1, 1));
8747 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8748 into (A OR B).
8749 For sequence point consistency, we need to check for trapping,
8750 and side-effects. */
8751 else if (code == icode && simple_operand_p_2 (arg0)
8752 && simple_operand_p_2 (arg1))
8753 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8756 return NULL_TREE;
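/* Illustrative sketch, not part of GCC: truth-table check of the
   (A || B) && (A || C) -> A || (B && C) distribution performed above
   (valid only when evaluating B cannot change A, as the comment in
   fold_truth_andor notes).  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
#endif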
8759 /* Fold a binary expression of code CODE and type TYPE with operands
8760 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8761 Return the folded expression if folding is successful. Otherwise,
8762 return NULL_TREE. */
8764 static tree
8765 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8767 enum tree_code compl_code;
8769 if (code == MIN_EXPR)
8770 compl_code = MAX_EXPR;
8771 else if (code == MAX_EXPR)
8772 compl_code = MIN_EXPR;
8773 else
8774 gcc_unreachable ();
8776 /* MIN (MAX (a, b), b) == b. */
8777 if (TREE_CODE (op0) == compl_code
8778 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8779 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8781 /* MIN (MAX (b, a), b) == b. */
8782 if (TREE_CODE (op0) == compl_code
8783 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8784 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8785 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8787 /* MIN (a, MAX (a, b)) == a. */
8788 if (TREE_CODE (op1) == compl_code
8789 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8790 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8791 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8793 /* MIN (a, MAX (b, a)) == a. */
8794 if (TREE_CODE (op1) == compl_code
8795 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8796 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8797 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8799 return NULL_TREE;
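/* Illustrative sketch, not part of GCC: the four MIN/MAX absorption
   identities used above, checked by brute force on small ints.  */
#if 0
#include <assert.h>

#define MIN(x, y) ((x) < (y) ? (x) : (y))
#define MAX(x, y) ((x) > (y) ? (x) : (y))

int
main (void)
{
  for (int a = -4; a <= 4; a++)
    for (int b = -4; b <= 4; b++)
      {
        assert (MIN (MAX (a, b), b) == b);
        assert (MIN (MAX (b, a), b) == b);
        assert (MIN (a, MAX (a, b)) == a);
        assert (MIN (a, MAX (b, a)) == a);
      }
  return 0;
}
#endif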
8802 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8803 by changing CODE to reduce the magnitude of constants involved in
8804 ARG0 of the comparison.
8805 Returns a canonicalized comparison tree if a simplification was
8806 possible, otherwise returns NULL_TREE.
8807 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8808 valid if signed overflow is undefined. */
8810 static tree
8811 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8812 tree arg0, tree arg1,
8813 bool *strict_overflow_p)
8815 enum tree_code code0 = TREE_CODE (arg0);
8816 tree t, cst0 = NULL_TREE;
8817 int sgn0;
8818 bool swap = false;
8820 /* Match A +- CST code arg1 and CST code arg1. We can change the
8821 first form only if overflow is undefined. */
8822 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8823 /* In principle pointers also have undefined overflow behavior,
8824 but that causes problems elsewhere. */
8825 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8826 && (code0 == MINUS_EXPR
8827 || code0 == PLUS_EXPR)
8828 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8829 || code0 == INTEGER_CST))
8830 return NULL_TREE;
8832 /* Identify the constant in arg0 and its sign. */
8833 if (code0 == INTEGER_CST)
8834 cst0 = arg0;
8835 else
8836 cst0 = TREE_OPERAND (arg0, 1);
8837 sgn0 = tree_int_cst_sgn (cst0);
8839 /* Overflowed constants and zero will cause problems. */
8840 if (integer_zerop (cst0)
8841 || TREE_OVERFLOW (cst0))
8842 return NULL_TREE;
8844 /* See if we can reduce the magnitude of the constant in
8845 arg0 by changing the comparison code. */
8846 if (code0 == INTEGER_CST)
8848 /* CST <= arg1 -> CST-1 < arg1. */
8849 if (code == LE_EXPR && sgn0 == 1)
8850 code = LT_EXPR;
8851 /* -CST < arg1 -> -CST-1 <= arg1. */
8852 else if (code == LT_EXPR && sgn0 == -1)
8853 code = LE_EXPR;
8854 /* CST > arg1 -> CST-1 >= arg1. */
8855 else if (code == GT_EXPR && sgn0 == 1)
8856 code = GE_EXPR;
8857 /* -CST >= arg1 -> -CST-1 > arg1. */
8858 else if (code == GE_EXPR && sgn0 == -1)
8859 code = GT_EXPR;
8860 else
8861 return NULL_TREE;
8862 /* arg1 code' CST' might be more canonical. */
8863 swap = true;
8865 else
8867 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8868 if (code == LT_EXPR
8869 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8870 code = LE_EXPR;
8871 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8872 else if (code == GT_EXPR
8873 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8874 code = GE_EXPR;
8875 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8876 else if (code == LE_EXPR
8877 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8878 code = LT_EXPR;
8879 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8880 else if (code == GE_EXPR
8881 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8882 code = GT_EXPR;
8883 else
8884 return NULL_TREE;
8885 *strict_overflow_p = true;
8888 /* Now build the constant reduced in magnitude. But not if that
8889 would produce one outside of its type's range. */
8890 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8891 && ((sgn0 == 1
8892 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8893 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8894 || (sgn0 == -1
8895 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8896 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8897 /* We cannot swap the comparison here as that would cause us to
8898 endlessly recurse. */
8899 return NULL_TREE;
8901 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8902 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8903 if (code0 != INTEGER_CST)
8904 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8905 t = fold_convert (TREE_TYPE (arg1), t);
8907 /* If swapping might yield a more canonical form, do so. */
8908 if (swap)
8909 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8910 else
8911 return fold_build2_loc (loc, code, type, t, arg1);
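/* Illustrative sketch, not part of GCC: two instances of the
   magnitude reductions above, valid on ints as long as the +- CST
   does not overflow (the assumption recorded in *STRICT_OVERFLOW_P):
     CST <= arg1     ->  CST-1 < arg1,      e.g.  3 <= y     ->  2 < y
     A + CST <= arg1 ->  A + CST-1 < arg1,  e.g.  x + 2 <= y ->  x + 1 < y  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -6; x <= 6; x++)
    for (int y = -6; y <= 6; y++)
      {
        assert ((3 <= y) == (2 < y));
        assert ((x + 2 <= y) == (x + 1 < y));
      }
  return 0;
}
#endif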
8914 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8915 overflow further. Try to decrease the magnitude of constants involved
8916 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8917 and put sole constants at the second argument position.
8918 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8920 static tree
8921 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8922 tree arg0, tree arg1)
8924 tree t;
8925 bool strict_overflow_p;
8926 const char * const warnmsg = G_("assuming signed overflow does not occur "
8927 "when reducing constant in comparison");
8929 /* Try canonicalization by simplifying arg0. */
8930 strict_overflow_p = false;
8931 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8932 &strict_overflow_p);
8933 if (t)
8935 if (strict_overflow_p)
8936 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8937 return t;
8940 /* Try canonicalization by simplifying arg1 using the swapped
8941 comparison. */
8942 code = swap_tree_comparison (code);
8943 strict_overflow_p = false;
8944 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8945 &strict_overflow_p);
8946 if (t && strict_overflow_p)
8947 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8948 return t;
8951 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8952 space. This is used to avoid issuing overflow warnings for
8953 expressions like &p->x which cannot wrap. */
8955 static bool
8956 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8958 double_int di_offset, total;
8960 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8961 return true;
8963 if (bitpos < 0)
8964 return true;
8966 if (offset == NULL_TREE)
8967 di_offset = double_int_zero;
8968 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8969 return true;
8970 else
8971 di_offset = TREE_INT_CST (offset);
8973 bool overflow;
8974 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8975 total = di_offset.add_with_sign (units, true, &overflow);
8976 if (overflow)
8977 return true;
8979 if (total.high != 0)
8980 return true;
8982 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8983 if (size <= 0)
8984 return true;
8986 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8987 array. */
8988 if (TREE_CODE (base) == ADDR_EXPR)
8990 HOST_WIDE_INT base_size;
8992 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8993 if (base_size > 0 && size < base_size)
8994 size = base_size;
8997 return total.low > (unsigned HOST_WIDE_INT) size;
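/* A sketch of the intent, assuming a typical LP64 target: given
   "struct s { int a; int b; } *p", the address &p->b is p + 4, and the
   offset 4 is checked against sizeof (struct s) == 8 rather than against
   the whole address space; a valid object cannot straddle the end of the
   address space, so the addition to a valid pointer cannot wrap and no
   overflow warning is needed. */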
9000 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
9001 of sizetype kind. This makes sure to properly sign-extend the
9002 constant. */
9004 static HOST_WIDE_INT
9005 size_low_cst (const_tree t)
9007 double_int d = tree_to_double_int (t);
9008 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
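/* For example, on a host with a 64-bit HOST_WIDE_INT and a hypothetical
   32-bit sizetype, the constant 0xffffffff sign-extends to -1 rather than
   to the zero-extended value 4294967295. */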
9011 /* Subroutine of fold_binary. This routine performs all of the
9012 transformations that are common to the equality/inequality
9013 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9014 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9015 fold_binary itself should call fold_binary instead. Fold a comparison with
9016 tree code CODE and type TYPE with operands OP0 and OP1. Return
9017 the folded comparison or NULL_TREE. */
9019 static tree
9020 fold_comparison (location_t loc, enum tree_code code, tree type,
9021 tree op0, tree op1)
9023 tree arg0, arg1, tem;
9025 arg0 = op0;
9026 arg1 = op1;
9028 STRIP_SIGN_NOPS (arg0);
9029 STRIP_SIGN_NOPS (arg1);
9031 tem = fold_relational_const (code, type, arg0, arg1);
9032 if (tem != NULL_TREE)
9033 return tem;
9035 /* If one arg is a real or integer constant, put it last. */
9036 if (tree_swap_operands_p (arg0, arg1, true))
9037 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9039 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9040 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9041 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9042 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9043 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9044 && (TREE_CODE (arg1) == INTEGER_CST
9045 && !TREE_OVERFLOW (arg1)))
9047 tree const1 = TREE_OPERAND (arg0, 1);
9048 tree const2 = arg1;
9049 tree variable = TREE_OPERAND (arg0, 0);
9050 tree lhs;
9051 int lhs_add;
9052 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9054 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9055 TREE_TYPE (arg1), const2, const1);
9057 /* If the constant operation overflowed this can be
9058 simplified as a comparison against INT_MAX/INT_MIN. */
9059 if (TREE_CODE (lhs) == INTEGER_CST
9060 && TREE_OVERFLOW (lhs))
9062 int const1_sgn = tree_int_cst_sgn (const1);
9063 enum tree_code code2 = code;
9065 /* Get the sign of the constant on the lhs as if the
9066 operation were VARIABLE + CONST1. */
9067 if (TREE_CODE (arg0) == MINUS_EXPR)
9068 const1_sgn = -const1_sgn;
9070 /* The sign of the constant determines if we overflowed
9071 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9072 Canonicalize to the INT_MIN overflow by swapping the comparison
9073 if necessary. */
9074 if (const1_sgn == -1)
9075 code2 = swap_tree_comparison (code);
9077 /* We now can look at the canonicalized case
9078 VARIABLE + 1 CODE2 INT_MIN
9079 and decide on the result. */
9080 if (code2 == LT_EXPR
9081 || code2 == LE_EXPR
9082 || code2 == EQ_EXPR)
9083 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9084 else if (code2 == NE_EXPR
9085 || code2 == GE_EXPR
9086 || code2 == GT_EXPR)
9087 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9090 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9091 && (TREE_CODE (lhs) != INTEGER_CST
9092 || !TREE_OVERFLOW (lhs)))
9094 if (code != EQ_EXPR && code != NE_EXPR)
9095 fold_overflow_warning ("assuming signed overflow does not occur "
9096 "when changing X +- C1 cmp C2 to "
9097 "X cmp C1 +- C2",
9098 WARN_STRICT_OVERFLOW_COMPARISON);
9099 return fold_build2_loc (loc, code, type, variable, lhs);
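/* For example, with signed x, "x + 1 < 3" becomes "x < 2". When the
   combined constant overflows, as in "x - 1 < INT_MAX", the overflow
   handling above folds the comparison to a constant (always true here,
   since x - 1 == INT_MAX would require x itself to overflow). */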
9103 /* For comparisons of pointers we can decompose it to a compile time
9104 comparison of the base objects and the offsets into the object.
9105 This requires at least one operand being an ADDR_EXPR or a
9106 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9107 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9108 && (TREE_CODE (arg0) == ADDR_EXPR
9109 || TREE_CODE (arg1) == ADDR_EXPR
9110 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9111 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9113 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9114 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9115 enum machine_mode mode;
9116 int volatilep, unsignedp;
9117 bool indirect_base0 = false, indirect_base1 = false;
9119 /* Get base and offset for the access. Strip ADDR_EXPR for
9120 get_inner_reference, but put it back by stripping INDIRECT_REF
9121 off the base object if possible. indirect_baseN will be true
9122 if baseN is not an address but refers to the object itself. */
9123 base0 = arg0;
9124 if (TREE_CODE (arg0) == ADDR_EXPR)
9126 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9127 &bitsize, &bitpos0, &offset0, &mode,
9128 &unsignedp, &volatilep, false);
9129 if (TREE_CODE (base0) == INDIRECT_REF)
9130 base0 = TREE_OPERAND (base0, 0);
9131 else
9132 indirect_base0 = true;
9134 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9136 base0 = TREE_OPERAND (arg0, 0);
9137 STRIP_SIGN_NOPS (base0);
9138 if (TREE_CODE (base0) == ADDR_EXPR)
9140 base0 = TREE_OPERAND (base0, 0);
9141 indirect_base0 = true;
9143 offset0 = TREE_OPERAND (arg0, 1);
9144 if (tree_fits_shwi_p (offset0))
9146 HOST_WIDE_INT off = size_low_cst (offset0);
9147 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9148 * BITS_PER_UNIT)
9149 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9151 bitpos0 = off * BITS_PER_UNIT;
9152 offset0 = NULL_TREE;
9157 base1 = arg1;
9158 if (TREE_CODE (arg1) == ADDR_EXPR)
9160 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9161 &bitsize, &bitpos1, &offset1, &mode,
9162 &unsignedp, &volatilep, false);
9163 if (TREE_CODE (base1) == INDIRECT_REF)
9164 base1 = TREE_OPERAND (base1, 0);
9165 else
9166 indirect_base1 = true;
9168 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9170 base1 = TREE_OPERAND (arg1, 0);
9171 STRIP_SIGN_NOPS (base1);
9172 if (TREE_CODE (base1) == ADDR_EXPR)
9174 base1 = TREE_OPERAND (base1, 0);
9175 indirect_base1 = true;
9177 offset1 = TREE_OPERAND (arg1, 1);
9178 if (tree_fits_shwi_p (offset1))
9180 HOST_WIDE_INT off = size_low_cst (offset1);
9181 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9182 * BITS_PER_UNIT)
9183 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9185 bitpos1 = off * BITS_PER_UNIT;
9186 offset1 = NULL_TREE;
9191 /* A local variable can never be pointed to by
9192 the default SSA name of an incoming parameter. */
9193 if ((TREE_CODE (arg0) == ADDR_EXPR
9194 && indirect_base0
9195 && TREE_CODE (base0) == VAR_DECL
9196 && auto_var_in_fn_p (base0, current_function_decl)
9197 && !indirect_base1
9198 && TREE_CODE (base1) == SSA_NAME
9199 && SSA_NAME_IS_DEFAULT_DEF (base1)
9200 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9201 || (TREE_CODE (arg1) == ADDR_EXPR
9202 && indirect_base1
9203 && TREE_CODE (base1) == VAR_DECL
9204 && auto_var_in_fn_p (base1, current_function_decl)
9205 && !indirect_base0
9206 && TREE_CODE (base0) == SSA_NAME
9207 && SSA_NAME_IS_DEFAULT_DEF (base0)
9208 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9210 if (code == NE_EXPR)
9211 return constant_boolean_node (1, type);
9212 else if (code == EQ_EXPR)
9213 return constant_boolean_node (0, type);
9215 /* If we have equivalent bases we might be able to simplify. */
9216 else if (indirect_base0 == indirect_base1
9217 && operand_equal_p (base0, base1, 0))
9219 /* We can fold this expression to a constant if the non-constant
9220 offset parts are equal. */
9221 if ((offset0 == offset1
9222 || (offset0 && offset1
9223 && operand_equal_p (offset0, offset1, 0)))
9224 && (code == EQ_EXPR
9225 || code == NE_EXPR
9226 || (indirect_base0 && DECL_P (base0))
9227 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9230 if (code != EQ_EXPR
9231 && code != NE_EXPR
9232 && bitpos0 != bitpos1
9233 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9234 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9235 fold_overflow_warning (("assuming pointer wraparound does not "
9236 "occur when comparing P +- C1 with "
9237 "P +- C2"),
9238 WARN_STRICT_OVERFLOW_CONDITIONAL);
9240 switch (code)
9242 case EQ_EXPR:
9243 return constant_boolean_node (bitpos0 == bitpos1, type);
9244 case NE_EXPR:
9245 return constant_boolean_node (bitpos0 != bitpos1, type);
9246 case LT_EXPR:
9247 return constant_boolean_node (bitpos0 < bitpos1, type);
9248 case LE_EXPR:
9249 return constant_boolean_node (bitpos0 <= bitpos1, type);
9250 case GE_EXPR:
9251 return constant_boolean_node (bitpos0 >= bitpos1, type);
9252 case GT_EXPR:
9253 return constant_boolean_node (bitpos0 > bitpos1, type);
9254 default:;
9257 /* We can simplify the comparison to a comparison of the variable
9258 offset parts if the constant offset parts are equal.
9259 Be careful to use signed sizetype here because otherwise we
9260 mess with array offsets in the wrong way. This is possible
9261 because pointer arithmetic is restricted to remain within an
9262 object and overflow on pointer differences is undefined as of
9263 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9264 else if (bitpos0 == bitpos1
9265 && ((code == EQ_EXPR || code == NE_EXPR)
9266 || (indirect_base0 && DECL_P (base0))
9267 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9269 /* By converting to signed sizetype we cover middle-end pointer
9270 arithmetic which operates on unsigned pointer types of size
9271 type size and ARRAY_REF offsets which are properly sign or
9272 zero extended from their type in case it is narrower than
9273 sizetype. */
9274 if (offset0 == NULL_TREE)
9275 offset0 = build_int_cst (ssizetype, 0);
9276 else
9277 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9278 if (offset1 == NULL_TREE)
9279 offset1 = build_int_cst (ssizetype, 0);
9280 else
9281 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9283 if (code != EQ_EXPR
9284 && code != NE_EXPR
9285 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9286 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9287 fold_overflow_warning (("assuming pointer wraparound does not "
9288 "occur when comparing P +- C1 with "
9289 "P +- C2"),
9290 WARN_STRICT_OVERFLOW_COMPARISON);
9292 return fold_build2_loc (loc, code, type, offset0, offset1);
9295 /* For non-equal bases we can simplify if they are addresses
9296 of local binding decls or constants. */
9297 else if (indirect_base0 && indirect_base1
9298 /* We know that !operand_equal_p (base0, base1, 0)
9299 because the if condition was false. But make
9300 sure two decls are not the same. */
9301 && base0 != base1
9302 && TREE_CODE (arg0) == ADDR_EXPR
9303 && TREE_CODE (arg1) == ADDR_EXPR
9304 && (((TREE_CODE (base0) == VAR_DECL
9305 || TREE_CODE (base0) == PARM_DECL)
9306 && (targetm.binds_local_p (base0)
9307 || CONSTANT_CLASS_P (base1)))
9308 || CONSTANT_CLASS_P (base0))
9309 && (((TREE_CODE (base1) == VAR_DECL
9310 || TREE_CODE (base1) == PARM_DECL)
9311 && (targetm.binds_local_p (base1)
9312 || CONSTANT_CLASS_P (base0)))
9313 || CONSTANT_CLASS_P (base1)))
9315 if (code == EQ_EXPR)
9316 return omit_two_operands_loc (loc, type, boolean_false_node,
9317 arg0, arg1);
9318 else if (code == NE_EXPR)
9319 return omit_two_operands_loc (loc, type, boolean_true_node,
9320 arg0, arg1);
9322 /* For equal offsets we can simplify to a comparison of the
9323 base addresses. */
9324 else if (bitpos0 == bitpos1
9325 && (indirect_base0
9326 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9327 && (indirect_base1
9328 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9329 && ((offset0 == offset1)
9330 || (offset0 && offset1
9331 && operand_equal_p (offset0, offset1, 0))))
9333 if (indirect_base0)
9334 base0 = build_fold_addr_expr_loc (loc, base0);
9335 if (indirect_base1)
9336 base1 = build_fold_addr_expr_loc (loc, base1);
9337 return fold_build2_loc (loc, code, type, base0, base1);
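/* For example (assuming 32-bit int), given "int a[8]" the comparison
   "&a[1] < &a[3]" has equal bases and constant byte offsets 4 and 12, so
   it folds to the constant 1, while "&a[i] == &a[j]" with equal constant
   parts reduces to a comparison of the variable offsets i and j. */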
9341 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9342 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9343 the resulting offset is smaller in absolute value than the
9344 original one. */
9345 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9346 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9347 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9348 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9349 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9350 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9351 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9353 tree const1 = TREE_OPERAND (arg0, 1);
9354 tree const2 = TREE_OPERAND (arg1, 1);
9355 tree variable1 = TREE_OPERAND (arg0, 0);
9356 tree variable2 = TREE_OPERAND (arg1, 0);
9357 tree cst;
9358 const char * const warnmsg = G_("assuming signed overflow does not "
9359 "occur when combining constants around "
9360 "a comparison");
9362 /* Put the constant on the side where it doesn't overflow and is
9363 of lower absolute value than before. */
9364 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9365 ? MINUS_EXPR : PLUS_EXPR,
9366 const2, const1);
9367 if (!TREE_OVERFLOW (cst)
9368 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9370 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9371 return fold_build2_loc (loc, code, type,
9372 variable1,
9373 fold_build2_loc (loc,
9374 TREE_CODE (arg1), TREE_TYPE (arg1),
9375 variable2, cst));
9378 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9379 ? MINUS_EXPR : PLUS_EXPR,
9380 const1, const2);
9381 if (!TREE_OVERFLOW (cst)
9382 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9384 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9385 return fold_build2_loc (loc, code, type,
9386 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9387 variable1, cst),
9388 variable2);
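/* For example, with signed x and y, "x + 2 < y + 3" is rewritten as
   "x < y + 1"; the combined constant 1 is smaller in absolute value than
   either original constant, so no new overflow is introduced. */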
9392 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9393 signed arithmetic case. That form is created by the compiler
9394 often enough for folding it to be of value. One example is in
9395 computing loop trip counts after Operator Strength Reduction. */
9396 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9397 && TREE_CODE (arg0) == MULT_EXPR
9398 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9399 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9400 && integer_zerop (arg1))
9402 tree const1 = TREE_OPERAND (arg0, 1);
9403 tree const2 = arg1; /* zero */
9404 tree variable1 = TREE_OPERAND (arg0, 0);
9405 enum tree_code cmp_code = code;
9407 /* Handle unfolded multiplication by zero. */
9408 if (integer_zerop (const1))
9409 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9411 fold_overflow_warning (("assuming signed overflow does not occur when "
9412 "eliminating multiplication in comparison "
9413 "with zero"),
9414 WARN_STRICT_OVERFLOW_COMPARISON);
9416 /* If const1 is negative we swap the sense of the comparison. */
9417 if (tree_int_cst_sgn (const1) < 0)
9418 cmp_code = swap_tree_comparison (cmp_code);
9420 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
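/* For example, with signed x, "x * 4 > 0" becomes "x > 0", and
   "x * -2 > 0" becomes "x < 0" because the negative factor flips the
   sense of the comparison. */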
9423 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9424 if (tem)
9425 return tem;
9427 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9429 tree targ0 = strip_float_extensions (arg0);
9430 tree targ1 = strip_float_extensions (arg1);
9431 tree newtype = TREE_TYPE (targ0);
9433 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9434 newtype = TREE_TYPE (targ1);
9436 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9437 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9438 return fold_build2_loc (loc, code, type,
9439 fold_convert_loc (loc, newtype, targ0),
9440 fold_convert_loc (loc, newtype, targ1));
9442 /* (-a) CMP (-b) -> b CMP a */
9443 if (TREE_CODE (arg0) == NEGATE_EXPR
9444 && TREE_CODE (arg1) == NEGATE_EXPR)
9445 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9446 TREE_OPERAND (arg0, 0));
9448 if (TREE_CODE (arg1) == REAL_CST)
9450 REAL_VALUE_TYPE cst;
9451 cst = TREE_REAL_CST (arg1);
9453 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9454 if (TREE_CODE (arg0) == NEGATE_EXPR)
9455 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9456 TREE_OPERAND (arg0, 0),
9457 build_real (TREE_TYPE (arg1),
9458 real_value_negate (&cst)));
9460 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9461 /* a CMP (-0) -> a CMP 0 */
9462 if (REAL_VALUE_MINUS_ZERO (cst))
9463 return fold_build2_loc (loc, code, type, arg0,
9464 build_real (TREE_TYPE (arg1), dconst0));
9466 /* x != NaN is always true, other ops are always false. */
9467 if (REAL_VALUE_ISNAN (cst)
9468 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9470 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9471 return omit_one_operand_loc (loc, type, tem, arg0);
9474 /* Fold comparisons against infinity. */
9475 if (REAL_VALUE_ISINF (cst)
9476 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9478 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9479 if (tem != NULL_TREE)
9480 return tem;
9484 /* If this is a comparison of a real constant with a PLUS_EXPR
9485 or a MINUS_EXPR of a real constant, we can convert it into a
9486 comparison with a revised real constant as long as no overflow
9487 occurs when unsafe_math_optimizations are enabled. */
9488 if (flag_unsafe_math_optimizations
9489 && TREE_CODE (arg1) == REAL_CST
9490 && (TREE_CODE (arg0) == PLUS_EXPR
9491 || TREE_CODE (arg0) == MINUS_EXPR)
9492 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9493 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9494 ? MINUS_EXPR : PLUS_EXPR,
9495 arg1, TREE_OPERAND (arg0, 1)))
9496 && !TREE_OVERFLOW (tem))
9497 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9499 /* Likewise, we can simplify a comparison of a real constant with
9500 a MINUS_EXPR whose first operand is also a real constant, i.e.
9501 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9502 floating-point types only if -fassociative-math is set. */
9503 if (flag_associative_math
9504 && TREE_CODE (arg1) == REAL_CST
9505 && TREE_CODE (arg0) == MINUS_EXPR
9506 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9507 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9508 arg1))
9509 && !TREE_OVERFLOW (tem))
9510 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9511 TREE_OPERAND (arg0, 1), tem);
9513 /* Fold comparisons against built-in math functions. */
9514 if (TREE_CODE (arg1) == REAL_CST
9515 && flag_unsafe_math_optimizations
9516 && ! flag_errno_math)
9518 enum built_in_function fcode = builtin_mathfn_code (arg0);
9520 if (fcode != END_BUILTINS)
9522 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9523 if (tem != NULL_TREE)
9524 return tem;
9529 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9530 && CONVERT_EXPR_P (arg0))
9532 /* If we are widening one operand of an integer comparison,
9533 see if the other operand is similarly being widened. Perhaps we
9534 can do the comparison in the narrower type. */
9535 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9536 if (tem)
9537 return tem;
9539 /* Or if we are changing signedness. */
9540 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9541 if (tem)
9542 return tem;
9545 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9546 constant, we can simplify it. */
9547 if (TREE_CODE (arg1) == INTEGER_CST
9548 && (TREE_CODE (arg0) == MIN_EXPR
9549 || TREE_CODE (arg0) == MAX_EXPR)
9550 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9552 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9553 if (tem)
9554 return tem;
9557 /* Simplify comparison of something with itself. (For IEEE
9558 floating-point, we can only do some of these simplifications.) */
9559 if (operand_equal_p (arg0, arg1, 0))
9561 switch (code)
9563 case EQ_EXPR:
9564 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9565 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9566 return constant_boolean_node (1, type);
9567 break;
9569 case GE_EXPR:
9570 case LE_EXPR:
9571 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9572 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9573 return constant_boolean_node (1, type);
9574 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9576 case NE_EXPR:
9577 /* For NE, we can only do this simplification if integer
9578 or we don't honor IEEE floating point NaNs. */
9579 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9580 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9581 break;
9582 /* ... fall through ... */
9583 case GT_EXPR:
9584 case LT_EXPR:
9585 return constant_boolean_node (0, type);
9586 default:
9587 gcc_unreachable ();
9591 /* If we are comparing an expression that just has comparisons
9592 of two integer values, arithmetic expressions of those comparisons,
9593 and constants, we can simplify it. There are only three cases
9594 to check: the two values can either be equal, the first can be
9595 greater, or the second can be greater. Fold the expression for
9596 those three values. Since each value must be 0 or 1, we have
9597 eight possibilities, each of which corresponds to the constant 0
9598 or 1 or one of the six possible comparisons.
9600 This handles common cases like (a > b) == 0 but also handles
9601 expressions like ((x > y) - (y > x)) > 0, which supposedly
9602 occur in macroized code. */
9604 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9606 tree cval1 = 0, cval2 = 0;
9607 int save_p = 0;
9609 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9610 /* Don't handle degenerate cases here; they should already
9611 have been handled anyway. */
9612 && cval1 != 0 && cval2 != 0
9613 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9614 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9615 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9616 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9617 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9618 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9619 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9621 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9622 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9624 /* We can't just pass T to eval_subst in case cval1 or cval2
9625 was the same as ARG1. */
9627 tree high_result
9628 = fold_build2_loc (loc, code, type,
9629 eval_subst (loc, arg0, cval1, maxval,
9630 cval2, minval),
9631 arg1);
9632 tree equal_result
9633 = fold_build2_loc (loc, code, type,
9634 eval_subst (loc, arg0, cval1, maxval,
9635 cval2, maxval),
9636 arg1);
9637 tree low_result
9638 = fold_build2_loc (loc, code, type,
9639 eval_subst (loc, arg0, cval1, minval,
9640 cval2, maxval),
9641 arg1);
9643 /* All three of these results should be 0 or 1. Confirm they are.
9644 Then use those values to select the proper code to use. */
9646 if (TREE_CODE (high_result) == INTEGER_CST
9647 && TREE_CODE (equal_result) == INTEGER_CST
9648 && TREE_CODE (low_result) == INTEGER_CST)
9650 /* Make a 3-bit mask with the high-order bit being the
9651 value for `>', the next for '=', and the low for '<'. */
9652 switch ((integer_onep (high_result) * 4)
9653 + (integer_onep (equal_result) * 2)
9654 + integer_onep (low_result))
9656 case 0:
9657 /* Always false. */
9658 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9659 case 1:
9660 code = LT_EXPR;
9661 break;
9662 case 2:
9663 code = EQ_EXPR;
9664 break;
9665 case 3:
9666 code = LE_EXPR;
9667 break;
9668 case 4:
9669 code = GT_EXPR;
9670 break;
9671 case 5:
9672 code = NE_EXPR;
9673 break;
9674 case 6:
9675 code = GE_EXPR;
9676 break;
9677 case 7:
9678 /* Always true. */
9679 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9682 if (save_p)
9684 tem = save_expr (build2 (code, type, cval1, cval2));
9685 SET_EXPR_LOCATION (tem, loc);
9686 return tem;
9688 return fold_build2_loc (loc, code, type, cval1, cval2);
9693 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9694 into a single range test. */
9695 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9696 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9697 && TREE_CODE (arg1) == INTEGER_CST
9698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9699 && !integer_zerop (TREE_OPERAND (arg0, 1))
9700 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9701 && !TREE_OVERFLOW (arg1))
9703 tem = fold_div_compare (loc, code, type, arg0, arg1);
9704 if (tem != NULL_TREE)
9705 return tem;
9708 /* Fold ~X op ~Y as Y op X. */
9709 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9710 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9712 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9713 return fold_build2_loc (loc, code, type,
9714 fold_convert_loc (loc, cmp_type,
9715 TREE_OPERAND (arg1, 0)),
9716 TREE_OPERAND (arg0, 0));
9719 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9720 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9721 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9723 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9724 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9725 TREE_OPERAND (arg0, 0),
9726 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9727 fold_convert_loc (loc, cmp_type, arg1)));
9730 return NULL_TREE;
9734 /* Subroutine of fold_binary. Optimize complex multiplications of the
9735 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9736 argument EXPR represents the expression "z" of type TYPE. */
9738 static tree
9739 fold_mult_zconjz (location_t loc, tree type, tree expr)
9741 tree itype = TREE_TYPE (type);
9742 tree rpart, ipart, tem;
9744 if (TREE_CODE (expr) == COMPLEX_EXPR)
9746 rpart = TREE_OPERAND (expr, 0);
9747 ipart = TREE_OPERAND (expr, 1);
9749 else if (TREE_CODE (expr) == COMPLEX_CST)
9751 rpart = TREE_REALPART (expr);
9752 ipart = TREE_IMAGPART (expr);
9754 else
9756 expr = save_expr (expr);
9757 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9758 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9761 rpart = save_expr (rpart);
9762 ipart = save_expr (ipart);
9763 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9764 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9765 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9766 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9767 build_zero_cst (itype));
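/* The underlying identity: for z = a + b*i,
     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b,
   so the imaginary part of the result is known to be zero. */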
9771 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9772 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9773 guarantees that P and N have the same least significant log2(M) bits.
9774 N is not otherwise constrained. In particular, N is not normalized to
9775 0 <= N < M as is common. In general, the precise value of P is unknown.
9776 M is chosen as large as possible such that constant N can be determined.
9778 Returns M and sets *RESIDUE to N.
9780 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9781 account. This is not always possible due to PR 35705.
9784 static unsigned HOST_WIDE_INT
9785 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9786 bool allow_func_align)
9788 enum tree_code code;
9790 *residue = 0;
9792 code = TREE_CODE (expr);
9793 if (code == ADDR_EXPR)
9795 unsigned int bitalign;
9796 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9797 *residue /= BITS_PER_UNIT;
9798 return bitalign / BITS_PER_UNIT;
9800 else if (code == POINTER_PLUS_EXPR)
9802 tree op0, op1;
9803 unsigned HOST_WIDE_INT modulus;
9804 enum tree_code inner_code;
9806 op0 = TREE_OPERAND (expr, 0);
9807 STRIP_NOPS (op0);
9808 modulus = get_pointer_modulus_and_residue (op0, residue,
9809 allow_func_align);
9811 op1 = TREE_OPERAND (expr, 1);
9812 STRIP_NOPS (op1);
9813 inner_code = TREE_CODE (op1);
9814 if (inner_code == INTEGER_CST)
9816 *residue += TREE_INT_CST_LOW (op1);
9817 return modulus;
9819 else if (inner_code == MULT_EXPR)
9821 op1 = TREE_OPERAND (op1, 1);
9822 if (TREE_CODE (op1) == INTEGER_CST)
9824 unsigned HOST_WIDE_INT align;
9826 /* Compute the greatest power-of-2 divisor of op1. */
9827 align = TREE_INT_CST_LOW (op1);
9828 align &= -align;
9830 /* If align is non-zero and less than modulus, replace
9831 modulus with align. If align is 0, then either op1 is 0
9832 or the greatest power-of-2 divisor of op1 doesn't fit in an
9833 unsigned HOST_WIDE_INT. In either case, no additional
9834 constraint is imposed. */
9835 if (align)
9836 modulus = MIN (modulus, align);
9838 return modulus;
9843 /* If we get here, we were unable to determine anything useful about the
9844 expression. */
9845 return 1;
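/* A sketch of the computation on a hypothetical input: for
   "&buf[0] + i * 8" where buf is known to be 16-byte aligned, the
   ADDR_EXPR arm yields modulus 16 and residue 0, and the MULT_EXPR arm
   lowers the modulus to MIN (16, 8) = 8, so the pointer is known to be
   8-byte aligned. */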
9848 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9849 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9851 static bool
9852 vec_cst_ctor_to_array (tree arg, tree *elts)
9854 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9856 if (TREE_CODE (arg) == VECTOR_CST)
9858 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9859 elts[i] = VECTOR_CST_ELT (arg, i);
9861 else if (TREE_CODE (arg) == CONSTRUCTOR)
9863 constructor_elt *elt;
9865 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9866 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9867 return false;
9868 else
9869 elts[i] = elt->value;
9871 else
9872 return false;
9873 for (; i < nelts; i++)
9874 elts[i]
9875 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9876 return true;
9879 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9880 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9881 NULL_TREE otherwise. */
9883 static tree
9884 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9886 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9887 tree *elts;
9888 bool need_ctor = false;
9890 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9891 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9892 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9893 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9894 return NULL_TREE;
9896 elts = XALLOCAVEC (tree, nelts * 3);
9897 if (!vec_cst_ctor_to_array (arg0, elts)
9898 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9899 return NULL_TREE;
9901 for (i = 0; i < nelts; i++)
9903 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9904 need_ctor = true;
9905 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9908 if (need_ctor)
9910 vec<constructor_elt, va_gc> *v;
9911 vec_alloc (v, nelts);
9912 for (i = 0; i < nelts; i++)
9913 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9914 return build_constructor (type, v);
9916 else
9917 return build_vector (type, &elts[2 * nelts]);
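/* For example, with four-element vectors ARG0 = { a0, a1, a2, a3 } and
   ARG1 = { b0, b1, b2, b3 }, SEL indexes the concatenation of the two
   inputs, so the selector { 0, 4, 1, 5 } produces the interleave
   { a0, b0, a1, b1 }. */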
9920 /* Try to fold a pointer difference of type TYPE between two address
9921 expressions of array references AREF0 and AREF1 using location LOC. Return a
9922 simplified expression for the difference or NULL_TREE. */
9924 static tree
9925 fold_addr_of_array_ref_difference (location_t loc, tree type,
9926 tree aref0, tree aref1)
9928 tree base0 = TREE_OPERAND (aref0, 0);
9929 tree base1 = TREE_OPERAND (aref1, 0);
9930 tree base_offset = build_int_cst (type, 0);
9932 /* If the bases are array references as well, recurse. If the bases
9933 are pointer indirections compute the difference of the pointers.
9934 If the bases are equal, we are set. */
9935 if ((TREE_CODE (base0) == ARRAY_REF
9936 && TREE_CODE (base1) == ARRAY_REF
9937 && (base_offset
9938 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9939 || (INDIRECT_REF_P (base0)
9940 && INDIRECT_REF_P (base1)
9941 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9942 TREE_OPERAND (base0, 0),
9943 TREE_OPERAND (base1, 0))))
9944 || operand_equal_p (base0, base1, 0))
9946 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9947 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9948 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9949 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9950 return fold_build2_loc (loc, PLUS_EXPR, type,
9951 base_offset,
9952 fold_build2_loc (loc, MULT_EXPR, type,
9953 diff, esz));
9955 return NULL_TREE;
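/* For example, for "double a[16]" (8-byte elements), the difference
   "&a[i] - &a[j]" folds to (i - j) * 8; nested array references are
   handled by recursing on the bases. */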
9958 /* If the real or vector real constant CST of type TYPE has an exact
9959 inverse, return it, else return NULL. */
9961 static tree
9962 exact_inverse (tree type, tree cst)
9964 REAL_VALUE_TYPE r;
9965 tree unit_type, *elts;
9966 enum machine_mode mode;
9967 unsigned vec_nelts, i;
9969 switch (TREE_CODE (cst))
9971 case REAL_CST:
9972 r = TREE_REAL_CST (cst);
9974 if (exact_real_inverse (TYPE_MODE (type), &r))
9975 return build_real (type, r);
9977 return NULL_TREE;
9979 case VECTOR_CST:
9980 vec_nelts = VECTOR_CST_NELTS (cst);
9981 elts = XALLOCAVEC (tree, vec_nelts);
9982 unit_type = TREE_TYPE (type);
9983 mode = TYPE_MODE (unit_type);
9985 for (i = 0; i < vec_nelts; i++)
9987 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9988 if (!exact_real_inverse (mode, &r))
9989 return NULL_TREE;
9990 elts[i] = build_real (unit_type, r);
9993 return build_vector (type, elts);
9995 default:
9996 return NULL_TREE;
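/* For example, 4.0 has the exact inverse 0.25, allowing a later rewrite
   of "x / 4.0" as "x * 0.25", whereas 3.0 has no exactly representable
   inverse and NULL_TREE is returned. */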
10000 /* Mask out the tz least significant bits of X of type TYPE where
10001 tz is the number of trailing zeroes in Y. */
10002 static double_int
10003 mask_with_tz (tree type, double_int x, double_int y)
10005 int tz = y.trailing_zeros ();
10007 if (tz > 0)
10009 double_int mask;
10011 mask = ~double_int::mask (tz);
10012 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
10013 return mask & x;
10015 return x;
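/* For example, if Y is 8 (three trailing zero bits), X is masked with
   ~7, clearing its three least significant bits. */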
10018 /* Return true when T is an address and is known to be nonzero.
10019 For floating point we further ensure that T is not denormal.
10020 Similar logic is present in nonzero_address in rtlanal.h.
10022 If the return value is based on the assumption that signed overflow
10023 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10024 change *STRICT_OVERFLOW_P. */
10026 static bool
10027 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10029 tree type = TREE_TYPE (t);
10030 enum tree_code code;
10032 /* Doing something useful for floating point would need more work. */
10033 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10034 return false;
10036 code = TREE_CODE (t);
10037 switch (TREE_CODE_CLASS (code))
10039 case tcc_unary:
10040 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10041 strict_overflow_p);
10042 case tcc_binary:
10043 case tcc_comparison:
10044 return tree_binary_nonzero_warnv_p (code, type,
10045 TREE_OPERAND (t, 0),
10046 TREE_OPERAND (t, 1),
10047 strict_overflow_p);
10048 case tcc_constant:
10049 case tcc_declaration:
10050 case tcc_reference:
10051 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10053 default:
10054 break;
10057 switch (code)
10059 case TRUTH_NOT_EXPR:
10060 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10061 strict_overflow_p);
10063 case TRUTH_AND_EXPR:
10064 case TRUTH_OR_EXPR:
10065 case TRUTH_XOR_EXPR:
10066 return tree_binary_nonzero_warnv_p (code, type,
10067 TREE_OPERAND (t, 0),
10068 TREE_OPERAND (t, 1),
10069 strict_overflow_p);
10071 case COND_EXPR:
10072 case CONSTRUCTOR:
10073 case OBJ_TYPE_REF:
10074 case ASSERT_EXPR:
10075 case ADDR_EXPR:
10076 case WITH_SIZE_EXPR:
10077 case SSA_NAME:
10078 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10080 case COMPOUND_EXPR:
10081 case MODIFY_EXPR:
10082 case BIND_EXPR:
10083 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10084 strict_overflow_p);
10086 case SAVE_EXPR:
10087 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10088 strict_overflow_p);
10090 case CALL_EXPR:
10092 tree fndecl = get_callee_fndecl (t);
10093 if (!fndecl) return false;
10094 if (flag_delete_null_pointer_checks && !flag_check_new
10095 && DECL_IS_OPERATOR_NEW (fndecl)
10096 && !TREE_NOTHROW (fndecl))
10097 return true;
10098 if (flag_delete_null_pointer_checks
10099 && lookup_attribute ("returns_nonnull",
10100 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10101 return true;
10102 return alloca_call_p (t);
10105 default:
10106 break;
10108 return false;
10111 /* Return true when T is an address and is known to be nonzero.
10112 Handle warnings about undefined signed overflow. */
10114 static bool
10115 tree_expr_nonzero_p (tree t)
10117 bool ret, strict_overflow_p;
10119 strict_overflow_p = false;
10120 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10121 if (strict_overflow_p)
10122 fold_overflow_warning (("assuming signed overflow does not occur when "
10123 "determining that expression is always "
10124 "non-zero"),
10125 WARN_STRICT_OVERFLOW_MISC);
10126 return ret;
10129 /* Fold a binary expression of code CODE and type TYPE with operands
10130 OP0 and OP1. LOC is the location of the resulting expression.
10131 Return the folded expression if folding is successful. Otherwise,
10132 return NULL_TREE. */
10134 tree
10135 fold_binary_loc (location_t loc,
10136 enum tree_code code, tree type, tree op0, tree op1)
10138 enum tree_code_class kind = TREE_CODE_CLASS (code);
10139 tree arg0, arg1, tem;
10140 tree t1 = NULL_TREE;
10141 bool strict_overflow_p;
10142 unsigned int prec;
10144 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10145 && TREE_CODE_LENGTH (code) == 2
10146 && op0 != NULL_TREE
10147 && op1 != NULL_TREE);
10149 arg0 = op0;
10150 arg1 = op1;
10152 /* Strip any conversions that don't change the mode. This is
10153 safe for every expression, except for a comparison expression
10154 because its signedness is derived from its operands. So, in
10155 the latter case, only strip conversions that don't change the
10156 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10157 preserved.
10159 Note that this is done as an internal manipulation within the
10160 constant folder, in order to find the simplest representation
10161 of the arguments so that their form can be studied. In any
10162 case, the appropriate type conversions should be put back in
10163 the tree that will get out of the constant folder. */
10165 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10167 STRIP_SIGN_NOPS (arg0);
10168 STRIP_SIGN_NOPS (arg1);
10170 else
10172 STRIP_NOPS (arg0);
10173 STRIP_NOPS (arg1);
10176 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10177 constant but we can't do arithmetic on them. */
10178 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10179 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10180 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10181 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10182 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10183 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10184 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10186 if (kind == tcc_binary)
10188 /* Make sure type and arg0 have the same saturating flag. */
10189 gcc_assert (TYPE_SATURATING (type)
10190 == TYPE_SATURATING (TREE_TYPE (arg0)));
10191 tem = const_binop (code, arg0, arg1);
10193 else if (kind == tcc_comparison)
10194 tem = fold_relational_const (code, type, arg0, arg1);
10195 else
10196 tem = NULL_TREE;
10198 if (tem != NULL_TREE)
10200 if (TREE_TYPE (tem) != type)
10201 tem = fold_convert_loc (loc, type, tem);
10202 return tem;
10206 /* If this is a commutative operation, and ARG0 is a constant, move it
10207 to ARG1 to reduce the number of tests below. */
10208 if (commutative_tree_code (code)
10209 && tree_swap_operands_p (arg0, arg1, true))
10210 return fold_build2_loc (loc, code, type, op1, op0);
10212 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10214 First check for cases where an arithmetic operation is applied to a
10215 compound, conditional, or comparison operation. Push the arithmetic
10216 operation inside the compound or conditional to see if any folding
10217 can then be done. Convert comparison to conditional for this purpose.
10218 This also optimizes non-constant cases that used to be done in
10219 expand_expr.
10221 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10222 one of the operands is a comparison and the other is a comparison, a
10223 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10224 code below would make the expression more complex. Change it to a
10225 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10226 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10228 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10229 || code == EQ_EXPR || code == NE_EXPR)
10230 && TREE_CODE (type) != VECTOR_TYPE
10231 && ((truth_value_p (TREE_CODE (arg0))
10232 && (truth_value_p (TREE_CODE (arg1))
10233 || (TREE_CODE (arg1) == BIT_AND_EXPR
10234 && integer_onep (TREE_OPERAND (arg1, 1)))))
10235 || (truth_value_p (TREE_CODE (arg1))
10236 && (truth_value_p (TREE_CODE (arg0))
10237 || (TREE_CODE (arg0) == BIT_AND_EXPR
10238 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10240 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10241 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10242 : TRUTH_XOR_EXPR,
10243 boolean_type_node,
10244 fold_convert_loc (loc, boolean_type_node, arg0),
10245 fold_convert_loc (loc, boolean_type_node, arg1));
10247 if (code == EQ_EXPR)
10248 tem = invert_truthvalue_loc (loc, tem);
10250 return fold_convert_loc (loc, type, tem);
10253 if (TREE_CODE_CLASS (code) == tcc_binary
10254 || TREE_CODE_CLASS (code) == tcc_comparison)
10256 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10258 tem = fold_build2_loc (loc, code, type,
10259 fold_convert_loc (loc, TREE_TYPE (op0),
10260 TREE_OPERAND (arg0, 1)), op1);
10261 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10262 tem);
10264 if (TREE_CODE (arg1) == COMPOUND_EXPR
10265 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10267 tem = fold_build2_loc (loc, code, type, op0,
10268 fold_convert_loc (loc, TREE_TYPE (op1),
10269 TREE_OPERAND (arg1, 1)));
10270 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10271 tem);
10274 if (TREE_CODE (arg0) == COND_EXPR
10275 || TREE_CODE (arg0) == VEC_COND_EXPR
10276 || COMPARISON_CLASS_P (arg0))
10278 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10279 arg0, arg1,
10280 /*cond_first_p=*/1);
10281 if (tem != NULL_TREE)
10282 return tem;
10285 if (TREE_CODE (arg1) == COND_EXPR
10286 || TREE_CODE (arg1) == VEC_COND_EXPR
10287 || COMPARISON_CLASS_P (arg1))
10289 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10290 arg1, arg0,
10291 /*cond_first_p=*/0);
10292 if (tem != NULL_TREE)
10293 return tem;
10297 switch (code)
10299 case MEM_REF:
10300 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10301 if (TREE_CODE (arg0) == ADDR_EXPR
10302 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10304 tree iref = TREE_OPERAND (arg0, 0);
10305 return fold_build2 (MEM_REF, type,
10306 TREE_OPERAND (iref, 0),
10307 int_const_binop (PLUS_EXPR, arg1,
10308 TREE_OPERAND (iref, 1)));
10311 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10312 if (TREE_CODE (arg0) == ADDR_EXPR
10313 && handled_component_p (TREE_OPERAND (arg0, 0)))
10315 tree base;
10316 HOST_WIDE_INT coffset;
10317 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10318 &coffset);
10319 if (!base)
10320 return NULL_TREE;
10321 return fold_build2 (MEM_REF, type,
10322 build_fold_addr_expr (base),
10323 int_const_binop (PLUS_EXPR, arg1,
10324 size_int (coffset)));
10327 return NULL_TREE;
10329 case POINTER_PLUS_EXPR:
10330 /* 0 +p index -> (type)index */
10331 if (integer_zerop (arg0))
10332 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10334 /* PTR +p 0 -> PTR */
10335 if (integer_zerop (arg1))
10336 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10338 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10339 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10340 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10341 return fold_convert_loc (loc, type,
10342 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10343 fold_convert_loc (loc, sizetype,
10344 arg1),
10345 fold_convert_loc (loc, sizetype,
10346 arg0)));
10348 /* (PTR +p B) +p A -> PTR +p (B + A) */
10349 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10350 && !upc_shared_type_p (TREE_TYPE (type)))
10352 tree inner;
10353 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10354 tree arg00 = TREE_OPERAND (arg0, 0);
10355 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10356 arg01, fold_convert_loc (loc, sizetype, arg1));
10357 return fold_convert_loc (loc, type,
10358 fold_build_pointer_plus_loc (loc,
10359 arg00, inner));
10362 /* PTR_CST +p CST -> CST1 */
10363 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10364 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10365 fold_convert_loc (loc, type, arg1));
10367 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10368 of the array. The loop optimizer sometimes produces this type of
10369 expression. */
10370 if (TREE_CODE (arg0) == ADDR_EXPR)
10372 tem = try_move_mult_to_index (loc, arg0,
10373 fold_convert_loc (loc,
10374 ssizetype, arg1));
10375 if (tem)
10376 return fold_convert_loc (loc, type, tem);
10379 return NULL_TREE;
10381 case PLUS_EXPR:
10382 /* A + (-B) -> A - B */
10383 if (TREE_CODE (arg1) == NEGATE_EXPR
10384 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10385 return fold_build2_loc (loc, MINUS_EXPR, type,
10386 fold_convert_loc (loc, type, arg0),
10387 fold_convert_loc (loc, type,
10388 TREE_OPERAND (arg1, 0)));
10389 /* (-A) + B -> B - A */
10390 if (TREE_CODE (arg0) == NEGATE_EXPR
10391 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10392 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10393 return fold_build2_loc (loc, MINUS_EXPR, type,
10394 fold_convert_loc (loc, type, arg1),
10395 fold_convert_loc (loc, type,
10396 TREE_OPERAND (arg0, 0)));
10398 /* Disable further optimizations involving UPC shared pointers,
10399 because integers are not interoperable with shared pointers. */
10400 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10401 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10402 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10403 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10404 return NULL_TREE;
10406 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10408 /* Convert ~A + 1 to -A. */
10409 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10410 && integer_onep (arg1))
10411 return fold_build1_loc (loc, NEGATE_EXPR, type,
10412 fold_convert_loc (loc, type,
10413 TREE_OPERAND (arg0, 0)));
10415 /* ~X + X is -1. */
10416 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10417 && !TYPE_OVERFLOW_TRAPS (type))
10419 tree tem = TREE_OPERAND (arg0, 0);
10421 STRIP_NOPS (tem);
10422 if (operand_equal_p (tem, arg1, 0))
10424 t1 = build_all_ones_cst (type);
10425 return omit_one_operand_loc (loc, type, t1, arg1);
10429 /* X + ~X is -1. */
10430 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10431 && !TYPE_OVERFLOW_TRAPS (type))
10433 tree tem = TREE_OPERAND (arg1, 0);
10435 STRIP_NOPS (tem);
10436 if (operand_equal_p (arg0, tem, 0))
10438 t1 = build_all_ones_cst (type);
10439 return omit_one_operand_loc (loc, type, t1, arg0);
10443 /* X + (X / CST) * -CST is X % CST. */
10444 if (TREE_CODE (arg1) == MULT_EXPR
10445 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10446 && operand_equal_p (arg0,
10447 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10449 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10450 tree cst1 = TREE_OPERAND (arg1, 1);
10451 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10452 cst1, cst0);
10453 if (sum && integer_zerop (sum))
10454 return fold_convert_loc (loc, type,
10455 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10456 TREE_TYPE (arg0), arg0,
10457 cst0));
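/* For example, "x + (x / 16) * -16" folds to "x % 16", using the
   identity x % y == x - (x / y) * y of truncating division. */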
10461 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10462 one. Make sure the type is not saturating and has the signedness of
10463 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10464 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10465 if ((TREE_CODE (arg0) == MULT_EXPR
10466 || TREE_CODE (arg1) == MULT_EXPR)
10467 && !TYPE_SATURATING (type)
10468 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10469 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10470 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10472 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10473 if (tem)
10474 return tem;
10477 if (! FLOAT_TYPE_P (type))
10479 if (integer_zerop (arg1))
10480 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10482 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10483 with a constant, and the two constants have no bits in common,
10484 we should treat this as a BIT_IOR_EXPR since this may produce more
10485 simplifications. */
10486 if (TREE_CODE (arg0) == BIT_AND_EXPR
10487 && TREE_CODE (arg1) == BIT_AND_EXPR
10488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10489 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10490 && integer_zerop (const_binop (BIT_AND_EXPR,
10491 TREE_OPERAND (arg0, 1),
10492 TREE_OPERAND (arg1, 1))))
10494 code = BIT_IOR_EXPR;
10495 goto bit_ior;
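/* For example, "(x & 0xf0) + (y & 0x0f)" can generate no carries between
   the two terms, so it is handled as "(x & 0xf0) | (y & 0x0f)" by the
   BIT_IOR_EXPR folders. */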
10498 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10499 (plus (plus (mult) (mult)) (foo)) so that we can
10500 take advantage of the factoring cases below. */
10501 if (TYPE_OVERFLOW_WRAPS (type)
10502 && (((TREE_CODE (arg0) == PLUS_EXPR
10503 || TREE_CODE (arg0) == MINUS_EXPR)
10504 && TREE_CODE (arg1) == MULT_EXPR)
10505 || ((TREE_CODE (arg1) == PLUS_EXPR
10506 || TREE_CODE (arg1) == MINUS_EXPR)
10507 && TREE_CODE (arg0) == MULT_EXPR)))
10509 tree parg0, parg1, parg, marg;
10510 enum tree_code pcode;
10512 if (TREE_CODE (arg1) == MULT_EXPR)
10513 parg = arg0, marg = arg1;
10514 else
10515 parg = arg1, marg = arg0;
10516 pcode = TREE_CODE (parg);
10517 parg0 = TREE_OPERAND (parg, 0);
10518 parg1 = TREE_OPERAND (parg, 1);
10519 STRIP_NOPS (parg0);
10520 STRIP_NOPS (parg1);
10522 if (TREE_CODE (parg0) == MULT_EXPR
10523 && TREE_CODE (parg1) != MULT_EXPR)
10524 return fold_build2_loc (loc, pcode, type,
10525 fold_build2_loc (loc, PLUS_EXPR, type,
10526 fold_convert_loc (loc, type,
10527 parg0),
10528 fold_convert_loc (loc, type,
10529 marg)),
10530 fold_convert_loc (loc, type, parg1));
10531 if (TREE_CODE (parg0) != MULT_EXPR
10532 && TREE_CODE (parg1) == MULT_EXPR)
10533 return
10534 fold_build2_loc (loc, PLUS_EXPR, type,
10535 fold_convert_loc (loc, type, parg0),
10536 fold_build2_loc (loc, pcode, type,
10537 fold_convert_loc (loc, type, marg),
10538 fold_convert_loc (loc, type,
10539 parg1)));
10542 else
10544 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10545 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10546 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10548 /* Likewise if the operands are reversed. */
10549 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10550 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10552 /* Convert X + -C into X - C. */
10553 if (TREE_CODE (arg1) == REAL_CST
10554 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10556 tem = fold_negate_const (arg1, type);
10557 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10558 return fold_build2_loc (loc, MINUS_EXPR, type,
10559 fold_convert_loc (loc, type, arg0),
10560 fold_convert_loc (loc, type, tem));
10563 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10564 to __complex__ ( x, y ). This is not the same for SNaNs or
10565 if signed zeros are involved. */
10566 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10567 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10568 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10570 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10571 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10572 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10573 bool arg0rz = false, arg0iz = false;
10574 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10575 || (arg0i && (arg0iz = real_zerop (arg0i))))
10577 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10578 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10579 if (arg0rz && arg1i && real_zerop (arg1i))
10581 tree rp = arg1r ? arg1r
10582 : build1 (REALPART_EXPR, rtype, arg1);
10583 tree ip = arg0i ? arg0i
10584 : build1 (IMAGPART_EXPR, rtype, arg0);
10585 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10587 else if (arg0iz && arg1r && real_zerop (arg1r))
10589 tree rp = arg0r ? arg0r
10590 : build1 (REALPART_EXPR, rtype, arg0);
10591 tree ip = arg1i ? arg1i
10592 : build1 (IMAGPART_EXPR, rtype, arg1);
10593 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10598 if (flag_unsafe_math_optimizations
10599 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10600 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10601 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10602 return tem;
10604 /* Convert x+x into x*2.0. */
10605 if (operand_equal_p (arg0, arg1, 0)
10606 && SCALAR_FLOAT_TYPE_P (type))
10607 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10608 build_real (type, dconst2));
10610 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10611 We associate floats only if the user has specified
10612 -fassociative-math. */
10613 if (flag_associative_math
10614 && TREE_CODE (arg1) == PLUS_EXPR
10615 && TREE_CODE (arg0) != MULT_EXPR)
10617 tree tree10 = TREE_OPERAND (arg1, 0);
10618 tree tree11 = TREE_OPERAND (arg1, 1);
10619 if (TREE_CODE (tree11) == MULT_EXPR
10620 && TREE_CODE (tree10) == MULT_EXPR)
10622 tree tree0;
10623 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10624 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10627 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10628 We associate floats only if the user has specified
10629 -fassociative-math. */
10630 if (flag_associative_math
10631 && TREE_CODE (arg0) == PLUS_EXPR
10632 && TREE_CODE (arg1) != MULT_EXPR)
10634 tree tree00 = TREE_OPERAND (arg0, 0);
10635 tree tree01 = TREE_OPERAND (arg0, 1);
10636 if (TREE_CODE (tree01) == MULT_EXPR
10637 && TREE_CODE (tree00) == MULT_EXPR)
10639 tree tree0;
10640 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10641 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10646 bit_rotate:
10647 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10648 is a rotate of A by C1 bits. */
10649 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10650 is a rotate of A by B bits. */
10652 enum tree_code code0, code1;
10653 tree rtype;
10654 code0 = TREE_CODE (arg0);
10655 code1 = TREE_CODE (arg1);
10656 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10657 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10658 && operand_equal_p (TREE_OPERAND (arg0, 0),
10659 TREE_OPERAND (arg1, 0), 0)
10660 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10661 TYPE_UNSIGNED (rtype))
10662 /* Only create rotates in complete modes. Other cases are not
10663 expanded properly. */
10664 && (element_precision (rtype)
10665 == element_precision (TYPE_MODE (rtype))))
10667 tree tree01, tree11;
10668 enum tree_code code01, code11;
10670 tree01 = TREE_OPERAND (arg0, 1);
10671 tree11 = TREE_OPERAND (arg1, 1);
10672 STRIP_NOPS (tree01);
10673 STRIP_NOPS (tree11);
10674 code01 = TREE_CODE (tree01);
10675 code11 = TREE_CODE (tree11);
10676 if (code01 == INTEGER_CST
10677 && code11 == INTEGER_CST
10678 && TREE_INT_CST_HIGH (tree01) == 0
10679 && TREE_INT_CST_HIGH (tree11) == 0
10680 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10681 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10683 tem = build2_loc (loc, LROTATE_EXPR,
10684 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10685 TREE_OPERAND (arg0, 0),
10686 code0 == LSHIFT_EXPR ? tree01 : tree11);
10687 return fold_convert_loc (loc, type, tem);
10689 else if (code11 == MINUS_EXPR)
10691 tree tree110, tree111;
10692 tree110 = TREE_OPERAND (tree11, 0);
10693 tree111 = TREE_OPERAND (tree11, 1);
10694 STRIP_NOPS (tree110);
10695 STRIP_NOPS (tree111);
10696 if (TREE_CODE (tree110) == INTEGER_CST
10697 && 0 == compare_tree_int (tree110,
10698 element_precision
10699 (TREE_TYPE (TREE_OPERAND
10700 (arg0, 0))))
10701 && operand_equal_p (tree01, tree111, 0))
10702 return
10703 fold_convert_loc (loc, type,
10704 build2 ((code0 == LSHIFT_EXPR
10705 ? LROTATE_EXPR
10706 : RROTATE_EXPR),
10707 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10708 TREE_OPERAND (arg0, 0), tree01));
10710 else if (code01 == MINUS_EXPR)
10712 tree tree010, tree011;
10713 tree010 = TREE_OPERAND (tree01, 0);
10714 tree011 = TREE_OPERAND (tree01, 1);
10715 STRIP_NOPS (tree010);
10716 STRIP_NOPS (tree011);
10717 if (TREE_CODE (tree010) == INTEGER_CST
10718 && 0 == compare_tree_int (tree010,
10719 element_precision
10720 (TREE_TYPE (TREE_OPERAND
10721 (arg0, 0))))
10722 && operand_equal_p (tree11, tree011, 0))
10723 return fold_convert_loc
10724 (loc, type,
10725 build2 ((code0 != LSHIFT_EXPR
10726 ? LROTATE_EXPR
10727 : RROTATE_EXPR),
10728 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10729 TREE_OPERAND (arg0, 0), tree11));
10734 associate:
10735 /* In most languages, we can't associate operations on floats through
10736 parentheses. Rather than remember where the parentheses were, we
10737 don't associate floats at all, unless the user has specified
10738 -fassociative-math.
10739 And we also need to make sure the type is not saturating. */
10741 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10742 && !TYPE_SATURATING (type))
10744 tree var0, con0, lit0, minus_lit0;
10745 tree var1, con1, lit1, minus_lit1;
10746 tree atype = type;
10747 bool ok = true;
10749 /* Split both trees into variables, constants, and literals. Then
10750 associate each group together, the constants with literals,
10751 then the result with variables. This increases the chances of
10752 literals being recombined later and of generating relocatable
10753 expressions for the sum of a constant and literal. */
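 /* E.g. (x + 1) + (y + 2) splits into variables x, y and literals
    1, 2; the literals recombine to 3 and the sum is rebuilt as
    (x + y) + 3.  */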
10754 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10755 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10756 code == MINUS_EXPR);
10758 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10759 if (code == MINUS_EXPR)
10760 code = PLUS_EXPR;
10762 /* With undefined overflow prefer doing association in a type
10763 which wraps on overflow, if that is one of the operand types. */
10764 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10765 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10767 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10768 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10769 atype = TREE_TYPE (arg0);
10770 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10771 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10772 atype = TREE_TYPE (arg1);
10773 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10776 /* With undefined overflow we can only associate constants with one
10777 variable, and constants whose association doesn't overflow. */
10778 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10779 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10781 if (var0 && var1)
10783 tree tmp0 = var0;
10784 tree tmp1 = var1;
10786 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10787 tmp0 = TREE_OPERAND (tmp0, 0);
10788 if (CONVERT_EXPR_P (tmp0)
10789 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10790 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10791 <= TYPE_PRECISION (atype)))
10792 tmp0 = TREE_OPERAND (tmp0, 0);
10793 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10794 tmp1 = TREE_OPERAND (tmp1, 0);
10795 if (CONVERT_EXPR_P (tmp1)
10796 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10797 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10798 <= TYPE_PRECISION (atype)))
10799 tmp1 = TREE_OPERAND (tmp1, 0);
10800 /* The only case we can still associate with two variables
10801 is if they are the same, modulo negation and bit-pattern
10802 preserving conversions. */
10803 if (!operand_equal_p (tmp0, tmp1, 0))
10804 ok = false;
10808 /* Only do something if we found more than two objects. Otherwise,
10809 nothing has changed and we risk infinite recursion. */
10810 if (ok
10811 && (2 < ((var0 != 0) + (var1 != 0)
10812 + (con0 != 0) + (con1 != 0)
10813 + (lit0 != 0) + (lit1 != 0)
10814 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10816 bool any_overflows = false;
10817 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10818 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10819 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10820 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10821 var0 = associate_trees (loc, var0, var1, code, atype);
10822 con0 = associate_trees (loc, con0, con1, code, atype);
10823 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10824 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10825 code, atype);
10827 /* Preserve the MINUS_EXPR if the negative part of the literal is
10828 greater than the positive part. Otherwise, the multiplicative
10829 folding code (i.e. extract_muldiv) may be fooled when
10830 unsigned constants are subtracted, as in the following
10831 example: ((X*2 + 4) - 8U)/2. */
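 /* (Folding 4 - 8U eagerly would wrap to a huge unsigned constant,
    and dividing that wrapped constant by 2 is not the same as
    dividing before the wrap.)  */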
10832 if (minus_lit0 && lit0)
10834 if (TREE_CODE (lit0) == INTEGER_CST
10835 && TREE_CODE (minus_lit0) == INTEGER_CST
10836 && tree_int_cst_lt (lit0, minus_lit0))
10838 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10839 MINUS_EXPR, atype);
10840 lit0 = 0;
10842 else
10844 lit0 = associate_trees (loc, lit0, minus_lit0,
10845 MINUS_EXPR, atype);
10846 minus_lit0 = 0;
10850 /* Don't introduce overflows through reassociation. */
10851 if (!any_overflows
10852 && ((lit0 && TREE_OVERFLOW (lit0))
10853 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10854 return NULL_TREE;
10856 if (minus_lit0)
10858 if (con0 == 0)
10859 return
10860 fold_convert_loc (loc, type,
10861 associate_trees (loc, var0, minus_lit0,
10862 MINUS_EXPR, atype));
10863 else
10865 con0 = associate_trees (loc, con0, minus_lit0,
10866 MINUS_EXPR, atype);
10867 return
10868 fold_convert_loc (loc, type,
10869 associate_trees (loc, var0, con0,
10870 PLUS_EXPR, atype));
10874 con0 = associate_trees (loc, con0, lit0, code, atype);
10875 return
10876 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10877 code, atype));
10881 return NULL_TREE;
10883 case MINUS_EXPR:
10884 /* Pointer simplifications for subtraction, simple reassociations. */
10885 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10887 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10888 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10889 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10891 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10892 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10893 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10894 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10895 return fold_build2_loc (loc, PLUS_EXPR, type,
10896 fold_build2_loc (loc, MINUS_EXPR, type,
10897 arg00, arg10),
10898 fold_build2_loc (loc, MINUS_EXPR, type,
10899 arg01, arg11));
10901 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10902 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10904 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10905 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10906 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10907 fold_convert_loc (loc, type, arg1));
10908 if (tmp)
10909 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10912 /* A - (-B) -> A + B */
10913 if (TREE_CODE (arg1) == NEGATE_EXPR)
10914 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10915 fold_convert_loc (loc, type,
10916 TREE_OPERAND (arg1, 0)));
10918 /* Disable further optimizations involving UPC shared pointers,
10919 because integers are not interoperable with shared pointers.
10920 (The test below also detects pointer difference between
10921 shared pointers, which cannot be folded.) */
10923 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10924 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10925 return NULL_TREE;
10927 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10928 if (TREE_CODE (arg0) == NEGATE_EXPR
10929 && negate_expr_p (arg1)
10930 && reorder_operands_p (arg0, arg1))
10931 return fold_build2_loc (loc, MINUS_EXPR, type,
10932 fold_convert_loc (loc, type,
10933 negate_expr (arg1)),
10934 fold_convert_loc (loc, type,
10935 TREE_OPERAND (arg0, 0)));
10936 /* Convert -A - 1 to ~A. */
10937 if (TREE_CODE (type) != COMPLEX_TYPE
10938 && TREE_CODE (arg0) == NEGATE_EXPR
10939 && integer_onep (arg1)
10940 && !TYPE_OVERFLOW_TRAPS (type))
10941 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10942 fold_convert_loc (loc, type,
10943 TREE_OPERAND (arg0, 0)));
10945 /* Convert -1 - A to ~A. */
10946 if (TREE_CODE (type) != COMPLEX_TYPE
10947 && integer_all_onesp (arg0))
10948 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10951 /* X - (X / Y) * Y is X % Y. */
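 /* E.g. X - (X / 16) * 16 becomes X % 16; both the division and the
    modulus use truncating semantics here.  */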
10952 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10953 && TREE_CODE (arg1) == MULT_EXPR
10954 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10955 && operand_equal_p (arg0,
10956 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10957 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10958 TREE_OPERAND (arg1, 1), 0))
10959 return
10960 fold_convert_loc (loc, type,
10961 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10962 arg0, TREE_OPERAND (arg1, 1)));
10964 if (! FLOAT_TYPE_P (type))
10966 if (integer_zerop (arg0))
10967 return negate_expr (fold_convert_loc (loc, type, arg1));
10968 if (integer_zerop (arg1))
10969 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10971 /* Fold A - (A & B) into ~B & A. */
10972 if (!TREE_SIDE_EFFECTS (arg0)
10973 && TREE_CODE (arg1) == BIT_AND_EXPR)
10975 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10977 tree arg10 = fold_convert_loc (loc, type,
10978 TREE_OPERAND (arg1, 0));
10979 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10980 fold_build1_loc (loc, BIT_NOT_EXPR,
10981 type, arg10),
10982 fold_convert_loc (loc, type, arg0));
10984 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10986 tree arg11 = fold_convert_loc (loc,
10987 type, TREE_OPERAND (arg1, 1));
10988 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10989 fold_build1_loc (loc, BIT_NOT_EXPR,
10990 type, arg11),
10991 fold_convert_loc (loc, type, arg0));
10995 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10996 any power of 2 minus 1. */
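 /* E.g. with B = 7: the low bits of A ^ 7 are 7 - (A & 7), so
    (A ^ 7) - 7 equals (A & ~7) - (A & 7).  */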
10997 if (TREE_CODE (arg0) == BIT_AND_EXPR
10998 && TREE_CODE (arg1) == BIT_AND_EXPR
10999 && operand_equal_p (TREE_OPERAND (arg0, 0),
11000 TREE_OPERAND (arg1, 0), 0))
11002 tree mask0 = TREE_OPERAND (arg0, 1);
11003 tree mask1 = TREE_OPERAND (arg1, 1);
11004 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
11006 if (operand_equal_p (tem, mask1, 0))
11008 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
11009 TREE_OPERAND (arg0, 0), mask1);
11010 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
11015 /* See if ARG1 is zero and X - ARG1 reduces to X. */
11016 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
11017 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11019 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
11020 ARG0 is zero and X + ARG0 reduces to X, since that would mean
11021 (-ARG1 + ARG0) reduces to -ARG1. */
11022 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
11023 return negate_expr (fold_convert_loc (loc, type, arg1));
11025 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11026 __complex__ ( x, -y ). This is not the same for SNaNs or if
11027 signed zeros are involved. */
11028 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11029 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11030 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11032 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11033 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11034 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11035 bool arg0rz = false, arg0iz = false;
11036 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11037 || (arg0i && (arg0iz = real_zerop (arg0i))))
11039 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11040 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11041 if (arg0rz && arg1i && real_zerop (arg1i))
11043 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11044 arg1r ? arg1r
11045 : build1 (REALPART_EXPR, rtype, arg1));
11046 tree ip = arg0i ? arg0i
11047 : build1 (IMAGPART_EXPR, rtype, arg0);
11048 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11050 else if (arg0iz && arg1r && real_zerop (arg1r))
11052 tree rp = arg0r ? arg0r
11053 : build1 (REALPART_EXPR, rtype, arg0);
11054 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11055 arg1i ? arg1i
11056 : build1 (IMAGPART_EXPR, rtype, arg1));
11057 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11062 /* Fold &x - &x. This can happen from &x.foo - &x.
11063 This is unsafe for certain floats even in non-IEEE formats.
11064 In IEEE, it is unsafe because x - x is NaN, not 0, when x is a NaN.
11065 Also note that operand_equal_p is always false if an operand
11066 is volatile. */
11068 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11069 && operand_equal_p (arg0, arg1, 0))
11070 return build_zero_cst (type);
11072 /* A - B -> A + (-B) if B is easily negatable. */
11073 if (negate_expr_p (arg1)
11074 && ((FLOAT_TYPE_P (type)
11075 /* Avoid this transformation if B is a positive REAL_CST. */
11076 && (TREE_CODE (arg1) != REAL_CST
11077 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11078 || INTEGRAL_TYPE_P (type)))
11079 return fold_build2_loc (loc, PLUS_EXPR, type,
11080 fold_convert_loc (loc, type, arg0),
11081 fold_convert_loc (loc, type,
11082 negate_expr (arg1)));
11084 /* Try folding difference of addresses. */
11086 HOST_WIDE_INT diff;
11088 if ((TREE_CODE (arg0) == ADDR_EXPR
11089 || TREE_CODE (arg1) == ADDR_EXPR)
11090 && ptr_difference_const (arg0, arg1, &diff))
11091 return build_int_cst_type (type, diff);
11094 /* Fold &a[i] - &a[j] to i-j. */
11095 if (TREE_CODE (arg0) == ADDR_EXPR
11096 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11097 && TREE_CODE (arg1) == ADDR_EXPR
11098 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11100 tree tem = fold_addr_of_array_ref_difference (loc, type,
11101 TREE_OPERAND (arg0, 0),
11102 TREE_OPERAND (arg1, 0));
11103 if (tem)
11104 return tem;
11107 if (FLOAT_TYPE_P (type)
11108 && flag_unsafe_math_optimizations
11109 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11110 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11111 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11112 return tem;
11114 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11115 one. Make sure the type is not saturating and has the signedness of
11116 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11117 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11118 if ((TREE_CODE (arg0) == MULT_EXPR
11119 || TREE_CODE (arg1) == MULT_EXPR)
11120 && !TYPE_SATURATING (type)
11121 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11122 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11123 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11125 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11126 if (tem)
11127 return tem;
11130 goto associate;
11132 case MULT_EXPR:
11133 /* (-A) * (-B) -> A * B */
11134 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11135 return fold_build2_loc (loc, MULT_EXPR, type,
11136 fold_convert_loc (loc, type,
11137 TREE_OPERAND (arg0, 0)),
11138 fold_convert_loc (loc, type,
11139 negate_expr (arg1)));
11140 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11141 return fold_build2_loc (loc, MULT_EXPR, type,
11142 fold_convert_loc (loc, type,
11143 negate_expr (arg0)),
11144 fold_convert_loc (loc, type,
11145 TREE_OPERAND (arg1, 0)));
11147 if (! FLOAT_TYPE_P (type))
11149 if (integer_zerop (arg1))
11150 return omit_one_operand_loc (loc, type, arg1, arg0);
11151 if (integer_onep (arg1))
11152 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11153 /* Transform x * -1 into -x. Make sure to do the negation
11154 on the original operand with conversions not stripped
11155 because we can only strip non-sign-changing conversions. */
11156 if (integer_minus_onep (arg1))
11157 return fold_convert_loc (loc, type, negate_expr (op0));
11158 /* Transform x * -C into -x * C if x is easily negatable. */
11159 if (TREE_CODE (arg1) == INTEGER_CST
11160 && tree_int_cst_sgn (arg1) == -1
11161 && negate_expr_p (arg0)
11162 && (tem = negate_expr (arg1)) != arg1
11163 && !TREE_OVERFLOW (tem))
11164 return fold_build2_loc (loc, MULT_EXPR, type,
11165 fold_convert_loc (loc, type,
11166 negate_expr (arg0)),
11167 tem);
11169 /* (a * (1 << b)) is (a << b) */
11170 if (TREE_CODE (arg1) == LSHIFT_EXPR
11171 && integer_onep (TREE_OPERAND (arg1, 0)))
11172 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11173 TREE_OPERAND (arg1, 1));
11174 if (TREE_CODE (arg0) == LSHIFT_EXPR
11175 && integer_onep (TREE_OPERAND (arg0, 0)))
11176 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11177 TREE_OPERAND (arg0, 1));
11179 /* (A + A) * C -> A * 2 * C */
11180 if (TREE_CODE (arg0) == PLUS_EXPR
11181 && TREE_CODE (arg1) == INTEGER_CST
11182 && operand_equal_p (TREE_OPERAND (arg0, 0),
11183 TREE_OPERAND (arg0, 1), 0))
11184 return fold_build2_loc (loc, MULT_EXPR, type,
11185 omit_one_operand_loc (loc, type,
11186 TREE_OPERAND (arg0, 0),
11187 TREE_OPERAND (arg0, 1)),
11188 fold_build2_loc (loc, MULT_EXPR, type,
11189 build_int_cst (type, 2) , arg1));
11191 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11192 sign-changing only. */
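 /* E.g. (X /[ex] 4) * 4 folds back to X, since EXACT_DIV_EXPR
    asserts that the division leaves no remainder.  */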
11193 if (TREE_CODE (arg1) == INTEGER_CST
11194 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11195 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11196 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11198 strict_overflow_p = false;
11199 if (TREE_CODE (arg1) == INTEGER_CST
11200 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11201 &strict_overflow_p)))
11203 if (strict_overflow_p)
11204 fold_overflow_warning (("assuming signed overflow does not "
11205 "occur when simplifying "
11206 "multiplication"),
11207 WARN_STRICT_OVERFLOW_MISC);
11208 return fold_convert_loc (loc, type, tem);
11211 /* Optimize z * conj(z) for integer complex numbers. */
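 /* (a + bi) * (a - bi) = a*a + b*b, so the result is a complex
    value with that real part and a zero imaginary part.  */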
11212 if (TREE_CODE (arg0) == CONJ_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11214 return fold_mult_zconjz (loc, type, arg1);
11215 if (TREE_CODE (arg1) == CONJ_EXPR
11216 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11217 return fold_mult_zconjz (loc, type, arg0);
11219 else
11221 /* Maybe fold x * 0 to 0. The expressions aren't the same
11222 when x is NaN, since x * 0 is also NaN. Nor are they the
11223 same in modes with signed zeros, since multiplying a
11224 negative value by 0 gives -0, not +0. */
11225 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11226 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11227 && real_zerop (arg1))
11228 return omit_one_operand_loc (loc, type, arg1, arg0);
11229 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11230 Likewise for complex arithmetic with signed zeros. */
11231 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11232 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11233 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11234 && real_onep (arg1))
11235 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11237 /* Transform x * -1.0 into -x. */
11238 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11239 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11240 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11241 && real_minus_onep (arg1))
11242 return fold_convert_loc (loc, type, negate_expr (arg0));
11244 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11245 the result for floating-point types due to rounding, so it is applied
11246 only if -fassociative-math was specified. */
11247 if (flag_associative_math
11248 && TREE_CODE (arg0) == RDIV_EXPR
11249 && TREE_CODE (arg1) == REAL_CST
11250 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11252 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11253 arg1);
11254 if (tem)
11255 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11256 TREE_OPERAND (arg0, 1));
11259 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11260 if (operand_equal_p (arg0, arg1, 0))
11262 tree tem = fold_strip_sign_ops (arg0);
11263 if (tem != NULL_TREE)
11265 tem = fold_convert_loc (loc, type, tem);
11266 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11270 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11271 This is not the same for NaNs or if signed zeros are
11272 involved. */
11273 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11274 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11275 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11276 && TREE_CODE (arg1) == COMPLEX_CST
11277 && real_zerop (TREE_REALPART (arg1)))
11279 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11280 if (real_onep (TREE_IMAGPART (arg1)))
11281 return
11282 fold_build2_loc (loc, COMPLEX_EXPR, type,
11283 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11284 rtype, arg0)),
11285 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11286 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11287 return
11288 fold_build2_loc (loc, COMPLEX_EXPR, type,
11289 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11290 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11291 rtype, arg0)));
11294 /* Optimize z * conj(z) for floating point complex numbers.
11295 Guarded by flag_unsafe_math_optimizations as non-finite
11296 imaginary components don't produce scalar results. */
11297 if (flag_unsafe_math_optimizations
11298 && TREE_CODE (arg0) == CONJ_EXPR
11299 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11300 return fold_mult_zconjz (loc, type, arg1);
11301 if (flag_unsafe_math_optimizations
11302 && TREE_CODE (arg1) == CONJ_EXPR
11303 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11304 return fold_mult_zconjz (loc, type, arg0);
11306 if (flag_unsafe_math_optimizations)
11308 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11309 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11311 /* Optimizations of root(...)*root(...). */
11312 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11314 tree rootfn, arg;
11315 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11316 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11318 /* Optimize sqrt(x)*sqrt(x) as x. */
11319 if (BUILTIN_SQRT_P (fcode0)
11320 && operand_equal_p (arg00, arg10, 0)
11321 && ! HONOR_SNANS (TYPE_MODE (type)))
11322 return arg00;
11324 /* Optimize root(x)*root(y) as root(x*y). */
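 /* This requires unsafe math: e.g. sqrt(-1.0) * sqrt(-1.0) is NaN,
    while sqrt(-1.0 * -1.0) is 1.0.  */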
11325 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11326 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11327 return build_call_expr_loc (loc, rootfn, 1, arg);
11330 /* Optimize expN(x)*expN(y) as expN(x+y). */
11331 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11333 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11334 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11335 CALL_EXPR_ARG (arg0, 0),
11336 CALL_EXPR_ARG (arg1, 0));
11337 return build_call_expr_loc (loc, expfn, 1, arg);
11340 /* Optimizations of pow(...)*pow(...). */
11341 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11342 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11343 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11345 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11346 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11347 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11348 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11350 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11351 if (operand_equal_p (arg01, arg11, 0))
11353 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11354 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11355 arg00, arg10);
11356 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11359 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11360 if (operand_equal_p (arg00, arg10, 0))
11362 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11363 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11364 arg01, arg11);
11365 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11369 /* Optimize tan(x)*cos(x) as sin(x). */
11370 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11371 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11372 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11373 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11374 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11375 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11376 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11377 CALL_EXPR_ARG (arg1, 0), 0))
11379 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11381 if (sinfn != NULL_TREE)
11382 return build_call_expr_loc (loc, sinfn, 1,
11383 CALL_EXPR_ARG (arg0, 0));
11386 /* Optimize x*pow(x,c) as pow(x,c+1). */
11387 if (fcode1 == BUILT_IN_POW
11388 || fcode1 == BUILT_IN_POWF
11389 || fcode1 == BUILT_IN_POWL)
11391 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11392 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11393 if (TREE_CODE (arg11) == REAL_CST
11394 && !TREE_OVERFLOW (arg11)
11395 && operand_equal_p (arg0, arg10, 0))
11397 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11398 REAL_VALUE_TYPE c;
11399 tree arg;
11401 c = TREE_REAL_CST (arg11);
11402 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11403 arg = build_real (type, c);
11404 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11408 /* Optimize pow(x,c)*x as pow(x,c+1). */
11409 if (fcode0 == BUILT_IN_POW
11410 || fcode0 == BUILT_IN_POWF
11411 || fcode0 == BUILT_IN_POWL)
11413 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11414 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11415 if (TREE_CODE (arg01) == REAL_CST
11416 && !TREE_OVERFLOW (arg01)
11417 && operand_equal_p (arg1, arg00, 0))
11419 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11420 REAL_VALUE_TYPE c;
11421 tree arg;
11423 c = TREE_REAL_CST (arg01);
11424 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11425 arg = build_real (type, c);
11426 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11430 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11431 if (!in_gimple_form
11432 && optimize
11433 && operand_equal_p (arg0, arg1, 0))
11435 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11437 if (powfn)
11439 tree arg = build_real (type, dconst2);
11440 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11445 goto associate;
11447 case BIT_IOR_EXPR:
11448 bit_ior:
11449 if (integer_all_onesp (arg1))
11450 return omit_one_operand_loc (loc, type, arg1, arg0);
11451 if (integer_zerop (arg1))
11452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11453 if (operand_equal_p (arg0, arg1, 0))
11454 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11456 /* ~X | X is -1. */
11457 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11460 t1 = build_zero_cst (type);
11461 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11462 return omit_one_operand_loc (loc, type, t1, arg1);
11465 /* X | ~X is -1. */
11466 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11469 t1 = build_zero_cst (type);
11470 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11471 return omit_one_operand_loc (loc, type, t1, arg0);
11474 /* Canonicalize (X & C1) | C2. */
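 /* E.g. (X & 0x3c) | 0x0c: the bits already forced to 1 by C2 can
    be dropped from C1, giving (X & 0x30) | 0x0c.  */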
11475 if (TREE_CODE (arg0) == BIT_AND_EXPR
11476 && TREE_CODE (arg1) == INTEGER_CST
11477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11479 double_int c1, c2, c3, msk;
11480 int width = TYPE_PRECISION (type), w;
11481 bool try_simplify = true;
11483 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11484 c2 = tree_to_double_int (arg1);
11486 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11487 if ((c1 & c2) == c1)
11488 return omit_one_operand_loc (loc, type, arg1,
11489 TREE_OPERAND (arg0, 0));
11491 msk = double_int::mask (width);
11493 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11494 if (msk.and_not (c1 | c2).is_zero ())
11495 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11496 TREE_OPERAND (arg0, 0), arg1);
11498 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11499 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11500 mode which allows further optimizations. */
11501 c1 &= msk;
11502 c2 &= msk;
11503 c3 = c1.and_not (c2);
11504 for (w = BITS_PER_UNIT;
11505 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11506 w <<= 1)
11508 unsigned HOST_WIDE_INT mask
11509 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11510 if (((c1.low | c2.low) & mask) == mask
11511 && (c1.low & ~mask) == 0 && c1.high == 0)
11513 c3 = double_int::from_uhwi (mask);
11514 break;
11518 /* If X is a tree of the form (Y * K1) & K2, this simplification
11519 might conflict with the (X * CST1) & CST2 optimization among the
11520 BIT_AND_EXPR optimizations and end up in an infinite recursion. */
11521 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11522 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11523 == INTEGER_CST)
11525 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11526 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11528 try_simplify = (masked != c1);
11531 if (try_simplify && c3 != c1)
11532 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11533 fold_build2_loc (loc, BIT_AND_EXPR, type,
11534 TREE_OPERAND (arg0, 0),
11535 double_int_to_tree (type,
11536 c3)),
11537 arg1);
11540 /* (X & Y) | Y is (X, Y). */
11541 if (TREE_CODE (arg0) == BIT_AND_EXPR
11542 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11543 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11544 /* (X & Y) | X is (Y, X). */
11545 if (TREE_CODE (arg0) == BIT_AND_EXPR
11546 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11547 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11548 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11549 /* X | (X & Y) is (Y, X). */
11550 if (TREE_CODE (arg1) == BIT_AND_EXPR
11551 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11552 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11553 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11554 /* X | (Y & X) is (Y, X). */
11555 if (TREE_CODE (arg1) == BIT_AND_EXPR
11556 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11557 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11558 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11560 /* (X & ~Y) | (~X & Y) is X ^ Y */
11561 if (TREE_CODE (arg0) == BIT_AND_EXPR
11562 && TREE_CODE (arg1) == BIT_AND_EXPR)
11564 tree a0, a1, l0, l1, n0, n1;
11566 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11567 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11569 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11570 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11572 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11573 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11575 if ((operand_equal_p (n0, a0, 0)
11576 && operand_equal_p (n1, a1, 0))
11577 || (operand_equal_p (n0, a1, 0)
11578 && operand_equal_p (n1, a0, 0)))
11579 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11582 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11583 if (t1 != NULL_TREE)
11584 return t1;
11586 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11588 This results in more efficient code for machines without a NAND
11589 instruction. Combine will canonicalize to the first form
11590 which will allow use of NAND instructions provided by the
11591 backend if they exist. */
11592 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11593 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11595 return
11596 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11597 build2 (BIT_AND_EXPR, type,
11598 fold_convert_loc (loc, type,
11599 TREE_OPERAND (arg0, 0)),
11600 fold_convert_loc (loc, type,
11601 TREE_OPERAND (arg1, 0))));
11604 /* See if this can be simplified into a rotate first. If that
11605 is unsuccessful continue in the association code. */
11606 goto bit_rotate;
11608 case BIT_XOR_EXPR:
11609 if (integer_zerop (arg1))
11610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11611 if (integer_all_onesp (arg1))
11612 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11613 if (operand_equal_p (arg0, arg1, 0))
11614 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11616 /* ~X ^ X is -1. */
11617 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11618 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11620 t1 = build_zero_cst (type);
11621 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11622 return omit_one_operand_loc (loc, type, t1, arg1);
11625 /* X ^ ~X is -1. */
11626 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11627 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11629 t1 = build_zero_cst (type);
11630 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11631 return omit_one_operand_loc (loc, type, t1, arg0);
11634 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11635 with a constant, and the two constants have no bits in common,
11636 we should treat this as a BIT_IOR_EXPR since this may produce more
11637 simplifications. */
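 /* E.g. (X & 0x0f) ^ (Y & 0xf0): the two sides can never have a set
    bit in common, so ^ and | compute the same value.  */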
11638 if (TREE_CODE (arg0) == BIT_AND_EXPR
11639 && TREE_CODE (arg1) == BIT_AND_EXPR
11640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11641 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11642 && integer_zerop (const_binop (BIT_AND_EXPR,
11643 TREE_OPERAND (arg0, 1),
11644 TREE_OPERAND (arg1, 1))))
11646 code = BIT_IOR_EXPR;
11647 goto bit_ior;
11650 /* (X | Y) ^ X -> Y & ~X. */
11651 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11652 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11654 tree t2 = TREE_OPERAND (arg0, 1);
11655 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11656 arg1);
11657 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11658 fold_convert_loc (loc, type, t2),
11659 fold_convert_loc (loc, type, t1));
11660 return t1;
11663 /* (Y | X) ^ X -> Y & ~X. */
11664 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11665 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11667 tree t2 = TREE_OPERAND (arg0, 0);
11668 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11669 arg1);
11670 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11671 fold_convert_loc (loc, type, t2),
11672 fold_convert_loc (loc, type, t1));
11673 return t1;
11676 /* X ^ (X | Y) -> Y & ~X. */
11677 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11678 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11680 tree t2 = TREE_OPERAND (arg1, 1);
11681 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11682 arg0);
11683 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11684 fold_convert_loc (loc, type, t2),
11685 fold_convert_loc (loc, type, t1));
11686 return t1;
11689 /* X ^ (Y | X) -> Y & ~X. */
11690 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11691 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11693 tree t2 = TREE_OPERAND (arg1, 0);
11694 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11695 arg0);
11696 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11697 fold_convert_loc (loc, type, t2),
11698 fold_convert_loc (loc, type, t1));
11699 return t1;
11702 /* Convert ~X ^ ~Y to X ^ Y. */
11703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11704 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11705 return fold_build2_loc (loc, code, type,
11706 fold_convert_loc (loc, type,
11707 TREE_OPERAND (arg0, 0)),
11708 fold_convert_loc (loc, type,
11709 TREE_OPERAND (arg1, 0)));
11711 /* Convert ~X ^ C to X ^ ~C. */
11712 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11713 && TREE_CODE (arg1) == INTEGER_CST)
11714 return fold_build2_loc (loc, code, type,
11715 fold_convert_loc (loc, type,
11716 TREE_OPERAND (arg0, 0)),
11717 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11719 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11720 if (TREE_CODE (arg0) == BIT_AND_EXPR
11721 && integer_onep (TREE_OPERAND (arg0, 1))
11722 && integer_onep (arg1))
11723 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11724 build_zero_cst (TREE_TYPE (arg0)));
11726 /* Fold (X & Y) ^ Y as ~X & Y. */
11727 if (TREE_CODE (arg0) == BIT_AND_EXPR
11728 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11730 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11731 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11732 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11733 fold_convert_loc (loc, type, arg1));
11735 /* Fold (X & Y) ^ X as ~Y & X. */
11736 if (TREE_CODE (arg0) == BIT_AND_EXPR
11737 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11738 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11740 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11741 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11742 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11743 fold_convert_loc (loc, type, arg1));
11745 /* Fold X ^ (X & Y) as X & ~Y. */
11746 if (TREE_CODE (arg1) == BIT_AND_EXPR
11747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11749 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11750 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11751 fold_convert_loc (loc, type, arg0),
11752 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11754 /* Fold X ^ (Y & X) as ~Y & X. */
11755 if (TREE_CODE (arg1) == BIT_AND_EXPR
11756 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11757 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11759 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11760 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11761 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11762 fold_convert_loc (loc, type, arg0));
11765 /* See if this can be simplified into a rotate first. If that
11766 is unsuccessful continue in the association code. */
11767 goto bit_rotate;
11769 case BIT_AND_EXPR:
11770 if (integer_all_onesp (arg1))
11771 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11772 if (integer_zerop (arg1))
11773 return omit_one_operand_loc (loc, type, arg1, arg0);
11774 if (operand_equal_p (arg0, arg1, 0))
11775 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11777 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11778 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11779 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11780 || (TREE_CODE (arg0) == EQ_EXPR
11781 && integer_zerop (TREE_OPERAND (arg0, 1))))
11782 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11783 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11785 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11786 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11787 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11788 || (TREE_CODE (arg1) == EQ_EXPR
11789 && integer_zerop (TREE_OPERAND (arg1, 1))))
11790 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11791 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11793 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11794 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11795 && TREE_CODE (arg1) == INTEGER_CST
11796 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11798 tree tmp1 = fold_convert_loc (loc, type, arg1);
11799 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11800 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11801 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11802 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11803 return
11804 fold_convert_loc (loc, type,
11805 fold_build2_loc (loc, BIT_IOR_EXPR,
11806 type, tmp2, tmp3));
11809 /* (X | Y) & Y is (X, Y). */
11810 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11811 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11812 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11813 /* (X | Y) & X is (Y, X). */
11814 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11815 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11816 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11817 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11818 /* X & (X | Y) is (Y, X). */
11819 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11820 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11821 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11822 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11823 /* X & (Y | X) is (Y, X). */
11824 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11825 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11826 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11827 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11829 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11830 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11831 && integer_onep (TREE_OPERAND (arg0, 1))
11832 && integer_onep (arg1))
11834 tree tem2;
11835 tem = TREE_OPERAND (arg0, 0);
11836 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11837 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11838 tem, tem2);
11839 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11840 build_zero_cst (TREE_TYPE (tem)));
11842 /* Fold ~X & 1 as (X & 1) == 0. */
11843 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11844 && integer_onep (arg1))
11846 tree tem2;
11847 tem = TREE_OPERAND (arg0, 0);
11848 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11849 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11850 tem, tem2);
11851 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11852 build_zero_cst (TREE_TYPE (tem)));
11854 /* Fold !X & 1 as X == 0. */
11855 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11856 && integer_onep (arg1))
11858 tem = TREE_OPERAND (arg0, 0);
11859 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11860 build_zero_cst (TREE_TYPE (tem)));
11863 /* Fold (X ^ Y) & Y as ~X & Y. */
11864 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11865 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11867 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11868 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11869 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11870 fold_convert_loc (loc, type, arg1));
11872 /* Fold (X ^ Y) & X as ~Y & X. */
11873 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11874 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11875 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11877 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11878 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11879 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11880 fold_convert_loc (loc, type, arg1));
11882 /* Fold X & (X ^ Y) as X & ~Y. */
11883 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11884 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11886 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11887 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11888 fold_convert_loc (loc, type, arg0),
11889 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11891 /* Fold X & (Y ^ X) as ~Y & X. */
11892 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11893 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11894 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11896 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11897 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11898 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11899 fold_convert_loc (loc, type, arg0));
11902 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11903 multiple of 1 << CST. */
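 /* E.g. (X * 8) & -8 is just X * 8: the product already has its
    three low bits clear, so the mask removes nothing.  */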
11904 if (TREE_CODE (arg1) == INTEGER_CST)
11906 double_int cst1 = tree_to_double_int (arg1);
11907 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11908 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11909 if ((cst1 & ncst1) == ncst1
11910 && multiple_of_p (type, arg0,
11911 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11912 return fold_convert_loc (loc, type, arg0);
11915 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11916 bits from CST2. */
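 /* E.g. (X * 4) & 3 folds to 0, and (X * 4) & 7 folds to
    (X * 4) & 4, because X * 4 always has its two low bits clear.  */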
11917 if (TREE_CODE (arg1) == INTEGER_CST
11918 && TREE_CODE (arg0) == MULT_EXPR
11919 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11921 double_int masked
11922 = mask_with_tz (type, tree_to_double_int (arg1),
11923 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11925 if (masked.is_zero ())
11926 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11927 arg0, arg1);
11928 else if (masked != tree_to_double_int (arg1))
11929 return fold_build2_loc (loc, code, type, op0,
11930 double_int_to_tree (type, masked));
11933 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11934 ((A & N) + B) & M -> (A + B) & M
11935 Similarly if (N & M) == 0,
11936 ((A | N) + B) & M -> (A + B) & M
11937 and for - instead of + (or unary - instead of +)
11938 and/or ^ instead of |.
11939 If B is constant and (B & M) == 0, fold into A & M. */
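 /* E.g. ((A & 7) + B) & 7 -> (A + B) & 7: bits of A above the mask
    cannot influence the masked sum.  */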
11940 if (tree_fits_uhwi_p (arg1))
11942 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11943 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11944 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11945 && (TREE_CODE (arg0) == PLUS_EXPR
11946 || TREE_CODE (arg0) == MINUS_EXPR
11947 || TREE_CODE (arg0) == NEGATE_EXPR)
11948 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11949 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11951 tree pmop[2];
11952 int which = 0;
11953 unsigned HOST_WIDE_INT cst0;
11955 /* Now we know that arg0 is (C + D) or (C - D) or
11956 -C and that arg1 (M) is (1LL << cst) - 1.
11957 Store C into PMOP[0] and D into PMOP[1]. */
11958 pmop[0] = TREE_OPERAND (arg0, 0);
11959 pmop[1] = NULL;
11960 if (TREE_CODE (arg0) != NEGATE_EXPR)
11962 pmop[1] = TREE_OPERAND (arg0, 1);
11963 which = 1;
11966 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11967 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11968 & cst1) != cst1)
11969 which = -1;
11971 for (; which >= 0; which--)
11972 switch (TREE_CODE (pmop[which]))
11974 case BIT_AND_EXPR:
11975 case BIT_IOR_EXPR:
11976 case BIT_XOR_EXPR:
11977 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11978 != INTEGER_CST)
11979 break;
11980 /* tree_to_[su]hwi not used, because we don't care about
11981 the upper bits. */
11982 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11983 cst0 &= cst1;
11984 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11986 if (cst0 != cst1)
11987 break;
11989 else if (cst0 != 0)
11990 break;
11991 /* If C or D is of the form (A & N) where
11992 (N & M) == M, or of the form (A | N) or
11993 (A ^ N) where (N & M) == 0, replace it with A. */
11994 pmop[which] = TREE_OPERAND (pmop[which], 0);
11995 break;
11996 case INTEGER_CST:
11997 /* If C or D is a constant N where (N & M) == 0, it can be
11998 omitted (assumed 0). */
11999 if ((TREE_CODE (arg0) == PLUS_EXPR
12000 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
12001 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
12002 pmop[which] = NULL;
12003 break;
12004 default:
12005 break;
12008 /* Only build anything new if we optimized one or both arguments
12009 above. */
12010 if (pmop[0] != TREE_OPERAND (arg0, 0)
12011 || (TREE_CODE (arg0) != NEGATE_EXPR
12012 && pmop[1] != TREE_OPERAND (arg0, 1)))
12014 tree utype = TREE_TYPE (arg0);
12015 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
12017 /* Perform the operations in a type that has defined
12018 overflow behavior. */
12019 utype = unsigned_type_for (TREE_TYPE (arg0));
12020 if (pmop[0] != NULL)
12021 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
12022 if (pmop[1] != NULL)
12023 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
12026 if (TREE_CODE (arg0) == NEGATE_EXPR)
12027 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
12028 else if (TREE_CODE (arg0) == PLUS_EXPR)
12030 if (pmop[0] != NULL && pmop[1] != NULL)
12031 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
12032 pmop[0], pmop[1]);
12033 else if (pmop[0] != NULL)
12034 tem = pmop[0];
12035 else if (pmop[1] != NULL)
12036 tem = pmop[1];
12037 else
12038 return build_int_cst (type, 0);
12040 else if (pmop[0] == NULL)
12041 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
12042 else
12043 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
12044 pmop[0], pmop[1]);
12045 /* TEM is now the new binary +, - or unary - replacement. */
12046 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
12047 fold_convert_loc (loc, utype, arg1));
12048 return fold_convert_loc (loc, type, tem);
12053 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12054 if (t1 != NULL_TREE)
12055 return t1;
12056 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12057 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12058 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12060 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12062 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
12063 && (~TREE_INT_CST_LOW (arg1)
12064 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12065 return
12066 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12069 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12071 This results in more efficient code for machines without a NOR
12072 instruction. Combine will canonicalize to the first form
12073 which will allow use of NOR instructions provided by the
12074 backend if they exist. */
12075 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12076 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12078 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12079 build2 (BIT_IOR_EXPR, type,
12080 fold_convert_loc (loc, type,
12081 TREE_OPERAND (arg0, 0)),
12082 fold_convert_loc (loc, type,
12083 TREE_OPERAND (arg1, 0))));
12086 /* If arg0 is derived from the address of an object or function, we may
12087 be able to fold this expression using the object or function's
12088 alignment. */
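 /* E.g. when arg0 is the address of an object with known 8-byte
    alignment, masking with 7 yields the known residue, which is 0
    for an object starting exactly on that boundary.  */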
12089 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12091 unsigned HOST_WIDE_INT modulus, residue;
12092 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12094 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12095 integer_onep (arg1));
12097 /* This works because modulus is a power of 2. If this weren't the
12098 case, we'd have to replace it by its greatest power-of-2
12099 divisor: modulus & -modulus. */
12100 if (low < modulus)
12101 return build_int_cst (type, residue & low);
12104 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12105 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12106 if the new mask might be further optimized. */
12107 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12108 || TREE_CODE (arg0) == RSHIFT_EXPR)
12109 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12110 && TREE_CODE (arg1) == INTEGER_CST
12111 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12112 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12113 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12114 < TYPE_PRECISION (TREE_TYPE (arg0))))
12116 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12117 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12118 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12119 tree shift_type = TREE_TYPE (arg0);
12121 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12122 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12123 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12124 && TYPE_PRECISION (TREE_TYPE (arg0))
12125 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12127 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12128 tree arg00 = TREE_OPERAND (arg0, 0);
12129 /* See if more bits can be proven as zero because of
12130 zero extension. */
12131 if (TREE_CODE (arg00) == NOP_EXPR
12132 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12134 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12135 if (TYPE_PRECISION (inner_type)
12136 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12137 && TYPE_PRECISION (inner_type) < prec)
12139 prec = TYPE_PRECISION (inner_type);
12140 /* See if we can shorten the right shift. */
12141 if (shiftc < prec)
12142 shift_type = inner_type;
12145 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12146 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12147 zerobits <<= prec - shiftc;
12148 /* For an arithmetic shift, if the sign bit could be set, zerobits
12149 can actually contain sign bits, so no transformation is
12150 possible unless MASK masks them all away. In that
12151 case the shift needs to be converted into a logical shift. */
12152 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12153 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12155 if ((mask & zerobits) == 0)
12156 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12157 else
12158 zerobits = 0;
12162 /* ((X << 16) & 0xff00) is (X, 0). */
12163 if ((mask & zerobits) == mask)
12164 return omit_one_operand_loc (loc, type,
12165 build_int_cst (type, 0), arg0);
12167 newmask = mask | zerobits;
12168 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12170 /* Only do the transformation if NEWMASK is some integer
12171 mode's mask. */
12172 for (prec = BITS_PER_UNIT;
12173 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12174 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12175 break;
12176 if (prec < HOST_BITS_PER_WIDE_INT
12177 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12179 tree newmaskt;
12181 if (shift_type != TREE_TYPE (arg0))
12183 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12184 fold_convert_loc (loc, shift_type,
12185 TREE_OPERAND (arg0, 0)),
12186 TREE_OPERAND (arg0, 1));
12187 tem = fold_convert_loc (loc, type, tem);
12189 else
12190 tem = op0;
12191 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12192 if (!tree_int_cst_equal (newmaskt, arg1))
12193 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12198 goto associate;
12200 case RDIV_EXPR:
12201 /* Don't touch a floating-point divide by zero unless the mode
12202 of the constant can represent infinity. */
12203 if (TREE_CODE (arg1) == REAL_CST
12204 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12205 && real_zerop (arg1))
12206 return NULL_TREE;
12208 /* Optimize A / A to 1.0 if we don't care about
12209 NaNs or Infinities. Skip the transformation
12210 for non-real operands. */
12211 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12212 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12213 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12214 && operand_equal_p (arg0, arg1, 0))
12216 tree r = build_real (TREE_TYPE (arg0), dconst1);
12218 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12221 /* The complex version of the above A / A optimization. */
12222 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12223 && operand_equal_p (arg0, arg1, 0))
12225 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12226 if (! HONOR_NANS (TYPE_MODE (elem_type))
12227 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12229 tree r = build_real (elem_type, dconst1);
12230 /* omit_two_operands will call fold_convert for us. */
12231 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12235 /* (-A) / (-B) -> A / B */
12236 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12237 return fold_build2_loc (loc, RDIV_EXPR, type,
12238 TREE_OPERAND (arg0, 0),
12239 negate_expr (arg1));
12240 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12241 return fold_build2_loc (loc, RDIV_EXPR, type,
12242 negate_expr (arg0),
12243 TREE_OPERAND (arg1, 0));
12245 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12246 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12247 && real_onep (arg1))
12248 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12250 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12251 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12252 && real_minus_onep (arg1))
12253 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12254 negate_expr (arg0)));
12256 /* If ARG1 is a constant, we can convert this to a multiply by the
12257 reciprocal. This does not have the same rounding properties,
12258 so only do this if -freciprocal-math. We can actually
12259 always safely do it if ARG1 is a power of two, but it's hard to
12260 tell if it is or not in a portable manner. */
12261 if (optimize
12262 && (TREE_CODE (arg1) == REAL_CST
12263 || (TREE_CODE (arg1) == COMPLEX_CST
12264 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12265 || (TREE_CODE (arg1) == VECTOR_CST
12266 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12268 if (flag_reciprocal_math
12269 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12270 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12271 /* Find the reciprocal if optimizing and the result is exact.
12272 TODO: Complex reciprocal not implemented. */
12273 if (TREE_CODE (arg1) != COMPLEX_CST)
12275 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12277 if (inverse)
12278 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
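/* Illustrative examples: with -freciprocal-math, "x / 3.0" becomes
   "x * (1.0 / 3.0)", which may round differently.  By contrast
   "x / 4.0" becomes "x * 0.25" whenever we are optimizing, because
   the exact inverse of a power of two is representable and the
   product is bit-identical to the quotient.  */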
12281 /* Convert A/B/C to A/(B*C). */
12282 if (flag_reciprocal_math
12283 && TREE_CODE (arg0) == RDIV_EXPR)
12284 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12285 fold_build2_loc (loc, MULT_EXPR, type,
12286 TREE_OPERAND (arg0, 1), arg1));
12288 /* Convert A/(B/C) to (A/B)*C. */
12289 if (flag_reciprocal_math
12290 && TREE_CODE (arg1) == RDIV_EXPR)
12291 return fold_build2_loc (loc, MULT_EXPR, type,
12292 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12293 TREE_OPERAND (arg1, 0)),
12294 TREE_OPERAND (arg1, 1));
12296 /* Convert C1/(X*C2) into (C1/C2)/X. */
12297 if (flag_reciprocal_math
12298 && TREE_CODE (arg1) == MULT_EXPR
12299 && TREE_CODE (arg0) == REAL_CST
12300 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12302 tree tem = const_binop (RDIV_EXPR, arg0,
12303 TREE_OPERAND (arg1, 1));
12304 if (tem)
12305 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12306 TREE_OPERAND (arg1, 0));
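/* Illustrative example: with -freciprocal-math, "6.0 / (x * 3.0)"
   becomes "2.0 / x", folding the two constants at compile time.  */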
12309 if (flag_unsafe_math_optimizations)
12311 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12312 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12314 /* Optimize sin(x)/cos(x) as tan(x). */
12315 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12316 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12317 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12318 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12319 CALL_EXPR_ARG (arg1, 0), 0))
12321 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12323 if (tanfn != NULL_TREE)
12324 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12327 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12328 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12329 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12330 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12331 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12332 CALL_EXPR_ARG (arg1, 0), 0))
12334 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12336 if (tanfn != NULL_TREE)
12338 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12339 CALL_EXPR_ARG (arg0, 0));
12340 return fold_build2_loc (loc, RDIV_EXPR, type,
12341 build_real (type, dconst1), tmp);
12345 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12346 NaNs or Infinities. */
12347 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12348 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12349 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12351 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12352 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12354 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12355 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12356 && operand_equal_p (arg00, arg01, 0))
12358 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12360 if (cosfn != NULL_TREE)
12361 return build_call_expr_loc (loc, cosfn, 1, arg00);
12365 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12366 NaNs or Infinities. */
12367 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12368 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12369 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12371 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12372 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12374 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12375 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12376 && operand_equal_p (arg00, arg01, 0))
12378 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12380 if (cosfn != NULL_TREE)
12382 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12383 return fold_build2_loc (loc, RDIV_EXPR, type,
12384 build_real (type, dconst1),
12385 tmp);
12390 /* Optimize pow(x,c)/x as pow(x,c-1). */
12391 if (fcode0 == BUILT_IN_POW
12392 || fcode0 == BUILT_IN_POWF
12393 || fcode0 == BUILT_IN_POWL)
12395 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12396 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12397 if (TREE_CODE (arg01) == REAL_CST
12398 && !TREE_OVERFLOW (arg01)
12399 && operand_equal_p (arg1, arg00, 0))
12401 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12402 REAL_VALUE_TYPE c;
12403 tree arg;
12405 c = TREE_REAL_CST (arg01);
12406 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12407 arg = build_real (type, c);
12408 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12412 /* Optimize a/root(b/c) into a*root(c/b). */
12413 if (BUILTIN_ROOT_P (fcode1))
12415 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12417 if (TREE_CODE (rootarg) == RDIV_EXPR)
12419 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12420 tree b = TREE_OPERAND (rootarg, 0);
12421 tree c = TREE_OPERAND (rootarg, 1);
12423 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12425 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12426 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
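/* Illustrative example: "a / sqrt(b / c)" becomes "a * sqrt(c / b)",
   replacing the outer division with a multiplication.  */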
12430 /* Optimize x/expN(y) into x*expN(-y). */
12431 if (BUILTIN_EXPONENT_P (fcode1))
12433 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12434 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12435 arg1 = build_call_expr_loc (loc,
12436 expfn, 1,
12437 fold_convert_loc (loc, type, arg));
12438 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12441 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12442 if (fcode1 == BUILT_IN_POW
12443 || fcode1 == BUILT_IN_POWF
12444 || fcode1 == BUILT_IN_POWL)
12446 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12447 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12448 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12449 tree neg11 = fold_convert_loc (loc, type,
12450 negate_expr (arg11));
12451 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12452 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12455 return NULL_TREE;
12457 case TRUNC_DIV_EXPR:
12458 /* Optimize (X & (-A)) / A where A is a power of 2,
12459 to X >> log2(A).  */

12460 if (TREE_CODE (arg0) == BIT_AND_EXPR
12461 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12462 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12464 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12465 arg1, TREE_OPERAND (arg0, 1));
12466 if (sum && integer_zerop (sum)) {
12467 unsigned long pow2;
12469 if (TREE_INT_CST_LOW (arg1))
12470 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12471 else
12472 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12473 + HOST_BITS_PER_WIDE_INT;
12475 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12476 TREE_OPERAND (arg0, 0),
12477 build_int_cst (integer_type_node, pow2));
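/* Worked example: for signed x, "(x & -16) / 16" folds to "x >> 4";
   the mask makes the dividend an exact multiple of 16, so the
   truncating division is equivalent to an arithmetic shift.  */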
12481 /* Fall through */
12483 case FLOOR_DIV_EXPR:
12484 /* Simplify A / (B << N) where A and B are positive and B is
12485 a power of 2, to A >> (N + log2(B)). */
12486 strict_overflow_p = false;
12487 if (TREE_CODE (arg1) == LSHIFT_EXPR
12488 && (TYPE_UNSIGNED (type)
12489 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12491 tree sval = TREE_OPERAND (arg1, 0);
12492 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12494 tree sh_cnt = TREE_OPERAND (arg1, 1);
12495 unsigned long pow2;
12497 if (TREE_INT_CST_LOW (sval))
12498 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12499 else
12500 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12501 + HOST_BITS_PER_WIDE_INT;
12503 if (strict_overflow_p)
12504 fold_overflow_warning (("assuming signed overflow does not "
12505 "occur when simplifying A / (B << N)"),
12506 WARN_STRICT_OVERFLOW_MISC);
12508 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12509 sh_cnt,
12510 build_int_cst (TREE_TYPE (sh_cnt),
12511 pow2));
12512 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12513 fold_convert_loc (loc, type, arg0), sh_cnt);
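/* Worked example: for unsigned a, "a / (2u << n)" folds to
   "a >> (n + 1)", since log2(2) == 1.  */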
12517 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12518 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12519 if (INTEGRAL_TYPE_P (type)
12520 && TYPE_UNSIGNED (type)
12521 && code == FLOOR_DIV_EXPR)
12522 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12524 /* Fall through */
12526 case ROUND_DIV_EXPR:
12527 case CEIL_DIV_EXPR:
12528 case EXACT_DIV_EXPR:
12529 if (integer_onep (arg1))
12530 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12531 if (integer_zerop (arg1))
12532 return NULL_TREE;
12533 /* X / -1 is -X. */
12534 if (!TYPE_UNSIGNED (type)
12535 && TREE_CODE (arg1) == INTEGER_CST
12536 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12537 && TREE_INT_CST_HIGH (arg1) == -1)
12538 return fold_convert_loc (loc, type, negate_expr (arg0));
12540 /* Convert -A / -B to A / B when the type is signed and overflow is
12541 undefined. */
12542 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12543 && TREE_CODE (arg0) == NEGATE_EXPR
12544 && negate_expr_p (arg1))
12546 if (INTEGRAL_TYPE_P (type))
12547 fold_overflow_warning (("assuming signed overflow does not occur "
12548 "when distributing negation across "
12549 "division"),
12550 WARN_STRICT_OVERFLOW_MISC);
12551 return fold_build2_loc (loc, code, type,
12552 fold_convert_loc (loc, type,
12553 TREE_OPERAND (arg0, 0)),
12554 fold_convert_loc (loc, type,
12555 negate_expr (arg1)));
12557 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12558 && TREE_CODE (arg1) == NEGATE_EXPR
12559 && negate_expr_p (arg0))
12561 if (INTEGRAL_TYPE_P (type))
12562 fold_overflow_warning (("assuming signed overflow does not occur "
12563 "when distributing negation across "
12564 "division"),
12565 WARN_STRICT_OVERFLOW_MISC);
12566 return fold_build2_loc (loc, code, type,
12567 fold_convert_loc (loc, type,
12568 negate_expr (arg0)),
12569 fold_convert_loc (loc, type,
12570 TREE_OPERAND (arg1, 0)));
12573 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12574 operation, EXACT_DIV_EXPR.
12576 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12577 At one time others generated faster code; it's not clear whether they
12578 still do after the last round of changes to the DIV code in expmed.c. */
12579 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12580 && multiple_of_p (type, arg0, arg1))
12581 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12583 strict_overflow_p = false;
12584 if (TREE_CODE (arg1) == INTEGER_CST
12585 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12586 &strict_overflow_p)))
12588 if (strict_overflow_p)
12589 fold_overflow_warning (("assuming signed overflow does not occur "
12590 "when simplifying division"),
12591 WARN_STRICT_OVERFLOW_MISC);
12592 return fold_convert_loc (loc, type, tem);
12595 return NULL_TREE;
12597 case CEIL_MOD_EXPR:
12598 case FLOOR_MOD_EXPR:
12599 case ROUND_MOD_EXPR:
12600 case TRUNC_MOD_EXPR:
12601 /* X % 1 is always zero, but be sure to preserve any side
12602 effects in X. */
12603 if (integer_onep (arg1))
12604 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12606 /* For X % 0, return the expression unchanged so that we get the
12607 proper warnings and errors.  */
12608 if (integer_zerop (arg1))
12609 return NULL_TREE;
12611 /* 0 % X is always zero, but be sure to preserve any side
12612 effects in X. Place this after checking for X == 0. */
12613 if (integer_zerop (arg0))
12614 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12616 /* X % -1 is zero. */
12617 if (!TYPE_UNSIGNED (type)
12618 && TREE_CODE (arg1) == INTEGER_CST
12619 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12620 && TREE_INT_CST_HIGH (arg1) == -1)
12621 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12623 /* X % -C is the same as X % C. */
12624 if (code == TRUNC_MOD_EXPR
12625 && !TYPE_UNSIGNED (type)
12626 && TREE_CODE (arg1) == INTEGER_CST
12627 && !TREE_OVERFLOW (arg1)
12628 && TREE_INT_CST_HIGH (arg1) < 0
12629 && !TYPE_OVERFLOW_TRAPS (type)
12630 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12631 && !sign_bit_p (arg1, arg1))
12632 return fold_build2_loc (loc, code, type,
12633 fold_convert_loc (loc, type, arg0),
12634 fold_convert_loc (loc, type,
12635 negate_expr (arg1)));
12637 /* X % -Y is the same as X % Y. */
12638 if (code == TRUNC_MOD_EXPR
12639 && !TYPE_UNSIGNED (type)
12640 && TREE_CODE (arg1) == NEGATE_EXPR
12641 && !TYPE_OVERFLOW_TRAPS (type))
12642 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12643 fold_convert_loc (loc, type,
12644 TREE_OPERAND (arg1, 0)));
12646 strict_overflow_p = false;
12647 if (TREE_CODE (arg1) == INTEGER_CST
12648 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12649 &strict_overflow_p)))
12651 if (strict_overflow_p)
12652 fold_overflow_warning (("assuming signed overflow does not occur "
12653 "when simplifying modulus"),
12654 WARN_STRICT_OVERFLOW_MISC);
12655 return fold_convert_loc (loc, type, tem);
12658 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12659 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12660 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12661 && (TYPE_UNSIGNED (type)
12662 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12664 tree c = arg1;
12665 /* Also optimize A % (C << N) where C is a power of 2,
12666 to A & ((C << N) - 1). */
12667 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12668 c = TREE_OPERAND (arg1, 0);
12670 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12672 tree mask
12673 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12674 build_int_cst (TREE_TYPE (arg1), 1));
12675 if (strict_overflow_p)
12676 fold_overflow_warning (("assuming signed overflow does not "
12677 "occur when simplifying "
12678 "X % (power of two)"),
12679 WARN_STRICT_OVERFLOW_MISC);
12680 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12681 fold_convert_loc (loc, type, arg0),
12682 fold_convert_loc (loc, type, mask));
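/* Worked examples: for unsigned x, "x % 8" folds to "x & 7", and
   "x % (4u << n)" folds to "x & ((4u << n) - 1)".  */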
12686 return NULL_TREE;
12688 case LROTATE_EXPR:
12689 case RROTATE_EXPR:
12690 if (integer_all_onesp (arg0))
12691 return omit_one_operand_loc (loc, type, arg0, arg1);
12692 goto shift;
12694 case RSHIFT_EXPR:
12695 /* Optimize -1 >> x for arithmetic right shifts. */
12696 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12697 && tree_expr_nonnegative_p (arg1))
12698 return omit_one_operand_loc (loc, type, arg0, arg1);
12699 /* ... fall through ... */
12701 case LSHIFT_EXPR:
12702 shift:
12703 if (integer_zerop (arg1))
12704 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12705 if (integer_zerop (arg0))
12706 return omit_one_operand_loc (loc, type, arg0, arg1);
12708 /* Prefer vector1 << scalar to vector1 << vector2
12709 if vector2 is uniform. */
12710 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12711 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12712 return fold_build2_loc (loc, code, type, op0, tem);
12714 /* Since a negative shift count is not well-defined,
12715 don't try to compute it in the compiler. */
12716 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12717 return NULL_TREE;
12719 prec = element_precision (type);
12721 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12722 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12723 && tree_to_uhwi (arg1) < prec
12724 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12725 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12727 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12728 + tree_to_uhwi (arg1));
12730 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12731 being well defined. */
12732 if (low >= prec)
12734 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12735 low = low % prec;
12736 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12737 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12738 TREE_OPERAND (arg0, 0));
12739 else
12740 low = prec - 1;
12743 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12744 build_int_cst (TREE_TYPE (arg1), low));
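/* Worked example, assuming 32-bit int: "(x << 3) << 5" folds to
   "x << 8".  When the combined count reaches the precision, as in
   "(x << 20) << 12", a left or unsigned right shift folds to zero,
   a signed right shift clamps the count to prec - 1, and rotate
   counts simply wrap modulo the precision.  */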
12747 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12748 into x & ((unsigned)-1 >> c) for unsigned types. */
12749 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12750 || (TYPE_UNSIGNED (type)
12751 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12752 && tree_fits_uhwi_p (arg1)
12753 && tree_to_uhwi (arg1) < prec
12754 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12755 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12757 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12758 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12759 tree lshift;
12760 tree arg00;
12762 if (low0 == low1)
12764 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12766 lshift = build_minus_one_cst (type);
12767 lshift = const_binop (code, lshift, arg1);
12769 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12773 /* Rewrite an LROTATE_EXPR by a constant into an
12774 RROTATE_EXPR by a new constant. */
12775 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12777 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12778 tem = const_binop (MINUS_EXPR, tem, arg1);
12779 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
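/* Worked example, assuming a 32-bit type: a rotate-left by 8 is
   canonicalized to a rotate-right by 24, so later folds need only
   handle RROTATE_EXPR.  */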
12782 /* If we have a rotate of a bit operation with the rotate count and
12783 the second operand of the bit operation both constant,
12784 permute the two operations. */
12785 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12786 && (TREE_CODE (arg0) == BIT_AND_EXPR
12787 || TREE_CODE (arg0) == BIT_IOR_EXPR
12788 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12789 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12790 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12791 fold_build2_loc (loc, code, type,
12792 TREE_OPERAND (arg0, 0), arg1),
12793 fold_build2_loc (loc, code, type,
12794 TREE_OPERAND (arg0, 1), arg1));
12796 /* Two consecutive rotates adding up to the precision of the
12797 type can be ignored. */
12798 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12799 && TREE_CODE (arg0) == RROTATE_EXPR
12800 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12801 && TREE_INT_CST_HIGH (arg1) == 0
12802 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12803 && ((TREE_INT_CST_LOW (arg1)
12804 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12805 == prec))
12806 return TREE_OPERAND (arg0, 0);
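/* Worked example, assuming a 32-bit type: rotating right by 24 and
   then right by 8 moves every bit a full 32 positions, so the pair
   cancels and the original operand is returned.  */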
12808 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12809 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12810 if the latter can be further optimized. */
12811 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12812 && TREE_CODE (arg0) == BIT_AND_EXPR
12813 && TREE_CODE (arg1) == INTEGER_CST
12814 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12816 tree mask = fold_build2_loc (loc, code, type,
12817 fold_convert_loc (loc, type,
12818 TREE_OPERAND (arg0, 1)),
12819 arg1);
12820 tree shift = fold_build2_loc (loc, code, type,
12821 fold_convert_loc (loc, type,
12822 TREE_OPERAND (arg0, 0)),
12823 arg1);
12824 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12825 if (tem)
12826 return tem;
12829 return NULL_TREE;
12831 case MIN_EXPR:
12832 if (operand_equal_p (arg0, arg1, 0))
12833 return omit_one_operand_loc (loc, type, arg0, arg1);
12834 if (INTEGRAL_TYPE_P (type)
12835 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12836 return omit_one_operand_loc (loc, type, arg1, arg0);
12837 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12838 if (tem)
12839 return tem;
12840 goto associate;
12842 case MAX_EXPR:
12843 if (operand_equal_p (arg0, arg1, 0))
12844 return omit_one_operand_loc (loc, type, arg0, arg1);
12845 if (INTEGRAL_TYPE_P (type)
12846 && TYPE_MAX_VALUE (type)
12847 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12848 return omit_one_operand_loc (loc, type, arg1, arg0);
12849 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12850 if (tem)
12851 return tem;
12852 goto associate;
12854 case TRUTH_ANDIF_EXPR:
12855 /* Note that the operands of this must be ints
12856 and their values must be 0 or 1.
12857 ("true" is a fixed value perhaps depending on the language.) */
12858 /* If first arg is constant zero, return it. */
12859 if (integer_zerop (arg0))
12860 return fold_convert_loc (loc, type, arg0);
12861 case TRUTH_AND_EXPR:
12862 /* If either arg is constant true, drop it. */
12863 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12864 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12865 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12866 /* Preserve sequence points. */
12867 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12868 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12869 /* If second arg is constant zero, result is zero, but first arg
12870 must be evaluated. */
12871 if (integer_zerop (arg1))
12872 return omit_one_operand_loc (loc, type, arg1, arg0);
12873 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12874 case will be handled here. */
12875 if (integer_zerop (arg0))
12876 return omit_one_operand_loc (loc, type, arg0, arg1);
12878 /* !X && X is always false. */
12879 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12880 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12881 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12882 /* X && !X is always false. */
12883 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12884 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12885 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12887 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12888 means A >= Y && A != MAX, but in this case we know that
12889 A < X <= MAX. */
12891 if (!TREE_SIDE_EFFECTS (arg0)
12892 && !TREE_SIDE_EFFECTS (arg1))
12894 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12895 if (tem && !operand_equal_p (tem, arg0, 0))
12896 return fold_build2_loc (loc, code, type, tem, arg1);
12898 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12899 if (tem && !operand_equal_p (tem, arg1, 0))
12900 return fold_build2_loc (loc, code, type, arg0, tem);
12903 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12904 != NULL_TREE)
12905 return tem;
12907 return NULL_TREE;
12909 case TRUTH_ORIF_EXPR:
12910 /* Note that the operands of this must be ints
12911 and their values must be 0 or true.
12912 ("true" is a fixed value perhaps depending on the language.) */
12913 /* If first arg is constant true, return it. */
12914 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12915 return fold_convert_loc (loc, type, arg0);
12916 case TRUTH_OR_EXPR:
12917 /* If either arg is constant zero, drop it. */
12918 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12919 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12920 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12921 /* Preserve sequence points. */
12922 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12923 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12924 /* If second arg is constant true, result is true, but we must
12925 evaluate first arg. */
12926 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12927 return omit_one_operand_loc (loc, type, arg1, arg0);
12928 /* Likewise for first arg, but note this only occurs here for
12929 TRUTH_OR_EXPR. */
12930 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12931 return omit_one_operand_loc (loc, type, arg0, arg1);
12933 /* !X || X is always true. */
12934 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12935 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12936 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12937 /* X || !X is always true. */
12938 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12939 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12940 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12942 /* (X && !Y) || (!X && Y) is X ^ Y */
12943 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12944 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12946 tree a0, a1, l0, l1, n0, n1;
12948 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12949 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12951 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12952 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12954 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12955 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12957 if ((operand_equal_p (n0, a0, 0)
12958 && operand_equal_p (n1, a1, 0))
12959 || (operand_equal_p (n0, a1, 0)
12960 && operand_equal_p (n1, a0, 0)))
12961 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12964 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12965 != NULL_TREE)
12966 return tem;
12968 return NULL_TREE;
12970 case TRUTH_XOR_EXPR:
12971 /* If the second arg is constant zero, drop it. */
12972 if (integer_zerop (arg1))
12973 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12974 /* If the second arg is constant true, this is a logical inversion. */
12975 if (integer_onep (arg1))
12977 tem = invert_truthvalue_loc (loc, arg0);
12978 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12980 /* Identical arguments cancel to zero. */
12981 if (operand_equal_p (arg0, arg1, 0))
12982 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12984 /* !X ^ X is always true. */
12985 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12987 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12989 /* X ^ !X is always true. */
12990 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12991 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12992 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12994 return NULL_TREE;
12996 case EQ_EXPR:
12997 case NE_EXPR:
12998 STRIP_NOPS (arg0);
12999 STRIP_NOPS (arg1);
13001 tem = fold_comparison (loc, code, type, op0, op1);
13002 if (tem != NULL_TREE)
13003 return tem;
13005 /* bool_var != 0 becomes bool_var. */
13006 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
13007 && code == NE_EXPR)
13008 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13010 /* bool_var == 1 becomes bool_var. */
13011 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
13012 && code == EQ_EXPR)
13013 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13015 /* bool_var != 1 becomes !bool_var. */
13016 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
13017 && code == NE_EXPR)
13018 return fold_convert_loc (loc, type,
13019 fold_build1_loc (loc, TRUTH_NOT_EXPR,
13020 TREE_TYPE (arg0), arg0));
13022 /* bool_var == 0 becomes !bool_var. */
13023 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
13024 && code == EQ_EXPR)
13025 return fold_convert_loc (loc, type,
13026 fold_build1_loc (loc, TRUTH_NOT_EXPR,
13027 TREE_TYPE (arg0), arg0));
13029 /* !exp != 0 becomes !exp */
13030 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
13031 && code == NE_EXPR)
13032 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
13034 /* If this is an equality comparison of the address of two non-weak,
13035 unaliased symbols neither of which are extern (since we do not
13036 have access to attributes for externs), then we know the result. */
13037 if (TREE_CODE (arg0) == ADDR_EXPR
13038 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
13039 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
13040 && ! lookup_attribute ("alias",
13041 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
13042 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
13043 && TREE_CODE (arg1) == ADDR_EXPR
13044 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
13045 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
13046 && ! lookup_attribute ("alias",
13047 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13048 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13050 /* We know that we're looking at the address of two
13051 non-weak, unaliased, static _DECL nodes.
13053 It is both wasteful and incorrect to call operand_equal_p
13054 to compare the two ADDR_EXPR nodes. It is wasteful in that
13055 all we need to do is test pointer equality for the arguments
13056 to the two ADDR_EXPR nodes. It is incorrect to use
13057 operand_equal_p as that function is NOT equivalent to a
13058 C equality test. It can in fact return false for two
13059 objects which would test as equal using the C equality
13060 operator. */
13061 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13062 return constant_boolean_node (equal
13063 ? code == EQ_EXPR : code != EQ_EXPR,
13064 type);
13067 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13068 a MINUS_EXPR of a constant, we can convert it into a comparison with
13069 a revised constant as long as no overflow occurs. */
13070 if (TREE_CODE (arg1) == INTEGER_CST
13071 && (TREE_CODE (arg0) == PLUS_EXPR
13072 || TREE_CODE (arg0) == MINUS_EXPR)
13073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13074 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13075 ? MINUS_EXPR : PLUS_EXPR,
13076 fold_convert_loc (loc, TREE_TYPE (arg0),
13077 arg1),
13078 TREE_OPERAND (arg0, 1)))
13079 && !TREE_OVERFLOW (tem))
13080 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13082 /* Similarly for a NEGATE_EXPR. */
13083 if (TREE_CODE (arg0) == NEGATE_EXPR
13084 && TREE_CODE (arg1) == INTEGER_CST
13085 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13086 arg1)))
13087 && TREE_CODE (tem) == INTEGER_CST
13088 && !TREE_OVERFLOW (tem))
13089 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13091 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13092 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13093 && TREE_CODE (arg1) == INTEGER_CST
13094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13095 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13096 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13097 fold_convert_loc (loc,
13098 TREE_TYPE (arg0),
13099 arg1),
13100 TREE_OPERAND (arg0, 1)));
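/* Worked examples: "x + 5 == 7" folds to "x == 2" when the adjusted
   constant does not overflow, and "(x ^ 4) == 6" folds to "x == 2"
   since 4 ^ 6 == 2.  */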
13102 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13103 if ((TREE_CODE (arg0) == PLUS_EXPR
13104 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13105 || TREE_CODE (arg0) == MINUS_EXPR)
13106 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13107 0)),
13108 arg1, 0)
13109 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13110 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13112 tree val = TREE_OPERAND (arg0, 1);
13113 return omit_two_operands_loc (loc, type,
13114 fold_build2_loc (loc, code, type,
13115 val,
13116 build_int_cst (TREE_TYPE (val),
13117 0)),
13118 TREE_OPERAND (arg0, 0), arg1);
13121 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13122 if (TREE_CODE (arg0) == MINUS_EXPR
13123 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13124 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13125 1)),
13126 arg1, 0)
13127 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13129 return omit_two_operands_loc (loc, type,
13130 code == NE_EXPR
13131 ? boolean_true_node : boolean_false_node,
13132 TREE_OPERAND (arg0, 1), arg1);
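/* The transform above is valid because "C - x == x" would require
   2 * x == C, which has no integer solution when C is odd; e.g.
   "5 - x == x" folds to false and "5 - x != x" folds to true.  */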
13135 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13136 for !=. Don't do this for ordered comparisons due to overflow. */
13137 if (TREE_CODE (arg0) == MINUS_EXPR
13138 && integer_zerop (arg1))
13139 return fold_build2_loc (loc, code, type,
13140 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13142 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13143 if (TREE_CODE (arg0) == ABS_EXPR
13144 && (integer_zerop (arg1) || real_zerop (arg1)))
13145 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13147 /* If this is an EQ or NE comparison with zero and ARG0 is
13148 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13149 two operations, but the latter can be done in one less insn
13150 on machines that have only two-operand insns or on which a
13151 constant cannot be the first operand. */
13152 if (TREE_CODE (arg0) == BIT_AND_EXPR
13153 && integer_zerop (arg1))
13155 tree arg00 = TREE_OPERAND (arg0, 0);
13156 tree arg01 = TREE_OPERAND (arg0, 1);
13157 if (TREE_CODE (arg00) == LSHIFT_EXPR
13158 && integer_onep (TREE_OPERAND (arg00, 0)))
13160 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13161 arg01, TREE_OPERAND (arg00, 1));
13162 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13163 build_int_cst (TREE_TYPE (arg0), 1));
13164 return fold_build2_loc (loc, code, type,
13165 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13166 arg1);
13168 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13169 && integer_onep (TREE_OPERAND (arg01, 0)))
13171 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13172 arg00, TREE_OPERAND (arg01, 1));
13173 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13174 build_int_cst (TREE_TYPE (arg0), 1));
13175 return fold_build2_loc (loc, code, type,
13176 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13177 arg1);
13181 /* If this is an NE or EQ comparison of zero against the result of a
13182 signed MOD operation whose second operand is a power of 2, make
13183 the MOD operation unsigned since it is simpler and equivalent. */
13184 if (integer_zerop (arg1)
13185 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13186 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13187 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13188 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13189 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13190 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13192 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13193 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13194 fold_convert_loc (loc, newtype,
13195 TREE_OPERAND (arg0, 0)),
13196 fold_convert_loc (loc, newtype,
13197 TREE_OPERAND (arg0, 1)));
13199 return fold_build2_loc (loc, code, type, newmod,
13200 fold_convert_loc (loc, newtype, arg1));
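/* Worked example: for int x, "x % 4 == 0" becomes
   "(unsigned) x % 4 == 0", which the TRUNC_MOD_EXPR fold earlier in
   this function can then reduce to a simple bit test against 3.  */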
13203 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13204 C1 is a valid shift constant, and C2 is a power of two, i.e.
13205 a single bit. */
13206 if (TREE_CODE (arg0) == BIT_AND_EXPR
13207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13208 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13209 == INTEGER_CST
13210 && integer_pow2p (TREE_OPERAND (arg0, 1))
13211 && integer_zerop (arg1))
13213 tree itype = TREE_TYPE (arg0);
13214 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13215 prec = TYPE_PRECISION (itype);
13217 /* Check for a valid shift count. */
13218 if (TREE_INT_CST_HIGH (arg001) == 0
13219 && TREE_INT_CST_LOW (arg001) < prec)
13221 tree arg01 = TREE_OPERAND (arg0, 1);
13222 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13223 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13224 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13225 can be rewritten as (X & (C2 << C1)) != 0. */
13226 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13228 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13229 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13230 return fold_build2_loc (loc, code, type, tem,
13231 fold_convert_loc (loc, itype, arg1));
13233 /* Otherwise, for signed (arithmetic) shifts,
13234 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13235 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13236 else if (!TYPE_UNSIGNED (itype))
13237 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13238 arg000, build_int_cst (itype, 0));
13239 /* Otherwise, for unsigned (logical) shifts,
13240 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13241 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13242 else
13243 return omit_one_operand_loc (loc, type,
13244 code == EQ_EXPR ? integer_one_node
13245 : integer_zero_node,
13246 arg000);
13250 /* If we have (A & C) == C where C is a power of 2, convert this into
13251 (A & C) != 0. Similarly for NE_EXPR. */
13252 if (TREE_CODE (arg0) == BIT_AND_EXPR
13253 && integer_pow2p (TREE_OPERAND (arg0, 1))
13254 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13255 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13256 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13257 integer_zero_node));
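/* Worked example: "(x & 8) == 8" folds to "(x & 8) != 0", avoiding a
   second use of the constant in the comparison.  */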
13259 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13260 bit, then fold the expression into A < 0 or A >= 0. */
13261 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13262 if (tem)
13263 return tem;
13265 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13266 Similarly for NE_EXPR. */
13267 if (TREE_CODE (arg0) == BIT_AND_EXPR
13268 && TREE_CODE (arg1) == INTEGER_CST
13269 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13271 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13272 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13273 TREE_OPERAND (arg0, 1));
13274 tree dandnotc
13275 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13276 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13277 notc);
13278 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13279 if (integer_nonzerop (dandnotc))
13280 return omit_one_operand_loc (loc, type, rslt, arg0);
13283 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13284 Similarly for NE_EXPR. */
13285 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13286 && TREE_CODE (arg1) == INTEGER_CST
13287 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13289 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13290 tree candnotd
13291 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13292 TREE_OPERAND (arg0, 1),
13293 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13294 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13295 if (integer_nonzerop (candnotd))
13296 return omit_one_operand_loc (loc, type, rslt, arg0);
13299 /* If this is a comparison of a field, we may be able to simplify it. */
13300 if ((TREE_CODE (arg0) == COMPONENT_REF
13301 || TREE_CODE (arg0) == BIT_FIELD_REF)
13302 /* Handle the constant case even without -O
13303 to make sure the warnings are given. */
13304 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13306 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13307 if (t1)
13308 return t1;
13311 /* Optimize comparisons of strlen vs zero to a compare of the
13312 first character of the string vs zero. To wit,
13313 strlen(ptr) == 0 => *ptr == 0
13314 strlen(ptr) != 0 => *ptr != 0
13315 Other cases should reduce to one of these two (or a constant)
13316 due to the return value of strlen being unsigned. */
13317 if (TREE_CODE (arg0) == CALL_EXPR
13318 && integer_zerop (arg1))
13320 tree fndecl = get_callee_fndecl (arg0);
13322 if (fndecl
13323 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13324 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13325 && call_expr_nargs (arg0) == 1
13326 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13328 tree iref = build_fold_indirect_ref_loc (loc,
13329 CALL_EXPR_ARG (arg0, 0));
13330 return fold_build2_loc (loc, code, type, iref,
13331 build_int_cst (TREE_TYPE (iref), 0));
13335 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13336 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13337 if (TREE_CODE (arg0) == RSHIFT_EXPR
13338 && integer_zerop (arg1)
13339 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13341 tree arg00 = TREE_OPERAND (arg0, 0);
13342 tree arg01 = TREE_OPERAND (arg0, 1);
13343 tree itype = TREE_TYPE (arg00);
13344 if (TREE_INT_CST_HIGH (arg01) == 0
13345 && TREE_INT_CST_LOW (arg01)
13346 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13348 if (TYPE_UNSIGNED (itype))
13350 itype = signed_type_for (itype);
13351 arg00 = fold_convert_loc (loc, itype, arg00);
13353 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13354 type, arg00, build_zero_cst (itype));
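/* Worked example, assuming 32-bit int: "(x >> 31) != 0" folds to
   "x < 0" and "(x >> 31) == 0" to "x >= 0"; an unsigned operand is
   first converted to the corresponding signed type.  */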
13358 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13359 if (integer_zerop (arg1)
13360 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13361 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13362 TREE_OPERAND (arg0, 1));
13364 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13365 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13366 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13367 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13368 build_zero_cst (TREE_TYPE (arg0)));
13369 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13370 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13371 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13372 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13373 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13374 build_zero_cst (TREE_TYPE (arg0)));
13376 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13377 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13378 && TREE_CODE (arg1) == INTEGER_CST
13379 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13380 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13381 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13382 TREE_OPERAND (arg0, 1), arg1));
13384 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13385 (X & C) == 0 when C is a single bit. */
13386 if (TREE_CODE (arg0) == BIT_AND_EXPR
13387 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13388 && integer_zerop (arg1)
13389 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13391 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13392 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13393 TREE_OPERAND (arg0, 1));
13394 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13395 type, tem,
13396 fold_convert_loc (loc, TREE_TYPE (arg0),
13397 arg1));
13400 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13401 constant C is a power of two, i.e. a single bit. */
13402 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13403 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13404 && integer_zerop (arg1)
13405 && integer_pow2p (TREE_OPERAND (arg0, 1))
13406 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13407 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13409 tree arg00 = TREE_OPERAND (arg0, 0);
13410 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13411 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13414 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13415 when C is a power of two, i.e. a single bit.  */
13416 if (TREE_CODE (arg0) == BIT_AND_EXPR
13417 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13418 && integer_zerop (arg1)
13419 && integer_pow2p (TREE_OPERAND (arg0, 1))
13420 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13421 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13423 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13424 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13425 arg000, TREE_OPERAND (arg0, 1));
13426 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13427 tem, build_int_cst (TREE_TYPE (tem), 0));
13430 if (integer_zerop (arg1)
13431 && tree_expr_nonzero_p (arg0))
13433 tree res = constant_boolean_node (code == NE_EXPR, type);
13434 return omit_one_operand_loc (loc, type, res, arg0);
13437 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13438 if (TREE_CODE (arg0) == NEGATE_EXPR
13439 && TREE_CODE (arg1) == NEGATE_EXPR)
13440 return fold_build2_loc (loc, code, type,
13441 TREE_OPERAND (arg0, 0),
13442 fold_convert_loc (loc, TREE_TYPE (arg0),
13443 TREE_OPERAND (arg1, 0)));
13445 /* Fold "(X & C) op (Y & C)" as "(X ^ Y) & C op 0", and symmetries.  */
13446 if (TREE_CODE (arg0) == BIT_AND_EXPR
13447 && TREE_CODE (arg1) == BIT_AND_EXPR)
13449 tree arg00 = TREE_OPERAND (arg0, 0);
13450 tree arg01 = TREE_OPERAND (arg0, 1);
13451 tree arg10 = TREE_OPERAND (arg1, 0);
13452 tree arg11 = TREE_OPERAND (arg1, 1);
13453 tree itype = TREE_TYPE (arg0);
13455 if (operand_equal_p (arg01, arg11, 0))
13456 return fold_build2_loc (loc, code, type,
13457 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13458 fold_build2_loc (loc,
13459 BIT_XOR_EXPR, itype,
13460 arg00, arg10),
13461 arg01),
13462 build_zero_cst (itype));
13464 if (operand_equal_p (arg01, arg10, 0))
13465 return fold_build2_loc (loc, code, type,
13466 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13467 fold_build2_loc (loc,
13468 BIT_XOR_EXPR, itype,
13469 arg00, arg11),
13470 arg01),
13471 build_zero_cst (itype));
13473 if (operand_equal_p (arg00, arg11, 0))
13474 return fold_build2_loc (loc, code, type,
13475 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13476 fold_build2_loc (loc,
13477 BIT_XOR_EXPR, itype,
13478 arg01, arg10),
13479 arg00),
13480 build_zero_cst (itype));
13482 if (operand_equal_p (arg00, arg10, 0))
13483 return fold_build2_loc (loc, code, type,
13484 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13485 fold_build2_loc (loc,
13486 BIT_XOR_EXPR, itype,
13487 arg01, arg11),
13488 arg00),
13489 build_zero_cst (itype));
13492 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13493 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13495 tree arg00 = TREE_OPERAND (arg0, 0);
13496 tree arg01 = TREE_OPERAND (arg0, 1);
13497 tree arg10 = TREE_OPERAND (arg1, 0);
13498 tree arg11 = TREE_OPERAND (arg1, 1);
13499 tree itype = TREE_TYPE (arg0);
13501 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13502 operand_equal_p guarantees no side-effects so we don't need
13503 to use omit_one_operand on Z. */
13504 if (operand_equal_p (arg01, arg11, 0))
13505 return fold_build2_loc (loc, code, type, arg00,
13506 fold_convert_loc (loc, TREE_TYPE (arg00),
13507 arg10));
13508 if (operand_equal_p (arg01, arg10, 0))
13509 return fold_build2_loc (loc, code, type, arg00,
13510 fold_convert_loc (loc, TREE_TYPE (arg00),
13511 arg11));
13512 if (operand_equal_p (arg00, arg11, 0))
13513 return fold_build2_loc (loc, code, type, arg01,
13514 fold_convert_loc (loc, TREE_TYPE (arg01),
13515 arg10));
13516 if (operand_equal_p (arg00, arg10, 0))
13517 return fold_build2_loc (loc, code, type, arg01,
13518 fold_convert_loc (loc, TREE_TYPE (arg01),
13519 arg11));
13521 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13522 if (TREE_CODE (arg01) == INTEGER_CST
13523 && TREE_CODE (arg11) == INTEGER_CST)
13525 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13526 fold_convert_loc (loc, itype, arg11));
13527 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13528 return fold_build2_loc (loc, code, type, tem,
13529 fold_convert_loc (loc, itype, arg10));
13533 /* Attempt to simplify equality/inequality comparisons of complex
13534 values. Only lower the comparison if the result is known or
13535 can be simplified to a single scalar comparison. */
13536 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13537 || TREE_CODE (arg0) == COMPLEX_CST)
13538 && (TREE_CODE (arg1) == COMPLEX_EXPR
13539 || TREE_CODE (arg1) == COMPLEX_CST))
13541 tree real0, imag0, real1, imag1;
13542 tree rcond, icond;
13544 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13546 real0 = TREE_OPERAND (arg0, 0);
13547 imag0 = TREE_OPERAND (arg0, 1);
13549 else
13551 real0 = TREE_REALPART (arg0);
13552 imag0 = TREE_IMAGPART (arg0);
13555 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13557 real1 = TREE_OPERAND (arg1, 0);
13558 imag1 = TREE_OPERAND (arg1, 1);
13560 else
13562 real1 = TREE_REALPART (arg1);
13563 imag1 = TREE_IMAGPART (arg1);
13566 rcond = fold_binary_loc (loc, code, type, real0, real1);
13567 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13569 if (integer_zerop (rcond))
13571 if (code == EQ_EXPR)
13572 return omit_two_operands_loc (loc, type, boolean_false_node,
13573 imag0, imag1);
13574 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13576 else
13578 if (code == NE_EXPR)
13579 return omit_two_operands_loc (loc, type, boolean_true_node,
13580 imag0, imag1);
13581 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13585 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13586 if (icond && TREE_CODE (icond) == INTEGER_CST)
13588 if (integer_zerop (icond))
13590 if (code == EQ_EXPR)
13591 return omit_two_operands_loc (loc, type, boolean_false_node,
13592 real0, real1);
13593 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13595 else
13597 if (code == NE_EXPR)
13598 return omit_two_operands_loc (loc, type, boolean_true_node,
13599 real0, real1);
13600 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
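/* Illustrative example: comparing COMPLEX_EXPR <a, 0.0> against
   COMPLEX_EXPR <b, 1.0> for equality folds to false outright, since
   the imaginary halves are known unequal; if only one half is
   decided at compile time, the whole comparison reduces to a single
   scalar comparison of the other half.  */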
13605 return NULL_TREE;
13607 case LT_EXPR:
13608 case GT_EXPR:
13609 case LE_EXPR:
13610 case GE_EXPR:
13611 tem = fold_comparison (loc, code, type, op0, op1);
13612 if (tem != NULL_TREE)
13613 return tem;
13615 /* Transform comparisons of the form X +- C CMP X. */
13616 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13617 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13618 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13619 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13620 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13621 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13623 tree arg01 = TREE_OPERAND (arg0, 1);
13624 enum tree_code code0 = TREE_CODE (arg0);
13625 int is_positive;
13627 if (TREE_CODE (arg01) == REAL_CST)
13628 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13629 else
13630 is_positive = tree_int_cst_sgn (arg01);
13632 /* (X - c) > X becomes false. */
13633 if (code == GT_EXPR
13634 && ((code0 == MINUS_EXPR && is_positive >= 0)
13635 || (code0 == PLUS_EXPR && is_positive <= 0)))
13637 if (TREE_CODE (arg01) == INTEGER_CST
13638 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13639 fold_overflow_warning (("assuming signed overflow does not "
13640 "occur when assuming that (X - c) > X "
13641 "is always false"),
13642 WARN_STRICT_OVERFLOW_ALL);
13643 return constant_boolean_node (0, type);
13646 /* Likewise (X + c) < X becomes false. */
13647 if (code == LT_EXPR
13648 && ((code0 == PLUS_EXPR && is_positive >= 0)
13649 || (code0 == MINUS_EXPR && is_positive <= 0)))
13651 if (TREE_CODE (arg01) == INTEGER_CST
13652 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13653 fold_overflow_warning (("assuming signed overflow does not "
13654 "occur when assuming that "
13655 "(X + c) < X is always false"),
13656 WARN_STRICT_OVERFLOW_ALL);
13657 return constant_boolean_node (0, type);
13660 /* Convert (X - c) <= X to true. */
13661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13662 && code == LE_EXPR
13663 && ((code0 == MINUS_EXPR && is_positive >= 0)
13664 || (code0 == PLUS_EXPR && is_positive <= 0)))
13666 if (TREE_CODE (arg01) == INTEGER_CST
13667 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13668 fold_overflow_warning (("assuming signed overflow does not "
13669 "occur when assuming that "
13670 "(X - c) <= X is always true"),
13671 WARN_STRICT_OVERFLOW_ALL);
13672 return constant_boolean_node (1, type);
13675 /* Convert (X + c) >= X to true. */
13676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13677 && code == GE_EXPR
13678 && ((code0 == PLUS_EXPR && is_positive >= 0)
13679 || (code0 == MINUS_EXPR && is_positive <= 0)))
13681 if (TREE_CODE (arg01) == INTEGER_CST
13682 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13683 fold_overflow_warning (("assuming signed overflow does not "
13684 "occur when assuming that "
13685 "(X + c) >= X is always true"),
13686 WARN_STRICT_OVERFLOW_ALL);
13687 return constant_boolean_node (1, type);
13690 if (TREE_CODE (arg01) == INTEGER_CST)
13692 /* Convert X + c > X and X - c < X to true for integers. */
13693 if (code == GT_EXPR
13694 && ((code0 == PLUS_EXPR && is_positive > 0)
13695 || (code0 == MINUS_EXPR && is_positive < 0)))
13697 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13698 fold_overflow_warning (("assuming signed overflow does "
13699 "not occur when assuming that "
13700 "(X + c) > X is always true"),
13701 WARN_STRICT_OVERFLOW_ALL);
13702 return constant_boolean_node (1, type);
13705 if (code == LT_EXPR
13706 && ((code0 == MINUS_EXPR && is_positive > 0)
13707 || (code0 == PLUS_EXPR && is_positive < 0)))
13709 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13710 fold_overflow_warning (("assuming signed overflow does "
13711 "not occur when assuming that "
13712 "(X - c) < X is always true"),
13713 WARN_STRICT_OVERFLOW_ALL);
13714 return constant_boolean_node (1, type);
13717 /* Convert X + c <= X and X - c >= X to false for integers. */
13718 if (code == LE_EXPR
13719 && ((code0 == PLUS_EXPR && is_positive > 0)
13720 || (code0 == MINUS_EXPR && is_positive < 0)))
13722 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13723 fold_overflow_warning (("assuming signed overflow does "
13724 "not occur when assuming that "
13725 "(X + c) <= X is always false"),
13726 WARN_STRICT_OVERFLOW_ALL);
13727 return constant_boolean_node (0, type);
13730 if (code == GE_EXPR
13731 && ((code0 == MINUS_EXPR && is_positive > 0)
13732 || (code0 == PLUS_EXPR && is_positive < 0)))
13734 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13735 fold_overflow_warning (("assuming signed overflow does "
13736 "not occur when assuming that "
13737 "(X - c) >= X is always false"),
13738 WARN_STRICT_OVERFLOW_ALL);
13739 return constant_boolean_node (0, type);
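/* Summary of the integer cases above, for signed x with undefined
   overflow: "x + 1 > x" and "x - 1 < x" fold to true, while
   "x + 1 <= x" and "x - 1 >= x" fold to false, each accompanied by
   a fold_overflow_warning noting the signed-overflow assumption.  */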
13744 /* Comparisons with the highest or lowest possible integer of
13745 the specified precision will have known values. */
13747 tree arg1_type = TREE_TYPE (arg1);
13748 unsigned int width = TYPE_PRECISION (arg1_type);
13750 if (TREE_CODE (arg1) == INTEGER_CST
13751 && width <= HOST_BITS_PER_DOUBLE_INT
13752 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13754 HOST_WIDE_INT signed_max_hi;
13755 unsigned HOST_WIDE_INT signed_max_lo;
13756 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13758 if (width <= HOST_BITS_PER_WIDE_INT)
13760 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13761 - 1;
13762 signed_max_hi = 0;
13763 max_hi = 0;
13765 if (TYPE_UNSIGNED (arg1_type))
13767 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13768 min_lo = 0;
13769 min_hi = 0;
13771 else
13773 max_lo = signed_max_lo;
13774 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13775 min_hi = -1;
13778 else
13780 width -= HOST_BITS_PER_WIDE_INT;
13781 signed_max_lo = -1;
13782 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13783 - 1;
13784 max_lo = -1;
13785 min_lo = 0;
13787 if (TYPE_UNSIGNED (arg1_type))
13789 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13790 min_hi = 0;
13792 else
13794 max_hi = signed_max_hi;
13795 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13799 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13800 && TREE_INT_CST_LOW (arg1) == max_lo)
13801 switch (code)
13803 case GT_EXPR:
13804 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13806 case GE_EXPR:
13807 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13809 case LE_EXPR:
13810 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13812 case LT_EXPR:
13813 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13815 /* The GE_EXPR and LT_EXPR cases above are not normally
13816 reached because of previous transformations. */
13818 default:
13819 break;
13821 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13822 == max_hi
13823 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13824 switch (code)
13826 case GT_EXPR:
13827 arg1 = const_binop (PLUS_EXPR, arg1,
13828 build_int_cst (TREE_TYPE (arg1), 1));
13829 return fold_build2_loc (loc, EQ_EXPR, type,
13830 fold_convert_loc (loc,
13831 TREE_TYPE (arg1), arg0),
13832 arg1);
13833 case LE_EXPR:
13834 arg1 = const_binop (PLUS_EXPR, arg1,
13835 build_int_cst (TREE_TYPE (arg1), 1));
13836 return fold_build2_loc (loc, NE_EXPR, type,
13837 fold_convert_loc (loc, TREE_TYPE (arg1),
13838 arg0),
13839 arg1);
13840 default:
13841 break;
13843 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13844 == min_hi
13845 && TREE_INT_CST_LOW (arg1) == min_lo)
13846 switch (code)
13848 case LT_EXPR:
13849 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13851 case LE_EXPR:
13852 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13854 case GE_EXPR:
13855 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13857 case GT_EXPR:
13858 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13860 default:
13861 break;
13863 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13864 == min_hi
13865 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13866 switch (code)
13868 case GE_EXPR:
13869 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13870 return fold_build2_loc (loc, NE_EXPR, type,
13871 fold_convert_loc (loc,
13872 TREE_TYPE (arg1), arg0),
13873 arg1);
13874 case LT_EXPR:
13875 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13876 return fold_build2_loc (loc, EQ_EXPR, type,
13877 fold_convert_loc (loc, TREE_TYPE (arg1),
13878 arg0),
13879 arg1);
13880 default:
13881 break;
13884 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13885 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13886 && TYPE_UNSIGNED (arg1_type)
13887 /* We will flip the signedness of the comparison operator
13888 associated with the mode of arg1, so the sign bit is
13889 specified by this mode. Check that arg1 is the signed
13890 max associated with this sign bit. */
13891 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13892 /* signed_type does not work on pointer types. */
13893 && INTEGRAL_TYPE_P (arg1_type))
13895 /* The following case also applies to X < signed_max+1
13896 and X >= signed_max+1 because of previous transformations. */
13897 if (code == LE_EXPR || code == GT_EXPR)
13899 tree st = signed_type_for (arg1_type);
13900 return fold_build2_loc (loc,
13901 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13902 type, fold_convert_loc (loc, st, arg0),
13903 build_int_cst (st, 0));
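/* E.g. for unsigned char X, "X <= 127" becomes "(signed char) X >= 0"
   and "X > 127" becomes "(signed char) X < 0", turning a comparison
   against the unsigned signed-max into a sign-bit test.  */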
13909 /* If we are comparing an ABS_EXPR with a constant, we can
13910 convert all the cases into explicit comparisons, but they may
13911 well not be faster than doing the ABS and one comparison.
13912 But ABS (X) <= C is a range comparison, which becomes a subtraction
13913 and a comparison, and is probably faster. */
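/* E.g. "ABS (X) <= 4" becomes "X >= -4 && X <= 4".  */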
13914 if (code == LE_EXPR
13915 && TREE_CODE (arg1) == INTEGER_CST
13916 && TREE_CODE (arg0) == ABS_EXPR
13917 && ! TREE_SIDE_EFFECTS (arg0)
13918 && (0 != (tem = negate_expr (arg1)))
13919 && TREE_CODE (tem) == INTEGER_CST
13920 && !TREE_OVERFLOW (tem))
13921 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13922 build2 (GE_EXPR, type,
13923 TREE_OPERAND (arg0, 0), tem),
13924 build2 (LE_EXPR, type,
13925 TREE_OPERAND (arg0, 0), arg1));
13927 /* Convert ABS_EXPR<x> >= 0 to true. */
13928 strict_overflow_p = false;
13929 if (code == GE_EXPR
13930 && (integer_zerop (arg1)
13931 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13932 && real_zerop (arg1)))
13933 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13935 if (strict_overflow_p)
13936 fold_overflow_warning (("assuming signed overflow does not occur "
13937 "when simplifying comparison of "
13938 "absolute value and zero"),
13939 WARN_STRICT_OVERFLOW_CONDITIONAL);
13940 return omit_one_operand_loc (loc, type,
13941 constant_boolean_node (true, type),
13942 arg0);
13945 /* Convert ABS_EXPR<x> < 0 to false. */
13946 strict_overflow_p = false;
13947 if (code == LT_EXPR
13948 && (integer_zerop (arg1) || real_zerop (arg1))
13949 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13951 if (strict_overflow_p)
13952 fold_overflow_warning (("assuming signed overflow does not occur "
13953 "when simplifying comparison of "
13954 "absolute value and zero"),
13955 WARN_STRICT_OVERFLOW_CONDITIONAL);
13956 return omit_one_operand_loc (loc, type,
13957 constant_boolean_node (false, type),
13958 arg0);
13961 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13962 and similarly for >= into !=. */
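/* This turns a comparison against a variable power of two into a test
   that all bits at or above position Y are zero, e.g. for unsigned X,
   "X < (1 << Y)" becomes "(X >> Y) == 0".  */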
13963 if ((code == LT_EXPR || code == GE_EXPR)
13964 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13965 && TREE_CODE (arg1) == LSHIFT_EXPR
13966 && integer_onep (TREE_OPERAND (arg1, 0)))
13967 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13968 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13969 TREE_OPERAND (arg1, 1)),
13970 build_zero_cst (TREE_TYPE (arg0)));
13972 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13973 otherwise Y might be >= # of bits in X's type and thus e.g.
13974 (unsigned char) (1 << Y) for Y == 15 might be 0.
13975 If the cast is widening, then 1 << Y should have unsigned type,
13976 otherwise if Y is the number of bits in the signed shift type minus 1,
13977 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13978 Y == 31 might be 0xffffffff80000000. */
13979 if ((code == LT_EXPR || code == GE_EXPR)
13980 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13981 && CONVERT_EXPR_P (arg1)
13982 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13983 && (TYPE_PRECISION (TREE_TYPE (arg1))
13984 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13985 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13986 || (TYPE_PRECISION (TREE_TYPE (arg1))
13987 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13988 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13990 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13991 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13992 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13993 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13994 build_zero_cst (TREE_TYPE (arg0)));
13997 return NULL_TREE;
13999 case UNORDERED_EXPR:
14000 case ORDERED_EXPR:
14001 case UNLT_EXPR:
14002 case UNLE_EXPR:
14003 case UNGT_EXPR:
14004 case UNGE_EXPR:
14005 case UNEQ_EXPR:
14006 case LTGT_EXPR:
14007 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
14009 t1 = fold_relational_const (code, type, arg0, arg1);
14010 if (t1 != NULL_TREE)
14011 return t1;
14014 /* If the first operand is NaN, the result is constant. */
14015 if (TREE_CODE (arg0) == REAL_CST
14016 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
14017 && (code != LTGT_EXPR || ! flag_trapping_math))
14019 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
14020 ? integer_zero_node
14021 : integer_one_node;
14022 return omit_one_operand_loc (loc, type, t1, arg1);
14025 /* If the second operand is NaN, the result is constant. */
14026 if (TREE_CODE (arg1) == REAL_CST
14027 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
14028 && (code != LTGT_EXPR || ! flag_trapping_math))
14030 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
14031 ? integer_zero_node
14032 : integer_one_node;
14033 return omit_one_operand_loc (loc, type, t1, arg0);
14036 /* Simplify unordered comparison of something with itself. */
14037 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
14038 && operand_equal_p (arg0, arg1, 0))
14039 return constant_boolean_node (1, type);
14041 if (code == LTGT_EXPR
14042 && !flag_trapping_math
14043 && operand_equal_p (arg0, arg1, 0))
14044 return constant_boolean_node (0, type);
14046 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
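/* E.g. with float F1, F2, "(double) F1 < (double) F2" folds to
   "F1 < F2"; the widening conversions are exact, so the comparison
   in the narrower type gives the same result.  */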
14048 tree targ0 = strip_float_extensions (arg0);
14049 tree targ1 = strip_float_extensions (arg1);
14050 tree newtype = TREE_TYPE (targ0);
14052 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
14053 newtype = TREE_TYPE (targ1);
14055 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
14056 return fold_build2_loc (loc, code, type,
14057 fold_convert_loc (loc, newtype, targ0),
14058 fold_convert_loc (loc, newtype, targ1));
14061 return NULL_TREE;
14063 case COMPOUND_EXPR:
14064 /* When pedantic, a compound expression can be neither an lvalue
14065 nor an integer constant expression. */
14066 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14067 return NULL_TREE;
14068 /* Don't let (0, 0) be a null pointer constant. */
14069 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14070 : fold_convert_loc (loc, type, arg1);
14071 return pedantic_non_lvalue_loc (loc, tem);
14073 case COMPLEX_EXPR:
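/* Fold a COMPLEX_EXPR with constant operands to a COMPLEX_CST, and fold
   COMPLEX_EXPR <REALPART_EXPR <Z>, IMAGPART_EXPR <Z>> back to Z when
   the types match.  */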
14074 if ((TREE_CODE (arg0) == REAL_CST
14075 && TREE_CODE (arg1) == REAL_CST)
14076 || (TREE_CODE (arg0) == INTEGER_CST
14077 && TREE_CODE (arg1) == INTEGER_CST))
14078 return build_complex (type, arg0, arg1);
14079 if (TREE_CODE (arg0) == REALPART_EXPR
14080 && TREE_CODE (arg1) == IMAGPART_EXPR
14081 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14082 && operand_equal_p (TREE_OPERAND (arg0, 0),
14083 TREE_OPERAND (arg1, 0), 0))
14084 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14085 TREE_OPERAND (arg1, 0));
14086 return NULL_TREE;
14088 case ASSERT_EXPR:
14089 /* An ASSERT_EXPR should never be passed to fold_binary. */
14090 gcc_unreachable ();
14092 case VEC_PACK_TRUNC_EXPR:
14093 case VEC_PACK_FIX_TRUNC_EXPR:
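/* Fold a pack of two constant input vectors, each with nelts / 2
   elements, by converting every element to the narrower output element
   type (truncating integers, or fix-truncating from floats).  */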
14095 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14096 tree *elts;
14098 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14099 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14100 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14101 return NULL_TREE;
14103 elts = XALLOCAVEC (tree, nelts);
14104 if (!vec_cst_ctor_to_array (arg0, elts)
14105 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14106 return NULL_TREE;
14108 for (i = 0; i < nelts; i++)
14110 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14111 ? NOP_EXPR : FIX_TRUNC_EXPR,
14112 TREE_TYPE (type), elts[i]);
14113 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14114 return NULL_TREE;
14117 return build_vector (type, elts);
14120 case VEC_WIDEN_MULT_LO_EXPR:
14121 case VEC_WIDEN_MULT_HI_EXPR:
14122 case VEC_WIDEN_MULT_EVEN_EXPR:
14123 case VEC_WIDEN_MULT_ODD_EXPR:
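/* Fold a widening multiply of two constant vectors: select the LO/HI
   half or the EVEN/ODD elements of each input, then multiply the
   selected pairs into the wider result element type.  */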
14125 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14126 unsigned int out, ofs, scale;
14127 tree *elts;
14129 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14130 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14131 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14132 return NULL_TREE;
14134 elts = XALLOCAVEC (tree, nelts * 4);
14135 if (!vec_cst_ctor_to_array (arg0, elts)
14136 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14137 return NULL_TREE;
14139 if (code == VEC_WIDEN_MULT_LO_EXPR)
14140 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14141 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14142 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14143 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14144 scale = 1, ofs = 0;
14145 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14146 scale = 1, ofs = 1;
14148 for (out = 0; out < nelts; out++)
14150 unsigned int in1 = (out << scale) + ofs;
14151 unsigned int in2 = in1 + nelts * 2;
14152 tree t1, t2;
14154 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14155 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14157 if (t1 == NULL_TREE || t2 == NULL_TREE)
14158 return NULL_TREE;
14159 elts[out] = const_binop (MULT_EXPR, t1, t2);
14160 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14161 return NULL_TREE;
14164 return build_vector (type, elts);
14167 default:
14168 return NULL_TREE;
14169 } /* switch (code) */
14172 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14173 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14174 of GOTO_EXPR. */
14176 static tree
14177 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14179 switch (TREE_CODE (*tp))
14181 case LABEL_EXPR:
14182 return *tp;
14184 case GOTO_EXPR:
14185 *walk_subtrees = 0;
14187 /* ... fall through ... */
14189 default:
14190 return NULL_TREE;
14194 /* Return whether the sub-tree ST contains a label which is accessible from
14195 outside the sub-tree. */
14197 static bool
14198 contains_label_p (tree st)
14200 return
14201 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14204 /* Fold a ternary expression of code CODE and type TYPE with operands
14205 OP0, OP1, and OP2. Return the folded expression if folding is
14206 successful. Otherwise, return NULL_TREE. */
14208 tree
14209 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14210 tree op0, tree op1, tree op2)
14212 tree tem;
14213 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14214 enum tree_code_class kind = TREE_CODE_CLASS (code);
14216 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14217 && TREE_CODE_LENGTH (code) == 3);
14219 /* Strip any conversions that don't change the mode. This is safe
14220 for every expression, except for a comparison expression because
14221 its signedness is derived from its operands. So, in the latter
14222 case, only strip conversions that don't change the signedness.
14224 Note that this is done as an internal manipulation within the
14225 constant folder, in order to find the simplest representation of
14226 the arguments so that their form can be studied. In any cases,
14227 the appropriate type conversions should be put back in the tree
14228 that will get out of the constant folder. */
14229 if (op0)
14231 arg0 = op0;
14232 STRIP_NOPS (arg0);
14235 if (op1)
14237 arg1 = op1;
14238 STRIP_NOPS (arg1);
14241 if (op2)
14243 arg2 = op2;
14244 STRIP_NOPS (arg2);
14247 switch (code)
14249 case COMPONENT_REF:
14250 if (TREE_CODE (arg0) == CONSTRUCTOR
14251 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14253 unsigned HOST_WIDE_INT idx;
14254 tree field, value;
14255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14256 if (field == arg1)
14257 return value;
14259 return NULL_TREE;
14261 case COND_EXPR:
14262 case VEC_COND_EXPR:
14263 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14264 so all simple results must be passed through pedantic_non_lvalue. */
14265 if (TREE_CODE (arg0) == INTEGER_CST)
14267 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14268 tem = integer_zerop (arg0) ? op2 : op1;
14269 /* Only optimize constant conditions when the selected branch
14270 has the same type as the COND_EXPR. This avoids optimizing
14271 away "c ? x : throw", where the throw has a void type.
14272 Also avoid throwing away the operand that contains a label. */
14273 if ((!TREE_SIDE_EFFECTS (unused_op)
14274 || !contains_label_p (unused_op))
14275 && (! VOID_TYPE_P (TREE_TYPE (tem))
14276 || VOID_TYPE_P (type)))
14277 return pedantic_non_lvalue_loc (loc, tem);
14278 return NULL_TREE;
14280 else if (TREE_CODE (arg0) == VECTOR_CST)
14282 if (integer_all_onesp (arg0))
14283 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14284 if (integer_zerop (arg0))
14285 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
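/* A VEC_COND_EXPR with a constant mask is a shuffle: result element I
   is taken from ARG1 where mask element I is all ones, and from ARG2
   where it is zero.  */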
14287 if ((TREE_CODE (arg1) == VECTOR_CST
14288 || TREE_CODE (arg1) == CONSTRUCTOR)
14289 && (TREE_CODE (arg2) == VECTOR_CST
14290 || TREE_CODE (arg2) == CONSTRUCTOR))
14292 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14293 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14294 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14295 for (i = 0; i < nelts; i++)
14297 tree val = VECTOR_CST_ELT (arg0, i);
14298 if (integer_all_onesp (val))
14299 sel[i] = i;
14300 else if (integer_zerop (val))
14301 sel[i] = nelts + i;
14302 else /* Currently unreachable. */
14303 return NULL_TREE;
14305 tree t = fold_vec_perm (type, arg1, arg2, sel);
14306 if (t != NULL_TREE)
14307 return t;
14311 if (operand_equal_p (arg1, op2, 0))
14312 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14314 /* If we have A op B ? A : C, we may be able to convert this to a
14315 simpler expression, depending on the operation and the values
14316 of B and C. Signed zeros prevent all of these transformations,
14317 for reasons given above each one.
14319 Also try swapping the arguments and inverting the conditional. */
14320 if (COMPARISON_CLASS_P (arg0)
14321 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14322 arg1, TREE_OPERAND (arg0, 1))
14323 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14325 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14326 if (tem)
14327 return tem;
14330 if (COMPARISON_CLASS_P (arg0)
14331 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14332 op2,
14333 TREE_OPERAND (arg0, 1))
14334 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14336 location_t loc0 = expr_location_or (arg0, loc);
14337 tem = fold_invert_truthvalue (loc0, arg0);
14338 if (tem && COMPARISON_CLASS_P (tem))
14340 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14341 if (tem)
14342 return tem;
14346 /* If the second operand is simpler than the third, swap them
14347 since that produces better jump optimization results. */
14348 if (truth_value_p (TREE_CODE (arg0))
14349 && tree_swap_operands_p (op1, op2, false))
14351 location_t loc0 = expr_location_or (arg0, loc);
14352 /* See if this can be inverted. If it can't, possibly because
14353 it was a floating-point inequality comparison, don't do
14354 anything. */
14355 tem = fold_invert_truthvalue (loc0, arg0);
14356 if (tem)
14357 return fold_build3_loc (loc, code, type, tem, op2, op1);
14360 /* Convert A ? 1 : 0 to simply A. */
14361 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14362 : (integer_onep (op1)
14363 && !VECTOR_TYPE_P (type)))
14364 && integer_zerop (op2)
14365 /* If we try to convert OP0 to our type, the
14366 call to fold will try to move the conversion inside
14367 a COND, which will recurse. In that case, the COND_EXPR
14368 is probably the best choice, so leave it alone. */
14369 && type == TREE_TYPE (arg0))
14370 return pedantic_non_lvalue_loc (loc, arg0);
14372 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14373 over COND_EXPR in cases such as floating point comparisons. */
14374 if (integer_zerop (op1)
14375 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14376 : (integer_onep (op2)
14377 && !VECTOR_TYPE_P (type)))
14378 && truth_value_p (TREE_CODE (arg0)))
14379 return pedantic_non_lvalue_loc (loc,
14380 fold_convert_loc (loc, type,
14381 invert_truthvalue_loc (loc,
14382 arg0)));
14384 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
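/* E.g. for a 32-bit int A, "A < 0 ? 0x80000000 : 0" folds to
   "A & 0x80000000".  */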
14385 if (TREE_CODE (arg0) == LT_EXPR
14386 && integer_zerop (TREE_OPERAND (arg0, 1))
14387 && integer_zerop (op2)
14388 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14390 /* sign_bit_p looks through both zero and sign extensions,
14391 but for this optimization only sign extensions are
14392 usable. */
14393 tree tem2 = TREE_OPERAND (arg0, 0);
14394 while (tem != tem2)
14396 if (TREE_CODE (tem2) != NOP_EXPR
14397 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14399 tem = NULL_TREE;
14400 break;
14402 tem2 = TREE_OPERAND (tem2, 0);
14404 /* sign_bit_p only checks ARG1 bits within A's precision.
14405 If <sign bit of A> has wider type than A, bits outside
14406 of A's precision in <sign bit of A> need to be checked.
14407 If they are all 0, this optimization needs to be done
14408 in unsigned A's type; if they are all 1, in signed A's
14409 type; otherwise this can't be done. */
14410 if (tem
14411 && TYPE_PRECISION (TREE_TYPE (tem))
14412 < TYPE_PRECISION (TREE_TYPE (arg1))
14413 && TYPE_PRECISION (TREE_TYPE (tem))
14414 < TYPE_PRECISION (type))
14416 unsigned HOST_WIDE_INT mask_lo;
14417 HOST_WIDE_INT mask_hi;
14418 int inner_width, outer_width;
14419 tree tem_type;
14421 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14422 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14423 if (outer_width > TYPE_PRECISION (type))
14424 outer_width = TYPE_PRECISION (type);
14426 if (outer_width > HOST_BITS_PER_WIDE_INT)
14428 mask_hi = (HOST_WIDE_INT_M1U
14429 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14430 mask_lo = -1;
14432 else
14434 mask_hi = 0;
14435 mask_lo = (HOST_WIDE_INT_M1U
14436 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14438 if (inner_width > HOST_BITS_PER_WIDE_INT)
14440 mask_hi &= ~(HOST_WIDE_INT_M1U
14441 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14442 mask_lo = 0;
14444 else
14445 mask_lo &= ~(HOST_WIDE_INT_M1U
14446 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14448 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14449 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14451 tem_type = signed_type_for (TREE_TYPE (tem));
14452 tem = fold_convert_loc (loc, tem_type, tem);
14454 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14455 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14457 tem_type = unsigned_type_for (TREE_TYPE (tem));
14458 tem = fold_convert_loc (loc, tem_type, tem);
14460 else
14461 tem = NULL;
14464 if (tem)
14465 return
14466 fold_convert_loc (loc, type,
14467 fold_build2_loc (loc, BIT_AND_EXPR,
14468 TREE_TYPE (tem), tem,
14469 fold_convert_loc (loc,
14470 TREE_TYPE (tem),
14471 arg1)));
14474 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14475 already handled above. */
14476 if (TREE_CODE (arg0) == BIT_AND_EXPR
14477 && integer_onep (TREE_OPERAND (arg0, 1))
14478 && integer_zerop (op2)
14479 && integer_pow2p (arg1))
14481 tree tem = TREE_OPERAND (arg0, 0);
14482 STRIP_NOPS (tem);
14483 if (TREE_CODE (tem) == RSHIFT_EXPR
14484 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14485 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14486 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14487 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14488 TREE_OPERAND (tem, 0), arg1);
14491 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14492 is probably obsolete because the first operand should be a
14493 truth value (that's why we have the two cases above), but let's
14494 leave it in until we can confirm this for all front-ends. */
14495 if (integer_zerop (op2)
14496 && TREE_CODE (arg0) == NE_EXPR
14497 && integer_zerop (TREE_OPERAND (arg0, 1))
14498 && integer_pow2p (arg1)
14499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14500 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14501 arg1, OEP_ONLY_CONST))
14502 return pedantic_non_lvalue_loc (loc,
14503 fold_convert_loc (loc, type,
14504 TREE_OPERAND (arg0, 0)));
14506 /* Disable the transformations below for vectors, since
14507 fold_binary_op_with_conditional_arg may undo them immediately,
14508 yielding an infinite loop. */
14509 if (code == VEC_COND_EXPR)
14510 return NULL_TREE;
14512 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14513 if (integer_zerop (op2)
14514 && truth_value_p (TREE_CODE (arg0))
14515 && truth_value_p (TREE_CODE (arg1))
14516 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14517 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14518 : TRUTH_ANDIF_EXPR,
14519 type, fold_convert_loc (loc, type, arg0), arg1);
14521 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14522 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14523 && truth_value_p (TREE_CODE (arg0))
14524 && truth_value_p (TREE_CODE (arg1))
14525 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14527 location_t loc0 = expr_location_or (arg0, loc);
14528 /* Only perform transformation if ARG0 is easily inverted. */
14529 tem = fold_invert_truthvalue (loc0, arg0);
14530 if (tem)
14531 return fold_build2_loc (loc, code == VEC_COND_EXPR
14532 ? BIT_IOR_EXPR
14533 : TRUTH_ORIF_EXPR,
14534 type, fold_convert_loc (loc, type, tem),
14535 arg1);
14538 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14539 if (integer_zerop (arg1)
14540 && truth_value_p (TREE_CODE (arg0))
14541 && truth_value_p (TREE_CODE (op2))
14542 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14544 location_t loc0 = expr_location_or (arg0, loc);
14545 /* Only perform transformation if ARG0 is easily inverted. */
14546 tem = fold_invert_truthvalue (loc0, arg0);
14547 if (tem)
14548 return fold_build2_loc (loc, code == VEC_COND_EXPR
14549 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14550 type, fold_convert_loc (loc, type, tem),
14551 op2);
14554 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14555 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14556 && truth_value_p (TREE_CODE (arg0))
14557 && truth_value_p (TREE_CODE (op2))
14558 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14559 return fold_build2_loc (loc, code == VEC_COND_EXPR
14560 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14561 type, fold_convert_loc (loc, type, arg0), op2);
14563 return NULL_TREE;
14565 case CALL_EXPR:
14566 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14567 of fold_ternary on them. */
14568 gcc_unreachable ();
14570 case BIT_FIELD_REF:
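/* Fold a BIT_FIELD_REF of a constant vector that extracts whole
   elements on an element boundary, e.g. selecting element 2 of a V4SI,
   or an aligned sub-vector.  */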
14571 if ((TREE_CODE (arg0) == VECTOR_CST
14572 || (TREE_CODE (arg0) == CONSTRUCTOR
14573 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14574 && (type == TREE_TYPE (TREE_TYPE (arg0))
14575 || (TREE_CODE (type) == VECTOR_TYPE
14576 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14578 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14579 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14580 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14581 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14583 if (n != 0
14584 && (idx % width) == 0
14585 && (n % width) == 0
14586 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14588 idx = idx / width;
14589 n = n / width;
14591 if (TREE_CODE (arg0) == VECTOR_CST)
14593 if (n == 1)
14594 return VECTOR_CST_ELT (arg0, idx);
14596 tree *vals = XALLOCAVEC (tree, n);
14597 for (unsigned i = 0; i < n; ++i)
14598 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14599 return build_vector (type, vals);
14602 /* Constructor elements can be subvectors. */
14603 unsigned HOST_WIDE_INT k = 1;
14604 if (CONSTRUCTOR_NELTS (arg0) != 0)
14606 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14607 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14608 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14611 /* We keep an exact subset of the constructor elements. */
14612 if ((idx % k) == 0 && (n % k) == 0)
14614 if (CONSTRUCTOR_NELTS (arg0) == 0)
14615 return build_constructor (type, NULL);
14616 idx /= k;
14617 n /= k;
14618 if (n == 1)
14620 if (idx < CONSTRUCTOR_NELTS (arg0))
14621 return CONSTRUCTOR_ELT (arg0, idx)->value;
14622 return build_zero_cst (type);
14625 vec<constructor_elt, va_gc> *vals;
14626 vec_alloc (vals, n);
14627 for (unsigned i = 0;
14628 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14629 ++i)
14630 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14631 CONSTRUCTOR_ELT
14632 (arg0, idx + i)->value);
14633 return build_constructor (type, vals);
14635 /* The bitfield references a single constructor element. */
14636 else if (idx + n <= (idx / k + 1) * k)
14638 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14639 return build_zero_cst (type);
14640 else if (n == k)
14641 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14642 else
14643 return fold_build3_loc (loc, code, type,
14644 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14645 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14650 /* A bit-field-ref that referenced the full argument can be stripped. */
14651 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14652 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14653 && integer_zerop (op2))
14654 return fold_convert_loc (loc, type, arg0);
14656 /* On constants we can use native encode/interpret to constant
14657 fold (nearly) all BIT_FIELD_REFs. */
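/* E.g. a byte-aligned BIT_FIELD_REF of an INTEGER_CST is folded by
   serializing the constant into a byte buffer and re-interpreting the
   addressed bytes in the result type.  */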
14658 if (CONSTANT_CLASS_P (arg0)
14659 && can_native_interpret_type_p (type)
14660 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14661 /* This limitation should not be necessary; we just need to
14662 round this up to mode size. */
14663 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14664 /* Need bit-shifting of the buffer to relax the following. */
14665 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14667 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14668 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14669 unsigned HOST_WIDE_INT clen;
14670 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14671 /* ??? We cannot tell native_encode_expr to start at
14672 an arbitrary byte offset, so limit ourselves to a
14673 reasonable amount of work. */
14674 if (clen <= 4096)
14676 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14677 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14678 if (len > 0
14679 && len * BITS_PER_UNIT >= bitpos + bitsize)
14681 tree v = native_interpret_expr (type,
14682 b + bitpos / BITS_PER_UNIT,
14683 bitsize / BITS_PER_UNIT);
14684 if (v)
14685 return v;
14690 return NULL_TREE;
14692 case FMA_EXPR:
14693 /* For integers we can decompose the FMA if possible. */
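/* E.g. FMA_EXPR <3, 4, C> becomes "12 + C", and FMA_EXPR <A, B, 0>
   becomes "A * B".  */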
14694 if (TREE_CODE (arg0) == INTEGER_CST
14695 && TREE_CODE (arg1) == INTEGER_CST)
14696 return fold_build2_loc (loc, PLUS_EXPR, type,
14697 const_binop (MULT_EXPR, arg0, arg1), arg2);
14698 if (integer_zerop (arg2))
14699 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14701 return fold_fma (loc, type, arg0, arg1, arg2);
14703 case VEC_PERM_EXPR:
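/* Simplify a permutation with a constant selector: detect identity
   permutations, selections drawing from a single input, and fully
   constant operands, canonicalizing out-of-range selector values.  */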
14704 if (TREE_CODE (arg2) == VECTOR_CST)
14706 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14707 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14708 tree t;
14709 bool need_mask_canon = false;
14710 bool all_in_vec0 = true;
14711 bool all_in_vec1 = true;
14712 bool maybe_identity = true;
14713 bool single_arg = (op0 == op1);
14714 bool changed = false;
14716 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14717 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14718 for (i = 0; i < nelts; i++)
14720 tree val = VECTOR_CST_ELT (arg2, i);
14721 if (TREE_CODE (val) != INTEGER_CST)
14722 return NULL_TREE;
14724 sel[i] = TREE_INT_CST_LOW (val) & mask;
14725 if (TREE_INT_CST_HIGH (val)
14726 || ((unsigned HOST_WIDE_INT)
14727 TREE_INT_CST_LOW (val) != sel[i]))
14728 need_mask_canon = true;
14730 if (sel[i] < nelts)
14731 all_in_vec1 = false;
14732 else
14733 all_in_vec0 = false;
14735 if ((sel[i] & (nelts-1)) != i)
14736 maybe_identity = false;
14739 if (maybe_identity)
14741 if (all_in_vec0)
14742 return op0;
14743 if (all_in_vec1)
14744 return op1;
14747 if (all_in_vec0)
14748 op1 = op0;
14749 else if (all_in_vec1)
14751 op0 = op1;
14752 for (i = 0; i < nelts; i++)
14753 sel[i] -= nelts;
14754 need_mask_canon = true;
14757 if ((TREE_CODE (op0) == VECTOR_CST
14758 || TREE_CODE (op0) == CONSTRUCTOR)
14759 && (TREE_CODE (op1) == VECTOR_CST
14760 || TREE_CODE (op1) == CONSTRUCTOR))
14762 t = fold_vec_perm (type, op0, op1, sel);
14763 if (t != NULL_TREE)
14764 return t;
14767 if (op0 == op1 && !single_arg)
14768 changed = true;
14770 if (need_mask_canon && arg2 == op2)
14772 tree *tsel = XALLOCAVEC (tree, nelts);
14773 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14774 for (i = 0; i < nelts; i++)
14775 tsel[i] = build_int_cst (eltype, sel[i]);
14776 op2 = build_vector (TREE_TYPE (arg2), tsel);
14777 changed = true;
14780 if (changed)
14781 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14783 return NULL_TREE;
14785 default:
14786 return NULL_TREE;
14787 } /* switch (code) */
14790 /* Perform constant folding and related simplification of EXPR.
14791 The related simplifications include x*1 => x, x*0 => 0, etc.,
14792 and application of the associative law.
14793 NOP_EXPR conversions may be removed freely (as long as we
14794 are careful not to change the type of the overall expression).
14795 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14796 but we can constant-fold them if they have constant operands. */
14798 #ifdef ENABLE_FOLD_CHECKING
14799 # define fold(x) fold_1 (x)
14800 static tree fold_1 (tree);
14801 static
14802 #endif
14803 tree
14804 fold (tree expr)
14806 const tree t = expr;
14807 enum tree_code code = TREE_CODE (t);
14808 enum tree_code_class kind = TREE_CODE_CLASS (code);
14809 tree tem;
14810 location_t loc = EXPR_LOCATION (expr);
14812 /* Return right away if a constant. */
14813 if (kind == tcc_constant)
14814 return t;
14816 /* CALL_EXPR-like objects with variable numbers of operands are
14817 treated specially. */
14818 if (kind == tcc_vl_exp)
14820 if (code == CALL_EXPR)
14822 tem = fold_call_expr (loc, expr, false);
14823 return tem ? tem : expr;
14825 return expr;
14828 if (IS_EXPR_CODE_CLASS (kind))
14830 tree type = TREE_TYPE (t);
14831 tree op0, op1, op2;
14833 switch (TREE_CODE_LENGTH (code))
14835 case 1:
14836 op0 = TREE_OPERAND (t, 0);
14837 tem = fold_unary_loc (loc, code, type, op0);
14838 return tem ? tem : expr;
14839 case 2:
14840 op0 = TREE_OPERAND (t, 0);
14841 op1 = TREE_OPERAND (t, 1);
14842 tem = fold_binary_loc (loc, code, type, op0, op1);
14843 return tem ? tem : expr;
14844 case 3:
14845 op0 = TREE_OPERAND (t, 0);
14846 op1 = TREE_OPERAND (t, 1);
14847 op2 = TREE_OPERAND (t, 2);
14848 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14849 return tem ? tem : expr;
14850 default:
14851 break;
14855 switch (code)
14857 case ARRAY_REF:
14859 tree op0 = TREE_OPERAND (t, 0);
14860 tree op1 = TREE_OPERAND (t, 1);
14862 if (TREE_CODE (op1) == INTEGER_CST
14863 && TREE_CODE (op0) == CONSTRUCTOR
14864 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14866 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14867 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14868 unsigned HOST_WIDE_INT begin = 0;
14870 /* Find a matching index by means of a binary search. */
14871 while (begin != end)
14873 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14874 tree index = (*elts)[middle].index;
14876 if (TREE_CODE (index) == INTEGER_CST
14877 && tree_int_cst_lt (index, op1))
14878 begin = middle + 1;
14879 else if (TREE_CODE (index) == INTEGER_CST
14880 && tree_int_cst_lt (op1, index))
14881 end = middle;
14882 else if (TREE_CODE (index) == RANGE_EXPR
14883 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14884 begin = middle + 1;
14885 else if (TREE_CODE (index) == RANGE_EXPR
14886 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14887 end = middle;
14888 else
14889 return (*elts)[middle].value;
14893 return t;
14896 /* Return a VECTOR_CST if possible. */
14897 case CONSTRUCTOR:
14899 tree type = TREE_TYPE (t);
14900 if (TREE_CODE (type) != VECTOR_TYPE)
14901 return t;
14903 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14904 unsigned HOST_WIDE_INT idx, pos = 0;
14905 tree value;
14907 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14909 if (!CONSTANT_CLASS_P (value))
14910 return t;
14911 if (TREE_CODE (value) == VECTOR_CST)
14913 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14914 vec[pos++] = VECTOR_CST_ELT (value, i);
14916 else
14917 vec[pos++] = value;
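/* A vector CONSTRUCTOR may leave trailing elements implicit; they are
   zero, so pad the result out with zeros.  */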
14919 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14920 vec[pos] = build_zero_cst (TREE_TYPE (type));
14922 return build_vector (type, vec);
14925 case CONST_DECL:
14926 return fold (DECL_INITIAL (t));
14928 default:
14929 return t;
14930 } /* switch (code) */
14933 #ifdef ENABLE_FOLD_CHECKING
14934 #undef fold
14936 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14937 hash_table <pointer_hash <tree_node> >);
14938 static void fold_check_failed (const_tree, const_tree);
14939 void print_fold_checksum (const_tree);
14941 /* When --enable-checking=fold, compute a digest of expr before
14942 and after the actual fold call to verify that fold did not
14943 accidentally change the original expr. */
14945 tree
14946 fold (tree expr)
14948 tree ret;
14949 struct md5_ctx ctx;
14950 unsigned char checksum_before[16], checksum_after[16];
14951 hash_table <pointer_hash <tree_node> > ht;
14953 ht.create (32);
14954 md5_init_ctx (&ctx);
14955 fold_checksum_tree (expr, &ctx, ht);
14956 md5_finish_ctx (&ctx, checksum_before);
14957 ht.empty ();
14959 ret = fold_1 (expr);
14961 md5_init_ctx (&ctx);
14962 fold_checksum_tree (expr, &ctx, ht);
14963 md5_finish_ctx (&ctx, checksum_after);
14964 ht.dispose ();
14966 if (memcmp (checksum_before, checksum_after, 16))
14967 fold_check_failed (expr, ret);
14969 return ret;
14972 void
14973 print_fold_checksum (const_tree expr)
14975 struct md5_ctx ctx;
14976 unsigned char checksum[16], cnt;
14977 hash_table <pointer_hash <tree_node> > ht;
14979 ht.create (32);
14980 md5_init_ctx (&ctx);
14981 fold_checksum_tree (expr, &ctx, ht);
14982 md5_finish_ctx (&ctx, checksum);
14983 ht.dispose ();
14984 for (cnt = 0; cnt < 16; ++cnt)
14985 fprintf (stderr, "%02x", checksum[cnt]);
14986 putc ('\n', stderr);
14989 static void
14990 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14992 internal_error ("fold check: original tree changed by fold");
14995 static void
14996 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14997 hash_table <pointer_hash <tree_node> > ht)
14999 tree_node **slot;
15000 enum tree_code code;
15001 union tree_node buf;
15002 int i, len;
15004 recursive_label:
15005 if (expr == NULL)
15006 return;
15007 slot = ht.find_slot (expr, INSERT);
15008 if (*slot != NULL)
15009 return;
15010 *slot = CONST_CAST_TREE (expr);
15011 code = TREE_CODE (expr);
15012 if (TREE_CODE_CLASS (code) == tcc_declaration
15013 && DECL_ASSEMBLER_NAME_SET_P (expr))
15015 /* Allow DECL_ASSEMBLER_NAME to be modified. */
15016 memcpy ((char *) &buf, expr, tree_size (expr));
15017 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
15018 expr = (tree) &buf;
15020 else if (TREE_CODE_CLASS (code) == tcc_type
15021 && (TYPE_POINTER_TO (expr)
15022 || TYPE_REFERENCE_TO (expr)
15023 || TYPE_CACHED_VALUES_P (expr)
15024 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
15025 || TYPE_NEXT_VARIANT (expr)))
15027 /* Allow these fields to be modified. */
15028 tree tmp;
15029 memcpy ((char *) &buf, expr, tree_size (expr));
15030 expr = tmp = (tree) &buf;
15031 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
15032 TYPE_POINTER_TO (tmp) = NULL;
15033 TYPE_REFERENCE_TO (tmp) = NULL;
15034 TYPE_NEXT_VARIANT (tmp) = NULL;
15035 if (TYPE_CACHED_VALUES_P (tmp))
15037 TYPE_CACHED_VALUES_P (tmp) = 0;
15038 TYPE_CACHED_VALUES (tmp) = NULL;
15041 md5_process_bytes (expr, tree_size (expr), ctx);
15042 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
15043 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
15044 if (TREE_CODE_CLASS (code) != tcc_type
15045 && TREE_CODE_CLASS (code) != tcc_declaration
15046 && code != TREE_LIST
15047 && code != SSA_NAME
15048 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
15049 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
15050 switch (TREE_CODE_CLASS (code))
15052 case tcc_constant:
15053 switch (code)
15055 case STRING_CST:
15056 md5_process_bytes (TREE_STRING_POINTER (expr),
15057 TREE_STRING_LENGTH (expr), ctx);
15058 break;
15059 case COMPLEX_CST:
15060 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
15061 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
15062 break;
15063 case VECTOR_CST:
15064 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
15065 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15066 break;
15067 default:
15068 break;
15070 break;
15071 case tcc_exceptional:
15072 switch (code)
15074 case TREE_LIST:
15075 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15076 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15077 expr = TREE_CHAIN (expr);
15078 goto recursive_label;
15079 break;
15080 case TREE_VEC:
15081 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15082 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15083 break;
15084 default:
15085 break;
15087 break;
15088 case tcc_expression:
15089 case tcc_reference:
15090 case tcc_comparison:
15091 case tcc_unary:
15092 case tcc_binary:
15093 case tcc_statement:
15094 case tcc_vl_exp:
15095 len = TREE_OPERAND_LENGTH (expr);
15096 for (i = 0; i < len; ++i)
15097 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15098 break;
15099 case tcc_declaration:
15100 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15101 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15102 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15104 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15105 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15106 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15107 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15108 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15110 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15111 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15113 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15115 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15116 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15117 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15119 break;
15120 case tcc_type:
15121 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15122 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15123 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15124 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15125 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15126 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15127 if (INTEGRAL_TYPE_P (expr)
15128 || SCALAR_FLOAT_TYPE_P (expr))
15130 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15131 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15133 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15134 if (TREE_CODE (expr) == RECORD_TYPE
15135 || TREE_CODE (expr) == UNION_TYPE
15136 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15137 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15138 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15139 break;
15140 default:
15141 break;
15145 /* Helper function for outputting the checksum of a tree T. When
15146 debugging with gdb, you can "define mynext" to be "next" followed
15147 by "call debug_fold_checksum (op0)", then just trace down till the
15148 outputs differ. */
15150 DEBUG_FUNCTION void
15151 debug_fold_checksum (const_tree t)
15153 int i;
15154 unsigned char checksum[16];
15155 struct md5_ctx ctx;
15156 hash_table <pointer_hash <tree_node> > ht;
15157 ht.create (32);
15159 md5_init_ctx (&ctx);
15160 fold_checksum_tree (t, &ctx, ht);
15161 md5_finish_ctx (&ctx, checksum);
15162 ht.empty ();
15164 for (i = 0; i < 16; i++)
15165 fprintf (stderr, "%d ", checksum[i]);
15167 fprintf (stderr, "\n");
15170 #endif
15172 /* Fold a unary tree expression with code CODE of type TYPE with an
15173 operand OP0. LOC is the location of the resulting expression.
15174 Return a folded expression if successful. Otherwise, return a tree
15175 expression with code CODE of type TYPE with an operand OP0. */
15177 tree
15178 fold_build1_stat_loc (location_t loc,
15179 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15181 tree tem;
15182 #ifdef ENABLE_FOLD_CHECKING
15183 unsigned char checksum_before[16], checksum_after[16];
15184 struct md5_ctx ctx;
15185 hash_table <pointer_hash <tree_node> > ht;
15187 ht.create (32);
15188 md5_init_ctx (&ctx);
15189 fold_checksum_tree (op0, &ctx, ht);
15190 md5_finish_ctx (&ctx, checksum_before);
15191 ht.empty ();
15192 #endif
15194 tem = fold_unary_loc (loc, code, type, op0);
15195 if (!tem)
15196 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15198 #ifdef ENABLE_FOLD_CHECKING
15199 md5_init_ctx (&ctx);
15200 fold_checksum_tree (op0, &ctx, ht);
15201 md5_finish_ctx (&ctx, checksum_after);
15202 ht.dispose ();
15204 if (memcmp (checksum_before, checksum_after, 16))
15205 fold_check_failed (op0, tem);
15206 #endif
15207 return tem;
15210 /* Fold a binary tree expression with code CODE of type TYPE with
15211 operands OP0 and OP1. LOC is the location of the resulting
15212 expression. Return a folded expression if successful. Otherwise,
15213 return a tree expression with code CODE of type TYPE with operands
15214 OP0 and OP1. */
15216 tree
15217 fold_build2_stat_loc (location_t loc,
15218 enum tree_code code, tree type, tree op0, tree op1
15219 MEM_STAT_DECL)
15221 tree tem;
15222 #ifdef ENABLE_FOLD_CHECKING
15223 unsigned char checksum_before_op0[16],
15224 checksum_before_op1[16],
15225 checksum_after_op0[16],
15226 checksum_after_op1[16];
15227 struct md5_ctx ctx;
15228 hash_table <pointer_hash <tree_node> > ht;
15230 ht.create (32);
15231 md5_init_ctx (&ctx);
15232 fold_checksum_tree (op0, &ctx, ht);
15233 md5_finish_ctx (&ctx, checksum_before_op0);
15234 ht.empty ();
15236 md5_init_ctx (&ctx);
15237 fold_checksum_tree (op1, &ctx, ht);
15238 md5_finish_ctx (&ctx, checksum_before_op1);
15239 ht.empty ();
15240 #endif
15242 tem = fold_binary_loc (loc, code, type, op0, op1);
15243 if (!tem)
15244 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15246 #ifdef ENABLE_FOLD_CHECKING
15247 md5_init_ctx (&ctx);
15248 fold_checksum_tree (op0, &ctx, ht);
15249 md5_finish_ctx (&ctx, checksum_after_op0);
15250 ht.empty ();
15252 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15253 fold_check_failed (op0, tem);
15255 md5_init_ctx (&ctx);
15256 fold_checksum_tree (op1, &ctx, ht);
15257 md5_finish_ctx (&ctx, checksum_after_op1);
15258 ht.dispose ();
15260 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15261 fold_check_failed (op1, tem);
15262 #endif
15263 return tem;
15266 /* Fold a ternary tree expression with code CODE of type TYPE with
15267 operands OP0, OP1, and OP2. Return a folded expression if
15268 successful. Otherwise, return a tree expression with code CODE of
15269 type TYPE with operands OP0, OP1, and OP2. */
15271 tree
15272 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15273 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15275 tree tem;
15276 #ifdef ENABLE_FOLD_CHECKING
15277 unsigned char checksum_before_op0[16],
15278 checksum_before_op1[16],
15279 checksum_before_op2[16],
15280 checksum_after_op0[16],
15281 checksum_after_op1[16],
15282 checksum_after_op2[16];
15283 struct md5_ctx ctx;
15284 hash_table <pointer_hash <tree_node> > ht;
15286 ht.create (32);
15287 md5_init_ctx (&ctx);
15288 fold_checksum_tree (op0, &ctx, ht);
15289 md5_finish_ctx (&ctx, checksum_before_op0);
15290 ht.empty ();
15292 md5_init_ctx (&ctx);
15293 fold_checksum_tree (op1, &ctx, ht);
15294 md5_finish_ctx (&ctx, checksum_before_op1);
15295 ht.empty ();
15297 md5_init_ctx (&ctx);
15298 fold_checksum_tree (op2, &ctx, ht);
15299 md5_finish_ctx (&ctx, checksum_before_op2);
15300 ht.empty ();
15301 #endif
15303 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15304 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15305 if (!tem)
15306 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15308 #ifdef ENABLE_FOLD_CHECKING
15309 md5_init_ctx (&ctx);
15310 fold_checksum_tree (op0, &ctx, ht);
15311 md5_finish_ctx (&ctx, checksum_after_op0);
15312 ht.empty ();
15314 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15315 fold_check_failed (op0, tem);
15317 md5_init_ctx (&ctx);
15318 fold_checksum_tree (op1, &ctx, ht);
15319 md5_finish_ctx (&ctx, checksum_after_op1);
15320 ht.empty ();
15322 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15323 fold_check_failed (op1, tem);
15325 md5_init_ctx (&ctx);
15326 fold_checksum_tree (op2, &ctx, ht);
15327 md5_finish_ctx (&ctx, checksum_after_op2);
15328 ht.dispose ();
15330 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15331 fold_check_failed (op2, tem);
15332 #endif
15333 return tem;
15336 /* Fold a CALL_EXPR expression of type TYPE with function operand FN, the
15337 NARGS arguments in ARGARRAY, and a null static chain.
15338 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15339 of type TYPE from the given operands as constructed by build_call_array. */
15341 tree
15342 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15343 int nargs, tree *argarray)
15345 tree tem;
15346 #ifdef ENABLE_FOLD_CHECKING
15347 unsigned char checksum_before_fn[16],
15348 checksum_before_arglist[16],
15349 checksum_after_fn[16],
15350 checksum_after_arglist[16];
15351 struct md5_ctx ctx;
15352 hash_table <pointer_hash <tree_node> > ht;
15353 int i;
15355 ht.create (32);
15356 md5_init_ctx (&ctx);
15357 fold_checksum_tree (fn, &ctx, ht);
15358 md5_finish_ctx (&ctx, checksum_before_fn);
15359 ht.empty ();
15361 md5_init_ctx (&ctx);
15362 for (i = 0; i < nargs; i++)
15363 fold_checksum_tree (argarray[i], &ctx, ht);
15364 md5_finish_ctx (&ctx, checksum_before_arglist);
15365 ht.empty ();
15366 #endif
15368 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15370 #ifdef ENABLE_FOLD_CHECKING
15371 md5_init_ctx (&ctx);
15372 fold_checksum_tree (fn, &ctx, ht);
15373 md5_finish_ctx (&ctx, checksum_after_fn);
15374 ht.empty ();
15376 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15377 fold_check_failed (fn, tem);
15379 md5_init_ctx (&ctx);
15380 for (i = 0; i < nargs; i++)
15381 fold_checksum_tree (argarray[i], &ctx, ht);
15382 md5_finish_ctx (&ctx, checksum_after_arglist);
15383 ht.dispose ();
15385 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15386 fold_check_failed (NULL_TREE, tem);
15387 #endif
15388 return tem;
15391 /* Perform constant folding and related simplification of initializer
15392 expression EXPR. These behave identically to "fold_buildN" but ignore
15393 potential run-time traps and exceptions that fold must preserve. */
15395 #define START_FOLD_INIT \
15396 int saved_signaling_nans = flag_signaling_nans;\
15397 int saved_trapping_math = flag_trapping_math;\
15398 int saved_rounding_math = flag_rounding_math;\
15399 int saved_trapv = flag_trapv;\
15400 int saved_folding_initializer = folding_initializer;\
15401 flag_signaling_nans = 0;\
15402 flag_trapping_math = 0;\
15403 flag_rounding_math = 0;\
15404 flag_trapv = 0;\
15405 folding_initializer = 1;
15407 #define END_FOLD_INIT \
15408 flag_signaling_nans = saved_signaling_nans;\
15409 flag_trapping_math = saved_trapping_math;\
15410 flag_rounding_math = saved_rounding_math;\
15411 flag_trapv = saved_trapv;\
15412 folding_initializer = saved_folding_initializer;
15414 tree
15415 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15416 tree type, tree op)
15418 tree result;
15419 START_FOLD_INIT;
15421 result = fold_build1_loc (loc, code, type, op);
15423 END_FOLD_INIT;
15424 return result;
15427 tree
15428 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15429 tree type, tree op0, tree op1)
15431 tree result;
15432 START_FOLD_INIT;
15434 result = fold_build2_loc (loc, code, type, op0, op1);
15436 END_FOLD_INIT;
15437 return result;
15440 tree
15441 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15442 int nargs, tree *argarray)
15444 tree result;
15445 START_FOLD_INIT;
15447 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15449 END_FOLD_INIT;
15450 return result;
15453 #undef START_FOLD_INIT
15454 #undef END_FOLD_INIT
15456 /* Determine if the first argument is a multiple of the second argument.
15457 Return 0 if it is not, or if we cannot easily determine that it is.
15459 An example of the sort of thing we care about (at this point; this routine
15460 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15461 fold cases do now) is discovering that
15463 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15465 is a multiple of
15467 SAVE_EXPR (J * 8)
15469 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15471 This code also handles discovering that
15473 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15475 is a multiple of 8 so we don't have to worry about dealing with a
15476 possible remainder.
15478 Note that we *look* inside a SAVE_EXPR only to determine how it was
15479 calculated; it is not safe for fold to do much of anything else with the
15480 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15481 at run time. For example, the latter example above *cannot* be implemented
15482 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15483 evaluation time of the original SAVE_EXPR is not necessarily the same at
15484 the time the new expression is evaluated. The only optimization of this
15485 sort that would be valid is changing
15487 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15489 divided by 8 to
15491 SAVE_EXPR (I) * SAVE_EXPR (J)
15493 (where the same SAVE_EXPR (J) is used in the original and the
15494 transformed version). */
15496 int
15497 multiple_of_p (tree type, const_tree top, const_tree bottom)
15499 if (operand_equal_p (top, bottom, 0))
15500 return 1;
15502 if (TREE_CODE (type) != INTEGER_TYPE)
15503 return 0;
15505 switch (TREE_CODE (top))
15507 case BIT_AND_EXPR:
15508 /* Bitwise and provides a power of two multiple. If the mask is
15509 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15510 if (!integer_pow2p (bottom))
15511 return 0;
15512 /* FALLTHRU */
15514 case MULT_EXPR:
15515 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15516 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15518 case PLUS_EXPR:
15519 case MINUS_EXPR:
15520 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15521 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15523 case LSHIFT_EXPR:
15524 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15526 tree op1, t1;
15528 op1 = TREE_OPERAND (top, 1);
15529 /* const_binop may not detect overflow correctly,
15530 so check for it explicitly here. */
15531 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15532 > TREE_INT_CST_LOW (op1)
15533 && TREE_INT_CST_HIGH (op1) == 0
15534 && 0 != (t1 = fold_convert (type,
15535 const_binop (LSHIFT_EXPR,
15536 size_one_node,
15537 op1)))
15538 && !TREE_OVERFLOW (t1))
15539 return multiple_of_p (type, t1, bottom);
15541 return 0;
15543 case NOP_EXPR:
15544 /* Can't handle conversions from a non-integral or wider integral type. */
15545 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15546 || (TYPE_PRECISION (type)
15547 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15548 return 0;
15550 /* ... fall through ... */
15552 case SAVE_EXPR:
15553 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15555 case COND_EXPR:
15556 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15557 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15559 case INTEGER_CST:
15560 if (TREE_CODE (bottom) != INTEGER_CST
15561 || integer_zerop (bottom)
15562 || (TYPE_UNSIGNED (type)
15563 && (tree_int_cst_sgn (top) < 0
15564 || tree_int_cst_sgn (bottom) < 0)))
15565 return 0;
15566 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15567 top, bottom));
15569 default:
15570 return 0;
15574 /* Return true if CODE or TYPE is known to be non-negative. */
15576 static bool
15577 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15579 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15580 && truth_value_p (code))
15581 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15582 have a signed:1 type (where the values are -1 and 0). */
15583 return true;
15584 return false;
15587 /* Return true if (CODE OP0) is known to be non-negative. If the return
15588 value is based on the assumption that signed overflow is undefined,
15589 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15590 *STRICT_OVERFLOW_P. */
15592 bool
15593 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15594 bool *strict_overflow_p)
15596 if (TYPE_UNSIGNED (type))
15597 return true;
15599 switch (code)
15601 case ABS_EXPR:
15602 /* We can't return 1 if flag_wrapv is set because
15603 ABS_EXPR<INT_MIN> = INT_MIN. */
15604 if (!INTEGRAL_TYPE_P (type))
15605 return true;
15606 if (TYPE_OVERFLOW_UNDEFINED (type))
15608 *strict_overflow_p = true;
15609 return true;
15611 break;
15613 case NON_LVALUE_EXPR:
15614 case FLOAT_EXPR:
15615 case FIX_TRUNC_EXPR:
15616 return tree_expr_nonnegative_warnv_p (op0,
15617 strict_overflow_p);
15619 case NOP_EXPR:
15621 tree inner_type = TREE_TYPE (op0);
15622 tree outer_type = type;
15624 if (TREE_CODE (outer_type) == REAL_TYPE)
15626 if (TREE_CODE (inner_type) == REAL_TYPE)
15627 return tree_expr_nonnegative_warnv_p (op0,
15628 strict_overflow_p);
15629 if (INTEGRAL_TYPE_P (inner_type))
15631 if (TYPE_UNSIGNED (inner_type))
15632 return true;
15633 return tree_expr_nonnegative_warnv_p (op0,
15634 strict_overflow_p);
15637 else if (INTEGRAL_TYPE_P (outer_type))
15639 if (TREE_CODE (inner_type) == REAL_TYPE)
15640 return tree_expr_nonnegative_warnv_p (op0,
15641 strict_overflow_p);
15642 if (INTEGRAL_TYPE_P (inner_type))
15643 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15644 && TYPE_UNSIGNED (inner_type);
15647 break;
15649 default:
15650 return tree_simple_nonnegative_warnv_p (code, type);
15653 /* We don't know sign of `t', so be conservative and return false. */
15654 return false;
15657 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15658 value is based on the assumption that signed overflow is undefined,
15659 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15660 *STRICT_OVERFLOW_P. */
15662 bool
15663 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15664 tree op1, bool *strict_overflow_p)
15666 if (TYPE_UNSIGNED (type))
15667 return true;
15669 switch (code)
15671 case POINTER_PLUS_EXPR:
15672 case PLUS_EXPR:
15673 if (FLOAT_TYPE_P (type))
15674 return (tree_expr_nonnegative_warnv_p (op0,
15675 strict_overflow_p)
15676 && tree_expr_nonnegative_warnv_p (op1,
15677 strict_overflow_p));
15679 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15680 both unsigned and at least 2 bits shorter than the result. */
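/* E.g. two unsigned chars zero-extended to 32-bit int sum to at most
   255 + 255 = 510, which always fits in the non-negative range of
   the result.  */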
15681 if (TREE_CODE (type) == INTEGER_TYPE
15682 && TREE_CODE (op0) == NOP_EXPR
15683 && TREE_CODE (op1) == NOP_EXPR)
15685 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15686 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15687 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15688 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15690 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15691 TYPE_PRECISION (inner2)) + 1;
15692 return prec < TYPE_PRECISION (type);
15695 break;
15697 case MULT_EXPR:
15698 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15700 /* x * x is always non-negative for floating point x
15701 or without overflow. */
15702 if (operand_equal_p (op0, op1, 0)
15703 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15704 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15706 if (TYPE_OVERFLOW_UNDEFINED (type))
15707 *strict_overflow_p = true;
15708 return true;
15712 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15713 both unsigned and their total bits is shorter than the result. */
15714 if (TREE_CODE (type) == INTEGER_TYPE
15715 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15716 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15718 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15719 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15720 : TREE_TYPE (op0);
15721 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15722 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15723 : TREE_TYPE (op1);
15725 bool unsigned0 = TYPE_UNSIGNED (inner0);
15726 bool unsigned1 = TYPE_UNSIGNED (inner1);
15728 if (TREE_CODE (op0) == INTEGER_CST)
15729 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15731 if (TREE_CODE (op1) == INTEGER_CST)
15732 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15734 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15735 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15737 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15738 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15739 : TYPE_PRECISION (inner0);
15741 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15742 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15743 : TYPE_PRECISION (inner1);
15745 return precision0 + precision1 < TYPE_PRECISION (type);
15748 return false;
15750 case BIT_AND_EXPR:
15751 case MAX_EXPR:
15752 return (tree_expr_nonnegative_warnv_p (op0,
15753 strict_overflow_p)
15754 || tree_expr_nonnegative_warnv_p (op1,
15755 strict_overflow_p));
15757 case BIT_IOR_EXPR:
15758 case BIT_XOR_EXPR:
15759 case MIN_EXPR:
15760 case RDIV_EXPR:
15761 case TRUNC_DIV_EXPR:
15762 case CEIL_DIV_EXPR:
15763 case FLOOR_DIV_EXPR:
15764 case ROUND_DIV_EXPR:
15765 return (tree_expr_nonnegative_warnv_p (op0,
15766 strict_overflow_p)
15767 && tree_expr_nonnegative_warnv_p (op1,
15768 strict_overflow_p));
15770 case TRUNC_MOD_EXPR:
15771 case CEIL_MOD_EXPR:
15772 case FLOOR_MOD_EXPR:
15773 case ROUND_MOD_EXPR:
15774 return tree_expr_nonnegative_warnv_p (op0,
15775 strict_overflow_p);
15776 default:
15777 return tree_simple_nonnegative_warnv_p (code, type);
15780 /* We don't know sign of `t', so be conservative and return false. */
15781 return false;
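/* Editor's worked example (assuming a typical target with 8-bit char and
   32-bit int; not part of the original sources): for

     (int) (unsigned char) a + (int) (unsigned char) b

   the PLUS_EXPR rule computes prec = MAX (8, 8) + 1 = 9 < 32, so the sum
   (at most 255 + 255 = 510) is known nonnegative.  For

     (int) (unsigned char) a * (int) (unsigned char) b

   the MULT_EXPR rule needs 8 + 8 = 16 < 32 bits, which covers the
   maximal product 255 * 255 = 65025.  */
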
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));
    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
        CASE_INT_FN (BUILT_IN_CLZ):
        CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd argument is nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}

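/* Editor's note (illustrative, not part of the original sources): the
   POW case accepts only exponents that are even and exactly integer
   valued.  pow (x, 4.0) is therefore nonnegative for any x, because
   real_to_integer gives n = 4, (n & 1) == 0, and converting 4 back to a
   REAL_VALUE_TYPE reproduces the constant; pow (x, 4.5) fails the
   real_identical check and falls back to asking whether x itself is
   nonnegative.  */
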
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

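/* Editor's usage sketch (hypothetical caller, not part of the original
   sources): a transformation that is only valid for nonnegative
   operands, e.g. rewriting X / 16 as X >> 4 for signed X, would be
   guarded as

     if (tree_expr_nonnegative_p (op0))
       ... emit the cheaper shift form ...

   and the helper itself takes care of emitting the -Wstrict-overflow
   diagnostic when the answer relied on signed overflow being
   undefined.  */
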
/* Return true when (CODE OP0) is known to be nonzero.  For floating
   point we further ensure that the value is not a denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is known to be nonzero.  For floating
   point we further ensure that the value is not a denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}

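/* Editor's note on the PLUS_EXPR case above (illustrative, not part of
   the original sources): with overflow undefined, if both operands are
   nonnegative and at least one is strictly nonzero, the sum cannot be
   zero.  E.g. for signed int x, y with x >= 0 and y > 0, a comparison
   like "x + y == 0" can be folded to false.  *STRICT_OVERFLOW_P is
   deliberately left alone for the nonzero conclusion itself, since even
   a wrapped two's-complement sum of two nonnegative values is nonzero,
   as the code's own comment explains.  */
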
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not a denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}

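/* Editor's illustration (hypothetical declarations, not part of the
   original sources): given

     extern int x;

   the ADDR_EXPR case lets "&x != 0" fold to true under
   -fdelete-null-pointer-checks, whereas for

     extern int w __attribute__ ((weak));

   &w may legitimately compare equal to NULL, so DECL_WEAK blocks the
   folding.  Automatic variables are accepted even without the flag,
   since a stack object always has a nonnull address.  */
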
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

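/* Editor's usage sketch (hypothetical constants; "some_var" is an
   assumed non-constant tree, not part of the original sources):

     tree two   = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     fold_binary_to_constant (MULT_EXPR, integer_type_node,
                              two, three);        => INTEGER_CST 6
     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                              two, some_var);     => NULL_TREE

   i.e. the caller gets either a genuinely constant tree or NULL_TREE,
   never a partially simplified expression.  */
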
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

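/* Editor's illustration (not part of the original sources): for a
   source expression like "abc"[1], EXP is an ARRAY_REF of a STRING_CST
   with index 1; the checks above verify that the element is a single
   byte and that the index is in bounds, and the function returns an
   INTEGER_CST with the value 'b'.  An INDIRECT_REF such as
   *("abc" + 2) is routed through string_constant to recover the same
   string/index pair.  */
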
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

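/* Editor's worked example (not part of the original sources): to fold
   3 > 2, the code swaps the operands and turns GT_EXPR into LT_EXPR,
   reducing the question to 2 < 3 = 1.  To fold 3 >= 2 it instead
   computes 3 < 2 with INT_CST_LT, getting 0, and then inverts the
   result, yielding 1.  Only EQ and (signed or unsigned) LT are ever
   evaluated directly.  */
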
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside the
     return, has side effects.  If either doesn't, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check
     the left-hand side of the modify because it should always be a
     return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}

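/* Editor's illustration (hypothetical C fragments, not part of the
   original sources): the transformations above correspond to
   source-level rewrites such as

     *&x                 => x
     *(int *) &iarr      => iarr[0]
     *(float *) &cf      => __real__ cf     (cf a _Complex float)
     ((int *) &iarr)[1]  => iarr[1]

   each of which replaces an indirection with a direct reference that
   later folding and alias analysis can handle much better.  */
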
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

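/* Editor's worked example (not part of the original sources): for a
   power-of-two divisor the function uses the classic mask trick,
   (VALUE + DIVISOR - 1) & -DIVISOR.  Rounding 13 up to a multiple of 8
   gives (13 + 7) & -8 = 20 & ~7 = 16.  For a non-power-of-two divisor
   such as 12 it falls back to CEIL_DIV_EXPR followed by MULT_EXPR:
   rounding 13 up to a multiple of 12 gives ceil (13 / 12) * 12 = 24.  */
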
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

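/* Editor's usage sketch (hypothetical trees, assuming 4-byte int
   elements; not part of the original sources): for E1 = &a[3] and
   E2 = &a[1], both addresses split to the same core &a with constant
   bit positions 96 and 32, so

     HOST_WIDE_INT d;
     if (ptr_difference_const (e1, e2, &d))
       ... d == 8 ...

   Addresses with different bases, or with a non-constant offset on
   only one side, make the function return false.  */
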
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;
    default:
      break;
    }
  return NULL_TREE;
}

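/* Editor's illustration (not part of the original sources): callers
   that know the sign of a floating point result is irrelevant, for
   instance when the value feeds fabs or an even function such as cos,
   can use this to discard sign operations.  E.g. in

     cos (-x * y)

   the NEGATE_EXPR under the MULT_EXPR can be stripped (when
   sign-dependent rounding is not honored), giving cos (x * y); the
   negate_mathfn_p path handles the odd functions, whose argument may
   be rewritten the same way because only the sign of their result
   changes.  */
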